mirror of https://github.com/esphome/esphome.git synced 2025-11-05 17:41:49 +00:00

Compare commits

40 Commits

Author SHA1 Message Date
J. Nick Koston
3291488a8b compat 2025-10-21 12:24:39 -10:00
J. Nick Koston
75550b39f4 compat 2025-10-21 12:23:16 -10:00
J. Nick Koston
02e1ed2130 multiple networks 2025-10-21 11:57:06 -10:00
J. Nick Koston
2948264917 try to avoid some of the ram 2025-10-21 11:46:30 -10:00
J. Nick Koston
660411ac42 try to avoid some of the ram 2025-10-21 11:44:56 -10:00
J. Nick Koston
88e3f02c9c try to avoid some of the ram 2025-10-21 11:40:48 -10:00
J. Nick Koston
f3f419077b [wifi] Optimize WiFi network storage with FixedVector 2025-10-21 11:29:27 -10:00
Jeff Brown
8e8a2bde95 [light] Decouple AddressableLight and Light transition classes (#11166)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-21 10:37:29 -10:00
Petr Kejval
80265a6bd2 [sensor] Add optimistic option to heartbeat filter (#10993)
Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com>
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-21 09:17:07 -04:00
J. Nick Koston
87e9a7a1bd [climate] Remove unnecessary vector allocations in state save/restore (#11445) 2025-10-21 04:35:18 -05:00
J. Nick Koston
3aedfe8be3 [binary_sensor] Optimize AutorepeatFilter with FixedVector (#11444) 2025-10-21 04:30:13 -05:00
J. Nick Koston
7f2cc47ed6 [binary_sensor] Add compile test for auto repeat (#11443) 2025-10-21 04:25:59 -05:00
J. Nick Koston
a5542e0d2b [sensor] Optimize calibration and Or filters with FixedVector (#11437) 2025-10-20 21:38:05 -10:00
Keith Burzinski
66afe4a9be [climate] Add some integration tests (#11439) 2025-10-21 02:26:18 -05:00
J. Nick Koston
0ae9009e41 [ci] Fix clang-tidy split mode for core file changes (#11434) 2025-10-20 20:39:50 -10:00
J. Nick Koston
0b2f5fcd7e Add additional sensor filter tests (#11438) 2025-10-20 20:39:21 -10:00
J. Nick Koston
7a2887e2ed [analyze-memory] Improve symbol categorization accuracy (#11440) 2025-10-20 20:39:05 -10:00
J. Nick Koston
cd2d3f061d [espnow] Fix compilation error with initializer_list after #11433 (#11436) 2025-10-20 19:58:24 -10:00
J. Nick Koston
73f5d01c2d [core] Optimize automation actions memory usage with std::initializer_list (#11433) 2025-10-21 04:32:58 +00:00
Jesse Hills
0938609f7a [improv] Put next_url behind defines to save flash (#11420)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-21 16:58:26 +13:00
J. Nick Koston
77203f0cb4 [text_sensor] Optimize filters with FixedVector (1.6KB flash savings) (#11423)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-10-21 03:24:51 +00:00
J. Nick Koston
040130e357 [ci] Fix memory impact workflow for new components (#11421) 2025-10-21 16:02:07 +13:00
J. Nick Koston
85959e3004 [sensor,text_sensor,binary_sensor] Optimize filter parameters with std::initializer_list (#11426) 2025-10-21 15:47:13 +13:00
Jonathan Swoboda
a809a13729 [core] Add support for extern "C" includes (#11422) 2025-10-21 15:46:50 +13:00
J. Nick Koston
3b6ff615e8 [ci] Fix clang-tidy split decision to account for component dependencies (#11430) 2025-10-21 15:39:15 +13:00
J. Nick Koston
05216db5f0 ESP8266: Complete testing mode memory patches with DRAM and Flash (#11427) 2025-10-21 15:26:49 +13:00
J. Nick Koston
9f668b0c4b Add basic text_sensor tests (#11424) 2025-10-21 15:26:41 +13:00
J. Nick Koston
6a239f4d1c [ci] Prefer platform-specific tests for memory impact analysis (#11398) 2025-10-21 10:25:33 +13:00
J. Nick Koston
ffb0e854b6 [ci] Optimize clang-tidy for small PRs by avoiding unnecessary job splitting (#11402)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-21 10:24:46 +13:00
Jonathan Swoboda
6fbd0e3385 [esp32_hosted] Bump esp hosted (#11414) 2025-10-20 11:12:07 -10:00
dependabot[bot]
426511e78d Bump actions/download-artifact from 4.3.0 to 5.0.0 (#11419)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-20 11:11:15 -10:00
dependabot[bot]
97d91fee85 Bump pylint from 4.0.1 to 4.0.2 (#11418)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-20 11:10:33 -10:00
J. Nick Koston
0f4b54aa82 [esp32_improv, improv_base] Reduce flash usage by 352 bytes (#11406) 2025-10-20 11:07:39 -10:00
J. Nick Koston
1706a69fad [sensor] Optimize filter memory usage with ValueListFilter base class (#11407)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-21 09:38:49 +13:00
J. Nick Koston
e23d66a8cf [esp32] Automatic CONFIG_LWIP_MAX_SOCKETS configuration based on component needs (#11378) 2025-10-21 09:38:34 +13:00
J. Nick Koston
46101fd830 Add tests for FilterOutValueFilter and ThrottleWithPriorityFilter (#11408)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-20 09:25:03 -10:00
J. Nick Koston
e988905c2f [json] Add basic compile tests (#11409)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-21 07:31:59 +13:00
Patrick
abb57f08f5 [pipsolar] cleanup / refactoring (#10291)
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
2025-10-20 17:08:31 +00:00
EasilyBoredEngineer
ca2fe994a1 [espnow] Add transport platform for packet_transport (#11025)
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com>
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-20 10:44:20 -04:00
Peter Zich
03def13917 [hdc1080] Make HDC1080_CMD_CONFIGURATION failure a warning (and log it) (#11355)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-20 09:13:13 -04:00
96 changed files with 4144 additions and 1419 deletions

View File

@@ -170,11 +170,13 @@ jobs:
outputs:
integration-tests: ${{ steps.determine.outputs.integration-tests }}
clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
clang-tidy-mode: ${{ steps.determine.outputs.clang-tidy-mode }}
python-linters: ${{ steps.determine.outputs.python-linters }}
changed-components: ${{ steps.determine.outputs.changed-components }}
changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }}
component-test-count: ${{ steps.determine.outputs.component-test-count }}
changed-cpp-file-count: ${{ steps.determine.outputs.changed-cpp-file-count }}
memory_impact: ${{ steps.determine.outputs.memory-impact }}
steps:
- name: Check out code from GitHub
@@ -200,11 +202,13 @@ jobs:
# Extract individual fields
echo "integration-tests=$(echo "$output" | jq -r '.integration_tests')" >> $GITHUB_OUTPUT
echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
echo "clang-tidy-mode=$(echo "$output" | jq -r '.clang_tidy_mode')" >> $GITHUB_OUTPUT
echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
echo "changed-cpp-file-count=$(echo "$output" | jq -r '.changed_cpp_file_count')" >> $GITHUB_OUTPUT
echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT
integration-tests:
@@ -243,7 +247,7 @@ jobs:
. venv/bin/activate
pytest -vv --no-cov --tb=native -n auto tests/integration/
clang-tidy:
clang-tidy-single:
name: ${{ matrix.name }}
runs-on: ubuntu-24.04
needs:
@@ -261,22 +265,6 @@ jobs:
name: Run script/clang-tidy for ESP8266
options: --environment esp8266-arduino-tidy --grep USE_ESP8266
pio_cache_key: tidyesp8266
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 1/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 2/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 3/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 4/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 IDF
options: --environment esp32-idf-tidy --grep USE_ESP_IDF
@@ -357,6 +345,166 @@ jobs:
# yamllint disable-line rule:line-length
if: always()
clang-tidy-nosplit:
name: Run script/clang-tidy for ESP32 Arduino
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.clang-tidy-mode == 'nosplit'
env:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
- name: Check if full clang-tidy scan needed
id: check_full_scan
run: |
. venv/bin/activate
if python script/clang_tidy_hash.py --check; then
echo "full_scan=true" >> $GITHUB_OUTPUT
echo "reason=hash_changed" >> $GITHUB_OUTPUT
else
echo "full_scan=false" >> $GITHUB_OUTPUT
echo "reason=normal" >> $GITHUB_OUTPUT
fi
- name: Run clang-tidy
run: |
. venv/bin/activate
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
echo "Running FULL clang-tidy scan (hash changed)"
script/clang-tidy --all-headers --fix --environment esp32-arduino-tidy
else
echo "Running clang-tidy on changed files only"
script/clang-tidy --all-headers --fix --changed --environment esp32-arduino-tidy
fi
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
- name: Suggested changes
run: script/ci-suggest-changes
if: always()
clang-tidy-split:
name: ${{ matrix.name }}
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.clang-tidy-mode == 'split'
env:
GH_TOKEN: ${{ github.token }}
strategy:
fail-fast: false
max-parallel: 1
matrix:
include:
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 1/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 2/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 3/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 4/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
- name: Check if full clang-tidy scan needed
id: check_full_scan
run: |
. venv/bin/activate
if python script/clang_tidy_hash.py --check; then
echo "full_scan=true" >> $GITHUB_OUTPUT
echo "reason=hash_changed" >> $GITHUB_OUTPUT
else
echo "full_scan=false" >> $GITHUB_OUTPUT
echo "reason=normal" >> $GITHUB_OUTPUT
fi
- name: Run clang-tidy
run: |
. venv/bin/activate
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
echo "Running FULL clang-tidy scan (hash changed)"
script/clang-tidy --all-headers --fix ${{ matrix.options }}
else
echo "Running clang-tidy on changed files only"
script/clang-tidy --all-headers --fix --changed ${{ matrix.options }}
fi
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
- name: Suggested changes
run: script/ci-suggest-changes
if: always()
test-build-components-splitter:
name: Split components for intelligent grouping (40 weighted per batch)
runs-on: ubuntu-24.04
@@ -764,13 +912,13 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Download target analysis JSON
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: memory-analysis-target
path: ./memory-analysis
continue-on-error: true
- name: Download PR analysis JSON
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: memory-analysis-pr
path: ./memory-analysis
@@ -797,7 +945,9 @@ jobs:
- pylint
- pytest
- integration-tests
- clang-tidy
- clang-tidy-single
- clang-tidy-nosplit
- clang-tidy-split
- determine-jobs
- test-build-components-splitter
- test-build-components-split

View File

@@ -161,6 +161,7 @@ esphome/components/esp32_rmt_led_strip/* @jesserockz
esphome/components/esp8266/* @esphome/core
esphome/components/esp_ldo/* @clydebarrow
esphome/components/espnow/* @jesserockz
esphome/components/espnow/packet_transport/* @EasilyBoredEngineer
esphome/components/ethernet_info/* @gtjadsonsantos
esphome/components/event/* @nohat
esphome/components/exposure_notifications/* @OttoWinter

View File

@@ -231,9 +231,22 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
api_component = (name, mem)
break
# Combine all components to analyze: top ESPHome + all external + API if not already included
components_to_analyze = list(top_esphome_components) + list(
top_external_components
# Also include wifi_stack and other important system components if they exist
system_components_to_include = [
# Empty list - we've finished debugging symbol categorization
# Add component names here if you need to debug their symbols
]
system_components = [
(name, mem)
for name, mem in components
if name in system_components_to_include
]
# Combine all components to analyze: top ESPHome + all external + API if not already included + system components
components_to_analyze = (
list(top_esphome_components)
+ list(top_external_components)
+ system_components
)
if api_component and api_component not in components_to_analyze:
components_to_analyze.append(api_component)

View File

@@ -127,40 +127,39 @@ SYMBOL_PATTERNS = {
"tryget_socket_unconn",
"cs_create_ctrl_sock",
"netbuf_alloc",
"tcp_", # TCP protocol functions
"udp_", # UDP protocol functions
"lwip_", # LwIP stack functions
"eagle_lwip", # ESP-specific LwIP functions
"new_linkoutput", # Link output function
"acd_", # Address Conflict Detection (ACD)
"eth_", # Ethernet functions
"mac_enable_bb", # MAC baseband enable
"reassemble_and_dispatch", # Packet reassembly
],
# dhcp must come before libc to avoid "dhcp_select" matching "select" pattern
"dhcp": ["dhcp", "handle_dhcp"],
"ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"],
"wifi_stack": [
"ieee80211",
"hostap",
"sta_",
"ap_",
"scan_",
"wifi_",
"wpa_",
"wps_",
"esp_wifi",
"cnx_",
"wpa3_",
"sae_",
"wDev_",
"ic_",
"mac_",
"esf_buf",
"gWpaSm",
"sm_WPA",
"eapol_",
"owe_",
"wifiLowLevelInit",
"s_do_mapping",
"gScanStruct",
"ppSearchTxframe",
"ppMapWaitTxq",
"ppFillAMPDUBar",
"ppCheckTxConnTrafficIdle",
"ppCalTkipMic",
# Order matters! More specific categories must come before general ones.
# mdns must come before bluetooth to avoid "_mdns_disable_pcb" matching "ble_" pattern
"mdns_lib": ["mdns"],
# memory_mgmt must come before wifi_stack to catch mmu_hal_* symbols
"memory_mgmt": [
"mem_",
"memory_",
"tlsf_",
"memp_",
"pbuf_",
"pbuf_alloc",
"pbuf_copy_partial_pbuf",
"esp_mmu_map",
"mmu_hal_",
"s_do_mapping", # Memory mapping function, not WiFi
"hash_map_", # Hash map data structure
"umm_assimilate", # UMM malloc assimilation
],
"bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"],
"wifi_bt_coex": ["coex"],
# Bluetooth categories must come BEFORE wifi_stack to avoid misclassification
# Many BLE symbols contain patterns like "ble_" that would otherwise match wifi patterns
"bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"],
"bluedroid_bt": [
"bluedroid",
@@ -207,6 +206,61 @@ SYMBOL_PATTERNS = {
"copy_extra_byte_in_db",
"parse_read_local_supported_commands_response",
],
"bluetooth": [
"bt_",
"_ble_", # More specific than "ble_" to avoid matching "able_", "enable_", "disable_"
"l2c_",
"l2ble_", # L2CAP for BLE
"gatt_",
"gap_",
"hci_",
"btsnd_hcic_", # Bluetooth HCI command send functions
"BT_init",
"BT_tx_", # Bluetooth transmit functions
"esp_ble_", # Catch esp_ble_* functions
],
"bluetooth_ll": [
"llm_", # Link layer manager
"llc_", # Link layer control
"lld_", # Link layer driver
"ld_acl_", # Link layer ACL (Asynchronous Connection-Oriented)
"llcp_", # Link layer control protocol
"lmp_", # Link manager protocol
],
"wifi_bt_coex": ["coex"],
"wifi_stack": [
"ieee80211",
"hostap",
"sta_",
"wifi_ap_", # More specific than "ap_" to avoid matching "cap_", "map_"
"wifi_scan_", # More specific than "scan_" to avoid matching "_scan_" in other contexts
"wifi_",
"wpa_",
"wps_",
"esp_wifi",
"cnx_",
"wpa3_",
"sae_",
"wDev_",
"ic_mac_", # More specific than "mac_" to avoid matching emac_
"esf_buf",
"gWpaSm",
"sm_WPA",
"eapol_",
"owe_",
"wifiLowLevelInit",
# Removed "s_do_mapping" - this is memory management, not WiFi
"gScanStruct",
"ppSearchTxframe",
"ppMapWaitTxq",
"ppFillAMPDUBar",
"ppCheckTxConnTrafficIdle",
"ppCalTkipMic",
"phy_force_wifi",
"phy_unforce_wifi",
"write_wifi_chan",
"wifi_track_pll",
],
"crypto_math": [
"ecp_",
"bignum_",
@@ -231,13 +285,36 @@ SYMBOL_PATTERNS = {
"p_256_init_curve",
"shift_sub_rows",
"rshift",
"rijndaelEncrypt", # AES Rijndael encryption
],
# System and Arduino core functions must come before libc
"esp_system": [
"system_", # ESP system functions
"postmortem_", # Postmortem reporting
],
"arduino_core": [
"pinMode",
"resetPins",
"millis",
"micros",
"delay(", # More specific - Arduino delay function with parenthesis
"delayMicroseconds",
"digitalWrite",
"digitalRead",
],
"sntp": ["sntp_", "sntp_recv"],
"scheduler": [
"run_scheduled_",
"compute_scheduled_",
"event_TaskQueue",
],
"hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"],
"libc": [
"printf",
"scanf",
"malloc",
"free",
"_free", # More specific than "free" to match _free, __free_r, etc. but not arbitrary "free" substring
"umm_free", # UMM malloc free function
"memcpy",
"memset",
"strcpy",
@@ -259,7 +336,7 @@ SYMBOL_PATTERNS = {
"_setenv_r",
"_tzset_unlocked_r",
"__tzcalc_limits",
"select",
"_select", # More specific than "select" to avoid matching "dhcp_select", etc.
"scalbnf",
"strtof",
"strtof_l",
@@ -316,8 +393,24 @@ SYMBOL_PATTERNS = {
"CSWTCH$",
"dst$",
"sulp",
"_strtol_l", # String to long with locale
"__cvt", # Convert
"__utoa", # Unsigned to ASCII
"__global_locale", # Global locale
"_ctype_", # Character type
"impure_data", # Impure data
],
"string_ops": [
"strcmp",
"strncmp",
"strchr",
"strstr",
"strtok",
"strdup",
"strncasecmp_P", # String compare (case insensitive, from program memory)
"strnlen_P", # String length (from program memory)
"strncat_P", # String concatenate (from program memory)
],
"string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"],
"memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"],
"file_io": [
"fread",
@@ -338,10 +431,26 @@ SYMBOL_PATTERNS = {
"vsscanf",
],
"cpp_anonymous": ["_GLOBAL__N_", "n$"],
"cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"],
"exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"],
# Plain C patterns only - C++ symbols will be categorized via DEMANGLED_PATTERNS
"nvs": ["nvs_"], # Plain C NVS functions
"ota": ["ota_", "OTA", "esp_ota", "app_desc"],
# cpp_runtime: Removed _ZN, _ZL to let DEMANGLED_PATTERNS categorize C++ symbols properly
# Only keep patterns that are truly runtime-specific and not categorizable by namespace
"cpp_runtime": ["__cxx", "_ZSt", "__gxx_personality", "_Z16"],
"exception_handling": [
"__cxa_",
"_Unwind_",
"__gcc_personality",
"uw_frame_state",
"search_object", # Search for exception handling object
"get_cie_encoding", # Get CIE encoding
"add_fdes", # Add frame description entries
"fde_unencoded_compare", # Compare FDEs
"fde_mixed_encoding_compare", # Compare mixed encoding FDEs
"frame_downheap", # Frame heap operations
"frame_heapsort", # Frame heap sorting
],
"static_init": ["_GLOBAL__sub_I_"],
"mdns_lib": ["mdns"],
"phy_radio": [
"phy_",
"rf_",
@@ -394,10 +503,47 @@ SYMBOL_PATTERNS = {
"txcal_debuge_mode",
"ant_wifitx_cfg",
"reg_init_begin",
"tx_cap_init", # TX capacitance init
"ram_set_txcap", # RAM TX capacitance setting
"tx_atten_", # TX attenuation
"txiq_", # TX I/Q calibration
"ram_cal_", # RAM calibration
"ram_rxiq_", # RAM RX I/Q
"readvdd33", # Read VDD33
"test_tout", # Test timeout
"tsen_meas", # Temperature sensor measurement
"bbpll_cal", # Baseband PLL calibration
"set_cal_", # Set calibration
"set_rfanagain_", # Set RF analog gain
"set_txdc_", # Set TX DC
"get_vdd33_", # Get VDD33
"gen_rx_gain_table", # Generate RX gain table
"ram_ana_inf_gating_en", # RAM analog interface gating enable
"tx_cont_en", # TX continuous enable
"tx_delay_cfg", # TX delay configuration
"tx_gain_table_set", # TX gain table set
"check_and_reset_hw_deadlock", # Hardware deadlock check
"s_config", # System/hardware config
"chan14_mic_cfg", # Channel 14 MIC config
],
"wifi_phy_pp": [
"pp_",
"ppT",
"ppR",
"ppP",
"ppInstall",
"ppCalTxAMPDULength",
"ppCheckTx", # Packet processor TX check
"ppCal", # Packet processor calibration
"HdlAllBuffedEb", # Handle buffered EB
],
"wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"],
"wifi_lmac": ["lmac"],
"wifi_device": ["wdev", "wDev_"],
"wifi_device": [
"wdev",
"wDev_",
"ic_set_sta", # Set station mode
"ic_set_vif", # Set virtual interface
],
"power_mgmt": [
"pm_",
"sleep",
@@ -406,15 +552,7 @@ SYMBOL_PATTERNS = {
"deep_sleep",
"power_down",
"g_pm",
],
"memory_mgmt": [
"mem_",
"memory_",
"tlsf_",
"memp_",
"pbuf_",
"pbuf_alloc",
"pbuf_copy_partial_pbuf",
"pmc", # Power Management Controller
],
"hal_layer": ["hal_"],
"clock_mgmt": [
@@ -439,7 +577,6 @@ SYMBOL_PATTERNS = {
"error_handling": ["panic", "abort", "assert", "error_", "fault"],
"authentication": ["auth"],
"ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"],
"dhcp": ["dhcp", "handle_dhcp"],
"ethernet_phy": [
"emac_",
"eth_phy_",
@@ -618,7 +755,15 @@ SYMBOL_PATTERNS = {
"ampdu_dispatch_upto",
],
"ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"],
"rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"],
"rate_control": [
"rssi_margin",
"rcGetSched",
"get_rate_fcc_index",
"rcGetRate", # Get rate
"rc_get_", # Rate control getters
"rc_set_", # Rate control setters
"rc_enable_", # Rate control enable functions
],
"nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"],
"channel_mgmt": ["chm_init", "chm_set_current_channel"],
"trace": ["trc_init", "trc_onAmpduOp"],
@@ -799,31 +944,18 @@ SYMBOL_PATTERNS = {
"supports_interlaced_inquiry_scan",
"supports_reading_remote_extended_features",
],
"bluetooth_ll": [
"lld_pdu_",
"ld_acl_",
"lld_stop_ind_handler",
"lld_evt_winsize_change",
"config_lld_evt_funcs_reset",
"config_lld_funcs_reset",
"config_llm_funcs_reset",
"llm_set_long_adv_data",
"lld_retry_tx_prog",
"llc_link_sup_to_ind_handler",
"config_llc_funcs_reset",
"lld_evt_rxwin_compute",
"config_btdm_funcs_reset",
"config_ea_funcs_reset",
"llc_defalut_state_tab_reset",
"config_rwip_funcs_reset",
"ke_lmp_rx_flooding_detect",
],
}
# Demangled patterns: patterns found in demangled C++ names
DEMANGLED_PATTERNS = {
"gpio_driver": ["GPIO"],
"uart_driver": ["UART"],
# mdns_lib must come before network_stack to avoid "udp" matching "_udpReadBuffer" in MDNSResponder
"mdns_lib": [
"MDNSResponder",
"MDNSImplementation",
"MDNS",
],
"network_stack": [
"lwip",
"tcp",
@@ -836,6 +968,24 @@ DEMANGLED_PATTERNS = {
"ethernet",
"ppp",
"slip",
"UdpContext", # UDP context class
"DhcpServer", # DHCP server class
],
"arduino_core": [
"String::", # Arduino String class
"Print::", # Arduino Print class
"HardwareSerial::", # Serial class
"IPAddress::", # IP address class
"EspClass::", # ESP class
"experimental::_SPI", # Experimental SPI
],
"ota": [
"UpdaterClass",
"Updater::",
],
"wifi": [
"ESP8266WiFi",
"WiFi::",
],
"wifi_stack": ["NetworkInterface"],
"nimble_bt": [
@@ -854,7 +1004,6 @@ DEMANGLED_PATTERNS = {
"rtti": ["__type_info", "__class_type_info"],
"web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"],
"async_tcp": ["AsyncClient", "AsyncServer"],
"mdns_lib": ["mdns"],
"json_lib": [
"ArduinoJson",
"JsonDocument",

View File

@@ -155,6 +155,17 @@ def _validate_api_config(config: ConfigType) -> ConfigType:
return config
def _consume_api_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for API component."""
from esphome.components import socket
# API needs 1 listening socket + typically 3 concurrent client connections
# (not max_connections, which is the upper limit rarely reached)
sockets_needed = 1 + 3
socket.consume_sockets(sockets_needed, "api")(config)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
@@ -222,6 +233,7 @@ CONFIG_SCHEMA = cv.All(
).extend(cv.COMPONENT_SCHEMA),
cv.rename_key(CONF_SERVICES, CONF_ACTIONS),
_validate_api_config,
_consume_api_sockets,
)
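
Several hunks in this changeset (api above, ota and esp32_camera_web_server further down) append the same kind of validator via cv.All(): it calls socket.consume_sockets(n, "<component>")(config) so the component's socket needs are recorded during config validation and summed later by the esp32 platform code. A standalone sketch of that registry pattern; the real implementation lives in esphome.components.socket and keeps its counts in CORE.data[KEY_SOCKET_CONSUMERS], so the module-level dict here is a simplification:

# Simplified model of the socket-consumer registry used in this changeset.
# Real code: esphome.components.socket, with counts kept in
# CORE.data[KEY_SOCKET_CONSUMERS] as a dict of component name -> socket count.
socket_consumers: dict[str, int] = {}

def consume_sockets(count: int, component: str):
    """Return a config validator that records this component's socket needs."""
    def validator(config):
        socket_consumers[component] = socket_consumers.get(component, 0) + count
        return config
    return validator

# Registrations added in this changeset:
consume_sockets(1 + 3, "api")({})                  # 1 listening socket + 3 clients
consume_sockets(1, "ota")({})                      # 1 listening socket
consume_sockets(3, "esp32_camera_web_server")({})  # 1 listening socket + 2 clients

assert sum(socket_consumers.values()) == 8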

View File

@@ -1143,7 +1143,7 @@ message ListEntitiesSelectResponse {
reserved 4; // Deprecated: was string unique_id
string icon = 5 [(field_ifdef) = "USE_ENTITY_ICON"];
repeated string options = 6 [(container_pointer) = "std::vector"];
repeated string options = 6 [(container_pointer) = "FixedVector"];
bool disabled_by_default = 7;
EntityCategory entity_category = 8;
uint32 device_id = 9 [(field_ifdef) = "USE_DEVICES"];

View File

@@ -1534,7 +1534,7 @@ class ListEntitiesSelectResponse final : public InfoResponseProtoMessage {
#ifdef HAS_PROTO_MESSAGE_DUMP
const char *message_name() const override { return "list_entities_select_response"; }
#endif
const std::vector<std::string> *options{};
const FixedVector<std::string> *options{};
void encode(ProtoWriteBuffer buffer) const override;
void calculate_size(ProtoSize &size) const override;
#ifdef HAS_PROTO_MESSAGE_DUMP

View File

@@ -264,20 +264,31 @@ async def delayed_off_filter_to_code(config, filter_id):
),
)
async def autorepeat_filter_to_code(config, filter_id):
timings = []
if len(config) > 0:
timings.extend(
(conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON])
for conf in config
)
else:
timings.append(
(
cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds,
cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds,
cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds,
timings = [
cg.StructInitializer(
cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"),
("delay", conf[CONF_DELAY]),
("time_off", conf[CONF_TIME_OFF]),
("time_on", conf[CONF_TIME_ON]),
)
)
for conf in config
]
else:
timings = [
cg.StructInitializer(
cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"),
("delay", cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds),
(
"time_off",
cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds,
),
(
"time_on",
cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds,
),
)
]
var = cg.new_Pvariable(filter_id, timings)
await cg.register_component(var, {})
return var

View File

@@ -51,7 +51,7 @@ void BinarySensor::add_filter(Filter *filter) {
last_filter->next_ = filter;
}
}
void BinarySensor::add_filters(const std::vector<Filter *> &filters) {
void BinarySensor::add_filters(std::initializer_list<Filter *> filters) {
for (Filter *filter : filters) {
this->add_filter(filter);
}

View File

@@ -4,7 +4,7 @@
#include "esphome/core/helpers.h"
#include "esphome/components/binary_sensor/filter.h"
#include <vector>
#include <initializer_list>
namespace esphome {
@@ -48,7 +48,7 @@ class BinarySensor : public StatefulEntityBase<bool>, public EntityBase_DeviceCl
void publish_initial_state(bool new_state);
void add_filter(Filter *filter);
void add_filters(const std::vector<Filter *> &filters);
void add_filters(std::initializer_list<Filter *> filters);
// ========== INTERNAL METHODS ==========
// (In most use cases you won't need these)

View File

@@ -1,7 +1,6 @@
#include "filter.h"
#include "binary_sensor.h"
#include <utility>
namespace esphome {
@@ -68,7 +67,7 @@ float DelayedOffFilter::get_setup_priority() const { return setup_priority::HARD
optional<bool> InvertFilter::new_value(bool value) { return !value; }
AutorepeatFilter::AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings) : timings_(std::move(timings)) {}
AutorepeatFilter::AutorepeatFilter(std::initializer_list<AutorepeatFilterTiming> timings) : timings_(timings) {}
optional<bool> AutorepeatFilter::new_value(bool value) {
if (value) {

View File

@@ -4,8 +4,6 @@
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include <vector>
namespace esphome {
namespace binary_sensor {
@@ -82,11 +80,6 @@ class InvertFilter : public Filter {
};
struct AutorepeatFilterTiming {
AutorepeatFilterTiming(uint32_t delay, uint32_t off, uint32_t on) {
this->delay = delay;
this->time_off = off;
this->time_on = on;
}
uint32_t delay;
uint32_t time_off;
uint32_t time_on;
@@ -94,7 +87,7 @@ struct AutorepeatFilterTiming {
class AutorepeatFilter : public Filter, public Component {
public:
explicit AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings);
explicit AutorepeatFilter(std::initializer_list<AutorepeatFilterTiming> timings);
optional<bool> new_value(bool value) override;
@@ -104,7 +97,7 @@ class AutorepeatFilter : public Filter, public Component {
void next_timing_();
void next_value_(bool val);
std::vector<AutorepeatFilterTiming> timings_;
FixedVector<AutorepeatFilterTiming> timings_;
uint8_t active_timing_{0};
};

View File

@@ -385,12 +385,14 @@ void Climate::save_state_() {
if (!traits.get_supported_custom_fan_modes().empty() && custom_fan_mode.has_value()) {
state.uses_custom_fan_mode = true;
const auto &supported = traits.get_supported_custom_fan_modes();
std::vector<std::string> vec{supported.begin(), supported.end()};
for (size_t i = 0; i < vec.size(); i++) {
if (vec[i] == custom_fan_mode) {
// std::set has consistent order (lexicographic for strings)
size_t i = 0;
for (const auto &mode : supported) {
if (mode == custom_fan_mode) {
state.custom_fan_mode = i;
break;
}
i++;
}
}
if (traits.get_supports_presets() && preset.has_value()) {
@@ -400,12 +402,14 @@ void Climate::save_state_() {
if (!traits.get_supported_custom_presets().empty() && custom_preset.has_value()) {
state.uses_custom_preset = true;
const auto &supported = traits.get_supported_custom_presets();
std::vector<std::string> vec{supported.begin(), supported.end()};
for (size_t i = 0; i < vec.size(); i++) {
if (vec[i] == custom_preset) {
// std::set has consistent order (lexicographic for strings)
size_t i = 0;
for (const auto &preset : supported) {
if (preset == custom_preset) {
state.custom_preset = i;
break;
}
i++;
}
}
if (traits.get_supports_swing_modes()) {
@@ -549,22 +553,34 @@ void ClimateDeviceRestoreState::apply(Climate *climate) {
climate->fan_mode = this->fan_mode;
}
if (!traits.get_supported_custom_fan_modes().empty() && this->uses_custom_fan_mode) {
// std::set has consistent order (lexicographic for strings), so this is ok
// std::set has consistent order (lexicographic for strings)
const auto &modes = traits.get_supported_custom_fan_modes();
std::vector<std::string> modes_vec{modes.begin(), modes.end()};
if (custom_fan_mode < modes_vec.size()) {
climate->custom_fan_mode = modes_vec[this->custom_fan_mode];
if (custom_fan_mode < modes.size()) {
size_t i = 0;
for (const auto &mode : modes) {
if (i == this->custom_fan_mode) {
climate->custom_fan_mode = mode;
break;
}
i++;
}
}
}
if (traits.get_supports_presets() && !this->uses_custom_preset) {
climate->preset = this->preset;
}
if (!traits.get_supported_custom_presets().empty() && uses_custom_preset) {
// std::set has consistent order (lexicographic for strings), so this is ok
// std::set has consistent order (lexicographic for strings)
const auto &presets = traits.get_supported_custom_presets();
std::vector<std::string> presets_vec{presets.begin(), presets.end()};
if (custom_preset < presets_vec.size()) {
climate->custom_preset = presets_vec[this->custom_preset];
if (custom_preset < presets.size()) {
size_t i = 0;
for (const auto &preset : presets) {
if (i == this->custom_preset) {
climate->custom_preset = preset;
break;
}
i++;
}
}
}
if (traits.get_supports_swing_modes()) {

View File

@@ -9,7 +9,8 @@ static const char *const TAG = "copy.select";
void CopySelect::setup() {
source_->add_on_state_callback([this](const std::string &value, size_t index) { this->publish_state(value); });
traits.set_options(source_->traits.get_options());
// Copy options from source select
this->traits.copy_options(source_->traits.get_options());
if (source_->has_state())
this->publish_state(source_->state);

View File

@@ -1,3 +1,4 @@
import contextlib
from dataclasses import dataclass
import itertools
import logging
@@ -102,6 +103,10 @@ COMPILER_OPTIMIZATIONS = {
"SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE",
}
# Socket limit configuration for ESP-IDF
# ESP-IDF CONFIG_LWIP_MAX_SOCKETS has range 1-253, default 10
DEFAULT_MAX_SOCKETS = 10 # ESP-IDF default
ARDUINO_ALLOWED_VARIANTS = [
VARIANT_ESP32,
VARIANT_ESP32C3,
@@ -746,6 +751,72 @@ CONFIG_SCHEMA = cv.All(
FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate)
def _configure_lwip_max_sockets(conf: dict) -> None:
"""Calculate and set CONFIG_LWIP_MAX_SOCKETS based on component needs.
Socket component tracks consumer needs via consume_sockets() called during config validation.
This function runs in to_code() after all components have registered their socket needs.
User-provided sdkconfig_options take precedence.
"""
from esphome.components.socket import KEY_SOCKET_CONSUMERS
# Check if user manually specified CONFIG_LWIP_MAX_SOCKETS
user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get(
"CONFIG_LWIP_MAX_SOCKETS"
)
socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {})
total_sockets = sum(socket_consumers.values())
# Early return if no sockets registered and no user override
if total_sockets == 0 and user_max_sockets is None:
return
components_list = ", ".join(
f"{name}={count}" for name, count in sorted(socket_consumers.items())
)
# User specified their own value - respect it but warn if insufficient
if user_max_sockets is not None:
_LOGGER.info(
"Using user-provided CONFIG_LWIP_MAX_SOCKETS: %s",
user_max_sockets,
)
# Warn if user's value is less than what components need
if total_sockets > 0:
user_sockets_int = 0
with contextlib.suppress(ValueError, TypeError):
user_sockets_int = int(user_max_sockets)
if user_sockets_int < total_sockets:
_LOGGER.warning(
"CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration "
"needs %d sockets (registered: %s). You may experience socket "
"exhaustion errors. Consider increasing to at least %d.",
user_sockets_int,
total_sockets,
components_list,
total_sockets,
)
# User's value already added via sdkconfig_options processing
return
# Auto-calculate based on component needs
# Use at least the ESP-IDF default (10), or the total needed by components
max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets)
log_level = logging.INFO if max_sockets > DEFAULT_MAX_SOCKETS else logging.DEBUG
_LOGGER.log(
log_level,
"Setting CONFIG_LWIP_MAX_SOCKETS to %d (registered: %s)",
max_sockets,
components_list,
)
add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets)
async def to_code(config):
cg.add_platformio_option("board", config[CONF_BOARD])
cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE])
@@ -866,6 +937,9 @@ async def to_code(config):
add_idf_sdkconfig_option("CONFIG_LWIP_DNS_SUPPORT_MDNS_QUERIES", False)
if not advanced.get(CONF_ENABLE_LWIP_BRIDGE_INTERFACE, False):
add_idf_sdkconfig_option("CONFIG_LWIP_BRIDGEIF_MAX_PORTS", 0)
_configure_lwip_max_sockets(conf)
if advanced.get(CONF_EXECUTE_FROM_PSRAM, False):
add_idf_sdkconfig_option("CONFIG_SPIRAM_FETCH_INSTRUCTIONS", True)
add_idf_sdkconfig_option("CONFIG_SPIRAM_RODATA", True)
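
Putting the numbers from this changeset through _configure_lwip_max_sockets(): api registers 4 sockets, ota 1 and esp32_camera_web_server 3, for a total of 8, so the auto-calculated value stays at the ESP-IDF default of 10 and is only logged at DEBUG level; the option rises (and logs at INFO) once the registered needs exceed the default. A short worked example, where the extra "web_server" entry is a hypothetical consumer added purely for illustration:

# Worked example of the max-sockets calculation above.
DEFAULT_MAX_SOCKETS = 10  # ESP-IDF default for CONFIG_LWIP_MAX_SOCKETS
socket_consumers = {"api": 4, "ota": 1, "esp32_camera_web_server": 3}

total_sockets = sum(socket_consumers.values())         # 8
max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets)  # 10 -> stays at the default (DEBUG log)

socket_consumers["web_server"] = 3                     # hypothetical extra consumer
total_sockets = sum(socket_consumers.values())         # 11
max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets)  # 11 -> raised above the default (INFO log)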

View File

@@ -1,6 +1,7 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_MODE, CONF_PORT
from esphome.types import ConfigType
CODEOWNERS = ["@ayufan"]
AUTO_LOAD = ["camera"]
@@ -13,13 +14,27 @@ Mode = esp32_camera_web_server_ns.enum("Mode")
MODES = {"STREAM": Mode.STREAM, "SNAPSHOT": Mode.SNAPSHOT}
CONFIG_SCHEMA = cv.Schema(
{
cv.GenerateID(): cv.declare_id(CameraWebServer),
cv.Required(CONF_PORT): cv.port,
cv.Required(CONF_MODE): cv.enum(MODES, upper=True),
},
).extend(cv.COMPONENT_SCHEMA)
def _consume_camera_web_server_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for camera web server."""
from esphome.components import socket
# Each camera web server instance needs 1 listening socket + 2 client connections
sockets_needed = 3
socket.consume_sockets(sockets_needed, "esp32_camera_web_server")(config)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(CameraWebServer),
cv.Required(CONF_PORT): cv.port,
cv.Required(CONF_MODE): cv.enum(MODES, upper=True),
},
).extend(cv.COMPONENT_SCHEMA),
_consume_camera_web_server_sockets,
)
async def to_code(config):

View File

@@ -95,7 +95,7 @@ async def to_code(config):
if framework_ver >= cv.Version(5, 5, 0):
esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="1.1.5")
esp32.add_idf_component(name="espressif/eppp_link", ref="1.1.3")
esp32.add_idf_component(name="espressif/esp_hosted", ref="2.5.11")
esp32.add_idf_component(name="espressif/esp_hosted", ref="2.6.1")
else:
esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="0.13.0")
esp32.add_idf_component(name="espressif/eppp_link", ref="0.2.0")

View File

@@ -112,7 +112,7 @@ async def to_code(config):
cg.add_define("USE_IMPROV")
await improv_base.setup_improv_core(var, config)
await improv_base.setup_improv_core(var, config, "esp32_improv")
cg.add(var.set_identify_duration(config[CONF_IDENTIFY_DURATION]))
cg.add(var.set_authorized_duration(config[CONF_AUTHORIZED_DURATION]))

View File

@@ -384,26 +384,34 @@ void ESP32ImprovComponent::check_wifi_connection_() {
this->connecting_sta_ = {};
this->cancel_timeout("wifi-connect-timeout");
std::vector<std::string> urls;
// Build URL list with minimal allocations
// Maximum 3 URLs: custom next_url + ESPHOME_MY_LINK + webserver URL
std::string url_strings[3];
size_t url_count = 0;
#ifdef USE_ESP32_IMPROV_NEXT_URL
// Add next_url if configured (should be first per Improv BLE spec)
std::string next_url = this->get_formatted_next_url_();
if (!next_url.empty()) {
urls.push_back(next_url);
url_strings[url_count++] = std::move(next_url);
}
#endif
// Add default URLs for backward compatibility
urls.emplace_back(ESPHOME_MY_LINK);
url_strings[url_count++] = ESPHOME_MY_LINK;
#ifdef USE_WEBSERVER
for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) {
if (ip.is_ip4()) {
std::string webserver_url = "http://" + ip.str() + ":" + to_string(USE_WEBSERVER_PORT);
urls.push_back(webserver_url);
char url_buffer[64];
snprintf(url_buffer, sizeof(url_buffer), "http://%s:%d", ip.str().c_str(), USE_WEBSERVER_PORT);
url_strings[url_count++] = url_buffer;
break;
}
}
#endif
std::vector<uint8_t> data = improv::build_rpc_response(improv::WIFI_SETTINGS, urls);
// Pass to build_rpc_response using vector constructor from iterators to avoid extra copies
std::vector<uint8_t> data = improv::build_rpc_response(
improv::WIFI_SETTINGS, std::vector<std::string>(url_strings, url_strings + url_count));
this->send_response_(data);
} else if (this->is_active() && this->state_ != improv::STATE_PROVISIONED) {
ESP_LOGD(TAG, "WiFi provisioned externally");

View File

@@ -190,7 +190,9 @@ async def to_code(config):
cg.add_define("ESPHOME_VARIANT", "ESP8266")
cg.add_define(ThreadModel.SINGLE)
cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"])
cg.add_platformio_option(
"extra_scripts", ["pre:testing_mode.py", "post:post_build.py"]
)
conf = config[CONF_FRAMEWORK]
cg.add_platformio_option("framework", "arduino")
@@ -230,9 +232,9 @@ async def to_code(config):
# For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;`
cg.add_build_flag("-DNEW_OOM_ABORT")
# In testing mode, fake a larger IRAM to allow linking grouped component tests
# Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB
# This is done via a pre-build script that generates a custom linker script
# In testing mode, fake larger memory to allow linking grouped component tests
# Real ESP8266 hardware only has 32KB IRAM and ~80KB RAM, but for CI testing
# we pretend it has much larger memory to test that components compile together
if CORE.testing_mode:
cg.add_build_flag("-DESPHOME_TESTING_MODE")
@@ -271,8 +273,8 @@ def copy_files():
post_build_file,
CORE.relative_build_path("post_build.py"),
)
iram_fix_file = dir / "iram_fix.py.script"
testing_mode_file = dir / "testing_mode.py.script"
copy_file_if_changed(
iram_fix_file,
CORE.relative_build_path("iram_fix.py"),
testing_mode_file,
CORE.relative_build_path("testing_mode.py"),
)

View File

@@ -1,44 +0,0 @@
import os
import re
# pylint: disable=E0602
Import("env") # noqa
def patch_linker_script_after_preprocess(source, target, env):
"""Patch the local linker script after PlatformIO preprocesses it."""
# Check if we're in testing mode by looking for the define
build_flags = env.get("BUILD_FLAGS", [])
testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)
if not testing_mode:
return
# Get the local linker script path
build_dir = env.subst("$BUILD_DIR")
local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld")
if not os.path.exists(local_ld):
return
# Read the linker script
with open(local_ld, "r") as f:
content = f.read()
# Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB)
# The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000
updated = re.sub(
r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000",
r"\g<1>0x200000",
content,
)
if updated != content:
with open(local_ld, "w") as f:
f.write(updated)
print("ESPHome: Patched IRAM size to 2MB for testing mode")
# Hook into the build process right before linking
# This runs after PlatformIO has already preprocessed the linker scripts
env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess)

View File

@@ -0,0 +1,166 @@
import os
import re
# pylint: disable=E0602
Import("env") # noqa
# Memory sizes for testing mode (allow larger builds for CI component grouping)
TESTING_IRAM_SIZE = "0x200000" # 2MB
TESTING_DRAM_SIZE = "0x200000" # 2MB
TESTING_FLASH_SIZE = "0x2000000" # 32MB
def patch_segment_size(content, segment_name, new_size, label):
"""Patch a memory segment's length in linker script.
Args:
content: Linker script content
segment_name: Name of the segment (e.g., 'iram1_0_seg')
new_size: New size as hex string (e.g., '0x200000')
label: Human-readable label for logging (e.g., 'IRAM')
Returns:
Tuple of (patched_content, was_patched)
"""
# Match: segment_name : org = 0x..., len = 0x...
pattern = rf"({segment_name}\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+"
new_content = re.sub(pattern, rf"\g<1>{new_size}", content)
return new_content, new_content != content
def apply_memory_patches(content):
"""Apply IRAM, DRAM, and Flash patches to linker script content.
Args:
content: Linker script content as string
Returns:
Patched content as string
"""
patches_applied = []
# Patch IRAM (for larger code in IRAM)
content, patched = patch_segment_size(content, "iram1_0_seg", TESTING_IRAM_SIZE, "IRAM")
if patched:
patches_applied.append("IRAM")
# Patch DRAM (for larger BSS/data sections)
content, patched = patch_segment_size(content, "dram0_0_seg", TESTING_DRAM_SIZE, "DRAM")
if patched:
patches_applied.append("DRAM")
# Patch Flash (for larger code sections)
content, patched = patch_segment_size(content, "irom0_0_seg", TESTING_FLASH_SIZE, "Flash")
if patched:
patches_applied.append("Flash")
if patches_applied:
iram_mb = int(TESTING_IRAM_SIZE, 16) // (1024 * 1024)
dram_mb = int(TESTING_DRAM_SIZE, 16) // (1024 * 1024)
flash_mb = int(TESTING_FLASH_SIZE, 16) // (1024 * 1024)
print(f" Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: {iram_mb}MB, Flash: {flash_mb}MB)")
return content
def patch_linker_script_file(filepath, description):
"""Patch a linker script file in the build directory with enlarged memory segments.
This function modifies linker scripts in the build directory only (never SDK files).
It patches IRAM, DRAM, and Flash segments to allow larger builds in testing mode.
Args:
filepath: Path to the linker script file in the build directory
description: Human-readable description for logging
Returns:
True if the file was patched, False if already patched or not found
"""
if not os.path.exists(filepath):
print(f"ESPHome: {description} not found at {filepath}")
return False
print(f"ESPHome: Patching {description}...")
with open(filepath, "r") as f:
content = f.read()
patched_content = apply_memory_patches(content)
if patched_content != content:
with open(filepath, "w") as f:
f.write(patched_content)
print(f"ESPHome: Successfully patched {description}")
return True
else:
print(f"ESPHome: {description} already patched or no changes needed")
return False
def patch_local_linker_script(source, target, env):
"""Patch the local.eagle.app.v6.common.ld in build directory.
This patches the preprocessed linker script that PlatformIO creates in the build
directory, enlarging IRAM, DRAM, and Flash segments for testing mode.
Args:
source: SCons source nodes
target: SCons target nodes
env: SCons environment
"""
# Check if we're in testing mode
build_flags = env.get("BUILD_FLAGS", [])
testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)
if not testing_mode:
return
# Patch the local linker script if it exists
build_dir = env.subst("$BUILD_DIR")
ld_dir = os.path.join(build_dir, "ld")
if os.path.exists(ld_dir):
local_ld = os.path.join(ld_dir, "local.eagle.app.v6.common.ld")
if os.path.exists(local_ld):
patch_linker_script_file(local_ld, "local.eagle.app.v6.common.ld")
# Check if we're in testing mode
build_flags = env.get("BUILD_FLAGS", [])
testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)
if testing_mode:
# Create a custom linker script in the build directory with patched memory limits
# This allows larger IRAM/DRAM/Flash for CI component grouping tests
build_dir = env.subst("$BUILD_DIR")
ldscript = env.GetProjectOption("board_build.ldscript", "")
assert ldscript, "No linker script configured in board_build.ldscript"
framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266")
assert framework_dir is not None, "Could not find framework-arduinoespressif8266 package"
# Read the original SDK linker script (read-only, SDK is never modified)
sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript)
# Create a custom version in the build directory (isolated, temporary)
custom_ld = os.path.join(build_dir, f"testing_{ldscript}")
if os.path.exists(sdk_ld) and not os.path.exists(custom_ld):
# Read the SDK linker script
with open(sdk_ld, "r") as f:
content = f.read()
# Apply memory patches (IRAM: 2MB, DRAM: 2MB, Flash: 32MB)
patched_content = apply_memory_patches(content)
# Write the patched linker script to the build directory
with open(custom_ld, "w") as f:
f.write(patched_content)
print(f"ESPHome: Created custom linker script: {custom_ld}")
# Tell the linker to use our custom script from the build directory
assert os.path.exists(custom_ld), f"Custom linker script not found: {custom_ld}"
env.Replace(LDSCRIPT_PATH=custom_ld)
print(f"ESPHome: Using custom linker script with patched memory limits")
# Also patch local.eagle.app.v6.common.ld after PlatformIO creates it
env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_local_linker_script)
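
For reference, the patch_segment_size() regex above applied to the 32 KB iram1_0_seg line that the removed iram_fix.py.script used to target; a quick self-contained check of the rewrite it performs:

import re

def patch_segment_size(content, segment_name, new_size, label):
    # Same regex as testing_mode.py.script above: rewrite only the len= field.
    pattern = rf"({segment_name}\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+"
    new_content = re.sub(pattern, rf"\g<1>{new_size}", content)
    return new_content, new_content != content

line = "iram1_0_seg : org = 0x40100000, len = 0x8000"
patched, changed = patch_segment_size(line, "iram1_0_seg", "0x200000", "IRAM")
assert changed
assert patched == "iram1_0_seg : org = 0x40100000, len = 0x200000"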

View File

@@ -103,7 +103,16 @@ def ota_esphome_final_validate(config):
)
CONFIG_SCHEMA = (
def _consume_ota_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for OTA component."""
from esphome.components import socket
# OTA needs 1 listening socket (client connections are temporary during updates)
socket.consume_sockets(1, "ota")(config)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(ESPHomeOTAComponent),
@@ -130,7 +139,8 @@ CONFIG_SCHEMA = (
}
)
.extend(BASE_OTA_SCHEMA)
.extend(cv.COMPONENT_SCHEMA)
.extend(cv.COMPONENT_SCHEMA),
_consume_ota_sockets,
)
FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate

View File

@@ -14,13 +14,13 @@ template<typename... Ts> class SendAction : public Action<Ts...>, public Parente
TEMPLATABLE_VALUE(std::vector<uint8_t>, data);
public:
void add_on_sent(const std::vector<Action<Ts...> *> &actions) {
void add_on_sent(const std::initializer_list<Action<Ts...> *> &actions) {
this->sent_.add_actions(actions);
if (this->flags_.wait_for_sent) {
this->sent_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
}
}
void add_on_error(const std::vector<Action<Ts...> *> &actions) {
void add_on_error(const std::initializer_list<Action<Ts...> *> &actions) {
this->error_.add_actions(actions);
if (this->flags_.wait_for_sent) {
this->error_.add_action(new LambdaAction<Ts...>([this](Ts... x) {

View File

@@ -0,0 +1,39 @@
"""ESP-NOW transport platform for packet_transport component."""
import esphome.codegen as cg
from esphome.components.packet_transport import (
PacketTransport,
new_packet_transport,
transport_schema,
)
import esphome.config_validation as cv
from esphome.core import HexInt
from esphome.cpp_types import PollingComponent
from .. import ESPNowComponent, espnow_ns
CODEOWNERS = ["@EasilyBoredEngineer"]
DEPENDENCIES = ["espnow"]
ESPNowTransport = espnow_ns.class_("ESPNowTransport", PacketTransport, PollingComponent)
CONF_ESPNOW_ID = "espnow_id"
CONF_PEER_ADDRESS = "peer_address"
CONFIG_SCHEMA = transport_schema(ESPNowTransport).extend(
{
cv.GenerateID(CONF_ESPNOW_ID): cv.use_id(ESPNowComponent),
cv.Optional(CONF_PEER_ADDRESS, default="FF:FF:FF:FF:FF:FF"): cv.mac_address,
}
)
async def to_code(config):
"""Set up the ESP-NOW transport component."""
var, _ = await new_packet_transport(config)
await cg.register_parented(var, config[CONF_ESPNOW_ID])
# Set peer address - convert MAC to parts array like ESP-NOW does
mac = config[CONF_PEER_ADDRESS]
cg.add(var.set_peer_address([HexInt(x) for x in mac.parts]))

View File

@@ -0,0 +1,97 @@
#include "espnow_transport.h"
#ifdef USE_ESP32
#include "esphome/core/application.h"
#include "esphome/core/log.h"
namespace esphome {
namespace espnow {
static const char *const TAG = "espnow.transport";
bool ESPNowTransport::should_send() { return this->parent_ != nullptr && !this->parent_->is_failed(); }
void ESPNowTransport::setup() {
packet_transport::PacketTransport::setup();
if (this->parent_ == nullptr) {
ESP_LOGE(TAG, "ESPNow component not set");
this->mark_failed();
return;
}
ESP_LOGI(TAG, "Registering ESP-NOW handlers");
ESP_LOGI(TAG, "Peer address: %02X:%02X:%02X:%02X:%02X:%02X", this->peer_address_[0], this->peer_address_[1],
this->peer_address_[2], this->peer_address_[3], this->peer_address_[4], this->peer_address_[5]);
// Register received handler
this->parent_->register_received_handler(static_cast<ESPNowReceivedPacketHandler *>(this));
// Register broadcasted handler
this->parent_->register_broadcasted_handler(static_cast<ESPNowBroadcastedHandler *>(this));
}
void ESPNowTransport::update() {
packet_transport::PacketTransport::update();
this->updated_ = true;
}
void ESPNowTransport::send_packet(const std::vector<uint8_t> &buf) const {
if (this->parent_ == nullptr) {
ESP_LOGE(TAG, "ESPNow component not set");
return;
}
if (buf.empty()) {
ESP_LOGW(TAG, "Attempted to send empty packet");
return;
}
if (buf.size() > ESP_NOW_MAX_DATA_LEN) {
ESP_LOGE(TAG, "Packet too large: %zu bytes (max %d)", buf.size(), ESP_NOW_MAX_DATA_LEN);
return;
}
// Send to configured peer address
this->parent_->send(this->peer_address_.data(), buf.data(), buf.size(), [](esp_err_t err) {
if (err != ESP_OK) {
ESP_LOGW(TAG, "Send failed: %d", err);
}
});
}
bool ESPNowTransport::on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) {
ESP_LOGV(TAG, "Received packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0],
info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]);
if (data == nullptr || size == 0) {
ESP_LOGW(TAG, "Received empty or null packet");
return false;
}
this->packet_buffer_.resize(size);
memcpy(this->packet_buffer_.data(), data, size);
this->process_(this->packet_buffer_);
return false; // Allow other handlers to run
}
bool ESPNowTransport::on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) {
ESP_LOGV(TAG, "Received broadcast packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0],
info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]);
if (data == nullptr || size == 0) {
ESP_LOGW(TAG, "Received empty or null broadcast packet");
return false;
}
this->packet_buffer_.resize(size);
memcpy(this->packet_buffer_.data(), data, size);
this->process_(this->packet_buffer_);
return false; // Allow other handlers to run
}
} // namespace espnow
} // namespace esphome
#endif // USE_ESP32

View File

@@ -0,0 +1,44 @@
#pragma once
#include "../espnow_component.h"
#ifdef USE_ESP32
#include "esphome/core/component.h"
#include "esphome/components/packet_transport/packet_transport.h"
#include <vector>
namespace esphome {
namespace espnow {
class ESPNowTransport : public packet_transport::PacketTransport,
public Parented<ESPNowComponent>,
public ESPNowReceivedPacketHandler,
public ESPNowBroadcastedHandler {
public:
void setup() override;
void update() override;
float get_setup_priority() const override { return setup_priority::AFTER_WIFI; }
void set_peer_address(peer_address_t address) {
memcpy(this->peer_address_.data(), address.data(), ESP_NOW_ETH_ALEN);
}
// ESPNow handler interface
bool on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override;
bool on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override;
protected:
void send_packet(const std::vector<uint8_t> &buf) const override;
size_t get_max_packet_size() override { return ESP_NOW_MAX_DATA_LEN; }
bool should_send() override;
peer_address_t peer_address_{{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}};
std::vector<uint8_t> packet_buffer_;
};
} // namespace espnow
} // namespace esphome
#endif // USE_ESP32

View File

@@ -16,7 +16,8 @@ void HDC1080Component::setup() {
// if configuration fails - there is a problem
if (this->write_register(HDC1080_CMD_CONFIGURATION, config, 2) != i2c::ERROR_OK) {
this->mark_failed();
ESP_LOGW(TAG, "Failed to configure HDC1080");
this->status_set_warning();
return;
}
}

View File

@@ -3,6 +3,8 @@ import re
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import __version__
from esphome.cpp_generator import MockObj
from esphome.types import ConfigType
CODEOWNERS = ["@esphome/core"]
@@ -35,7 +37,9 @@ def _process_next_url(url: str):
return url
async def setup_improv_core(var, config):
if CONF_NEXT_URL in config:
cg.add(var.set_next_url(_process_next_url(config[CONF_NEXT_URL])))
async def setup_improv_core(var: MockObj, config: ConfigType, component: str):
if next_url := config.get(CONF_NEXT_URL):
cg.add(var.set_next_url(_process_next_url(next_url)))
cg.add_define(f"USE_{component.upper()}_NEXT_URL")
cg.add_library("improv/Improv", "1.2.4")

View File

@@ -2,10 +2,27 @@
#include "esphome/components/network/util.h"
#include "esphome/core/application.h"
#include "esphome/core/defines.h"
namespace esphome {
namespace improv_base {
#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
static constexpr const char DEVICE_NAME_PLACEHOLDER[] = "{{device_name}}";
static constexpr size_t DEVICE_NAME_PLACEHOLDER_LEN = sizeof(DEVICE_NAME_PLACEHOLDER) - 1;
static constexpr const char IP_ADDRESS_PLACEHOLDER[] = "{{ip_address}}";
static constexpr size_t IP_ADDRESS_PLACEHOLDER_LEN = sizeof(IP_ADDRESS_PLACEHOLDER) - 1;
static void replace_all_in_place(std::string &str, const char *placeholder, size_t placeholder_len,
const std::string &replacement) {
size_t pos = 0;
const size_t replacement_len = replacement.length();
while ((pos = str.find(placeholder, pos)) != std::string::npos) {
str.replace(pos, placeholder_len, replacement);
pos += replacement_len;
}
}
std::string ImprovBase::get_formatted_next_url_() {
if (this->next_url_.empty()) {
return "";
@@ -14,33 +31,21 @@ std::string ImprovBase::get_formatted_next_url_() {
std::string formatted_url = this->next_url_;
// Replace all occurrences of {{device_name}}
const std::string device_name_placeholder = "{{device_name}}";
const std::string &device_name = App.get_name();
size_t pos = 0;
while ((pos = formatted_url.find(device_name_placeholder, pos)) != std::string::npos) {
formatted_url.replace(pos, device_name_placeholder.length(), device_name);
pos += device_name.length();
}
replace_all_in_place(formatted_url, DEVICE_NAME_PLACEHOLDER, DEVICE_NAME_PLACEHOLDER_LEN, App.get_name());
// Replace all occurrences of {{ip_address}}
const std::string ip_address_placeholder = "{{ip_address}}";
std::string ip_address_str;
for (auto &ip : network::get_ip_addresses()) {
if (ip.is_ip4()) {
ip_address_str = ip.str();
replace_all_in_place(formatted_url, IP_ADDRESS_PLACEHOLDER, IP_ADDRESS_PLACEHOLDER_LEN, ip.str());
break;
}
}
pos = 0;
while ((pos = formatted_url.find(ip_address_placeholder, pos)) != std::string::npos) {
formatted_url.replace(pos, ip_address_placeholder.length(), ip_address_str);
pos += ip_address_str.length();
}
// Note: {{esphome_version}} is replaced at code generation time in Python
return formatted_url;
}
#endif
} // namespace improv_base
} // namespace esphome
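A standalone sketch of the replace_all_in_place behaviour above (the helper and values here are local to the sketch, not ESPHome API): advancing pos by the replacement length avoids re-scanning text that was just inserted.

#include <iostream>
#include <string>

static void replace_all(std::string &str, const std::string &placeholder, const std::string &replacement) {
  size_t pos = 0;
  while ((pos = str.find(placeholder, pos)) != std::string::npos) {
    str.replace(pos, placeholder.length(), replacement);
    pos += replacement.length();  // skip past the inserted text
  }
}

int main() {
  std::string url = "http://{{ip_address}}/?device={{device_name}}";
  replace_all(url, "{{device_name}}", "kitchen-node");
  replace_all(url, "{{ip_address}}", "192.168.1.42");
  std::cout << url << "\n";  // http://192.168.1.42/?device=kitchen-node
  return 0;
}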

View File

@@ -1,17 +1,22 @@
#pragma once
#include <string>
#include "esphome/core/defines.h"
namespace esphome {
namespace improv_base {
class ImprovBase {
public:
#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
void set_next_url(const std::string &next_url) { this->next_url_ = next_url; }
#endif
protected:
#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
std::string get_formatted_next_url_();
std::string next_url_;
#endif
};
} // namespace improv_base

View File

@@ -43,4 +43,4 @@ FINAL_VALIDATE_SCHEMA = validate_logger
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)
await improv_base.setup_improv_core(var, config)
await improv_base.setup_improv_core(var, config, "improv_serial")

View File

@@ -146,9 +146,11 @@ void ImprovSerialComponent::loop() {
std::vector<uint8_t> ImprovSerialComponent::build_rpc_settings_response_(improv::Command command) {
std::vector<std::string> urls;
#ifdef USE_IMPROV_SERIAL_NEXT_URL
if (!this->next_url_.empty()) {
urls.push_back(this->get_formatted_next_url_());
}
#endif
#ifdef USE_WEBSERVER
for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) {
if (ip.is_ip4()) {

View File

@@ -62,7 +62,7 @@ void AddressableLightTransformer::start() {
}
optional<LightColorValues> AddressableLightTransformer::apply() {
float smoothed_progress = LightTransitionTransformer::smoothed_progress(this->get_progress_());
float smoothed_progress = LightTransformer::smoothed_progress(this->get_progress_());
// When running an output-buffer modifying effect, don't try to transition individual LEDs, but instead just fade the
// LightColorValues. write_state() then picks up the change in brightness, and the color change is picked up by the

View File

@@ -8,7 +8,7 @@
#include "esphome/core/defines.h"
#include "light_output.h"
#include "light_state.h"
#include "transformers.h"
#include "light_transformer.h"
#ifdef USE_POWER_SUPPLY
#include "esphome/components/power_supply/power_supply.h"
@@ -103,7 +103,7 @@ class AddressableLight : public LightOutput, public Component {
bool effect_active_{false};
};
class AddressableLightTransformer : public LightTransitionTransformer {
class AddressableLightTransformer : public LightTransformer {
public:
AddressableLightTransformer(AddressableLight &light) : light_(light) {}

View File

@@ -38,6 +38,10 @@ class LightTransformer {
const LightColorValues &get_target_values() const { return this->target_values_; }
protected:
// This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like
// transition from 0 to 1 on x = [0, 1]
static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); }
/// The progress of this transition, on a scale of 0 to 1.
float get_progress_() {
uint32_t now = esphome::millis();
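The factored Horner form above expands to the smootherstep polynomial 6x^5 - 15x^4 + 10x^3; its derivative 30x^2(x - 1)^2 vanishes at both endpoints, which is what makes transitions ease in and out. A quick self-contained check (a sketch, not ESPHome code):

#include <cassert>
#include <cmath>

static float smoothed(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); }

int main() {
  assert(std::fabs(smoothed(0.0f) - 0.0f) < 1e-6f);  // starts at 0
  assert(std::fabs(smoothed(0.5f) - 0.5f) < 1e-6f);  // passes through the midpoint
  assert(std::fabs(smoothed(1.0f) - 1.0f) < 1e-6f);  // ends at 1
  return 0;
}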

View File

@@ -50,15 +50,11 @@ class LightTransitionTransformer : public LightTransformer {
if (this->changing_color_mode_)
p = p < 0.5f ? p * 2 : (p - 0.5) * 2;
float v = LightTransitionTransformer::smoothed_progress(p);
float v = LightTransformer::smoothed_progress(p);
return LightColorValues::lerp(start, end, v);
}
protected:
// This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like
// transition from 0 to 1 on x = [0, 1]
static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); }
LightColorValues end_values_{};
LightColorValues intermediate_values_{};
bool changing_color_mode_{false};

View File

@@ -300,11 +300,11 @@ void LvSelectable::set_selected_text(const std::string &text, lv_anim_enable_t a
}
}
void LvSelectable::set_options(std::vector<std::string> options) {
void LvSelectable::set_options(std::initializer_list<std::string> options) {
auto index = this->get_selected_index();
if (index >= options.size())
index = options.size() - 1;
this->options_ = std::move(options);
this->options_ = options;
this->set_option_string(join_string(this->options_).c_str());
lv_event_send(this->obj, LV_EVENT_REFRESH, nullptr);
this->set_selected_index(index, LV_ANIM_OFF);

View File

@@ -358,12 +358,12 @@ class LvSelectable : public LvCompound {
virtual void set_selected_index(size_t index, lv_anim_enable_t anim) = 0;
void set_selected_text(const std::string &text, lv_anim_enable_t anim);
std::string get_selected_text();
std::vector<std::string> get_options() { return this->options_; }
void set_options(std::vector<std::string> options);
const FixedVector<std::string> &get_options() { return this->options_; }
void set_options(std::initializer_list<std::string> options);
protected:
virtual void set_option_string(const char *options) = 0;
std::vector<std::string> options_{};
FixedVector<std::string> options_{};
};
#ifdef USE_LVGL_DROPDOWN

View File

@@ -53,7 +53,10 @@ class LVGLSelect : public select::Select, public Component {
this->widget_->set_selected_text(value, this->anim_);
this->publish();
}
void set_options_() { this->traits.set_options(this->widget_->get_options()); }
void set_options_() {
// Copy options from lvgl widget to select traits
this->traits.copy_options(this->widget_->get_options());
}
LvSelectable *widget_;
lv_anim_enable_t anim_;

View File

@@ -13,6 +13,7 @@ from esphome.const import (
)
from esphome.core import CORE, Lambda, coroutine_with_priority
from esphome.coroutine import CoroPriority
from esphome.types import ConfigType
CODEOWNERS = ["@esphome/core"]
DEPENDENCIES = ["network"]
@@ -46,6 +47,19 @@ SERVICE_SCHEMA = cv.Schema(
}
)
def _consume_mdns_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for mDNS component."""
if config.get(CONF_DISABLED):
return config
from esphome.components import socket
# mDNS needs 2 sockets (IPv4 + IPv6 multicast)
socket.consume_sockets(2, "mdns")(config)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
@@ -55,6 +69,7 @@ CONFIG_SCHEMA = cv.All(
}
),
_remove_id_if_disabled,
_consume_mdns_sockets,
)

View File

@@ -58,6 +58,7 @@ from esphome.const import (
PlatformFramework,
)
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.types import ConfigType
DEPENDENCIES = ["network"]
@@ -210,6 +211,15 @@ def validate_fingerprint(value):
return value
def _consume_mqtt_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for MQTT component."""
from esphome.components import socket
# MQTT needs 1 socket for the broker connection
socket.consume_sockets(1, "mqtt")(config)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
@@ -306,6 +316,7 @@ CONFIG_SCHEMA = cv.All(
),
validate_config,
cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266, PLATFORM_BK72XX]),
_consume_mqtt_sockets,
)

View File

@@ -62,7 +62,7 @@ CONF_WARNING_MPPT_OVERLOAD = "warning_mppt_overload"
CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE = "warning_battery_too_low_to_charge"
CONF_FAULT_DC_DC_OVER_CURRENT = "fault_dc_dc_over_current"
CONF_FAULT_CODE = "fault_code"
CONF_WARNUNG_LOW_PV_ENERGY = "warnung_low_pv_energy"
CONF_WARNING_LOW_PV_ENERGY = "warning_low_pv_energy"
CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START = (
"warning_high_ac_input_during_bus_soft_start"
)
@@ -122,7 +122,7 @@ TYPES = [
CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE,
CONF_FAULT_DC_DC_OVER_CURRENT,
CONF_FAULT_CODE,
CONF_WARNUNG_LOW_PV_ENERGY,
CONF_WARNING_LOW_PV_ENERGY,
CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START,
CONF_WARNING_BATTERY_EQUALIZATION,
]

View File

@@ -13,7 +13,7 @@ void PipsolarOutput::write_state(float state) {
if (std::find(this->possible_values_.begin(), this->possible_values_.end(), state) != this->possible_values_.end()) {
ESP_LOGD(TAG, "Will write: %s out of value %f / %02.0f", tmp, state, state);
this->parent_->switch_command(std::string(tmp));
this->parent_->queue_command(std::string(tmp));
} else {
ESP_LOGD(TAG, "Will not write: %s as it is not in list of allowed values", tmp);
}

File diff suppressed because it is too large

View File

@@ -7,6 +7,7 @@
#include "esphome/components/uart/uart.h"
#include "esphome/core/automation.h"
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
namespace esphome {
namespace pipsolar {
@@ -28,10 +29,17 @@ struct PollingCommand {
bool needs_update;
};
#define PIPSOLAR_VALUED_ENTITY_(type, name, polling_command, value_type) \
protected: \
value_type value_##name##_; \
PIPSOLAR_ENTITY_(type, name, polling_command)
struct QFLAGValues {
esphome::optional<bool> silence_buzzer_open_buzzer;
esphome::optional<bool> overload_bypass_function;
esphome::optional<bool> lcd_escape_to_default;
esphome::optional<bool> overload_restart_function;
esphome::optional<bool> over_temperature_restart_function;
esphome::optional<bool> backlight_on;
esphome::optional<bool> alarm_on_when_primary_source_interrupt;
esphome::optional<bool> fault_code_record;
esphome::optional<bool> power_saving;
};
#define PIPSOLAR_ENTITY_(type, name, polling_command) \
protected: \
@@ -43,126 +51,123 @@ struct PollingCommand {
this->add_polling_command_(#polling_command, POLLING_##polling_command); \
}
#define PIPSOLAR_SENSOR(name, polling_command, value_type) \
PIPSOLAR_VALUED_ENTITY_(sensor::Sensor, name, polling_command, value_type)
#define PIPSOLAR_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(sensor::Sensor, name, polling_command)
#define PIPSOLAR_SWITCH(name, polling_command) PIPSOLAR_ENTITY_(switch_::Switch, name, polling_command)
#define PIPSOLAR_BINARY_SENSOR(name, polling_command, value_type) \
PIPSOLAR_VALUED_ENTITY_(binary_sensor::BinarySensor, name, polling_command, value_type)
#define PIPSOLAR_VALUED_TEXT_SENSOR(name, polling_command, value_type) \
PIPSOLAR_VALUED_ENTITY_(text_sensor::TextSensor, name, polling_command, value_type)
#define PIPSOLAR_BINARY_SENSOR(name, polling_command) \
PIPSOLAR_ENTITY_(binary_sensor::BinarySensor, name, polling_command)
#define PIPSOLAR_TEXT_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(text_sensor::TextSensor, name, polling_command)
class Pipsolar : public uart::UARTDevice, public PollingComponent {
// QPIGS values
PIPSOLAR_SENSOR(grid_voltage, QPIGS, float)
PIPSOLAR_SENSOR(grid_frequency, QPIGS, float)
PIPSOLAR_SENSOR(ac_output_voltage, QPIGS, float)
PIPSOLAR_SENSOR(ac_output_frequency, QPIGS, float)
PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS, int)
PIPSOLAR_SENSOR(ac_output_active_power, QPIGS, int)
PIPSOLAR_SENSOR(output_load_percent, QPIGS, int)
PIPSOLAR_SENSOR(bus_voltage, QPIGS, int)
PIPSOLAR_SENSOR(battery_voltage, QPIGS, float)
PIPSOLAR_SENSOR(battery_charging_current, QPIGS, int)
PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS, int)
PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS, int)
PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS, float)
PIPSOLAR_SENSOR(pv_input_voltage, QPIGS, float)
PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS, float)
PIPSOLAR_SENSOR(battery_discharge_current, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(load_status, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS, int)
PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS, int) //.1 scale
PIPSOLAR_SENSOR(eeprom_version, QPIGS, int)
PIPSOLAR_SENSOR(pv_charging_power, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS, int)
PIPSOLAR_SENSOR(grid_voltage, QPIGS)
PIPSOLAR_SENSOR(grid_frequency, QPIGS)
PIPSOLAR_SENSOR(ac_output_voltage, QPIGS)
PIPSOLAR_SENSOR(ac_output_frequency, QPIGS)
PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS)
PIPSOLAR_SENSOR(ac_output_active_power, QPIGS)
PIPSOLAR_SENSOR(output_load_percent, QPIGS)
PIPSOLAR_SENSOR(bus_voltage, QPIGS)
PIPSOLAR_SENSOR(battery_voltage, QPIGS)
PIPSOLAR_SENSOR(battery_charging_current, QPIGS)
PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS)
PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS)
PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS)
PIPSOLAR_SENSOR(pv_input_voltage, QPIGS)
PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS)
PIPSOLAR_SENSOR(battery_discharge_current, QPIGS)
PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS)
PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS)
PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS)
PIPSOLAR_BINARY_SENSOR(load_status, QPIGS)
PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS)
PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS)
PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS)
PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS)
PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS) //.1 scale
PIPSOLAR_SENSOR(eeprom_version, QPIGS)
PIPSOLAR_SENSOR(pv_charging_power, QPIGS)
PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS)
PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS)
PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS)
// QPIRI values
PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI, float)
PIPSOLAR_SENSOR(grid_rating_current, QPIRI, float)
PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI, float)
PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI, float)
PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI, float)
PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI, int)
PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI, int)
PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_under_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_float_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_type, QPIRI, int)
PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI, int)
PIPSOLAR_SENSOR(current_max_charging_current, QPIRI, int)
PIPSOLAR_SENSOR(input_voltage_range, QPIRI, int)
PIPSOLAR_SENSOR(output_source_priority, QPIRI, int)
PIPSOLAR_SENSOR(charger_source_priority, QPIRI, int)
PIPSOLAR_SENSOR(parallel_max_num, QPIRI, int)
PIPSOLAR_SENSOR(machine_type, QPIRI, int)
PIPSOLAR_SENSOR(topology, QPIRI, int)
PIPSOLAR_SENSOR(output_mode, QPIRI, int)
PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI, float)
PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI, int)
PIPSOLAR_SENSOR(pv_power_balance, QPIRI, int)
PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI)
PIPSOLAR_SENSOR(grid_rating_current, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI)
PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_under_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_float_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_type, QPIRI)
PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI)
PIPSOLAR_SENSOR(current_max_charging_current, QPIRI)
PIPSOLAR_SENSOR(input_voltage_range, QPIRI)
PIPSOLAR_SENSOR(output_source_priority, QPIRI)
PIPSOLAR_SENSOR(charger_source_priority, QPIRI)
PIPSOLAR_SENSOR(parallel_max_num, QPIRI)
PIPSOLAR_SENSOR(machine_type, QPIRI)
PIPSOLAR_SENSOR(topology, QPIRI)
PIPSOLAR_SENSOR(output_mode, QPIRI)
PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI)
PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI)
PIPSOLAR_SENSOR(pv_power_balance, QPIRI)
// QMOD values
PIPSOLAR_VALUED_TEXT_SENSOR(device_mode, QMOD, char)
PIPSOLAR_TEXT_SENSOR(device_mode, QMOD)
// QFLAG values
PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG)
PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG)
PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG)
PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG)
PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG)
PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG)
PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG)
PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG)
PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG)
// QPIWS values
PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS, int)
PIPSOLAR_BINARY_SENSOR(warnung_low_pv_energy, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS)
PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_low_pv_energy, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS)
PIPSOLAR_TEXT_SENSOR(last_qpigs, QPIGS)
PIPSOLAR_TEXT_SENSOR(last_qpiri, QPIRI)
@@ -180,14 +185,14 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
PIPSOLAR_SWITCH(pv_ok_condition_for_parallel_switch, QPIRI)
PIPSOLAR_SWITCH(pv_power_balance_switch, QPIRI)
void switch_command(const std::string &command);
void queue_command(const std::string &command);
void setup() override;
void loop() override;
void dump_config() override;
void update() override;
protected:
static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 110; // maximum supported answer length
static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 128; // maximum supported answer length
static const size_t COMMAND_QUEUE_LENGTH = 10;
static const size_t COMMAND_TIMEOUT = 5000;
static const size_t POLLING_COMMANDS_MAX = 15;
@@ -198,7 +203,26 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
uint16_t pipsolar_crc_(uint8_t *msg, uint8_t len);
bool send_next_command_();
bool send_next_poll_();
void queue_command_(const char *command, uint8_t length);
void handle_qpiri_(const char *message);
void handle_qpigs_(const char *message);
void handle_qmod_(const char *message);
void handle_qflag_(const char *message);
void handle_qpiws_(const char *message);
void handle_qt_(const char *message);
void handle_qmn_(const char *message);
void skip_start_(const char *message, size_t *pos);
void skip_field_(const char *message, size_t *pos);
std::string read_field_(const char *message, size_t *pos);
void read_float_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor);
void read_int_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor);
void publish_binary_sensor_(esphome::optional<bool> b, binary_sensor::BinarySensor *sensor);
esphome::optional<bool> get_bit_(std::string bits, uint8_t bit_pos);
std::string command_queue_[COMMAND_QUEUE_LENGTH];
uint8_t command_queue_position_ = 0;
uint8_t read_buffer_[PIPSOLAR_READ_BUFFER_LENGTH];
@@ -213,11 +237,10 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
STATE_POLL_COMPLETE = 3,
STATE_COMMAND_COMPLETE = 4,
STATE_POLL_CHECKED = 5,
STATE_POLL_DECODED = 6,
};
uint8_t last_polling_command_ = 0;
PollingCommand used_polling_commands_[POLLING_COMMANDS_MAX];
PollingCommand enabled_polling_commands_[POLLING_COMMANDS_MAX];
};
} // namespace pipsolar

View File

@@ -11,11 +11,11 @@ void PipsolarSwitch::dump_config() { LOG_SWITCH("", "Pipsolar Switch", this); }
void PipsolarSwitch::write_state(bool state) {
if (state) {
if (!this->on_command_.empty()) {
this->parent_->switch_command(this->on_command_);
this->parent_->queue_command(this->on_command_);
}
} else {
if (!this->off_command_.empty()) {
this->parent_->switch_command(this->off_command_);
this->parent_->queue_command(this->off_command_);
}
}
}

View File

@@ -3,9 +3,9 @@
namespace esphome {
namespace select {
void SelectTraits::set_options(std::vector<std::string> options) { this->options_ = std::move(options); }
void SelectTraits::set_options(std::initializer_list<std::string> options) { this->options_ = options; }
const std::vector<std::string> &SelectTraits::get_options() const { return this->options_; }
const FixedVector<std::string> &SelectTraits::get_options() const { return this->options_; }
} // namespace select
} // namespace esphome

View File

@@ -1,18 +1,21 @@
#pragma once
#include <vector>
#include <string>
#include <initializer_list>
#include "esphome/core/helpers.h"
namespace esphome {
namespace select {
class SelectTraits {
public:
void set_options(std::vector<std::string> options);
const std::vector<std::string> &get_options() const;
void set_options(std::initializer_list<std::string> options);
const FixedVector<std::string> &get_options() const;
/// Copy options from another SelectTraits (for copy_select, lvgl)
void copy_options(const FixedVector<std::string> &other) { this->options_.copy_from(other); }
protected:
std::vector<std::string> options_;
FixedVector<std::string> options_;
};
} // namespace select
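A usage sketch for the SelectTraits API above, assuming the header shown in this diff (include path assumed): options are set once from a braced list, and copying is opt-in via copy_options() for entities that mirror another select (copy_select, lvgl).

#include "esphome/components/select/select_traits.h"

void select_traits_example() {
  esphome::select::SelectTraits source;
  source.set_options({"Low", "Medium", "High"});  // binds to std::initializer_list

  esphome::select::SelectTraits mirror;
  mirror.copy_options(source.get_options());  // deliberate, visible copy into the FixedVector
}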

View File

@@ -28,6 +28,8 @@ from esphome.const import (
CONF_ON_RAW_VALUE,
CONF_ON_VALUE,
CONF_ON_VALUE_RANGE,
CONF_OPTIMISTIC,
CONF_PERIOD,
CONF_QUANTILE,
CONF_SEND_EVERY,
CONF_SEND_FIRST_AT,
@@ -261,9 +263,12 @@ ThrottleAverageFilter = sensor_ns.class_("ThrottleAverageFilter", Filter, cg.Com
LambdaFilter = sensor_ns.class_("LambdaFilter", Filter)
OffsetFilter = sensor_ns.class_("OffsetFilter", Filter)
MultiplyFilter = sensor_ns.class_("MultiplyFilter", Filter)
FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", Filter)
ValueListFilter = sensor_ns.class_("ValueListFilter", Filter)
FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", ValueListFilter)
ThrottleFilter = sensor_ns.class_("ThrottleFilter", Filter)
ThrottleWithPriorityFilter = sensor_ns.class_("ThrottleWithPriorityFilter", Filter)
ThrottleWithPriorityFilter = sensor_ns.class_(
"ThrottleWithPriorityFilter", ValueListFilter
)
TimeoutFilter = sensor_ns.class_("TimeoutFilter", Filter, cg.Component)
DebounceFilter = sensor_ns.class_("DebounceFilter", Filter, cg.Component)
HeartbeatFilter = sensor_ns.class_("HeartbeatFilter", Filter, cg.Component)
@@ -641,10 +646,29 @@ async def throttle_with_priority_filter_to_code(config, filter_id):
return cg.new_Pvariable(filter_id, config[CONF_TIMEOUT], template_)
HEARTBEAT_SCHEMA = cv.Schema(
{
cv.Required(CONF_PERIOD): cv.positive_time_period_milliseconds,
cv.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
}
)
@FILTER_REGISTRY.register(
"heartbeat", HeartbeatFilter, cv.positive_time_period_milliseconds
"heartbeat",
HeartbeatFilter,
cv.Any(
cv.positive_time_period_milliseconds,
HEARTBEAT_SCHEMA,
),
)
async def heartbeat_filter_to_code(config, filter_id):
if isinstance(config, dict):
var = cg.new_Pvariable(filter_id, config[CONF_PERIOD])
await cg.register_component(var, {})
cg.add(var.set_optimistic(config[CONF_OPTIMISTIC]))
return var
var = cg.new_Pvariable(filter_id, config)
await cg.register_component(var, {})
return var

View File

@@ -228,27 +228,40 @@ MultiplyFilter::MultiplyFilter(TemplatableValue<float> multiplier) : multiplier_
optional<float> MultiplyFilter::new_value(float value) { return value * this->multiplier_.value(); }
// FilterOutValueFilter
FilterOutValueFilter::FilterOutValueFilter(std::vector<TemplatableValue<float>> values_to_filter_out)
: values_to_filter_out_(std::move(values_to_filter_out)) {}
// ValueListFilter (base class)
ValueListFilter::ValueListFilter(std::initializer_list<TemplatableValue<float>> values) : values_(values) {}
optional<float> FilterOutValueFilter::new_value(float value) {
bool ValueListFilter::value_matches_any_(float sensor_value) {
int8_t accuracy = this->parent_->get_accuracy_decimals();
float accuracy_mult = powf(10.0f, accuracy);
for (auto filter_value : this->values_to_filter_out_) {
if (std::isnan(filter_value.value())) {
if (std::isnan(value)) {
return {};
}
float rounded_sensor = roundf(accuracy_mult * sensor_value);
for (auto &filter_value : this->values_) {
float fv = filter_value.value();
// Handle NaN comparison
if (std::isnan(fv)) {
if (std::isnan(sensor_value))
return true;
continue;
}
float rounded_filter_out = roundf(accuracy_mult * filter_value.value());
float rounded_value = roundf(accuracy_mult * value);
if (rounded_filter_out == rounded_value) {
return {};
}
// Compare rounded values
if (roundf(accuracy_mult * fv) == rounded_sensor)
return true;
}
return value;
return false;
}
// FilterOutValueFilter
FilterOutValueFilter::FilterOutValueFilter(std::initializer_list<TemplatableValue<float>> values_to_filter_out)
: ValueListFilter(values_to_filter_out) {}
optional<float> FilterOutValueFilter::new_value(float value) {
if (this->value_matches_any_(value))
return {}; // Filter out
return value; // Pass through
}
// ThrottleFilter
@@ -263,33 +276,15 @@ optional<float> ThrottleFilter::new_value(float value) {
}
// ThrottleWithPriorityFilter
ThrottleWithPriorityFilter::ThrottleWithPriorityFilter(uint32_t min_time_between_inputs,
std::vector<TemplatableValue<float>> prioritized_values)
: min_time_between_inputs_(min_time_between_inputs), prioritized_values_(std::move(prioritized_values)) {}
ThrottleWithPriorityFilter::ThrottleWithPriorityFilter(
uint32_t min_time_between_inputs, std::initializer_list<TemplatableValue<float>> prioritized_values)
: ValueListFilter(prioritized_values), min_time_between_inputs_(min_time_between_inputs) {}
optional<float> ThrottleWithPriorityFilter::new_value(float value) {
bool is_prioritized_value = false;
int8_t accuracy = this->parent_->get_accuracy_decimals();
float accuracy_mult = powf(10.0f, accuracy);
const uint32_t now = App.get_loop_component_start_time();
// First, determine if the new value is one of the prioritized values
for (auto prioritized_value : this->prioritized_values_) {
if (std::isnan(prioritized_value.value())) {
if (std::isnan(value)) {
is_prioritized_value = true;
break;
}
continue;
}
float rounded_prioritized_value = roundf(accuracy_mult * prioritized_value.value());
float rounded_value = roundf(accuracy_mult * value);
if (rounded_prioritized_value == rounded_value) {
is_prioritized_value = true;
break;
}
}
// Finally, determine if the new value should be throttled and pass it through if not
if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ || is_prioritized_value) {
// Allow value through if: no previous input, time expired, or is prioritized
if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ ||
this->value_matches_any_(value)) {
this->last_input_ = now;
return value;
}
@@ -318,7 +313,7 @@ optional<float> DeltaFilter::new_value(float value) {
}
// OrFilter
OrFilter::OrFilter(std::vector<Filter *> filters) : filters_(std::move(filters)), phi_(this) {}
OrFilter::OrFilter(std::initializer_list<Filter *> filters) : filters_(filters), phi_(this) {}
OrFilter::PhiNode::PhiNode(OrFilter *or_parent) : or_parent_(or_parent) {}
optional<float> OrFilter::PhiNode::new_value(float value) {
@@ -331,14 +326,14 @@ optional<float> OrFilter::PhiNode::new_value(float value) {
}
optional<float> OrFilter::new_value(float value) {
this->has_value_ = false;
for (Filter *filter : this->filters_)
for (auto *filter : this->filters_)
filter->input(value);
return {};
}
void OrFilter::initialize(Sensor *parent, Filter *next) {
Filter::initialize(parent, next);
for (Filter *filter : this->filters_) {
for (auto *filter : this->filters_) {
filter->initialize(parent, &this->phi_);
}
this->phi_.initialize(parent, nullptr);
@@ -377,8 +372,12 @@ optional<float> HeartbeatFilter::new_value(float value) {
this->last_input_ = value;
this->has_value_ = true;
if (this->optimistic_) {
return value;
}
return {};
}
void HeartbeatFilter::setup() {
this->set_interval("heartbeat", this->time_period_, [this]() {
ESP_LOGVV(TAG, "HeartbeatFilter(%p)::interval(has_value=%s, last_input=%f)", this, YESNO(this->has_value_),
@@ -389,20 +388,27 @@ void HeartbeatFilter::setup() {
this->output(this->last_input_);
});
}
float HeartbeatFilter::get_setup_priority() const { return setup_priority::HARDWARE; }
CalibrateLinearFilter::CalibrateLinearFilter(std::initializer_list<std::array<float, 3>> linear_functions)
: linear_functions_(linear_functions) {}
optional<float> CalibrateLinearFilter::new_value(float value) {
for (std::array<float, 3> f : this->linear_functions_) {
for (const auto &f : this->linear_functions_) {
if (!std::isfinite(f[2]) || value < f[2])
return (value * f[0]) + f[1];
}
return NAN;
}
CalibratePolynomialFilter::CalibratePolynomialFilter(std::initializer_list<float> coefficients)
: coefficients_(coefficients) {}
optional<float> CalibratePolynomialFilter::new_value(float value) {
float res = 0.0f;
float x = 1.0f;
for (float coefficient : this->coefficients_) {
for (const auto &coefficient : this->coefficients_) {
res += x * coefficient;
x *= value;
}
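A standalone sketch of the accuracy-based comparison that ValueListFilter::value_matches_any_ performs above: both the configured value and the incoming reading are scaled by 10^accuracy_decimals and rounded before comparing, and a NaN entry acts as a sentinel that matches only NaN readings.

#include <cmath>
#include <cstdint>
#include <cstdio>

static bool matches(float configured, float reading, int8_t accuracy_decimals) {
  if (std::isnan(configured))
    return std::isnan(reading);  // NaN sentinel matches only NaN readings
  float mult = powf(10.0f, accuracy_decimals);
  return roundf(mult * configured) == roundf(mult * reading);
}

int main() {
  printf("%d\n", matches(20.0f, 20.04f, 1));  // 1: both round to 200 at one decimal place
  printf("%d\n", matches(20.0f, 20.06f, 1));  // 0: 200 vs 201
  printf("%d\n", matches(NAN, NAN, 1));       // 1
  return 0;
}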

View File

@@ -317,15 +317,28 @@ class MultiplyFilter : public Filter {
TemplatableValue<float> multiplier_;
};
/** Base class for filters that compare sensor values against a list of configured values.
*
* This base class provides common functionality for filters that need to check if a sensor
* value matches any value in a configured list, with proper handling of NaN values and
* accuracy-based rounding for comparisons.
*/
class ValueListFilter : public Filter {
protected:
explicit ValueListFilter(std::initializer_list<TemplatableValue<float>> values);
/// Check if sensor value matches any configured value (with accuracy rounding)
bool value_matches_any_(float sensor_value);
FixedVector<TemplatableValue<float>> values_;
};
/// A simple filter that only forwards values along the filter chain if they are not in `values_to_filter_out`.
class FilterOutValueFilter : public Filter {
class FilterOutValueFilter : public ValueListFilter {
public:
explicit FilterOutValueFilter(std::vector<TemplatableValue<float>> values_to_filter_out);
explicit FilterOutValueFilter(std::initializer_list<TemplatableValue<float>> values_to_filter_out);
optional<float> new_value(float value) override;
protected:
std::vector<TemplatableValue<float>> values_to_filter_out_;
};
class ThrottleFilter : public Filter {
@@ -340,17 +353,16 @@ class ThrottleFilter : public Filter {
};
/// Same as 'throttle' but will immediately publish values contained in `prioritized_values`.
class ThrottleWithPriorityFilter : public Filter {
class ThrottleWithPriorityFilter : public ValueListFilter {
public:
explicit ThrottleWithPriorityFilter(uint32_t min_time_between_inputs,
std::vector<TemplatableValue<float>> prioritized_values);
std::initializer_list<TemplatableValue<float>> prioritized_values);
optional<float> new_value(float value) override;
protected:
uint32_t last_input_{0};
uint32_t min_time_between_inputs_;
std::vector<TemplatableValue<float>> prioritized_values_;
};
class TimeoutFilter : public Filter, public Component {
@@ -384,15 +396,16 @@ class HeartbeatFilter : public Filter, public Component {
explicit HeartbeatFilter(uint32_t time_period);
void setup() override;
optional<float> new_value(float value) override;
float get_setup_priority() const override;
void set_optimistic(bool optimistic) { this->optimistic_ = optimistic; }
protected:
uint32_t time_period_;
float last_input_;
bool has_value_{false};
bool optimistic_{false};
};
class DeltaFilter : public Filter {
@@ -410,7 +423,7 @@ class DeltaFilter : public Filter {
class OrFilter : public Filter {
public:
explicit OrFilter(std::vector<Filter *> filters);
explicit OrFilter(std::initializer_list<Filter *> filters);
void initialize(Sensor *parent, Filter *next) override;
@@ -426,28 +439,27 @@ class OrFilter : public Filter {
OrFilter *or_parent_;
};
std::vector<Filter *> filters_;
FixedVector<Filter *> filters_;
PhiNode phi_;
bool has_value_{false};
};
class CalibrateLinearFilter : public Filter {
public:
CalibrateLinearFilter(std::vector<std::array<float, 3>> linear_functions)
: linear_functions_(std::move(linear_functions)) {}
explicit CalibrateLinearFilter(std::initializer_list<std::array<float, 3>> linear_functions);
optional<float> new_value(float value) override;
protected:
std::vector<std::array<float, 3>> linear_functions_;
FixedVector<std::array<float, 3>> linear_functions_;
};
class CalibratePolynomialFilter : public Filter {
public:
CalibratePolynomialFilter(std::vector<float> coefficients) : coefficients_(std::move(coefficients)) {}
explicit CalibratePolynomialFilter(std::initializer_list<float> coefficients);
optional<float> new_value(float value) override;
protected:
std::vector<float> coefficients_;
FixedVector<float> coefficients_;
};
class ClampFilter : public Filter {

View File

@@ -107,12 +107,12 @@ void Sensor::add_filter(Filter *filter) {
}
filter->initialize(this, nullptr);
}
void Sensor::add_filters(const std::vector<Filter *> &filters) {
void Sensor::add_filters(std::initializer_list<Filter *> filters) {
for (Filter *filter : filters) {
this->add_filter(filter);
}
}
void Sensor::set_filters(const std::vector<Filter *> &filters) {
void Sensor::set_filters(std::initializer_list<Filter *> filters) {
this->clear_filters();
this->add_filters(filters);
}

View File

@@ -6,7 +6,7 @@
#include "esphome/core/log.h"
#include "esphome/components/sensor/filter.h"
#include <vector>
#include <initializer_list>
#include <memory>
namespace esphome {
@@ -77,10 +77,10 @@ class Sensor : public EntityBase, public EntityBase_DeviceClass, public EntityBa
* SlidingWindowMovingAverageFilter(15, 15), // average over last 15 values
* });
*/
void add_filters(const std::vector<Filter *> &filters);
void add_filters(std::initializer_list<Filter *> filters);
/// Clear the existing filters and replace them with the given filters.
void set_filters(const std::vector<Filter *> &filters);
void set_filters(std::initializer_list<Filter *> filters);
/// Clear the entire filter chain.
void clear_filters();

View File

@@ -1,3 +1,5 @@
from collections.abc import Callable, MutableMapping
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.core import CORE
@@ -9,6 +11,32 @@ IMPLEMENTATION_LWIP_TCP = "lwip_tcp"
IMPLEMENTATION_LWIP_SOCKETS = "lwip_sockets"
IMPLEMENTATION_BSD_SOCKETS = "bsd_sockets"
# Socket tracking infrastructure
# Components register their socket needs and platforms read this to configure appropriately
KEY_SOCKET_CONSUMERS = "socket_consumers"
def consume_sockets(
value: int, consumer: str
) -> Callable[[MutableMapping], MutableMapping]:
"""Register socket usage for a component.
Args:
value: Number of sockets needed by the component
consumer: Name of the component consuming the sockets
Returns:
A validator function that records the socket usage
"""
def _consume_sockets(config: MutableMapping) -> MutableMapping:
consumers: dict[str, int] = CORE.data.setdefault(KEY_SOCKET_CONSUMERS, {})
consumers[consumer] = consumers.get(consumer, 0) + value
return config
return _consume_sockets
CONFIG_SCHEMA = cv.Schema(
{
cv.SplitDefault(

View File

@@ -110,17 +110,28 @@ def validate_mapping(value):
"substitute", SubstituteFilter, cv.ensure_list(validate_mapping)
)
async def substitute_filter_to_code(config, filter_id):
from_strings = [conf[CONF_FROM] for conf in config]
to_strings = [conf[CONF_TO] for conf in config]
return cg.new_Pvariable(filter_id, from_strings, to_strings)
substitutions = [
cg.StructInitializer(
cg.MockObj("Substitution", "esphome::text_sensor::"),
("from", conf[CONF_FROM]),
("to", conf[CONF_TO]),
)
for conf in config
]
return cg.new_Pvariable(filter_id, substitutions)
@FILTER_REGISTRY.register("map", MapFilter, cv.ensure_list(validate_mapping))
async def map_filter_to_code(config, filter_id):
map_ = cg.std_ns.class_("map").template(cg.std_string, cg.std_string)
return cg.new_Pvariable(
filter_id, map_([(item[CONF_FROM], item[CONF_TO]) for item in config])
)
mappings = [
cg.StructInitializer(
cg.MockObj("Substitution", "esphome::text_sensor::"),
("from", conf[CONF_FROM]),
("to", conf[CONF_TO]),
)
for conf in config
]
return cg.new_Pvariable(filter_id, mappings)
validate_device_class = cv.one_of(*DEVICE_CLASSES, lower=True, space="_")

View File

@@ -62,19 +62,27 @@ optional<std::string> AppendFilter::new_value(std::string value) { return value
optional<std::string> PrependFilter::new_value(std::string value) { return this->prefix_ + value; }
// Substitute
SubstituteFilter::SubstituteFilter(const std::initializer_list<Substitution> &substitutions)
: substitutions_(substitutions) {}
optional<std::string> SubstituteFilter::new_value(std::string value) {
std::size_t pos;
for (size_t i = 0; i < this->from_strings_.size(); i++) {
while ((pos = value.find(this->from_strings_[i])) != std::string::npos)
value.replace(pos, this->from_strings_[i].size(), this->to_strings_[i]);
for (const auto &sub : this->substitutions_) {
while ((pos = value.find(sub.from)) != std::string::npos)
value.replace(pos, sub.from.size(), sub.to);
}
return value;
}
// Map
MapFilter::MapFilter(const std::initializer_list<Substitution> &mappings) : mappings_(mappings) {}
optional<std::string> MapFilter::new_value(std::string value) {
auto item = mappings_.find(value);
return item == mappings_.end() ? value : item->second;
for (const auto &mapping : this->mappings_) {
if (mapping.from == value)
return mapping.to;
}
return value; // Pass through if no match
}
} // namespace text_sensor
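A self-contained sketch of the SubstituteFilter loop above: every occurrence of each `from` string is replaced in place, one substitution pair at a time (a plain array stands in for FixedVector to keep the sketch standalone).

#include <cstddef>
#include <cstdio>
#include <string>

struct Substitution {
  std::string from;
  std::string to;
};

static std::string substitute(std::string value, const Substitution *subs, size_t n) {
  for (size_t i = 0; i < n; i++) {
    size_t pos;
    while ((pos = value.find(subs[i].from)) != std::string::npos)
      value.replace(pos, subs[i].from.size(), subs[i].to);
  }
  return value;
}

int main() {
  const Substitution subs[] = {{"_", " "}, {"degC", "C"}};
  printf("%s\n", substitute("temp_degC", subs, 2).c_str());  // "temp C"
  return 0;
}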

View File

@@ -2,10 +2,6 @@
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include <queue>
#include <utility>
#include <map>
#include <vector>
namespace esphome {
namespace text_sensor {
@@ -98,26 +94,52 @@ class PrependFilter : public Filter {
std::string prefix_;
};
struct Substitution {
std::string from;
std::string to;
};
/// A simple filter that replaces a substring with another substring
class SubstituteFilter : public Filter {
public:
SubstituteFilter(std::vector<std::string> from_strings, std::vector<std::string> to_strings)
: from_strings_(std::move(from_strings)), to_strings_(std::move(to_strings)) {}
explicit SubstituteFilter(const std::initializer_list<Substitution> &substitutions);
optional<std::string> new_value(std::string value) override;
protected:
std::vector<std::string> from_strings_;
std::vector<std::string> to_strings_;
FixedVector<Substitution> substitutions_;
};
/// A filter that maps values from one set to another
/** A filter that maps values from one set to another
*
* Uses linear search instead of std::map for typical small datasets (2-20 mappings).
* Linear search on contiguous memory is faster than red-black tree lookups when:
* - Dataset is small (< ~30 items)
* - Memory is contiguous (cache-friendly, better CPU cache utilization)
* - No pointer chasing overhead (tree node traversal)
* - String comparison cost dominates lookup time
*
* Benchmark results (see benchmark_map_filter.cpp):
* - 2 mappings: Linear 1.26x faster than std::map
* - 5 mappings: Linear 2.25x faster than std::map
* - 10 mappings: Linear 1.83x faster than std::map
* - 20 mappings: Linear 1.59x faster than std::map
* - 30 mappings: Linear 1.09x faster than std::map
* - 40 mappings: std::map 1.27x faster than Linear (break-even)
*
* Benefits over std::map:
* - ~2KB smaller flash (no red-black tree code)
* - ~24-32 bytes less RAM per mapping (no tree node overhead)
* - Faster for typical ESPHome usage (2-10 mappings common, 20+ rare)
*
* Break-even point: ~35-40 mappings, but ESPHome configs rarely exceed 20
*/
class MapFilter : public Filter {
public:
MapFilter(std::map<std::string, std::string> mappings) : mappings_(std::move(mappings)) {}
explicit MapFilter(const std::initializer_list<Substitution> &mappings);
optional<std::string> new_value(std::string value) override;
protected:
std::map<std::string, std::string> mappings_;
FixedVector<Substitution> mappings_;
};
} // namespace text_sensor
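A small self-contained sketch of the linear-search lookup described in the MapFilter comment above (the real filter stores a FixedVector<Substitution>; a plain array is enough to show the idea): whole-value match, first hit wins, unmatched values pass through.

#include <cstddef>
#include <cstdio>
#include <string>

struct Substitution {
  std::string from;
  std::string to;
};

static std::string map_value(const Substitution *mappings, size_t n, const std::string &value) {
  for (size_t i = 0; i < n; i++) {
    if (mappings[i].from == value)
      return mappings[i].to;  // first match wins
  }
  return value;  // pass through unchanged if nothing matches
}

int main() {
  const Substitution mappings[] = {{"ON", "Running"}, {"OFF", "Stopped"}};
  printf("%s\n", map_value(mappings, 2, "ON").c_str());       // Running
  printf("%s\n", map_value(mappings, 2, "UNKNOWN").c_str());  // UNKNOWN
  return 0;
}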

View File

@@ -51,12 +51,12 @@ void TextSensor::add_filter(Filter *filter) {
}
filter->initialize(this, nullptr);
}
void TextSensor::add_filters(const std::vector<Filter *> &filters) {
void TextSensor::add_filters(std::initializer_list<Filter *> filters) {
for (Filter *filter : filters) {
this->add_filter(filter);
}
}
void TextSensor::set_filters(const std::vector<Filter *> &filters) {
void TextSensor::set_filters(std::initializer_list<Filter *> filters) {
this->clear_filters();
this->add_filters(filters);
}

View File

@@ -5,7 +5,7 @@
#include "esphome/core/helpers.h"
#include "esphome/components/text_sensor/filter.h"
#include <vector>
#include <initializer_list>
#include <memory>
namespace esphome {
@@ -37,10 +37,10 @@ class TextSensor : public EntityBase, public EntityBase_DeviceClass {
void add_filter(Filter *filter);
/// Add a list of filters to the back of the filter chain.
void add_filters(const std::vector<Filter *> &filters);
void add_filters(std::initializer_list<Filter *> filters);
/// Clear the existing filters and replace them with the given filters.
void set_filters(const std::vector<Filter *> &filters);
void set_filters(std::initializer_list<Filter *> filters);
/// Clear the entire filter chain.
void clear_filters();

View File

@@ -136,6 +136,18 @@ def _final_validate_sorting(config: ConfigType) -> ConfigType:
FINAL_VALIDATE_SCHEMA = _final_validate_sorting
def _consume_web_server_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for web_server component."""
from esphome.components import socket
# Web server needs 1 listening socket + typically 2 concurrent client connections
# (browser makes 2 connections for page + event stream)
sockets_needed = 3
socket.consume_sockets(sockets_needed, "web_server")(config)
return config
sorting_group = {
cv.Required(CONF_ID): cv.declare_id(cg.int_),
cv.Required(CONF_NAME): cv.string,
@@ -205,6 +217,7 @@ CONFIG_SCHEMA = cv.All(
validate_local,
validate_sorting_groups,
validate_ota,
_consume_web_server_sockets,
)

View File

@@ -378,14 +378,18 @@ async def to_code(config):
# Track if any network uses Enterprise authentication
has_eap = False
def add_sta(ap, network):
ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP))
cg.add(var.add_sta(wifi_network(network, ap, ip_config)))
# Build all WiFiAP objects
networks = config.get(CONF_NETWORKS, [])
if networks:
wifi_aps = []
for network in networks:
if CONF_EAP in network:
has_eap = True
ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP))
wifi_aps.append(wifi_network(network, WiFiAP(), ip_config))
for network in config.get(CONF_NETWORKS, []):
if CONF_EAP in network:
has_eap = True
cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network)
# Set all WiFi networks at once
cg.add(var.set_stas(wifi_aps))
if CONF_AP in config:
conf = config[CONF_AP]

View File

@@ -330,11 +330,8 @@ float WiFiComponent::get_loop_priority() const {
return 10.0f; // before other loop components
}
void WiFiComponent::add_sta(const WiFiAP &ap) { this->sta_.push_back(ap); }
void WiFiComponent::set_sta(const WiFiAP &ap) {
this->clear_sta();
this->add_sta(ap);
}
void WiFiComponent::set_stas(const std::initializer_list<WiFiAP> &aps) { this->sta_ = aps; }
void WiFiComponent::set_sta(const WiFiAP &ap) { this->set_stas({ap}); }
void WiFiComponent::clear_sta() { this->sta_.clear(); }
void WiFiComponent::save_wifi_sta(const std::string &ssid, const std::string &password) {
SavedWifiSettings save{}; // zero-initialized - all bytes set to \0, guaranteeing null termination

View File

@@ -219,7 +219,7 @@ class WiFiComponent : public Component {
void set_sta(const WiFiAP &ap);
WiFiAP get_sta() { return this->selected_ap_; }
void add_sta(const WiFiAP &ap);
void set_stas(const std::initializer_list<WiFiAP> &aps);
void clear_sta();
#ifdef USE_WIFI_AP
@@ -393,7 +393,7 @@ class WiFiComponent : public Component {
#endif
std::string use_address_;
std::vector<WiFiAP> sta_;
FixedVector<WiFiAP> sta_;
std::vector<WiFiSTAPriority> sta_priorities_;
wifi_scan_vector_t<WiFiScanResult> scan_result_;
WiFiAP selected_ap_;

View File

@@ -471,6 +471,7 @@ CONF_IMPORT_REACTIVE_ENERGY = "import_reactive_energy"
CONF_INC_PIN = "inc_pin"
CONF_INCLUDE_INTERNAL = "include_internal"
CONF_INCLUDES = "includes"
CONF_INCLUDES_C = "includes_c"
CONF_INDEX = "index"
CONF_INDOOR = "indoor"
CONF_INFRARED = "infrared"

View File

@@ -243,7 +243,7 @@ template<typename... Ts> class ActionList {
}
this->actions_end_ = action;
}
void add_actions(const std::vector<Action<Ts...> *> &actions) {
void add_actions(const std::initializer_list<Action<Ts...> *> &actions) {
for (auto *action : actions) {
this->add_action(action);
}
@@ -286,7 +286,7 @@ template<typename... Ts> class Automation {
explicit Automation(Trigger<Ts...> *trigger) : trigger_(trigger) { this->trigger_->set_automation_parent(this); }
void add_action(Action<Ts...> *action) { this->actions_.add_action(action); }
void add_actions(const std::vector<Action<Ts...> *> &actions) { this->actions_.add_actions(actions); }
void add_actions(const std::initializer_list<Action<Ts...> *> &actions) { this->actions_.add_actions(actions); }
void stop() { this->actions_.stop(); }
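A self-contained sketch (not ESPHome code) of why the std::initializer_list parameter above saves memory: a braced argument list binds directly to the initializer_list overload, whereas the std::vector overload forces a heap-allocated temporary for the same call.

#include <cstdio>
#include <initializer_list>
#include <vector>

struct Action { int id; };

static void add_actions_vector(const std::vector<Action *> &actions) {
  // callers writing add_actions_vector({&a, &b}) force a temporary heap-allocated vector
  printf("vector overload, %zu actions\n", actions.size());
}

static void add_actions_list(std::initializer_list<Action *> actions) {
  // the braced list is backed by a compiler-generated array; no heap allocation is needed
  printf("initializer_list overload, %zu actions\n", actions.size());
}

int main() {
  Action a{1}, b{2};
  add_actions_vector({&a, &b});
  add_actions_list({&a, &b});
  return 0;
}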

View File

@@ -194,12 +194,12 @@ template<typename... Ts> class IfAction : public Action<Ts...> {
public:
explicit IfAction(Condition<Ts...> *condition) : condition_(condition) {}
void add_then(const std::vector<Action<Ts...> *> &actions) {
void add_then(const std::initializer_list<Action<Ts...> *> &actions) {
this->then_.add_actions(actions);
this->then_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
}
void add_else(const std::vector<Action<Ts...> *> &actions) {
void add_else(const std::initializer_list<Action<Ts...> *> &actions) {
this->else_.add_actions(actions);
this->else_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
}
@@ -240,7 +240,7 @@ template<typename... Ts> class WhileAction : public Action<Ts...> {
public:
WhileAction(Condition<Ts...> *condition) : condition_(condition) {}
void add_then(const std::vector<Action<Ts...> *> &actions) {
void add_then(const std::initializer_list<Action<Ts...> *> &actions) {
this->then_.add_actions(actions);
this->then_.add_action(new LambdaAction<Ts...>([this](Ts... x) {
if (this->num_running_ > 0 && this->condition_->check_tuple(this->var_)) {
@@ -287,7 +287,7 @@ template<typename... Ts> class RepeatAction : public Action<Ts...> {
public:
TEMPLATABLE_VALUE(uint32_t, count)
void add_then(const std::vector<Action<uint32_t, Ts...> *> &actions) {
void add_then(const std::initializer_list<Action<uint32_t, Ts...> *> &actions) {
this->then_.add_actions(actions);
this->then_.add_action(new LambdaAction<uint32_t, Ts...>([this](uint32_t iteration, Ts... x) {
iteration++;

View File

@@ -21,6 +21,7 @@ from esphome.const import (
CONF_FRIENDLY_NAME,
CONF_ID,
CONF_INCLUDES,
CONF_INCLUDES_C,
CONF_LIBRARIES,
CONF_MIN_VERSION,
CONF_NAME,
@@ -227,6 +228,7 @@ CONFIG_SCHEMA = cv.All(
}
),
cv.Optional(CONF_INCLUDES, default=[]): cv.ensure_list(valid_include),
cv.Optional(CONF_INCLUDES_C, default=[]): cv.ensure_list(valid_include),
cv.Optional(CONF_LIBRARIES, default=[]): cv.ensure_list(cv.string_strict),
cv.Optional(CONF_NAME_ADD_MAC_SUFFIX, default=False): cv.boolean,
cv.Optional(CONF_DEBUG_SCHEDULER, default=False): cv.boolean,
@@ -302,6 +304,17 @@ def _list_target_platforms():
return target_platforms
def _sort_includes_by_type(includes: list[str]) -> tuple[list[str], list[str]]:
system_includes = []
other_includes = []
for include in includes:
if include.startswith("<") and include.endswith(">"):
system_includes.append(include)
else:
other_includes.append(include)
return system_includes, other_includes
def preload_core_config(config, result) -> str:
with cv.prepend_path(CONF_ESPHOME):
conf = PRELOAD_CONFIG_SCHEMA(config[CONF_ESPHOME])
@@ -339,7 +352,7 @@ def preload_core_config(config, result) -> str:
return target_platforms[0]
def include_file(path: Path, basename: Path):
def include_file(path: Path, basename: Path, is_c_header: bool = False):
parts = basename.parts
dst = CORE.relative_src_path(*parts)
copy_file_if_changed(path, dst)
@@ -347,7 +360,14 @@ def include_file(path: Path, basename: Path):
ext = path.suffix
if ext in [".h", ".hpp", ".tcc"]:
# Header, add include statement
cg.add_global(cg.RawStatement(f'#include "{basename}"'))
if is_c_header:
# Wrap in extern "C" block for C headers
cg.add_global(
cg.RawStatement(f'extern "C" {{\n #include "{basename}"\n}}')
)
else:
# Regular include
cg.add_global(cg.RawStatement(f'#include "{basename}"'))
ARDUINO_GLUE_CODE = """\
@@ -377,7 +397,7 @@ async def add_arduino_global_workaround():
@coroutine_with_priority(CoroPriority.FINAL)
async def add_includes(includes: list[str]) -> None:
async def add_includes(includes: list[str], is_c_header: bool = False) -> None:
# Add includes at the very end, so that the included files can access global variables
for include in includes:
path = CORE.relative_config_path(include)
@@ -385,11 +405,11 @@ async def add_includes(includes: list[str]) -> None:
# Directory, copy tree
for p in walk_files(path):
basename = p.relative_to(path.parent)
include_file(p, basename)
include_file(p, basename, is_c_header)
else:
# Copy file
basename = Path(path.name)
include_file(path, basename)
include_file(path, basename, is_c_header)
@coroutine_with_priority(CoroPriority.FINAL)
@@ -494,19 +514,25 @@ async def to_code(config: ConfigType) -> None:
CORE.add_job(add_arduino_global_workaround)
if config[CONF_INCLUDES]:
# Get the <...> includes
system_includes = []
other_includes = []
for include in config[CONF_INCLUDES]:
if include.startswith("<") and include.endswith(">"):
system_includes.append(include)
else:
other_includes.append(include)
system_includes, other_includes = _sort_includes_by_type(config[CONF_INCLUDES])
# <...> includes should be at the start
for include in system_includes:
cg.add_global(cg.RawStatement(f"#include {include}"), prepend=True)
# Other includes should be at the end
CORE.add_job(add_includes, other_includes)
CORE.add_job(add_includes, other_includes, False)
if config[CONF_INCLUDES_C]:
system_includes, other_includes = _sort_includes_by_type(
config[CONF_INCLUDES_C]
)
# <...> includes should be at the start
for include in system_includes:
cg.add_global(
cg.RawStatement(f'extern "C" {{\n #include {include}\n}}'),
prepend=True,
)
# Other includes should be at the end
CORE.add_job(add_includes, other_includes, True)
if project_conf := config.get(CONF_PROJECT):
cg.add_define("ESPHOME_PROJECT_NAME", project_conf[CONF_NAME])

View File

@@ -44,6 +44,7 @@
#define USE_GRAPHICAL_DISPLAY_MENU
#define USE_HOMEASSISTANT_TIME
#define USE_HTTP_REQUEST_OTA_WATCHDOG_TIMEOUT 8000 // NOLINT
#define USE_IMPROV_SERIAL_NEXT_URL
#define USE_JSON
#define USE_LIGHT
#define USE_LOCK
@@ -186,6 +187,7 @@
#define USE_ESP32_CAMERA_JPEG_ENCODER
#define USE_I2C
#define USE_IMPROV
#define USE_ESP32_IMPROV_NEXT_URL
#define USE_MICROPHONE
#define USE_PSRAM
#define USE_SOCKET_IMPL_BSD_SOCKETS

View File

@@ -194,12 +194,8 @@ template<typename T> class FixedVector {
size_ = 0;
}
public:
FixedVector() = default;
/// Constructor from initializer list - allocates exact size needed
/// This enables brace initialization: FixedVector<int> v = {1, 2, 3};
FixedVector(std::initializer_list<T> init_list) {
// Helper to assign from initializer list (shared by constructor and assignment operator)
void assign_from_initializer_list_(std::initializer_list<T> init_list) {
init(init_list.size());
size_t idx = 0;
for (const auto &item : init_list) {
@@ -209,9 +205,17 @@ template<typename T> class FixedVector {
size_ = init_list.size();
}
public:
FixedVector() = default;
/// Constructor from initializer list - allocates exact size needed
/// This enables brace initialization: FixedVector<int> v = {1, 2, 3};
FixedVector(std::initializer_list<T> init_list) { assign_from_initializer_list_(init_list); }
~FixedVector() { cleanup_(); }
// Disable copy operations (avoid accidental expensive copies)
// Use copy_from() for explicit copying when needed (e.g., copy_select)
FixedVector(const FixedVector &) = delete;
FixedVector &operator=(const FixedVector &) = delete;
@@ -234,6 +238,28 @@ template<typename T> class FixedVector {
return *this;
}
/// Assignment from initializer list - avoids temporary and move overhead
/// This enables: FixedVector<int> v; v = {1, 2, 3};
FixedVector &operator=(std::initializer_list<T> init_list) {
cleanup_();
reset_();
assign_from_initializer_list_(init_list);
return *this;
}
/// Explicitly copy another FixedVector
/// This method exists instead of operator= to make copying intentional and visible.
/// Copying is expensive on embedded systems, so we require explicit opt-in.
/// Use cases: copy_select (copying source options), lvgl (copying widget options)
void copy_from(const FixedVector &other) {
cleanup_();
reset_();
init(other.size());
for (const auto &item : other) {
push_back(item);
}
}
// Allocate capacity - can be called multiple times to reinit
void init(size_t n) {
cleanup_();
@@ -292,6 +318,11 @@ template<typename T> class FixedVector {
return data_[size_ - 1];
}
/// Access first element (no bounds checking - matches std::vector behavior)
/// Caller must ensure vector is not empty (size() > 0)
T &front() { return data_[0]; }
const T &front() const { return data_[0]; }
/// Access last element (no bounds checking - matches std::vector behavior)
/// Caller must ensure vector is not empty (size() > 0)
T &back() { return data_[size_ - 1]; }
@@ -305,6 +336,12 @@ template<typename T> class FixedVector {
T &operator[](size_t i) { return data_[i]; }
const T &operator[](size_t i) const { return data_[i]; }
  /// Access element at index i (API-compatible with std::vector::at())
  /// Unlike std::vector::at(), no bounds check is performed here; out-of-bounds
  /// access is undefined behavior, so the caller must ensure i < size()
T &at(size_t i) { return data_[i]; }
const T &at(size_t i) const { return data_[i]; }
// Iterator support for range-based for loops
T *begin() { return data_; }
T *end() { return data_ + size_; }

View File

@@ -1,4 +1,4 @@
pylint==4.0.1
pylint==4.0.2
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
ruff==0.14.1 # also change in .pre-commit-config.yaml when updating
pyupgrade==3.21.0 # also change in .pre-commit-config.yaml when updating

View File

@@ -43,7 +43,6 @@ from enum import StrEnum
from functools import cache
import json
import os
from pathlib import Path
import subprocess
import sys
from typing import Any
@@ -53,14 +52,23 @@ from helpers import (
CPP_FILE_EXTENSIONS,
PYTHON_FILE_EXTENSIONS,
changed_files,
filter_component_files,
get_all_dependencies,
get_changed_components,
get_component_from_path,
get_component_test_files,
get_components_from_integration_fixtures,
get_components_with_dependencies,
git_ls_files,
parse_test_filename,
root_path,
)
# Threshold for splitting clang-tidy jobs
# For small PRs (< 65 files), use nosplit for faster CI
# For large PRs (>= 65 files), use split for better parallelization
CLANG_TIDY_SPLIT_THRESHOLD = 65
class Platform(StrEnum):
"""Platform identifiers for memory impact analysis."""
@@ -78,11 +86,16 @@ MEMORY_IMPACT_FALLBACK_COMPONENT = "api" # Representative component for core ch
MEMORY_IMPACT_FALLBACK_PLATFORM = Platform.ESP32_IDF # Most representative platform
# Platform preference order for memory impact analysis
# Prefer newer platforms first as they represent the future of ESPHome
# ESP8266 is most constrained but many new features don't support it
# This order is used when no platform-specific hints are detected from filenames
# Priority rationale:
# 1. ESP32-C6 IDF - Newest platform, supports Thread/Zigbee
# 2. ESP8266 Arduino - Most memory constrained (best for detecting memory impact),
# fastest build times, most sensitive to code size changes
# 3. ESP32 IDF - Primary ESP32 platform, most representative of modern ESPHome
# 4-6. Other ESP32 variants - Less commonly used but still supported
MEMORY_IMPACT_PLATFORM_PREFERENCE = [
Platform.ESP32_C6_IDF, # ESP32-C6 IDF (newest, supports Thread/Zigbee)
Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained - best for impact analysis)
Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained, fastest builds)
Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative)
Platform.ESP32_C3_IDF, # ESP32-C3 IDF
Platform.ESP32_S2_IDF, # ESP32-S2 IDF
@@ -152,6 +165,26 @@ def should_run_integration_tests(branch: str | None = None) -> bool:
return False
@cache
def _is_clang_tidy_full_scan() -> bool:
"""Check if clang-tidy configuration changed (requires full scan).
Returns:
True if full scan is needed (hash changed), False otherwise.
"""
try:
result = subprocess.run(
[os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"],
capture_output=True,
check=False,
)
# Exit 0 means hash changed (full scan needed)
return result.returncode == 0
except Exception:
# If hash check fails, run full scan to be safe
return True
def should_run_clang_tidy(branch: str | None = None) -> bool:
"""Determine if clang-tidy should run based on changed files.
@@ -188,17 +221,7 @@ def should_run_clang_tidy(branch: str | None = None) -> bool:
True if clang-tidy should run, False otherwise.
"""
# First check if clang-tidy configuration changed (full scan needed)
try:
result = subprocess.run(
[os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"],
capture_output=True,
check=False,
)
# Exit 0 means hash changed (full scan needed)
if result.returncode == 0:
return True
except Exception:
# If hash check fails, run clang-tidy to be safe
if _is_clang_tidy_full_scan():
return True
# Check if .clang-tidy.hash file itself was changed
@@ -210,6 +233,22 @@ def should_run_clang_tidy(branch: str | None = None) -> bool:
return _any_changed_file_endswith(branch, CPP_FILE_EXTENSIONS)
def count_changed_cpp_files(branch: str | None = None) -> int:
"""Count the number of changed C++ files.
This is used to determine whether to split clang-tidy jobs or run them as a single job.
For PRs with < 65 changed C++ files, running a single job is faster than splitting.
Args:
branch: Branch to compare against. If None, uses default.
Returns:
Number of changed C++ files.
"""
files = changed_files(branch)
return sum(1 for file in files if file.endswith(CPP_FILE_EXTENSIONS))
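An illustrative sketch (hypothetical counts) of how this count feeds the split/nosplit decision made later in main(), using the CLANG_TIDY_SPLIT_THRESHOLD constant defined above:

# Sketch only: illustrative counts vs. CLANG_TIDY_SPLIT_THRESHOLD (= 65)
for count in (12, 64, 65, 200):
    mode = "nosplit" if count < CLANG_TIDY_SPLIT_THRESHOLD else "split"
    # 12 -> nosplit, 64 -> nosplit, 65 -> split, 200 -> split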
def should_run_clang_format(branch: str | None = None) -> bool:
"""Determine if clang-format should run based on changed files.
@@ -264,6 +303,91 @@ def _component_has_tests(component: str) -> bool:
return bool(get_component_test_files(component))
def _select_platform_by_preference(
platforms: list[Platform] | set[Platform],
) -> Platform:
"""Select the most preferred platform from a list/set based on MEMORY_IMPACT_PLATFORM_PREFERENCE.
Args:
platforms: List or set of platforms to choose from
Returns:
The most preferred platform (earliest in MEMORY_IMPACT_PLATFORM_PREFERENCE)
"""
return min(platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index)
def _select_platform_by_count(
platform_counts: Counter[Platform],
) -> Platform:
"""Select platform by count, using MEMORY_IMPACT_PLATFORM_PREFERENCE as tiebreaker.
Args:
platform_counts: Counter mapping platforms to their counts
Returns:
Platform with highest count, breaking ties by preference order
"""
return min(
platform_counts.keys(),
key=lambda p: (
-platform_counts[p], # Negative to prefer higher counts
MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p),
),
)
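A worked sketch (hypothetical counts, assuming the Platform enum and the helper above are in scope) of the count-then-preference tiebreak:

# Sketch only: two platforms tie on count; preference order breaks the tie
from collections import Counter

hint_counts = Counter(
    {Platform.ESP32_IDF: 2, Platform.ESP8266_ARD: 2, Platform.ESP32_C3_IDF: 1}
)
# ESP8266_ARD wins: same count as ESP32_IDF, but it appears earlier in
# MEMORY_IMPACT_PLATFORM_PREFERENCE.
assert _select_platform_by_count(hint_counts) == Platform.ESP8266_ARD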
def _detect_platform_hint_from_filename(filename: str) -> Platform | None:
"""Detect platform hint from filename patterns.
Detects platform-specific files using patterns like:
- wifi_component_esp_idf.cpp, *_idf.h -> ESP32 IDF variants
- wifi_component_esp8266.cpp, *_esp8266.h -> ESP8266_ARD
- *_esp32*.cpp -> ESP32 IDF (generic)
- *_libretiny.cpp, *_retiny.* -> LibreTiny (not in preference list)
- *_pico.cpp, *_rp2040.* -> RP2040 (not in preference list)
Args:
filename: File path to check
Returns:
Platform enum if a specific platform is detected, None otherwise
"""
filename_lower = filename.lower()
# ESP-IDF platforms (check specific variants first)
if "esp_idf" in filename_lower or "_idf" in filename_lower:
# Check for specific ESP32 variants
if "c6" in filename_lower or "esp32c6" in filename_lower:
return Platform.ESP32_C6_IDF
if "c3" in filename_lower or "esp32c3" in filename_lower:
return Platform.ESP32_C3_IDF
if "s2" in filename_lower or "esp32s2" in filename_lower:
return Platform.ESP32_S2_IDF
if "s3" in filename_lower or "esp32s3" in filename_lower:
return Platform.ESP32_S3_IDF
# Default to ESP32 IDF for generic esp_idf files
return Platform.ESP32_IDF
# ESP8266 Arduino
if "esp8266" in filename_lower:
return Platform.ESP8266_ARD
# Generic ESP32 (without _idf suffix, could be Arduino or shared code)
# Prefer IDF as it's the modern platform
if "esp32" in filename_lower:
return Platform.ESP32_IDF
# LibreTiny and RP2040 are not in MEMORY_IMPACT_PLATFORM_PREFERENCE
# so we don't return them as hints
# if "retiny" in filename_lower or "libretiny" in filename_lower:
# return None # No specific LibreTiny platform preference
# if "pico" in filename_lower or "rp2040" in filename_lower:
# return None # No RP2040 platform preference
return None
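A few illustrative calls (hypothetical file paths, assuming the helper above is in scope) showing the hints this detection would produce:

# Sketch only: hypothetical file paths and the resulting platform hints
assert _detect_platform_hint_from_filename(
    "esphome/components/wifi/wifi_component_esp_idf.cpp") == Platform.ESP32_IDF
assert _detect_platform_hint_from_filename(
    "esphome/components/wifi/wifi_component_esp8266.cpp") == Platform.ESP8266_ARD
assert _detect_platform_hint_from_filename(
    "esphome/components/uart/uart.cpp") is None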
def detect_memory_impact_config(
branch: str | None = None,
) -> dict[str, Any]:
@@ -273,6 +397,9 @@ def detect_memory_impact_config(
building a merged configuration with all changed components (like
test_build_components.py does) to get comprehensive memory analysis.
When platform-specific files are detected (e.g., wifi_component_esp_idf.cpp),
prefers that platform for testing to ensure the most relevant memory analysis.
For core C++ file changes without component changes, runs a fallback
analysis using a representative component to measure the impact.
@@ -291,8 +418,10 @@ def detect_memory_impact_config(
files = changed_files(branch)
# Find all changed components (excluding core and base bus components)
# Also collect platform hints from platform-specific filenames
changed_component_set: set[str] = set()
has_core_cpp_changes = False
platform_hints: list[Platform] = []
for file in files:
component = get_component_from_path(file)
@@ -300,6 +429,10 @@ def detect_memory_impact_config(
# Skip base bus components as they're used across many builds
if component not in BASE_BUS_COMPONENTS:
changed_component_set.add(component)
# Check if this is a platform-specific file
platform_hint = _detect_platform_hint_from_filename(file)
if platform_hint:
platform_hints.append(platform_hint)
elif file.startswith("esphome/") and file.endswith(CPP_FILE_EXTENSIONS):
# Core ESPHome C++ files changed (not component-specific)
# Only C++ files affect memory usage
@@ -356,27 +489,42 @@ def detect_memory_impact_config(
common_platforms &= platforms
# Select the most preferred platform from the common set
# Exception: for core changes, use fallback platform (most representative of codebase)
if force_fallback_platform:
# Priority order:
# 1. Platform hints from filenames (e.g., wifi_component_esp_idf.cpp suggests ESP32_IDF)
# 2. Core changes use fallback platform (most representative of codebase)
# 3. Common platforms supported by all components
# 4. Most commonly supported platform
if platform_hints:
# Use most common platform hint that's also supported by all components
hint_counts = Counter(platform_hints)
# Filter to only hints that are in common_platforms (if any common platforms exist)
valid_hints = (
[h for h in hint_counts if h in common_platforms]
if common_platforms
else list(hint_counts.keys())
)
if valid_hints:
platform = _select_platform_by_count(
Counter({p: hint_counts[p] for p in valid_hints})
)
elif common_platforms:
# Hints exist but none match common platforms, use common platform logic
platform = _select_platform_by_preference(common_platforms)
else:
# Use the most common hint even if it's not in common platforms
platform = _select_platform_by_count(hint_counts)
elif force_fallback_platform:
platform = MEMORY_IMPACT_FALLBACK_PLATFORM
elif common_platforms:
# Pick the most preferred platform that all components support
platform = min(common_platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index)
platform = _select_platform_by_preference(common_platforms)
else:
# No common platform - pick the most commonly supported platform
# This allows testing components individually even if they can't be merged
# Count how many components support each platform
platform_counts = Counter(
p for platforms in component_platforms_map.values() for p in platforms
)
# Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE
platform = max(
platform_counts.keys(),
key=lambda p: (
platform_counts[p],
-MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p),
),
)
platform = _select_platform_by_count(platform_counts)
# Debug output
print("Memory impact analysis:", file=sys.stderr)
@@ -386,6 +534,7 @@ def detect_memory_impact_config(
f" Component platforms: {dict(sorted(component_platforms_map.items()))}",
file=sys.stderr,
)
print(f" Platform hints from filenames: {platform_hints}", file=sys.stderr)
print(f" Common platforms: {sorted(common_platforms)}", file=sys.stderr)
print(f" Selected platform: {platform}", file=sys.stderr)
@@ -412,17 +561,31 @@ def main() -> None:
run_clang_tidy = should_run_clang_tidy(args.branch)
run_clang_format = should_run_clang_format(args.branch)
run_python_linters = should_run_python_linters(args.branch)
changed_cpp_file_count = count_changed_cpp_files(args.branch)
# Get both directly changed and all changed components (with dependencies) in one call
script_path = Path(__file__).parent / "list-components.py"
cmd = [sys.executable, str(script_path), "--changed-with-deps"]
if args.branch:
cmd.extend(["-b", args.branch])
# Get changed components
# get_changed_components() returns:
# None: Core files changed (need full scan)
# []: No components changed
# [list]: Changed components (already includes dependencies)
changed_components_result = get_changed_components()
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
component_data = json.loads(result.stdout)
directly_changed_components = component_data["directly_changed"]
changed_components = component_data["all_changed"]
if changed_components_result is None:
# Core files changed - will trigger full clang-tidy scan
# No specific components to test
changed_components = []
directly_changed_components = []
is_core_change = True
else:
# Get both directly changed and all changed (with dependencies)
changed = changed_files(args.branch)
component_files = [f for f in changed if filter_component_files(f)]
directly_changed_components = get_components_with_dependencies(
component_files, False
)
changed_components = get_components_with_dependencies(component_files, True)
is_core_change = False
# Filter to only components that have test files
# Components without tests shouldn't generate CI test jobs
@@ -433,11 +596,11 @@ def main() -> None:
# Get directly changed components with tests (for isolated testing)
# These will be tested WITHOUT --testing-mode in CI to enable full validation
# (pin conflicts, etc.) since they contain the actual changes being reviewed
directly_changed_with_tests = [
directly_changed_with_tests = {
component
for component in directly_changed_components
if _component_has_tests(component)
]
}
# Get dependency-only components (for grouped testing)
dependency_only_components = [
@@ -449,19 +612,54 @@ def main() -> None:
# Detect components for memory impact analysis (merged config)
memory_impact = detect_memory_impact_config(args.branch)
# Determine clang-tidy mode based on actual files that will be checked
if run_clang_tidy:
# Full scan needed if: hash changed OR core files changed
is_full_scan = _is_clang_tidy_full_scan() or is_core_change
if is_full_scan:
# Full scan checks all files - always use split mode for efficiency
clang_tidy_mode = "split"
files_to_check_count = -1 # Sentinel value for "all files"
else:
# Targeted scan - calculate actual files that will be checked
# This accounts for component dependencies, not just directly changed files
if changed_components:
# Count C++ files in all changed components (including dependencies)
all_cpp_files = list(git_ls_files(["*.cpp"]).keys())
component_set = set(changed_components)
files_to_check_count = sum(
1
for f in all_cpp_files
if get_component_from_path(f) in component_set
)
else:
# If no components changed, use the simple count of changed C++ files
files_to_check_count = changed_cpp_file_count
if files_to_check_count < CLANG_TIDY_SPLIT_THRESHOLD:
clang_tidy_mode = "nosplit"
else:
clang_tidy_mode = "split"
else:
clang_tidy_mode = "disabled"
files_to_check_count = 0
# Build output
output: dict[str, Any] = {
"integration_tests": run_integration,
"clang_tidy": run_clang_tidy,
"clang_tidy_mode": clang_tidy_mode,
"clang_format": run_clang_format,
"python_linters": run_python_linters,
"changed_components": changed_components,
"changed_components_with_tests": changed_components_with_tests,
"directly_changed_components_with_tests": directly_changed_with_tests,
"directly_changed_components_with_tests": list(directly_changed_with_tests),
"dependency_only_components_with_tests": dependency_only_components,
"component_test_count": len(changed_components_with_tests),
"directly_changed_count": len(directly_changed_with_tests),
"dependency_only_count": len(dependency_only_components),
"changed_cpp_file_count": changed_cpp_file_count,
"memory_impact": memory_impact,
}

View File

@@ -1,5 +1,6 @@
from __future__ import annotations
from collections.abc import Callable
from functools import cache
import json
import os
@@ -7,6 +8,7 @@ import os.path
from pathlib import Path
import re
import subprocess
import sys
import time
from typing import Any
@@ -304,7 +306,10 @@ def get_changed_components() -> list[str] | None:
for f in changed
)
if core_cpp_changed:
print("Core C++/header files changed - will run full clang-tidy scan")
print(
"Core C++/header files changed - will run full clang-tidy scan",
file=sys.stderr,
)
return None
# Use list-components.py to get changed components
@@ -318,7 +323,10 @@ def get_changed_components() -> list[str] | None:
return parse_list_components_output(result.stdout)
except subprocess.CalledProcessError:
# If the script fails, fall back to full scan
print("Could not determine changed components - will run full clang-tidy scan")
print(
"Could not determine changed components - will run full clang-tidy scan",
file=sys.stderr,
)
return None
@@ -370,14 +378,14 @@ def _filter_changed_ci(files: list[str]) -> list[str]:
if f in changed and not f.startswith(ESPHOME_COMPONENTS_PATH)
]
if not files:
print("No files changed")
print("No files changed", file=sys.stderr)
return files
# Scenario 3: Specific components changed
# Action: Check ALL files in each changed component
# Convert component list to set for O(1) lookups
component_set = set(components)
print(f"Changed components: {', '.join(sorted(components))}")
print(f"Changed components: {', '.join(sorted(components))}", file=sys.stderr)
# The 'files' parameter contains ALL files in the codebase that clang-tidy would check.
# We filter this down to only files in the changed components.
@@ -648,3 +656,220 @@ def get_components_from_integration_fixtures() -> set[str]:
components.add(item["platform"])
return components
def filter_component_files(file_path: str) -> bool:
"""Check if a file path is a component file.
Args:
file_path: Path to check
Returns:
True if the file is in a component directory
"""
return file_path.startswith("esphome/components/") or file_path.startswith(
"tests/components/"
)
def extract_component_names_from_files(files: list[str]) -> list[str]:
"""Extract unique component names from a list of file paths.
Args:
files: List of file paths
Returns:
List of unique component names (preserves order)
"""
return list(
dict.fromkeys(comp for file in files if (comp := get_component_from_path(file)))
)
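A brief sketch (hypothetical paths; assumes get_component_from_path extracts the name from esphome/components/<name>/ paths) of these two helpers used together:

# Sketch only: hypothetical changed files
files = [
    "esphome/components/wifi/wifi_component.cpp",
    "esphome/components/wifi/wifi_component.h",
    "esphome/components/api/api_connection.cpp",
]
assert all(filter_component_files(f) for f in files)
# Order is preserved, duplicates collapsed
assert extract_component_names_from_files(files) == ["wifi", "api"]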
def add_item_to_components_graph(
components_graph: dict[str, list[str]], parent: str, child: str
) -> None:
"""Add a dependency relationship to the components graph.
Args:
components_graph: Graph mapping parent components to their children
parent: Parent component name
child: Child component name (dependent)
"""
if not parent.startswith("__") and parent != child:
if parent not in components_graph:
components_graph[parent] = []
if child not in components_graph[parent]:
components_graph[parent].append(child)
def resolve_auto_load(
auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]],
config: dict | None = None,
) -> list[str]:
"""Resolve AUTO_LOAD to a list, handling callables with or without config parameter.
Args:
auto_load: The AUTO_LOAD value (list or callable)
config: Optional config to pass to callable AUTO_LOAD functions
Returns:
List of component names to auto-load
"""
if not callable(auto_load):
return auto_load
import inspect
if inspect.signature(auto_load).parameters:
return auto_load(config)
return auto_load()
def create_components_graph() -> dict[str, list[str]]:
"""Create a graph of component dependencies.
Returns:
Dictionary mapping parent components to their children (dependencies)
"""
from pathlib import Path
from esphome import const
from esphome.core import CORE
from esphome.loader import ComponentManifest, get_component, get_platform
# The root directory of the repo
root = Path(__file__).parent.parent
components_dir = root / "esphome" / "components"
# Fake some directory so that get_component works
CORE.config_path = root
# Various configuration to capture different outcomes used by `AUTO_LOAD` function.
KEY_CORE = const.KEY_CORE
KEY_TARGET_FRAMEWORK = const.KEY_TARGET_FRAMEWORK
KEY_TARGET_PLATFORM = const.KEY_TARGET_PLATFORM
PLATFORM_ESP32 = const.PLATFORM_ESP32
PLATFORM_ESP8266 = const.PLATFORM_ESP8266
TARGET_CONFIGURATIONS = [
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32},
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266},
]
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
components_graph = {}
platforms = []
components: list[tuple[ComponentManifest, str, Path]] = []
for path in components_dir.iterdir():
if not path.is_dir():
continue
if not (path / "__init__.py").is_file():
continue
name = path.name
comp = get_component(name)
if comp is None:
raise RuntimeError(
f"Cannot find component {name}. Make sure current path is pip installed ESPHome"
)
components.append((comp, name, path))
if comp.is_platform_component:
platforms.append(name)
platforms = set(platforms)
for comp, name, path in components:
for dependency in comp.dependencies:
add_item_to_components_graph(
components_graph, dependency.split(".")[0], name
)
for target_config in TARGET_CONFIGURATIONS:
CORE.data[KEY_CORE] = target_config
for item in resolve_auto_load(comp.auto_load, config=None):
add_item_to_components_graph(components_graph, item, name)
# restore config
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
for platform_path in path.iterdir():
platform_name = platform_path.stem
if platform_name == name or platform_name not in platforms:
continue
platform = get_platform(platform_name, name)
if platform is None:
continue
add_item_to_components_graph(components_graph, platform_name, name)
for dependency in platform.dependencies:
add_item_to_components_graph(
components_graph, dependency.split(".")[0], name
)
for target_config in TARGET_CONFIGURATIONS:
CORE.data[KEY_CORE] = target_config
for item in resolve_auto_load(platform.auto_load, config={}):
add_item_to_components_graph(components_graph, item, name)
# restore config
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
return components_graph
def find_children_of_component(
components_graph: dict[str, list[str]], component_name: str, depth: int = 0
) -> list[str]:
"""Find all components that depend on the given component (recursively).
Args:
components_graph: Graph mapping parent components to their children
component_name: Component name to find children for
depth: Current recursion depth (max 10)
Returns:
        List of all dependent component names (duplicates removed before returning)
"""
if component_name not in components_graph:
return []
children = []
for child in components_graph[component_name]:
children.append(child)
if depth < 10:
children.extend(
find_children_of_component(components_graph, child, depth + 1)
)
# Remove duplicate values
return list(set(children))
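A tiny sketch with a hypothetical dependency graph (component names are illustrative), showing the recursive expansion; result order is unspecified because duplicates are removed via set():

# Sketch only: changing "i2c" affects bme280 and sht3xd, and changing
# "bme280" affects a hypothetical "bme280_stack" component
graph = {"i2c": ["bme280", "sht3xd"], "bme280": ["bme280_stack"]}
children = find_children_of_component(graph, "i2c")
assert sorted(children) == ["bme280", "bme280_stack", "sht3xd"]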
def get_components_with_dependencies(
files: list[str], get_dependencies: bool = False
) -> list[str]:
"""Get component names from files, optionally including their dependencies.
Args:
files: List of file paths
get_dependencies: If True, include all dependent components
Returns:
Sorted list of component names
"""
components = extract_component_names_from_files(files)
if get_dependencies:
components_graph = create_components_graph()
all_components = components.copy()
for c in components:
all_components.extend(find_children_of_component(components_graph, c))
# Remove duplicate values
all_changed_components = list(set(all_components))
return sorted(all_changed_components)
return sorted(components)

View File

@@ -1,24 +1,12 @@
#!/usr/bin/env python3
import argparse
from collections.abc import Callable
from pathlib import Path
import sys
from helpers import changed_files, get_component_from_path, git_ls_files
from esphome.const import (
KEY_CORE,
KEY_TARGET_FRAMEWORK,
KEY_TARGET_PLATFORM,
PLATFORM_ESP32,
PLATFORM_ESP8266,
from helpers import (
changed_files,
filter_component_files,
get_components_with_dependencies,
git_ls_files,
)
from esphome.core import CORE
from esphome.loader import ComponentManifest, get_component, get_platform
def filter_component_files(str):
return str.startswith("esphome/components/") | str.startswith("tests/components/")
def get_all_component_files() -> list[str]:
@@ -27,156 +15,6 @@ def get_all_component_files() -> list[str]:
return list(filter(filter_component_files, files))
def extract_component_names_array_from_files_array(files):
components = []
for file in files:
component_name = get_component_from_path(file)
if component_name and component_name not in components:
components.append(component_name)
return components
def add_item_to_components_graph(components_graph, parent, child):
if not parent.startswith("__") and parent != child:
if parent not in components_graph:
components_graph[parent] = []
if child not in components_graph[parent]:
components_graph[parent].append(child)
def resolve_auto_load(
auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]],
config: dict | None = None,
) -> list[str]:
"""Resolve AUTO_LOAD to a list, handling callables with or without config parameter.
Args:
auto_load: The AUTO_LOAD value (list or callable)
config: Optional config to pass to callable AUTO_LOAD functions
Returns:
List of component names to auto-load
"""
if not callable(auto_load):
return auto_load
import inspect
if inspect.signature(auto_load).parameters:
return auto_load(config)
return auto_load()
def create_components_graph():
# The root directory of the repo
root = Path(__file__).parent.parent
components_dir = root / "esphome" / "components"
# Fake some directory so that get_component works
CORE.config_path = root
# Various configuration to capture different outcomes used by `AUTO_LOAD` function.
TARGET_CONFIGURATIONS = [
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32},
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266},
]
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
components_graph = {}
platforms = []
components: list[tuple[ComponentManifest, str, Path]] = []
for path in components_dir.iterdir():
if not path.is_dir():
continue
if not (path / "__init__.py").is_file():
continue
name = path.name
comp = get_component(name)
if comp is None:
print(
f"Cannot find component {name}. Make sure current path is pip installed ESPHome"
)
sys.exit(1)
components.append((comp, name, path))
if comp.is_platform_component:
platforms.append(name)
platforms = set(platforms)
for comp, name, path in components:
for dependency in comp.dependencies:
add_item_to_components_graph(
components_graph, dependency.split(".")[0], name
)
for target_config in TARGET_CONFIGURATIONS:
CORE.data[KEY_CORE] = target_config
for item in resolve_auto_load(comp.auto_load, config=None):
add_item_to_components_graph(components_graph, item, name)
# restore config
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
for platform_path in path.iterdir():
platform_name = platform_path.stem
if platform_name == name or platform_name not in platforms:
continue
platform = get_platform(platform_name, name)
if platform is None:
continue
add_item_to_components_graph(components_graph, platform_name, name)
for dependency in platform.dependencies:
add_item_to_components_graph(
components_graph, dependency.split(".")[0], name
)
for target_config in TARGET_CONFIGURATIONS:
CORE.data[KEY_CORE] = target_config
for item in resolve_auto_load(platform.auto_load, config={}):
add_item_to_components_graph(components_graph, item, name)
# restore config
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
return components_graph
def find_children_of_component(components_graph, component_name, depth=0):
if component_name not in components_graph:
return []
children = []
for child in components_graph[component_name]:
children.append(child)
if depth < 10:
children.extend(
find_children_of_component(components_graph, child, depth + 1)
)
# Remove duplicate values
return list(set(children))
def get_components(files: list[str], get_dependencies: bool = False):
components = extract_component_names_array_from_files_array(files)
if get_dependencies:
components_graph = create_components_graph()
all_components = components.copy()
for c in components:
all_components.extend(find_children_of_component(components_graph, c))
# Remove duplicate values
all_changed_components = list(set(all_components))
return sorted(all_changed_components)
return sorted(components)
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
@@ -251,8 +89,8 @@ def main():
# Return JSON with both directly changed and all changed components
import json
directly_changed = get_components(files, False)
all_changed = get_components(files, True)
directly_changed = get_components_with_dependencies(files, False)
all_changed = get_components_with_dependencies(files, True)
output = {
"directly_changed": directly_changed,
"all_changed": all_changed,
@@ -260,11 +98,11 @@ def main():
print(json.dumps(output))
elif args.changed_direct:
# Return only directly changed components (without dependencies)
for c in get_components(files, False):
for c in get_components_with_dependencies(files, False):
print(c)
else:
# Return all changed components (with dependencies) - default behavior
for c in get_components(files, args.changed):
for c in get_components_with_dependencies(files, args.changed):
print(c)

View File

@@ -966,11 +966,33 @@ def test_components(
# Find all component tests
all_tests = {}
for pattern in component_patterns:
# Skip empty patterns (happens when components list is empty string)
if not pattern:
continue
all_tests.update(find_component_tests(tests_dir, pattern, base_only))
# If no components found, build a reference configuration for baseline comparison
# Create a synthetic "empty" component test that will build just the base config
if not all_tests:
print(f"No components found matching: {component_patterns}")
return 1
print(
"Building reference configuration with no components for baseline comparison..."
)
# Create empty test files for each platform (or filtered platform)
reference_tests: list[Path] = []
for platform_name, base_file in platform_bases.items():
if platform_filter and not platform_name.startswith(platform_filter):
continue
# Create an empty test file named to match the platform
empty_test_file = build_dir / f"reference.{platform_name}.yaml"
empty_test_file.write_text(
"# Empty component test for baseline reference\n"
)
reference_tests.append(empty_test_file)
# Add to all_tests dict with component name "reference"
all_tests["reference"] = reference_tests
print(f"Found {len(all_tests)} components to test")

View File

@@ -37,3 +37,36 @@ binary_sensor:
format: "New state is %s"
args: ['x.has_value() ? ONOFF(x) : "Unknown"']
- binary_sensor.invalidate_state: some_binary_sensor
# Test autorepeat with default configuration (no timings)
- platform: template
id: autorepeat_default
name: "Autorepeat Default"
filters:
- autorepeat:
# Test autorepeat with single timing entry
- platform: template
id: autorepeat_single
name: "Autorepeat Single"
filters:
- autorepeat:
- delay: 2s
time_off: 200ms
time_on: 800ms
# Test autorepeat with three timing entries
- platform: template
id: autorepeat_multiple
name: "Autorepeat Multiple"
filters:
- autorepeat:
- delay: 500ms
time_off: 50ms
time_on: 950ms
- delay: 2s
time_off: 100ms
time_on: 900ms
- delay: 10s
time_off: 200ms
time_on: 800ms

View File

@@ -1,4 +1,5 @@
espnow:
id: espnow_component
auto_add_peer: false
channel: 1
peers:
@@ -50,3 +51,26 @@ espnow:
- format_mac_address_pretty(info.src_addr).c_str()
- format_hex_pretty(data, size).c_str()
- info.rx_ctrl->rssi
packet_transport:
- platform: espnow
id: transport1
espnow_id: espnow_component
peer_address: "FF:FF:FF:FF:FF:FF"
encryption:
key: "0123456789abcdef0123456789abcdef"
sensors:
- temp_sensor
providers:
- name: test_provider
encryption:
key: "0123456789abcdef0123456789abcdef"
sensor:
- platform: internal_temperature
id: temp_sensor
- platform: packet_transport
provider: test_provider
remote_id: temp_sensor
id: remote_temp

View File

@@ -0,0 +1,33 @@
json:
interval:
- interval: 60s
then:
- lambda: |-
// Test build_json
std::string json_str = esphome::json::build_json([](JsonObject root) {
root["sensor"] = "temperature";
root["value"] = 23.5;
root["unit"] = "°C";
});
ESP_LOGD("test", "Built JSON: %s", json_str.c_str());
// Test parse_json
bool parse_ok = esphome::json::parse_json(json_str, [](JsonObject root) {
if (root.containsKey("sensor") && root.containsKey("value")) {
const char* sensor = root["sensor"];
float value = root["value"];
ESP_LOGD("test", "Parsed: sensor=%s, value=%.1f", sensor, value);
} else {
ESP_LOGD("test", "Parsed JSON missing required keys");
}
});
ESP_LOGD("test", "Parse result (JSON syntax only): %s", parse_ok ? "success" : "failed");
// Test JsonBuilder class
esphome::json::JsonBuilder builder;
JsonObject obj = builder.root();
obj["test"] = "direct_builder";
obj["count"] = 42;
std::string result = builder.serialize();
ESP_LOGD("test", "JsonBuilder result: %s", result.c_str());

View File

@@ -0,0 +1 @@
<<: !include common.yaml

View File

@@ -0,0 +1 @@
<<: !include common.yaml

View File

@@ -99,3 +99,140 @@ sensor:
window_size: 10
send_every: 10
send_first_at: 1 # Send after first value
# ValueListFilter-based filters tests
# FilterOutValueFilter - single value
- platform: copy
source_id: source_sensor
name: "Filter Out Single Value"
filters:
- filter_out: 42.0 # Should filter out exactly 42.0
# FilterOutValueFilter - multiple values
- platform: copy
source_id: source_sensor
name: "Filter Out Multiple Values"
filters:
- filter_out: [0.0, 42.0, 100.0] # List of values to filter
# FilterOutValueFilter - with NaN
- platform: copy
source_id: source_sensor
name: "Filter Out NaN"
filters:
- filter_out: nan # Filter out NaN values
# FilterOutValueFilter - mixed values with NaN
- platform: copy
source_id: source_sensor
name: "Filter Out Mixed with NaN"
filters:
- filter_out: [nan, 0.0, 42.0]
# ThrottleWithPriorityFilter - single priority value
- platform: copy
source_id: source_sensor
name: "Throttle with Single Priority"
filters:
- throttle_with_priority:
timeout: 1000ms
value: 42.0 # Priority value bypasses throttle
# ThrottleWithPriorityFilter - multiple priority values
- platform: copy
source_id: source_sensor
name: "Throttle with Multiple Priorities"
filters:
- throttle_with_priority:
timeout: 500ms
value: [0.0, 42.0, 100.0] # Multiple priority values
# ThrottleWithPriorityFilter - with NaN priority
- platform: copy
source_id: source_sensor
name: "Throttle with NaN Priority"
filters:
- throttle_with_priority:
timeout: 1000ms
value: nan # NaN as priority value
# Combined filters - FilterOutValueFilter + other filters
- platform: copy
source_id: source_sensor
name: "Filter Out Then Throttle"
filters:
- filter_out: [0.0, 100.0]
- throttle: 500ms
# Combined filters - ThrottleWithPriorityFilter + other filters
- platform: copy
source_id: source_sensor
name: "Throttle Priority Then Scale"
filters:
- throttle_with_priority:
timeout: 1000ms
value: [42.0]
- multiply: 2.0
# CalibrateLinearFilter - piecewise linear calibration
- platform: copy
source_id: source_sensor
name: "Calibrate Linear Two Points"
filters:
- calibrate_linear:
- 0.0 -> 0.0
- 100.0 -> 100.0
- platform: copy
source_id: source_sensor
name: "Calibrate Linear Multiple Segments"
filters:
- calibrate_linear:
- 0.0 -> 0.0
- 50.0 -> 55.0
- 100.0 -> 102.5
- platform: copy
source_id: source_sensor
name: "Calibrate Linear Least Squares"
filters:
- calibrate_linear:
method: least_squares
datapoints:
- 0.0 -> 0.0
- 50.0 -> 55.0
- 100.0 -> 102.5
# CalibratePolynomialFilter - polynomial calibration
- platform: copy
source_id: source_sensor
name: "Calibrate Polynomial Degree 2"
filters:
- calibrate_polynomial:
degree: 2
datapoints:
- 0.0 -> 0.0
- 50.0 -> 55.0
- 100.0 -> 102.5
- platform: copy
source_id: source_sensor
name: "Calibrate Polynomial Degree 3"
filters:
- calibrate_polynomial:
degree: 3
datapoints:
- 0.0 -> 0.0
- 25.0 -> 26.0
- 50.0 -> 55.0
- 100.0 -> 102.5
# OrFilter - filter branching
- platform: copy
source_id: source_sensor
name: "Or Filter with Multiple Branches"
filters:
- or:
- multiply: 2.0
- offset: 10.0
- lambda: return x * 3.0;

View File

@@ -101,6 +101,9 @@ sensor:
- filter_out: 10
- filter_out: !lambda return NAN;
- heartbeat: 5s
- heartbeat:
period: 5s
optimistic: true
- lambda: return x * (9.0/5.0) + 32.0;
- max:
window_size: 10

View File

@@ -0,0 +1,66 @@
text_sensor:
- platform: template
name: "Test Substitute Single"
id: test_substitute_single
filters:
- substitute:
- ERROR -> Error
- platform: template
name: "Test Substitute Multiple"
id: test_substitute_multiple
filters:
- substitute:
- ERROR -> Error
- WARN -> Warning
- INFO -> Information
- DEBUG -> Debug
- platform: template
name: "Test Substitute Chained"
id: test_substitute_chained
filters:
- substitute:
- foo -> bar
- to_upper
- substitute:
- BAR -> baz
- platform: template
name: "Test Map Single"
id: test_map_single
filters:
- map:
- ON -> Active
- platform: template
name: "Test Map Multiple"
id: test_map_multiple
filters:
- map:
- ON -> Active
- OFF -> Inactive
- UNKNOWN -> Error
- IDLE -> Standby
- platform: template
name: "Test Map Passthrough"
id: test_map_passthrough
filters:
- map:
- Good -> Excellent
- Bad -> Poor
- platform: template
name: "Test All Filters"
id: test_all_filters
filters:
- to_upper
- to_lower
- append: " suffix"
- prepend: "prefix "
- substitute:
- prefix -> PREFIX
- suffix -> SUFFIX
- map:
- PREFIX text SUFFIX -> mapped

View File

@@ -0,0 +1 @@
<<: !include common.yaml

View File

@@ -12,5 +12,8 @@ esphome:
- logger.log: "Failed to connect to WiFi!"
wifi:
ssid: MySSID
password: password1
networks:
- ssid: MySSID
password: password1
- ssid: MySSID2
password: password2

View File

@@ -0,0 +1,112 @@
esphome:
name: host-climate-test
host:
api:
logger:
climate:
- platform: thermostat
id: dual_mode_thermostat
name: Dual-mode Thermostat
sensor: host_thermostat_temperature_sensor
humidity_sensor: host_thermostat_humidity_sensor
humidity_hysteresis: 1.0
min_cooling_off_time: 20s
min_cooling_run_time: 20s
max_cooling_run_time: 30s
supplemental_cooling_delta: 3.0
min_heating_off_time: 20s
min_heating_run_time: 20s
max_heating_run_time: 30s
supplemental_heating_delta: 3.0
min_fanning_off_time: 20s
min_fanning_run_time: 20s
min_idle_time: 10s
visual:
min_humidity: 20%
max_humidity: 70%
min_temperature: 15.0
max_temperature: 32.0
temperature_step: 0.1
default_preset: home
preset:
- name: "away"
default_target_temperature_low: 18.0
default_target_temperature_high: 24.0
- name: "home"
default_target_temperature_low: 18.0
default_target_temperature_high: 24.0
auto_mode:
- logger.log: "AUTO mode set"
heat_cool_mode:
- logger.log: "HEAT_COOL mode set"
cool_action:
- switch.turn_on: air_cond
supplemental_cooling_action:
- switch.turn_on: air_cond_2
heat_action:
- switch.turn_on: heater
supplemental_heating_action:
- switch.turn_on: heater_2
dry_action:
- switch.turn_on: air_cond
fan_only_action:
- switch.turn_on: fan_only
idle_action:
- switch.turn_off: air_cond
- switch.turn_off: air_cond_2
- switch.turn_off: heater
- switch.turn_off: heater_2
- switch.turn_off: fan_only
humidity_control_humidify_action:
- switch.turn_on: humidifier
humidity_control_off_action:
- switch.turn_off: humidifier
sensor:
- platform: template
id: host_thermostat_humidity_sensor
unit_of_measurement: °C
accuracy_decimals: 2
state_class: measurement
force_update: true
lambda: return 42.0;
update_interval: 0.1s
- platform: template
id: host_thermostat_temperature_sensor
unit_of_measurement: °C
accuracy_decimals: 2
state_class: measurement
force_update: true
lambda: return 22.0;
update_interval: 0.1s
switch:
- platform: template
id: air_cond
name: Air Conditioner
optimistic: true
- platform: template
id: air_cond_2
name: Air Conditioner 2
optimistic: true
- platform: template
id: fan_only
name: Fan
optimistic: true
- platform: template
id: heater
name: Heater
optimistic: true
- platform: template
id: heater_2
name: Heater 2
optimistic: true
- platform: template
id: dehumidifier
name: Dehumidifier
optimistic: true
- platform: template
id: humidifier
name: Humidifier
optimistic: true

View File

@@ -0,0 +1,108 @@
esphome:
name: host-climate-test
host:
api:
logger:
climate:
- platform: thermostat
id: dual_mode_thermostat
name: Dual-mode Thermostat
sensor: host_thermostat_temperature_sensor
humidity_sensor: host_thermostat_humidity_sensor
humidity_hysteresis: 1.0
min_cooling_off_time: 20s
min_cooling_run_time: 20s
max_cooling_run_time: 30s
supplemental_cooling_delta: 3.0
min_heating_off_time: 20s
min_heating_run_time: 20s
max_heating_run_time: 30s
supplemental_heating_delta: 3.0
min_fanning_off_time: 20s
min_fanning_run_time: 20s
min_idle_time: 10s
visual:
min_humidity: 20%
max_humidity: 70%
min_temperature: 15.0
max_temperature: 32.0
temperature_step: 0.1
default_preset: home
preset:
- name: "away"
default_target_temperature_low: 18.0
default_target_temperature_high: 24.0
- name: "home"
default_target_temperature_low: 18.0
default_target_temperature_high: 24.0
auto_mode:
- logger.log: "AUTO mode set"
heat_cool_mode:
- logger.log: "HEAT_COOL mode set"
cool_action:
- switch.turn_on: air_cond
supplemental_cooling_action:
- switch.turn_on: air_cond_2
heat_action:
- switch.turn_on: heater
supplemental_heating_action:
- switch.turn_on: heater_2
dry_action:
- switch.turn_on: air_cond
fan_only_action:
- switch.turn_on: fan_only
idle_action:
- switch.turn_off: air_cond
- switch.turn_off: air_cond_2
- switch.turn_off: heater
- switch.turn_off: heater_2
- switch.turn_off: fan_only
humidity_control_humidify_action:
- switch.turn_on: humidifier
humidity_control_off_action:
- switch.turn_off: humidifier
sensor:
- platform: template
id: host_thermostat_humidity_sensor
unit_of_measurement: °C
accuracy_decimals: 2
state_class: measurement
force_update: true
lambda: return 42.0;
update_interval: 0.1s
- platform: template
id: host_thermostat_temperature_sensor
unit_of_measurement: °C
accuracy_decimals: 2
state_class: measurement
force_update: true
lambda: return 22.0;
update_interval: 0.1s
switch:
- platform: template
id: air_cond
name: Air Conditioner
optimistic: true
- platform: template
id: air_cond_2
name: Air Conditioner 2
optimistic: true
- platform: template
id: fan_only
name: Fan
optimistic: true
- platform: template
id: heater
name: Heater
optimistic: true
- platform: template
id: heater_2
name: Heater 2
optimistic: true
- platform: template
id: humidifier
name: Humidifier
optimistic: true

View File

@@ -210,7 +210,15 @@ sensor:
name: "Test Sensor 50"
lambda: return 50.0;
update_interval: 0.1s
# Temperature sensor for the thermostat
# Sensors for the thermostat
- platform: template
name: "Humidity Sensor"
id: humidity_sensor
lambda: return 35.0;
unit_of_measurement: "%"
device_class: humidity
state_class: measurement
update_interval: 5s
- platform: template
name: "Temperature Sensor"
id: temp_sensor
@@ -295,6 +303,11 @@ valve:
- logger.log: "Valve stopping"
output:
- platform: template
id: humidifier_output
type: binary
write_action:
- logger.log: "Humidifier output changed"
- platform: template
id: heater_output
type: binary
@@ -305,18 +318,31 @@ output:
type: binary
write_action:
- logger.log: "Cooler output changed"
- platform: template
id: fan_output
type: binary
write_action:
- logger.log: "Fan output changed"
climate:
- platform: thermostat
name: "Test Thermostat"
sensor: temp_sensor
humidity_sensor: humidity_sensor
default_preset: Home
on_boot_restore_from: default_preset
min_heating_off_time: 1s
min_heating_run_time: 1s
min_cooling_off_time: 1s
min_cooling_run_time: 1s
min_fan_mode_switching_time: 1s
min_idle_time: 1s
visual:
min_humidity: 20%
max_humidity: 70%
min_temperature: 15.0
max_temperature: 32.0
temperature_step: 0.1
heat_action:
- output.turn_on: heater_output
cool_action:
@@ -324,6 +350,14 @@ climate:
idle_action:
- output.turn_off: heater_output
- output.turn_off: cooler_output
humidity_control_humidify_action:
- output.turn_on: humidifier_output
humidity_control_off_action:
- output.turn_off: humidifier_output
fan_mode_auto_action:
- output.turn_off: fan_output
fan_mode_on_action:
- output.turn_on: fan_output
preset:
- name: Home
default_target_temperature_low: 20

View File

@@ -0,0 +1,332 @@
esphome:
name: test-value-list-filters
host:
api:
batch_delay: 0ms # Disable batching to receive all state updates
logger:
level: DEBUG
# Template sensors - one for each test to avoid cross-test interference
sensor:
- platform: template
name: "Source Sensor 1"
id: source_sensor_1
accuracy_decimals: 1
- platform: template
name: "Source Sensor 2"
id: source_sensor_2
accuracy_decimals: 1
- platform: template
name: "Source Sensor 3"
id: source_sensor_3
accuracy_decimals: 1
- platform: template
name: "Source Sensor 4"
id: source_sensor_4
accuracy_decimals: 1
- platform: template
name: "Source Sensor 5"
id: source_sensor_5
accuracy_decimals: 1
- platform: template
name: "Source Sensor 6"
id: source_sensor_6
accuracy_decimals: 2
- platform: template
name: "Source Sensor 7"
id: source_sensor_7
accuracy_decimals: 1
# FilterOutValueFilter - single value
- platform: copy
source_id: source_sensor_1
name: "Filter Out Single"
id: filter_out_single
filters:
- filter_out: 42.0
# FilterOutValueFilter - multiple values
- platform: copy
source_id: source_sensor_2
name: "Filter Out Multiple"
id: filter_out_multiple
filters:
- filter_out: [0.0, 42.0, 100.0]
# FilterOutValueFilter - with NaN
- platform: copy
source_id: source_sensor_1
name: "Filter Out NaN"
id: filter_out_nan
filters:
- filter_out: nan
# ThrottleWithPriorityFilter - single priority value
- platform: copy
source_id: source_sensor_3
name: "Throttle Priority Single"
id: throttle_priority_single
filters:
- throttle_with_priority:
timeout: 200ms
value: 42.0
# ThrottleWithPriorityFilter - multiple priority values
- platform: copy
source_id: source_sensor_4
name: "Throttle Priority Multiple"
id: throttle_priority_multiple
filters:
- throttle_with_priority:
timeout: 200ms
value: [0.0, 42.0, 100.0]
# Edge case: Filter Out NaN explicitly
- platform: copy
source_id: source_sensor_5
name: "Filter Out NaN Test"
id: filter_out_nan_test
filters:
- filter_out: nan
# Edge case: Accuracy decimals - 2 decimals
- platform: copy
source_id: source_sensor_6
name: "Filter Out Accuracy 2"
id: filter_out_accuracy_2
filters:
- filter_out: 42.0
# Edge case: Throttle with NaN priority
- platform: copy
source_id: source_sensor_7
name: "Throttle Priority NaN"
id: throttle_priority_nan
filters:
- throttle_with_priority:
timeout: 200ms
value: nan
# Script to test FilterOutValueFilter
script:
- id: test_filter_out_single
then:
# Should pass through: 1.0, 2.0, 3.0
# Should filter out: 42.0
- sensor.template.publish:
id: source_sensor_1
state: 1.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor_1
state: 42.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_1
state: 2.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor_1
state: 42.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_1
state: 3.0
- id: test_filter_out_multiple
then:
# Should filter out: 0.0, 42.0, 100.0
# Should pass through: 1.0, 2.0, 50.0
- sensor.template.publish:
id: source_sensor_2
state: 0.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 1.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 42.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 2.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 100.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 50.0
- id: test_throttle_priority_single
then:
# 42.0 bypasses throttle, other values are throttled
- sensor.template.publish:
id: source_sensor_3
state: 1.0 # First value - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_3
state: 2.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_3
state: 42.0 # Priority - passes immediately
- delay: 50ms
- sensor.template.publish:
id: source_sensor_3
state: 3.0 # Throttled
- delay: 250ms # Wait for throttle to expire
- sensor.template.publish:
id: source_sensor_3
state: 4.0 # Passes after timeout
- id: test_throttle_priority_multiple
then:
# 0.0, 42.0, 100.0 bypass throttle
- sensor.template.publish:
id: source_sensor_4
state: 1.0 # First value - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 2.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 0.0 # Priority - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 3.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 42.0 # Priority - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 4.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 100.0 # Priority - passes
- id: test_filter_out_nan
then:
# NaN should be filtered out, regular values pass
- sensor.template.publish:
id: source_sensor_5
state: 1.0 # Pass
- delay: 20ms
- sensor.template.publish:
id: source_sensor_5
state: !lambda 'return NAN;' # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_5
state: 2.0 # Pass
- delay: 20ms
- sensor.template.publish:
id: source_sensor_5
state: !lambda 'return NAN;' # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_5
state: 3.0 # Pass
- id: test_filter_out_accuracy_2
then:
# With 2 decimal places, 42.00 filtered, 42.01 and 42.15 pass
- sensor.template.publish:
id: source_sensor_6
state: 42.0 # Filtered (rounds to 42.00)
- delay: 20ms
- sensor.template.publish:
id: source_sensor_6
state: 42.01 # Pass (rounds to 42.01)
- delay: 20ms
- sensor.template.publish:
id: source_sensor_6
state: 42.15 # Pass (rounds to 42.15)
- delay: 20ms
- sensor.template.publish:
id: source_sensor_6
state: 42.0 # Filtered (rounds to 42.00)
- id: test_throttle_priority_nan
then:
# NaN bypasses throttle, regular values throttled
- sensor.template.publish:
id: source_sensor_7
state: 1.0 # First value - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_7
state: 2.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_7
state: !lambda 'return NAN;' # Priority NaN - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_7
state: 3.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_7
state: !lambda 'return NAN;' # Priority NaN - passes
# Buttons to trigger each test
button:
- platform: template
name: "Test Filter Out Single"
id: btn_filter_out_single
on_press:
- script.execute: test_filter_out_single
- platform: template
name: "Test Filter Out Multiple"
id: btn_filter_out_multiple
on_press:
- script.execute: test_filter_out_multiple
- platform: template
name: "Test Throttle Priority Single"
id: btn_throttle_priority_single
on_press:
- script.execute: test_throttle_priority_single
- platform: template
name: "Test Throttle Priority Multiple"
id: btn_throttle_priority_multiple
on_press:
- script.execute: test_throttle_priority_multiple
- platform: template
name: "Test Filter Out NaN"
id: btn_filter_out_nan
on_press:
- script.execute: test_filter_out_nan
- platform: template
name: "Test Filter Out Accuracy 2"
id: btn_filter_out_accuracy_2
on_press:
- script.execute: test_filter_out_accuracy_2
- platform: template
name: "Test Throttle Priority NaN"
id: btn_throttle_priority_nan
on_press:
- script.execute: test_throttle_priority_nan

View File

@@ -0,0 +1,49 @@
"""Integration test for Host mode with climate."""
from __future__ import annotations
import asyncio
import aioesphomeapi
from aioesphomeapi import ClimateAction, ClimateMode, ClimatePreset, EntityState
import pytest
from .types import APIClientConnectedFactory, RunCompiledFunction
@pytest.mark.asyncio
async def test_host_mode_climate_basic_state(
yaml_config: str,
run_compiled: RunCompiledFunction,
api_client_connected: APIClientConnectedFactory,
) -> None:
"""Test basic climate state reporting."""
loop = asyncio.get_running_loop()
async with run_compiled(yaml_config), api_client_connected() as client:
states: dict[int, EntityState] = {}
climate_future: asyncio.Future[EntityState] = loop.create_future()
def on_state(state: EntityState) -> None:
states[state.key] = state
if (
isinstance(state, aioesphomeapi.ClimateState)
and not climate_future.done()
):
climate_future.set_result(state)
client.subscribe_states(on_state)
try:
climate_state = await asyncio.wait_for(climate_future, timeout=5.0)
except TimeoutError:
pytest.fail("Climate state not received within 5 seconds")
assert isinstance(climate_state, aioesphomeapi.ClimateState)
assert climate_state.mode == ClimateMode.OFF
assert climate_state.action == ClimateAction.OFF
assert climate_state.current_temperature == 22.0
assert climate_state.target_temperature_low == 18.0
assert climate_state.target_temperature_high == 24.0
assert climate_state.preset == ClimatePreset.HOME
assert climate_state.current_humidity == 42.0
assert climate_state.target_humidity == 20.0

View File

@@ -0,0 +1,76 @@
"""Integration test for Host mode with climate."""
from __future__ import annotations
import asyncio
import aioesphomeapi
from aioesphomeapi import ClimateInfo, ClimateMode, EntityState
import pytest
from .state_utils import InitialStateHelper
from .types import APIClientConnectedFactory, RunCompiledFunction
@pytest.mark.asyncio
async def test_host_mode_climate_control(
yaml_config: str,
run_compiled: RunCompiledFunction,
api_client_connected: APIClientConnectedFactory,
) -> None:
"""Test climate mode control."""
loop = asyncio.get_running_loop()
async with run_compiled(yaml_config), api_client_connected() as client:
states: dict[int, EntityState] = {}
climate_future: asyncio.Future[EntityState] = loop.create_future()
def on_state(state: EntityState) -> None:
states[state.key] = state
if (
isinstance(state, aioesphomeapi.ClimateState)
and state.mode == ClimateMode.HEAT
and state.target_temperature_low == 21.5
and state.target_temperature_high == 26.5
and not climate_future.done()
):
climate_future.set_result(state)
# Get entities and set up state synchronization
entities, services = await client.list_entities_services()
initial_state_helper = InitialStateHelper(entities)
climate_infos = [e for e in entities if isinstance(e, ClimateInfo)]
assert len(climate_infos) >= 1, "Expected at least 1 climate entity"
# Subscribe with the wrapper that filters initial states
client.subscribe_states(initial_state_helper.on_state_wrapper(on_state))
# Wait for all initial states to be broadcast
try:
await initial_state_helper.wait_for_initial_states()
except TimeoutError:
pytest.fail("Timeout waiting for initial states")
test_climate = next(
(c for c in climate_infos if c.name == "Dual-mode Thermostat"), None
)
assert test_climate is not None, (
"Dual-mode Thermostat thermostat climate not found"
)
# Adjust setpoints
client.climate_command(
test_climate.key,
mode=ClimateMode.HEAT,
target_temperature_low=21.5,
target_temperature_high=26.5,
)
try:
climate_state = await asyncio.wait_for(climate_future, timeout=5.0)
except TimeoutError:
pytest.fail("Climate state not received within 5 seconds")
assert isinstance(climate_state, aioesphomeapi.ClimateState)
assert climate_state.mode == ClimateMode.HEAT
assert climate_state.target_temperature_low == 21.5
assert climate_state.target_temperature_high == 26.5
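
On connect, subscribe_states replays the current state of every entity, so the test wraps its callback with InitialStateHelper to keep that initial broadcast from satisfying the future before the command is sent. A hypothetical reduction of that wrapper, just to show the idea (the real helper lives in .state_utils and additionally exposes wait_for_initial_states):

class InitialStateSketch:
    """Swallow the first state received for each known entity key, then
    forward everything else to the wrapped callback."""

    def __init__(self, entities) -> None:
        self._pending = {e.key for e in entities}

    def on_state_wrapper(self, callback):
        def _on_state(state) -> None:
            if state.key in self._pending:
                self._pending.discard(state.key)
                return  # initial broadcast, not a response to our command
            callback(state)

        return _on_state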

View File

@@ -5,7 +5,10 @@ from __future__ import annotations
import asyncio
from aioesphomeapi import (
ClimateFanMode,
ClimateFeature,
ClimateInfo,
ClimateMode,
DateInfo,
DateState,
DateTimeInfo,
@@ -121,6 +124,46 @@ async def test_host_mode_many_entities(
assert len(climate_infos) >= 1, "Expected at least 1 climate entity"
climate_info = climate_infos[0]
# Verify feature flags set as expected
assert climate_info.feature_flags == (
ClimateFeature.SUPPORTS_ACTION
| ClimateFeature.SUPPORTS_CURRENT_HUMIDITY
| ClimateFeature.SUPPORTS_CURRENT_TEMPERATURE
| ClimateFeature.SUPPORTS_TWO_POINT_TARGET_TEMPERATURE
| ClimateFeature.SUPPORTS_TARGET_HUMIDITY
)
# Verify modes
assert climate_info.supported_modes == [
ClimateMode.OFF,
ClimateMode.COOL,
ClimateMode.HEAT,
], f"Expected modes [OFF, COOL, HEAT], got {climate_info.supported_modes}"
# Verify visual parameters
assert climate_info.visual_min_temperature == 15.0, (
f"Expected min_temperature=15.0, got {climate_info.visual_min_temperature}"
)
assert climate_info.visual_max_temperature == 32.0, (
f"Expected max_temperature=32.0, got {climate_info.visual_max_temperature}"
)
assert climate_info.visual_target_temperature_step == 0.1, (
f"Expected temperature_step=0.1, got {climate_info.visual_target_temperature_step}"
)
assert climate_info.visual_min_humidity == 20.0, (
f"Expected min_humidity=20.0, got {climate_info.visual_min_humidity}"
)
assert climate_info.visual_max_humidity == 70.0, (
f"Expected max_humidity=70.0, got {climate_info.visual_max_humidity}"
)
# Verify fan modes
assert climate_info.supported_fan_modes == [
ClimateFanMode.ON,
ClimateFanMode.AUTO,
], f"Expected fan modes [ON, AUTO], got {climate_info.supported_fan_modes}"
# Verify the thermostat has presets
assert len(climate_info.supported_presets) > 0, (
"Expected climate to have presets"

View File

@@ -0,0 +1,263 @@
"""Test sensor ValueListFilter functionality (FilterOutValueFilter and ThrottleWithPriorityFilter)."""
from __future__ import annotations
import asyncio
import math
from aioesphomeapi import ButtonInfo, EntityState, SensorState
import pytest
from .state_utils import InitialStateHelper, build_key_to_entity_mapping
from .types import APIClientConnectedFactory, RunCompiledFunction
@pytest.mark.asyncio
async def test_sensor_filters_value_list(
yaml_config: str,
run_compiled: RunCompiledFunction,
api_client_connected: APIClientConnectedFactory,
) -> None:
"""Test that ValueListFilter-based filters work correctly."""
loop = asyncio.get_running_loop()
# Track state changes for all sensors
sensor_values: dict[str, list[float]] = {
"filter_out_single": [],
"filter_out_multiple": [],
"throttle_priority_single": [],
"throttle_priority_multiple": [],
"filter_out_nan_test": [],
"filter_out_accuracy_2": [],
"throttle_priority_nan": [],
}
# Futures for each test
filter_out_single_done = loop.create_future()
filter_out_multiple_done = loop.create_future()
throttle_single_done = loop.create_future()
throttle_multiple_done = loop.create_future()
filter_out_nan_done = loop.create_future()
filter_out_accuracy_2_done = loop.create_future()
throttle_nan_done = loop.create_future()
def on_state(state: EntityState) -> None:
"""Track sensor state updates."""
if not isinstance(state, SensorState) or state.missing_state:
return
sensor_name = key_to_sensor.get(state.key)
if sensor_name not in sensor_values:
return
sensor_values[sensor_name].append(state.state)
# Check completion conditions
if (
sensor_name == "filter_out_single"
and len(sensor_values[sensor_name]) == 3
and not filter_out_single_done.done()
):
filter_out_single_done.set_result(True)
elif (
sensor_name == "filter_out_multiple"
and len(sensor_values[sensor_name]) == 3
and not filter_out_multiple_done.done()
):
filter_out_multiple_done.set_result(True)
elif (
sensor_name == "throttle_priority_single"
and len(sensor_values[sensor_name]) == 3
and not throttle_single_done.done()
):
throttle_single_done.set_result(True)
elif (
sensor_name == "throttle_priority_multiple"
and len(sensor_values[sensor_name]) == 4
and not throttle_multiple_done.done()
):
throttle_multiple_done.set_result(True)
elif (
sensor_name == "filter_out_nan_test"
and len(sensor_values[sensor_name]) == 3
and not filter_out_nan_done.done()
):
filter_out_nan_done.set_result(True)
elif (
sensor_name == "filter_out_accuracy_2"
and len(sensor_values[sensor_name]) == 2
and not filter_out_accuracy_2_done.done()
):
filter_out_accuracy_2_done.set_result(True)
elif (
sensor_name == "throttle_priority_nan"
and len(sensor_values[sensor_name]) == 3
and not throttle_nan_done.done()
):
throttle_nan_done.set_result(True)
async with (
run_compiled(yaml_config),
api_client_connected() as client,
):
# Get entities and build key mapping
entities, _ = await client.list_entities_services()
key_to_sensor = build_key_to_entity_mapping(
entities,
{
"filter_out_single": "Filter Out Single",
"filter_out_multiple": "Filter Out Multiple",
"throttle_priority_single": "Throttle Priority Single",
"throttle_priority_multiple": "Throttle Priority Multiple",
"filter_out_nan_test": "Filter Out NaN Test",
"filter_out_accuracy_2": "Filter Out Accuracy 2",
"throttle_priority_nan": "Throttle Priority NaN",
},
)
# Set up initial state helper with all entities
initial_state_helper = InitialStateHelper(entities)
# Subscribe to state changes with wrapper
client.subscribe_states(initial_state_helper.on_state_wrapper(on_state))
# Wait for initial states
await initial_state_helper.wait_for_initial_states()
# Find all buttons
button_name_map = {
"Test Filter Out Single": "filter_out_single",
"Test Filter Out Multiple": "filter_out_multiple",
"Test Throttle Priority Single": "throttle_priority_single",
"Test Throttle Priority Multiple": "throttle_priority_multiple",
"Test Filter Out NaN": "filter_out_nan",
"Test Filter Out Accuracy 2": "filter_out_accuracy_2",
"Test Throttle Priority NaN": "throttle_priority_nan",
}
buttons = {}
for entity in entities:
if isinstance(entity, ButtonInfo) and entity.name in button_name_map:
buttons[button_name_map[entity.name]] = entity.key
assert len(buttons) == 7, f"Expected 7 buttons, found {len(buttons)}"
# Test 1: FilterOutValueFilter - single value
sensor_values["filter_out_single"].clear()
client.button_command(buttons["filter_out_single"])
try:
await asyncio.wait_for(filter_out_single_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 1 timed out. Values: {sensor_values['filter_out_single']}"
)
expected = [1.0, 2.0, 3.0]
assert sensor_values["filter_out_single"] == pytest.approx(expected), (
f"Test 1 failed: expected {expected}, got {sensor_values['filter_out_single']}"
)
# Test 2: FilterOutValueFilter - multiple values
sensor_values["filter_out_multiple"].clear()
filter_out_multiple_done = loop.create_future()
client.button_command(buttons["filter_out_multiple"])
try:
await asyncio.wait_for(filter_out_multiple_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 2 timed out. Values: {sensor_values['filter_out_multiple']}"
)
expected = [1.0, 2.0, 50.0]
assert sensor_values["filter_out_multiple"] == pytest.approx(expected), (
f"Test 2 failed: expected {expected}, got {sensor_values['filter_out_multiple']}"
)
# Test 3: ThrottleWithPriorityFilter - single priority
sensor_values["throttle_priority_single"].clear()
throttle_single_done = loop.create_future()
client.button_command(buttons["throttle_priority_single"])
try:
await asyncio.wait_for(throttle_single_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 3 timed out. Values: {sensor_values['throttle_priority_single']}"
)
expected = [1.0, 42.0, 4.0]
assert sensor_values["throttle_priority_single"] == pytest.approx(expected), (
f"Test 3 failed: expected {expected}, got {sensor_values['throttle_priority_single']}"
)
# Test 4: ThrottleWithPriorityFilter - multiple priorities
sensor_values["throttle_priority_multiple"].clear()
throttle_multiple_done = loop.create_future()
client.button_command(buttons["throttle_priority_multiple"])
try:
await asyncio.wait_for(throttle_multiple_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 4 timed out. Values: {sensor_values['throttle_priority_multiple']}"
)
expected = [1.0, 0.0, 42.0, 100.0]
assert sensor_values["throttle_priority_multiple"] == pytest.approx(expected), (
f"Test 4 failed: expected {expected}, got {sensor_values['throttle_priority_multiple']}"
)
# Test 5: FilterOutValueFilter - NaN handling
sensor_values["filter_out_nan_test"].clear()
filter_out_nan_done = loop.create_future()
client.button_command(buttons["filter_out_nan"])
try:
await asyncio.wait_for(filter_out_nan_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 5 timed out. Values: {sensor_values['filter_out_nan_test']}"
)
expected = [1.0, 2.0, 3.0]
assert sensor_values["filter_out_nan_test"] == pytest.approx(expected), (
f"Test 5 failed: expected {expected}, got {sensor_values['filter_out_nan_test']}"
)
# Test 6: FilterOutValueFilter - Accuracy decimals (2)
sensor_values["filter_out_accuracy_2"].clear()
filter_out_accuracy_2_done = loop.create_future()
client.button_command(buttons["filter_out_accuracy_2"])
try:
await asyncio.wait_for(filter_out_accuracy_2_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 6 timed out. Values: {sensor_values['filter_out_accuracy_2']}"
)
expected = [42.01, 42.15]
assert sensor_values["filter_out_accuracy_2"] == pytest.approx(expected), (
f"Test 6 failed: expected {expected}, got {sensor_values['filter_out_accuracy_2']}"
)
# Test 7: ThrottleWithPriorityFilter - NaN priority
sensor_values["throttle_priority_nan"].clear()
throttle_nan_done = loop.create_future()
client.button_command(buttons["throttle_priority_nan"])
try:
await asyncio.wait_for(throttle_nan_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 7 timed out. Values: {sensor_values['throttle_priority_nan']}"
)
# First value (1.0) + two NaN priority values
# NaN values will be compared using math.isnan
assert len(sensor_values["throttle_priority_nan"]) == 3, (
f"Test 7 failed: expected 3 values, got {len(sensor_values['throttle_priority_nan'])}"
)
assert sensor_values["throttle_priority_nan"][0] == pytest.approx(1.0), (
f"Test 7 failed: first value should be 1.0, got {sensor_values['throttle_priority_nan'][0]}"
)
assert math.isnan(sensor_values["throttle_priority_nan"][1]), (
f"Test 7 failed: second value should be NaN, got {sensor_values['throttle_priority_nan'][1]}"
)
assert math.isnan(sensor_values["throttle_priority_nan"][2]), (
f"Test 7 failed: third value should be NaN, got {sensor_values['throttle_priority_nan'][2]}"
)
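
The key_to_sensor lookup used in on_state comes from build_key_to_entity_mapping; conceptually it inverts the alias-to-display-name table into a key-to-alias dict. A hypothetical re-implementation, only to make the wiring explicit (the real helper is imported from .state_utils):

def build_key_to_entity_mapping_sketch(entities, alias_to_name: dict[str, str]) -> dict[int, str]:
    """Map each entity key to the short alias used by the test, matching
    entities by their display name."""
    name_to_alias = {name: alias for alias, name in alias_to_name.items()}
    return {
        entity.key: name_to_alias[entity.name]
        for entity in entities
        if entity.name in name_to_alias
    }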

View File

@@ -71,6 +71,12 @@ def mock_changed_files() -> Generator[Mock, None, None]:
yield mock
@pytest.fixture(autouse=True)
def clear_clang_tidy_cache() -> None:
"""Clear the clang-tidy full scan cache before each test."""
determine_jobs._is_clang_tidy_full_scan.cache_clear()
def test_main_all_tests_should_run(
mock_should_run_integration_tests: Mock,
mock_should_run_clang_tidy: Mock,
@@ -90,15 +96,35 @@ def test_main_all_tests_should_run(
mock_should_run_clang_format.return_value = True
mock_should_run_python_linters.return_value = True
# Mock list-components.py output (now returns JSON with --changed-with-deps)
mock_result = Mock()
mock_result.stdout = json.dumps(
{"directly_changed": ["wifi", "api"], "all_changed": ["wifi", "api", "sensor"]}
)
mock_subprocess_run.return_value = mock_result
# Mock changed_files to return non-component files (to avoid memory impact)
# Memory impact only runs when component C++ files change
mock_changed_files.return_value = [
"esphome/config.py",
"esphome/helpers.py",
]
# Run main function with mocked argv
with patch("sys.argv", ["determine-jobs.py"]):
with (
patch("sys.argv", ["determine-jobs.py"]),
patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
patch.object(
determine_jobs,
"get_changed_components",
return_value=["wifi", "api", "sensor"],
),
patch.object(
determine_jobs,
"filter_component_files",
side_effect=lambda f: f.startswith("esphome/components/"),
),
patch.object(
determine_jobs,
"get_components_with_dependencies",
side_effect=lambda files, deps: ["wifi", "api"]
if not deps
else ["wifi", "api", "sensor"],
),
):
determine_jobs.main()
# Check output
@@ -107,6 +133,7 @@ def test_main_all_tests_should_run(
assert output["integration_tests"] is True
assert output["clang_tidy"] is True
assert output["clang_tidy_mode"] in ["nosplit", "split"]
assert output["clang_format"] is True
assert output["python_linters"] is True
assert output["changed_components"] == ["wifi", "api", "sensor"]
@@ -117,9 +144,12 @@ def test_main_all_tests_should_run(
assert output["component_test_count"] == len(
output["changed_components_with_tests"]
)
# memory_impact should be present
# changed_cpp_file_count should be present
assert "changed_cpp_file_count" in output
assert isinstance(output["changed_cpp_file_count"], int)
# memory_impact should be false (no component C++ files changed)
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false" # No files changed
assert output["memory_impact"]["should_run"] == "false"
def test_main_no_tests_should_run(
@@ -141,13 +171,18 @@ def test_main_no_tests_should_run(
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = False
# Mock empty list-components.py output
mock_result = Mock()
mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []})
mock_subprocess_run.return_value = mock_result
# Mock changed_files to return no component files
mock_changed_files.return_value = []
# Run main function with mocked argv
with patch("sys.argv", ["determine-jobs.py"]):
with (
patch("sys.argv", ["determine-jobs.py"]),
patch.object(determine_jobs, "get_changed_components", return_value=[]),
patch.object(determine_jobs, "filter_component_files", return_value=False),
patch.object(
determine_jobs, "get_components_with_dependencies", return_value=[]
),
):
determine_jobs.main()
# Check output
@@ -156,11 +191,14 @@ def test_main_no_tests_should_run(
assert output["integration_tests"] is False
assert output["clang_tidy"] is False
assert output["clang_tidy_mode"] == "disabled"
assert output["clang_format"] is False
assert output["python_linters"] is False
assert output["changed_components"] == []
assert output["changed_components_with_tests"] == []
assert output["component_test_count"] == 0
# changed_cpp_file_count should be 0
assert output["changed_cpp_file_count"] == 0
# memory_impact should be present
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false"
@@ -210,14 +248,23 @@ def test_main_with_branch_argument(
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = True
# Mock list-components.py output
mock_result = Mock()
mock_result.stdout = json.dumps(
{"directly_changed": ["mqtt"], "all_changed": ["mqtt"]}
)
mock_subprocess_run.return_value = mock_result
# Mock changed_files to return non-component files (to avoid memory impact)
# Memory impact only runs when component C++ files change
mock_changed_files.return_value = ["esphome/config.py"]
with patch("sys.argv", ["script.py", "-b", "main"]):
with (
patch("sys.argv", ["script.py", "-b", "main"]),
patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
patch.object(determine_jobs, "get_changed_components", return_value=["mqtt"]),
patch.object(
determine_jobs,
"filter_component_files",
side_effect=lambda f: f.startswith("esphome/components/"),
),
patch.object(
determine_jobs, "get_components_with_dependencies", return_value=["mqtt"]
),
):
determine_jobs.main()
# Check that functions were called with branch
@@ -226,19 +273,13 @@ def test_main_with_branch_argument(
mock_should_run_clang_format.assert_called_once_with("main")
mock_should_run_python_linters.assert_called_once_with("main")
# Check that list-components.py was called with branch
mock_subprocess_run.assert_called_once()
call_args = mock_subprocess_run.call_args[0][0]
assert "--changed-with-deps" in call_args
assert "-b" in call_args
assert "main" in call_args
# Check output
captured = capsys.readouterr()
output = json.loads(captured.out)
assert output["integration_tests"] is False
assert output["clang_tidy"] is True
assert output["clang_tidy_mode"] in ["nosplit", "split"]
assert output["clang_format"] is False
assert output["python_linters"] is True
assert output["changed_components"] == ["mqtt"]
@@ -249,7 +290,10 @@ def test_main_with_branch_argument(
assert output["component_test_count"] == len(
output["changed_components_with_tests"]
)
# memory_impact should be present
# changed_cpp_file_count should be present
assert "changed_cpp_file_count" in output
assert isinstance(output["changed_cpp_file_count"], int)
# memory_impact should be false (no component C++ files changed)
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false"
@@ -352,16 +396,6 @@ def test_should_run_clang_tidy_hash_check_exception() -> None:
result = determine_jobs.should_run_clang_tidy()
assert result is True # Fail safe - run clang-tidy
# Even with C++ files, exception should trigger clang-tidy
with (
patch.object(
determine_jobs, "changed_files", return_value=["esphome/core.cpp"]
),
patch("subprocess.run", side_effect=Exception("Hash check failed")),
):
result = determine_jobs.should_run_clang_tidy()
assert result is True
def test_should_run_clang_tidy_with_branch() -> None:
"""Test should_run_clang_tidy with branch argument."""
@@ -433,6 +467,40 @@ def test_should_run_clang_format_with_branch() -> None:
mock_changed.assert_called_once_with("release")
@pytest.mark.parametrize(
("changed_files", "expected_count"),
[
(["esphome/core.cpp"], 1),
(["esphome/core.h"], 1),
(["test.hpp"], 1),
(["test.cc"], 1),
(["test.cxx"], 1),
(["test.c"], 1),
(["test.tcc"], 1),
(["esphome/core.cpp", "esphome/core.h"], 2),
(["esphome/core.cpp", "esphome/core.h", "test.cc"], 3),
(["README.md"], 0),
(["esphome/config.py"], 0),
(["README.md", "esphome/config.py"], 0),
(["esphome/core.cpp", "README.md", "esphome/config.py"], 1),
([], 0),
],
)
def test_count_changed_cpp_files(changed_files: list[str], expected_count: int) -> None:
"""Test count_changed_cpp_files function."""
with patch.object(determine_jobs, "changed_files", return_value=changed_files):
result = determine_jobs.count_changed_cpp_files()
assert result == expected_count
def test_count_changed_cpp_files_with_branch() -> None:
"""Test count_changed_cpp_files with branch argument."""
with patch.object(determine_jobs, "changed_files") as mock_changed:
mock_changed.return_value = []
determine_jobs.count_changed_cpp_files("release")
mock_changed.assert_called_once_with("release")
def test_main_filters_components_without_tests(
mock_should_run_integration_tests: Mock,
mock_should_run_clang_tidy: Mock,
@@ -453,16 +521,11 @@ def test_main_filters_components_without_tests(
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = False
# Mock list-components.py output with 3 components
# wifi: has tests, sensor: has tests, airthings_ble: no tests
mock_result = Mock()
mock_result.stdout = json.dumps(
{
"directly_changed": ["wifi", "sensor"],
"all_changed": ["wifi", "sensor", "airthings_ble"],
}
)
mock_subprocess_run.return_value = mock_result
# Mock changed_files to return component files
mock_changed_files.return_value = [
"esphome/components/wifi/wifi.cpp",
"esphome/components/sensor/sensor.h",
]
# Create test directory structure
tests_dir = tmp_path / "tests" / "components"
@@ -486,6 +549,23 @@ def test_main_filters_components_without_tests(
patch.object(determine_jobs, "root_path", str(tmp_path)),
patch.object(helpers, "root_path", str(tmp_path)),
patch("sys.argv", ["determine-jobs.py"]),
patch.object(
determine_jobs,
"get_changed_components",
return_value=["wifi", "sensor", "airthings_ble"],
),
patch.object(
determine_jobs,
"filter_component_files",
side_effect=lambda f: f.startswith("esphome/components/"),
),
patch.object(
determine_jobs,
"get_components_with_dependencies",
side_effect=lambda files, deps: ["wifi", "sensor"]
if not deps
else ["wifi", "sensor", "airthings_ble"],
),
):
# Clear the cache since we're mocking root_path
determine_jobs._component_has_tests.cache_clear()
@@ -501,6 +581,9 @@ def test_main_filters_components_without_tests(
assert set(output["changed_components_with_tests"]) == {"wifi", "sensor"}
# component_test_count should be based on components with tests
assert output["component_test_count"] == 2
# changed_cpp_file_count should be present
assert "changed_cpp_file_count" in output
assert isinstance(output["changed_cpp_file_count"], int)
# memory_impact should be present
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false"
@@ -715,3 +798,130 @@ def test_detect_memory_impact_config_skips_base_bus_components(tmp_path: Path) -
assert result["should_run"] == "true"
assert result["components"] == ["wifi"]
assert "i2c" not in result["components"]
# Tests for clang-tidy split mode logic
def test_clang_tidy_mode_full_scan(
mock_should_run_integration_tests: Mock,
mock_should_run_clang_tidy: Mock,
mock_should_run_clang_format: Mock,
mock_should_run_python_linters: Mock,
mock_subprocess_run: Mock,
mock_changed_files: Mock,
capsys: pytest.CaptureFixture[str],
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test that full scan (hash changed) always uses split mode."""
monkeypatch.delenv("GITHUB_ACTIONS", raising=False)
mock_should_run_integration_tests.return_value = False
mock_should_run_clang_tidy.return_value = True
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = False
# Mock changed_files to return no component files
mock_changed_files.return_value = []
# Mock full scan (hash changed)
with (
patch("sys.argv", ["determine-jobs.py"]),
patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=True),
patch.object(determine_jobs, "get_changed_components", return_value=[]),
patch.object(determine_jobs, "filter_component_files", return_value=False),
patch.object(
determine_jobs, "get_components_with_dependencies", return_value=[]
),
):
determine_jobs.main()
captured = capsys.readouterr()
output = json.loads(captured.out)
# Full scan should always use split mode
assert output["clang_tidy_mode"] == "split"
@pytest.mark.parametrize(
("component_count", "files_per_component", "expected_mode"),
[
# Small PR: 5 files in 1 component -> nosplit
(1, 5, "nosplit"),
# Medium PR: 30 files in 2 components -> nosplit
(2, 15, "nosplit"),
# Medium PR: 64 files total -> nosplit (just under threshold)
(2, 32, "nosplit"),
# Large PR: 66 files total -> split (over threshold)
(2, 33, "split"), # 2 * 33 = 66 files
# Large PR: 100 files in 10 components -> split
(10, 10, "split"),
],
ids=[
"1_comp_5_files_nosplit",
"2_comp_30_files_nosplit",
"2_comp_64_files_nosplit_under_threshold",
"2_comp_66_files_split_at_threshold",
"10_comp_100_files_split",
],
)
def test_clang_tidy_mode_targeted_scan(
component_count: int,
files_per_component: int,
expected_mode: str,
mock_should_run_integration_tests: Mock,
mock_should_run_clang_tidy: Mock,
mock_should_run_clang_format: Mock,
mock_should_run_python_linters: Mock,
mock_subprocess_run: Mock,
mock_changed_files: Mock,
capsys: pytest.CaptureFixture[str],
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test clang-tidy mode selection based on files_to_check count."""
monkeypatch.delenv("GITHUB_ACTIONS", raising=False)
mock_should_run_integration_tests.return_value = False
mock_should_run_clang_tidy.return_value = True
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = False
# Create component names
components = [f"comp{i}" for i in range(component_count)]
# Mock changed_files to return component files
mock_changed_files.return_value = [
f"esphome/components/{comp}/file.cpp" for comp in components
]
# Mock git_ls_files to return files for each component
cpp_files = {
f"esphome/components/{comp}/file{i}.cpp": 0
for comp in components
for i in range(files_per_component)
}
# Create a mock that returns the cpp_files dict for any call
def mock_git_ls_files(patterns=None):
return cpp_files
with (
patch("sys.argv", ["determine-jobs.py"]),
patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
patch.object(determine_jobs, "git_ls_files", side_effect=mock_git_ls_files),
patch.object(determine_jobs, "get_changed_components", return_value=components),
patch.object(
determine_jobs,
"filter_component_files",
side_effect=lambda f: f.startswith("esphome/components/"),
),
patch.object(
determine_jobs, "get_components_with_dependencies", return_value=components
),
):
determine_jobs.main()
captured = capsys.readouterr()
output = json.loads(captured.out)
assert output["clang_tidy_mode"] == expected_mode

View File

@@ -3,7 +3,7 @@ esphome:
friendly_name: $component_name
esp8266:
board: d1_mini
board: d1_mini_pro
logger:
level: VERY_VERBOSE

View File

@@ -517,6 +517,35 @@ def test_include_file_cpp(tmp_path: Path, mock_copy_file_if_changed: Mock) -> No
mock_cg.add_global.assert_not_called()
def test_include_file_with_c_header(
tmp_path: Path, mock_copy_file_if_changed: Mock
) -> None:
"""Test include_file wraps header in extern C block when is_c_header is True."""
src_file = tmp_path / "c_library.h"
src_file.write_text("// C library header")
CORE.build_path = tmp_path / "build"
with patch("esphome.core.config.cg") as mock_cg:
# Mock RawStatement to capture the text
mock_raw_statement = MagicMock()
mock_raw_statement.text = ""
def raw_statement_side_effect(text):
mock_raw_statement.text = text
return mock_raw_statement
mock_cg.RawStatement.side_effect = raw_statement_side_effect
config.include_file(src_file, Path("c_library.h"), is_c_header=True)
mock_copy_file_if_changed.assert_called_once()
mock_cg.add_global.assert_called_once()
# Check that include statement is wrapped in extern "C" block
assert 'extern "C"' in mock_raw_statement.text
assert '#include "c_library.h"' in mock_raw_statement.text
def test_get_usable_cpu_count() -> None:
"""Test get_usable_cpu_count returns CPU count."""
count = config.get_usable_cpu_count()