1
0
mirror of https://github.com/esphome/esphome.git synced 2025-11-05 01:21:50 +00:00

Compare commits

..

2 Commits

76 changed files with 1056 additions and 2770 deletions

View File

@@ -170,13 +170,11 @@ jobs:
outputs:
integration-tests: ${{ steps.determine.outputs.integration-tests }}
clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
clang-tidy-mode: ${{ steps.determine.outputs.clang-tidy-mode }}
python-linters: ${{ steps.determine.outputs.python-linters }}
changed-components: ${{ steps.determine.outputs.changed-components }}
changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }}
component-test-count: ${{ steps.determine.outputs.component-test-count }}
changed-cpp-file-count: ${{ steps.determine.outputs.changed-cpp-file-count }}
memory_impact: ${{ steps.determine.outputs.memory-impact }}
steps:
- name: Check out code from GitHub
@@ -202,13 +200,11 @@ jobs:
# Extract individual fields
echo "integration-tests=$(echo "$output" | jq -r '.integration_tests')" >> $GITHUB_OUTPUT
echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
echo "clang-tidy-mode=$(echo "$output" | jq -r '.clang_tidy_mode')" >> $GITHUB_OUTPUT
echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
echo "changed-cpp-file-count=$(echo "$output" | jq -r '.changed_cpp_file_count')" >> $GITHUB_OUTPUT
echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT
integration-tests:
@@ -247,7 +243,7 @@ jobs:
. venv/bin/activate
pytest -vv --no-cov --tb=native -n auto tests/integration/
clang-tidy-single:
clang-tidy:
name: ${{ matrix.name }}
runs-on: ubuntu-24.04
needs:
@@ -265,6 +261,22 @@ jobs:
name: Run script/clang-tidy for ESP8266
options: --environment esp8266-arduino-tidy --grep USE_ESP8266
pio_cache_key: tidyesp8266
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 1/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 2/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 3/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 4/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 IDF
options: --environment esp32-idf-tidy --grep USE_ESP_IDF
@@ -345,166 +357,6 @@ jobs:
# yamllint disable-line rule:line-length
if: always()
clang-tidy-nosplit:
name: Run script/clang-tidy for ESP32 Arduino
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.clang-tidy-mode == 'nosplit'
env:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
- name: Check if full clang-tidy scan needed
id: check_full_scan
run: |
. venv/bin/activate
if python script/clang_tidy_hash.py --check; then
echo "full_scan=true" >> $GITHUB_OUTPUT
echo "reason=hash_changed" >> $GITHUB_OUTPUT
else
echo "full_scan=false" >> $GITHUB_OUTPUT
echo "reason=normal" >> $GITHUB_OUTPUT
fi
- name: Run clang-tidy
run: |
. venv/bin/activate
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
echo "Running FULL clang-tidy scan (hash changed)"
script/clang-tidy --all-headers --fix --environment esp32-arduino-tidy
else
echo "Running clang-tidy on changed files only"
script/clang-tidy --all-headers --fix --changed --environment esp32-arduino-tidy
fi
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
- name: Suggested changes
run: script/ci-suggest-changes
if: always()
clang-tidy-split:
name: ${{ matrix.name }}
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.clang-tidy-mode == 'split'
env:
GH_TOKEN: ${{ github.token }}
strategy:
fail-fast: false
max-parallel: 1
matrix:
include:
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 1/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 2/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 3/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 4/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
- name: Check if full clang-tidy scan needed
id: check_full_scan
run: |
. venv/bin/activate
if python script/clang_tidy_hash.py --check; then
echo "full_scan=true" >> $GITHUB_OUTPUT
echo "reason=hash_changed" >> $GITHUB_OUTPUT
else
echo "full_scan=false" >> $GITHUB_OUTPUT
echo "reason=normal" >> $GITHUB_OUTPUT
fi
- name: Run clang-tidy
run: |
. venv/bin/activate
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
echo "Running FULL clang-tidy scan (hash changed)"
script/clang-tidy --all-headers --fix ${{ matrix.options }}
else
echo "Running clang-tidy on changed files only"
script/clang-tidy --all-headers --fix --changed ${{ matrix.options }}
fi
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
- name: Suggested changes
run: script/ci-suggest-changes
if: always()
test-build-components-splitter:
name: Split components for intelligent grouping (40 weighted per batch)
runs-on: ubuntu-24.04
@@ -912,13 +764,13 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Download target analysis JSON
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: memory-analysis-target
path: ./memory-analysis
continue-on-error: true
- name: Download PR analysis JSON
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: memory-analysis-pr
path: ./memory-analysis
@@ -945,9 +797,7 @@ jobs:
- pylint
- pytest
- integration-tests
- clang-tidy-single
- clang-tidy-nosplit
- clang-tidy-split
- clang-tidy
- determine-jobs
- test-build-components-splitter
- test-build-components-split

View File

@@ -161,7 +161,6 @@ esphome/components/esp32_rmt_led_strip/* @jesserockz
esphome/components/esp8266/* @esphome/core
esphome/components/esp_ldo/* @clydebarrow
esphome/components/espnow/* @jesserockz
esphome/components/espnow/packet_transport/* @EasilyBoredEngineer
esphome/components/ethernet_info/* @gtjadsonsantos
esphome/components/event/* @nohat
esphome/components/exposure_notifications/* @OttoWinter

View File

@@ -731,13 +731,6 @@ def command_vscode(args: ArgsProtocol) -> int | None:
def command_compile(args: ArgsProtocol, config: ConfigType) -> int | None:
# Set memory analysis options in config
if args.analyze_memory:
config.setdefault(CONF_ESPHOME, {})["analyze_memory"] = True
if args.memory_report:
config.setdefault(CONF_ESPHOME, {})["memory_report_file"] = args.memory_report
exit_code = write_cpp(config)
if exit_code != 0:
return exit_code
@@ -1199,17 +1192,6 @@ def parse_args(argv):
help="Only generate source code, do not compile.",
action="store_true",
)
parser_compile.add_argument(
"--analyze-memory",
help="Analyze and display memory usage by component after compilation.",
action="store_true",
)
parser_compile.add_argument(
"--memory-report",
help="Save memory analysis report to a file (supports .json or .txt).",
type=str,
metavar="FILE",
)
parser_upload = subparsers.add_parser(
"upload",

View File

@@ -1,7 +1,6 @@
"""CLI interface for memory analysis with report generation."""
from collections import defaultdict
import json
import sys
from . import (
@@ -271,28 +270,6 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
return "\n".join(lines)
def to_json(self) -> str:
"""Export analysis results as JSON."""
data = {
"components": {
name: {
"text": mem.text_size,
"rodata": mem.rodata_size,
"data": mem.data_size,
"bss": mem.bss_size,
"flash_total": mem.flash_total,
"ram_total": mem.ram_total,
"symbol_count": mem.symbol_count,
}
for name, mem in self.components.items()
},
"totals": {
"flash": sum(c.flash_total for c in self.components.values()),
"ram": sum(c.ram_total for c in self.components.values()),
},
}
return json.dumps(data, indent=2)
def dump_uncategorized_symbols(self, output_file: str | None = None) -> None:
"""Dump uncategorized symbols for analysis."""
# Sort by size descending

View File

@@ -155,17 +155,6 @@ def _validate_api_config(config: ConfigType) -> ConfigType:
return config
def _consume_api_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for API component."""
from esphome.components import socket
# API needs 1 listening socket + typically 3 concurrent client connections
# (not max_connections, which is the upper limit rarely reached)
sockets_needed = 1 + 3
socket.consume_sockets(sockets_needed, "api")(config)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
@@ -233,7 +222,6 @@ CONFIG_SCHEMA = cv.All(
).extend(cv.COMPONENT_SCHEMA),
cv.rename_key(CONF_SERVICES, CONF_ACTIONS),
_validate_api_config,
_consume_api_sockets,
)

View File

@@ -142,11 +142,6 @@ APIError APINoiseFrameHelper::loop() {
* errno API_ERROR_HANDSHAKE_PACKET_LEN: Packet too big for this phase.
*/
APIError APINoiseFrameHelper::try_read_frame_() {
// Clear buffer when starting a new frame (rx_buf_len_ == 0 means not resuming after WOULD_BLOCK)
if (this->rx_buf_len_ == 0) {
this->rx_buf_.clear();
}
// read header
if (rx_header_buf_len_ < 3) {
// no header information yet

View File

@@ -54,11 +54,6 @@ APIError APIPlaintextFrameHelper::loop() {
* error API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
*/
APIError APIPlaintextFrameHelper::try_read_frame_() {
// Clear buffer when starting a new frame (rx_buf_len_ == 0 means not resuming after WOULD_BLOCK)
if (this->rx_buf_len_ == 0) {
this->rx_buf_.clear();
}
// read header
while (!rx_header_parsed_) {
// Now that we know when the socket is ready, we can read up to 3 bytes

View File

@@ -51,7 +51,7 @@ void BinarySensor::add_filter(Filter *filter) {
last_filter->next_ = filter;
}
}
void BinarySensor::add_filters(std::initializer_list<Filter *> filters) {
void BinarySensor::add_filters(const std::vector<Filter *> &filters) {
for (Filter *filter : filters) {
this->add_filter(filter);
}

View File

@@ -4,7 +4,7 @@
#include "esphome/core/helpers.h"
#include "esphome/components/binary_sensor/filter.h"
#include <initializer_list>
#include <vector>
namespace esphome {
@@ -48,7 +48,7 @@ class BinarySensor : public StatefulEntityBase<bool>, public EntityBase_DeviceCl
void publish_initial_state(bool new_state);
void add_filter(Filter *filter);
void add_filters(std::initializer_list<Filter *> filters);
void add_filters(const std::vector<Filter *> &filters);
// ========== INTERNAL METHODS ==========
// (In most use cases you won't need these)

View File

@@ -77,6 +77,9 @@ void BLESensor::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t ga
}
} else {
this->node_state = espbt::ClientState::ESTABLISHED;
// For non-notify characteristics, trigger an immediate read after service discovery
// to avoid peripherals disconnecting due to inactivity
this->update();
}
break;
}

View File

@@ -79,6 +79,9 @@ void BLETextSensor::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_
}
} else {
this->node_state = espbt::ClientState::ESTABLISHED;
// For non-notify characteristics, trigger an immediate read after service discovery
// to avoid peripherals disconnecting due to inactivity
this->update();
}
break;
}

View File

@@ -1,4 +1,3 @@
import contextlib
from dataclasses import dataclass
import itertools
import logging
@@ -103,10 +102,6 @@ COMPILER_OPTIMIZATIONS = {
"SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE",
}
# Socket limit configuration for ESP-IDF
# ESP-IDF CONFIG_LWIP_MAX_SOCKETS has range 1-253, default 10
DEFAULT_MAX_SOCKETS = 10 # ESP-IDF default
ARDUINO_ALLOWED_VARIANTS = [
VARIANT_ESP32,
VARIANT_ESP32C3,
@@ -751,72 +746,6 @@ CONFIG_SCHEMA = cv.All(
FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate)
def _configure_lwip_max_sockets(conf: dict) -> None:
"""Calculate and set CONFIG_LWIP_MAX_SOCKETS based on component needs.
Socket component tracks consumer needs via consume_sockets() called during config validation.
This function runs in to_code() after all components have registered their socket needs.
User-provided sdkconfig_options take precedence.
"""
from esphome.components.socket import KEY_SOCKET_CONSUMERS
# Check if user manually specified CONFIG_LWIP_MAX_SOCKETS
user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get(
"CONFIG_LWIP_MAX_SOCKETS"
)
socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {})
total_sockets = sum(socket_consumers.values())
# Early return if no sockets registered and no user override
if total_sockets == 0 and user_max_sockets is None:
return
components_list = ", ".join(
f"{name}={count}" for name, count in sorted(socket_consumers.items())
)
# User specified their own value - respect it but warn if insufficient
if user_max_sockets is not None:
_LOGGER.info(
"Using user-provided CONFIG_LWIP_MAX_SOCKETS: %s",
user_max_sockets,
)
# Warn if user's value is less than what components need
if total_sockets > 0:
user_sockets_int = 0
with contextlib.suppress(ValueError, TypeError):
user_sockets_int = int(user_max_sockets)
if user_sockets_int < total_sockets:
_LOGGER.warning(
"CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration "
"needs %d sockets (registered: %s). You may experience socket "
"exhaustion errors. Consider increasing to at least %d.",
user_sockets_int,
total_sockets,
components_list,
total_sockets,
)
# User's value already added via sdkconfig_options processing
return
# Auto-calculate based on component needs
# Use at least the ESP-IDF default (10), or the total needed by components
max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets)
log_level = logging.INFO if max_sockets > DEFAULT_MAX_SOCKETS else logging.DEBUG
_LOGGER.log(
log_level,
"Setting CONFIG_LWIP_MAX_SOCKETS to %d (registered: %s)",
max_sockets,
components_list,
)
add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets)
async def to_code(config):
cg.add_platformio_option("board", config[CONF_BOARD])
cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE])
@@ -937,9 +866,6 @@ async def to_code(config):
add_idf_sdkconfig_option("CONFIG_LWIP_DNS_SUPPORT_MDNS_QUERIES", False)
if not advanced.get(CONF_ENABLE_LWIP_BRIDGE_INTERFACE, False):
add_idf_sdkconfig_option("CONFIG_LWIP_BRIDGEIF_MAX_PORTS", 0)
_configure_lwip_max_sockets(conf)
if advanced.get(CONF_EXECUTE_FROM_PSRAM, False):
add_idf_sdkconfig_option("CONFIG_SPIRAM_FETCH_INSTRUCTIONS", True)
add_idf_sdkconfig_option("CONFIG_SPIRAM_RODATA", True)

View File

@@ -76,10 +76,6 @@ void ESP32BLE::advertising_set_service_data(const std::vector<uint8_t> &data) {
}
void ESP32BLE::advertising_set_manufacturer_data(const std::vector<uint8_t> &data) {
this->advertising_set_manufacturer_data(std::span<const uint8_t>(data));
}
void ESP32BLE::advertising_set_manufacturer_data(std::span<const uint8_t> data) {
this->advertising_init_();
this->advertising_->set_manufacturer_data(data);
this->advertising_start();

View File

@@ -118,7 +118,6 @@ class ESP32BLE : public Component {
void advertising_start();
void advertising_set_service_data(const std::vector<uint8_t> &data);
void advertising_set_manufacturer_data(const std::vector<uint8_t> &data);
void advertising_set_manufacturer_data(std::span<const uint8_t> data);
void advertising_set_appearance(uint16_t appearance) { this->appearance_ = appearance; }
void advertising_set_service_data_and_name(std::span<const uint8_t> data, bool include_name);
void advertising_add_service_uuid(ESPBTUUID uuid);

View File

@@ -59,10 +59,6 @@ void BLEAdvertising::set_service_data(const std::vector<uint8_t> &data) {
}
void BLEAdvertising::set_manufacturer_data(const std::vector<uint8_t> &data) {
this->set_manufacturer_data(std::span<const uint8_t>(data));
}
void BLEAdvertising::set_manufacturer_data(std::span<const uint8_t> data) {
delete[] this->advertising_data_.p_manufacturer_data;
this->advertising_data_.p_manufacturer_data = nullptr;
this->advertising_data_.manufacturer_len = data.size();

View File

@@ -37,7 +37,6 @@ class BLEAdvertising {
void set_scan_response(bool scan_response) { this->scan_response_ = scan_response; }
void set_min_preferred_interval(uint16_t interval) { this->advertising_data_.min_interval = interval; }
void set_manufacturer_data(const std::vector<uint8_t> &data);
void set_manufacturer_data(std::span<const uint8_t> data);
void set_appearance(uint16_t appearance) { this->advertising_data_.appearance = appearance; }
void set_service_data(const std::vector<uint8_t> &data);
void set_service_data(std::span<const uint8_t> data);

View File

@@ -1,6 +1,5 @@
#include "esp32_ble_beacon.h"
#include "esphome/core/log.h"
#include "esphome/core/helpers.h"
#ifdef USE_ESP32

View File

@@ -15,10 +15,7 @@ Trigger<std::vector<uint8_t>, uint16_t> *BLETriggers::create_characteristic_on_w
Trigger<std::vector<uint8_t>, uint16_t> *on_write_trigger = // NOLINT(cppcoreguidelines-owning-memory)
new Trigger<std::vector<uint8_t>, uint16_t>();
characteristic->on_write([on_write_trigger](std::span<const uint8_t> data, uint16_t id) {
// Convert span to vector for trigger - copy is necessary because:
// 1. Trigger stores the data for use in automation actions that execute later
// 2. The span is only valid during this callback (points to temporary BLE stack data)
// 3. User lambdas in automations need persistent data they can access asynchronously
// Convert span to vector for trigger
on_write_trigger->trigger(std::vector<uint8_t>(data.begin(), data.end()), id);
});
return on_write_trigger;
@@ -30,10 +27,7 @@ Trigger<std::vector<uint8_t>, uint16_t> *BLETriggers::create_descriptor_on_write
Trigger<std::vector<uint8_t>, uint16_t> *on_write_trigger = // NOLINT(cppcoreguidelines-owning-memory)
new Trigger<std::vector<uint8_t>, uint16_t>();
descriptor->on_write([on_write_trigger](std::span<const uint8_t> data, uint16_t id) {
// Convert span to vector for trigger - copy is necessary because:
// 1. Trigger stores the data for use in automation actions that execute later
// 2. The span is only valid during this callback (points to temporary BLE stack data)
// 3. User lambdas in automations need persistent data they can access asynchronously
// Convert span to vector for trigger
on_write_trigger->trigger(std::vector<uint8_t>(data.begin(), data.end()), id);
});
return on_write_trigger;

View File

@@ -1,7 +1,6 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_MODE, CONF_PORT
from esphome.types import ConfigType
CODEOWNERS = ["@ayufan"]
AUTO_LOAD = ["camera"]
@@ -14,27 +13,13 @@ Mode = esp32_camera_web_server_ns.enum("Mode")
MODES = {"STREAM": Mode.STREAM, "SNAPSHOT": Mode.SNAPSHOT}
def _consume_camera_web_server_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for camera web server."""
from esphome.components import socket
# Each camera web server instance needs 1 listening socket + 2 client connections
sockets_needed = 3
socket.consume_sockets(sockets_needed, "esp32_camera_web_server")(config)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(CameraWebServer),
cv.Required(CONF_PORT): cv.port,
cv.Required(CONF_MODE): cv.enum(MODES, upper=True),
},
).extend(cv.COMPONENT_SCHEMA),
_consume_camera_web_server_sockets,
)
CONFIG_SCHEMA = cv.Schema(
{
cv.GenerateID(): cv.declare_id(CameraWebServer),
cv.Required(CONF_PORT): cv.port,
cv.Required(CONF_MODE): cv.enum(MODES, upper=True),
},
).extend(cv.COMPONENT_SCHEMA)
async def to_code(config):

View File

@@ -95,7 +95,7 @@ async def to_code(config):
if framework_ver >= cv.Version(5, 5, 0):
esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="1.1.5")
esp32.add_idf_component(name="espressif/eppp_link", ref="1.1.3")
esp32.add_idf_component(name="espressif/esp_hosted", ref="2.6.1")
esp32.add_idf_component(name="espressif/esp_hosted", ref="2.5.11")
else:
esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="0.13.0")
esp32.add_idf_component(name="espressif/eppp_link", ref="0.2.0")

View File

@@ -384,32 +384,26 @@ void ESP32ImprovComponent::check_wifi_connection_() {
this->connecting_sta_ = {};
this->cancel_timeout("wifi-connect-timeout");
// Build URL list with minimal allocations
// Maximum 3 URLs: custom next_url + ESPHOME_MY_LINK + webserver URL
std::string url_strings[3];
size_t url_count = 0;
std::vector<std::string> urls;
// Add next_url if configured (should be first per Improv BLE spec)
std::string next_url = this->get_formatted_next_url_();
if (!next_url.empty()) {
url_strings[url_count++] = std::move(next_url);
urls.push_back(next_url);
}
// Add default URLs for backward compatibility
url_strings[url_count++] = ESPHOME_MY_LINK;
urls.emplace_back(ESPHOME_MY_LINK);
#ifdef USE_WEBSERVER
for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) {
if (ip.is_ip4()) {
char url_buffer[64];
snprintf(url_buffer, sizeof(url_buffer), "http://%s:%d", ip.str().c_str(), USE_WEBSERVER_PORT);
url_strings[url_count++] = url_buffer;
std::string webserver_url = "http://" + ip.str() + ":" + to_string(USE_WEBSERVER_PORT);
urls.push_back(webserver_url);
break;
}
}
#endif
// Pass to build_rpc_response using vector constructor from iterators to avoid extra copies
std::vector<uint8_t> data = improv::build_rpc_response(
improv::WIFI_SETTINGS, std::vector<std::string>(url_strings, url_strings + url_count));
std::vector<uint8_t> data = improv::build_rpc_response(improv::WIFI_SETTINGS, urls);
this->send_response_(data);
} else if (this->is_active() && this->state_ != improv::STATE_PROVISIONED) {
ESP_LOGD(TAG, "WiFi provisioned externally");

View File

@@ -190,9 +190,7 @@ async def to_code(config):
cg.add_define("ESPHOME_VARIANT", "ESP8266")
cg.add_define(ThreadModel.SINGLE)
cg.add_platformio_option(
"extra_scripts", ["pre:testing_mode.py", "post:post_build.py"]
)
cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"])
conf = config[CONF_FRAMEWORK]
cg.add_platformio_option("framework", "arduino")
@@ -232,9 +230,9 @@ async def to_code(config):
# For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;`
cg.add_build_flag("-DNEW_OOM_ABORT")
# In testing mode, fake larger memory to allow linking grouped component tests
# Real ESP8266 hardware only has 32KB IRAM and ~80KB RAM, but for CI testing
# we pretend it has much larger memory to test that components compile together
# In testing mode, fake a larger IRAM to allow linking grouped component tests
# Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB
# This is done via a pre-build script that generates a custom linker script
if CORE.testing_mode:
cg.add_build_flag("-DESPHOME_TESTING_MODE")
@@ -273,8 +271,8 @@ def copy_files():
post_build_file,
CORE.relative_build_path("post_build.py"),
)
testing_mode_file = dir / "testing_mode.py.script"
iram_fix_file = dir / "iram_fix.py.script"
copy_file_if_changed(
testing_mode_file,
CORE.relative_build_path("testing_mode.py"),
iram_fix_file,
CORE.relative_build_path("iram_fix.py"),
)

View File

@@ -0,0 +1,44 @@
import os
import re
# pylint: disable=E0602
Import("env") # noqa
def patch_linker_script_after_preprocess(source, target, env):
"""Patch the local linker script after PlatformIO preprocesses it."""
# Check if we're in testing mode by looking for the define
build_flags = env.get("BUILD_FLAGS", [])
testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)
if not testing_mode:
return
# Get the local linker script path
build_dir = env.subst("$BUILD_DIR")
local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld")
if not os.path.exists(local_ld):
return
# Read the linker script
with open(local_ld, "r") as f:
content = f.read()
# Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB)
# The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000
updated = re.sub(
r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000",
r"\g<1>0x200000",
content,
)
if updated != content:
with open(local_ld, "w") as f:
f.write(updated)
print("ESPHome: Patched IRAM size to 2MB for testing mode")
# Hook into the build process right before linking
# This runs after PlatformIO has already preprocessed the linker scripts
env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess)

View File

@@ -1,166 +0,0 @@
import os
import re
# pylint: disable=E0602
Import("env") # noqa
# Memory sizes for testing mode (allow larger builds for CI component grouping)
TESTING_IRAM_SIZE = "0x200000" # 2MB
TESTING_DRAM_SIZE = "0x200000" # 2MB
TESTING_FLASH_SIZE = "0x2000000" # 32MB
def patch_segment_size(content, segment_name, new_size, label):
"""Patch a memory segment's length in linker script.
Args:
content: Linker script content
segment_name: Name of the segment (e.g., 'iram1_0_seg')
new_size: New size as hex string (e.g., '0x200000')
label: Human-readable label for logging (e.g., 'IRAM')
Returns:
Tuple of (patched_content, was_patched)
"""
# Match: segment_name : org = 0x..., len = 0x...
pattern = rf"({segment_name}\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+"
new_content = re.sub(pattern, rf"\g<1>{new_size}", content)
return new_content, new_content != content
def apply_memory_patches(content):
"""Apply IRAM, DRAM, and Flash patches to linker script content.
Args:
content: Linker script content as string
Returns:
Patched content as string
"""
patches_applied = []
# Patch IRAM (for larger code in IRAM)
content, patched = patch_segment_size(content, "iram1_0_seg", TESTING_IRAM_SIZE, "IRAM")
if patched:
patches_applied.append("IRAM")
# Patch DRAM (for larger BSS/data sections)
content, patched = patch_segment_size(content, "dram0_0_seg", TESTING_DRAM_SIZE, "DRAM")
if patched:
patches_applied.append("DRAM")
# Patch Flash (for larger code sections)
content, patched = patch_segment_size(content, "irom0_0_seg", TESTING_FLASH_SIZE, "Flash")
if patched:
patches_applied.append("Flash")
if patches_applied:
iram_mb = int(TESTING_IRAM_SIZE, 16) // (1024 * 1024)
dram_mb = int(TESTING_DRAM_SIZE, 16) // (1024 * 1024)
flash_mb = int(TESTING_FLASH_SIZE, 16) // (1024 * 1024)
print(f" Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: {iram_mb}MB, Flash: {flash_mb}MB)")
return content
def patch_linker_script_file(filepath, description):
"""Patch a linker script file in the build directory with enlarged memory segments.
This function modifies linker scripts in the build directory only (never SDK files).
It patches IRAM, DRAM, and Flash segments to allow larger builds in testing mode.
Args:
filepath: Path to the linker script file in the build directory
description: Human-readable description for logging
Returns:
True if the file was patched, False if already patched or not found
"""
if not os.path.exists(filepath):
print(f"ESPHome: {description} not found at {filepath}")
return False
print(f"ESPHome: Patching {description}...")
with open(filepath, "r") as f:
content = f.read()
patched_content = apply_memory_patches(content)
if patched_content != content:
with open(filepath, "w") as f:
f.write(patched_content)
print(f"ESPHome: Successfully patched {description}")
return True
else:
print(f"ESPHome: {description} already patched or no changes needed")
return False
def patch_local_linker_script(source, target, env):
"""Patch the local.eagle.app.v6.common.ld in build directory.
This patches the preprocessed linker script that PlatformIO creates in the build
directory, enlarging IRAM, DRAM, and Flash segments for testing mode.
Args:
source: SCons source nodes
target: SCons target nodes
env: SCons environment
"""
# Check if we're in testing mode
build_flags = env.get("BUILD_FLAGS", [])
testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)
if not testing_mode:
return
# Patch the local linker script if it exists
build_dir = env.subst("$BUILD_DIR")
ld_dir = os.path.join(build_dir, "ld")
if os.path.exists(ld_dir):
local_ld = os.path.join(ld_dir, "local.eagle.app.v6.common.ld")
if os.path.exists(local_ld):
patch_linker_script_file(local_ld, "local.eagle.app.v6.common.ld")
# Check if we're in testing mode
build_flags = env.get("BUILD_FLAGS", [])
testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)
if testing_mode:
# Create a custom linker script in the build directory with patched memory limits
# This allows larger IRAM/DRAM/Flash for CI component grouping tests
build_dir = env.subst("$BUILD_DIR")
ldscript = env.GetProjectOption("board_build.ldscript", "")
assert ldscript, "No linker script configured in board_build.ldscript"
framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266")
assert framework_dir is not None, "Could not find framework-arduinoespressif8266 package"
# Read the original SDK linker script (read-only, SDK is never modified)
sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript)
# Create a custom version in the build directory (isolated, temporary)
custom_ld = os.path.join(build_dir, f"testing_{ldscript}")
if os.path.exists(sdk_ld) and not os.path.exists(custom_ld):
# Read the SDK linker script
with open(sdk_ld, "r") as f:
content = f.read()
# Apply memory patches (IRAM: 2MB, DRAM: 2MB, Flash: 32MB)
patched_content = apply_memory_patches(content)
# Write the patched linker script to the build directory
with open(custom_ld, "w") as f:
f.write(patched_content)
print(f"ESPHome: Created custom linker script: {custom_ld}")
# Tell the linker to use our custom script from the build directory
assert os.path.exists(custom_ld), f"Custom linker script not found: {custom_ld}"
env.Replace(LDSCRIPT_PATH=custom_ld)
print(f"ESPHome: Using custom linker script with patched memory limits")
# Also patch local.eagle.app.v6.common.ld after PlatformIO creates it
env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_local_linker_script)

View File

@@ -103,16 +103,7 @@ def ota_esphome_final_validate(config):
)
def _consume_ota_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for OTA component."""
from esphome.components import socket
# OTA needs 1 listening socket (client connections are temporary during updates)
socket.consume_sockets(1, "ota")(config)
return config
CONFIG_SCHEMA = cv.All(
CONFIG_SCHEMA = (
cv.Schema(
{
cv.GenerateID(): cv.declare_id(ESPHomeOTAComponent),
@@ -139,8 +130,7 @@ CONFIG_SCHEMA = cv.All(
}
)
.extend(BASE_OTA_SCHEMA)
.extend(cv.COMPONENT_SCHEMA),
_consume_ota_sockets,
.extend(cv.COMPONENT_SCHEMA)
)
FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate

View File

@@ -14,13 +14,13 @@ template<typename... Ts> class SendAction : public Action<Ts...>, public Parente
TEMPLATABLE_VALUE(std::vector<uint8_t>, data);
public:
void add_on_sent(const std::initializer_list<Action<Ts...> *> &actions) {
void add_on_sent(const std::vector<Action<Ts...> *> &actions) {
this->sent_.add_actions(actions);
if (this->flags_.wait_for_sent) {
this->sent_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
}
}
void add_on_error(const std::initializer_list<Action<Ts...> *> &actions) {
void add_on_error(const std::vector<Action<Ts...> *> &actions) {
this->error_.add_actions(actions);
if (this->flags_.wait_for_sent) {
this->error_.add_action(new LambdaAction<Ts...>([this](Ts... x) {

View File

@@ -1,39 +0,0 @@
"""ESP-NOW transport platform for packet_transport component."""
import esphome.codegen as cg
from esphome.components.packet_transport import (
PacketTransport,
new_packet_transport,
transport_schema,
)
import esphome.config_validation as cv
from esphome.core import HexInt
from esphome.cpp_types import PollingComponent
from .. import ESPNowComponent, espnow_ns
CODEOWNERS = ["@EasilyBoredEngineer"]
DEPENDENCIES = ["espnow"]
ESPNowTransport = espnow_ns.class_("ESPNowTransport", PacketTransport, PollingComponent)
CONF_ESPNOW_ID = "espnow_id"
CONF_PEER_ADDRESS = "peer_address"
CONFIG_SCHEMA = transport_schema(ESPNowTransport).extend(
{
cv.GenerateID(CONF_ESPNOW_ID): cv.use_id(ESPNowComponent),
cv.Optional(CONF_PEER_ADDRESS, default="FF:FF:FF:FF:FF:FF"): cv.mac_address,
}
)
async def to_code(config):
"""Set up the ESP-NOW transport component."""
var, _ = await new_packet_transport(config)
await cg.register_parented(var, config[CONF_ESPNOW_ID])
# Set peer address - convert MAC to parts array like ESP-NOW does
mac = config[CONF_PEER_ADDRESS]
cg.add(var.set_peer_address([HexInt(x) for x in mac.parts]))

View File

@@ -1,97 +0,0 @@
#include "espnow_transport.h"
#ifdef USE_ESP32
#include "esphome/core/application.h"
#include "esphome/core/log.h"
namespace esphome {
namespace espnow {
static const char *const TAG = "espnow.transport";
bool ESPNowTransport::should_send() { return this->parent_ != nullptr && !this->parent_->is_failed(); }
void ESPNowTransport::setup() {
packet_transport::PacketTransport::setup();
if (this->parent_ == nullptr) {
ESP_LOGE(TAG, "ESPNow component not set");
this->mark_failed();
return;
}
ESP_LOGI(TAG, "Registering ESP-NOW handlers");
ESP_LOGI(TAG, "Peer address: %02X:%02X:%02X:%02X:%02X:%02X", this->peer_address_[0], this->peer_address_[1],
this->peer_address_[2], this->peer_address_[3], this->peer_address_[4], this->peer_address_[5]);
// Register received handler
this->parent_->register_received_handler(static_cast<ESPNowReceivedPacketHandler *>(this));
// Register broadcasted handler
this->parent_->register_broadcasted_handler(static_cast<ESPNowBroadcastedHandler *>(this));
}
void ESPNowTransport::update() {
packet_transport::PacketTransport::update();
this->updated_ = true;
}
void ESPNowTransport::send_packet(const std::vector<uint8_t> &buf) const {
if (this->parent_ == nullptr) {
ESP_LOGE(TAG, "ESPNow component not set");
return;
}
if (buf.empty()) {
ESP_LOGW(TAG, "Attempted to send empty packet");
return;
}
if (buf.size() > ESP_NOW_MAX_DATA_LEN) {
ESP_LOGE(TAG, "Packet too large: %zu bytes (max %d)", buf.size(), ESP_NOW_MAX_DATA_LEN);
return;
}
// Send to configured peer address
this->parent_->send(this->peer_address_.data(), buf.data(), buf.size(), [](esp_err_t err) {
if (err != ESP_OK) {
ESP_LOGW(TAG, "Send failed: %d", err);
}
});
}
bool ESPNowTransport::on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) {
ESP_LOGV(TAG, "Received packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0],
info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]);
if (data == nullptr || size == 0) {
ESP_LOGW(TAG, "Received empty or null packet");
return false;
}
this->packet_buffer_.resize(size);
memcpy(this->packet_buffer_.data(), data, size);
this->process_(this->packet_buffer_);
return false; // Allow other handlers to run
}
bool ESPNowTransport::on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) {
ESP_LOGV(TAG, "Received broadcast packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0],
info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]);
if (data == nullptr || size == 0) {
ESP_LOGW(TAG, "Received empty or null broadcast packet");
return false;
}
this->packet_buffer_.resize(size);
memcpy(this->packet_buffer_.data(), data, size);
this->process_(this->packet_buffer_);
return false; // Allow other handlers to run
}
} // namespace espnow
} // namespace esphome
#endif // USE_ESP32

View File

@@ -1,44 +0,0 @@
#pragma once
#include "../espnow_component.h"
#ifdef USE_ESP32
#include "esphome/core/component.h"
#include "esphome/components/packet_transport/packet_transport.h"
#include <vector>
namespace esphome {
namespace espnow {
// Packet-transport implementation on top of ESP-NOW.
// Registers itself with the parent ESPNowComponent as both a unicast and a
// broadcast receive handler, and sends outgoing packets to peer_address_.
class ESPNowTransport : public packet_transport::PacketTransport,
                        public Parented<ESPNowComponent>,
                        public ESPNowReceivedPacketHandler,
                        public ESPNowBroadcastedHandler {
 public:
  void setup() override;
  void update() override;
  // Runs after Wi-Fi is up, since ESP-NOW shares the Wi-Fi radio stack.
  float get_setup_priority() const override { return setup_priority::AFTER_WIFI; }
  // Copy the destination MAC (ESP_NOW_ETH_ALEN bytes) into peer_address_.
  void set_peer_address(peer_address_t address) {
    memcpy(this->peer_address_.data(), address.data(), ESP_NOW_ETH_ALEN);
  }
  // ESPNow handler interface
  bool on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override;
  bool on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override;
 protected:
  void send_packet(const std::vector<uint8_t> &buf) const override;
  // ESP-NOW's maximum payload bounds the transport packet size.
  size_t get_max_packet_size() override { return ESP_NOW_MAX_DATA_LEN; }
  bool should_send() override;
  // Destination MAC; defaults to the broadcast address FF:FF:FF:FF:FF:FF.
  peer_address_t peer_address_{{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}};
  // Reusable buffer for incoming payloads, filled in on_received/on_broadcasted.
  std::vector<uint8_t> packet_buffer_;
};
}  // namespace espnow
}  // namespace esphome
#endif  // USE_ESP32

View File

@@ -16,8 +16,7 @@ void HDC1080Component::setup() {
// if configuration fails - there is a problem
if (this->write_register(HDC1080_CMD_CONFIGURATION, config, 2) != i2c::ERROR_OK) {
ESP_LOGW(TAG, "Failed to configure HDC1080");
this->status_set_warning();
this->mark_failed();
return;
}
}

View File

@@ -107,7 +107,7 @@ void IDFI2CBus::dump_config() {
if (s.second) {
ESP_LOGCONFIG(TAG, "Found device at address 0x%02X", s.first);
} else {
ESP_LOGCONFIG(TAG, "Unknown error at address 0x%02X", s.first);
ESP_LOGE(TAG, "Unknown error at address 0x%02X", s.first);
}
}
}

View File

@@ -6,21 +6,6 @@
namespace esphome {
namespace improv_base {
static constexpr const char DEVICE_NAME_PLACEHOLDER[] = "{{device_name}}";
static constexpr size_t DEVICE_NAME_PLACEHOLDER_LEN = sizeof(DEVICE_NAME_PLACEHOLDER) - 1;
static constexpr const char IP_ADDRESS_PLACEHOLDER[] = "{{ip_address}}";
static constexpr size_t IP_ADDRESS_PLACEHOLDER_LEN = sizeof(IP_ADDRESS_PLACEHOLDER) - 1;
static void replace_all_in_place(std::string &str, const char *placeholder, size_t placeholder_len,
const std::string &replacement) {
size_t pos = 0;
const size_t replacement_len = replacement.length();
while ((pos = str.find(placeholder, pos)) != std::string::npos) {
str.replace(pos, placeholder_len, replacement);
pos += replacement_len;
}
}
std::string ImprovBase::get_formatted_next_url_() {
if (this->next_url_.empty()) {
return "";
@@ -29,15 +14,28 @@ std::string ImprovBase::get_formatted_next_url_() {
std::string formatted_url = this->next_url_;
// Replace all occurrences of {{device_name}}
replace_all_in_place(formatted_url, DEVICE_NAME_PLACEHOLDER, DEVICE_NAME_PLACEHOLDER_LEN, App.get_name());
const std::string device_name_placeholder = "{{device_name}}";
const std::string &device_name = App.get_name();
size_t pos = 0;
while ((pos = formatted_url.find(device_name_placeholder, pos)) != std::string::npos) {
formatted_url.replace(pos, device_name_placeholder.length(), device_name);
pos += device_name.length();
}
// Replace all occurrences of {{ip_address}}
const std::string ip_address_placeholder = "{{ip_address}}";
std::string ip_address_str;
for (auto &ip : network::get_ip_addresses()) {
if (ip.is_ip4()) {
replace_all_in_place(formatted_url, IP_ADDRESS_PLACEHOLDER, IP_ADDRESS_PLACEHOLDER_LEN, ip.str());
ip_address_str = ip.str();
break;
}
}
pos = 0;
while ((pos = formatted_url.find(ip_address_placeholder, pos)) != std::string::npos) {
formatted_url.replace(pos, ip_address_placeholder.length(), ip_address_str);
pos += ip_address_str.length();
}
// Note: {{esphome_version}} is replaced at code generation time in Python

View File

@@ -56,7 +56,7 @@ void MCP23016::pin_mode(uint8_t pin, gpio::Flags flags) {
this->update_reg_(pin, false, iodir);
}
}
float MCP23016::get_setup_priority() const { return setup_priority::IO; }
float MCP23016::get_setup_priority() const { return setup_priority::HARDWARE; }
bool MCP23016::read_reg_(uint8_t reg, uint8_t *value) {
if (this->is_failed())
return false;

View File

@@ -13,7 +13,6 @@ from esphome.const import (
)
from esphome.core import CORE, Lambda, coroutine_with_priority
from esphome.coroutine import CoroPriority
from esphome.types import ConfigType
CODEOWNERS = ["@esphome/core"]
DEPENDENCIES = ["network"]
@@ -47,19 +46,6 @@ SERVICE_SCHEMA = cv.Schema(
}
)
def _consume_mdns_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for mDNS component."""
if config.get(CONF_DISABLED):
return config
from esphome.components import socket
# mDNS needs 2 sockets (IPv4 + IPv6 multicast)
socket.consume_sockets(2, "mdns")(config)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
@@ -69,7 +55,6 @@ CONFIG_SCHEMA = cv.All(
}
),
_remove_id_if_disabled,
_consume_mdns_sockets,
)

View File

@@ -58,7 +58,6 @@ from esphome.const import (
PlatformFramework,
)
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.types import ConfigType
DEPENDENCIES = ["network"]
@@ -211,15 +210,6 @@ def validate_fingerprint(value):
return value
def _consume_mqtt_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for MQTT component."""
from esphome.components import socket
# MQTT needs 1 socket for the broker connection
socket.consume_sockets(1, "mqtt")(config)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
@@ -316,7 +306,6 @@ CONFIG_SCHEMA = cv.All(
),
validate_config,
cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266, PLATFORM_BK72XX]),
_consume_mqtt_sockets,
)

View File

@@ -62,7 +62,7 @@ CONF_WARNING_MPPT_OVERLOAD = "warning_mppt_overload"
CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE = "warning_battery_too_low_to_charge"
CONF_FAULT_DC_DC_OVER_CURRENT = "fault_dc_dc_over_current"
CONF_FAULT_CODE = "fault_code"
CONF_WARNING_LOW_PV_ENERGY = "warning_low_pv_energy"
CONF_WARNUNG_LOW_PV_ENERGY = "warnung_low_pv_energy"
CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START = (
"warning_high_ac_input_during_bus_soft_start"
)
@@ -122,7 +122,7 @@ TYPES = [
CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE,
CONF_FAULT_DC_DC_OVER_CURRENT,
CONF_FAULT_CODE,
CONF_WARNING_LOW_PV_ENERGY,
CONF_WARNUNG_LOW_PV_ENERGY,
CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START,
CONF_WARNING_BATTERY_EQUALIZATION,
]

View File

@@ -13,7 +13,7 @@ void PipsolarOutput::write_state(float state) {
if (std::find(this->possible_values_.begin(), this->possible_values_.end(), state) != this->possible_values_.end()) {
ESP_LOGD(TAG, "Will write: %s out of value %f / %02.0f", tmp, state, state);
this->parent_->queue_command(std::string(tmp));
this->parent_->switch_command(std::string(tmp));
} else {
ESP_LOGD(TAG, "Will not write: %s as it is not in list of allowed values", tmp);
}

File diff suppressed because it is too large Load Diff

View File

@@ -7,7 +7,6 @@
#include "esphome/components/uart/uart.h"
#include "esphome/core/automation.h"
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
namespace esphome {
namespace pipsolar {
@@ -29,17 +28,10 @@ struct PollingCommand {
bool needs_update;
};
struct QFLAGValues {
esphome::optional<bool> silence_buzzer_open_buzzer;
esphome::optional<bool> overload_bypass_function;
esphome::optional<bool> lcd_escape_to_default;
esphome::optional<bool> overload_restart_function;
esphome::optional<bool> over_temperature_restart_function;
esphome::optional<bool> backlight_on;
esphome::optional<bool> alarm_on_when_primary_source_interrupt;
esphome::optional<bool> fault_code_record;
esphome::optional<bool> power_saving;
};
#define PIPSOLAR_VALUED_ENTITY_(type, name, polling_command, value_type) \
protected: \
value_type value_##name##_; \
PIPSOLAR_ENTITY_(type, name, polling_command)
#define PIPSOLAR_ENTITY_(type, name, polling_command) \
protected: \
@@ -51,123 +43,126 @@ struct QFLAGValues {
this->add_polling_command_(#polling_command, POLLING_##polling_command); \
}
#define PIPSOLAR_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(sensor::Sensor, name, polling_command)
#define PIPSOLAR_SENSOR(name, polling_command, value_type) \
PIPSOLAR_VALUED_ENTITY_(sensor::Sensor, name, polling_command, value_type)
#define PIPSOLAR_SWITCH(name, polling_command) PIPSOLAR_ENTITY_(switch_::Switch, name, polling_command)
#define PIPSOLAR_BINARY_SENSOR(name, polling_command) \
PIPSOLAR_ENTITY_(binary_sensor::BinarySensor, name, polling_command)
#define PIPSOLAR_BINARY_SENSOR(name, polling_command, value_type) \
PIPSOLAR_VALUED_ENTITY_(binary_sensor::BinarySensor, name, polling_command, value_type)
#define PIPSOLAR_VALUED_TEXT_SENSOR(name, polling_command, value_type) \
PIPSOLAR_VALUED_ENTITY_(text_sensor::TextSensor, name, polling_command, value_type)
#define PIPSOLAR_TEXT_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(text_sensor::TextSensor, name, polling_command)
class Pipsolar : public uart::UARTDevice, public PollingComponent {
// QPIGS values
PIPSOLAR_SENSOR(grid_voltage, QPIGS)
PIPSOLAR_SENSOR(grid_frequency, QPIGS)
PIPSOLAR_SENSOR(ac_output_voltage, QPIGS)
PIPSOLAR_SENSOR(ac_output_frequency, QPIGS)
PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS)
PIPSOLAR_SENSOR(ac_output_active_power, QPIGS)
PIPSOLAR_SENSOR(output_load_percent, QPIGS)
PIPSOLAR_SENSOR(bus_voltage, QPIGS)
PIPSOLAR_SENSOR(battery_voltage, QPIGS)
PIPSOLAR_SENSOR(battery_charging_current, QPIGS)
PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS)
PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS)
PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS)
PIPSOLAR_SENSOR(pv_input_voltage, QPIGS)
PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS)
PIPSOLAR_SENSOR(battery_discharge_current, QPIGS)
PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS)
PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS)
PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS)
PIPSOLAR_BINARY_SENSOR(load_status, QPIGS)
PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS)
PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS)
PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS)
PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS)
PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS) //.1 scale
PIPSOLAR_SENSOR(eeprom_version, QPIGS)
PIPSOLAR_SENSOR(pv_charging_power, QPIGS)
PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS)
PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS)
PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS)
PIPSOLAR_SENSOR(grid_voltage, QPIGS, float)
PIPSOLAR_SENSOR(grid_frequency, QPIGS, float)
PIPSOLAR_SENSOR(ac_output_voltage, QPIGS, float)
PIPSOLAR_SENSOR(ac_output_frequency, QPIGS, float)
PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS, int)
PIPSOLAR_SENSOR(ac_output_active_power, QPIGS, int)
PIPSOLAR_SENSOR(output_load_percent, QPIGS, int)
PIPSOLAR_SENSOR(bus_voltage, QPIGS, int)
PIPSOLAR_SENSOR(battery_voltage, QPIGS, float)
PIPSOLAR_SENSOR(battery_charging_current, QPIGS, int)
PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS, int)
PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS, int)
PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS, float)
PIPSOLAR_SENSOR(pv_input_voltage, QPIGS, float)
PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS, float)
PIPSOLAR_SENSOR(battery_discharge_current, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(load_status, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS, int)
PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS, int) //.1 scale
PIPSOLAR_SENSOR(eeprom_version, QPIGS, int)
PIPSOLAR_SENSOR(pv_charging_power, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS, int)
PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS, int)
// QPIRI values
PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI)
PIPSOLAR_SENSOR(grid_rating_current, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI)
PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI)
PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_under_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_float_voltage, QPIRI)
PIPSOLAR_SENSOR(battery_type, QPIRI)
PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI)
PIPSOLAR_SENSOR(current_max_charging_current, QPIRI)
PIPSOLAR_SENSOR(input_voltage_range, QPIRI)
PIPSOLAR_SENSOR(output_source_priority, QPIRI)
PIPSOLAR_SENSOR(charger_source_priority, QPIRI)
PIPSOLAR_SENSOR(parallel_max_num, QPIRI)
PIPSOLAR_SENSOR(machine_type, QPIRI)
PIPSOLAR_SENSOR(topology, QPIRI)
PIPSOLAR_SENSOR(output_mode, QPIRI)
PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI)
PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI)
PIPSOLAR_SENSOR(pv_power_balance, QPIRI)
PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI, float)
PIPSOLAR_SENSOR(grid_rating_current, QPIRI, float)
PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI, float)
PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI, float)
PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI, float)
PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI, int)
PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI, int)
PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_under_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_float_voltage, QPIRI, float)
PIPSOLAR_SENSOR(battery_type, QPIRI, int)
PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI, int)
PIPSOLAR_SENSOR(current_max_charging_current, QPIRI, int)
PIPSOLAR_SENSOR(input_voltage_range, QPIRI, int)
PIPSOLAR_SENSOR(output_source_priority, QPIRI, int)
PIPSOLAR_SENSOR(charger_source_priority, QPIRI, int)
PIPSOLAR_SENSOR(parallel_max_num, QPIRI, int)
PIPSOLAR_SENSOR(machine_type, QPIRI, int)
PIPSOLAR_SENSOR(topology, QPIRI, int)
PIPSOLAR_SENSOR(output_mode, QPIRI, int)
PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI, float)
PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI, int)
PIPSOLAR_SENSOR(pv_power_balance, QPIRI, int)
// QMOD values
PIPSOLAR_TEXT_SENSOR(device_mode, QMOD)
PIPSOLAR_VALUED_TEXT_SENSOR(device_mode, QMOD, char)
// QFLAG values
PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG)
PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG)
PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG)
PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG)
PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG)
PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG)
PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG)
PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG)
PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG)
PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG, int)
PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG, int)
// QPIWS values
PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS)
PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS)
PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_low_pv_energy, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS)
PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS)
PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS, int)
PIPSOLAR_BINARY_SENSOR(warnung_low_pv_energy, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS, bool)
PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS, bool)
PIPSOLAR_TEXT_SENSOR(last_qpigs, QPIGS)
PIPSOLAR_TEXT_SENSOR(last_qpiri, QPIRI)
@@ -185,14 +180,14 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
PIPSOLAR_SWITCH(pv_ok_condition_for_parallel_switch, QPIRI)
PIPSOLAR_SWITCH(pv_power_balance_switch, QPIRI)
void queue_command(const std::string &command);
void switch_command(const std::string &command);
void setup() override;
void loop() override;
void dump_config() override;
void update() override;
protected:
static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 128; // maximum supported answer length
static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 110; // maximum supported answer length
static const size_t COMMAND_QUEUE_LENGTH = 10;
static const size_t COMMAND_TIMEOUT = 5000;
static const size_t POLLING_COMMANDS_MAX = 15;
@@ -203,26 +198,7 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
uint16_t pipsolar_crc_(uint8_t *msg, uint8_t len);
bool send_next_command_();
bool send_next_poll_();
void handle_qpiri_(const char *message);
void handle_qpigs_(const char *message);
void handle_qmod_(const char *message);
void handle_qflag_(const char *message);
void handle_qpiws_(const char *message);
void handle_qt_(const char *message);
void handle_qmn_(const char *message);
void skip_start_(const char *message, size_t *pos);
void skip_field_(const char *message, size_t *pos);
std::string read_field_(const char *message, size_t *pos);
void read_float_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor);
void read_int_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor);
void publish_binary_sensor_(esphome::optional<bool> b, binary_sensor::BinarySensor *sensor);
esphome::optional<bool> get_bit_(std::string bits, uint8_t bit_pos);
void queue_command_(const char *command, uint8_t length);
std::string command_queue_[COMMAND_QUEUE_LENGTH];
uint8_t command_queue_position_ = 0;
uint8_t read_buffer_[PIPSOLAR_READ_BUFFER_LENGTH];
@@ -237,10 +213,11 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
STATE_POLL_COMPLETE = 3,
STATE_COMMAND_COMPLETE = 4,
STATE_POLL_CHECKED = 5,
STATE_POLL_DECODED = 6,
};
uint8_t last_polling_command_ = 0;
PollingCommand enabled_polling_commands_[POLLING_COMMANDS_MAX];
PollingCommand used_polling_commands_[POLLING_COMMANDS_MAX];
};
} // namespace pipsolar

View File

@@ -11,11 +11,11 @@ void PipsolarSwitch::dump_config() { LOG_SWITCH("", "Pipsolar Switch", this); }
void PipsolarSwitch::write_state(bool state) {
if (state) {
if (!this->on_command_.empty()) {
this->parent_->queue_command(this->on_command_);
this->parent_->switch_command(this->on_command_);
}
} else {
if (!this->off_command_.empty()) {
this->parent_->queue_command(this->off_command_);
this->parent_->switch_command(this->off_command_);
}
}
}

View File

@@ -261,12 +261,9 @@ ThrottleAverageFilter = sensor_ns.class_("ThrottleAverageFilter", Filter, cg.Com
LambdaFilter = sensor_ns.class_("LambdaFilter", Filter)
OffsetFilter = sensor_ns.class_("OffsetFilter", Filter)
MultiplyFilter = sensor_ns.class_("MultiplyFilter", Filter)
ValueListFilter = sensor_ns.class_("ValueListFilter", Filter)
FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", ValueListFilter)
FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", Filter)
ThrottleFilter = sensor_ns.class_("ThrottleFilter", Filter)
ThrottleWithPriorityFilter = sensor_ns.class_(
"ThrottleWithPriorityFilter", ValueListFilter
)
ThrottleWithPriorityFilter = sensor_ns.class_("ThrottleWithPriorityFilter", Filter)
TimeoutFilter = sensor_ns.class_("TimeoutFilter", Filter, cg.Component)
DebounceFilter = sensor_ns.class_("DebounceFilter", Filter, cg.Component)
HeartbeatFilter = sensor_ns.class_("HeartbeatFilter", Filter, cg.Component)

View File

@@ -228,40 +228,27 @@ MultiplyFilter::MultiplyFilter(TemplatableValue<float> multiplier) : multiplier_
optional<float> MultiplyFilter::new_value(float value) { return value * this->multiplier_.value(); }
// ValueListFilter (base class)
ValueListFilter::ValueListFilter(std::initializer_list<TemplatableValue<float>> values) : values_(values) {}
bool ValueListFilter::value_matches_any_(float sensor_value) {
int8_t accuracy = this->parent_->get_accuracy_decimals();
float accuracy_mult = powf(10.0f, accuracy);
float rounded_sensor = roundf(accuracy_mult * sensor_value);
for (auto &filter_value : this->values_) {
float fv = filter_value.value();
// Handle NaN comparison
if (std::isnan(fv)) {
if (std::isnan(sensor_value))
return true;
continue;
}
// Compare rounded values
if (roundf(accuracy_mult * fv) == rounded_sensor)
return true;
}
return false;
}
// FilterOutValueFilter
FilterOutValueFilter::FilterOutValueFilter(std::initializer_list<TemplatableValue<float>> values_to_filter_out)
: ValueListFilter(values_to_filter_out) {}
FilterOutValueFilter::FilterOutValueFilter(std::vector<TemplatableValue<float>> values_to_filter_out)
: values_to_filter_out_(std::move(values_to_filter_out)) {}
optional<float> FilterOutValueFilter::new_value(float value) {
if (this->value_matches_any_(value))
return {}; // Filter out
return value; // Pass through
int8_t accuracy = this->parent_->get_accuracy_decimals();
float accuracy_mult = powf(10.0f, accuracy);
for (auto filter_value : this->values_to_filter_out_) {
if (std::isnan(filter_value.value())) {
if (std::isnan(value)) {
return {};
}
continue;
}
float rounded_filter_out = roundf(accuracy_mult * filter_value.value());
float rounded_value = roundf(accuracy_mult * value);
if (rounded_filter_out == rounded_value) {
return {};
}
}
return value;
}
// ThrottleFilter
@@ -276,15 +263,33 @@ optional<float> ThrottleFilter::new_value(float value) {
}
// ThrottleWithPriorityFilter
ThrottleWithPriorityFilter::ThrottleWithPriorityFilter(
uint32_t min_time_between_inputs, std::initializer_list<TemplatableValue<float>> prioritized_values)
: ValueListFilter(prioritized_values), min_time_between_inputs_(min_time_between_inputs) {}
ThrottleWithPriorityFilter::ThrottleWithPriorityFilter(uint32_t min_time_between_inputs,
std::vector<TemplatableValue<float>> prioritized_values)
: min_time_between_inputs_(min_time_between_inputs), prioritized_values_(std::move(prioritized_values)) {}
optional<float> ThrottleWithPriorityFilter::new_value(float value) {
bool is_prioritized_value = false;
int8_t accuracy = this->parent_->get_accuracy_decimals();
float accuracy_mult = powf(10.0f, accuracy);
const uint32_t now = App.get_loop_component_start_time();
// Allow value through if: no previous input, time expired, or is prioritized
if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ ||
this->value_matches_any_(value)) {
// First, determine if the new value is one of the prioritized values
for (auto prioritized_value : this->prioritized_values_) {
if (std::isnan(prioritized_value.value())) {
if (std::isnan(value)) {
is_prioritized_value = true;
break;
}
continue;
}
float rounded_prioritized_value = roundf(accuracy_mult * prioritized_value.value());
float rounded_value = roundf(accuracy_mult * value);
if (rounded_prioritized_value == rounded_value) {
is_prioritized_value = true;
break;
}
}
// Finally, determine if the new value should be throttled and pass it through if not
if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ || is_prioritized_value) {
this->last_input_ = now;
return value;
}

View File

@@ -317,28 +317,15 @@ class MultiplyFilter : public Filter {
TemplatableValue<float> multiplier_;
};
/** Base class for filters that compare sensor values against a list of configured values.
*
* This base class provides common functionality for filters that need to check if a sensor
* value matches any value in a configured list, with proper handling of NaN values and
* accuracy-based rounding for comparisons.
*/
class ValueListFilter : public Filter {
protected:
explicit ValueListFilter(std::initializer_list<TemplatableValue<float>> values);
/// Check if sensor value matches any configured value (with accuracy rounding)
bool value_matches_any_(float sensor_value);
FixedVector<TemplatableValue<float>> values_;
};
/// A simple filter that only forwards the filter chain if it doesn't receive `value_to_filter_out`.
class FilterOutValueFilter : public ValueListFilter {
class FilterOutValueFilter : public Filter {
public:
explicit FilterOutValueFilter(std::initializer_list<TemplatableValue<float>> values_to_filter_out);
explicit FilterOutValueFilter(std::vector<TemplatableValue<float>> values_to_filter_out);
optional<float> new_value(float value) override;
protected:
std::vector<TemplatableValue<float>> values_to_filter_out_;
};
class ThrottleFilter : public Filter {
@@ -353,16 +340,17 @@ class ThrottleFilter : public Filter {
};
/// Same as 'throttle' but will immediately publish values contained in `value_to_prioritize`.
class ThrottleWithPriorityFilter : public ValueListFilter {
class ThrottleWithPriorityFilter : public Filter {
public:
explicit ThrottleWithPriorityFilter(uint32_t min_time_between_inputs,
std::initializer_list<TemplatableValue<float>> prioritized_values);
std::vector<TemplatableValue<float>> prioritized_values);
optional<float> new_value(float value) override;
protected:
uint32_t last_input_{0};
uint32_t min_time_between_inputs_;
std::vector<TemplatableValue<float>> prioritized_values_;
};
class TimeoutFilter : public Filter, public Component {

View File

@@ -107,12 +107,12 @@ void Sensor::add_filter(Filter *filter) {
}
filter->initialize(this, nullptr);
}
void Sensor::add_filters(std::initializer_list<Filter *> filters) {
void Sensor::add_filters(const std::vector<Filter *> &filters) {
for (Filter *filter : filters) {
this->add_filter(filter);
}
}
void Sensor::set_filters(std::initializer_list<Filter *> filters) {
void Sensor::set_filters(const std::vector<Filter *> &filters) {
this->clear_filters();
this->add_filters(filters);
}

View File

@@ -6,7 +6,7 @@
#include "esphome/core/log.h"
#include "esphome/components/sensor/filter.h"
#include <initializer_list>
#include <vector>
#include <memory>
namespace esphome {
@@ -77,10 +77,10 @@ class Sensor : public EntityBase, public EntityBase_DeviceClass, public EntityBa
* SlidingWindowMovingAverageFilter(15, 15), // average over last 15 values
* });
*/
void add_filters(std::initializer_list<Filter *> filters);
void add_filters(const std::vector<Filter *> &filters);
/// Clear the filters and replace them by filters.
void set_filters(std::initializer_list<Filter *> filters);
void set_filters(const std::vector<Filter *> &filters);
/// Clear the entire filter chain.
void clear_filters();

View File

@@ -1,5 +1,3 @@
from collections.abc import Callable, MutableMapping
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.core import CORE
@@ -11,32 +9,6 @@ IMPLEMENTATION_LWIP_TCP = "lwip_tcp"
IMPLEMENTATION_LWIP_SOCKETS = "lwip_sockets"
IMPLEMENTATION_BSD_SOCKETS = "bsd_sockets"
# Socket tracking infrastructure
# Components register their socket needs and platforms read this to configure appropriately
KEY_SOCKET_CONSUMERS = "socket_consumers"
def consume_sockets(
value: int, consumer: str
) -> Callable[[MutableMapping], MutableMapping]:
"""Register socket usage for a component.
Args:
value: Number of sockets needed by the component
consumer: Name of the component consuming the sockets
Returns:
A validator function that records the socket usage
"""
def _consume_sockets(config: MutableMapping) -> MutableMapping:
consumers: dict[str, int] = CORE.data.setdefault(KEY_SOCKET_CONSUMERS, {})
consumers[consumer] = consumers.get(consumer, 0) + value
return config
return _consume_sockets
CONFIG_SCHEMA = cv.Schema(
{
cv.SplitDefault(

View File

@@ -110,28 +110,17 @@ def validate_mapping(value):
"substitute", SubstituteFilter, cv.ensure_list(validate_mapping)
)
async def substitute_filter_to_code(config, filter_id):
substitutions = [
cg.StructInitializer(
cg.MockObj("Substitution", "esphome::text_sensor::"),
("from", conf[CONF_FROM]),
("to", conf[CONF_TO]),
)
for conf in config
]
return cg.new_Pvariable(filter_id, substitutions)
from_strings = [conf[CONF_FROM] for conf in config]
to_strings = [conf[CONF_TO] for conf in config]
return cg.new_Pvariable(filter_id, from_strings, to_strings)
@FILTER_REGISTRY.register("map", MapFilter, cv.ensure_list(validate_mapping))
async def map_filter_to_code(config, filter_id):
mappings = [
cg.StructInitializer(
cg.MockObj("Substitution", "esphome::text_sensor::"),
("from", conf[CONF_FROM]),
("to", conf[CONF_TO]),
)
for conf in config
]
return cg.new_Pvariable(filter_id, mappings)
map_ = cg.std_ns.class_("map").template(cg.std_string, cg.std_string)
return cg.new_Pvariable(
filter_id, map_([(item[CONF_FROM], item[CONF_TO]) for item in config])
)
validate_device_class = cv.one_of(*DEVICE_CLASSES, lower=True, space="_")

View File

@@ -62,26 +62,19 @@ optional<std::string> AppendFilter::new_value(std::string value) { return value
optional<std::string> PrependFilter::new_value(std::string value) { return this->prefix_ + value; }
// Substitute
SubstituteFilter::SubstituteFilter(std::initializer_list<Substitution> substitutions) : substitutions_(substitutions) {}
optional<std::string> SubstituteFilter::new_value(std::string value) {
std::size_t pos;
for (const auto &sub : this->substitutions_) {
while ((pos = value.find(sub.from)) != std::string::npos)
value.replace(pos, sub.from.size(), sub.to);
for (size_t i = 0; i < this->from_strings_.size(); i++) {
while ((pos = value.find(this->from_strings_[i])) != std::string::npos)
value.replace(pos, this->from_strings_[i].size(), this->to_strings_[i]);
}
return value;
}
// Map
MapFilter::MapFilter(std::initializer_list<Substitution> mappings) : mappings_(mappings) {}
optional<std::string> MapFilter::new_value(std::string value) {
for (const auto &mapping : this->mappings_) {
if (mapping.from == value)
return mapping.to;
}
return value; // Pass through if no match
auto item = mappings_.find(value);
return item == mappings_.end() ? value : item->second;
}
} // namespace text_sensor

View File

@@ -2,6 +2,10 @@
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include <queue>
#include <utility>
#include <map>
#include <vector>
namespace esphome {
namespace text_sensor {
@@ -94,52 +98,26 @@ class PrependFilter : public Filter {
std::string prefix_;
};
struct Substitution {
std::string from;
std::string to;
};
/// A simple filter that replaces a substring with another substring
class SubstituteFilter : public Filter {
public:
explicit SubstituteFilter(std::initializer_list<Substitution> substitutions);
SubstituteFilter(std::vector<std::string> from_strings, std::vector<std::string> to_strings)
: from_strings_(std::move(from_strings)), to_strings_(std::move(to_strings)) {}
optional<std::string> new_value(std::string value) override;
protected:
FixedVector<Substitution> substitutions_;
std::vector<std::string> from_strings_;
std::vector<std::string> to_strings_;
};
/** A filter that maps values from one set to another
*
* Uses linear search instead of std::map for typical small datasets (2-20 mappings).
* Linear search on contiguous memory is faster than red-black tree lookups when:
* - Dataset is small (< ~30 items)
* - Memory is contiguous (cache-friendly, better CPU cache utilization)
* - No pointer chasing overhead (tree node traversal)
* - String comparison cost dominates lookup time
*
* Benchmark results (see benchmark_map_filter.cpp):
* - 2 mappings: Linear 1.26x faster than std::map
* - 5 mappings: Linear 2.25x faster than std::map
* - 10 mappings: Linear 1.83x faster than std::map
* - 20 mappings: Linear 1.59x faster than std::map
* - 30 mappings: Linear 1.09x faster than std::map
* - 40 mappings: std::map 1.27x faster than Linear (break-even)
*
* Benefits over std::map:
* - ~2KB smaller flash (no red-black tree code)
* - ~24-32 bytes less RAM per mapping (no tree node overhead)
* - Faster for typical ESPHome usage (2-10 mappings common, 20+ rare)
*
* Break-even point: ~35-40 mappings, but ESPHome configs rarely exceed 20
*/
/// A filter that maps values from one set to another
class MapFilter : public Filter {
public:
explicit MapFilter(std::initializer_list<Substitution> mappings);
MapFilter(std::map<std::string, std::string> mappings) : mappings_(std::move(mappings)) {}
optional<std::string> new_value(std::string value) override;
protected:
FixedVector<Substitution> mappings_;
std::map<std::string, std::string> mappings_;
};
} // namespace text_sensor

View File

@@ -51,12 +51,12 @@ void TextSensor::add_filter(Filter *filter) {
}
filter->initialize(this, nullptr);
}
void TextSensor::add_filters(std::initializer_list<Filter *> filters) {
void TextSensor::add_filters(const std::vector<Filter *> &filters) {
for (Filter *filter : filters) {
this->add_filter(filter);
}
}
void TextSensor::set_filters(std::initializer_list<Filter *> filters) {
void TextSensor::set_filters(const std::vector<Filter *> &filters) {
this->clear_filters();
this->add_filters(filters);
}

View File

@@ -5,7 +5,7 @@
#include "esphome/core/helpers.h"
#include "esphome/components/text_sensor/filter.h"
#include <initializer_list>
#include <vector>
#include <memory>
namespace esphome {
@@ -37,10 +37,10 @@ class TextSensor : public EntityBase, public EntityBase_DeviceClass {
void add_filter(Filter *filter);
/// Add a list of vectors to the back of the filter chain.
void add_filters(std::initializer_list<Filter *> filters);
void add_filters(const std::vector<Filter *> &filters);
/// Clear the filters and replace them by filters.
void set_filters(std::initializer_list<Filter *> filters);
void set_filters(const std::vector<Filter *> &filters);
/// Clear the entire filter chain.
void clear_filters();

View File

@@ -136,18 +136,6 @@ def _final_validate_sorting(config: ConfigType) -> ConfigType:
FINAL_VALIDATE_SCHEMA = _final_validate_sorting
def _consume_web_server_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for web_server component."""
from esphome.components import socket
# Web server needs 1 listening socket + typically 2 concurrent client connections
# (browser makes 2 connections for page + event stream)
sockets_needed = 3
socket.consume_sockets(sockets_needed, "web_server")(config)
return config
sorting_group = {
cv.Required(CONF_ID): cv.declare_id(cg.int_),
cv.Required(CONF_NAME): cv.string,
@@ -217,7 +205,6 @@ CONFIG_SCHEMA = cv.All(
validate_local,
validate_sorting_groups,
validate_ota,
_consume_web_server_sockets,
)

View File

@@ -709,15 +709,6 @@ class EsphomeCore:
def relative_piolibdeps_path(self, *path: str | Path) -> Path:
return self.relative_build_path(".piolibdeps", *path)
@property
def platformio_cache_dir(self) -> str:
"""Get the PlatformIO cache directory path."""
# Check if running in Docker/HA addon with custom cache dir
if (cache_dir := os.environ.get("PLATFORMIO_CACHE_DIR")) and cache_dir.strip():
return cache_dir
# Default PlatformIO cache location
return os.path.expanduser("~/.platformio/.cache")
@property
def firmware_bin(self) -> Path:
if self.is_libretiny:

View File

@@ -243,7 +243,7 @@ template<typename... Ts> class ActionList {
}
this->actions_end_ = action;
}
void add_actions(const std::initializer_list<Action<Ts...> *> &actions) {
void add_actions(const std::vector<Action<Ts...> *> &actions) {
for (auto *action : actions) {
this->add_action(action);
}
@@ -286,7 +286,7 @@ template<typename... Ts> class Automation {
explicit Automation(Trigger<Ts...> *trigger) : trigger_(trigger) { this->trigger_->set_automation_parent(this); }
void add_action(Action<Ts...> *action) { this->actions_.add_action(action); }
void add_actions(const std::initializer_list<Action<Ts...> *> &actions) { this->actions_.add_actions(actions); }
void add_actions(const std::vector<Action<Ts...> *> &actions) { this->actions_.add_actions(actions); }
void stop() { this->actions_.stop(); }

View File

@@ -194,12 +194,12 @@ template<typename... Ts> class IfAction : public Action<Ts...> {
public:
explicit IfAction(Condition<Ts...> *condition) : condition_(condition) {}
void add_then(const std::initializer_list<Action<Ts...> *> &actions) {
void add_then(const std::vector<Action<Ts...> *> &actions) {
this->then_.add_actions(actions);
this->then_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
}
void add_else(const std::initializer_list<Action<Ts...> *> &actions) {
void add_else(const std::vector<Action<Ts...> *> &actions) {
this->else_.add_actions(actions);
this->else_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
}
@@ -240,7 +240,7 @@ template<typename... Ts> class WhileAction : public Action<Ts...> {
public:
WhileAction(Condition<Ts...> *condition) : condition_(condition) {}
void add_then(const std::initializer_list<Action<Ts...> *> &actions) {
void add_then(const std::vector<Action<Ts...> *> &actions) {
this->then_.add_actions(actions);
this->then_.add_action(new LambdaAction<Ts...>([this](Ts... x) {
if (this->num_running_ > 0 && this->condition_->check_tuple(this->var_)) {
@@ -287,7 +287,7 @@ template<typename... Ts> class RepeatAction : public Action<Ts...> {
public:
TEMPLATABLE_VALUE(uint32_t, count)
void add_then(const std::initializer_list<Action<uint32_t, Ts...> *> &actions) {
void add_then(const std::vector<Action<uint32_t, Ts...> *> &actions) {
this->then_.add_actions(actions);
this->then_.add_action(new LambdaAction<uint32_t, Ts...>([this](uint32_t iteration, Ts... x) {
iteration++;

View File

@@ -318,7 +318,8 @@ def preload_core_config(config, result) -> str:
target_platforms = []
for domain in config:
if domain.startswith("."):
# Skip package keys which may contain periods (e.g., "ratgdo.esphome")
if "." in domain:
continue
if _is_target_platform(domain):
target_platforms += [domain]

View File

@@ -414,8 +414,10 @@ int8_t step_to_accuracy_decimals(float step) {
return str.length() - dot_pos - 1;
}
// Store BASE64 characters as array - automatically placed in flash/ROM on embedded platforms
static const char BASE64_CHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
// Use C-style string constant to store in ROM instead of RAM (saves 24 bytes)
static constexpr const char *BASE64_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"abcdefghijklmnopqrstuvwxyz"
"0123456789+/";
// Helper function to find the index of a base64 character in the lookup table.
// Returns the character's position (0-63) if found, or 0 if not found.
@@ -425,8 +427,8 @@ static const char BASE64_CHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqr
// stops processing at the first invalid character due to the is_base64() check in its
// while loop condition, making this edge case harmless in practice.
static inline uint8_t base64_find_char(char c) {
const void *ptr = memchr(BASE64_CHARS, c, sizeof(BASE64_CHARS));
return ptr ? (static_cast<const char *>(ptr) - BASE64_CHARS) : 0;
const char *pos = strchr(BASE64_CHARS, c);
return pos ? (pos - BASE64_CHARS) : 0;
}
static inline bool is_base64(char c) { return (isalnum(c) || (c == '+') || (c == '/')); }

View File

@@ -143,9 +143,6 @@ template<typename T, size_t N> class StaticVector {
size_t size() const { return count_; }
bool empty() const { return count_ == 0; }
// Direct access to size counter for efficient in-place construction
size_t &count() { return count_; }
T &operator[](size_t i) { return data_[i]; }
const T &operator[](size_t i) const { return data_[i]; }

View File

@@ -95,9 +95,10 @@ class Scheduler {
} name_;
uint32_t interval;
// Split time to handle millis() rollover. The scheduler combines the 32-bit millis()
// with a 16-bit rollover counter to create a 48-bit time space (stored as 64-bit
// for compatibility). With 49.7 days per 32-bit rollover, the 16-bit counter
// supports 49.7 days × 65536 = ~8900 years. This ensures correct scheduling
// with a 16-bit rollover counter to create a 48-bit time space (using 32+16 bits).
// This is intentionally limited to 48 bits, not stored as a full 64-bit value.
// With 49.7 days per 32-bit rollover, the 16-bit counter supports
// 49.7 days × 65536 = ~8900 years. This ensures correct scheduling
// even when devices run for months. Split into two fields for better memory
// alignment on 32-bit systems.
uint32_t next_execution_low_; // Lower 32 bits of execution time (millis value)

View File

@@ -145,16 +145,7 @@ def run_compile(config, verbose):
args = []
if CONF_COMPILE_PROCESS_LIMIT in config[CONF_ESPHOME]:
args += [f"-j{config[CONF_ESPHOME][CONF_COMPILE_PROCESS_LIMIT]}"]
result = run_platformio_cli_run(config, verbose, *args)
# Run memory analysis if enabled
if config.get(CONF_ESPHOME, {}).get("analyze_memory", False):
try:
analyze_memory_usage(config)
except Exception as e:
_LOGGER.warning("Failed to analyze memory usage: %s", e)
return result
return run_platformio_cli_run(config, verbose, *args)
def _run_idedata(config):
@@ -403,74 +394,3 @@ class IDEData:
if path.endswith(".exe")
else f"{path[:-3]}readelf"
)
def analyze_memory_usage(config: dict[str, Any]) -> None:
"""Analyze memory usage by component after compilation."""
# Lazy import to avoid overhead when not needed
from esphome.analyze_memory.cli import MemoryAnalyzerCLI
from esphome.analyze_memory.helpers import get_esphome_components
idedata = get_idedata(config)
# Get paths to tools
elf_path = idedata.firmware_elf_path
objdump_path = idedata.objdump_path
readelf_path = idedata.readelf_path
# Debug logging
_LOGGER.debug("ELF path from idedata: %s", elf_path)
# Check if file exists
if not Path(elf_path).exists():
# Try alternate path
alt_path = Path(CORE.relative_build_path(".pioenvs", CORE.name, "firmware.elf"))
if alt_path.exists():
elf_path = str(alt_path)
_LOGGER.debug("Using alternate ELF path: %s", elf_path)
else:
_LOGGER.warning("ELF file not found at %s or %s", elf_path, alt_path)
return
# Extract external components from config
external_components = set()
# Get the list of built-in ESPHome components
builtin_components = get_esphome_components()
# Special non-component keys that appear in configs
NON_COMPONENT_KEYS = {
CONF_ESPHOME,
"substitutions",
"packages",
"globals",
"<<",
}
# Check all top-level keys in config
for key in config:
if key not in builtin_components and key not in NON_COMPONENT_KEYS:
# This is an external component
external_components.add(key)
_LOGGER.debug("Detected external components: %s", external_components)
# Create analyzer and run analysis
analyzer = MemoryAnalyzerCLI(
elf_path, objdump_path, readelf_path, external_components
)
analyzer.analyze()
# Generate and print report
report = analyzer.generate_report()
_LOGGER.info("\n%s", report)
# Optionally save to file
if config.get(CONF_ESPHOME, {}).get("memory_report_file"):
report_file = Path(config[CONF_ESPHOME]["memory_report_file"])
if report_file.suffix == ".json":
report_file.write_text(analyzer.to_json())
_LOGGER.info("Memory report saved to %s", report_file)
else:
report_file.write_text(report)
_LOGGER.info("Memory report saved to %s", report_file)

View File

@@ -1,4 +1,4 @@
pylint==4.0.2
pylint==4.0.1
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
ruff==0.14.1 # also change in .pre-commit-config.yaml when updating
pyupgrade==3.21.0 # also change in .pre-commit-config.yaml when updating

View File

@@ -61,11 +61,6 @@ from helpers import (
root_path,
)
# Threshold for splitting clang-tidy jobs
# For small PRs (< 65 files), use nosplit for faster CI
# For large PRs (>= 65 files), use split for better parallelization
CLANG_TIDY_SPLIT_THRESHOLD = 65
class Platform(StrEnum):
"""Platform identifiers for memory impact analysis."""
@@ -83,16 +78,11 @@ MEMORY_IMPACT_FALLBACK_COMPONENT = "api" # Representative component for core ch
MEMORY_IMPACT_FALLBACK_PLATFORM = Platform.ESP32_IDF # Most representative platform
# Platform preference order for memory impact analysis
# This order is used when no platform-specific hints are detected from filenames
# Priority rationale:
# 1. ESP32-C6 IDF - Newest platform, supports Thread/Zigbee
# 2. ESP8266 Arduino - Most memory constrained (best for detecting memory impact),
# fastest build times, most sensitive to code size changes
# 3. ESP32 IDF - Primary ESP32 platform, most representative of modern ESPHome
# 4-6. Other ESP32 variants - Less commonly used but still supported
# Prefer newer platforms first as they represent the future of ESPHome
# ESP8266 is most constrained but many new features don't support it
MEMORY_IMPACT_PLATFORM_PREFERENCE = [
Platform.ESP32_C6_IDF, # ESP32-C6 IDF (newest, supports Thread/Zigbee)
Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained, fastest builds)
Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained - best for impact analysis)
Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative)
Platform.ESP32_C3_IDF, # ESP32-C3 IDF
Platform.ESP32_S2_IDF, # ESP32-S2 IDF
@@ -220,22 +210,6 @@ def should_run_clang_tidy(branch: str | None = None) -> bool:
return _any_changed_file_endswith(branch, CPP_FILE_EXTENSIONS)
def count_changed_cpp_files(branch: str | None = None) -> int:
"""Count the number of changed C++ files.
This is used to determine whether to split clang-tidy jobs or run them as a single job.
For PRs with < 65 changed C++ files, running a single job is faster than splitting.
Args:
branch: Branch to compare against. If None, uses default.
Returns:
Number of changed C++ files.
"""
files = changed_files(branch)
return sum(1 for file in files if file.endswith(CPP_FILE_EXTENSIONS))
def should_run_clang_format(branch: str | None = None) -> bool:
"""Determine if clang-format should run based on changed files.
@@ -290,91 +264,6 @@ def _component_has_tests(component: str) -> bool:
return bool(get_component_test_files(component))
def _select_platform_by_preference(
platforms: list[Platform] | set[Platform],
) -> Platform:
"""Select the most preferred platform from a list/set based on MEMORY_IMPACT_PLATFORM_PREFERENCE.
Args:
platforms: List or set of platforms to choose from
Returns:
The most preferred platform (earliest in MEMORY_IMPACT_PLATFORM_PREFERENCE)
"""
return min(platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index)
def _select_platform_by_count(
platform_counts: Counter[Platform],
) -> Platform:
"""Select platform by count, using MEMORY_IMPACT_PLATFORM_PREFERENCE as tiebreaker.
Args:
platform_counts: Counter mapping platforms to their counts
Returns:
Platform with highest count, breaking ties by preference order
"""
return min(
platform_counts.keys(),
key=lambda p: (
-platform_counts[p], # Negative to prefer higher counts
MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p),
),
)
def _detect_platform_hint_from_filename(filename: str) -> Platform | None:
"""Detect platform hint from filename patterns.
Detects platform-specific files using patterns like:
- wifi_component_esp_idf.cpp, *_idf.h -> ESP32 IDF variants
- wifi_component_esp8266.cpp, *_esp8266.h -> ESP8266_ARD
- *_esp32*.cpp -> ESP32 IDF (generic)
- *_libretiny.cpp, *_retiny.* -> LibreTiny (not in preference list)
- *_pico.cpp, *_rp2040.* -> RP2040 (not in preference list)
Args:
filename: File path to check
Returns:
Platform enum if a specific platform is detected, None otherwise
"""
filename_lower = filename.lower()
# ESP-IDF platforms (check specific variants first)
if "esp_idf" in filename_lower or "_idf" in filename_lower:
# Check for specific ESP32 variants
if "c6" in filename_lower or "esp32c6" in filename_lower:
return Platform.ESP32_C6_IDF
if "c3" in filename_lower or "esp32c3" in filename_lower:
return Platform.ESP32_C3_IDF
if "s2" in filename_lower or "esp32s2" in filename_lower:
return Platform.ESP32_S2_IDF
if "s3" in filename_lower or "esp32s3" in filename_lower:
return Platform.ESP32_S3_IDF
# Default to ESP32 IDF for generic esp_idf files
return Platform.ESP32_IDF
# ESP8266 Arduino
if "esp8266" in filename_lower:
return Platform.ESP8266_ARD
# Generic ESP32 (without _idf suffix, could be Arduino or shared code)
# Prefer IDF as it's the modern platform
if "esp32" in filename_lower:
return Platform.ESP32_IDF
# LibreTiny and RP2040 are not in MEMORY_IMPACT_PLATFORM_PREFERENCE
# so we don't return them as hints
# if "retiny" in filename_lower or "libretiny" in filename_lower:
# return None # No specific LibreTiny platform preference
# if "pico" in filename_lower or "rp2040" in filename_lower:
# return None # No RP2040 platform preference
return None
def detect_memory_impact_config(
branch: str | None = None,
) -> dict[str, Any]:
@@ -384,9 +273,6 @@ def detect_memory_impact_config(
building a merged configuration with all changed components (like
test_build_components.py does) to get comprehensive memory analysis.
When platform-specific files are detected (e.g., wifi_component_esp_idf.cpp),
prefers that platform for testing to ensure the most relevant memory analysis.
For core C++ file changes without component changes, runs a fallback
analysis using a representative component to measure the impact.
@@ -405,10 +291,8 @@ def detect_memory_impact_config(
files = changed_files(branch)
# Find all changed components (excluding core and base bus components)
# Also collect platform hints from platform-specific filenames
changed_component_set: set[str] = set()
has_core_cpp_changes = False
platform_hints: list[Platform] = []
for file in files:
component = get_component_from_path(file)
@@ -416,10 +300,6 @@ def detect_memory_impact_config(
# Skip base bus components as they're used across many builds
if component not in BASE_BUS_COMPONENTS:
changed_component_set.add(component)
# Check if this is a platform-specific file
platform_hint = _detect_platform_hint_from_filename(file)
if platform_hint:
platform_hints.append(platform_hint)
elif file.startswith("esphome/") and file.endswith(CPP_FILE_EXTENSIONS):
# Core ESPHome C++ files changed (not component-specific)
# Only C++ files affect memory usage
@@ -476,42 +356,27 @@ def detect_memory_impact_config(
common_platforms &= platforms
# Select the most preferred platform from the common set
# Priority order:
# 1. Platform hints from filenames (e.g., wifi_component_esp_idf.cpp suggests ESP32_IDF)
# 2. Core changes use fallback platform (most representative of codebase)
# 3. Common platforms supported by all components
# 4. Most commonly supported platform
if platform_hints:
# Use most common platform hint that's also supported by all components
hint_counts = Counter(platform_hints)
# Filter to only hints that are in common_platforms (if any common platforms exist)
valid_hints = (
[h for h in hint_counts if h in common_platforms]
if common_platforms
else list(hint_counts.keys())
)
if valid_hints:
platform = _select_platform_by_count(
Counter({p: hint_counts[p] for p in valid_hints})
)
elif common_platforms:
# Hints exist but none match common platforms, use common platform logic
platform = _select_platform_by_preference(common_platforms)
else:
# Use the most common hint even if it's not in common platforms
platform = _select_platform_by_count(hint_counts)
elif force_fallback_platform:
# Exception: for core changes, use fallback platform (most representative of codebase)
if force_fallback_platform:
platform = MEMORY_IMPACT_FALLBACK_PLATFORM
elif common_platforms:
# Pick the most preferred platform that all components support
platform = _select_platform_by_preference(common_platforms)
platform = min(common_platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index)
else:
# No common platform - pick the most commonly supported platform
# This allows testing components individually even if they can't be merged
# Count how many components support each platform
platform_counts = Counter(
p for platforms in component_platforms_map.values() for p in platforms
)
platform = _select_platform_by_count(platform_counts)
# Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE
platform = max(
platform_counts.keys(),
key=lambda p: (
platform_counts[p],
-MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p),
),
)
# Debug output
print("Memory impact analysis:", file=sys.stderr)
@@ -521,7 +386,6 @@ def detect_memory_impact_config(
f" Component platforms: {dict(sorted(component_platforms_map.items()))}",
file=sys.stderr,
)
print(f" Platform hints from filenames: {platform_hints}", file=sys.stderr)
print(f" Common platforms: {sorted(common_platforms)}", file=sys.stderr)
print(f" Selected platform: {platform}", file=sys.stderr)
@@ -548,7 +412,6 @@ def main() -> None:
run_clang_tidy = should_run_clang_tidy(args.branch)
run_clang_format = should_run_clang_format(args.branch)
run_python_linters = should_run_python_linters(args.branch)
changed_cpp_file_count = count_changed_cpp_files(args.branch)
# Get both directly changed and all changed components (with dependencies) in one call
script_path = Path(__file__).parent / "list-components.py"
@@ -586,19 +449,10 @@ def main() -> None:
# Detect components for memory impact analysis (merged config)
memory_impact = detect_memory_impact_config(args.branch)
if run_clang_tidy:
if changed_cpp_file_count < CLANG_TIDY_SPLIT_THRESHOLD:
clang_tidy_mode = "nosplit"
else:
clang_tidy_mode = "split"
else:
clang_tidy_mode = "disabled"
# Build output
output: dict[str, Any] = {
"integration_tests": run_integration,
"clang_tidy": run_clang_tidy,
"clang_tidy_mode": clang_tidy_mode,
"clang_format": run_clang_format,
"python_linters": run_python_linters,
"changed_components": changed_components,
@@ -608,7 +462,6 @@ def main() -> None:
"component_test_count": len(changed_components_with_tests),
"directly_changed_count": len(directly_changed_with_tests),
"dependency_only_count": len(dependency_only_components),
"changed_cpp_file_count": changed_cpp_file_count,
"memory_impact": memory_impact,
}

View File

@@ -1,5 +1,4 @@
espnow:
id: espnow_component
auto_add_peer: false
channel: 1
peers:
@@ -51,26 +50,3 @@ espnow:
- format_mac_address_pretty(info.src_addr).c_str()
- format_hex_pretty(data, size).c_str()
- info.rx_ctrl->rssi
packet_transport:
- platform: espnow
id: transport1
espnow_id: espnow_component
peer_address: "FF:FF:FF:FF:FF:FF"
encryption:
key: "0123456789abcdef0123456789abcdef"
sensors:
- temp_sensor
providers:
- name: test_provider
encryption:
key: "0123456789abcdef0123456789abcdef"
sensor:
- platform: internal_temperature
id: temp_sensor
- platform: packet_transport
provider: test_provider
remote_id: temp_sensor
id: remote_temp

View File

@@ -1,33 +0,0 @@
json:
interval:
- interval: 60s
then:
- lambda: |-
// Test build_json
std::string json_str = esphome::json::build_json([](JsonObject root) {
root["sensor"] = "temperature";
root["value"] = 23.5;
root["unit"] = "°C";
});
ESP_LOGD("test", "Built JSON: %s", json_str.c_str());
// Test parse_json
bool parse_ok = esphome::json::parse_json(json_str, [](JsonObject root) {
if (root.containsKey("sensor") && root.containsKey("value")) {
const char* sensor = root["sensor"];
float value = root["value"];
ESP_LOGD("test", "Parsed: sensor=%s, value=%.1f", sensor, value);
} else {
ESP_LOGD("test", "Parsed JSON missing required keys");
}
});
ESP_LOGD("test", "Parse result (JSON syntax only): %s", parse_ok ? "success" : "failed");
// Test JsonBuilder class
esphome::json::JsonBuilder builder;
JsonObject obj = builder.root();
obj["test"] = "direct_builder";
obj["count"] = 42;
std::string result = builder.serialize();
ESP_LOGD("test", "JsonBuilder result: %s", result.c_str());

View File

@@ -1 +0,0 @@
<<: !include common.yaml

View File

@@ -1 +0,0 @@
<<: !include common.yaml

View File

@@ -99,77 +99,3 @@ sensor:
window_size: 10
send_every: 10
send_first_at: 1 # Send after first value
# ValueListFilter-based filters tests
# FilterOutValueFilter - single value
- platform: copy
source_id: source_sensor
name: "Filter Out Single Value"
filters:
- filter_out: 42.0 # Should filter out exactly 42.0
# FilterOutValueFilter - multiple values
- platform: copy
source_id: source_sensor
name: "Filter Out Multiple Values"
filters:
- filter_out: [0.0, 42.0, 100.0] # List of values to filter
# FilterOutValueFilter - with NaN
- platform: copy
source_id: source_sensor
name: "Filter Out NaN"
filters:
- filter_out: nan # Filter out NaN values
# FilterOutValueFilter - mixed values with NaN
- platform: copy
source_id: source_sensor
name: "Filter Out Mixed with NaN"
filters:
- filter_out: [nan, 0.0, 42.0]
# ThrottleWithPriorityFilter - single priority value
- platform: copy
source_id: source_sensor
name: "Throttle with Single Priority"
filters:
- throttle_with_priority:
timeout: 1000ms
value: 42.0 # Priority value bypasses throttle
# ThrottleWithPriorityFilter - multiple priority values
- platform: copy
source_id: source_sensor
name: "Throttle with Multiple Priorities"
filters:
- throttle_with_priority:
timeout: 500ms
value: [0.0, 42.0, 100.0] # Multiple priority values
# ThrottleWithPriorityFilter - with NaN priority
- platform: copy
source_id: source_sensor
name: "Throttle with NaN Priority"
filters:
- throttle_with_priority:
timeout: 1000ms
value: nan # NaN as priority value
# Combined filters - FilterOutValueFilter + other filters
- platform: copy
source_id: source_sensor
name: "Filter Out Then Throttle"
filters:
- filter_out: [0.0, 100.0]
- throttle: 500ms
# Combined filters - ThrottleWithPriorityFilter + other filters
- platform: copy
source_id: source_sensor
name: "Throttle Priority Then Scale"
filters:
- throttle_with_priority:
timeout: 1000ms
value: [42.0]
- multiply: 2.0

View File

@@ -1,66 +0,0 @@
text_sensor:
- platform: template
name: "Test Substitute Single"
id: test_substitute_single
filters:
- substitute:
- ERROR -> Error
- platform: template
name: "Test Substitute Multiple"
id: test_substitute_multiple
filters:
- substitute:
- ERROR -> Error
- WARN -> Warning
- INFO -> Information
- DEBUG -> Debug
- platform: template
name: "Test Substitute Chained"
id: test_substitute_chained
filters:
- substitute:
- foo -> bar
- to_upper
- substitute:
- BAR -> baz
- platform: template
name: "Test Map Single"
id: test_map_single
filters:
- map:
- ON -> Active
- platform: template
name: "Test Map Multiple"
id: test_map_multiple
filters:
- map:
- ON -> Active
- OFF -> Inactive
- UNKNOWN -> Error
- IDLE -> Standby
- platform: template
name: "Test Map Passthrough"
id: test_map_passthrough
filters:
- map:
- Good -> Excellent
- Bad -> Poor
- platform: template
name: "Test All Filters"
id: test_all_filters
filters:
- to_upper
- to_lower
- append: " suffix"
- prepend: "prefix "
- substitute:
- prefix -> PREFIX
- suffix -> SUFFIX
- map:
- PREFIX text SUFFIX -> mapped

View File

@@ -1 +0,0 @@
<<: !include common.yaml

View File

@@ -1,332 +0,0 @@
esphome:
name: test-value-list-filters
host:
api:
batch_delay: 0ms # Disable batching to receive all state updates
logger:
level: DEBUG
# Template sensors - one for each test to avoid cross-test interference
sensor:
- platform: template
name: "Source Sensor 1"
id: source_sensor_1
accuracy_decimals: 1
- platform: template
name: "Source Sensor 2"
id: source_sensor_2
accuracy_decimals: 1
- platform: template
name: "Source Sensor 3"
id: source_sensor_3
accuracy_decimals: 1
- platform: template
name: "Source Sensor 4"
id: source_sensor_4
accuracy_decimals: 1
- platform: template
name: "Source Sensor 5"
id: source_sensor_5
accuracy_decimals: 1
- platform: template
name: "Source Sensor 6"
id: source_sensor_6
accuracy_decimals: 2
- platform: template
name: "Source Sensor 7"
id: source_sensor_7
accuracy_decimals: 1
# FilterOutValueFilter - single value
- platform: copy
source_id: source_sensor_1
name: "Filter Out Single"
id: filter_out_single
filters:
- filter_out: 42.0
# FilterOutValueFilter - multiple values
- platform: copy
source_id: source_sensor_2
name: "Filter Out Multiple"
id: filter_out_multiple
filters:
- filter_out: [0.0, 42.0, 100.0]
# FilterOutValueFilter - with NaN
- platform: copy
source_id: source_sensor_1
name: "Filter Out NaN"
id: filter_out_nan
filters:
- filter_out: nan
# ThrottleWithPriorityFilter - single priority value
- platform: copy
source_id: source_sensor_3
name: "Throttle Priority Single"
id: throttle_priority_single
filters:
- throttle_with_priority:
timeout: 200ms
value: 42.0
# ThrottleWithPriorityFilter - multiple priority values
- platform: copy
source_id: source_sensor_4
name: "Throttle Priority Multiple"
id: throttle_priority_multiple
filters:
- throttle_with_priority:
timeout: 200ms
value: [0.0, 42.0, 100.0]
# Edge case: Filter Out NaN explicitly
- platform: copy
source_id: source_sensor_5
name: "Filter Out NaN Test"
id: filter_out_nan_test
filters:
- filter_out: nan
# Edge case: Accuracy decimals - 2 decimals
- platform: copy
source_id: source_sensor_6
name: "Filter Out Accuracy 2"
id: filter_out_accuracy_2
filters:
- filter_out: 42.0
# Edge case: Throttle with NaN priority
- platform: copy
source_id: source_sensor_7
name: "Throttle Priority NaN"
id: throttle_priority_nan
filters:
- throttle_with_priority:
timeout: 200ms
value: nan
# Script to test FilterOutValueFilter
script:
- id: test_filter_out_single
then:
# Should pass through: 1.0, 2.0, 3.0
# Should filter out: 42.0
- sensor.template.publish:
id: source_sensor_1
state: 1.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor_1
state: 42.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_1
state: 2.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor_1
state: 42.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_1
state: 3.0
- id: test_filter_out_multiple
then:
# Should filter out: 0.0, 42.0, 100.0
# Should pass through: 1.0, 2.0, 50.0
- sensor.template.publish:
id: source_sensor_2
state: 0.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 1.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 42.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 2.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 100.0 # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_2
state: 50.0
- id: test_throttle_priority_single
then:
# 42.0 bypasses throttle, other values are throttled
- sensor.template.publish:
id: source_sensor_3
state: 1.0 # First value - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_3
state: 2.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_3
state: 42.0 # Priority - passes immediately
- delay: 50ms
- sensor.template.publish:
id: source_sensor_3
state: 3.0 # Throttled
- delay: 250ms # Wait for throttle to expire
- sensor.template.publish:
id: source_sensor_3
state: 4.0 # Passes after timeout
- id: test_throttle_priority_multiple
then:
# 0.0, 42.0, 100.0 bypass throttle
- sensor.template.publish:
id: source_sensor_4
state: 1.0 # First value - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 2.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 0.0 # Priority - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 3.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 42.0 # Priority - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 4.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_4
state: 100.0 # Priority - passes
- id: test_filter_out_nan
then:
# NaN should be filtered out, regular values pass
- sensor.template.publish:
id: source_sensor_5
state: 1.0 # Pass
- delay: 20ms
- sensor.template.publish:
id: source_sensor_5
state: !lambda 'return NAN;' # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_5
state: 2.0 # Pass
- delay: 20ms
- sensor.template.publish:
id: source_sensor_5
state: !lambda 'return NAN;' # Filtered out
- delay: 20ms
- sensor.template.publish:
id: source_sensor_5
state: 3.0 # Pass
- id: test_filter_out_accuracy_2
then:
# With 2 decimal places, 42.00 filtered, 42.01 and 42.15 pass
- sensor.template.publish:
id: source_sensor_6
state: 42.0 # Filtered (rounds to 42.00)
- delay: 20ms
- sensor.template.publish:
id: source_sensor_6
state: 42.01 # Pass (rounds to 42.01)
- delay: 20ms
- sensor.template.publish:
id: source_sensor_6
state: 42.15 # Pass (rounds to 42.15)
- delay: 20ms
- sensor.template.publish:
id: source_sensor_6
state: 42.0 # Filtered (rounds to 42.00)
- id: test_throttle_priority_nan
then:
# NaN bypasses throttle, regular values throttled
- sensor.template.publish:
id: source_sensor_7
state: 1.0 # First value - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_7
state: 2.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_7
state: !lambda 'return NAN;' # Priority NaN - passes
- delay: 50ms
- sensor.template.publish:
id: source_sensor_7
state: 3.0 # Throttled
- delay: 50ms
- sensor.template.publish:
id: source_sensor_7
state: !lambda 'return NAN;' # Priority NaN - passes
# Buttons to trigger each test
button:
- platform: template
name: "Test Filter Out Single"
id: btn_filter_out_single
on_press:
- script.execute: test_filter_out_single
- platform: template
name: "Test Filter Out Multiple"
id: btn_filter_out_multiple
on_press:
- script.execute: test_filter_out_multiple
- platform: template
name: "Test Throttle Priority Single"
id: btn_throttle_priority_single
on_press:
- script.execute: test_throttle_priority_single
- platform: template
name: "Test Throttle Priority Multiple"
id: btn_throttle_priority_multiple
on_press:
- script.execute: test_throttle_priority_multiple
- platform: template
name: "Test Filter Out NaN"
id: btn_filter_out_nan
on_press:
- script.execute: test_filter_out_nan
- platform: template
name: "Test Filter Out Accuracy 2"
id: btn_filter_out_accuracy_2
on_press:
- script.execute: test_filter_out_accuracy_2
- platform: template
name: "Test Throttle Priority NaN"
id: btn_throttle_priority_nan
on_press:
- script.execute: test_throttle_priority_nan

View File

@@ -1,263 +0,0 @@
"""Test sensor ValueListFilter functionality (FilterOutValueFilter and ThrottleWithPriorityFilter)."""
from __future__ import annotations
import asyncio
import math
from aioesphomeapi import ButtonInfo, EntityState, SensorState
import pytest
from .state_utils import InitialStateHelper, build_key_to_entity_mapping
from .types import APIClientConnectedFactory, RunCompiledFunction
@pytest.mark.asyncio
async def test_sensor_filters_value_list(
yaml_config: str,
run_compiled: RunCompiledFunction,
api_client_connected: APIClientConnectedFactory,
) -> None:
"""Test that ValueListFilter-based filters work correctly."""
loop = asyncio.get_running_loop()
# Track state changes for all sensors
sensor_values: dict[str, list[float]] = {
"filter_out_single": [],
"filter_out_multiple": [],
"throttle_priority_single": [],
"throttle_priority_multiple": [],
"filter_out_nan_test": [],
"filter_out_accuracy_2": [],
"throttle_priority_nan": [],
}
# Futures for each test
filter_out_single_done = loop.create_future()
filter_out_multiple_done = loop.create_future()
throttle_single_done = loop.create_future()
throttle_multiple_done = loop.create_future()
filter_out_nan_done = loop.create_future()
filter_out_accuracy_2_done = loop.create_future()
throttle_nan_done = loop.create_future()
def on_state(state: EntityState) -> None:
"""Track sensor state updates."""
if not isinstance(state, SensorState) or state.missing_state:
return
sensor_name = key_to_sensor.get(state.key)
if sensor_name not in sensor_values:
return
sensor_values[sensor_name].append(state.state)
# Check completion conditions
if (
sensor_name == "filter_out_single"
and len(sensor_values[sensor_name]) == 3
and not filter_out_single_done.done()
):
filter_out_single_done.set_result(True)
elif (
sensor_name == "filter_out_multiple"
and len(sensor_values[sensor_name]) == 3
and not filter_out_multiple_done.done()
):
filter_out_multiple_done.set_result(True)
elif (
sensor_name == "throttle_priority_single"
and len(sensor_values[sensor_name]) == 3
and not throttle_single_done.done()
):
throttle_single_done.set_result(True)
elif (
sensor_name == "throttle_priority_multiple"
and len(sensor_values[sensor_name]) == 4
and not throttle_multiple_done.done()
):
throttle_multiple_done.set_result(True)
elif (
sensor_name == "filter_out_nan_test"
and len(sensor_values[sensor_name]) == 3
and not filter_out_nan_done.done()
):
filter_out_nan_done.set_result(True)
elif (
sensor_name == "filter_out_accuracy_2"
and len(sensor_values[sensor_name]) == 2
and not filter_out_accuracy_2_done.done()
):
filter_out_accuracy_2_done.set_result(True)
elif (
sensor_name == "throttle_priority_nan"
and len(sensor_values[sensor_name]) == 3
and not throttle_nan_done.done()
):
throttle_nan_done.set_result(True)
async with (
run_compiled(yaml_config),
api_client_connected() as client,
):
# Get entities and build key mapping
entities, _ = await client.list_entities_services()
key_to_sensor = build_key_to_entity_mapping(
entities,
{
"filter_out_single": "Filter Out Single",
"filter_out_multiple": "Filter Out Multiple",
"throttle_priority_single": "Throttle Priority Single",
"throttle_priority_multiple": "Throttle Priority Multiple",
"filter_out_nan_test": "Filter Out NaN Test",
"filter_out_accuracy_2": "Filter Out Accuracy 2",
"throttle_priority_nan": "Throttle Priority NaN",
},
)
# Set up initial state helper with all entities
initial_state_helper = InitialStateHelper(entities)
# Subscribe to state changes with wrapper
client.subscribe_states(initial_state_helper.on_state_wrapper(on_state))
# Wait for initial states
await initial_state_helper.wait_for_initial_states()
# Find all buttons
button_name_map = {
"Test Filter Out Single": "filter_out_single",
"Test Filter Out Multiple": "filter_out_multiple",
"Test Throttle Priority Single": "throttle_priority_single",
"Test Throttle Priority Multiple": "throttle_priority_multiple",
"Test Filter Out NaN": "filter_out_nan",
"Test Filter Out Accuracy 2": "filter_out_accuracy_2",
"Test Throttle Priority NaN": "throttle_priority_nan",
}
buttons = {}
for entity in entities:
if isinstance(entity, ButtonInfo) and entity.name in button_name_map:
buttons[button_name_map[entity.name]] = entity.key
assert len(buttons) == 7, f"Expected 7 buttons, found {len(buttons)}"
# Test 1: FilterOutValueFilter - single value
sensor_values["filter_out_single"].clear()
client.button_command(buttons["filter_out_single"])
try:
await asyncio.wait_for(filter_out_single_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 1 timed out. Values: {sensor_values['filter_out_single']}"
)
expected = [1.0, 2.0, 3.0]
assert sensor_values["filter_out_single"] == pytest.approx(expected), (
f"Test 1 failed: expected {expected}, got {sensor_values['filter_out_single']}"
)
# Test 2: FilterOutValueFilter - multiple values
sensor_values["filter_out_multiple"].clear()
filter_out_multiple_done = loop.create_future()
client.button_command(buttons["filter_out_multiple"])
try:
await asyncio.wait_for(filter_out_multiple_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 2 timed out. Values: {sensor_values['filter_out_multiple']}"
)
expected = [1.0, 2.0, 50.0]
assert sensor_values["filter_out_multiple"] == pytest.approx(expected), (
f"Test 2 failed: expected {expected}, got {sensor_values['filter_out_multiple']}"
)
# Test 3: ThrottleWithPriorityFilter - single priority
sensor_values["throttle_priority_single"].clear()
throttle_single_done = loop.create_future()
client.button_command(buttons["throttle_priority_single"])
try:
await asyncio.wait_for(throttle_single_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 3 timed out. Values: {sensor_values['throttle_priority_single']}"
)
expected = [1.0, 42.0, 4.0]
assert sensor_values["throttle_priority_single"] == pytest.approx(expected), (
f"Test 3 failed: expected {expected}, got {sensor_values['throttle_priority_single']}"
)
# Test 4: ThrottleWithPriorityFilter - multiple priorities
sensor_values["throttle_priority_multiple"].clear()
throttle_multiple_done = loop.create_future()
client.button_command(buttons["throttle_priority_multiple"])
try:
await asyncio.wait_for(throttle_multiple_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 4 timed out. Values: {sensor_values['throttle_priority_multiple']}"
)
expected = [1.0, 0.0, 42.0, 100.0]
assert sensor_values["throttle_priority_multiple"] == pytest.approx(expected), (
f"Test 4 failed: expected {expected}, got {sensor_values['throttle_priority_multiple']}"
)
# Test 5: FilterOutValueFilter - NaN handling
sensor_values["filter_out_nan_test"].clear()
filter_out_nan_done = loop.create_future()
client.button_command(buttons["filter_out_nan"])
try:
await asyncio.wait_for(filter_out_nan_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 5 timed out. Values: {sensor_values['filter_out_nan_test']}"
)
expected = [1.0, 2.0, 3.0]
assert sensor_values["filter_out_nan_test"] == pytest.approx(expected), (
f"Test 5 failed: expected {expected}, got {sensor_values['filter_out_nan_test']}"
)
# Test 6: FilterOutValueFilter - Accuracy decimals (2)
sensor_values["filter_out_accuracy_2"].clear()
filter_out_accuracy_2_done = loop.create_future()
client.button_command(buttons["filter_out_accuracy_2"])
try:
await asyncio.wait_for(filter_out_accuracy_2_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 6 timed out. Values: {sensor_values['filter_out_accuracy_2']}"
)
expected = [42.01, 42.15]
assert sensor_values["filter_out_accuracy_2"] == pytest.approx(expected), (
f"Test 6 failed: expected {expected}, got {sensor_values['filter_out_accuracy_2']}"
)
# Test 7: ThrottleWithPriorityFilter - NaN priority
sensor_values["throttle_priority_nan"].clear()
throttle_nan_done = loop.create_future()
client.button_command(buttons["throttle_priority_nan"])
try:
await asyncio.wait_for(throttle_nan_done, timeout=2.0)
except TimeoutError:
pytest.fail(
f"Test 7 timed out. Values: {sensor_values['throttle_priority_nan']}"
)
# First value (1.0) + two NaN priority values
# NaN values will be compared using math.isnan
assert len(sensor_values["throttle_priority_nan"]) == 3, (
f"Test 7 failed: expected 3 values, got {len(sensor_values['throttle_priority_nan'])}"
)
assert sensor_values["throttle_priority_nan"][0] == pytest.approx(1.0), (
f"Test 7 failed: first value should be 1.0, got {sensor_values['throttle_priority_nan'][0]}"
)
assert math.isnan(sensor_values["throttle_priority_nan"][1]), (
f"Test 7 failed: second value should be NaN, got {sensor_values['throttle_priority_nan'][1]}"
)
assert math.isnan(sensor_values["throttle_priority_nan"][2]), (
f"Test 7 failed: third value should be NaN, got {sensor_values['throttle_priority_nan'][2]}"
)

View File

@@ -107,7 +107,6 @@ def test_main_all_tests_should_run(
assert output["integration_tests"] is True
assert output["clang_tidy"] is True
assert output["clang_tidy_mode"] in ["nosplit", "split"]
assert output["clang_format"] is True
assert output["python_linters"] is True
assert output["changed_components"] == ["wifi", "api", "sensor"]
@@ -118,9 +117,6 @@ def test_main_all_tests_should_run(
assert output["component_test_count"] == len(
output["changed_components_with_tests"]
)
# changed_cpp_file_count should be present
assert "changed_cpp_file_count" in output
assert isinstance(output["changed_cpp_file_count"], int)
# memory_impact should be present
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false" # No files changed
@@ -160,14 +156,11 @@ def test_main_no_tests_should_run(
assert output["integration_tests"] is False
assert output["clang_tidy"] is False
assert output["clang_tidy_mode"] == "disabled"
assert output["clang_format"] is False
assert output["python_linters"] is False
assert output["changed_components"] == []
assert output["changed_components_with_tests"] == []
assert output["component_test_count"] == 0
# changed_cpp_file_count should be 0
assert output["changed_cpp_file_count"] == 0
# memory_impact should be present
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false"
@@ -246,7 +239,6 @@ def test_main_with_branch_argument(
assert output["integration_tests"] is False
assert output["clang_tidy"] is True
assert output["clang_tidy_mode"] in ["nosplit", "split"]
assert output["clang_format"] is False
assert output["python_linters"] is True
assert output["changed_components"] == ["mqtt"]
@@ -257,9 +249,6 @@ def test_main_with_branch_argument(
assert output["component_test_count"] == len(
output["changed_components_with_tests"]
)
# changed_cpp_file_count should be present
assert "changed_cpp_file_count" in output
assert isinstance(output["changed_cpp_file_count"], int)
# memory_impact should be present
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false"
@@ -444,40 +433,6 @@ def test_should_run_clang_format_with_branch() -> None:
mock_changed.assert_called_once_with("release")
@pytest.mark.parametrize(
("changed_files", "expected_count"),
[
(["esphome/core.cpp"], 1),
(["esphome/core.h"], 1),
(["test.hpp"], 1),
(["test.cc"], 1),
(["test.cxx"], 1),
(["test.c"], 1),
(["test.tcc"], 1),
(["esphome/core.cpp", "esphome/core.h"], 2),
(["esphome/core.cpp", "esphome/core.h", "test.cc"], 3),
(["README.md"], 0),
(["esphome/config.py"], 0),
(["README.md", "esphome/config.py"], 0),
(["esphome/core.cpp", "README.md", "esphome/config.py"], 1),
([], 0),
],
)
def test_count_changed_cpp_files(changed_files: list[str], expected_count: int) -> None:
"""Test count_changed_cpp_files function."""
with patch.object(determine_jobs, "changed_files", return_value=changed_files):
result = determine_jobs.count_changed_cpp_files()
assert result == expected_count
def test_count_changed_cpp_files_with_branch() -> None:
"""Test count_changed_cpp_files with branch argument."""
with patch.object(determine_jobs, "changed_files") as mock_changed:
mock_changed.return_value = []
determine_jobs.count_changed_cpp_files("release")
mock_changed.assert_called_once_with("release")
def test_main_filters_components_without_tests(
mock_should_run_integration_tests: Mock,
mock_should_run_clang_tidy: Mock,
@@ -546,9 +501,6 @@ def test_main_filters_components_without_tests(
assert set(output["changed_components_with_tests"]) == {"wifi", "sensor"}
# component_test_count should be based on components with tests
assert output["component_test_count"] == 2
# changed_cpp_file_count should be present
assert "changed_cpp_file_count" in output
assert isinstance(output["changed_cpp_file_count"], int)
# memory_impact should be present
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false"

View File

@@ -3,7 +3,7 @@ esphome:
friendly_name: $component_name
esp8266:
board: d1_mini_pro
board: d1_mini
logger:
level: VERY_VERBOSE

View File

@@ -476,6 +476,55 @@ def test_preload_core_config_multiple_platforms(setup_core: Path) -> None:
preload_core_config(config, result)
def test_preload_core_config_skips_package_keys_with_periods(setup_core: Path) -> None:
"""Test preload_core_config skips package keys containing periods.
Package keys can contain periods (e.g., "ratgdo.esphome") and should be
skipped when searching for target platforms to avoid triggering the
assertion in get_component() that component names cannot contain periods.
Regression test for: https://github.com/esphome/esphome/issues/11182
"""
config = {
CONF_ESPHOME: {
CONF_NAME: "test_device",
},
"esp32": {},
# Package key with period should be ignored
"ratgdo.esphome": "github://ratgdo/esphome-ratgdo/v32disco_secplusv1.yaml",
}
result = {}
# Should not raise AssertionError from get_component()
platform = preload_core_config(config, result)
assert platform == "esp32"
assert CORE.name == "test_device"
def test_preload_core_config_skips_keys_starting_with_period(setup_core: Path) -> None:
"""Test preload_core_config skips keys starting with period.
Keys starting with "." are special ESPHome internal keys and should be
skipped when searching for target platforms.
"""
config = {
CONF_ESPHOME: {
CONF_NAME: "test_device",
},
"esp8266": {},
# Internal key starting with period should be ignored
".platformio_options": {"board_build.flash_mode": "dout"},
}
result = {}
# Should not raise any errors
platform = preload_core_config(config, result)
assert platform == "esp8266"
assert CORE.name == "test_device"
def test_include_file_header(tmp_path: Path, mock_copy_file_if_changed: Mock) -> None:
"""Test include_file adds include statement for header files."""
src_file = tmp_path / "source.h"

View File

@@ -668,45 +668,3 @@ class TestEsphomeCore:
os.environ.pop("ESPHOME_IS_HA_ADDON", None)
os.environ.pop("ESPHOME_DATA_DIR", None)
assert target.data_dir == Path(expected_default)
def test_platformio_cache_dir_with_env_var(self):
"""Test platformio_cache_dir when PLATFORMIO_CACHE_DIR env var is set."""
target = core.EsphomeCore()
test_cache_dir = "/custom/cache/dir"
with patch.dict(os.environ, {"PLATFORMIO_CACHE_DIR": test_cache_dir}):
assert target.platformio_cache_dir == test_cache_dir
def test_platformio_cache_dir_without_env_var(self):
"""Test platformio_cache_dir defaults to ~/.platformio/.cache."""
target = core.EsphomeCore()
with patch.dict(os.environ, {}, clear=True):
# Ensure env var is not set
os.environ.pop("PLATFORMIO_CACHE_DIR", None)
expected = os.path.expanduser("~/.platformio/.cache")
assert target.platformio_cache_dir == expected
def test_platformio_cache_dir_empty_env_var(self):
"""Test platformio_cache_dir with empty env var falls back to default."""
target = core.EsphomeCore()
with patch.dict(os.environ, {"PLATFORMIO_CACHE_DIR": ""}):
expected = os.path.expanduser("~/.platformio/.cache")
assert target.platformio_cache_dir == expected
def test_platformio_cache_dir_whitespace_env_var(self):
"""Test platformio_cache_dir with whitespace-only env var falls back to default."""
target = core.EsphomeCore()
with patch.dict(os.environ, {"PLATFORMIO_CACHE_DIR": " "}):
expected = os.path.expanduser("~/.platformio/.cache")
assert target.platformio_cache_dir == expected
def test_platformio_cache_dir_docker_addon_path(self):
"""Test platformio_cache_dir in Docker/HA addon environment."""
target = core.EsphomeCore()
addon_cache = "/data/cache/platformio"
with patch.dict(os.environ, {"PLATFORMIO_CACHE_DIR": addon_cache}):
assert target.platformio_cache_dir == addon_cache

View File

@@ -355,7 +355,6 @@ def test_clean_build(
mock_core.relative_pioenvs_path.return_value = pioenvs_dir
mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
mock_core.relative_build_path.return_value = dependencies_lock
mock_core.platformio_cache_dir = str(platformio_cache_dir)
# Verify all exist before
assert pioenvs_dir.exists()