mirror of https://github.com/esphome/esphome.git synced 2025-11-19 00:05:43 +00:00

Compare commits


2 Commits

Author          SHA1        Message                                                                      Date
J. Nick Koston  4ad2da6562  update tests                                                                 2025-11-16 13:08:16 -06:00
J. Nick Koston  8997fb3443  [core] Reduce flash size by combining set_name() and set_object_id() calls  2025-11-16 13:02:59 -06:00
89 changed files with 475 additions and 1275 deletions

View File

@@ -22,7 +22,7 @@ jobs:
if: github.event.action != 'labeled' || github.event.sender.type != 'Bot'
steps:
- name: Checkout
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Generate a token
id: generate-token

View File

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:

View File

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0

View File

@@ -43,7 +43,7 @@ jobs:
- "docker"
# - "lint"
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:

View File

@@ -49,7 +49,7 @@ jobs:
- name: Check out code from base repository
if: steps.pr.outputs.skip != 'true'
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Always check out from the base repository (esphome/esphome), never from forks
# Use the PR's target branch to ensure we run trusted code from the main repo

View File

@@ -36,7 +36,7 @@ jobs:
cache-key: ${{ steps.cache-key.outputs.key }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Generate cache-key
id: cache-key
run: echo key="${{ hashFiles('requirements.txt', 'requirements_test.txt', '.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
@@ -70,7 +70,7 @@ jobs:
if: needs.determine-jobs.outputs.python-linters == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -91,7 +91,7 @@ jobs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -132,7 +132,7 @@ jobs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
id: restore-python
uses: ./.github/actions/restore-python
@@ -183,7 +183,7 @@ jobs:
component-test-batches: ${{ steps.determine.outputs.component-test-batches }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Fetch enough history to find the merge base
fetch-depth: 2
@@ -237,7 +237,7 @@ jobs:
if: needs.determine-jobs.outputs.integration-tests == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python 3.13
id: python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
@@ -273,7 +273,7 @@ jobs:
if: github.event_name == 'pull_request' && (needs.determine-jobs.outputs.cpp-unit-tests-run-all == 'true' || needs.determine-jobs.outputs.cpp-unit-tests-components != '[]')
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
@@ -321,7 +321,7 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -400,7 +400,7 @@ jobs:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -489,7 +489,7 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -577,7 +577,7 @@ jobs:
version: 1.0
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -662,7 +662,7 @@ jobs:
if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
steps:
- name: Check out code from GitHub
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -688,7 +688,7 @@ jobs:
skip: ${{ steps.check-script.outputs.skip }}
steps:
- name: Check out target branch
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.base_ref }}
@@ -840,7 +840,7 @@ jobs:
flash_usage: ${{ steps.extract.outputs.flash_usage }}
steps:
- name: Check out PR branch
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -908,7 +908,7 @@ jobs:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:

View File

@@ -54,11 +54,11 @@ jobs:
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@ jobs:
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
category: "/language:${{matrix.language}}"

View File

@@ -20,7 +20,7 @@ jobs:
branch_build: ${{ steps.tag.outputs.branch_build }}
deploy_env: ${{ steps.tag.outputs.deploy_env }}
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Get tag
id: tag
# yamllint disable rule:line-length
@@ -60,7 +60,7 @@ jobs:
contents: read
id-token: write
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
@@ -92,7 +92,7 @@ jobs:
os: "ubuntu-24.04-arm"
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
@@ -168,7 +168,7 @@ jobs:
- ghcr
- dockerhub
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Download digests
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0

View File

@@ -13,10 +13,10 @@ jobs:
if: github.repository == 'esphome/esphome'
steps:
- name: Checkout
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Checkout Home Assistant
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
repository: home-assistant/core
path: lib/home-assistant

View File

@@ -90,8 +90,8 @@ static const int CAMERA_STOP_STREAM = 5000;
APIConnection::APIConnection(std::unique_ptr<socket::Socket> sock, APIServer *parent)
: parent_(parent), initial_state_iterator_(this), list_entities_iterator_(this) {
#if defined(USE_API_PLAINTEXT) && defined(USE_API_NOISE)
auto &noise_ctx = parent->get_noise_ctx();
if (noise_ctx.has_psk()) {
auto noise_ctx = parent->get_noise_ctx();
if (noise_ctx->has_psk()) {
this->helper_ =
std::unique_ptr<APIFrameHelper>{new APINoiseFrameHelper(std::move(sock), noise_ctx, &this->client_info_)};
} else {

View File

@@ -527,7 +527,7 @@ APIError APINoiseFrameHelper::init_handshake_() {
if (aerr != APIError::OK)
return aerr;
const auto &psk = this->ctx_.get_psk();
const auto &psk = ctx_->get_psk();
err = noise_handshakestate_set_pre_shared_key(handshake_, psk.data(), psk.size());
aerr = handle_noise_error_(err, LOG_STR("noise_handshakestate_set_pre_shared_key"),
APIError::HANDSHAKESTATE_SETUP_FAILED);

View File

@@ -9,8 +9,9 @@ namespace esphome::api {
class APINoiseFrameHelper final : public APIFrameHelper {
public:
APINoiseFrameHelper(std::unique_ptr<socket::Socket> socket, APINoiseContext &ctx, const ClientInfo *client_info)
: APIFrameHelper(std::move(socket), client_info), ctx_(ctx) {
APINoiseFrameHelper(std::unique_ptr<socket::Socket> socket, std::shared_ptr<APINoiseContext> ctx,
const ClientInfo *client_info)
: APIFrameHelper(std::move(socket), client_info), ctx_(std::move(ctx)) {
// Noise header structure:
// Pos 0: indicator (0x01)
// Pos 1-2: encrypted payload size (16-bit big-endian)
@@ -40,8 +41,8 @@ class APINoiseFrameHelper final : public APIFrameHelper {
NoiseCipherState *send_cipher_{nullptr};
NoiseCipherState *recv_cipher_{nullptr};
// Reference to noise context (4 bytes on 32-bit)
APINoiseContext &ctx_;
// Shared pointer (8 bytes on 32-bit = 4 bytes control block pointer + 4 bytes object pointer)
std::shared_ptr<APINoiseContext> ctx_;
// Vector (12 bytes on 32-bit)
std::vector<uint8_t> prologue_;
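
Illustrative only, not part of the diff: a minimal, self-contained sketch of the two ownership models shown above for the noise context, a plain reference versus a std::shared_ptr, and the per-helper footprint difference the size comments refer to on a 32-bit target. MiniNoiseCtx, RefHelper and SharedHelper are hypothetical stand-ins for APINoiseContext and APINoiseFrameHelper.

#include <cstdio>
#include <memory>

struct MiniNoiseCtx {
  bool has_psk() const { return has_psk_; }
  bool has_psk_{false};
};

// Variant 1: the helper holds a plain reference (one pointer-sized member, 4 bytes on 32-bit).
struct RefHelper {
  explicit RefHelper(MiniNoiseCtx &ctx) : ctx_(ctx) {}
  MiniNoiseCtx &ctx_;
};

// Variant 2: the helper shares ownership (object pointer + control-block pointer, typically 8 bytes on 32-bit).
struct SharedHelper {
  explicit SharedHelper(std::shared_ptr<MiniNoiseCtx> ctx) : ctx_(std::move(ctx)) {}
  std::shared_ptr<MiniNoiseCtx> ctx_;
};

int main() {
  MiniNoiseCtx owned_ctx;
  auto shared_ctx = std::make_shared<MiniNoiseCtx>();
  RefHelper ref_helper(owned_ctx);
  SharedHelper shared_helper(shared_ctx);
  std::printf("sizeof(RefHelper)=%zu sizeof(SharedHelper)=%zu has_psk=%d\n", sizeof(RefHelper), sizeof(SharedHelper),
              shared_helper.ctx_->has_psk());
  return 0;
}

The general trade-off is a slightly larger member in exchange for lifetime safety: a shared_ptr keeps the context alive for any helper that outlives its creator, while a reference relies on the owner outliving every helper.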

View File

@@ -227,8 +227,8 @@ void APIServer::dump_config() {
" Max connections: %u",
network::get_use_address(), this->port_, this->listen_backlog_, this->max_connections_);
#ifdef USE_API_NOISE
ESP_LOGCONFIG(TAG, " Noise encryption: %s", YESNO(this->noise_ctx_.has_psk()));
if (!this->noise_ctx_.has_psk()) {
ESP_LOGCONFIG(TAG, " Noise encryption: %s", YESNO(this->noise_ctx_->has_psk()));
if (!this->noise_ctx_->has_psk()) {
ESP_LOGCONFIG(TAG, " Supports encryption: YES");
}
#else
@@ -493,7 +493,7 @@ bool APIServer::save_noise_psk(psk_t psk, bool make_active) {
ESP_LOGW(TAG, "Key set in YAML");
return false;
#else
auto &old_psk = this->noise_ctx_.get_psk();
auto &old_psk = this->noise_ctx_->get_psk();
if (std::equal(old_psk.begin(), old_psk.end(), psk.begin())) {
ESP_LOGW(TAG, "New PSK matches old");
return true;

View File

@@ -54,8 +54,8 @@ class APIServer : public Component, public Controller {
#ifdef USE_API_NOISE
bool save_noise_psk(psk_t psk, bool make_active = true);
bool clear_noise_psk(bool make_active = true);
void set_noise_psk(psk_t psk) { this->noise_ctx_.set_psk(psk); }
APINoiseContext &get_noise_ctx() { return this->noise_ctx_; }
void set_noise_psk(psk_t psk) { noise_ctx_->set_psk(psk); }
std::shared_ptr<APINoiseContext> get_noise_ctx() { return noise_ctx_; }
#endif // USE_API_NOISE
void handle_disconnect(APIConnection *conn);
@@ -228,7 +228,7 @@ class APIServer : public Component, public Controller {
// 7 bytes used, 1 byte padding
#ifdef USE_API_NOISE
APINoiseContext noise_ctx_;
std::shared_ptr<APINoiseContext> noise_ctx_ = std::make_shared<APINoiseContext>();
ESPPreferenceObject noise_pref_;
#endif // USE_API_NOISE
};

View File

@@ -70,9 +70,6 @@ void BME68xBSEC2Component::dump_config() {
if (this->is_failed()) {
ESP_LOGE(TAG, "Communication failed (BSEC2 status: %d, BME68X status: %d)", this->bsec_status_,
this->bme68x_status_);
if (this->bsec_status_ == BSEC_I_SU_SUBSCRIBEDOUTPUTGATES) {
ESP_LOGE(TAG, "No sensors, add at least one sensor to the config");
}
}
if (this->algorithm_output_ != ALGORITHM_OUTPUT_IAQ) {

View File

@@ -72,16 +72,6 @@ def _final_validate(config: ConfigType) -> ConfigType:
"Add 'ap:' to your WiFi configuration to enable the captive portal."
)
# Register socket needs for DNS server and additional HTTP connections
# - 1 UDP socket for DNS server
# - 3 additional TCP sockets for captive portal detection probes + configuration requests
# OS captive portal detection makes multiple probe requests that stay in TIME_WAIT.
# Need headroom for actual user configuration requests.
# LRU purging will reclaim idle sockets to prevent exhaustion from repeated attempts.
from esphome.components import socket
socket.consume_sockets(4, "captive_portal")(config)
return config

View File

@@ -50,8 +50,8 @@ void CaptivePortal::handle_wifisave(AsyncWebServerRequest *request) {
ESP_LOGI(TAG, "Requested WiFi Settings Change:");
ESP_LOGI(TAG, " SSID='%s'", ssid.c_str());
ESP_LOGI(TAG, " Password=" LOG_SECRET("'%s'"), psk.c_str());
// Defer save to main loop thread to avoid NVS operations from HTTP thread
this->defer([ssid, psk]() { wifi::global_wifi_component->save_wifi_sta(ssid, psk); });
wifi::global_wifi_component->save_wifi_sta(ssid, psk);
wifi::global_wifi_component->start_scanning();
request->redirect(ESPHOME_F("/?save"));
}
@@ -63,12 +63,6 @@ void CaptivePortal::start() {
this->base_->init();
if (!this->initialized_) {
this->base_->add_handler(this);
#ifdef USE_ESP32
// Enable LRU socket purging to handle captive portal detection probe bursts
// OS captive portal detection makes many simultaneous HTTP requests which can
// exhaust sockets. LRU purging automatically closes oldest idle connections.
this->base_->get_server()->set_lru_purge_enable(true);
#endif
}
network::IPAddress ip = wifi::global_wifi_component->wifi_soft_ap_ip();

View File

@@ -40,10 +40,6 @@ class CaptivePortal : public AsyncWebHandler, public Component {
void end() {
this->active_ = false;
this->disable_loop(); // Stop processing DNS requests
#ifdef USE_ESP32
// Disable LRU socket purging now that captive portal is done
this->base_->get_server()->set_lru_purge_enable(false);
#endif
this->base_->deinit();
if (this->dns_server_ != nullptr) {
this->dns_server_->stop();

View File

@@ -4,7 +4,8 @@
#include "esphome/core/automation.h"
#include "cover.h"
namespace esphome::cover {
namespace esphome {
namespace cover {
template<typename... Ts> class OpenAction : public Action<Ts...> {
public:
@@ -130,4 +131,5 @@ class CoverClosedTrigger : public Trigger<> {
}
};
} // namespace esphome::cover
} // namespace cover
} // namespace esphome
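
Side note, for readers unfamiliar with the two namespace spellings being swapped throughout the cover and number hunks: the compact form is the C++17 nested namespace definition and is equivalent to the nested blocks, as this standalone snippet (with hypothetical classes A and B) shows.

namespace esphome::cover {
class A {};
}  // namespace esphome::cover

namespace esphome {
namespace cover {
class B {};
}  // namespace cover
}  // namespace esphome

// Both A and B end up in the same namespace, so this compiles:
static_assert(sizeof(esphome::cover::A) == sizeof(esphome::cover::B), "same namespace, same empty classes");

int main() { return 0; }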

View File

@@ -6,7 +6,8 @@
#include "esphome/core/log.h"
namespace esphome::cover {
namespace esphome {
namespace cover {
static const char *const TAG = "cover";
@@ -211,4 +212,5 @@ void CoverRestoreState::apply(Cover *cover) {
cover->publish_state();
}
} // namespace esphome::cover
} // namespace cover
} // namespace esphome

View File

@@ -7,7 +7,8 @@
#include "cover_traits.h"
namespace esphome::cover {
namespace esphome {
namespace cover {
const extern float COVER_OPEN;
const extern float COVER_CLOSED;
@@ -156,4 +157,5 @@ class Cover : public EntityBase, public EntityBase_DeviceClass {
ESPPreferenceObject rtc_;
};
} // namespace esphome::cover
} // namespace cover
} // namespace esphome

View File

@@ -1,6 +1,7 @@
#pragma once
namespace esphome::cover {
namespace esphome {
namespace cover {
class CoverTraits {
public:
@@ -25,4 +26,5 @@ class CoverTraits {
bool supports_stop_{false};
};
} // namespace esphome::cover
} // namespace cover
} // namespace esphome

View File

@@ -3,10 +3,10 @@
namespace esphome {
namespace dashboard_import {
static const char *g_package_import_url = ""; // NOLINT
static std::string g_package_import_url; // NOLINT
const char *get_package_import_url() { return g_package_import_url; }
void set_package_import_url(const char *url) { g_package_import_url = url; }
const std::string &get_package_import_url() { return g_package_import_url; }
void set_package_import_url(std::string url) { g_package_import_url = std::move(url); }
} // namespace dashboard_import
} // namespace esphome

View File

@@ -1,10 +1,12 @@
#pragma once
#include <string>
namespace esphome {
namespace dashboard_import {
const char *get_package_import_url();
void set_package_import_url(const char *url);
const std::string &get_package_import_url();
void set_package_import_url(std::string url);
} // namespace dashboard_import
} // namespace esphome

View File

@@ -931,12 +931,6 @@ async def to_code(config):
add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True)
add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True)
# ESP32-S2 Arduino: Disable USB Serial on boot to avoid TinyUSB dependency
if get_esp32_variant() == VARIANT_ESP32S2:
cg.add_build_unflag("-DARDUINO_USB_CDC_ON_BOOT=1")
cg.add_build_unflag("-DARDUINO_USB_CDC_ON_BOOT=0")
cg.add_build_flag("-DARDUINO_USB_CDC_ON_BOOT=0")
cg.add_build_flag("-Wno-nonnull-compare")
add_idf_sdkconfig_option(f"CONFIG_IDF_TARGET_{variant}", True)

View File

@@ -20,10 +20,6 @@ CONF_ON_STOP = "on_stop"
CONF_STATUS_INDICATOR = "status_indicator"
CONF_WIFI_TIMEOUT = "wifi_timeout"
# Default WiFi timeout - aligned with WiFi component ap_timeout
# Allows sufficient time to try all BSSIDs before starting provisioning mode
DEFAULT_WIFI_TIMEOUT = "90s"
improv_ns = cg.esphome_ns.namespace("improv")
Error = improv_ns.enum("Error")
@@ -63,7 +59,7 @@ CONFIG_SCHEMA = (
CONF_AUTHORIZED_DURATION, default="1min"
): cv.positive_time_period_milliseconds,
cv.Optional(
CONF_WIFI_TIMEOUT, default=DEFAULT_WIFI_TIMEOUT
CONF_WIFI_TIMEOUT, default="1min"
): cv.positive_time_period_milliseconds,
cv.Optional(CONF_ON_PROVISIONED): automation.validate_automation(
{

View File

@@ -127,7 +127,6 @@ void ESP32ImprovComponent::loop() {
// Set initial state based on whether we have an authorizer
this->set_state_(this->get_initial_state_(), false);
this->set_error_(improv::ERROR_NONE);
this->should_start_ = false; // Clear flag after starting
ESP_LOGD(TAG, "Service started!");
}
}

View File

@@ -45,7 +45,6 @@ class ESP32ImprovComponent : public Component, public improv_base::ImprovBase {
void start();
void stop();
bool is_active() const { return this->state_ != improv::STATE_STOPPED; }
bool should_start() const { return this->should_start_; }
#ifdef USE_ESP32_IMPROV_STATE_CALLBACK
void add_on_state_callback(std::function<void(improv::State, improv::Error)> &&callback) {

View File

@@ -486,8 +486,6 @@ class GlyphInfo:
def glyph_to_glyphinfo(glyph, font, size, bpp):
# Convert to 32 bit unicode codepoint
glyph = ord(glyph)
scale = 256 // (1 << bpp)
if not font.is_scalable:
sizes = [pt_to_px(x.size) for x in font.available_sizes]

View File

@@ -6,147 +6,42 @@
namespace esphome {
namespace font {
static const char *const TAG = "font";
#ifdef USE_LVGL_FONT
const uint8_t *Font::get_glyph_bitmap(const lv_font_t *font, uint32_t unicode_letter) {
auto *fe = (Font *) font->dsc;
const auto *gd = fe->get_glyph_data_(unicode_letter);
if (gd == nullptr) {
return nullptr;
// Compare the char at the string position with this char.
// Return true if this char is less than or equal the other.
bool Glyph::compare_to(const uint8_t *str) const {
// 1 -> this->char_
// 2 -> str
for (uint32_t i = 0;; i++) {
if (this->a_char[i] == '\0')
return true;
if (str[i] == '\0')
return false;
if (this->a_char[i] > str[i])
return false;
if (this->a_char[i] < str[i])
return true;
}
return gd->data;
// this should not happen
return false;
}
bool Font::get_glyph_dsc_cb(const lv_font_t *font, lv_font_glyph_dsc_t *dsc, uint32_t unicode_letter, uint32_t next) {
auto *fe = (Font *) font->dsc;
const auto *gd = fe->get_glyph_data_(unicode_letter);
if (gd == nullptr) {
return false;
int Glyph::match_length(const uint8_t *str) const {
for (uint32_t i = 0;; i++) {
if (this->a_char[i] == '\0')
return i;
if (str[i] != this->a_char[i])
return 0;
}
dsc->adv_w = gd->advance;
dsc->ofs_x = gd->offset_x;
dsc->ofs_y = fe->height_ - gd->height - gd->offset_y - fe->lv_font_.base_line;
dsc->box_w = gd->width;
dsc->box_h = gd->height;
dsc->is_placeholder = 0;
dsc->bpp = fe->get_bpp();
return true;
// this should not happen
return 0;
}
const Glyph *Font::get_glyph_data_(uint32_t unicode_letter) {
if (unicode_letter == this->last_letter_ && this->last_letter_ != 0)
return this->last_data_;
auto *glyph = this->find_glyph(unicode_letter);
if (glyph == nullptr) {
return nullptr;
}
this->last_data_ = glyph;
this->last_letter_ = unicode_letter;
return glyph;
}
#endif
/**
* Attempt to extract a 32 bit Unicode codepoint from a UTF-8 string.
* If successful, return the codepoint and set the length to the number of bytes read.
* If the end of the string has been reached and a valid codepoint has not been found, return 0 and set the length to
* 0.
*
* @param utf8_str The input string
* @param length Pointer to length storage
* @return The extracted code point
*/
static uint32_t extract_unicode_codepoint(const char *utf8_str, size_t *length) {
// Safely cast to uint8_t* for correct bitwise operations on bytes
const uint8_t *current = reinterpret_cast<const uint8_t *>(utf8_str);
uint32_t code_point = 0;
uint8_t c1 = *current++;
// check for end of string
if (c1 == 0) {
*length = 0;
return 0;
}
// --- 1-Byte Sequence: 0xxxxxxx (ASCII) ---
if (c1 < 0x80) {
// Valid ASCII byte.
code_point = c1;
// Optimization: No need to check for continuation bytes.
}
// --- 2-Byte Sequence: 110xxxxx 10xxxxxx ---
else if ((c1 & 0xE0) == 0xC0) {
uint8_t c2 = *current++;
// Error Check 1: Check if c2 is a valid continuation byte (10xxxxxx)
if ((c2 & 0xC0) != 0x80) {
*length = 0;
return 0;
}
code_point = (c1 & 0x1F) << 6;
code_point |= (c2 & 0x3F);
// Error Check 2: Overlong check (2-byte must be > 0x7F)
if (code_point <= 0x7F) {
*length = 0;
return 0;
}
}
// --- 3-Byte Sequence: 1110xxxx 10xxxxxx 10xxxxxx ---
else if ((c1 & 0xF0) == 0xE0) {
uint8_t c2 = *current++;
uint8_t c3 = *current++;
// Error Check 1: Check continuation bytes
if (((c2 & 0xC0) != 0x80) || ((c3 & 0xC0) != 0x80)) {
*length = 0;
return 0;
}
code_point = (c1 & 0x0F) << 12;
code_point |= (c2 & 0x3F) << 6;
code_point |= (c3 & 0x3F);
// Error Check 2: Overlong check (3-byte must be > 0x7FF)
// Also check for surrogates (0xD800-0xDFFF)
if (code_point <= 0x7FF || (code_point >= 0xD800 && code_point <= 0xDFFF)) {
*length = 0;
return 0;
}
}
// --- 4-Byte Sequence: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx ---
else if ((c1 & 0xF8) == 0xF0) {
uint8_t c2 = *current++;
uint8_t c3 = *current++;
uint8_t c4 = *current++;
// Error Check 1: Check continuation bytes
if (((c2 & 0xC0) != 0x80) || ((c3 & 0xC0) != 0x80) || ((c4 & 0xC0) != 0x80)) {
*length = 0;
return 0;
}
code_point = (c1 & 0x07) << 18;
code_point |= (c2 & 0x3F) << 12;
code_point |= (c3 & 0x3F) << 6;
code_point |= (c4 & 0x3F);
// Error Check 2: Overlong check (4-byte must be > 0xFFFF)
// Also check for valid Unicode range (must be <= 0x10FFFF)
if (code_point <= 0xFFFF || code_point > 0x10FFFF) {
*length = 0;
return 0;
}
}
// --- Invalid leading byte (e.g., 10xxxxxx or 11111xxx) ---
else {
*length = 0;
return 0;
}
*length = current - reinterpret_cast<const uint8_t *>(utf8_str);
return code_point;
void Glyph::scan_area(int *x1, int *y1, int *width, int *height) const {
*x1 = this->offset_x;
*y1 = this->offset_y;
*width = this->width;
*height = this->height;
}
Font::Font(const Glyph *data, int data_nr, int baseline, int height, int descender, int xheight, int capheight,
@@ -158,93 +53,82 @@ Font::Font(const Glyph *data, int data_nr, int baseline, int height, int descend
linegap_(height - baseline - descender),
xheight_(xheight),
capheight_(capheight),
bpp_(bpp) {
#ifdef USE_LVGL_FONT
this->lv_font_.dsc = this;
this->lv_font_.line_height = this->get_height();
this->lv_font_.base_line = this->lv_font_.line_height - this->get_baseline();
this->lv_font_.get_glyph_dsc = get_glyph_dsc_cb;
this->lv_font_.get_glyph_bitmap = get_glyph_bitmap;
this->lv_font_.subpx = LV_FONT_SUBPX_NONE;
this->lv_font_.underline_position = -1;
this->lv_font_.underline_thickness = 1;
#endif
}
const Glyph *Font::find_glyph(uint32_t codepoint) const {
bpp_(bpp) {}
int Font::match_next_glyph(const uint8_t *str, int *match_length) const {
int lo = 0;
int hi = this->glyphs_.size() - 1;
while (lo != hi) {
int mid = (lo + hi + 1) / 2;
if (this->glyphs_[mid].is_less_or_equal(codepoint)) {
if (this->glyphs_[mid].compare_to(str)) {
lo = mid;
} else {
hi = mid - 1;
}
}
auto *result = &this->glyphs_[lo];
if (result->code_point == codepoint)
return result;
return nullptr;
*match_length = this->glyphs_[lo].match_length(str);
if (*match_length <= 0)
return -1;
return lo;
}
#ifdef USE_DISPLAY
void Font::measure(const char *str, int *width, int *x_offset, int *baseline, int *height) {
*baseline = this->baseline_;
*height = this->height_;
int i = 0;
int min_x = 0;
bool has_char = false;
int x = 0;
for (;;) {
size_t length;
auto code_point = extract_unicode_codepoint(str, &length);
if (length == 0)
break;
str += length;
auto *glyph = this->find_glyph(code_point);
if (glyph == nullptr) {
while (str[i] != '\0') {
int match_length;
int glyph_n = this->match_next_glyph((const uint8_t *) str + i, &match_length);
if (glyph_n < 0) {
// Unknown char, skip
if (!this->glyphs_.empty())
x += this->glyphs_[0].advance;
if (!this->get_glyphs().empty())
x += this->get_glyphs()[0].advance;
i++;
continue;
}
const Glyph &glyph = this->glyphs_[glyph_n];
if (!has_char) {
min_x = glyph->offset_x;
min_x = glyph.offset_x;
} else {
min_x = std::min(min_x, x + glyph->offset_x);
min_x = std::min(min_x, x + glyph.offset_x);
}
x += glyph->advance;
x += glyph.advance;
i += match_length;
has_char = true;
}
*x_offset = min_x;
*width = x - min_x;
}
void Font::print(int x_start, int y_start, display::Display *display, Color color, const char *text, Color background) {
int i = 0;
int x_at = x_start;
for (;;) {
size_t length;
auto code_point = extract_unicode_codepoint(text, &length);
if (length == 0)
break;
text += length;
auto *glyph = this->find_glyph(code_point);
if (glyph == nullptr) {
int scan_x1, scan_y1, scan_width, scan_height;
while (text[i] != '\0') {
int match_length;
int glyph_n = this->match_next_glyph((const uint8_t *) text + i, &match_length);
if (glyph_n < 0) {
// Unknown char, skip
ESP_LOGW(TAG, "Codepoint 0x%08" PRIx32 " not found in font", code_point);
if (!this->glyphs_.empty()) {
uint8_t glyph_width = this->glyphs_[0].advance;
display->rectangle(x_at, y_start, glyph_width, this->height_, color);
ESP_LOGW(TAG, "Encountered character without representation in font: '%c'", text[i]);
if (!this->get_glyphs().empty()) {
uint8_t glyph_width = this->get_glyphs()[0].advance;
display->filled_rectangle(x_at, y_start, glyph_width, this->height_, color);
x_at += glyph_width;
}
i++;
continue;
}
const uint8_t *data = glyph->data;
const int max_x = x_at + glyph->offset_x + glyph->width;
const int max_y = y_start + glyph->offset_y + glyph->height;
const Glyph &glyph = this->get_glyphs()[glyph_n];
glyph.scan_area(&scan_x1, &scan_y1, &scan_width, &scan_height);
const uint8_t *data = glyph.data;
const int max_x = x_at + scan_x1 + scan_width;
const int max_y = y_start + scan_y1 + scan_height;
uint8_t bitmask = 0;
uint8_t pixel_data = 0;
@@ -257,10 +141,10 @@ void Font::print(int x_start, int y_start, display::Display *display, Color colo
auto b_g = (float) background.g;
auto b_b = (float) background.b;
auto b_w = (float) background.w;
for (int glyph_y = y_start + glyph->offset_y; glyph_y != max_y; glyph_y++) {
for (int glyph_x = x_at + glyph->offset_x; glyph_x != max_x; glyph_x++) {
for (int glyph_y = y_start + scan_y1; glyph_y != max_y; glyph_y++) {
for (int glyph_x = x_at + scan_x1; glyph_x != max_x; glyph_x++) {
uint8_t pixel = 0;
for (uint8_t bit_num = 0; bit_num != this->bpp_; bit_num++) {
for (int bit_num = 0; bit_num != this->bpp_; bit_num++) {
if (bitmask == 0) {
pixel_data = progmem_read_byte(data++);
bitmask = 0x80;
@@ -280,9 +164,12 @@ void Font::print(int x_start, int y_start, display::Display *display, Color colo
}
}
}
x_at += glyph->advance;
x_at += glyph.advance;
i += match_length;
}
}
#endif
} // namespace font
} // namespace esphome
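
Illustrative only: a worked example of the UTF-8 decoding described in the extract_unicode_codepoint() comment above, reduced to the 2-byte branch. decode_2byte() is a hypothetical stand-alone helper, not an esphome API.

#include <cinttypes>
#include <cstdint>
#include <cstdio>

// Decode a 2-byte UTF-8 sequence (110xxxxx 10xxxxxx); returns 0 on error.
static uint32_t decode_2byte(const uint8_t *s) {
  if ((s[0] & 0xE0) != 0xC0 || (s[1] & 0xC0) != 0x80)
    return 0;                                   // not a lead byte followed by a continuation byte
  uint32_t cp = (uint32_t) (s[0] & 0x1F) << 6;  // low 5 bits of the lead byte
  cp |= (s[1] & 0x3F);                          // low 6 bits of the continuation byte
  return cp <= 0x7F ? 0 : cp;                   // reject overlong encodings
}

int main() {
  const uint8_t e_acute[] = {0xC3, 0xA9};                   // "é" is encoded as the bytes C3 A9
  std::printf("U+%04" PRIX32 "\n", decode_2byte(e_acute));  // prints U+00E9
  return 0;
}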

View File

@@ -6,9 +6,6 @@
#ifdef USE_DISPLAY
#include "esphome/components/display/display.h"
#endif
#ifdef USE_LVGL_FONT
#include <lvgl.h>
#endif
namespace esphome {
namespace font {
@@ -17,9 +14,9 @@ class Font;
class Glyph {
public:
constexpr Glyph(uint32_t code_point, const uint8_t *data, int advance, int offset_x, int offset_y, int width,
constexpr Glyph(const char *a_char, const uint8_t *data, int advance, int offset_x, int offset_y, int width,
int height)
: code_point(code_point),
: a_char(a_char),
data(data),
advance(advance),
offset_x(offset_x),
@@ -27,15 +24,24 @@ class Glyph {
width(width),
height(height) {}
bool is_less_or_equal(uint32_t other) const { return this->code_point <= other; }
const uint8_t *get_char() const { return reinterpret_cast<const uint8_t *>(this->a_char); }
const uint32_t code_point;
bool compare_to(const uint8_t *str) const;
int match_length(const uint8_t *str) const;
void scan_area(int *x1, int *y1, int *width, int *height) const;
const char *a_char;
const uint8_t *data;
int advance;
int offset_x;
int offset_y;
int width;
int height;
protected:
friend Font;
};
class Font
@@ -58,7 +64,7 @@ class Font
Font(const Glyph *data, int data_nr, int baseline, int height, int descender, int xheight, int capheight,
uint8_t bpp = 1);
const Glyph *find_glyph(uint32_t codepoint) const;
int match_next_glyph(const uint8_t *str, int *match_length) const;
#ifdef USE_DISPLAY
void print(int x_start, int y_start, display::Display *display, Color color, const char *text,
@@ -73,9 +79,6 @@ class Font
inline int get_xheight() { return this->xheight_; }
inline int get_capheight() { return this->capheight_; }
inline int get_bpp() { return this->bpp_; }
#ifdef USE_LVGL_FONT
const lv_font_t *get_lv_font() const { return &this->lv_font_; }
#endif
const ConstVector<Glyph> &get_glyphs() const { return glyphs_; }
@@ -88,14 +91,6 @@ class Font
int xheight_;
int capheight_;
uint8_t bpp_; // bits per pixel
#ifdef USE_LVGL_FONT
lv_font_t lv_font_{};
static const uint8_t *get_glyph_bitmap(const lv_font_t *font, uint32_t unicode_letter);
static bool get_glyph_dsc_cb(const lv_font_t *font, lv_font_glyph_dsc_t *dsc, uint32_t unicode_letter, uint32_t next);
const Glyph *get_glyph_data_(uint32_t unicode_letter);
uint32_t last_letter_{};
const Glyph *last_data_{};
#endif
};
} // namespace font
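
Illustrative only: a compileable distillation of the prefix-matching lookup that Glyph::compare_to() and Font::match_next_glyph() above implement, with glyphs sorted by their raw UTF-8 byte sequence. MiniGlyph and the free match_next_glyph() are hypothetical stand-ins for the real classes.

#include <cstdio>
#include <cstring>
#include <vector>

struct MiniGlyph {
  const char *utf8;  // the character this glyph renders, as raw UTF-8 bytes
};

// Find the glyph whose UTF-8 bytes are a prefix of `str`; returns its index or -1.
// The table must be non-empty and sorted by byte value (strncmp compares as unsigned char).
static int match_next_glyph(const std::vector<MiniGlyph> &glyphs, const char *str, int *match_length) {
  int lo = 0;
  int hi = (int) glyphs.size() - 1;
  while (lo != hi) {
    int mid = (lo + hi + 1) / 2;
    // "is glyphs[mid] less than or equal to str?" -- the compare_to() question
    if (std::strncmp(glyphs[mid].utf8, str, std::strlen(glyphs[mid].utf8)) <= 0) {
      lo = mid;
    } else {
      hi = mid - 1;
    }
  }
  size_t len = std::strlen(glyphs[lo].utf8);
  if (std::strncmp(glyphs[lo].utf8, str, len) != 0)
    return -1;  // nearest glyph is not a prefix of the input
  *match_length = (int) len;
  return lo;
}

int main() {
  std::vector<MiniGlyph> glyphs = {{"A"}, {"B"}, {"\xC3\xA9"}};  // "A", "B", "é", sorted by bytes
  int match_length = 0;
  int idx = match_next_glyph(glyphs, "\xC3\xA9t\xC3\xA9", &match_length);  // input "été"
  std::printf("glyph index=%d consumed %d byte(s)\n", idx, match_length);  // index=2, 2 bytes
  return 0;
}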

View File

@@ -13,6 +13,8 @@ namespace esphome {
namespace ld2410 {
static const char *const TAG = "ld2410";
static const char *const UNKNOWN_MAC = "unknown";
static const char *const VERSION_FMT = "%u.%02X.%02X%02X%02X%02X";
enum BaudRate : uint8_t {
BAUD_RATE_9600 = 1,
@@ -179,15 +181,15 @@ static inline bool validate_header_footer(const uint8_t *header_footer, const ui
}
void LD2410Component::dump_config() {
char mac_s[18];
char version_s[20];
const char *mac_str = ld24xx::format_mac_str(this->mac_address_, mac_s);
ld24xx::format_version_str(this->version_, version_s);
std::string mac_str =
mac_address_is_valid(this->mac_address_) ? format_mac_address_pretty(this->mac_address_) : UNKNOWN_MAC;
std::string version = str_sprintf(VERSION_FMT, this->version_[1], this->version_[0], this->version_[5],
this->version_[4], this->version_[3], this->version_[2]);
ESP_LOGCONFIG(TAG,
"LD2410:\n"
" Firmware version: %s\n"
" MAC address: %s",
version_s, mac_str);
version.c_str(), mac_str.c_str());
#ifdef USE_BINARY_SENSOR
ESP_LOGCONFIG(TAG, "Binary Sensors:");
LOG_BINARY_SENSOR(" ", "Target", this->target_binary_sensor_);
@@ -446,12 +448,12 @@ bool LD2410Component::handle_ack_data_() {
case CMD_QUERY_VERSION: {
std::memcpy(this->version_, &this->buffer_data_[12], sizeof(this->version_));
char version_s[20];
ld24xx::format_version_str(this->version_, version_s);
ESP_LOGV(TAG, "Firmware version: %s", version_s);
std::string version = str_sprintf(VERSION_FMT, this->version_[1], this->version_[0], this->version_[5],
this->version_[4], this->version_[3], this->version_[2]);
ESP_LOGV(TAG, "Firmware version: %s", version.c_str());
#ifdef USE_TEXT_SENSOR
if (this->version_text_sensor_ != nullptr) {
this->version_text_sensor_->publish_state(version_s);
this->version_text_sensor_->publish_state(version);
}
#endif
break;
@@ -504,9 +506,9 @@ bool LD2410Component::handle_ack_data_() {
std::memcpy(this->mac_address_, &this->buffer_data_[10], sizeof(this->mac_address_));
}
char mac_s[18];
const char *mac_str = ld24xx::format_mac_str(this->mac_address_, mac_s);
ESP_LOGV(TAG, "MAC address: %s", mac_str);
std::string mac_str =
mac_address_is_valid(this->mac_address_) ? format_mac_address_pretty(this->mac_address_) : UNKNOWN_MAC;
ESP_LOGV(TAG, "MAC address: %s", mac_str.c_str());
#ifdef USE_TEXT_SENSOR
if (this->mac_text_sensor_ != nullptr) {
this->mac_text_sensor_->publish_state(mac_str);

View File

@@ -14,6 +14,8 @@ namespace esphome {
namespace ld2412 {
static const char *const TAG = "ld2412";
static const char *const UNKNOWN_MAC = "unknown";
static const char *const VERSION_FMT = "%u.%02X.%02X%02X%02X%02X";
enum BaudRate : uint8_t {
BAUD_RATE_9600 = 1,
@@ -198,15 +200,15 @@ static inline bool validate_header_footer(const uint8_t *header_footer, const ui
}
void LD2412Component::dump_config() {
char mac_s[18];
char version_s[20];
const char *mac_str = ld24xx::format_mac_str(this->mac_address_, mac_s);
ld24xx::format_version_str(this->version_, version_s);
std::string mac_str =
mac_address_is_valid(this->mac_address_) ? format_mac_address_pretty(this->mac_address_) : UNKNOWN_MAC;
std::string version = str_sprintf(VERSION_FMT, this->version_[1], this->version_[0], this->version_[5],
this->version_[4], this->version_[3], this->version_[2]);
ESP_LOGCONFIG(TAG,
"LD2412:\n"
" Firmware version: %s\n"
" MAC address: %s",
version_s, mac_str);
version.c_str(), mac_str.c_str());
#ifdef USE_BINARY_SENSOR
ESP_LOGCONFIG(TAG, "Binary Sensors:");
LOG_BINARY_SENSOR(" ", "DynamicBackgroundCorrectionStatus",
@@ -490,12 +492,12 @@ bool LD2412Component::handle_ack_data_() {
case CMD_QUERY_VERSION: {
std::memcpy(this->version_, &this->buffer_data_[12], sizeof(this->version_));
char version_s[20];
ld24xx::format_version_str(this->version_, version_s);
ESP_LOGV(TAG, "Firmware version: %s", version_s);
std::string version = str_sprintf(VERSION_FMT, this->version_[1], this->version_[0], this->version_[5],
this->version_[4], this->version_[3], this->version_[2]);
ESP_LOGV(TAG, "Firmware version: %s", version.c_str());
#ifdef USE_TEXT_SENSOR
if (this->version_text_sensor_ != nullptr) {
this->version_text_sensor_->publish_state(version_s);
this->version_text_sensor_->publish_state(version);
}
#endif
break;
@@ -542,9 +544,9 @@ bool LD2412Component::handle_ack_data_() {
std::memcpy(this->mac_address_, &this->buffer_data_[10], sizeof(this->mac_address_));
}
char mac_s[18];
const char *mac_str = ld24xx::format_mac_str(this->mac_address_, mac_s);
ESP_LOGV(TAG, "MAC address: %s", mac_str);
std::string mac_str =
mac_address_is_valid(this->mac_address_) ? format_mac_address_pretty(this->mac_address_) : UNKNOWN_MAC;
ESP_LOGV(TAG, "MAC address: %s", mac_str.c_str());
#ifdef USE_TEXT_SENSOR
if (this->mac_text_sensor_ != nullptr) {
this->mac_text_sensor_->publish_state(mac_str);

View File

@@ -17,6 +17,8 @@ namespace esphome {
namespace ld2450 {
static const char *const TAG = "ld2450";
static const char *const UNKNOWN_MAC = "unknown";
static const char *const VERSION_FMT = "%u.%02X.%02X%02X%02X%02X";
enum BaudRate : uint8_t {
BAUD_RATE_9600 = 1,
@@ -190,15 +192,15 @@ void LD2450Component::setup() {
}
void LD2450Component::dump_config() {
char mac_s[18];
char version_s[20];
const char *mac_str = ld24xx::format_mac_str(this->mac_address_, mac_s);
ld24xx::format_version_str(this->version_, version_s);
std::string mac_str =
mac_address_is_valid(this->mac_address_) ? format_mac_address_pretty(this->mac_address_) : UNKNOWN_MAC;
std::string version = str_sprintf(VERSION_FMT, this->version_[1], this->version_[0], this->version_[5],
this->version_[4], this->version_[3], this->version_[2]);
ESP_LOGCONFIG(TAG,
"LD2450:\n"
" Firmware version: %s\n"
" MAC address: %s",
version_s, mac_str);
version.c_str(), mac_str.c_str());
#ifdef USE_BINARY_SENSOR
ESP_LOGCONFIG(TAG, "Binary Sensors:");
LOG_BINARY_SENSOR(" ", "MovingTarget", this->moving_target_binary_sensor_);
@@ -640,12 +642,12 @@ bool LD2450Component::handle_ack_data_() {
case CMD_QUERY_VERSION: {
std::memcpy(this->version_, &this->buffer_data_[12], sizeof(this->version_));
char version_s[20];
ld24xx::format_version_str(this->version_, version_s);
ESP_LOGV(TAG, "Firmware version: %s", version_s);
std::string version = str_sprintf(VERSION_FMT, this->version_[1], this->version_[0], this->version_[5],
this->version_[4], this->version_[3], this->version_[2]);
ESP_LOGV(TAG, "Firmware version: %s", version.c_str());
#ifdef USE_TEXT_SENSOR
if (this->version_text_sensor_ != nullptr) {
this->version_text_sensor_->publish_state(version_s);
this->version_text_sensor_->publish_state(version);
}
#endif
break;
@@ -661,9 +663,9 @@ bool LD2450Component::handle_ack_data_() {
std::memcpy(this->mac_address_, &this->buffer_data_[10], sizeof(this->mac_address_));
}
char mac_s[18];
const char *mac_str = ld24xx::format_mac_str(this->mac_address_, mac_s);
ESP_LOGV(TAG, "MAC address: %s", mac_str);
std::string mac_str =
mac_address_is_valid(this->mac_address_) ? format_mac_address_pretty(this->mac_address_) : UNKNOWN_MAC;
ESP_LOGV(TAG, "MAC address: %s", mac_str.c_str());
#ifdef USE_TEXT_SENSOR
if (this->mac_text_sensor_ != nullptr) {
this->mac_text_sensor_->publish_state(mac_str);

View File

@@ -1,12 +1,11 @@
#pragma once
#include "esphome/core/defines.h"
#include "esphome/core/helpers.h"
#include <memory>
#include <span>
#ifdef USE_SENSOR
#include "esphome/core/helpers.h"
#include "esphome/components/sensor/sensor.h"
#define SUB_SENSOR_WITH_DEDUP(name, dedup_type) \
@@ -40,27 +39,6 @@
namespace esphome {
namespace ld24xx {
static const char *const UNKNOWN_MAC = "unknown";
static const char *const VERSION_FMT = "%u.%02X.%02X%02X%02X%02X";
// Helper function to format MAC address with stack allocation
// Returns pointer to UNKNOWN_MAC constant or formatted buffer
// Buffer must be exactly 18 bytes (17 for "XX:XX:XX:XX:XX:XX" + null terminator)
inline const char *format_mac_str(const uint8_t *mac_address, std::span<char, 18> buffer) {
if (mac_address_is_valid(mac_address)) {
format_mac_addr_upper(mac_address, buffer.data());
return buffer.data();
}
return UNKNOWN_MAC;
}
// Helper function to format firmware version with stack allocation
// Buffer must be exactly 20 bytes (format: "x.xxXXXXXX" fits in 11 + null terminator, 20 for safety)
inline void format_version_str(const uint8_t *version, std::span<char, 20> buffer) {
snprintf(buffer.data(), buffer.size(), VERSION_FMT, version[1], version[0], version[5], version[4], version[3],
version[2]);
}
#ifdef USE_SENSOR
// Helper class to store a sensor with a deduplicator & publish state only when the value changes
template<typename T> class SensorWithDedup {
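
Illustrative only: a standalone sketch of the stack-buffer pattern used by the span-based helpers above. The fixed-extent std::span<char, 18> makes the caller's buffer size a compile-time contract. mac_is_valid() and the MAC byte order here are simplified stand-ins for esphome's mac_address_is_valid() and format_mac_addr_upper().

#include <cstdint>
#include <cstdio>
#include <span>

static const char *const UNKNOWN_MAC = "unknown";

// A MAC of all zeroes is treated as "not set" in this sketch.
static bool mac_is_valid(const uint8_t *mac) {
  for (int i = 0; i < 6; i++) {
    if (mac[i] != 0)
      return true;
  }
  return false;
}

// The fixed-extent span guarantees at compile time that the caller passed exactly 18 bytes
// (17 for "XX:XX:XX:XX:XX:XX" plus the terminator), with no heap allocation involved.
static const char *format_mac_str(const uint8_t *mac, std::span<char, 18> buffer) {
  if (!mac_is_valid(mac))
    return UNKNOWN_MAC;
  std::snprintf(buffer.data(), buffer.size(), "%02X:%02X:%02X:%02X:%02X:%02X", mac[0], mac[1], mac[2], mac[3], mac[4],
                mac[5]);
  return buffer.data();
}

int main() {
  const uint8_t mac[6] = {0xA4, 0xC1, 0x38, 0x00, 0x12, 0x34};
  char mac_s[18];  // stack buffer, same pattern as the dump_config() call sites
  std::printf("MAC: %s\n", format_mac_str(mac, mac_s));
  return 0;
}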

View File

@@ -52,7 +52,15 @@ from .schemas import (
from .styles import add_top_layer, styles_to_code, theme_to_code
from .touchscreens import touchscreen_schema, touchscreens_to_code
from .trigger import add_on_boot_triggers, generate_triggers
from .types import IdleTrigger, PlainTrigger, lv_font_t, lv_group_t, lv_style_t, lvgl_ns
from .types import (
FontEngine,
IdleTrigger,
PlainTrigger,
lv_font_t,
lv_group_t,
lv_style_t,
lvgl_ns,
)
from .widgets import (
LvScrActType,
Widget,
@@ -236,6 +244,7 @@ async def to_code(configs):
cg.add_global(lvgl_ns.using)
for font in helpers.esphome_fonts_used:
await cg.get_variable(font)
cg.new_Pvariable(ID(f"{font}_engine", True, type=FontEngine), MockObj(font))
default_font = config_0[df.CONF_DEFAULT_FONT]
if not lvalid.is_lv_font(default_font):
add_define(
@@ -247,8 +256,7 @@ async def to_code(configs):
type=lv_font_t.operator("ptr").operator("const"),
)
cg.new_variable(
globfont_id,
MockObj(await lvalid.lv_font.process(default_font), "->").get_lv_font(),
globfont_id, MockObj(await lvalid.lv_font.process(default_font))
)
add_define("LV_FONT_DEFAULT", df.DEFAULT_ESPHOME_FONT)
else:

View File

@@ -0,0 +1,76 @@
#include "lvgl_esphome.h"
#ifdef USE_LVGL_FONT
namespace esphome {
namespace lvgl {
static const uint8_t *get_glyph_bitmap(const lv_font_t *font, uint32_t unicode_letter) {
auto *fe = (FontEngine *) font->dsc;
const auto *gd = fe->get_glyph_data(unicode_letter);
if (gd == nullptr)
return nullptr;
// esph_log_d(TAG, "Returning bitmap @ %X", (uint32_t)gd->data);
return gd->data;
}
static bool get_glyph_dsc_cb(const lv_font_t *font, lv_font_glyph_dsc_t *dsc, uint32_t unicode_letter, uint32_t next) {
auto *fe = (FontEngine *) font->dsc;
const auto *gd = fe->get_glyph_data(unicode_letter);
if (gd == nullptr)
return false;
dsc->adv_w = gd->advance;
dsc->ofs_x = gd->offset_x;
dsc->ofs_y = fe->height - gd->height - gd->offset_y - fe->baseline;
dsc->box_w = gd->width;
dsc->box_h = gd->height;
dsc->is_placeholder = 0;
dsc->bpp = fe->bpp;
return true;
}
FontEngine::FontEngine(font::Font *esp_font) : font_(esp_font) {
this->bpp = esp_font->get_bpp();
this->lv_font_.dsc = this;
this->lv_font_.line_height = this->height = esp_font->get_height();
this->lv_font_.base_line = this->baseline = this->lv_font_.line_height - esp_font->get_baseline();
this->lv_font_.get_glyph_dsc = get_glyph_dsc_cb;
this->lv_font_.get_glyph_bitmap = get_glyph_bitmap;
this->lv_font_.subpx = LV_FONT_SUBPX_NONE;
this->lv_font_.underline_position = -1;
this->lv_font_.underline_thickness = 1;
}
const lv_font_t *FontEngine::get_lv_font() { return &this->lv_font_; }
const font::Glyph *FontEngine::get_glyph_data(uint32_t unicode_letter) {
if (unicode_letter == last_letter_)
return this->last_data_;
uint8_t unicode[5];
memset(unicode, 0, sizeof unicode);
if (unicode_letter > 0xFFFF) {
unicode[0] = 0xF0 + ((unicode_letter >> 18) & 0x7);
unicode[1] = 0x80 + ((unicode_letter >> 12) & 0x3F);
unicode[2] = 0x80 + ((unicode_letter >> 6) & 0x3F);
unicode[3] = 0x80 + (unicode_letter & 0x3F);
} else if (unicode_letter > 0x7FF) {
unicode[0] = 0xE0 + ((unicode_letter >> 12) & 0xF);
unicode[1] = 0x80 + ((unicode_letter >> 6) & 0x3F);
unicode[2] = 0x80 + (unicode_letter & 0x3F);
} else if (unicode_letter > 0x7F) {
unicode[0] = 0xC0 + ((unicode_letter >> 6) & 0x1F);
unicode[1] = 0x80 + (unicode_letter & 0x3F);
} else {
unicode[0] = unicode_letter;
}
int match_length;
int glyph_n = this->font_->match_next_glyph(unicode, &match_length);
if (glyph_n < 0)
return nullptr;
this->last_data_ = &this->font_->get_glyphs()[glyph_n];
this->last_letter_ = unicode_letter;
return this->last_data_;
}
} // namespace lvgl
} // namespace esphome
#endif // USE_LVGL_FONT


View File

@@ -493,7 +493,6 @@ class LvFont(LValidator):
return LV_FONTS
if is_lv_font(value):
return lv_builtin_font(value)
add_lv_use("font")
fontval = cv.use_id(Font)(value)
esphome_fonts_used.add(fontval)
return requires_component("font")(fontval)
@@ -503,9 +502,7 @@ class LvFont(LValidator):
async def process(self, value, args=()):
if is_lv_font(value):
return literal(f"&lv_font_{value}")
if isinstance(value, str):
return literal(f"{value}")
return await super().process(value, args)
return literal(f"{value}_engine->get_lv_font()")
lv_font = LvFont()

View File

@@ -50,14 +50,6 @@ static const display::ColorBitness LV_BITNESS = display::ColorBitness::COLOR_BIT
static const display::ColorBitness LV_BITNESS = display::ColorBitness::COLOR_BITNESS_332;
#endif // LV_COLOR_DEPTH
#ifdef USE_LVGL_FONT
inline void lv_obj_set_style_text_font(lv_obj_t *obj, const font::Font *font, lv_style_selector_t part) {
lv_obj_set_style_text_font(obj, font->get_lv_font(), part);
}
inline void lv_style_set_text_font(lv_style_t *style, const font::Font *font) {
lv_style_set_text_font(style, font->get_lv_font());
}
#endif
#ifdef USE_LVGL_IMAGE
// Shortcut / overload, so that the source of an image can easily be updated
// from within a lambda.
@@ -142,6 +134,24 @@ template<typename... Ts> class ObjUpdateAction : public Action<Ts...> {
protected:
std::function<void(Ts...)> lamb_;
};
#ifdef USE_LVGL_FONT
class FontEngine {
public:
FontEngine(font::Font *esp_font);
const lv_font_t *get_lv_font();
const font::Glyph *get_glyph_data(uint32_t unicode_letter);
uint16_t baseline{};
uint16_t height{};
uint8_t bpp{};
protected:
font::Font *font_{};
uint32_t last_letter_{};
const font::Glyph *last_data_{};
lv_font_t lv_font_{};
};
#endif // USE_LVGL_FONT
#ifdef USE_LVGL_ANIMIMG
void lv_animimg_stop(lv_obj_t *obj);
#endif // USE_LVGL_ANIMIMG

View File

@@ -45,6 +45,7 @@ lv_coord_t = cg.global_ns.namespace("lv_coord_t")
lv_event_code_t = cg.global_ns.enum("lv_event_code_t")
lv_indev_type_t = cg.global_ns.enum("lv_indev_type_t")
lv_key_t = cg.global_ns.enum("lv_key_t")
FontEngine = lvgl_ns.class_("FontEngine")
PlainTrigger = esphome_ns.class_("Trigger<>", automation.Trigger.template())
DrawEndTrigger = esphome_ns.class_(
"Trigger<uint32_t, uint32_t>", automation.Trigger.template(cg.uint32, cg.uint32)

View File

@@ -1,7 +1,6 @@
from esphome import automation
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_RANGE_FROM, CONF_RANGE_TO, CONF_STEP, CONF_VALUE
from esphome.cpp_generator import MockObj
from ..automation import action_to_code
from ..defines import (
@@ -115,9 +114,7 @@ class SpinboxType(WidgetType):
w.obj, digits, digits - config[CONF_DECIMAL_PLACES]
)
if (value := config.get(CONF_VALUE)) is not None:
lv.spinbox_set_value(
w.obj, MockObj(await lv_float.process(value)) * w.get_scale()
)
lv.spinbox_set_value(w.obj, await lv_float.process(value))
def get_scale(self, config):
return 10 ** config[CONF_DECIMAL_PLACES]

View File

@@ -119,7 +119,7 @@ void MDNSComponent::compile_records_(StaticVector<MDNSService, MDNS_SERVICE_COUN
MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION, "api_encryption");
MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION_SUPPORTED, "api_encryption_supported");
MDNS_STATIC_CONST_CHAR(NOISE_ENCRYPTION, "Noise_NNpsk0_25519_ChaChaPoly_SHA256");
bool has_psk = api::global_api_server->get_noise_ctx().has_psk();
bool has_psk = api::global_api_server->get_noise_ctx()->has_psk();
const char *encryption_key = has_psk ? TXT_API_ENCRYPTION : TXT_API_ENCRYPTION_SUPPORTED;
txt_records.push_back({MDNS_STR(encryption_key), MDNS_STR(NOISE_ENCRYPTION)});
#endif
@@ -135,7 +135,8 @@ void MDNSComponent::compile_records_(StaticVector<MDNSService, MDNS_SERVICE_COUN
#ifdef USE_DASHBOARD_IMPORT
MDNS_STATIC_CONST_CHAR(TXT_PACKAGE_IMPORT_URL, "package_import_url");
txt_records.push_back({MDNS_STR(TXT_PACKAGE_IMPORT_URL), MDNS_STR(dashboard_import::get_package_import_url())});
txt_records.push_back(
{MDNS_STR(TXT_PACKAGE_IMPORT_URL), MDNS_STR(dashboard_import::get_package_import_url().c_str())});
#endif
}
#endif // USE_API

View File

@@ -350,7 +350,6 @@ void MipiRgb::dump_config() {
"\n Width: %u"
"\n Height: %u"
"\n Rotation: %d degrees"
"\n PCLK Inverted: %s"
"\n HSync Pulse Width: %u"
"\n HSync Back Porch: %u"
"\n HSync Front Porch: %u"
@@ -358,18 +357,18 @@ void MipiRgb::dump_config() {
"\n VSync Back Porch: %u"
"\n VSync Front Porch: %u"
"\n Invert Colors: %s"
"\n Pixel Clock: %uMHz"
"\n Pixel Clock: %dMHz"
"\n Reset Pin: %s"
"\n DE Pin: %s"
"\n PCLK Pin: %s"
"\n HSYNC Pin: %s"
"\n VSYNC Pin: %s",
this->model_, this->width_, this->height_, this->rotation_, YESNO(this->pclk_inverted_),
this->hsync_pulse_width_, this->hsync_back_porch_, this->hsync_front_porch_, this->vsync_pulse_width_,
this->vsync_back_porch_, this->vsync_front_porch_, YESNO(this->invert_colors_),
(unsigned) (this->pclk_frequency_ / 1000000), get_pin_name(this->reset_pin_).c_str(),
get_pin_name(this->de_pin_).c_str(), get_pin_name(this->pclk_pin_).c_str(),
get_pin_name(this->hsync_pin_).c_str(), get_pin_name(this->vsync_pin_).c_str());
this->model_, this->width_, this->height_, this->rotation_, this->hsync_pulse_width_,
this->hsync_back_porch_, this->hsync_front_porch_, this->vsync_pulse_width_, this->vsync_back_porch_,
this->vsync_front_porch_, YESNO(this->invert_colors_), this->pclk_frequency_ / 1000000,
get_pin_name(this->reset_pin_).c_str(), get_pin_name(this->de_pin_).c_str(),
get_pin_name(this->pclk_pin_).c_str(), get_pin_name(this->hsync_pin_).c_str(),
get_pin_name(this->vsync_pin_).c_str());
if (this->madctl_ & MADCTL_BGR) {
this->dump_pins_(8, 13, "Blue", 0);

View File

@@ -11,7 +11,6 @@ st7701s.extend(
vsync_pin=17,
pclk_pin=21,
pclk_frequency="12MHz",
pclk_inverted=False,
pixel_mode="18bit",
mirror_x=True,
mirror_y=True,

View File

@@ -140,7 +140,7 @@ void MQTTClientComponent::send_device_info_() {
#endif
#ifdef USE_API_NOISE
root[api::global_api_server->get_noise_ctx().has_psk() ? "api_encryption" : "api_encryption_supported"] =
root[api::global_api_server->get_noise_ctx()->has_psk() ? "api_encryption" : "api_encryption_supported"] =
"Noise_NNpsk0_25519_ChaChaPoly_SHA256";
#endif
},

View File

@@ -1,7 +1,8 @@
#include "automation.h"
#include "esphome/core/log.h"
namespace esphome::number {
namespace esphome {
namespace number {
static const char *const TAG = "number.automation";
@@ -51,4 +52,5 @@ void ValueRangeTrigger::on_state_(float state) {
this->rtc_.save(&in_range);
}
} // namespace esphome::number
} // namespace number
} // namespace esphome

View File

@@ -4,7 +4,8 @@
#include "esphome/core/automation.h"
#include "esphome/core/component.h"
namespace esphome::number {
namespace esphome {
namespace number {
class NumberStateTrigger : public Trigger<float> {
public:
@@ -90,4 +91,5 @@ template<typename... Ts> class NumberInRangeCondition : public Condition<Ts...>
float max_{NAN};
};
} // namespace esphome::number
} // namespace number
} // namespace esphome

View File

@@ -3,7 +3,8 @@
#include "esphome/core/controller_registry.h"
#include "esphome/core/log.h"
namespace esphome::number {
namespace esphome {
namespace number {
static const char *const TAG = "number";
@@ -42,4 +43,5 @@ void Number::add_on_state_callback(std::function<void(float)> &&callback) {
this->state_callback_.add(std::move(callback));
}
} // namespace esphome::number
} // namespace number
} // namespace esphome

View File

@@ -6,7 +6,8 @@
#include "number_call.h"
#include "number_traits.h"
namespace esphome::number {
namespace esphome {
namespace number {
class Number;
void log_number(const char *tag, const char *prefix, const char *type, Number *obj);
@@ -52,4 +53,5 @@ class Number : public EntityBase {
CallbackManager<void(float)> state_callback_;
};
} // namespace esphome::number
} // namespace number
} // namespace esphome

View File

@@ -2,7 +2,8 @@
#include "number.h"
#include "esphome/core/log.h"
namespace esphome::number {
namespace esphome {
namespace number {
static const char *const TAG = "number";
@@ -124,4 +125,5 @@ void NumberCall::perform() {
this->parent_->control(target_value);
}
} // namespace esphome::number
} // namespace number
} // namespace esphome

View File

@@ -4,7 +4,8 @@
#include "esphome/core/log.h"
#include "number_traits.h"
namespace esphome::number {
namespace esphome {
namespace number {
class Number;
@@ -43,4 +44,5 @@ class NumberCall {
bool cycle_;
};
} // namespace esphome::number
} // namespace number
} // namespace esphome

View File

@@ -1,8 +1,10 @@
#include "esphome/core/log.h"
#include "number_traits.h"
namespace esphome::number {
namespace esphome {
namespace number {
static const char *const TAG = "number";
} // namespace esphome::number
} // namespace number
} // namespace esphome

View File

@@ -3,7 +3,8 @@
#include "esphome/core/entity_base.h"
#include "esphome/core/helpers.h"
namespace esphome::number {
namespace esphome {
namespace number {
enum NumberMode : uint8_t {
NUMBER_MODE_AUTO = 0,
@@ -34,4 +35,5 @@ class NumberTraits : public EntityBase_DeviceClass, public EntityBase_UnitOfMeas
NumberMode mode_{NUMBER_MODE_AUTO};
};
} // namespace esphome::number
} // namespace number
} // namespace esphome

View File

@@ -73,17 +73,17 @@ void SFA30Component::update() {
}
if (this->formaldehyde_sensor_ != nullptr) {
const float formaldehyde = static_cast<int16_t>(raw_data[0]) / 5.0f;
const float formaldehyde = raw_data[0] / 5.0f;
this->formaldehyde_sensor_->publish_state(formaldehyde);
}
if (this->humidity_sensor_ != nullptr) {
const float humidity = static_cast<int16_t>(raw_data[1]) / 100.0f;
const float humidity = raw_data[1] / 100.0f;
this->humidity_sensor_->publish_state(humidity);
}
if (this->temperature_sensor_ != nullptr) {
const float temperature = static_cast<int16_t>(raw_data[2]) / 200.0f;
const float temperature = raw_data[2] / 200.0f;
this->temperature_sensor_->publish_state(temperature);
}
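
Illustrative only: why the static_cast<int16_t> shown in one variant above matters when the sensor word is held in an unsigned buffer (raw_data is assumed here to be uint16_t). The temperature word is taken to be a signed two's-complement value at the 1/200 scale used in the hunk.

#include <cstdint>
#include <cstdio>

int main() {
  // A temperature of -5.00 C at a 1/200 scale arrives as -1000, i.e. 0xFC18 in the raw register word.
  uint16_t raw = 0xFC18;
  float without_cast = raw / 200.0f;                     // 322.68 C -- unsigned interpretation, wrong
  float with_cast = static_cast<int16_t>(raw) / 200.0f;  // -5.00 C  -- two's-complement reinterpretation
  std::printf("without cast: %.2f C, with cast: %.2f C\n", without_cast, with_cast);
  return 0;
}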

View File

@@ -1,14 +1,9 @@
import logging
import esphome.codegen as cg
from esphome.components import time as time_
from esphome.config_helpers import merge_config
import esphome.config_validation as cv
from esphome.const import (
CONF_ID,
CONF_PLATFORM,
CONF_SERVERS,
CONF_TIME,
PLATFORM_BK72XX,
PLATFORM_ESP32,
PLATFORM_ESP8266,
@@ -17,74 +12,13 @@ from esphome.const import (
PLATFORM_RTL87XX,
)
from esphome.core import CORE
import esphome.final_validate as fv
from esphome.types import ConfigType
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ["network"]
CONF_SNTP = "sntp"
sntp_ns = cg.esphome_ns.namespace("sntp")
SNTPComponent = sntp_ns.class_("SNTPComponent", time_.RealTimeClock)
DEFAULT_SERVERS = ["0.pool.ntp.org", "1.pool.ntp.org", "2.pool.ntp.org"]
def _sntp_final_validate(config: ConfigType) -> None:
"""Merge multiple SNTP instances into one, similar to OTA merging behavior."""
full_conf = fv.full_config.get()
time_confs = full_conf.get(CONF_TIME, [])
sntp_configs: list[ConfigType] = []
other_time_configs: list[ConfigType] = []
for time_conf in time_confs:
if time_conf.get(CONF_PLATFORM) == CONF_SNTP:
sntp_configs.append(time_conf)
else:
other_time_configs.append(time_conf)
if len(sntp_configs) <= 1:
return
# Merge all SNTP configs into the first one
merged = sntp_configs[0]
for sntp_conf in sntp_configs[1:]:
# Validate that IDs are consistent if manually specified
if merged[CONF_ID].is_manual and sntp_conf[CONF_ID].is_manual:
raise cv.Invalid(
f"Found multiple SNTP configurations but {CONF_ID} is inconsistent"
)
merged = merge_config(merged, sntp_conf)
# Deduplicate servers while preserving order
servers = merged[CONF_SERVERS]
unique_servers = list(dict.fromkeys(servers))
# Warn if we're dropping servers due to 3-server limit
if len(unique_servers) > 3:
dropped = unique_servers[3:]
unique_servers = unique_servers[:3]
_LOGGER.warning(
"SNTP supports maximum 3 servers. Dropped excess server(s): %s",
dropped,
)
merged[CONF_SERVERS] = unique_servers
_LOGGER.warning(
"Found and merged %d SNTP time configurations into one instance",
len(sntp_configs),
)
# Replace time configs with merged SNTP + other time platforms
other_time_configs.append(merged)
full_conf[CONF_TIME] = other_time_configs
fv.full_config.set(full_conf)
CONFIG_SCHEMA = cv.All(
time_.TIME_SCHEMA.extend(
{
@@ -106,8 +40,6 @@ CONFIG_SCHEMA = cv.All(
),
)
FINAL_VALIDATE_SCHEMA = _sntp_final_validate
async def to_code(config):
servers = config[CONF_SERVERS]


@@ -56,19 +56,11 @@ uint32_t ESP8266UartComponent::get_config() {
}
void ESP8266UartComponent::setup() {
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
if (!pin) {
return;
}
const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
pin->setup();
}
};
setup_pin_if_needed(this->rx_pin_);
if (this->rx_pin_ != this->tx_pin_) {
setup_pin_if_needed(this->tx_pin_);
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
// Use Arduino HardwareSerial UARTs if all used pins match the ones


@@ -133,19 +133,11 @@ void IDFUARTComponent::load_settings(bool dump_config) {
return;
}
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
if (!pin) {
return;
}
const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
pin->setup();
}
};
setup_pin_if_needed(this->rx_pin_);
if (this->rx_pin_ != this->tx_pin_) {
setup_pin_if_needed(this->tx_pin_);
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;


@@ -53,7 +53,7 @@ void LibreTinyUARTComponent::setup() {
auto shouldFallbackToSoftwareSerial = [&]() -> bool {
auto hasFlags = [](InternalGPIOPin *pin, const gpio::Flags mask) -> bool {
return pin && (pin->get_flags() & mask) != gpio::Flags::FLAG_NONE;
return pin && pin->get_flags() & mask != gpio::Flags::FLAG_NONE;
};
if (hasFlags(this->tx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN) ||
hasFlags(this->rx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN)) {
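On the hasFlags lambda above: the two return statements differ only in parentheses. In C++, != binds tighter than &, so the unparenthesised form computes mask != gpio::Flags::FLAG_NONE first and then ANDs the pin flags with that boolean; most compilers flag this with -Wparentheses. A self-contained illustration using plain unsigned values (the names here are made up):

#include <cstdio>

int main() {
  unsigned flags = 0x2;  // say, only the pull-up bit is set
  unsigned mask = 0x7;   // open-drain | pull-up | pull-down
  // Parses as flags & (mask != 0): mask != 0 is true (1), and 0x2 & 1 == 0.
  bool without_parens = flags & mask != 0;
  // Intended meaning: is any masked bit set?
  bool with_parens = (flags & mask) != 0;
  std::printf("%d %d\n", without_parens, with_parens);  // prints "0 1"
  return 0;
}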


@@ -52,19 +52,11 @@ uint16_t RP2040UartComponent::get_config() {
}
void RP2040UartComponent::setup() {
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
if (!pin) {
return;
}
const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
pin->setup();
}
};
setup_pin_if_needed(this->rx_pin_);
if (this->rx_pin_ != this->tx_pin_) {
setup_pin_if_needed(this->tx_pin_);
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
uint16_t config = get_config();


@@ -1,17 +1,10 @@
import logging
import esphome.codegen as cg
from esphome.components.esp32 import add_idf_component
from esphome.components.ota import BASE_OTA_SCHEMA, OTAComponent, ota_to_code
from esphome.config_helpers import merge_config
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_OTA, CONF_PLATFORM, CONF_WEB_SERVER
from esphome.const import CONF_ID
from esphome.core import CORE, coroutine_with_priority
from esphome.coroutine import CoroPriority
import esphome.final_validate as fv
from esphome.types import ConfigType
_LOGGER = logging.getLogger(__name__)
CODEOWNERS = ["@esphome/core"]
DEPENDENCIES = ["network", "web_server_base"]
@@ -19,53 +12,6 @@ DEPENDENCIES = ["network", "web_server_base"]
web_server_ns = cg.esphome_ns.namespace("web_server")
WebServerOTAComponent = web_server_ns.class_("WebServerOTAComponent", OTAComponent)
def _web_server_ota_final_validate(config: ConfigType) -> None:
"""Merge multiple web_server OTA instances into one.
Multiple web_server OTA instances register duplicate HTTP handlers for /update,
causing undefined behavior. Merge them into a single instance.
"""
full_conf = fv.full_config.get()
ota_confs = full_conf.get(CONF_OTA, [])
web_server_ota_configs: list[ConfigType] = []
other_ota_configs: list[ConfigType] = []
for ota_conf in ota_confs:
if ota_conf.get(CONF_PLATFORM) == CONF_WEB_SERVER:
web_server_ota_configs.append(ota_conf)
else:
other_ota_configs.append(ota_conf)
if len(web_server_ota_configs) <= 1:
return
# Merge all web_server OTA configs into the first one
merged = web_server_ota_configs[0]
for ota_conf in web_server_ota_configs[1:]:
# Validate that IDs are consistent if manually specified
if (
merged[CONF_ID].is_manual
and ota_conf[CONF_ID].is_manual
and merged[CONF_ID] != ota_conf[CONF_ID]
):
raise cv.Invalid(
f"Found multiple web_server OTA configurations but {CONF_ID} is inconsistent"
)
merged = merge_config(merged, ota_conf)
_LOGGER.warning(
"Found and merged %d web_server OTA configurations into one instance",
len(web_server_ota_configs),
)
# Replace OTA configs with merged web_server + other OTA platforms
other_ota_configs.append(merged)
full_conf[CONF_OTA] = other_ota_configs
fv.full_config.set(full_conf)
CONFIG_SCHEMA = (
cv.Schema(
{
@@ -76,8 +22,6 @@ CONFIG_SCHEMA = (
.extend(cv.COMPONENT_SCHEMA)
)
FINAL_VALIDATE_SCHEMA = _web_server_ota_final_validate
@coroutine_with_priority(CoroPriority.WEB_SERVER_OTA)
async def to_code(config):


@@ -94,18 +94,6 @@ void AsyncWebServer::end() {
}
}
void AsyncWebServer::set_lru_purge_enable(bool enable) {
if (this->lru_purge_enable_ == enable) {
return; // No change needed
}
this->lru_purge_enable_ = enable;
// If server is already running, restart it with new config
if (this->server_) {
this->end();
this->begin();
}
}
void AsyncWebServer::begin() {
if (this->server_) {
this->end();
@@ -113,8 +101,6 @@ void AsyncWebServer::begin() {
httpd_config_t config = HTTPD_DEFAULT_CONFIG();
config.server_port = this->port_;
config.uri_match_fn = [](const char * /*unused*/, const char * /*unused*/, size_t /*unused*/) { return true; };
// Enable LRU purging if requested (e.g., by captive portal to handle probe bursts)
config.lru_purge_enable = this->lru_purge_enable_;
if (httpd_start(&this->server_, &config) == ESP_OK) {
const httpd_uri_t handler_get = {
.uri = "",
@@ -256,7 +242,6 @@ void AsyncWebServerRequest::send(int code, const char *content_type, const char
void AsyncWebServerRequest::redirect(const std::string &url) {
httpd_resp_set_status(*this, "302 Found");
httpd_resp_set_hdr(*this, "Location", url.c_str());
httpd_resp_set_hdr(*this, "Connection", "close");
httpd_resp_send(*this, nullptr, 0);
}
@@ -504,18 +489,10 @@ AsyncEventSourceResponse::AsyncEventSourceResponse(const AsyncWebServerRequest *
void AsyncEventSourceResponse::destroy(void *ptr) {
auto *rsp = static_cast<AsyncEventSourceResponse *>(ptr);
int fd = rsp->fd_.exchange(0); // Atomically get and clear fd
if (fd > 0) {
ESP_LOGD(TAG, "Event source connection closed (fd: %d)", fd);
// Immediately shut down the socket to prevent lwIP from delivering more data
// This prevents "recv_tcp: recv for wrong pcb!" assertions when the TCP stack
// tries to deliver queued data after the session is marked as dead
// See: https://github.com/esphome/esphome/issues/11936
shutdown(fd, SHUT_RDWR);
// Note: We don't close() the socket - httpd owns it and will close it
}
// Session will be cleaned up in the main loop to avoid race conditions
ESP_LOGD(TAG, "Event source connection closed (fd: %d)", rsp->fd_.load());
// Mark as dead by setting fd to 0 - will be cleaned up in the main loop
rsp->fd_.store(0);
// Note: We don't delete or remove from set here to avoid race conditions
}
// helper for allowing only unique entries in the queue
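On the two destroy() variants above: one claims the descriptor with fd_.exchange(0) before shutting it down, the other only logs and stores 0. The exchange guarantees at most one caller ever sees the live fd, which matters if destroy() races with the main-loop cleanup mentioned in the comments. A reduced sketch of that claim-then-act pattern (types simplified, not the real class):

#include <atomic>
#include <cstdio>

std::atomic<int> fd{42};  // stand-in for AsyncEventSourceResponse::fd_

void destroy_once() {
  int claimed = fd.exchange(0);  // atomically take ownership and clear
  if (claimed > 0) {
    // The real code calls shutdown(claimed, SHUT_RDWR) here and lets httpd close the socket.
    std::printf("shutting down fd %d\n", claimed);
  }
}

int main() {
  destroy_once();  // acts on fd 42
  destroy_once();  // sees 0 and does nothing
  return 0;
}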


@@ -199,13 +199,9 @@ class AsyncWebServer {
return *handler;
}
void set_lru_purge_enable(bool enable);
httpd_handle_t get_server() { return this->server_; }
protected:
uint16_t port_{};
httpd_handle_t server_{};
bool lru_purge_enable_{false};
static esp_err_t request_handler(httpd_req_t *r);
static esp_err_t request_post_handler(httpd_req_t *r);
esp_err_t request_handler_(AsyncWebServerRequest *request) const;


@@ -69,12 +69,6 @@ CONF_MIN_AUTH_MODE = "min_auth_mode"
# Limited to 127 because selected_sta_index_ is int8_t in C++
MAX_WIFI_NETWORKS = 127
# Default AP timeout - allows sufficient time to try all BSSIDs during initial connection
# After AP starts, WiFi scanning is skipped to avoid disrupting the AP, so we only
# get best-effort connection attempts. Longer timeout ensures we exhaust all options
# before falling back to AP mode. Aligned with improv wifi_timeout default.
DEFAULT_AP_TIMEOUT = "90s"
wifi_ns = cg.esphome_ns.namespace("wifi")
EAPAuth = wifi_ns.struct("EAPAuth")
ManualIP = wifi_ns.struct("ManualIP")
@@ -183,7 +177,7 @@ CONF_AP_TIMEOUT = "ap_timeout"
WIFI_NETWORK_AP = WIFI_NETWORK_BASE.extend(
{
cv.Optional(
CONF_AP_TIMEOUT, default=DEFAULT_AP_TIMEOUT
CONF_AP_TIMEOUT, default="1min"
): cv.positive_time_period_milliseconds,
}
)
@@ -485,14 +479,11 @@ async def to_code(config):
cg.add(var.set_min_auth_mode(config[CONF_MIN_AUTH_MODE]))
if config[CONF_FAST_CONNECT]:
cg.add_define("USE_WIFI_FAST_CONNECT")
# passive_scan defaults to false in C++ - only set if true
if config[CONF_PASSIVE_SCAN]:
cg.add(var.set_passive_scan(True))
cg.add(var.set_passive_scan(config[CONF_PASSIVE_SCAN]))
if CONF_OUTPUT_POWER in config:
cg.add(var.set_output_power(config[CONF_OUTPUT_POWER]))
# enable_on_boot defaults to true in C++ - only set if false
if not config[CONF_ENABLE_ON_BOOT]:
cg.add(var.set_enable_on_boot(False))
cg.add(var.set_enable_on_boot(config[CONF_ENABLE_ON_BOOT]))
if CORE.is_esp8266:
cg.add_library("ESP8266WiFi", None)


@@ -199,12 +199,7 @@ static constexpr uint8_t WIFI_RETRY_COUNT_PER_AP = 1;
/// Cooldown duration in milliseconds after adapter restart or repeated failures
/// Allows WiFi hardware to stabilize before next connection attempt
static constexpr uint32_t WIFI_COOLDOWN_DURATION_MS = 500;
/// Cooldown duration when fallback AP is active and captive portal may be running
/// Longer interval gives users time to configure WiFi without constant connection attempts
/// While connecting, WiFi can't beacon the AP properly, so needs longer cooldown
static constexpr uint32_t WIFI_COOLDOWN_WITH_AP_ACTIVE_MS = 30000;
static constexpr uint32_t WIFI_COOLDOWN_DURATION_MS = 1000;
static constexpr uint8_t get_max_retries_for_phase(WiFiRetryPhase phase) {
switch (phase) {
@@ -280,9 +275,7 @@ int8_t WiFiComponent::find_next_hidden_sta_(int8_t start_index) {
}
}
// If we didn't scan this cycle, treat all networks as potentially hidden
// Otherwise, only retry networks that weren't seen in the scan
if (!this->did_scan_this_cycle_ || !this->ssid_was_seen_in_scan_(sta.get_ssid())) {
if (!this->ssid_was_seen_in_scan_(sta.get_ssid())) {
ESP_LOGD(TAG, "Hidden candidate " LOG_SECRET("'%s'") " at index %d", sta.get_ssid().c_str(), static_cast<int>(i));
return static_cast<int8_t>(i);
}
@@ -424,6 +417,10 @@ void WiFiComponent::start() {
void WiFiComponent::restart_adapter() {
ESP_LOGW(TAG, "Restarting adapter");
this->wifi_mode_(false, {});
// Enter cooldown state to allow WiFi hardware to stabilize after restart
// Don't set retry_phase_ or num_retried_ here - state machine handles transitions
this->state_ = WIFI_COMPONENT_STATE_COOLDOWN;
this->action_started_ = millis();
this->error_from_callback_ = false;
}
@@ -444,16 +441,7 @@ void WiFiComponent::loop() {
switch (this->state_) {
case WIFI_COMPONENT_STATE_COOLDOWN: {
this->status_set_warning(LOG_STR("waiting to reconnect"));
// Skip cooldown if new credentials were provided while connecting
if (this->skip_cooldown_next_cycle_) {
this->skip_cooldown_next_cycle_ = false;
this->check_connecting_finished();
break;
}
// Use longer cooldown when captive portal/improv is active to avoid disrupting user config
bool portal_active = this->is_captive_portal_active_() || this->is_esp32_improv_active_();
uint32_t cooldown_duration = portal_active ? WIFI_COOLDOWN_WITH_AP_ACTIVE_MS : WIFI_COOLDOWN_DURATION_MS;
if (now - this->action_started_ > cooldown_duration) {
if (now - this->action_started_ > WIFI_COOLDOWN_DURATION_MS) {
// After cooldown we either restarted the adapter because of
// a failure, or something tried to connect over and over
// so we entered cooldown. In both cases we call
@@ -507,8 +495,7 @@ void WiFiComponent::loop() {
#endif // USE_WIFI_AP
#ifdef USE_IMPROV
if (esp32_improv::global_improv_component != nullptr && !esp32_improv::global_improv_component->is_active() &&
!esp32_improv::global_improv_component->should_start()) {
if (esp32_improv::global_improv_component != nullptr && !esp32_improv::global_improv_component->is_active()) {
if (now - this->last_connected_ > esp32_improv::global_improv_component->get_wifi_timeout()) {
if (this->wifi_mode_(true, {}))
esp32_improv::global_improv_component->start();
@@ -618,8 +605,6 @@ void WiFiComponent::set_sta(const WiFiAP &ap) {
this->init_sta(1);
this->add_sta(ap);
this->selected_sta_index_ = 0;
// When new credentials are set (e.g., from improv), skip cooldown to retry immediately
this->skip_cooldown_next_cycle_ = true;
}
WiFiAP WiFiComponent::build_params_for_current_phase_() {
@@ -681,17 +666,6 @@ void WiFiComponent::save_wifi_sta(const std::string &ssid, const std::string &pa
sta.set_ssid(ssid);
sta.set_password(password);
this->set_sta(sta);
// Trigger connection attempt (exits cooldown if needed, no-op if already connecting/connected)
this->connect_soon_();
}
void WiFiComponent::connect_soon_() {
// Only trigger retry if we're in cooldown - if already connecting/connected, do nothing
if (this->state_ == WIFI_COMPONENT_STATE_COOLDOWN) {
ESP_LOGD(TAG, "Exiting cooldown early due to new WiFi credentials");
this->retry_connect();
}
}
void WiFiComponent::start_connecting(const WiFiAP &ap) {
@@ -989,7 +963,6 @@ void WiFiComponent::check_scanning_finished() {
return;
}
this->scan_done_ = false;
this->did_scan_this_cycle_ = true;
if (this->scan_result_.empty()) {
ESP_LOGW(TAG, "No networks found");
@@ -1256,16 +1229,9 @@ WiFiRetryPhase WiFiComponent::determine_next_phase_() {
return WiFiRetryPhase::RESTARTING_ADAPTER;
case WiFiRetryPhase::RESTARTING_ADAPTER:
// After restart, go back to explicit hidden if we went through it initially
if (this->went_through_explicit_hidden_phase_()) {
return WiFiRetryPhase::EXPLICIT_HIDDEN;
}
// Skip scanning when captive portal/improv is active to avoid disrupting AP
// Even passive scans can cause brief AP disconnections on ESP32
if (this->is_captive_portal_active_() || this->is_esp32_improv_active_()) {
return WiFiRetryPhase::RETRY_HIDDEN;
}
return WiFiRetryPhase::SCAN_CONNECTING;
// After restart, go back to explicit hidden if we went through it initially, otherwise scan
return this->went_through_explicit_hidden_phase_() ? WiFiRetryPhase::EXPLICIT_HIDDEN
: WiFiRetryPhase::SCAN_CONNECTING;
}
// Should never reach here
@@ -1353,12 +1319,6 @@ bool WiFiComponent::transition_to_phase_(WiFiRetryPhase new_phase) {
if (!this->is_captive_portal_active_() && !this->is_esp32_improv_active_()) {
this->restart_adapter();
}
// Clear scan flag - we're starting a new retry cycle
this->did_scan_this_cycle_ = false;
// Always enter cooldown after restart (or skip-restart) to allow stabilization
// Use extended cooldown when AP is active to avoid constant scanning that blocks DNS
this->state_ = WIFI_COMPONENT_STATE_COOLDOWN;
this->action_started_ = millis();
// Return true to indicate we should wait (go to COOLDOWN) instead of immediately connecting
return true;


@@ -291,7 +291,6 @@ class WiFiComponent : public Component {
void set_passive_scan(bool passive);
void save_wifi_sta(const std::string &ssid, const std::string &password);
// ========== INTERNAL METHODS ==========
// (In most use cases you won't need these)
/// Setup WiFi interface.
@@ -425,8 +424,6 @@ class WiFiComponent : public Component {
return true;
}
void connect_soon_();
void wifi_loop_();
bool wifi_mode_(optional<bool> sta, optional<bool> ap);
bool wifi_sta_pre_setup_();
@@ -529,11 +526,9 @@ class WiFiComponent : public Component {
bool btm_{false};
bool rrm_{false};
#endif
bool enable_on_boot_{true};
bool enable_on_boot_;
bool got_ipv4_address_{false};
bool keep_scan_results_{false};
bool did_scan_this_cycle_{false};
bool skip_cooldown_next_cycle_{false};
// Pointers at the end (naturally aligned)
Trigger<> *connect_trigger_{new Trigger<>()};
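The enable_on_boot_ lines above pair with the wifi __init__.py hunk earlier in this compare: if codegen only emits set_enable_on_boot(false) for non-default configs, the C++ member needs an in-class initializer so a default build still starts from a defined value. A small sketch of that pairing (class and names are illustrative only):

#include <cstdio>

class WifiSketch {
 public:
  void set_enable_on_boot(bool enable) { this->enable_on_boot_ = enable; }
  bool enable_on_boot() const { return this->enable_on_boot_; }

 private:
  bool enable_on_boot_{true};  // without this initializer, reading the default would be undefined
};

int main() {
  WifiSketch w;  // no setter call generated for the default case
  std::printf("%d\n", w.enable_on_boot());
  return 0;
}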


@@ -338,44 +338,21 @@ def check_replaceme(value):
)
def _get_item_id(item: Any) -> str | Extend | Remove | None:
"""Attempts to get a list item's ID"""
if not isinstance(item, dict):
return None # not a dict, can't have ID
# 1.- Check regular case:
# - id: my_id
item_id = item.get(CONF_ID)
if item_id is None and len(item) == 1:
# 2.- Check single-key dict case:
# - obj:
# id: my_id
item = next(iter(item.values()))
if isinstance(item, dict):
item_id = item.get(CONF_ID)
if isinstance(item_id, Extend):
# Remove instances of Extend so they don't overwrite the original item when merging:
del item[CONF_ID]
return item_id
def _build_list_index(
lst: list[Any],
) -> tuple[
OrderedDict[str | Extend | Remove, Any], list[tuple[int, str, Any]], set[str]
]:
def _build_list_index(lst):
index = OrderedDict()
extensions, removals = [], set()
for pos, item in enumerate(lst):
for item in lst:
if item is None:
removals.add(None)
continue
item_id = _get_item_id(item)
if isinstance(item_id, Extend):
extensions.append((pos, item_id.value, item))
continue
if isinstance(item_id, Remove):
removals.add(item_id.value)
continue
item_id = None
if isinstance(item, dict) and (item_id := item.get(CONF_ID)):
if isinstance(item_id, Extend):
extensions.append(item)
continue
if isinstance(item_id, Remove):
removals.add(item_id.value)
continue
if not item_id or item_id in index:
# no id or duplicate -> pass through with identity-based key
item_id = id(item)
@@ -383,7 +360,7 @@ def _build_list_index(
return index, extensions, removals
def resolve_extend_remove(value: Any, is_key: bool = False) -> None:
def resolve_extend_remove(value, is_key=None):
if isinstance(value, ESPLiteralValue):
return # do not check inside literal blocks
if isinstance(value, list):
@@ -391,16 +368,26 @@ def resolve_extend_remove(value: Any, is_key: bool = False) -> None:
if extensions or removals:
# Rebuild the original list after
# processing all extensions and removals
for pos, item_id, item in extensions:
for item in extensions:
item_id = item[CONF_ID].value
if item_id in removals:
continue
old = index.get(item_id)
if old is None:
# Failed to find source for extension
with cv.prepend_path(pos):
# Find index of item to show error at correct position
i = next(
(
i
for i, d in enumerate(value)
if d.get(CONF_ID) == item[CONF_ID]
)
)
with cv.prepend_path(i):
raise cv.Invalid(
f"Source for extension of ID '{item_id}' was not found."
)
item[CONF_ID] = item_id
index[item_id] = merge_config(old, item)
for item_id in removals:
index.pop(item_id, None)


@@ -336,7 +336,6 @@ CONF_ENERGY = "energy"
CONF_ENTITY_CATEGORY = "entity_category"
CONF_ENTITY_ID = "entity_id"
CONF_ENUM_DATAPOINT = "enum_datapoint"
CONF_ENVIRONMENT_VARIABLES = "environment_variables"
CONF_EQUATION = "equation"
CONF_ESP8266_DISABLE_SSL_SUPPORT = "esp8266_disable_ssl_support"
CONF_ESPHOME = "esphome"


@@ -17,7 +17,6 @@ from esphome.const import (
CONF_COMPILE_PROCESS_LIMIT,
CONF_DEBUG_SCHEDULER,
CONF_DEVICES,
CONF_ENVIRONMENT_VARIABLES,
CONF_ESPHOME,
CONF_FRIENDLY_NAME,
CONF_ID,
@@ -216,11 +215,6 @@ CONFIG_SCHEMA = cv.All(
cv.string_strict: cv.Any([cv.string], cv.string),
}
),
cv.Optional(CONF_ENVIRONMENT_VARIABLES, default={}): cv.Schema(
{
cv.string_strict: cv.string,
}
),
cv.Optional(CONF_ON_BOOT): automation.validate_automation(
{
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(StartupTrigger),
@@ -432,12 +426,6 @@ async def _add_platformio_options(pio_options):
cg.add_platformio_option(key, val)
@coroutine_with_priority(CoroPriority.FINAL)
async def _add_environment_variables(env_vars: dict[str, str]) -> None:
# Set environment variables for the build process
os.environ.update(env_vars)
@coroutine_with_priority(CoroPriority.AUTOMATION)
async def _add_automations(config):
for conf in config.get(CONF_ON_BOOT, []):
@@ -575,9 +563,6 @@ async def to_code(config: ConfigType) -> None:
if config[CONF_PLATFORMIO_OPTIONS]:
CORE.add_job(_add_platformio_options, config[CONF_PLATFORMIO_OPTIONS])
if config[CONF_ENVIRONMENT_VARIABLES]:
CORE.add_job(_add_environment_variables, config[CONF_ENVIRONMENT_VARIABLES])
# Process areas
all_areas: list[dict[str, str | core.ID]] = []
if CONF_AREA in config:


@@ -74,6 +74,12 @@ void EntityBase::set_object_id(const char *object_id) {
this->calc_object_id_();
}
void EntityBase::set_name_and_object_id(const char *name, const char *object_id) {
this->set_name(name);
this->object_id_c_str_ = object_id;
this->calc_object_id_();
}
// Calculate Object ID Hash from Entity Name
void EntityBase::calc_object_id_() {
this->object_id_hash_ =


@@ -41,6 +41,9 @@ class EntityBase {
std::string get_object_id() const;
void set_object_id(const char *object_id);
// Set both name and object_id in one call (reduces generated code size)
void set_name_and_object_id(const char *name, const char *object_id);
// Get the unique Object ID of this Entity
uint32_t get_object_id_hash();
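For readers skimming the entity_base hunks: the combined setter exists so generated main.cpp can emit one statement per entity instead of two, which is exactly what the updated component tests later in this compare assert (e.g. set_name_and_object_id("test bs1", "test_bs1")). A toy stand-in showing just the call-site shape:

#include <cstdio>

// Minimal stand-in, not the real esphome::EntityBase.
struct FakeEntity {
  const char *name_{nullptr};
  const char *object_id_{nullptr};
  void set_name(const char *name) { this->name_ = name; }
  void set_object_id(const char *object_id) { this->object_id_ = object_id; }
  void set_name_and_object_id(const char *name, const char *object_id) {
    this->set_name(name);
    this->set_object_id(object_id);
  }
};

int main() {
  FakeEntity bs_1;
  bs_1.set_name_and_object_id("test bs1", "test_bs1");  // replaces set_name(...) + set_object_id(...)
  std::printf("%s / %s\n", bs_1.name_, bs_1.object_id_);
  return 0;
}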


@@ -84,8 +84,6 @@ async def setup_entity(var: MockObj, config: ConfigType, platform: str) -> None:
# Get device name for object ID calculation
device_name = device_id_obj.id
add(var.set_name(config[CONF_NAME]))
# Calculate base object_id using the same logic as C++
# This must match the C++ behavior in esphome/core/entity_base.cpp
base_object_id = get_base_entity_object_id(
@@ -97,8 +95,8 @@ async def setup_entity(var: MockObj, config: ConfigType, platform: str) -> None:
"Entity has empty name, using '%s' as object_id base", base_object_id
)
# Set the object ID
add(var.set_object_id(base_object_id))
# Set both name and object_id in one call to reduce generated code size
add(var.set_name_and_object_id(config[CONF_NAME], base_object_id))
_LOGGER.debug(
"Setting object_id '%s' for entity '%s' on platform '%s'",
base_object_id,


@@ -154,8 +154,8 @@ void HOT Scheduler::set_timer_common_(Component *component, SchedulerItem::Type
// For retries, check if there's a cancelled timeout first
if (is_retry && name_cstr != nullptr && type == SchedulerItem::TIMEOUT &&
(has_cancelled_timeout_in_container_locked_(this->items_, component, name_cstr, /* match_retry= */ true) ||
has_cancelled_timeout_in_container_locked_(this->to_add_, component, name_cstr, /* match_retry= */ true))) {
(has_cancelled_timeout_in_container_(this->items_, component, name_cstr, /* match_retry= */ true) ||
has_cancelled_timeout_in_container_(this->to_add_, component, name_cstr, /* match_retry= */ true))) {
// Skip scheduling - the retry was cancelled
#ifdef ESPHOME_DEBUG_SCHEDULER
ESP_LOGD(TAG, "Skipping retry '%s' - found cancelled item", name_cstr);
@@ -556,8 +556,7 @@ bool HOT Scheduler::cancel_item_locked_(Component *component, const char *name_c
#ifndef ESPHOME_THREAD_SINGLE
// Mark items in defer queue as cancelled (they'll be skipped when processed)
if (type == SchedulerItem::TIMEOUT) {
total_cancelled +=
this->mark_matching_items_removed_locked_(this->defer_queue_, component, name_cstr, type, match_retry);
total_cancelled += this->mark_matching_items_removed_(this->defer_queue_, component, name_cstr, type, match_retry);
}
#endif /* not ESPHOME_THREAD_SINGLE */
@@ -566,20 +565,19 @@ bool HOT Scheduler::cancel_item_locked_(Component *component, const char *name_c
// (removing the last element doesn't break heap structure)
if (!this->items_.empty()) {
auto &last_item = this->items_.back();
if (this->matches_item_locked_(last_item, component, name_cstr, type, match_retry)) {
if (this->matches_item_(last_item, component, name_cstr, type, match_retry)) {
this->recycle_item_(std::move(this->items_.back()));
this->items_.pop_back();
total_cancelled++;
}
// For other items in heap, we can only mark for removal (can't remove from middle of heap)
size_t heap_cancelled =
this->mark_matching_items_removed_locked_(this->items_, component, name_cstr, type, match_retry);
size_t heap_cancelled = this->mark_matching_items_removed_(this->items_, component, name_cstr, type, match_retry);
total_cancelled += heap_cancelled;
this->to_remove_ += heap_cancelled; // Track removals for heap items
}
// Cancel items in to_add_
total_cancelled += this->mark_matching_items_removed_locked_(this->to_add_, component, name_cstr, type, match_retry);
total_cancelled += this->mark_matching_items_removed_(this->to_add_, component, name_cstr, type, match_retry);
return total_cancelled > 0;
}


@@ -243,18 +243,8 @@ class Scheduler {
}
// Helper function to check if item matches criteria for cancellation
// IMPORTANT: Must be called with scheduler lock held
inline bool HOT matches_item_locked_(const std::unique_ptr<SchedulerItem> &item, Component *component,
const char *name_cstr, SchedulerItem::Type type, bool match_retry,
bool skip_removed = true) const {
// THREAD SAFETY: Check for nullptr first to prevent LoadProhibited crashes. On multi-threaded
// platforms, items can be moved out of defer_queue_ during processing, leaving nullptr entries.
// PR #11305 added nullptr checks in callers (mark_matching_items_removed_locked_() and
// has_cancelled_timeout_in_container_locked_()), but this check provides defense-in-depth: helper
// functions should be safe regardless of caller behavior.
// Fixes: https://github.com/esphome/esphome/issues/11940
if (!item)
return false;
inline bool HOT matches_item_(const std::unique_ptr<SchedulerItem> &item, Component *component, const char *name_cstr,
SchedulerItem::Type type, bool match_retry, bool skip_removed = true) const {
if (item->component != component || item->type != type || (skip_removed && item->remove) ||
(match_retry && !item->is_retry)) {
return false;
@@ -314,8 +304,8 @@ class Scheduler {
// SAFETY: Moving out the unique_ptr leaves a nullptr in the vector at defer_queue_front_.
// This is intentional and safe because:
// 1. The vector is only cleaned up by cleanup_defer_queue_locked_() at the end of this function
// 2. Any code iterating defer_queue_ MUST check for nullptr items (see mark_matching_items_removed_locked_
// and has_cancelled_timeout_in_container_locked_ in scheduler.h)
// 2. Any code iterating defer_queue_ MUST check for nullptr items (see mark_matching_items_removed_
// and has_cancelled_timeout_in_container_ in scheduler.h)
// 3. The lock protects concurrent access, but the nullptr remains until cleanup
item = std::move(this->defer_queue_[this->defer_queue_front_]);
this->defer_queue_front_++;
@@ -403,10 +393,10 @@ class Scheduler {
// Helper to mark matching items in a container as removed
// Returns the number of items marked for removal
// IMPORTANT: Must be called with scheduler lock held
// IMPORTANT: Caller must hold the scheduler lock before calling this function.
template<typename Container>
size_t mark_matching_items_removed_locked_(Container &container, Component *component, const char *name_cstr,
SchedulerItem::Type type, bool match_retry) {
size_t mark_matching_items_removed_(Container &container, Component *component, const char *name_cstr,
SchedulerItem::Type type, bool match_retry) {
size_t count = 0;
for (auto &item : container) {
// Skip nullptr items (can happen in defer_queue_ when items are being processed)
@@ -415,7 +405,7 @@ class Scheduler {
// the vector can still contain nullptr items from the processing loop. This check prevents crashes.
if (!item)
continue;
if (this->matches_item_locked_(item, component, name_cstr, type, match_retry)) {
if (this->matches_item_(item, component, name_cstr, type, match_retry)) {
// Mark item for removal (platform-specific)
this->set_item_removed_(item.get(), true);
count++;
@@ -425,10 +415,9 @@ class Scheduler {
}
// Template helper to check if any item in a container matches our criteria
// IMPORTANT: Must be called with scheduler lock held
template<typename Container>
bool has_cancelled_timeout_in_container_locked_(const Container &container, Component *component,
const char *name_cstr, bool match_retry) const {
bool has_cancelled_timeout_in_container_(const Container &container, Component *component, const char *name_cstr,
bool match_retry) const {
for (const auto &item : container) {
// Skip nullptr items (can happen in defer_queue_ when items are being processed)
// The defer_queue_ uses index-based processing: items are std::moved out but left in the
@@ -437,8 +426,8 @@ class Scheduler {
if (!item)
continue;
if (is_item_removed_(item.get()) &&
this->matches_item_locked_(item, component, name_cstr, SchedulerItem::TIMEOUT, match_retry,
/* skip_removed= */ false)) {
this->matches_item_(item, component, name_cstr, SchedulerItem::TIMEOUT, match_retry,
/* skip_removed= */ false)) {
return true;
}
}
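On the scheduler renames above: both spellings refer to the same helpers; the _locked_ suffix only encodes the caller-must-hold-the-scheduler-lock contract that the comments spell out. A generic sketch of that naming convention (std::mutex used for illustration; the real scheduler has its own lock type):

#include <mutex>
#include <vector>

class SchedulerSketch {
 public:
  void cancel_all() {
    std::lock_guard<std::mutex> guard(this->lock_);
    this->cancel_all_locked_();  // suffix documents that lock_ is already held here
  }

 private:
  // Touches shared state; must only be called with lock_ held.
  void cancel_all_locked_() { this->items_.clear(); }

  std::mutex lock_;
  std::vector<int> items_{1, 2, 3};
};

int main() {
  SchedulerSketch s;
  s.cancel_all();
  return 0;
}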


@@ -121,7 +121,7 @@ def update_storage_json() -> None:
)
else:
_LOGGER.info("Core config or version changed, cleaning build files...")
clean_build(clear_pio_cache=False)
clean_build()
elif storage_should_update_cmake_cache(old, new):
_LOGGER.info("Integrations changed, cleaning cmake cache...")
clean_cmake_cache()
@@ -301,7 +301,7 @@ def clean_cmake_cache():
pioenvs_cmake_path.unlink()
def clean_build(clear_pio_cache: bool = True):
def clean_build():
import shutil
# Allow skipping cache cleaning for integration tests
@@ -322,9 +322,6 @@ def clean_build(clear_pio_cache: bool = True):
_LOGGER.info("Deleting %s", dependencies_lock)
dependencies_lock.unlink()
if not clear_pio_cache:
return
# Clean PlatformIO cache to resolve CMake compiler detection issues
# This helps when toolchain paths change or get corrupted
try:


@@ -16,7 +16,7 @@ aioesphomeapi==42.7.0
zeroconf==0.148.0
puremagic==1.30
ruamel.yaml==0.18.16 # dashboard_import
ruamel.yaml.clib==0.2.15 # dashboard_import
ruamel.yaml.clib==0.2.14 # dashboard_import
esphome-glyphsets==0.2.0
pillow==11.3.0
cairosvg==2.8.2


@@ -29,7 +29,7 @@ def test_binary_sensor_sets_mandatory_fields(generate_main):
)
# Then
assert 'bs_1->set_name("test bs1");' in main_cpp
assert 'bs_1->set_name_and_object_id("test bs1", "test_bs1");' in main_cpp
assert "bs_1->set_pin(" in main_cpp


@@ -26,7 +26,7 @@ def test_button_sets_mandatory_fields(generate_main):
main_cpp = generate_main("tests/component_tests/button/test_button.yaml")
# Then
assert 'wol_1->set_name("wol_test_1");' in main_cpp
assert 'wol_1->set_name_and_object_id("wol_test_1", "wol_test_1");' in main_cpp
assert "wol_2->set_macaddr(18, 52, 86, 120, 144, 171);" in main_cpp


@@ -1,18 +1,6 @@
"""Tests for the web_server OTA platform."""
from __future__ import annotations
from collections.abc import Callable
import logging
from typing import Any
import pytest
from esphome import config_validation as cv
from esphome.components.web_server.ota import _web_server_ota_final_validate
from esphome.const import CONF_ID, CONF_OTA, CONF_PLATFORM, CONF_WEB_SERVER
from esphome.core import ID
import esphome.final_validate as fv
def test_web_server_ota_generated(generate_main: Callable[[str], str]) -> None:
@@ -112,144 +100,3 @@ def test_web_server_ota_esp8266(generate_main: Callable[[str], str]) -> None:
# Check web server OTA component is present
assert "WebServerOTAComponent" in main_cpp
assert "web_server::WebServerOTAComponent" in main_cpp
@pytest.mark.parametrize(
("ota_configs", "expected_count", "warning_expected"),
[
pytest.param(
[
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web", is_manual=False),
}
],
1,
False,
id="single_instance_no_merge",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_1", is_manual=False),
},
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_2", is_manual=False),
},
],
1,
True,
id="two_instances_merged",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_1", is_manual=False),
},
{
CONF_PLATFORM: "esphome",
CONF_ID: ID("ota_esphome", is_manual=False),
},
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_2", is_manual=False),
},
],
2,
True,
id="mixed_platforms_web_server_merged",
),
],
)
def test_web_server_ota_instance_merging(
ota_configs: list[dict[str, Any]],
expected_count: int,
warning_expected: bool,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test web_server OTA instance merging behavior."""
full_conf = {CONF_OTA: ota_configs.copy()}
token = fv.full_config.set(full_conf)
try:
with caplog.at_level(logging.WARNING):
_web_server_ota_final_validate({})
updated_conf = fv.full_config.get()
# Verify total number of OTA platforms
assert len(updated_conf[CONF_OTA]) == expected_count
# Verify warning
if warning_expected:
assert any(
"Found and merged" in record.message
and "web_server OTA" in record.message
for record in caplog.records
), "Expected merge warning not found in log"
else:
assert len(caplog.records) == 0, "Unexpected warnings logged"
finally:
fv.full_config.reset(token)
def test_web_server_ota_consistent_manual_ids(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that consistent manual IDs can be merged successfully."""
ota_configs = [
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web", is_manual=True),
},
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web", is_manual=True),
},
]
full_conf = {CONF_OTA: ota_configs}
token = fv.full_config.set(full_conf)
try:
with caplog.at_level(logging.WARNING):
_web_server_ota_final_validate({})
updated_conf = fv.full_config.get()
assert len(updated_conf[CONF_OTA]) == 1
assert updated_conf[CONF_OTA][0][CONF_ID].id == "ota_web"
assert any(
"Found and merged" in record.message and "web_server OTA" in record.message
for record in caplog.records
)
finally:
fv.full_config.reset(token)
def test_web_server_ota_inconsistent_manual_ids() -> None:
"""Test that inconsistent manual IDs raise an error."""
ota_configs = [
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_1", is_manual=True),
},
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_2", is_manual=True),
},
]
full_conf = {CONF_OTA: ota_configs}
token = fv.full_config.set(full_conf)
try:
with pytest.raises(
cv.Invalid,
match="Found multiple web_server OTA configurations but id is inconsistent",
):
_web_server_ota_final_validate({})
finally:
fv.full_config.reset(token)


@@ -1 +0,0 @@
"""Tests for SNTP component."""


@@ -1,22 +0,0 @@
esphome:
name: sntp-test
esp32:
board: esp32dev
framework:
type: esp-idf
wifi:
ssid: "testssid"
password: "testpassword"
# Test multiple SNTP instances that should be merged
time:
- platform: sntp
servers:
- 192.168.1.1
- pool.ntp.org
- platform: sntp
servers:
- pool.ntp.org
- 192.168.1.2


@@ -1,238 +0,0 @@
"""Tests for SNTP time configuration validation."""
from __future__ import annotations
import logging
from typing import Any
import pytest
from esphome import config_validation as cv
from esphome.components.sntp.time import CONF_SNTP, _sntp_final_validate
from esphome.const import CONF_ID, CONF_PLATFORM, CONF_SERVERS, CONF_TIME
from esphome.core import ID
import esphome.final_validate as fv
@pytest.mark.parametrize(
("time_configs", "expected_count", "expected_servers", "warning_messages"),
[
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time", is_manual=False),
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
}
],
1,
["192.168.1.1", "pool.ntp.org"],
[],
id="single_instance_no_merge",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["192.168.1.2"],
},
],
1,
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
["Found and merged 2 SNTP time configurations into one instance"],
id="two_instances_merged",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["pool.ntp.org", "192.168.1.2"],
},
],
1,
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
["Found and merged 2 SNTP time configurations into one instance"],
id="deduplication_preserves_order",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["192.168.1.2", "pool2.ntp.org"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_3", is_manual=False),
CONF_SERVERS: ["pool3.ntp.org"],
},
],
1,
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
[
"SNTP supports maximum 3 servers. Dropped excess server(s): ['pool2.ntp.org', 'pool3.ntp.org']",
"Found and merged 3 SNTP time configurations into one instance",
],
id="three_instances_drops_excess_servers",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: [
"192.168.1.1",
"pool.ntp.org",
"pool.ntp.org",
"192.168.1.1",
],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["pool.ntp.org", "192.168.1.2"],
},
],
1,
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
["Found and merged 2 SNTP time configurations into one instance"],
id="deduplication_multiple_duplicates",
),
],
)
def test_sntp_instance_merging(
time_configs: list[dict[str, Any]],
expected_count: int,
expected_servers: list[str],
warning_messages: list[str],
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test SNTP instance merging behavior."""
# Create a mock full config with time configs
full_conf = {CONF_TIME: time_configs.copy()}
# Set the context var
token = fv.full_config.set(full_conf)
try:
with caplog.at_level(logging.WARNING):
_sntp_final_validate({})
# Get the updated config
updated_conf = fv.full_config.get()
# Check if merging occurred
if len(time_configs) > 1:
# Verify only one SNTP instance remains
sntp_instances = [
tc
for tc in updated_conf[CONF_TIME]
if tc.get(CONF_PLATFORM) == CONF_SNTP
]
assert len(sntp_instances) == expected_count
# Verify server list
assert sntp_instances[0][CONF_SERVERS] == expected_servers
# Verify warnings
for expected_msg in warning_messages:
assert any(
expected_msg in record.message for record in caplog.records
), f"Expected warning message '{expected_msg}' not found in log"
else:
# Single instance should not trigger merging or warnings
assert len(caplog.records) == 0
# Config should be unchanged
assert updated_conf[CONF_TIME] == time_configs
finally:
fv.full_config.reset(token)
def test_sntp_inconsistent_manual_ids() -> None:
"""Test that inconsistent manual IDs raise an error."""
# Create configs with manual IDs that are inconsistent
time_configs = [
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=True),
CONF_SERVERS: ["192.168.1.1"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=True),
CONF_SERVERS: ["192.168.1.2"],
},
]
full_conf = {CONF_TIME: time_configs}
token = fv.full_config.set(full_conf)
try:
with pytest.raises(
cv.Invalid,
match="Found multiple SNTP configurations but id is inconsistent",
):
_sntp_final_validate({})
finally:
fv.full_config.reset(token)
def test_sntp_with_other_time_platforms(caplog: pytest.LogCaptureFixture) -> None:
"""Test that SNTP merging doesn't affect other time platforms."""
time_configs = [
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: ["192.168.1.1"],
},
{
CONF_PLATFORM: "homeassistant",
CONF_ID: ID("homeassistant_time", is_manual=False),
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["192.168.1.2"],
},
]
full_conf = {CONF_TIME: time_configs.copy()}
token = fv.full_config.set(full_conf)
try:
with caplog.at_level(logging.WARNING):
_sntp_final_validate({})
updated_conf = fv.full_config.get()
# Should have 2 time platforms: 1 merged SNTP + 1 homeassistant
assert len(updated_conf[CONF_TIME]) == 2
# Find the platforms
platforms = {tc[CONF_PLATFORM] for tc in updated_conf[CONF_TIME]}
assert platforms == {CONF_SNTP, "homeassistant"}
# Verify SNTP was merged
sntp_instances = [
tc for tc in updated_conf[CONF_TIME] if tc[CONF_PLATFORM] == CONF_SNTP
]
assert len(sntp_instances) == 1
assert sntp_instances[0][CONF_SERVERS] == ["192.168.1.1", "192.168.1.2"]
finally:
fv.full_config.reset(token)


@@ -25,7 +25,7 @@ def test_text_sets_mandatory_fields(generate_main):
main_cpp = generate_main("tests/component_tests/text/test_text.yaml")
# Then
assert 'it_1->set_name("test 1 text");' in main_cpp
assert 'it_1->set_name_and_object_id("test 1 text", "test_1_text");' in main_cpp
def test_text_config_value_internal_set(generate_main):


@@ -25,9 +25,18 @@ def test_text_sensor_sets_mandatory_fields(generate_main):
main_cpp = generate_main("tests/component_tests/text_sensor/test_text_sensor.yaml")
# Then
assert 'ts_1->set_name("Template Text Sensor 1");' in main_cpp
assert 'ts_2->set_name("Template Text Sensor 2");' in main_cpp
assert 'ts_3->set_name("Template Text Sensor 3");' in main_cpp
assert (
'ts_1->set_name_and_object_id("Template Text Sensor 1", "template_text_sensor_1");'
in main_cpp
)
assert (
'ts_2->set_name_and_object_id("Template Text Sensor 2", "template_text_sensor_2");'
in main_cpp
)
assert (
'ts_3->set_name_and_object_id("Template Text Sensor 3", "template_text_sensor_3");'
in main_cpp
)
def test_text_sensor_config_value_internal_set(generate_main):


@@ -2,9 +2,6 @@ esphome:
debug_scheduler: true
platformio_options:
board_build.flash_mode: dio
environment_variables:
TEST_ENV_VAR: "test_value"
BUILD_NUMBER: "12345"
area:
id: testing_area
name: Testing Area


@@ -76,7 +76,7 @@ lvgl:
line_width: 8
line_rounded: true
- id: date_style
text_font: !lambda return id(roboto10);
text_font: roboto10
align: center
text_color: !lambda return color_id2;
bg_opa: cover
@@ -267,7 +267,7 @@ lvgl:
snprintf(buf, sizeof(buf), "Setup: %d", 42);
return std::string(buf);
align: top_mid
text_font: !lambda return id(space16);
text_font: space16
- label:
id: chip_info_label
# Test complex setup lambda (real-world pattern)
@@ -703,9 +703,7 @@ lvgl:
on_value:
- lvgl.spinbox.update:
id: spinbox_id
value: !lambda |-
static float yyy = 83.0;
return yyy + .8;
value: !lambda return x;
- button:
styles: spin_button
id: spin_up


@@ -18,7 +18,6 @@ touchscreen:
lvgl:
- id: lvgl_0
default_font: space16
displays: sdl0
- id: lvgl_1
displays: sdl1
@@ -40,8 +39,3 @@ lvgl:
text: Click ME
on_click:
logger.log: Clicked
font:
- file: "gfonts://Roboto"
id: space16
bpp: 4


@@ -27,8 +27,13 @@ from esphome.helpers import sanitize, snake_case
from .common import load_config_from_fixture
# Pre-compiled regex pattern for extracting object IDs from expressions
# Pre-compiled regex patterns for extracting object IDs from expressions
# Matches both old format: .set_object_id("obj_id")
# and new format: .set_name_and_object_id("name", "obj_id")
OBJECT_ID_PATTERN = re.compile(r'\.set_object_id\(["\'](.*?)["\']\)')
COMBINED_PATTERN = re.compile(
r'\.set_name_and_object_id\(["\'].*?["\']\s*,\s*["\'](.*?)["\']\)'
)
FIXTURES_DIR = Path(__file__).parent.parent / "fixtures" / "core" / "entity_helpers"
@@ -273,8 +278,10 @@ def setup_test_environment() -> Generator[list[str], None, None]:
def extract_object_id_from_expressions(expressions: list[str]) -> str | None:
"""Extract the object ID that was set from the generated expressions."""
for expr in expressions:
# Look for set_object_id calls with regex to handle various formats
# Matches: var.set_object_id("temperature_2") or var.set_object_id('temperature_2')
# First try new combined format: .set_name_and_object_id("name", "obj_id")
if match := COMBINED_PATTERN.search(expr):
return match.group(1)
# Fall back to old format: .set_object_id("obj_id")
if match := OBJECT_ID_PATTERN.search(expr):
return match.group(1)
return None


@@ -7,27 +7,3 @@ some_component:
value: 2
- id: component2
value: 5
lvgl:
pages:
- id: page1
widgets:
- obj:
id: object1
x: 3
y: 2
width: 4
- obj:
id: object3
x: 6
y: 12
widgets:
- obj:
id: object4
x: 14
y: 9
width: 15
height: 13
- obj:
id: object5
x: 10
y: 11


@@ -13,30 +13,6 @@ packages:
value: 5
- id: component3
value: 6
- lvgl:
pages:
- id: page1
widgets:
- obj:
id: object1
x: 1
y: 2
- obj:
id: object2
x: 5
- obj:
id: object3
x: 6
y: 7
widgets:
- obj:
id: object4
x: 8
y: 9
- obj:
id: object5
x: 10
y: 11
some_component:
- id: !extend ${A}
@@ -44,23 +20,3 @@ some_component:
- id: component2
value: 3
- id: !remove ${C}
lvgl:
pages:
- id: !extend page1
widgets:
- obj:
id: !extend object1
x: 3
width: 4
- obj:
id: !remove object2
- obj:
id: !extend object3
y: 12
height: 13
widgets:
- obj:
id: !extend object4
x: 14
width: 15