mirror of https://github.com/esphome/esphome.git synced 2025-11-17 15:26:01 +00:00

Compare commits

41 Commits

Author SHA1 Message Date
J. Nick Koston
4c9d903773 [logger] Eliminate strlen overhead on LibreTiny 2025-11-16 11:56:52 -06:00
J. Nick Koston
d60c358f48 preen 2025-11-15 18:43:39 -06:00
J. Nick Koston
26b820272a optimize esp8266 as well 2025-11-15 18:36:39 -06:00
J. Nick Koston
9557c90c20 comment 2025-11-15 18:31:07 -06:00
J. Nick Koston
d64bcf27b3 cleanup 2025-11-15 18:17:50 -06:00
J. Nick Koston
d5d61546e7 cleanup 2025-11-15 18:15:10 -06:00
J. Nick Koston
554cdbd5a4 bot is right 2025-11-15 18:03:08 -06:00
J. Nick Koston
0d147e5d10 missed one 2025-11-15 17:42:01 -06:00
J. Nick Koston
730a70ee8b missed header 2025-11-15 17:37:37 -06:00
J. Nick Koston
d096f1192d make bot happy 2025-11-15 17:35:41 -06:00
J. Nick Koston
88a23acc4b tweak 2025-11-15 17:19:37 -06:00
J. Nick Koston
950dff1a38 [logger] Reduce ESP32 UART mutex overhead by 50% 2025-11-15 16:54:18 -06:00
J. Nick Koston
61eddfdcda [logger] Reduce ESP32 UART mutex overhead by 50% 2025-11-15 16:47:35 -06:00
dependabot[bot]
1df996601d Bump ruff from 0.14.4 to 0.14.5 (#11910)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
2025-11-14 19:14:07 +00:00
dependabot[bot]
c32891ec02 Bump github/codeql-action from 4.31.2 to 4.31.3 (#11911)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-14 13:09:59 -06:00
Jonathan Swoboda
2bf6d48fcf [uart] Improve error handling and validate buffer size (#11895)
Co-authored-by: J. Nick Koston <nick+github@koston.org>
2025-11-14 14:06:08 -05:00
Edward Firmo
e49a943cf7 [wifi] Allow use_psram with Arduino (#11902) 2025-11-14 09:13:48 -05:00
dependabot[bot]
67524e14ee Bump pylint from 4.0.2 to 4.0.3 (#11894)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-13 19:05:02 +00:00
Edward Firmo
2290eb0dd2 [light] Fix missing ColorMode::BRIGHTNESS case in logging (#11836) 2025-11-13 12:08:06 -06:00
Clyde Stubbs
0afcf67c32 [esp32] Add sdkconfig flag to make OTA work for 32MB flash (#11883)
Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com>
2025-11-13 10:52:08 -05:00
Clyde Stubbs
952bdfaac2 [esp32] Make esp-idf default framework for P4 (#11884) 2025-11-13 09:55:48 -05:00
Jesse Hills
ed7e5cd325 Bump version to 2025.12.0-dev 2025-11-13 17:00:47 +13:00
Jonathan Swoboda
a15f46e741 Merge branch 'beta' into dev 2025-11-12 22:46:34 -05:00
tomaszduda23
d869108416 [nrf52] add settings for dcdc converter (#11841) 2025-11-12 20:06:20 -06:00
J. Nick Koston
2d6618da3c [wifi] Fix slow reconnection after connection loss for all network types (#11873)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-13 13:44:22 +13:00
J. Nick Koston
47fe84e922 [wifi][ethernet] Fix spurious warnings and unclear status after PR #9823 (#11871) 2025-11-13 13:43:51 +13:00
J. Nick Koston
735bf9930a [light] Fix dangling reference in compute_color_mode causing memory corruption (#11868) 2025-11-13 13:41:28 +13:00
J. Nick Koston
769137fc09 [mqtt] Fix crash with empty broker during upload/logs (#11866)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-13 13:40:26 +13:00
J. Nick Koston
3a5b3ad77d [thermostat] Replace std::map with FixedVector, reduce flash usage (#11875) 2025-11-12 17:55:06 -06:00
J. Nick Koston
859101ddc9 [api][event] Send events immediately to prevent loss during rapid triggers (#11777) 2025-11-13 12:42:50 +13:00
J. Nick Koston
29a50da635 [wifi] Use stack allocation for BSSID formatting in logging (#11859) 2025-11-12 14:27:06 -06:00
J. Nick Koston
5f0fa68d73 [esp32_ble] Use stack allocation for MAC formatting in dump_config (#11860) 2025-11-12 14:26:57 -06:00
J. Nick Koston
2f39b10baa [esp32_ble_tracker] Use initializer_list to eliminate compiler warning and reduce flash usage (#11861) 2025-11-12 14:26:46 -06:00
J. Nick Koston
5a550cc579 [api] Eliminate heap allocations when transmitting Event types (#11773) 2025-11-12 14:26:36 -06:00
J. Nick Koston
4b58cb4ce6 [wifi] Pass ManualIP by const reference to reduce stack usage (#11858) 2025-11-12 14:01:19 -06:00
J. Nick Koston
3872a2fd91 [captive_portal] Warn when enabled without WiFi AP configured (#11856) 2025-11-12 14:01:07 -06:00
dependabot[bot]
5d613ada83 Bump pytest from 9.0.0 to 9.0.1 (#11874)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-12 14:00:50 -06:00
J. Nick Koston
9de80b635a [core] Fix wait_until hanging when used in on_boot automations (#11869) 2025-11-12 17:56:19 +00:00
Jonathan Swoboda
748aee584a [esp32] Update the recommended platform to 55.03.31-2 (#11865) 2025-11-12 10:41:22 -05:00
Jonathan Swoboda
3cbfddcc83 Merge branch 'beta' into dev 2025-11-11 23:27:24 -05:00
J. Nick Koston
398dba4fc8 [ci] Reduce release time by removing 21 redundant ESP32-S3 IDF tests (#11850) 2025-11-12 16:44:19 +13:00
40 changed files with 214 additions and 826 deletions

View File

@@ -58,7 +58,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@ jobs:
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
category: "/language:${{matrix.language}}"

View File

@@ -11,7 +11,7 @@ ci:
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.4
rev: v0.14.5
hooks:
# Run the linter.
- id: ruff

View File

@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2025.11.0b3
PROJECT_NUMBER = 2025.12.0-dev
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@@ -15,11 +15,6 @@ from . import (
class MemoryAnalyzerCLI(MemoryAnalyzer):
"""Memory analyzer with CLI-specific report generation."""
# Symbol size threshold for detailed analysis
SYMBOL_SIZE_THRESHOLD: int = (
100 # Show symbols larger than this in detailed analysis
)
# Column width constants
COL_COMPONENT: int = 29
COL_FLASH_TEXT: int = 14
@@ -196,21 +191,14 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%"
)
# All core symbols above threshold
# Top 15 largest core symbols
lines.append("")
lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:")
sorted_core_symbols = sorted(
self._esphome_core_symbols, key=lambda x: x[2], reverse=True
)
large_core_symbols = [
(symbol, demangled, size)
for symbol, demangled, size in sorted_core_symbols
if size > self.SYMBOL_SIZE_THRESHOLD
]
lines.append(
f"{_COMPONENT_CORE} Symbols > {self.SYMBOL_SIZE_THRESHOLD} B ({len(large_core_symbols)} symbols):"
)
for i, (symbol, demangled, size) in enumerate(large_core_symbols):
for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]):
lines.append(f"{i + 1}. {demangled} ({size:,} B)")
lines.append("=" * self.TABLE_WIDTH)
@@ -280,15 +268,13 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
lines.append(f"Total size: {comp_mem.flash_total:,} B")
lines.append("")
# Show all symbols above threshold for better visibility
# Show all symbols > 100 bytes for better visibility
large_symbols = [
(sym, dem, size)
for sym, dem, size in sorted_symbols
if size > self.SYMBOL_SIZE_THRESHOLD
(sym, dem, size) for sym, dem, size in sorted_symbols if size > 100
]
lines.append(
f"{comp_name} Symbols > {self.SYMBOL_SIZE_THRESHOLD} B ({len(large_symbols)} symbols):"
f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):"
)
for i, (symbol, demangled, size) in enumerate(large_symbols):
lines.append(f"{i + 1}. {demangled} ({size:,} B)")

View File

@@ -1,9 +1,12 @@
import logging
import esphome.codegen as cg
from esphome.components import web_server_base
from esphome.components.web_server_base import CONF_WEB_SERVER_BASE_ID
from esphome.config_helpers import filter_source_files_from_platform
import esphome.config_validation as cv
from esphome.const import (
CONF_AP,
CONF_ID,
PLATFORM_BK72XX,
PLATFORM_ESP32,
@@ -14,6 +17,10 @@ from esphome.const import (
)
from esphome.core import CORE, coroutine_with_priority
from esphome.coroutine import CoroPriority
import esphome.final_validate as fv
from esphome.types import ConfigType
_LOGGER = logging.getLogger(__name__)
def AUTO_LOAD() -> list[str]:
@@ -50,6 +57,27 @@ CONFIG_SCHEMA = cv.All(
)
def _final_validate(config: ConfigType) -> ConfigType:
full_config = fv.full_config.get()
wifi_conf = full_config.get("wifi")
if wifi_conf is None:
# This shouldn't happen due to DEPENDENCIES = ["wifi"], but check anyway
raise cv.Invalid("Captive portal requires the wifi component to be configured")
if CONF_AP not in wifi_conf:
_LOGGER.warning(
"Captive portal is enabled but no WiFi AP is configured. "
"The captive portal will not be accessible. "
"Add 'ap:' to your WiFi configuration to enable the captive portal."
)
return config
FINAL_VALIDATE_SCHEMA = _final_validate
@coroutine_with_priority(CoroPriority.CAPTIVE_PORTAL)
async def to_code(config):
paren = await cg.get_variable(config[CONF_WEB_SERVER_BASE_ID])

View File

@@ -634,11 +634,13 @@ void ESP32BLE::dump_config() {
io_capability_s = "invalid";
break;
}
char mac_s[18];
format_mac_addr_upper(mac_address, mac_s);
ESP_LOGCONFIG(TAG,
"BLE:\n"
" MAC address: %s\n"
" IO Capability: %s",
format_mac_address_pretty(mac_address).c_str(), io_capability_s);
mac_s, io_capability_s);
} else {
ESP_LOGCONFIG(TAG, "Bluetooth stack is not enabled");
}
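
The hunk above trades `format_mac_address_pretty()`, which returns a temporary `std::string`, for a caller-owned `char mac_s[18]` filled by `format_mac_addr_upper()` — the same stack-allocation pattern the WiFi logging commits apply further down. A minimal standalone sketch of that pattern, using a hypothetical `format_mac_upper_sketch` helper rather than ESPHome's actual function:

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical illustration: format a 6-byte MAC into a caller-provided stack
// buffer instead of returning a std::string (which would heap-allocate).
// "AA:BB:CC:DD:EE:FF" is 17 visible characters plus the terminating NUL -> 18 bytes.
void format_mac_upper_sketch(const uint8_t mac[6], char out[18]) {
  snprintf(out, 18, "%02X:%02X:%02X:%02X:%02X:%02X",
           mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
}

int main() {
  const uint8_t mac[6] = {0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF};
  char mac_s[18];  // lives on the stack, no allocation
  format_mac_upper_sketch(mac, mac_s);
  printf("MAC address: %s\n", mac_s);
}
```

The 17-character string plus NUL terminator is why the buffers in these diffs are sized `char[18]`.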

View File

@@ -31,83 +31,35 @@ CONFIG_SCHEMA = cv.Schema(
cv.GenerateID(CONF_LD2410_ID): cv.use_id(LD2410Component),
cv.Optional(CONF_MOVING_DISTANCE): sensor.sensor_schema(
device_class=DEVICE_CLASS_DISTANCE,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_SIGNAL,
unit_of_measurement=UNIT_CENTIMETER,
),
cv.Optional(CONF_STILL_DISTANCE): sensor.sensor_schema(
device_class=DEVICE_CLASS_DISTANCE,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_SIGNAL,
unit_of_measurement=UNIT_CENTIMETER,
),
cv.Optional(CONF_MOVING_ENERGY): sensor.sensor_schema(
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_MOTION_SENSOR,
unit_of_measurement=UNIT_PERCENT,
),
cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_FLASH,
unit_of_measurement=UNIT_PERCENT,
),
cv.Optional(CONF_LIGHT): sensor.sensor_schema(
device_class=DEVICE_CLASS_ILLUMINANCE,
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_LIGHTBULB,
),
cv.Optional(CONF_DETECTION_DISTANCE): sensor.sensor_schema(
device_class=DEVICE_CLASS_DISTANCE,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_SIGNAL,
unit_of_measurement=UNIT_CENTIMETER,
),
@@ -121,13 +73,7 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
cv.Optional(CONF_MOVE_ENERGY): sensor.sensor_schema(
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
],
icon=ICON_MOTION_SENSOR,
unit_of_measurement=UNIT_PERCENT,
@@ -135,13 +81,7 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
],
icon=ICON_FLASH,
unit_of_measurement=UNIT_PERCENT,

View File

@@ -31,84 +31,36 @@ CONFIG_SCHEMA = cv.Schema(
cv.GenerateID(CONF_LD2412_ID): cv.use_id(LD2412Component),
cv.Optional(CONF_DETECTION_DISTANCE): sensor.sensor_schema(
device_class=DEVICE_CLASS_DISTANCE,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_SIGNAL,
unit_of_measurement=UNIT_CENTIMETER,
),
cv.Optional(CONF_LIGHT): sensor.sensor_schema(
device_class=DEVICE_CLASS_ILLUMINANCE,
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_LIGHTBULB,
unit_of_measurement=UNIT_EMPTY, # No standard unit for this light sensor
),
cv.Optional(CONF_MOVING_DISTANCE): sensor.sensor_schema(
device_class=DEVICE_CLASS_DISTANCE,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_SIGNAL,
unit_of_measurement=UNIT_CENTIMETER,
),
cv.Optional(CONF_MOVING_ENERGY): sensor.sensor_schema(
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_MOTION_SENSOR,
unit_of_measurement=UNIT_PERCENT,
),
cv.Optional(CONF_STILL_DISTANCE): sensor.sensor_schema(
device_class=DEVICE_CLASS_DISTANCE,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_SIGNAL,
unit_of_measurement=UNIT_CENTIMETER,
),
cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
],
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
icon=ICON_FLASH,
unit_of_measurement=UNIT_PERCENT,
),
@@ -122,13 +74,7 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
cv.Optional(CONF_MOVE_ENERGY): sensor.sensor_schema(
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
],
icon=ICON_MOTION_SENSOR,
unit_of_measurement=UNIT_PERCENT,
@@ -136,13 +82,7 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
filters=[
{
"timeout": {
"timeout": cv.TimePeriod(milliseconds=1000),
"value": "last",
}
},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
],
icon=ICON_FLASH,
unit_of_measurement=UNIT_PERCENT,

View File

@@ -365,8 +365,10 @@ async def to_code(config):
if CORE.is_esp32:
if config[CONF_HARDWARE_UART] == USB_CDC:
add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_USB_CDC", True)
cg.add_define("USE_LOGGER_UART_SELECTION_USB_CDC")
elif config[CONF_HARDWARE_UART] == USB_SERIAL_JTAG:
add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_USB_SERIAL_JTAG", True)
cg.add_define("USE_LOGGER_UART_SELECTION_USB_SERIAL_JTAG")
try:
uart_selection(USB_SERIAL_JTAG)
cg.add_define("USE_LOGGER_USB_SERIAL_JTAG")

View File

@@ -65,7 +65,9 @@ void HOT Logger::log_vprintf_(uint8_t level, const char *tag, int line, const ch
uint16_t buffer_at = 0; // Initialize buffer position
this->format_log_to_buffer_with_terminator_(level, tag, line, format, args, console_buffer, &buffer_at,
MAX_CONSOLE_LOG_MSG_SIZE);
this->write_msg_(console_buffer);
// Add newline if platform needs it (ESP32 doesn't add via write_msg_)
this->add_newline_to_buffer_if_needed_(console_buffer, &buffer_at, MAX_CONSOLE_LOG_MSG_SIZE);
this->write_msg_(console_buffer, buffer_at);
}
// Reset the recursion guard for this task
@@ -131,18 +133,19 @@ void Logger::log_vprintf_(uint8_t level, const char *tag, int line, const __Flas
// Save the offset before calling format_log_to_buffer_with_terminator_
// since it will increment tx_buffer_at_ to the end of the formatted string
uint32_t msg_start = this->tx_buffer_at_;
uint16_t msg_start = this->tx_buffer_at_;
this->format_log_to_buffer_with_terminator_(level, tag, line, this->tx_buffer_, args, this->tx_buffer_,
&this->tx_buffer_at_, this->tx_buffer_size_);
// Write to console and send callback starting at the msg_start
if (this->baud_rate_ > 0) {
this->write_msg_(this->tx_buffer_ + msg_start);
}
size_t msg_length =
uint16_t msg_length =
this->tx_buffer_at_ - msg_start; // Don't subtract 1 - tx_buffer_at_ is already at the null terminator position
// Callbacks get message first (before console write)
this->log_callback_.call(level, tag, this->tx_buffer_ + msg_start, msg_length);
// Write to console starting at the msg_start
this->write_tx_buffer_to_console_(msg_start, &msg_length);
global_recursion_guard_ = false;
}
#endif // USE_STORE_LOG_STR_IN_FLASH
@@ -209,9 +212,7 @@ void Logger::process_messages_() {
// This ensures all log messages appear on the console in a clean, serialized manner
// Note: Messages may appear slightly out of order due to async processing, but
// this is preferred over corrupted/interleaved console output
if (this->baud_rate_ > 0) {
this->write_msg_(this->tx_buffer_);
}
this->write_tx_buffer_to_console_();
}
} else {
// No messages to process, disable loop if appropriate

View File

@@ -71,6 +71,17 @@ static constexpr uint16_t MAX_HEADER_SIZE = 128;
// "0x" + 2 hex digits per byte + '\0'
static constexpr size_t MAX_POINTER_REPRESENTATION = 2 + sizeof(void *) * 2 + 1;
// Platform-specific: does write_msg_ add its own newline?
// false: Caller must add newline to buffer before calling write_msg_ (ESP32, ESP8266, LibreTiny)
// Allows single write call with newline included for efficiency
// true: write_msg_ adds newline itself via puts()/println() (other platforms)
// Newline should NOT be added to buffer
#if defined(USE_ESP32) || defined(USE_ESP8266) || defined(USE_LIBRETINY)
static constexpr bool WRITE_MSG_ADDS_NEWLINE = false;
#else
static constexpr bool WRITE_MSG_ADDS_NEWLINE = true;
#endif
#if defined(USE_ESP32) || defined(USE_ESP8266) || defined(USE_RP2040) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
/** Enum for logging UART selection
*
@@ -173,7 +184,7 @@ class Logger : public Component {
protected:
void process_messages_();
void write_msg_(const char *msg);
void write_msg_(const char *msg, size_t len);
// Format a log message with printf-style arguments and write it to a buffer with header, footer, and null terminator
// It's the caller's responsibility to initialize buffer_at (typically to 0)
@@ -200,6 +211,35 @@ class Logger : public Component {
}
}
// Helper to add newline to buffer for platforms that need it
// Modifies buffer_at to include the newline
inline void HOT add_newline_to_buffer_if_needed_(char *buffer, uint16_t *buffer_at, uint16_t buffer_size) {
if constexpr (!WRITE_MSG_ADDS_NEWLINE) {
// Add newline - don't need to maintain null termination
// write_msg_ now always receives explicit length, so we can safely overwrite the null terminator
// This is safe because:
// 1. Callbacks already received the message (before we add newline)
// 2. write_msg_ receives the length explicitly (doesn't need null terminator)
if (*buffer_at < buffer_size) {
buffer[(*buffer_at)++] = '\n';
} else if (buffer_size > 0) {
// Buffer was full - replace last char with newline to ensure it's visible
buffer[buffer_size - 1] = '\n';
*buffer_at = buffer_size;
}
}
}
// Helper to write tx_buffer_ to console if logging is enabled
// INTERNAL USE ONLY - offset > 0 requires length parameter to be non-null
inline void HOT write_tx_buffer_to_console_(uint16_t offset = 0, uint16_t *length = nullptr) {
if (this->baud_rate_ > 0) {
uint16_t *len_ptr = length ? length : &this->tx_buffer_at_;
this->add_newline_to_buffer_if_needed_(this->tx_buffer_ + offset, len_ptr, this->tx_buffer_size_ - offset);
this->write_msg_(this->tx_buffer_ + offset, *len_ptr);
}
}
// Helper to format and send a log message to both console and callbacks
inline void HOT log_message_to_buffer_and_send_(uint8_t level, const char *tag, int line, const char *format,
va_list args) {
@@ -208,10 +248,11 @@ class Logger : public Component {
this->format_log_to_buffer_with_terminator_(level, tag, line, format, args, this->tx_buffer_, &this->tx_buffer_at_,
this->tx_buffer_size_);
if (this->baud_rate_ > 0) {
this->write_msg_(this->tx_buffer_); // If logging is enabled, write to console
}
// Callbacks get message WITHOUT newline (for API/MQTT/syslog)
this->log_callback_.call(level, tag, this->tx_buffer_, this->tx_buffer_at_);
// Console gets message WITH newline (if platform needs it)
this->write_tx_buffer_to_console_();
}
// Write the body of the log message to the buffer
@@ -425,7 +466,9 @@ class Logger : public Component {
}
// Update buffer_at with the formatted length (handle truncation)
uint16_t formatted_len = (ret >= remaining) ? remaining : ret;
// When vsnprintf truncates (ret >= remaining), it writes (remaining - 1) chars + null terminator
// When it doesn't truncate (ret < remaining), it writes ret chars + null terminator
uint16_t formatted_len = (ret >= remaining) ? (remaining - 1) : ret;
*buffer_at += formatted_len;
// Remove all trailing newlines right after formatting
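
The corrected `formatted_len` above leans on standard `vsnprintf` semantics: the return value is the length the full message would have had, while on truncation only `size - 1` characters plus the NUL are actually stored. A small self-contained check of that behaviour (plain C library usage, not ESPHome code):

```cpp
#include <cstdarg>
#include <cstdio>
#include <cstring>

// Returns the number of characters actually stored in buf (excluding the NUL),
// mirroring the corrected formatted_len computation in the hunk above.
static int format_into(char *buf, size_t size, const char *fmt, ...) {
  va_list args;
  va_start(args, fmt);
  int ret = vsnprintf(buf, size, fmt, args);
  va_end(args);
  // ret >= size -> truncated: size - 1 chars written, plus the terminator
  // ret <  size -> ret chars written, plus the terminator
  return (ret >= static_cast<int>(size)) ? static_cast<int>(size) - 1 : ret;
}

int main() {
  char small[8];
  int written = format_into(small, sizeof(small), "hello %s", "world");
  printf("stored %d chars: '%s' (strlen=%zu)\n", written, small, strlen(small));
  // Expected: stored 7 chars: 'hello w' (strlen=7)
}
```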

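Taken together, the logger.h changes make every console write carry an explicit length and let `if constexpr` strip the newline handling out entirely on platforms whose `write_msg_` already appends one. A compact sketch of that pattern under an assumed `kSinkAddsNewline` flag standing in for `WRITE_MSG_ADDS_NEWLINE` (not the real ESPHome class):

```cpp
#include <cstdint>
#include <cstdio>

// Assumed compile-time flag in the spirit of WRITE_MSG_ADDS_NEWLINE:
// false -> caller appends '\n' to the buffer; the sink gets one write with an explicit length.
constexpr bool kSinkAddsNewline = false;

void write_sink(const char *msg, uint16_t len) {
  fwrite(msg, 1, len, stdout);  // explicit length: no strlen(), NUL terminator not required
}

void emit(char *buf, uint16_t *at, uint16_t size) {
  if constexpr (!kSinkAddsNewline) {
    if (*at < size) {
      buf[(*at)++] = '\n';   // overwrite the NUL; write_sink never reads past len
    } else if (size > 0) {
      buf[size - 1] = '\n';  // buffer full: sacrifice the last char so the line still ends
      *at = size;
    }
  }
  write_sink(buf, *at);
}

int main() {
  char buf[32] = "log line";
  uint16_t at = 8;  // length of "log line"; the NUL sits at buf[8]
  emit(buf, &at, sizeof(buf));
}
```

When `kSinkAddsNewline` is true, the `if constexpr` branch is discarded at compile time, so platforms whose sink uses `puts()`/`println()` pay nothing for the shared helper.
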
View File

@@ -121,25 +121,23 @@ void Logger::pre_setup() {
ESP_LOGI(TAG, "Log initialized");
}
void HOT Logger::write_msg_(const char *msg) {
if (
#if defined(USE_LOGGER_USB_CDC) && !defined(USE_LOGGER_USB_SERIAL_JTAG)
this->uart_ == UART_SELECTION_USB_CDC
#elif defined(USE_LOGGER_USB_SERIAL_JTAG) && !defined(USE_LOGGER_USB_CDC)
this->uart_ == UART_SELECTION_USB_SERIAL_JTAG
#elif defined(USE_LOGGER_USB_CDC) && defined(USE_LOGGER_USB_SERIAL_JTAG)
this->uart_ == UART_SELECTION_USB_CDC || this->uart_ == UART_SELECTION_USB_SERIAL_JTAG
void HOT Logger::write_msg_(const char *msg, size_t len) {
// Length is now always passed explicitly - no strlen() fallback needed
#if defined(USE_LOGGER_UART_SELECTION_USB_CDC) || defined(USE_LOGGER_UART_SELECTION_USB_SERIAL_JTAG)
// USB CDC/JTAG - single write including newline (already in buffer)
// Use fwrite to stdout which goes through VFS to USB console
//
// Note: These defines indicate the user's YAML configuration choice (hardware_uart: USB_CDC/USB_SERIAL_JTAG).
// They are ONLY defined when the user explicitly selects USB as the logger output in their config.
// This is compile-time selection, not runtime detection - if USB is configured, it's always used.
// There is no fallback to regular UART if "USB isn't connected" - that's the user's responsibility
// to configure correctly for their hardware. This approach eliminates runtime overhead.
fwrite(msg, 1, len, stdout);
#else
/* DISABLES CODE */ (false) // NOLINT
// Regular UART - single write including newline (already in buffer)
uart_write_bytes(this->uart_num_, msg, len);
#endif
) {
puts(msg);
} else {
// Use tx_buffer_at_ if msg points to tx_buffer_, otherwise fall back to strlen
size_t len = (msg == this->tx_buffer_) ? this->tx_buffer_at_ : strlen(msg);
uart_write_bytes(this->uart_num_, msg, len);
uart_write_bytes(this->uart_num_, "\n", 1);
}
}
const LogString *Logger::get_uart_selection_() {

View File

@@ -33,7 +33,10 @@ void Logger::pre_setup() {
ESP_LOGI(TAG, "Log initialized");
}
void HOT Logger::write_msg_(const char *msg) { this->hw_serial_->println(msg); }
void HOT Logger::write_msg_(const char *msg, size_t len) {
// Single write with newline already in buffer (added by caller)
this->hw_serial_->write(msg, len);
}
const LogString *Logger::get_uart_selection_() {
switch (this->uart_) {

View File

@@ -3,7 +3,7 @@
namespace esphome::logger {
void HOT Logger::write_msg_(const char *msg) {
void HOT Logger::write_msg_(const char *msg, size_t) {
time_t rawtime;
struct tm *timeinfo;
char buffer[80];

View File

@@ -49,7 +49,7 @@ void Logger::pre_setup() {
ESP_LOGI(TAG, "Log initialized");
}
void HOT Logger::write_msg_(const char *msg) { this->hw_serial_->println(msg); }
void HOT Logger::write_msg_(const char *msg, size_t len) { this->hw_serial_->write(msg, len); }
const LogString *Logger::get_uart_selection_() {
switch (this->uart_) {

View File

@@ -27,7 +27,7 @@ void Logger::pre_setup() {
ESP_LOGI(TAG, "Log initialized");
}
void HOT Logger::write_msg_(const char *msg) { this->hw_serial_->println(msg); }
void HOT Logger::write_msg_(const char *msg, size_t) { this->hw_serial_->println(msg); }
const LogString *Logger::get_uart_selection_() {
switch (this->uart_) {

View File

@@ -62,7 +62,7 @@ void Logger::pre_setup() {
ESP_LOGI(TAG, "Log initialized");
}
void HOT Logger::write_msg_(const char *msg) {
void HOT Logger::write_msg_(const char *msg, size_t) {
#ifdef CONFIG_PRINTK
printk("%s\n", msg);
#endif

View File

@@ -118,10 +118,10 @@ struct IPAddress {
operator arduino_ns::IPAddress() const { return ip_addr_get_ip4_u32(&ip_addr_); }
#endif
bool is_set() { return !ip_addr_isany(&ip_addr_); } // NOLINT(readability-simplify-boolean-expr)
bool is_ip4() { return IP_IS_V4(&ip_addr_); }
bool is_ip6() { return IP_IS_V6(&ip_addr_); }
bool is_multicast() { return ip_addr_ismulticast(&ip_addr_); }
bool is_set() const { return !ip_addr_isany(&ip_addr_); } // NOLINT(readability-simplify-boolean-expr)
bool is_ip4() const { return IP_IS_V4(&ip_addr_); }
bool is_ip6() const { return IP_IS_V6(&ip_addr_); }
bool is_multicast() const { return ip_addr_ismulticast(&ip_addr_); }
std::string str() const { return str_lower_case(ipaddr_ntoa(&ip_addr_)); }
bool operator==(const IPAddress &other) const { return ip_addr_cmp(&ip_addr_, &other.ip_addr_); }
bool operator!=(const IPAddress &other) const { return !ip_addr_cmp(&ip_addr_, &other.ip_addr_); }

View File

@@ -103,6 +103,7 @@ nrf52_ns = cg.esphome_ns.namespace("nrf52")
DeviceFirmwareUpdate = nrf52_ns.class_("DeviceFirmwareUpdate", cg.Component)
CONF_DFU = "dfu"
CONF_DCDC = "dcdc"
CONF_REG0 = "reg0"
CONF_UICR_ERASE = "uicr_erase"
@@ -121,6 +122,7 @@ CONFIG_SCHEMA = cv.All(
cv.Required(CONF_RESET_PIN): pins.gpio_output_pin_schema,
}
),
cv.Optional(CONF_DCDC, default=True): cv.boolean,
cv.Optional(CONF_REG0): cv.Schema(
{
cv.Required(CONF_VOLTAGE): cv.All(
@@ -196,6 +198,7 @@ async def to_code(config: ConfigType) -> None:
if dfu_config := config.get(CONF_DFU):
CORE.add_job(_dfu_to_code, dfu_config)
zephyr_add_prj_conf("BOARD_ENABLE_DCDC", config[CONF_DCDC])
if reg0_config := config.get(CONF_REG0):
value = VOLTAGE_LEVELS.index(reg0_config[CONF_VOLTAGE])

View File

@@ -1,14 +1,9 @@
import logging
import esphome.codegen as cg
from esphome.components import time as time_
from esphome.config_helpers import merge_config
import esphome.config_validation as cv
from esphome.const import (
CONF_ID,
CONF_PLATFORM,
CONF_SERVERS,
CONF_TIME,
PLATFORM_BK72XX,
PLATFORM_ESP32,
PLATFORM_ESP8266,
@@ -17,74 +12,13 @@ from esphome.const import (
PLATFORM_RTL87XX,
)
from esphome.core import CORE
import esphome.final_validate as fv
from esphome.types import ConfigType
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ["network"]
CONF_SNTP = "sntp"
sntp_ns = cg.esphome_ns.namespace("sntp")
SNTPComponent = sntp_ns.class_("SNTPComponent", time_.RealTimeClock)
DEFAULT_SERVERS = ["0.pool.ntp.org", "1.pool.ntp.org", "2.pool.ntp.org"]
def _sntp_final_validate(config: ConfigType) -> None:
"""Merge multiple SNTP instances into one, similar to OTA merging behavior."""
full_conf = fv.full_config.get()
time_confs = full_conf.get(CONF_TIME, [])
sntp_configs: list[ConfigType] = []
other_time_configs: list[ConfigType] = []
for time_conf in time_confs:
if time_conf.get(CONF_PLATFORM) == CONF_SNTP:
sntp_configs.append(time_conf)
else:
other_time_configs.append(time_conf)
if len(sntp_configs) <= 1:
return
# Merge all SNTP configs into the first one
merged = sntp_configs[0]
for sntp_conf in sntp_configs[1:]:
# Validate that IDs are consistent if manually specified
if merged[CONF_ID].is_manual and sntp_conf[CONF_ID].is_manual:
raise cv.Invalid(
f"Found multiple SNTP configurations but {CONF_ID} is inconsistent"
)
merged = merge_config(merged, sntp_conf)
# Deduplicate servers while preserving order
servers = merged[CONF_SERVERS]
unique_servers = list(dict.fromkeys(servers))
# Warn if we're dropping servers due to 3-server limit
if len(unique_servers) > 3:
dropped = unique_servers[3:]
unique_servers = unique_servers[:3]
_LOGGER.warning(
"SNTP supports maximum 3 servers. Dropped excess server(s): %s",
dropped,
)
merged[CONF_SERVERS] = unique_servers
_LOGGER.warning(
"Found and merged %d SNTP time configurations into one instance",
len(sntp_configs),
)
# Replace time configs with merged SNTP + other time platforms
other_time_configs.append(merged)
full_conf[CONF_TIME] = other_time_configs
fv.full_config.set(full_conf)
CONFIG_SCHEMA = cv.All(
time_.TIME_SCHEMA.extend(
{
@@ -106,8 +40,6 @@ CONFIG_SCHEMA = cv.All(
),
)
FINAL_VALIDATE_SCHEMA = _sntp_final_validate
async def to_code(config):
servers = config[CONF_SERVERS]

View File

@@ -56,19 +56,11 @@ uint32_t ESP8266UartComponent::get_config() {
}
void ESP8266UartComponent::setup() {
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
if (!pin) {
return;
}
const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
pin->setup();
}
};
setup_pin_if_needed(this->rx_pin_);
if (this->rx_pin_ != this->tx_pin_) {
setup_pin_if_needed(this->tx_pin_);
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
// Use Arduino HardwareSerial UARTs if all used pins match the ones

View File

@@ -133,19 +133,11 @@ void IDFUARTComponent::load_settings(bool dump_config) {
return;
}
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
if (!pin) {
return;
}
const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
pin->setup();
}
};
setup_pin_if_needed(this->rx_pin_);
if (this->rx_pin_ != this->tx_pin_) {
setup_pin_if_needed(this->tx_pin_);
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;

View File

@@ -53,7 +53,7 @@ void LibreTinyUARTComponent::setup() {
auto shouldFallbackToSoftwareSerial = [&]() -> bool {
auto hasFlags = [](InternalGPIOPin *pin, const gpio::Flags mask) -> bool {
return pin && (pin->get_flags() & mask) != gpio::Flags::FLAG_NONE;
return pin && pin->get_flags() & mask != gpio::Flags::FLAG_NONE;
};
if (hasFlags(this->tx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN) ||
hasFlags(this->rx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN)) {

View File

@@ -52,19 +52,11 @@ uint16_t RP2040UartComponent::get_config() {
}
void RP2040UartComponent::setup() {
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
if (!pin) {
return;
}
const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
pin->setup();
}
};
setup_pin_if_needed(this->rx_pin_);
if (this->rx_pin_ != this->tx_pin_) {
setup_pin_if_needed(this->tx_pin_);
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
uint16_t config = get_config();

View File

@@ -1,17 +1,10 @@
import logging
import esphome.codegen as cg
from esphome.components.esp32 import add_idf_component
from esphome.components.ota import BASE_OTA_SCHEMA, OTAComponent, ota_to_code
from esphome.config_helpers import merge_config
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_OTA, CONF_PLATFORM, CONF_WEB_SERVER
from esphome.const import CONF_ID
from esphome.core import CORE, coroutine_with_priority
from esphome.coroutine import CoroPriority
import esphome.final_validate as fv
from esphome.types import ConfigType
_LOGGER = logging.getLogger(__name__)
CODEOWNERS = ["@esphome/core"]
DEPENDENCIES = ["network", "web_server_base"]
@@ -19,53 +12,6 @@ DEPENDENCIES = ["network", "web_server_base"]
web_server_ns = cg.esphome_ns.namespace("web_server")
WebServerOTAComponent = web_server_ns.class_("WebServerOTAComponent", OTAComponent)
def _web_server_ota_final_validate(config: ConfigType) -> None:
"""Merge multiple web_server OTA instances into one.
Multiple web_server OTA instances register duplicate HTTP handlers for /update,
causing undefined behavior. Merge them into a single instance.
"""
full_conf = fv.full_config.get()
ota_confs = full_conf.get(CONF_OTA, [])
web_server_ota_configs: list[ConfigType] = []
other_ota_configs: list[ConfigType] = []
for ota_conf in ota_confs:
if ota_conf.get(CONF_PLATFORM) == CONF_WEB_SERVER:
web_server_ota_configs.append(ota_conf)
else:
other_ota_configs.append(ota_conf)
if len(web_server_ota_configs) <= 1:
return
# Merge all web_server OTA configs into the first one
merged = web_server_ota_configs[0]
for ota_conf in web_server_ota_configs[1:]:
# Validate that IDs are consistent if manually specified
if (
merged[CONF_ID].is_manual
and ota_conf[CONF_ID].is_manual
and merged[CONF_ID] != ota_conf[CONF_ID]
):
raise cv.Invalid(
f"Found multiple web_server OTA configurations but {CONF_ID} is inconsistent"
)
merged = merge_config(merged, ota_conf)
_LOGGER.warning(
"Found and merged %d web_server OTA configurations into one instance",
len(web_server_ota_configs),
)
# Replace OTA configs with merged web_server + other OTA platforms
other_ota_configs.append(merged)
full_conf[CONF_OTA] = other_ota_configs
fv.full_config.set(full_conf)
CONFIG_SCHEMA = (
cv.Schema(
{
@@ -76,8 +22,6 @@ CONFIG_SCHEMA = (
.extend(cv.COMPONENT_SCHEMA)
)
FINAL_VALIDATE_SCHEMA = _web_server_ota_final_validate
@coroutine_with_priority(CoroPriority.WEB_SERVER_OTA)
async def to_code(config):

View File

@@ -489,18 +489,10 @@ AsyncEventSourceResponse::AsyncEventSourceResponse(const AsyncWebServerRequest *
void AsyncEventSourceResponse::destroy(void *ptr) {
auto *rsp = static_cast<AsyncEventSourceResponse *>(ptr);
int fd = rsp->fd_.exchange(0); // Atomically get and clear fd
if (fd > 0) {
ESP_LOGD(TAG, "Event source connection closed (fd: %d)", fd);
// Immediately shut down the socket to prevent lwIP from delivering more data
// This prevents "recv_tcp: recv for wrong pcb!" assertions when the TCP stack
// tries to deliver queued data after the session is marked as dead
// See: https://github.com/esphome/esphome/issues/11936
shutdown(fd, SHUT_RDWR);
// Note: We don't close() the socket - httpd owns it and will close it
}
// Session will be cleaned up in the main loop to avoid race conditions
ESP_LOGD(TAG, "Event source connection closed (fd: %d)", rsp->fd_.load());
// Mark as dead by setting fd to 0 - will be cleaned up in the main loop
rsp->fd_.store(0);
// Note: We don't delete or remove from set here to avoid race conditions
}
// helper for allowing only unique entries in the queue

View File

@@ -670,25 +670,25 @@ void WiFiComponent::save_wifi_sta(const std::string &ssid, const std::string &pa
void WiFiComponent::start_connecting(const WiFiAP &ap) {
// Log connection attempt at INFO level with priority
std::string bssid_formatted;
char bssid_s[18];
int8_t priority = 0;
if (ap.get_bssid().has_value()) {
bssid_formatted = format_mac_address_pretty(ap.get_bssid().value().data());
format_mac_addr_upper(ap.get_bssid().value().data(), bssid_s);
priority = this->get_sta_priority(ap.get_bssid().value());
}
ESP_LOGI(TAG,
"Connecting to " LOG_SECRET("'%s'") " " LOG_SECRET("(%s)") " (priority %d, attempt %u/%u in phase %s)...",
ap.get_ssid().c_str(), ap.get_bssid().has_value() ? bssid_formatted.c_str() : LOG_STR_LITERAL("any"),
priority, this->num_retried_ + 1, get_max_retries_for_phase(this->retry_phase_),
ap.get_ssid().c_str(), ap.get_bssid().has_value() ? bssid_s : LOG_STR_LITERAL("any"), priority,
this->num_retried_ + 1, get_max_retries_for_phase(this->retry_phase_),
LOG_STR_ARG(retry_phase_to_log_string(this->retry_phase_)));
#ifdef ESPHOME_LOG_HAS_VERBOSE
ESP_LOGV(TAG, "Connection Params:");
ESP_LOGV(TAG, " SSID: '%s'", ap.get_ssid().c_str());
if (ap.get_bssid().has_value()) {
ESP_LOGV(TAG, " BSSID: %s", format_mac_address_pretty(ap.get_bssid()->data()).c_str());
ESP_LOGV(TAG, " BSSID: %s", bssid_s);
} else {
ESP_LOGV(TAG, " BSSID: Not Set");
}
@@ -797,6 +797,8 @@ const LogString *get_signal_bars(int8_t rssi) {
void WiFiComponent::print_connect_params_() {
bssid_t bssid = wifi_bssid();
char bssid_s[18];
format_mac_addr_upper(bssid.data(), bssid_s);
ESP_LOGCONFIG(TAG, " Local MAC: %s", get_mac_address_pretty().c_str());
if (this->is_disabled()) {
@@ -819,9 +821,9 @@ void WiFiComponent::print_connect_params_() {
" Gateway: %s\n"
" DNS1: %s\n"
" DNS2: %s",
wifi_ssid().c_str(), format_mac_address_pretty(bssid.data()).c_str(), App.get_name().c_str(), rssi,
LOG_STR_ARG(get_signal_bars(rssi)), get_wifi_channel(), wifi_subnet_mask_().str().c_str(),
wifi_gateway_ip_().str().c_str(), wifi_dns_ip_(0).str().c_str(), wifi_dns_ip_(1).str().c_str());
wifi_ssid().c_str(), bssid_s, App.get_name().c_str(), rssi, LOG_STR_ARG(get_signal_bars(rssi)),
get_wifi_channel(), wifi_subnet_mask_().str().c_str(), wifi_gateway_ip_().str().c_str(),
wifi_dns_ip_(0).str().c_str(), wifi_dns_ip_(1).str().c_str());
#ifdef ESPHOME_LOG_HAS_VERBOSE
if (const WiFiAP *config = this->get_selected_sta_(); config && config->get_bssid().has_value()) {
ESP_LOGV(TAG, " Priority: %d", this->get_sta_priority(*config->get_bssid()));
@@ -1412,8 +1414,10 @@ void WiFiComponent::log_and_adjust_priority_for_failed_connect_() {
(old_priority > std::numeric_limits<int8_t>::min()) ? (old_priority - 1) : std::numeric_limits<int8_t>::min();
this->set_sta_priority(failed_bssid.value(), new_priority);
}
ESP_LOGD(TAG, "Failed " LOG_SECRET("'%s'") " " LOG_SECRET("(%s)") ", priority %d → %d", ssid.c_str(),
format_mac_address_pretty(failed_bssid.value().data()).c_str(), old_priority, new_priority);
char bssid_s[18];
format_mac_addr_upper(failed_bssid.value().data(), bssid_s);
ESP_LOGD(TAG, "Failed " LOG_SECRET("'%s'") " " LOG_SECRET("(%s)") ", priority %d → %d", ssid.c_str(), bssid_s,
old_priority, new_priority);
// After adjusting priority, check if all priorities are now at minimum
// If so, clear the vector to save memory and reset for fresh start

View File

@@ -429,7 +429,7 @@ class WiFiComponent : public Component {
bool wifi_sta_pre_setup_();
bool wifi_apply_output_power_(float output_power);
bool wifi_apply_power_save_();
bool wifi_sta_ip_config_(optional<ManualIP> manual_ip);
bool wifi_sta_ip_config_(const optional<ManualIP> &manual_ip);
bool wifi_apply_hostname_();
bool wifi_sta_connect_(const WiFiAP &ap);
void wifi_pre_setup_();
@@ -437,7 +437,7 @@ class WiFiComponent : public Component {
bool wifi_scan_start_(bool passive);
#ifdef USE_WIFI_AP
bool wifi_ap_ip_config_(optional<ManualIP> manual_ip);
bool wifi_ap_ip_config_(const optional<ManualIP> &manual_ip);
bool wifi_start_ap_(const WiFiAP &ap);
#endif // USE_WIFI_AP
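
The signature changes above (and the matching ones in the platform implementations below) pass `optional<ManualIP>` by `const` reference, so the struct's several IP addresses are no longer copied onto the callee's stack on every call. A tiny illustration with a placeholder struct — `FakeManualIP` and both function names are hypothetical, not ESPHome types:

```cpp
#include <array>
#include <cstdint>
#include <cstdio>
#include <optional>

struct FakeManualIP {  // stand-in for ManualIP: a handful of addresses
  std::array<uint32_t, 5> addrs{};
};

// By value: the whole optional (and its payload) is copied into this frame.
bool config_by_value(std::optional<FakeManualIP> ip) { return ip.has_value(); }

// By const reference: a pointer-sized argument, no copy, still read-only.
bool config_by_ref(const std::optional<FakeManualIP> &ip) { return ip.has_value(); }

int main() {
  std::optional<FakeManualIP> manual = FakeManualIP{};
  printf("%d %d\n", config_by_value(manual), config_by_ref(manual));
}
```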

View File

@@ -117,7 +117,7 @@ void netif_set_addr(struct netif *netif, const ip4_addr_t *ip, const ip4_addr_t
};
#endif
bool WiFiComponent::wifi_sta_ip_config_(optional<ManualIP> manual_ip) {
bool WiFiComponent::wifi_sta_ip_config_(const optional<ManualIP> &manual_ip) {
// enable STA
if (!this->wifi_mode_(true, {}))
return false;
@@ -525,8 +525,10 @@ void WiFiComponent::wifi_event_callback(System_Event_t *event) {
ESP_LOGW(TAG, "Disconnected ssid='%s' reason='Probe Request Unsuccessful'", buf);
s_sta_connect_not_found = true;
} else {
ESP_LOGW(TAG, "Disconnected ssid='%s' bssid=" LOG_SECRET("%s") " reason='%s'", buf,
format_mac_address_pretty(it.bssid).c_str(), LOG_STR_ARG(get_disconnect_reason_str(it.reason)));
char bssid_s[18];
format_mac_addr_upper(it.bssid, bssid_s);
ESP_LOGW(TAG, "Disconnected ssid='%s' bssid=" LOG_SECRET("%s") " reason='%s'", buf, bssid_s,
LOG_STR_ARG(get_disconnect_reason_str(it.reason)));
s_sta_connect_error = true;
}
s_sta_connected = false;
@@ -730,7 +732,7 @@ void WiFiComponent::wifi_scan_done_callback_(void *arg, STATUS status) {
}
#ifdef USE_WIFI_AP
bool WiFiComponent::wifi_ap_ip_config_(optional<ManualIP> manual_ip) {
bool WiFiComponent::wifi_ap_ip_config_(const optional<ManualIP> &manual_ip) {
// enable AP
if (!this->wifi_mode_({}, true))
return false;

View File

@@ -487,7 +487,7 @@ bool WiFiComponent::wifi_sta_connect_(const WiFiAP &ap) {
return true;
}
bool WiFiComponent::wifi_sta_ip_config_(optional<ManualIP> manual_ip) {
bool WiFiComponent::wifi_sta_ip_config_(const optional<ManualIP> &manual_ip) {
// enable STA
if (!this->wifi_mode_(true, {}))
return false;
@@ -746,8 +746,10 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
ESP_LOGI(TAG, "Disconnected ssid='%s' reason='Station Roaming'", buf);
return;
} else {
ESP_LOGW(TAG, "Disconnected ssid='%s' bssid=" LOG_SECRET("%s") " reason='%s'", buf,
format_mac_address_pretty(it.bssid).c_str(), get_disconnect_reason_str(it.reason));
char bssid_s[18];
format_mac_addr_upper(it.bssid, bssid_s);
ESP_LOGW(TAG, "Disconnected ssid='%s' bssid=" LOG_SECRET("%s") " reason='%s'", buf, bssid_s,
get_disconnect_reason_str(it.reason));
s_sta_connect_error = true;
}
s_sta_connected = false;
@@ -884,7 +886,7 @@ bool WiFiComponent::wifi_scan_start_(bool passive) {
}
#ifdef USE_WIFI_AP
bool WiFiComponent::wifi_ap_ip_config_(optional<ManualIP> manual_ip) {
bool WiFiComponent::wifi_ap_ip_config_(const optional<ManualIP> &manual_ip) {
esp_err_t err;
// enable AP

View File

@@ -68,7 +68,7 @@ bool WiFiComponent::wifi_sta_pre_setup_() {
return true;
}
bool WiFiComponent::wifi_apply_power_save_() { return WiFi.setSleep(this->power_save_ != WIFI_POWER_SAVE_NONE); }
bool WiFiComponent::wifi_sta_ip_config_(optional<ManualIP> manual_ip) {
bool WiFiComponent::wifi_sta_ip_config_(const optional<ManualIP> &manual_ip) {
// enable STA
if (!this->wifi_mode_(true, {}))
return false;
@@ -299,8 +299,10 @@ void WiFiComponent::wifi_event_callback_(esphome_wifi_event_id_t event, esphome_
if (it.reason == WIFI_REASON_NO_AP_FOUND) {
ESP_LOGW(TAG, "Disconnected ssid='%s' reason='Probe Request Unsuccessful'", buf);
} else {
ESP_LOGW(TAG, "Disconnected ssid='%s' bssid=" LOG_SECRET("%s") " reason='%s'", buf,
format_mac_address_pretty(it.bssid).c_str(), get_disconnect_reason_str(it.reason));
char bssid_s[18];
format_mac_addr_upper(it.bssid, bssid_s);
ESP_LOGW(TAG, "Disconnected ssid='%s' bssid=" LOG_SECRET("%s") " reason='%s'", buf, bssid_s,
get_disconnect_reason_str(it.reason));
}
uint8_t reason = it.reason;
@@ -434,7 +436,7 @@ void WiFiComponent::wifi_scan_done_callback_() {
}
#ifdef USE_WIFI_AP
bool WiFiComponent::wifi_ap_ip_config_(optional<ManualIP> manual_ip) {
bool WiFiComponent::wifi_ap_ip_config_(const optional<ManualIP> &manual_ip) {
// enable AP
if (!this->wifi_mode_({}, true))
return false;

View File

@@ -72,7 +72,7 @@ bool WiFiComponent::wifi_sta_connect_(const WiFiAP &ap) {
bool WiFiComponent::wifi_sta_pre_setup_() { return this->wifi_mode_(true, {}); }
bool WiFiComponent::wifi_sta_ip_config_(optional<ManualIP> manual_ip) {
bool WiFiComponent::wifi_sta_ip_config_(const optional<ManualIP> &manual_ip) {
if (!manual_ip.has_value()) {
return true;
}
@@ -146,7 +146,7 @@ bool WiFiComponent::wifi_scan_start_(bool passive) {
}
#ifdef USE_WIFI_AP
bool WiFiComponent::wifi_ap_ip_config_(optional<ManualIP> manual_ip) {
bool WiFiComponent::wifi_ap_ip_config_(const optional<ManualIP> &manual_ip) {
esphome::network::IPAddress ip_address, gateway, subnet, dns;
if (manual_ip.has_value()) {
ip_address = manual_ip->static_ip;

View File

@@ -4,7 +4,7 @@ from enum import Enum
from esphome.enum import StrEnum
__version__ = "2025.11.0b3"
__version__ = "2025.12.0-dev"
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
VALID_SUBSTITUTIONS_CHARACTERS = (

View File

@@ -609,12 +609,13 @@ uint64_t Scheduler::millis_64_(uint32_t now) {
if (now < last && (last - now) > HALF_MAX_UINT32) {
this->millis_major_++;
major++;
this->last_millis_ = now;
#ifdef ESPHOME_DEBUG_SCHEDULER
ESP_LOGD(TAG, "Detected true 32-bit rollover at %" PRIu32 "ms (was %" PRIu32 ")", now, last);
#endif /* ESPHOME_DEBUG_SCHEDULER */
} else if (now > last) {
// Only update if time moved forward
}
// Only update if time moved forward
if (now > last) {
this->last_millis_ = now;
}
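
The scheduler hunk distinguishes a genuine 32-bit `millis()` wrap — the value jumping backwards by more than half the range — from small backwards readings, and only bumps the epoch counter in the former case. A compact standalone sketch of that bookkeeping (assumed helper, not the actual `Scheduler` method):

```cpp
#include <cstdint>
#include <cstdio>

// Extends a wrapping 32-bit millisecond counter to 64 bits.
// A backwards jump larger than half the 32-bit range is treated as a true rollover;
// smaller backwards jumps (e.g. readings taken out of order) leave the epoch alone.
uint64_t millis_64_sketch(uint32_t now) {
  static uint32_t last = 0;
  static uint16_t major = 0;  // number of completed 32-bit epochs
  constexpr uint32_t HALF_MAX_UINT32 = 0x80000000u;

  if (now < last && (last - now) > HALF_MAX_UINT32) {
    major++;     // true rollover: 0xFFFFFFFF -> 0x00000000
    last = now;
  } else if (now > last) {
    last = now;  // only move the reference forward
  }
  return (static_cast<uint64_t>(major) << 32) | now;
}

int main() {
  printf("%llu\n", (unsigned long long) millis_64_sketch(0xFFFFFFF0u));  // near the end of epoch 0
  printf("%llu\n", (unsigned long long) millis_64_sketch(0x00000010u));  // wrapped: now in epoch 1
}
```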

View File

@@ -1,11 +1,11 @@
pylint==4.0.2
pylint==4.0.3
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
ruff==0.14.4 # also change in .pre-commit-config.yaml when updating
ruff==0.14.5 # also change in .pre-commit-config.yaml when updating
pyupgrade==3.21.1 # also change in .pre-commit-config.yaml when updating
pre-commit
# Unit tests
pytest==9.0.0
pytest==9.0.1
pytest-cov==7.0.0
pytest-mock==3.15.1
pytest-asyncio==1.3.0

View File

@@ -1,18 +1,6 @@
"""Tests for the web_server OTA platform."""
from __future__ import annotations
from collections.abc import Callable
import logging
from typing import Any
import pytest
from esphome import config_validation as cv
from esphome.components.web_server.ota import _web_server_ota_final_validate
from esphome.const import CONF_ID, CONF_OTA, CONF_PLATFORM, CONF_WEB_SERVER
from esphome.core import ID
import esphome.final_validate as fv
def test_web_server_ota_generated(generate_main: Callable[[str], str]) -> None:
@@ -112,144 +100,3 @@ def test_web_server_ota_esp8266(generate_main: Callable[[str], str]) -> None:
# Check web server OTA component is present
assert "WebServerOTAComponent" in main_cpp
assert "web_server::WebServerOTAComponent" in main_cpp
@pytest.mark.parametrize(
("ota_configs", "expected_count", "warning_expected"),
[
pytest.param(
[
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web", is_manual=False),
}
],
1,
False,
id="single_instance_no_merge",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_1", is_manual=False),
},
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_2", is_manual=False),
},
],
1,
True,
id="two_instances_merged",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_1", is_manual=False),
},
{
CONF_PLATFORM: "esphome",
CONF_ID: ID("ota_esphome", is_manual=False),
},
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_2", is_manual=False),
},
],
2,
True,
id="mixed_platforms_web_server_merged",
),
],
)
def test_web_server_ota_instance_merging(
ota_configs: list[dict[str, Any]],
expected_count: int,
warning_expected: bool,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test web_server OTA instance merging behavior."""
full_conf = {CONF_OTA: ota_configs.copy()}
token = fv.full_config.set(full_conf)
try:
with caplog.at_level(logging.WARNING):
_web_server_ota_final_validate({})
updated_conf = fv.full_config.get()
# Verify total number of OTA platforms
assert len(updated_conf[CONF_OTA]) == expected_count
# Verify warning
if warning_expected:
assert any(
"Found and merged" in record.message
and "web_server OTA" in record.message
for record in caplog.records
), "Expected merge warning not found in log"
else:
assert len(caplog.records) == 0, "Unexpected warnings logged"
finally:
fv.full_config.reset(token)
def test_web_server_ota_consistent_manual_ids(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that consistent manual IDs can be merged successfully."""
ota_configs = [
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web", is_manual=True),
},
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web", is_manual=True),
},
]
full_conf = {CONF_OTA: ota_configs}
token = fv.full_config.set(full_conf)
try:
with caplog.at_level(logging.WARNING):
_web_server_ota_final_validate({})
updated_conf = fv.full_config.get()
assert len(updated_conf[CONF_OTA]) == 1
assert updated_conf[CONF_OTA][0][CONF_ID].id == "ota_web"
assert any(
"Found and merged" in record.message and "web_server OTA" in record.message
for record in caplog.records
)
finally:
fv.full_config.reset(token)
def test_web_server_ota_inconsistent_manual_ids() -> None:
"""Test that inconsistent manual IDs raise an error."""
ota_configs = [
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_1", is_manual=True),
},
{
CONF_PLATFORM: CONF_WEB_SERVER,
CONF_ID: ID("ota_web_2", is_manual=True),
},
]
full_conf = {CONF_OTA: ota_configs}
token = fv.full_config.set(full_conf)
try:
with pytest.raises(
cv.Invalid,
match="Found multiple web_server OTA configurations but id is inconsistent",
):
_web_server_ota_final_validate({})
finally:
fv.full_config.reset(token)

View File

@@ -1 +0,0 @@
"""Tests for SNTP component."""

View File

@@ -1,22 +0,0 @@
esphome:
name: sntp-test
esp32:
board: esp32dev
framework:
type: esp-idf
wifi:
ssid: "testssid"
password: "testpassword"
# Test multiple SNTP instances that should be merged
time:
- platform: sntp
servers:
- 192.168.1.1
- pool.ntp.org
- platform: sntp
servers:
- pool.ntp.org
- 192.168.1.2

View File

@@ -1,238 +0,0 @@
"""Tests for SNTP time configuration validation."""
from __future__ import annotations
import logging
from typing import Any
import pytest
from esphome import config_validation as cv
from esphome.components.sntp.time import CONF_SNTP, _sntp_final_validate
from esphome.const import CONF_ID, CONF_PLATFORM, CONF_SERVERS, CONF_TIME
from esphome.core import ID
import esphome.final_validate as fv
@pytest.mark.parametrize(
("time_configs", "expected_count", "expected_servers", "warning_messages"),
[
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time", is_manual=False),
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
}
],
1,
["192.168.1.1", "pool.ntp.org"],
[],
id="single_instance_no_merge",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["192.168.1.2"],
},
],
1,
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
["Found and merged 2 SNTP time configurations into one instance"],
id="two_instances_merged",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["pool.ntp.org", "192.168.1.2"],
},
],
1,
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
["Found and merged 2 SNTP time configurations into one instance"],
id="deduplication_preserves_order",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["192.168.1.2", "pool2.ntp.org"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_3", is_manual=False),
CONF_SERVERS: ["pool3.ntp.org"],
},
],
1,
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
[
"SNTP supports maximum 3 servers. Dropped excess server(s): ['pool2.ntp.org', 'pool3.ntp.org']",
"Found and merged 3 SNTP time configurations into one instance",
],
id="three_instances_drops_excess_servers",
),
pytest.param(
[
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: [
"192.168.1.1",
"pool.ntp.org",
"pool.ntp.org",
"192.168.1.1",
],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["pool.ntp.org", "192.168.1.2"],
},
],
1,
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
["Found and merged 2 SNTP time configurations into one instance"],
id="deduplication_multiple_duplicates",
),
],
)
def test_sntp_instance_merging(
time_configs: list[dict[str, Any]],
expected_count: int,
expected_servers: list[str],
warning_messages: list[str],
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test SNTP instance merging behavior."""
# Create a mock full config with time configs
full_conf = {CONF_TIME: time_configs.copy()}
# Set the context var
token = fv.full_config.set(full_conf)
try:
with caplog.at_level(logging.WARNING):
_sntp_final_validate({})
# Get the updated config
updated_conf = fv.full_config.get()
# Check if merging occurred
if len(time_configs) > 1:
# Verify only one SNTP instance remains
sntp_instances = [
tc
for tc in updated_conf[CONF_TIME]
if tc.get(CONF_PLATFORM) == CONF_SNTP
]
assert len(sntp_instances) == expected_count
# Verify server list
assert sntp_instances[0][CONF_SERVERS] == expected_servers
# Verify warnings
for expected_msg in warning_messages:
assert any(
expected_msg in record.message for record in caplog.records
), f"Expected warning message '{expected_msg}' not found in log"
else:
# Single instance should not trigger merging or warnings
assert len(caplog.records) == 0
# Config should be unchanged
assert updated_conf[CONF_TIME] == time_configs
finally:
fv.full_config.reset(token)
def test_sntp_inconsistent_manual_ids() -> None:
"""Test that inconsistent manual IDs raise an error."""
# Create configs with manual IDs that are inconsistent
time_configs = [
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=True),
CONF_SERVERS: ["192.168.1.1"],
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=True),
CONF_SERVERS: ["192.168.1.2"],
},
]
full_conf = {CONF_TIME: time_configs}
token = fv.full_config.set(full_conf)
try:
with pytest.raises(
cv.Invalid,
match="Found multiple SNTP configurations but id is inconsistent",
):
_sntp_final_validate({})
finally:
fv.full_config.reset(token)
def test_sntp_with_other_time_platforms(caplog: pytest.LogCaptureFixture) -> None:
"""Test that SNTP merging doesn't affect other time platforms."""
time_configs = [
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_1", is_manual=False),
CONF_SERVERS: ["192.168.1.1"],
},
{
CONF_PLATFORM: "homeassistant",
CONF_ID: ID("homeassistant_time", is_manual=False),
},
{
CONF_PLATFORM: CONF_SNTP,
CONF_ID: ID("sntp_time_2", is_manual=False),
CONF_SERVERS: ["192.168.1.2"],
},
]
full_conf = {CONF_TIME: time_configs.copy()}
token = fv.full_config.set(full_conf)
try:
with caplog.at_level(logging.WARNING):
_sntp_final_validate({})
updated_conf = fv.full_config.get()
# Should have 2 time platforms: 1 merged SNTP + 1 homeassistant
assert len(updated_conf[CONF_TIME]) == 2
# Find the platforms
platforms = {tc[CONF_PLATFORM] for tc in updated_conf[CONF_TIME]}
assert platforms == {CONF_SNTP, "homeassistant"}
# Verify SNTP was merged
sntp_instances = [
tc for tc in updated_conf[CONF_TIME] if tc[CONF_PLATFORM] == CONF_SNTP
]
assert len(sntp_instances) == 1
assert sntp_instances[0][CONF_SERVERS] == ["192.168.1.1", "192.168.1.2"]
finally:
fv.full_config.reset(token)

View File

@@ -15,6 +15,7 @@ nrf52:
inverted: true
mode:
output: true
dcdc: False
reg0:
voltage: 2.1V
uicr_erase: true