Mirror of https://github.com/esphome/esphome.git (synced 2025-11-18 15:55:46 +00:00)
Doxyfile
@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
 # could be handy for archiving the generated documentation or if some version
 # control system is used.
 
-PROJECT_NUMBER = 2025.11.0b2
+PROJECT_NUMBER = 2025.11.0b3
 
 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
@@ -15,6 +15,11 @@ from . import (
 class MemoryAnalyzerCLI(MemoryAnalyzer):
     """Memory analyzer with CLI-specific report generation."""
 
+    # Symbol size threshold for detailed analysis
+    SYMBOL_SIZE_THRESHOLD: int = (
+        100  # Show symbols larger than this in detailed analysis
+    )
+
     # Column width constants
     COL_COMPONENT: int = 29
     COL_FLASH_TEXT: int = 14
@@ -191,14 +196,21 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
                 f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%"
             )
 
-        # Top 15 largest core symbols
+        # All core symbols above threshold
         lines.append("")
-        lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:")
         sorted_core_symbols = sorted(
             self._esphome_core_symbols, key=lambda x: x[2], reverse=True
         )
+        large_core_symbols = [
+            (symbol, demangled, size)
+            for symbol, demangled, size in sorted_core_symbols
+            if size > self.SYMBOL_SIZE_THRESHOLD
+        ]
 
-        for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]):
+        lines.append(
+            f"{_COMPONENT_CORE} Symbols > {self.SYMBOL_SIZE_THRESHOLD} B ({len(large_core_symbols)} symbols):"
+        )
+        for i, (symbol, demangled, size) in enumerate(large_core_symbols):
             lines.append(f"{i + 1}. {demangled} ({size:,} B)")
 
         lines.append("=" * self.TABLE_WIDTH)
@@ -268,13 +280,15 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
             lines.append(f"Total size: {comp_mem.flash_total:,} B")
             lines.append("")
 
-            # Show all symbols > 100 bytes for better visibility
+            # Show all symbols above threshold for better visibility
             large_symbols = [
-                (sym, dem, size) for sym, dem, size in sorted_symbols if size > 100
+                (sym, dem, size)
+                for sym, dem, size in sorted_symbols
+                if size > self.SYMBOL_SIZE_THRESHOLD
             ]
 
             lines.append(
-                f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):"
+                f"{comp_name} Symbols > {self.SYMBOL_SIZE_THRESHOLD} B ({len(large_symbols)} symbols):"
             )
             for i, (symbol, demangled, size) in enumerate(large_symbols):
                 lines.append(f"{i + 1}. {demangled} ({size:,} B)")
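The hunks above replace the fixed "Top 15" listing with threshold-based filtering. For illustration only, here is a standalone sketch of that report logic; the symbol tuples below are made up, and only the (symbol, demangled, size) layout is taken from the diff:

# Hedged sketch of the threshold-based symbol report (hypothetical data).
SYMBOL_SIZE_THRESHOLD = 100

symbols = [("_Z3foo", "foo()", 412), ("_Z3bar", "bar()", 96), ("_Z3baz", "baz()", 2048)]
large = [
    (sym, dem, size)
    for sym, dem, size in sorted(symbols, key=lambda x: x[2], reverse=True)
    if size > SYMBOL_SIZE_THRESHOLD
]
print(f"Symbols > {SYMBOL_SIZE_THRESHOLD} B ({len(large)} symbols):")
for i, (_sym, dem, size) in enumerate(large):
    print(f"{i + 1}. {dem} ({size:,} B)")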
@@ -381,8 +381,9 @@ PLATFORM_VERSION_LOOKUP = {
 }
 
 
-def _check_versions(value):
-    value = value.copy()
+def _check_versions(config):
+    config = config.copy()
+    value = config[CONF_FRAMEWORK]
 
     if value[CONF_VERSION] in PLATFORM_VERSION_LOOKUP:
         if CONF_SOURCE in value or CONF_PLATFORM_VERSION in value:
@@ -447,7 +448,7 @@ def _check_versions(value):
             "If there are connectivity or build issues please remove the manual version."
         )
 
-    return value
+    return config
 
 
 def _parse_platform_version(value):
@@ -497,6 +498,8 @@ def final_validate(config):
     from esphome.components.psram import DOMAIN as PSRAM_DOMAIN
 
     errs = []
+    conf_fw = config[CONF_FRAMEWORK]
+    advanced = conf_fw[CONF_ADVANCED]
     full_config = fv.full_config.get()
     if pio_options := full_config[CONF_ESPHOME].get(CONF_PLATFORMIO_OPTIONS):
         pio_flash_size_key = "board_upload.flash_size"
@@ -513,22 +516,14 @@ def final_validate(config):
                     f"Please specify {CONF_FLASH_SIZE} within esp32 configuration only"
                 )
             )
-    if (
-        config[CONF_VARIANT] != VARIANT_ESP32
-        and CONF_ADVANCED in (conf_fw := config[CONF_FRAMEWORK])
-        and CONF_IGNORE_EFUSE_MAC_CRC in conf_fw[CONF_ADVANCED]
-    ):
+    if config[CONF_VARIANT] != VARIANT_ESP32 and advanced[CONF_IGNORE_EFUSE_MAC_CRC]:
         errs.append(
             cv.Invalid(
                 f"'{CONF_IGNORE_EFUSE_MAC_CRC}' is not supported on {config[CONF_VARIANT]}",
                 path=[CONF_FRAMEWORK, CONF_ADVANCED, CONF_IGNORE_EFUSE_MAC_CRC],
             )
         )
-    if (
-        config.get(CONF_FRAMEWORK, {})
-        .get(CONF_ADVANCED, {})
-        .get(CONF_EXECUTE_FROM_PSRAM)
-    ):
+    if advanced[CONF_EXECUTE_FROM_PSRAM]:
         if config[CONF_VARIANT] != VARIANT_ESP32S3:
             errs.append(
                 cv.Invalid(
@@ -544,6 +539,17 @@ def final_validate(config):
                 )
             )
 
+    if (
+        config[CONF_FLASH_SIZE] == "32MB"
+        and "ota" in full_config
+        and not advanced[CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES]
+    ):
+        errs.append(
+            cv.Invalid(
+                f"OTA with 32MB flash requires '{CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES}' to be set in the '{CONF_ADVANCED}' section of the esp32 configuration",
+                path=[CONF_FLASH_SIZE],
+            )
+        )
     if errs:
         raise cv.MultipleInvalid(errs)
 
@@ -598,12 +604,9 @@ def _validate_idf_component(config: ConfigType) -> ConfigType:
 
 FRAMEWORK_ESP_IDF = "esp-idf"
 FRAMEWORK_ARDUINO = "arduino"
-FRAMEWORK_SCHEMA = cv.All(
-    cv.Schema(
+FRAMEWORK_SCHEMA = cv.Schema(
     {
-        cv.Optional(CONF_TYPE, default=FRAMEWORK_ARDUINO): cv.one_of(
-            FRAMEWORK_ESP_IDF, FRAMEWORK_ARDUINO
-        ),
+        cv.Optional(CONF_TYPE): cv.one_of(FRAMEWORK_ESP_IDF, FRAMEWORK_ARDUINO),
         cv.Optional(CONF_VERSION, default="recommended"): cv.string_strict,
         cv.Optional(CONF_RELEASE): cv.string_strict,
         cv.Optional(CONF_SOURCE): cv.string_strict,
@@ -622,21 +625,19 @@ FRAMEWORK_SCHEMA = cv.All(
         cv.Optional(CONF_COMPILER_OPTIMIZATION, default="SIZE"): cv.one_of(
             *COMPILER_OPTIMIZATIONS, upper=True
         ),
-        cv.Optional(CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES): cv.boolean,
-        cv.Optional(CONF_ENABLE_LWIP_ASSERT, default=True): cv.boolean,
         cv.Optional(
-            CONF_IGNORE_EFUSE_CUSTOM_MAC, default=False
+            CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES, default=False
         ): cv.boolean,
-        cv.Optional(CONF_IGNORE_EFUSE_MAC_CRC): cv.boolean,
+        cv.Optional(CONF_ENABLE_LWIP_ASSERT, default=True): cv.boolean,
+        cv.Optional(CONF_IGNORE_EFUSE_CUSTOM_MAC, default=False): cv.boolean,
+        cv.Optional(CONF_IGNORE_EFUSE_MAC_CRC, default=False): cv.boolean,
         # DHCP server is needed for WiFi AP mode. When WiFi component is used,
         # it will handle disabling DHCP server when AP is not configured.
         # Default to false (disabled) when WiFi is not used.
         cv.OnlyWithout(
             CONF_ENABLE_LWIP_DHCP_SERVER, "wifi", default=False
         ): cv.boolean,
-        cv.Optional(
-            CONF_ENABLE_LWIP_MDNS_QUERIES, default=True
-        ): cv.boolean,
+        cv.Optional(CONF_ENABLE_LWIP_MDNS_QUERIES, default=True): cv.boolean,
         cv.Optional(
             CONF_ENABLE_LWIP_BRIDGE_INTERFACE, default=False
         ): cv.boolean,
@@ -646,17 +647,11 @@ FRAMEWORK_SCHEMA = cv.All(
         cv.Optional(
             CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY, default=True
         ): cv.boolean,
-        cv.Optional(
-            CONF_DISABLE_LIBC_LOCKS_IN_IRAM, default=True
-        ): cv.boolean,
-        cv.Optional(
-            CONF_DISABLE_VFS_SUPPORT_TERMIOS, default=True
-        ): cv.boolean,
-        cv.Optional(
-            CONF_DISABLE_VFS_SUPPORT_SELECT, default=True
-        ): cv.boolean,
+        cv.Optional(CONF_DISABLE_LIBC_LOCKS_IN_IRAM, default=True): cv.boolean,
+        cv.Optional(CONF_DISABLE_VFS_SUPPORT_TERMIOS, default=True): cv.boolean,
+        cv.Optional(CONF_DISABLE_VFS_SUPPORT_SELECT, default=True): cv.boolean,
         cv.Optional(CONF_DISABLE_VFS_SUPPORT_DIR, default=True): cv.boolean,
-        cv.Optional(CONF_EXECUTE_FROM_PSRAM): cv.boolean,
+        cv.Optional(CONF_EXECUTE_FROM_PSRAM, default=False): cv.boolean,
         cv.Optional(CONF_LOOP_TASK_STACK_SIZE, default=8192): cv.int_range(
             min=8192, max=32768
         ),
@@ -670,17 +665,13 @@ FRAMEWORK_SCHEMA = cv.All(
                     cv.Optional(CONF_SOURCE): cv.git_ref,
                     cv.Optional(CONF_REF): cv.string,
                     cv.Optional(CONF_PATH): cv.string,
-                    cv.Optional(CONF_REFRESH): cv.All(
-                        cv.string, cv.source_refresh
-                    ),
+                    cv.Optional(CONF_REFRESH): cv.All(cv.string, cv.source_refresh),
                 }
             ),
             _validate_idf_component,
         )
     ),
     }
-    ),
-    _check_versions,
 )
 
 
@@ -743,11 +734,11 @@ def _show_framework_migration_message(name: str, variant: str) -> None:
 
 
 def _set_default_framework(config):
-    if CONF_FRAMEWORK not in config:
     config = config.copy()
+    if CONF_FRAMEWORK not in config:
-        variant = config[CONF_VARIANT]
         config[CONF_FRAMEWORK] = FRAMEWORK_SCHEMA({})
+    if CONF_TYPE not in config[CONF_FRAMEWORK]:
+        variant = config[CONF_VARIANT]
         if variant in ARDUINO_ALLOWED_VARIANTS:
             config[CONF_FRAMEWORK][CONF_TYPE] = FRAMEWORK_ARDUINO
             _show_framework_migration_message(
@@ -787,6 +778,7 @@ CONFIG_SCHEMA = cv.All(
     ),
     _detect_variant,
     _set_default_framework,
+    _check_versions,
     set_core_data,
     cv.has_at_least_one_key(CONF_BOARD, CONF_VARIANT),
 )
@@ -805,9 +797,7 @@ def _configure_lwip_max_sockets(conf: dict) -> None:
     from esphome.components.socket import KEY_SOCKET_CONSUMERS
 
     # Check if user manually specified CONFIG_LWIP_MAX_SOCKETS
-    user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get(
-        "CONFIG_LWIP_MAX_SOCKETS"
-    )
+    user_max_sockets = conf[CONF_SDKCONFIG_OPTIONS].get("CONFIG_LWIP_MAX_SOCKETS")
 
     socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {})
     total_sockets = sum(socket_consumers.values())
@@ -977,23 +967,18 @@ async def to_code(config):
     # WiFi component handles its own optimization when AP mode is not used
     # When using Arduino with Ethernet, DHCP server functions must be available
     # for the Network library to compile, even if not actively used
-    if (
-        CONF_ENABLE_LWIP_DHCP_SERVER in advanced
-        and not advanced[CONF_ENABLE_LWIP_DHCP_SERVER]
-        and not (
-            conf[CONF_TYPE] == FRAMEWORK_ARDUINO
-            and "ethernet" in CORE.loaded_integrations
-        )
+    if advanced.get(CONF_ENABLE_LWIP_DHCP_SERVER) is False and not (
+        conf[CONF_TYPE] == FRAMEWORK_ARDUINO and "ethernet" in CORE.loaded_integrations
     ):
         add_idf_sdkconfig_option("CONFIG_LWIP_DHCPS", False)
-    if not advanced.get(CONF_ENABLE_LWIP_MDNS_QUERIES, True):
+    if not advanced[CONF_ENABLE_LWIP_MDNS_QUERIES]:
         add_idf_sdkconfig_option("CONFIG_LWIP_DNS_SUPPORT_MDNS_QUERIES", False)
-    if not advanced.get(CONF_ENABLE_LWIP_BRIDGE_INTERFACE, False):
+    if not advanced[CONF_ENABLE_LWIP_BRIDGE_INTERFACE]:
         add_idf_sdkconfig_option("CONFIG_LWIP_BRIDGEIF_MAX_PORTS", 0)
 
     _configure_lwip_max_sockets(conf)
 
-    if advanced.get(CONF_EXECUTE_FROM_PSRAM, False):
+    if advanced[CONF_EXECUTE_FROM_PSRAM]:
         add_idf_sdkconfig_option("CONFIG_SPIRAM_FETCH_INSTRUCTIONS", True)
         add_idf_sdkconfig_option("CONFIG_SPIRAM_RODATA", True)
 
@@ -1004,23 +989,22 @@ async def to_code(config):
     # - select() on 4 sockets: ~190μs (Arduino/core locking) vs ~235μs (ESP-IDF default)
     # - Up to 200% slower under load when all operations queue through tcpip_thread
     # Enabling this makes ESP-IDF socket performance match Arduino framework.
-    if advanced.get(CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING, True):
+    if advanced[CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING]:
         add_idf_sdkconfig_option("CONFIG_LWIP_TCPIP_CORE_LOCKING", True)
-    if advanced.get(CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY, True):
+    if advanced[CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY]:
         add_idf_sdkconfig_option("CONFIG_LWIP_CHECK_THREAD_SAFETY", True)
 
     # Disable placing libc locks in IRAM to save RAM
     # This is safe for ESPHome since no IRAM ISRs (interrupts that run while cache is disabled)
     # use libc lock APIs. Saves approximately 1.3KB (1,356 bytes) of IRAM.
-    if advanced.get(CONF_DISABLE_LIBC_LOCKS_IN_IRAM, True):
+    if advanced[CONF_DISABLE_LIBC_LOCKS_IN_IRAM]:
         add_idf_sdkconfig_option("CONFIG_LIBC_LOCKS_PLACE_IN_IRAM", False)
 
     # Disable VFS support for termios (terminal I/O functions)
     # ESPHome doesn't use termios functions on ESP32 (only used in host UART driver).
     # Saves approximately 1.8KB of flash when disabled (default).
     add_idf_sdkconfig_option(
-        "CONFIG_VFS_SUPPORT_TERMIOS",
-        not advanced.get(CONF_DISABLE_VFS_SUPPORT_TERMIOS, True),
+        "CONFIG_VFS_SUPPORT_TERMIOS", not advanced[CONF_DISABLE_VFS_SUPPORT_TERMIOS]
     )
 
     # Disable VFS support for select() with file descriptors
@@ -1034,8 +1018,7 @@ async def to_code(config):
     else:
         # No component needs it - allow user to control (default: disabled)
         add_idf_sdkconfig_option(
-            "CONFIG_VFS_SUPPORT_SELECT",
-            not advanced.get(CONF_DISABLE_VFS_SUPPORT_SELECT, True),
+            "CONFIG_VFS_SUPPORT_SELECT", not advanced[CONF_DISABLE_VFS_SUPPORT_SELECT]
         )
 
     # Disable VFS support for directory functions (opendir, readdir, mkdir, etc.)
@@ -1048,8 +1031,7 @@ async def to_code(config):
     else:
         # No component needs it - allow user to control (default: disabled)
         add_idf_sdkconfig_option(
-            "CONFIG_VFS_SUPPORT_DIR",
-            not advanced.get(CONF_DISABLE_VFS_SUPPORT_DIR, True),
+            "CONFIG_VFS_SUPPORT_DIR", not advanced[CONF_DISABLE_VFS_SUPPORT_DIR]
         )
 
     cg.add_platformio_option("board_build.partitions", "partitions.csv")
@@ -1063,7 +1045,7 @@ async def to_code(config):
         add_idf_sdkconfig_option(flag, assertion_level == key)
 
     add_idf_sdkconfig_option("CONFIG_COMPILER_OPTIMIZATION_DEFAULT", False)
-    compiler_optimization = advanced.get(CONF_COMPILER_OPTIMIZATION)
+    compiler_optimization = advanced[CONF_COMPILER_OPTIMIZATION]
     for key, flag in COMPILER_OPTIMIZATIONS.items():
         add_idf_sdkconfig_option(flag, compiler_optimization == key)
 
@@ -1072,19 +1054,21 @@ async def to_code(config):
         conf[CONF_ADVANCED][CONF_ENABLE_LWIP_ASSERT],
     )
 
-    if advanced.get(CONF_IGNORE_EFUSE_MAC_CRC):
+    if advanced[CONF_IGNORE_EFUSE_MAC_CRC]:
         add_idf_sdkconfig_option("CONFIG_ESP_MAC_IGNORE_MAC_CRC_ERROR", True)
         add_idf_sdkconfig_option("CONFIG_ESP_PHY_CALIBRATION_AND_DATA_STORAGE", False)
-    if advanced.get(CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES):
+    if advanced[CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES]:
         _LOGGER.warning(
             "Using experimental features in ESP-IDF may result in unexpected failures."
         )
         add_idf_sdkconfig_option("CONFIG_IDF_EXPERIMENTAL_FEATURES", True)
+        if config[CONF_FLASH_SIZE] == "32MB":
-    cg.add_define(
-        "ESPHOME_LOOP_TASK_STACK_SIZE", advanced.get(CONF_LOOP_TASK_STACK_SIZE)
+            add_idf_sdkconfig_option(
+                "CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH", True
     )
 
+    cg.add_define("ESPHOME_LOOP_TASK_STACK_SIZE", advanced[CONF_LOOP_TASK_STACK_SIZE])
 
     cg.add_define(
         "USE_ESP_IDF_VERSION_CODE",
         cg.RawExpression(
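Several of the esp32 hunks above swap advanced.get(key, default) for direct advanced[key] indexing. That is safe because the framework schema now declares default= for those options, so validation always materializes the keys. A minimal sketch of the behaviour this relies on, assuming plain voluptuous (the library ESPHome's cv helpers are built on); the option name below is illustrative:

# Sketch only: once a schema option declares default=..., the validated dict
# always contains the key, so dict indexing replaces .get() with a fallback.
import voluptuous as vol

schema = vol.Schema({vol.Optional("execute_from_psram", default=False): bool})
validated = schema({})
print(validated["execute_from_psram"])  # False: the default was filled in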
@@ -31,35 +31,83 @@ CONFIG_SCHEMA = cv.Schema(
         cv.GenerateID(CONF_LD2410_ID): cv.use_id(LD2410Component),
         cv.Optional(CONF_MOVING_DISTANCE): sensor.sensor_schema(
             device_class=DEVICE_CLASS_DISTANCE,
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_SIGNAL,
             unit_of_measurement=UNIT_CENTIMETER,
         ),
         cv.Optional(CONF_STILL_DISTANCE): sensor.sensor_schema(
             device_class=DEVICE_CLASS_DISTANCE,
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_SIGNAL,
             unit_of_measurement=UNIT_CENTIMETER,
         ),
         cv.Optional(CONF_MOVING_ENERGY): sensor.sensor_schema(
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_MOTION_SENSOR,
             unit_of_measurement=UNIT_PERCENT,
         ),
         cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_FLASH,
             unit_of_measurement=UNIT_PERCENT,
         ),
         cv.Optional(CONF_LIGHT): sensor.sensor_schema(
             device_class=DEVICE_CLASS_ILLUMINANCE,
             entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_LIGHTBULB,
         ),
         cv.Optional(CONF_DETECTION_DISTANCE): sensor.sensor_schema(
             device_class=DEVICE_CLASS_DISTANCE,
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_SIGNAL,
             unit_of_measurement=UNIT_CENTIMETER,
         ),
@@ -73,7 +121,13 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
         cv.Optional(CONF_MOVE_ENERGY): sensor.sensor_schema(
             entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
             filters=[
-                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
             ],
             icon=ICON_MOTION_SENSOR,
             unit_of_measurement=UNIT_PERCENT,
@@ -81,7 +135,13 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
         cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
             entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
             filters=[
-                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
             ],
             icon=ICON_FLASH,
             unit_of_measurement=UNIT_PERCENT,
@@ -31,36 +31,84 @@ CONFIG_SCHEMA = cv.Schema(
         cv.GenerateID(CONF_LD2412_ID): cv.use_id(LD2412Component),
         cv.Optional(CONF_DETECTION_DISTANCE): sensor.sensor_schema(
             device_class=DEVICE_CLASS_DISTANCE,
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_SIGNAL,
             unit_of_measurement=UNIT_CENTIMETER,
         ),
         cv.Optional(CONF_LIGHT): sensor.sensor_schema(
             device_class=DEVICE_CLASS_ILLUMINANCE,
             entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_LIGHTBULB,
             unit_of_measurement=UNIT_EMPTY,  # No standard unit for this light sensor
         ),
         cv.Optional(CONF_MOVING_DISTANCE): sensor.sensor_schema(
             device_class=DEVICE_CLASS_DISTANCE,
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_SIGNAL,
             unit_of_measurement=UNIT_CENTIMETER,
         ),
         cv.Optional(CONF_MOVING_ENERGY): sensor.sensor_schema(
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_MOTION_SENSOR,
             unit_of_measurement=UNIT_PERCENT,
         ),
         cv.Optional(CONF_STILL_DISTANCE): sensor.sensor_schema(
             device_class=DEVICE_CLASS_DISTANCE,
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_SIGNAL,
             unit_of_measurement=UNIT_CENTIMETER,
         ),
         cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
-            filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
+            filters=[
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
+            ],
             icon=ICON_FLASH,
             unit_of_measurement=UNIT_PERCENT,
         ),
@@ -74,7 +122,13 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
         cv.Optional(CONF_MOVE_ENERGY): sensor.sensor_schema(
            entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
             filters=[
-                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
             ],
             icon=ICON_MOTION_SENSOR,
             unit_of_measurement=UNIT_PERCENT,
@@ -82,7 +136,13 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
         cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
             entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
             filters=[
-                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
+                {
+                    "timeout": {
+                        "timeout": cv.TimePeriod(milliseconds=1000),
+                        "value": "last",
+                    }
+                },
+                {"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
             ],
             icon=ICON_FLASH,
             unit_of_measurement=UNIT_PERCENT,
@@ -52,8 +52,10 @@ static void log_invalid_parameter(const char *name, const LogString *message) {
 }
 
 static const LogString *color_mode_to_human(ColorMode color_mode) {
-  if (color_mode == ColorMode::UNKNOWN)
-    return LOG_STR("Unknown");
+  if (color_mode == ColorMode::ON_OFF)
+    return LOG_STR("On/Off");
+  if (color_mode == ColorMode::BRIGHTNESS)
+    return LOG_STR("Brightness");
   if (color_mode == ColorMode::WHITE)
     return LOG_STR("White");
   if (color_mode == ColorMode::COLOR_TEMPERATURE)
@@ -68,7 +70,7 @@ static const LogString *color_mode_to_human(ColorMode color_mode) {
     return LOG_STR("RGB + cold/warm white");
   if (color_mode == ColorMode::RGB_COLOR_TEMPERATURE)
     return LOG_STR("RGB + color temperature");
-  return LOG_STR("");
+  return LOG_STR("Unknown");
 }
 
 // Helper to log percentage values
@@ -1,9 +1,14 @@
+import logging
+
 import esphome.codegen as cg
 from esphome.components import time as time_
+from esphome.config_helpers import merge_config
 import esphome.config_validation as cv
 from esphome.const import (
     CONF_ID,
+    CONF_PLATFORM,
     CONF_SERVERS,
+    CONF_TIME,
     PLATFORM_BK72XX,
     PLATFORM_ESP32,
     PLATFORM_ESP8266,
@@ -12,13 +17,74 @@ from esphome.const import (
     PLATFORM_RTL87XX,
 )
 from esphome.core import CORE
+import esphome.final_validate as fv
+from esphome.types import ConfigType
+
+_LOGGER = logging.getLogger(__name__)
 
 DEPENDENCIES = ["network"]
 
+CONF_SNTP = "sntp"
+
 sntp_ns = cg.esphome_ns.namespace("sntp")
 SNTPComponent = sntp_ns.class_("SNTPComponent", time_.RealTimeClock)
 
 DEFAULT_SERVERS = ["0.pool.ntp.org", "1.pool.ntp.org", "2.pool.ntp.org"]
 
+
+def _sntp_final_validate(config: ConfigType) -> None:
+    """Merge multiple SNTP instances into one, similar to OTA merging behavior."""
+    full_conf = fv.full_config.get()
+    time_confs = full_conf.get(CONF_TIME, [])
+
+    sntp_configs: list[ConfigType] = []
+    other_time_configs: list[ConfigType] = []
+
+    for time_conf in time_confs:
+        if time_conf.get(CONF_PLATFORM) == CONF_SNTP:
+            sntp_configs.append(time_conf)
+        else:
+            other_time_configs.append(time_conf)
+
+    if len(sntp_configs) <= 1:
+        return
+
+    # Merge all SNTP configs into the first one
+    merged = sntp_configs[0]
+    for sntp_conf in sntp_configs[1:]:
+        # Validate that IDs are consistent if manually specified
+        if merged[CONF_ID].is_manual and sntp_conf[CONF_ID].is_manual:
+            raise cv.Invalid(
+                f"Found multiple SNTP configurations but {CONF_ID} is inconsistent"
+            )
+        merged = merge_config(merged, sntp_conf)
+
+    # Deduplicate servers while preserving order
+    servers = merged[CONF_SERVERS]
+    unique_servers = list(dict.fromkeys(servers))
+
+    # Warn if we're dropping servers due to 3-server limit
+    if len(unique_servers) > 3:
+        dropped = unique_servers[3:]
+        unique_servers = unique_servers[:3]
+        _LOGGER.warning(
+            "SNTP supports maximum 3 servers. Dropped excess server(s): %s",
+            dropped,
+        )
+
+    merged[CONF_SERVERS] = unique_servers
+
+    _LOGGER.warning(
+        "Found and merged %d SNTP time configurations into one instance",
+        len(sntp_configs),
+    )
+
+    # Replace time configs with merged SNTP + other time platforms
+    other_time_configs.append(merged)
+    full_conf[CONF_TIME] = other_time_configs
+    fv.full_config.set(full_conf)
+
+
 CONFIG_SCHEMA = cv.All(
     time_.TIME_SCHEMA.extend(
         {
@@ -40,6 +106,8 @@ CONFIG_SCHEMA = cv.All(
     ),
 )
 
+FINAL_VALIDATE_SCHEMA = _sntp_final_validate
+
 
 async def to_code(config):
     servers = config[CONF_SERVERS]
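The merge step above deduplicates servers while preserving their order and caps the list at three entries. A standalone sketch of just that part (the server names are examples):

# Order-preserving deduplication plus the 3-server cap used by _sntp_final_validate.
servers = ["0.pool.ntp.org", "1.pool.ntp.org", "0.pool.ntp.org", "time.cloudflare.com", "2.pool.ntp.org"]
unique_servers = list(dict.fromkeys(servers))  # dedup, keeping first occurrence order
dropped = unique_servers[3:]
unique_servers = unique_servers[:3]
print(unique_servers)  # ['0.pool.ntp.org', '1.pool.ntp.org', 'time.cloudflare.com']
print(dropped)         # ['2.pool.ntp.org']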
@@ -1,3 +1,4 @@
+from logging import getLogger
 import math
 import re
 
@@ -35,6 +36,8 @@ from esphome.core import CORE, ID
 import esphome.final_validate as fv
 from esphome.yaml_util import make_data_base
 
+_LOGGER = getLogger(__name__)
+
 CODEOWNERS = ["@esphome/core"]
 uart_ns = cg.esphome_ns.namespace("uart")
 UARTComponent = uart_ns.class_("UARTComponent")
@@ -130,6 +133,21 @@ def validate_host_config(config):
     return config
 
 
+def validate_rx_buffer_size(config):
+    if CORE.is_esp32:
+        # ESP32 UART hardware FIFO is 128 bytes (LP UART is 16 bytes, but we use 128 as safe minimum)
+        # rx_buffer_size must be greater than the hardware FIFO length
+        min_buffer_size = 128
+        if config[CONF_RX_BUFFER_SIZE] <= min_buffer_size:
+            _LOGGER.warning(
+                "UART rx_buffer_size (%d bytes) is too small and must be greater than the hardware "
+                "FIFO size (%d bytes). The buffer size will be automatically adjusted at runtime.",
+                config[CONF_RX_BUFFER_SIZE],
+                min_buffer_size,
+            )
+    return config
+
+
 def _uart_declare_type(value):
     if CORE.is_esp8266:
         return cv.declare_id(ESP8266UartComponent)(value)
@@ -247,6 +265,7 @@ CONFIG_SCHEMA = cv.All(
     ).extend(cv.COMPONENT_SCHEMA),
     cv.has_at_least_one_key(CONF_TX_PIN, CONF_RX_PIN, CONF_PORT),
     validate_host_config,
+    validate_rx_buffer_size,
 )
 
 
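The new validate_rx_buffer_size check mirrors a runtime rule: the RX ring buffer must be larger than the hardware FIFO, and the ESP-IDF driver change later in this diff falls back to twice the FIFO length otherwise. A small sketch of that rule; the 128-byte figure comes from the validator above, while the helper function itself is hypothetical:

# Hedged sketch of the "buffer must exceed the FIFO" rule and its runtime fallback.
HW_FIFO_LEN = 128  # safe minimum used by the Python validator

def effective_rx_buffer(requested: int) -> int:
    # Too-small buffers are bumped to twice the FIFO length at runtime.
    return requested if requested > HW_FIFO_LEN else HW_FIFO_LEN * 2

print(effective_rx_buffer(64))   # 256
print(effective_rx_buffer(512))  # 512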
@@ -56,11 +56,19 @@ uint32_t ESP8266UartComponent::get_config() {
 }
 
 void ESP8266UartComponent::setup() {
-  if (this->rx_pin_) {
-    this->rx_pin_->setup();
+  auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
+    if (!pin) {
+      return;
     }
-  if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
-    this->tx_pin_->setup();
+    const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
+    if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
+      pin->setup();
+    }
+  };
+
+  setup_pin_if_needed(this->rx_pin_);
+  if (this->rx_pin_ != this->tx_pin_) {
+    setup_pin_if_needed(this->tx_pin_);
   }
 
   // Use Arduino HardwareSerial UARTs if all used pins match the ones
@@ -91,6 +91,16 @@ void IDFUARTComponent::setup() {
   this->uart_num_ = static_cast<uart_port_t>(next_uart_num++);
   this->lock_ = xSemaphoreCreateMutex();
+
+#if (SOC_UART_LP_NUM >= 1)
+  size_t fifo_len = ((this->uart_num_ < SOC_UART_HP_NUM) ? SOC_UART_FIFO_LEN : SOC_LP_UART_FIFO_LEN);
+#else
+  size_t fifo_len = SOC_UART_FIFO_LEN;
+#endif
+  if (this->rx_buffer_size_ <= fifo_len) {
+    ESP_LOGW(TAG, "rx_buffer_size is too small, must be greater than %zu", fifo_len);
+    this->rx_buffer_size_ = fifo_len * 2;
+  }
 
   xSemaphoreTake(this->lock_, portMAX_DELAY);
 
   this->load_settings(false);
@@ -123,11 +133,19 @@ void IDFUARTComponent::load_settings(bool dump_config) {
     return;
   }
 
-  if (this->rx_pin_) {
-    this->rx_pin_->setup();
+  auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
+    if (!pin) {
+      return;
     }
-  if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
-    this->tx_pin_->setup();
+    const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
+    if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
+      pin->setup();
+    }
+  };
+
+  setup_pin_if_needed(this->rx_pin_);
+  if (this->rx_pin_ != this->tx_pin_) {
+    setup_pin_if_needed(this->tx_pin_);
   }
 
   int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;
@@ -237,8 +255,12 @@ void IDFUARTComponent::set_rx_timeout(size_t rx_timeout) {
 
 void IDFUARTComponent::write_array(const uint8_t *data, size_t len) {
   xSemaphoreTake(this->lock_, portMAX_DELAY);
-  uart_write_bytes(this->uart_num_, data, len);
+  int32_t write_len = uart_write_bytes(this->uart_num_, data, len);
   xSemaphoreGive(this->lock_);
+  if (write_len != (int32_t) len) {
+    ESP_LOGW(TAG, "uart_write_bytes failed: %d != %zu", write_len, len);
+    this->mark_failed();
+  }
 #ifdef USE_UART_DEBUGGER
   for (size_t i = 0; i < len; i++) {
     this->debug_callback_.call(UART_DIRECTION_TX, data[i]);
@@ -267,6 +289,7 @@ bool IDFUARTComponent::peek_byte(uint8_t *data) {
 
 bool IDFUARTComponent::read_array(uint8_t *data, size_t len) {
   size_t length_to_read = len;
+  int32_t read_len = 0;
   if (!this->check_read_timeout_(len))
     return false;
   xSemaphoreTake(this->lock_, portMAX_DELAY);
@@ -277,25 +300,31 @@ bool IDFUARTComponent::read_array(uint8_t *data, size_t len) {
     this->has_peek_ = false;
   }
   if (length_to_read > 0)
-    uart_read_bytes(this->uart_num_, data, length_to_read, 20 / portTICK_PERIOD_MS);
+    read_len = uart_read_bytes(this->uart_num_, data, length_to_read, 20 / portTICK_PERIOD_MS);
   xSemaphoreGive(this->lock_);
 #ifdef USE_UART_DEBUGGER
   for (size_t i = 0; i < len; i++) {
     this->debug_callback_.call(UART_DIRECTION_RX, data[i]);
   }
 #endif
-  return true;
+  return read_len == (int32_t) length_to_read;
 }
 
 int IDFUARTComponent::available() {
-  size_t available;
+  size_t available = 0;
+  esp_err_t err;
 
   xSemaphoreTake(this->lock_, portMAX_DELAY);
-  uart_get_buffered_data_len(this->uart_num_, &available);
-  if (this->has_peek_)
-    available++;
+  err = uart_get_buffered_data_len(this->uart_num_, &available);
   xSemaphoreGive(this->lock_);
 
+  if (err != ESP_OK) {
+    ESP_LOGW(TAG, "uart_get_buffered_data_len failed: %s", esp_err_to_name(err));
+    this->mark_failed();
+  }
+  if (this->has_peek_) {
+    available++;
+  }
   return available;
 }
 
@@ -53,7 +53,7 @@ void LibreTinyUARTComponent::setup() {
 
   auto shouldFallbackToSoftwareSerial = [&]() -> bool {
     auto hasFlags = [](InternalGPIOPin *pin, const gpio::Flags mask) -> bool {
-      return pin && pin->get_flags() & mask != gpio::Flags::FLAG_NONE;
+      return pin && (pin->get_flags() & mask) != gpio::Flags::FLAG_NONE;
     };
     if (hasFlags(this->tx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN) ||
         hasFlags(this->rx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN)) {
@@ -52,11 +52,19 @@ uint16_t RP2040UartComponent::get_config() {
 }
 
 void RP2040UartComponent::setup() {
-  if (this->rx_pin_) {
-    this->rx_pin_->setup();
+  auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
+    if (!pin) {
+      return;
     }
-  if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
-    this->tx_pin_->setup();
+    const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
+    if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
+      pin->setup();
+    }
+  };
+
+  setup_pin_if_needed(this->rx_pin_);
+  if (this->rx_pin_ != this->tx_pin_) {
+    setup_pin_if_needed(this->tx_pin_);
   }
 
   uint16_t config = get_config();
@@ -1,10 +1,17 @@
+import logging
+
 import esphome.codegen as cg
 from esphome.components.esp32 import add_idf_component
 from esphome.components.ota import BASE_OTA_SCHEMA, OTAComponent, ota_to_code
+from esphome.config_helpers import merge_config
 import esphome.config_validation as cv
-from esphome.const import CONF_ID
+from esphome.const import CONF_ID, CONF_OTA, CONF_PLATFORM, CONF_WEB_SERVER
 from esphome.core import CORE, coroutine_with_priority
 from esphome.coroutine import CoroPriority
+import esphome.final_validate as fv
+from esphome.types import ConfigType
 
+_LOGGER = logging.getLogger(__name__)
+
 CODEOWNERS = ["@esphome/core"]
 DEPENDENCIES = ["network", "web_server_base"]
@@ -12,6 +19,53 @@ DEPENDENCIES = ["network", "web_server_base"]
 web_server_ns = cg.esphome_ns.namespace("web_server")
 WebServerOTAComponent = web_server_ns.class_("WebServerOTAComponent", OTAComponent)
 
+
+def _web_server_ota_final_validate(config: ConfigType) -> None:
+    """Merge multiple web_server OTA instances into one.
+
+    Multiple web_server OTA instances register duplicate HTTP handlers for /update,
+    causing undefined behavior. Merge them into a single instance.
+    """
+    full_conf = fv.full_config.get()
+    ota_confs = full_conf.get(CONF_OTA, [])
+
+    web_server_ota_configs: list[ConfigType] = []
+    other_ota_configs: list[ConfigType] = []
+
+    for ota_conf in ota_confs:
+        if ota_conf.get(CONF_PLATFORM) == CONF_WEB_SERVER:
+            web_server_ota_configs.append(ota_conf)
+        else:
+            other_ota_configs.append(ota_conf)
+
+    if len(web_server_ota_configs) <= 1:
+        return
+
+    # Merge all web_server OTA configs into the first one
+    merged = web_server_ota_configs[0]
+    for ota_conf in web_server_ota_configs[1:]:
+        # Validate that IDs are consistent if manually specified
+        if (
+            merged[CONF_ID].is_manual
+            and ota_conf[CONF_ID].is_manual
+            and merged[CONF_ID] != ota_conf[CONF_ID]
+        ):
+            raise cv.Invalid(
+                f"Found multiple web_server OTA configurations but {CONF_ID} is inconsistent"
+            )
+        merged = merge_config(merged, ota_conf)
+
+    _LOGGER.warning(
+        "Found and merged %d web_server OTA configurations into one instance",
+        len(web_server_ota_configs),
+    )
+
+    # Replace OTA configs with merged web_server + other OTA platforms
+    other_ota_configs.append(merged)
+    full_conf[CONF_OTA] = other_ota_configs
+    fv.full_config.set(full_conf)
+
+
 CONFIG_SCHEMA = (
     cv.Schema(
         {
@@ -22,6 +76,8 @@ CONFIG_SCHEMA = (
     .extend(cv.COMPONENT_SCHEMA)
 )
 
+FINAL_VALIDATE_SCHEMA = _web_server_ota_final_validate
+
 
 @coroutine_with_priority(CoroPriority.WEB_SERVER_OTA)
 async def to_code(config):
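The final-validate hook above first partitions the ota: entries by platform and only merges the web_server ones. A simplified sketch of that grouping step; plain strings stand in for the real ID objects and config keys:

# Hedged sketch of the partitioning performed before merging web_server OTA entries.
ota_confs = [
    {"platform": "web_server", "id": "ota_web_1"},
    {"platform": "esphome", "id": "ota_esphome"},
    {"platform": "web_server", "id": "ota_web_2"},
]
web_server_ota = [c for c in ota_confs if c.get("platform") == "web_server"]
others = [c for c in ota_confs if c.get("platform") != "web_server"]
print(len(web_server_ota), len(others))  # 2 1 -> the two web_server entries get merged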
@@ -489,10 +489,18 @@ AsyncEventSourceResponse::AsyncEventSourceResponse(const AsyncWebServerRequest *
 
 void AsyncEventSourceResponse::destroy(void *ptr) {
   auto *rsp = static_cast<AsyncEventSourceResponse *>(ptr);
-  ESP_LOGD(TAG, "Event source connection closed (fd: %d)", rsp->fd_.load());
-  // Mark as dead by setting fd to 0 - will be cleaned up in the main loop
-  rsp->fd_.store(0);
-  // Note: We don't delete or remove from set here to avoid race conditions
+  int fd = rsp->fd_.exchange(0);  // Atomically get and clear fd
+  if (fd > 0) {
+    ESP_LOGD(TAG, "Event source connection closed (fd: %d)", fd);
+    // Immediately shut down the socket to prevent lwIP from delivering more data
+    // This prevents "recv_tcp: recv for wrong pcb!" assertions when the TCP stack
+    // tries to deliver queued data after the session is marked as dead
+    // See: https://github.com/esphome/esphome/issues/11936
+    shutdown(fd, SHUT_RDWR);
+    // Note: We don't close() the socket - httpd owns it and will close it
+  }
+  // Session will be cleaned up in the main loop to avoid race conditions
 }
 
 // helper for allowing only unique entries in the queue
@@ -12,7 +12,6 @@ from esphome.components.network import (
 from esphome.components.psram import is_guaranteed as psram_is_guaranteed
 from esphome.config_helpers import filter_source_files_from_platform
 import esphome.config_validation as cv
-from esphome.config_validation import only_with_esp_idf
 from esphome.const import (
     CONF_AP,
     CONF_BSSID,
@@ -352,7 +351,7 @@ CONFIG_SCHEMA = cv.All(
                 single=True
             ),
             cv.Optional(CONF_USE_PSRAM): cv.All(
-                only_with_esp_idf, cv.requires_component("psram"), cv.boolean
+                cv.only_on_esp32, cv.requires_component("psram"), cv.boolean
             ),
         }
     ),
@@ -4,7 +4,7 @@ from enum import Enum
 
 from esphome.enum import StrEnum
 
-__version__ = "2025.11.0b2"
+__version__ = "2025.11.0b3"
 
 ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
 VALID_SUBSTITUTIONS_CHARACTERS = (
@@ -609,13 +609,12 @@ uint64_t Scheduler::millis_64_(uint32_t now) {
   if (now < last && (last - now) > HALF_MAX_UINT32) {
     this->millis_major_++;
     major++;
+    this->last_millis_ = now;
 #ifdef ESPHOME_DEBUG_SCHEDULER
     ESP_LOGD(TAG, "Detected true 32-bit rollover at %" PRIu32 "ms (was %" PRIu32 ")", now, last);
 #endif /* ESPHOME_DEBUG_SCHEDULER */
-  }
-
-  // Only update if time moved forward
-  if (now > last) {
+  } else if (now > last) {
+    // Only update if time moved forward
     this->last_millis_ = now;
   }
 
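For context on the hunk above: `millis()` returns a 32-bit count that wraps roughly every 49.7 days, and `millis_64_` widens it by keeping a `millis_major_` epoch counter that is bumped whenever a true rollover is detected (the new reading is far below the previous one). The Python model below restates that bookkeeping for readability only; the class and method names are illustrative and not taken from scheduler.cpp.

# Illustrative model of widening a wrapping 32-bit tick counter to 64 bits.
UINT32_MAX = 0xFFFFFFFF
HALF_MAX_UINT32 = 0x80000000


class Millis64:
    def __init__(self) -> None:
        self.last_millis = 0   # last 32-bit reading seen
        self.millis_major = 0  # number of wraps observed

    def widen(self, now: int) -> int:
        """Return a monotonically increasing 64-bit value from a 32-bit reading."""
        major = self.millis_major
        if now < self.last_millis and (self.last_millis - now) > HALF_MAX_UINT32:
            # True rollover: the 32-bit counter wrapped past UINT32_MAX.
            self.millis_major += 1
            major += 1
            self.last_millis = now
        elif now > self.last_millis:
            # Only update if time moved forward.
            self.last_millis = now
        return (major << 32) | now


clock = Millis64()
assert clock.widen(0xFFFFFF00) < clock.widen(0x00000010)  # value after the wrap is larger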
@@ -1,6 +1,18 @@
 """Tests for the web_server OTA platform."""
 
+from __future__ import annotations
+
 from collections.abc import Callable
+import logging
+from typing import Any
+
+import pytest
+
+from esphome import config_validation as cv
+from esphome.components.web_server.ota import _web_server_ota_final_validate
+from esphome.const import CONF_ID, CONF_OTA, CONF_PLATFORM, CONF_WEB_SERVER
+from esphome.core import ID
+import esphome.final_validate as fv
 
 
 def test_web_server_ota_generated(generate_main: Callable[[str], str]) -> None:
@@ -100,3 +112,144 @@ def test_web_server_ota_esp8266(generate_main: Callable[[str], str]) -> None:
     # Check web server OTA component is present
     assert "WebServerOTAComponent" in main_cpp
     assert "web_server::WebServerOTAComponent" in main_cpp
+
+
+@pytest.mark.parametrize(
+    ("ota_configs", "expected_count", "warning_expected"),
+    [
+        pytest.param(
+            [
+                {
+                    CONF_PLATFORM: CONF_WEB_SERVER,
+                    CONF_ID: ID("ota_web", is_manual=False),
+                }
+            ],
+            1,
+            False,
+            id="single_instance_no_merge",
+        ),
+        pytest.param(
+            [
+                {
+                    CONF_PLATFORM: CONF_WEB_SERVER,
+                    CONF_ID: ID("ota_web_1", is_manual=False),
+                },
+                {
+                    CONF_PLATFORM: CONF_WEB_SERVER,
+                    CONF_ID: ID("ota_web_2", is_manual=False),
+                },
+            ],
+            1,
+            True,
+            id="two_instances_merged",
+        ),
+        pytest.param(
+            [
+                {
+                    CONF_PLATFORM: CONF_WEB_SERVER,
+                    CONF_ID: ID("ota_web_1", is_manual=False),
+                },
+                {
+                    CONF_PLATFORM: "esphome",
+                    CONF_ID: ID("ota_esphome", is_manual=False),
+                },
+                {
+                    CONF_PLATFORM: CONF_WEB_SERVER,
+                    CONF_ID: ID("ota_web_2", is_manual=False),
+                },
+            ],
+            2,
+            True,
+            id="mixed_platforms_web_server_merged",
+        ),
+    ],
+)
+def test_web_server_ota_instance_merging(
+    ota_configs: list[dict[str, Any]],
+    expected_count: int,
+    warning_expected: bool,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test web_server OTA instance merging behavior."""
+    full_conf = {CONF_OTA: ota_configs.copy()}
+
+    token = fv.full_config.set(full_conf)
+    try:
+        with caplog.at_level(logging.WARNING):
+            _web_server_ota_final_validate({})
+
+        updated_conf = fv.full_config.get()
+
+        # Verify total number of OTA platforms
+        assert len(updated_conf[CONF_OTA]) == expected_count
+
+        # Verify warning
+        if warning_expected:
+            assert any(
+                "Found and merged" in record.message
+                and "web_server OTA" in record.message
+                for record in caplog.records
+            ), "Expected merge warning not found in log"
+        else:
+            assert len(caplog.records) == 0, "Unexpected warnings logged"
+    finally:
+        fv.full_config.reset(token)
+
+
+def test_web_server_ota_consistent_manual_ids(
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test that consistent manual IDs can be merged successfully."""
+    ota_configs = [
+        {
+            CONF_PLATFORM: CONF_WEB_SERVER,
+            CONF_ID: ID("ota_web", is_manual=True),
+        },
+        {
+            CONF_PLATFORM: CONF_WEB_SERVER,
+            CONF_ID: ID("ota_web", is_manual=True),
+        },
+    ]
+
+    full_conf = {CONF_OTA: ota_configs}
+
+    token = fv.full_config.set(full_conf)
+    try:
+        with caplog.at_level(logging.WARNING):
+            _web_server_ota_final_validate({})
+
+        updated_conf = fv.full_config.get()
+        assert len(updated_conf[CONF_OTA]) == 1
+        assert updated_conf[CONF_OTA][0][CONF_ID].id == "ota_web"
+        assert any(
+            "Found and merged" in record.message and "web_server OTA" in record.message
+            for record in caplog.records
+        )
+    finally:
+        fv.full_config.reset(token)
+
+
+def test_web_server_ota_inconsistent_manual_ids() -> None:
+    """Test that inconsistent manual IDs raise an error."""
+    ota_configs = [
+        {
+            CONF_PLATFORM: CONF_WEB_SERVER,
+            CONF_ID: ID("ota_web_1", is_manual=True),
+        },
+        {
+            CONF_PLATFORM: CONF_WEB_SERVER,
+            CONF_ID: ID("ota_web_2", is_manual=True),
+        },
+    ]
+
+    full_conf = {CONF_OTA: ota_configs}
+
+    token = fv.full_config.set(full_conf)
+    try:
+        with pytest.raises(
+            cv.Invalid,
+            match="Found multiple web_server OTA configurations but id is inconsistent",
+        ):
+            _web_server_ota_final_validate({})
+    finally:
+        fv.full_config.reset(token)
1 tests/component_tests/sntp/__init__.py (new file)
@@ -0,0 +1 @@
+"""Tests for SNTP component."""
22 tests/component_tests/sntp/config/sntp_test.yaml (new file)
@@ -0,0 +1,22 @@
+esphome:
+  name: sntp-test
+
+esp32:
+  board: esp32dev
+  framework:
+    type: esp-idf
+
+wifi:
+  ssid: "testssid"
+  password: "testpassword"
+
+# Test multiple SNTP instances that should be merged
+time:
+  - platform: sntp
+    servers:
+      - 192.168.1.1
+      - pool.ntp.org
+  - platform: sntp
+    servers:
+      - pool.ntp.org
+      - 192.168.1.2
238 tests/component_tests/sntp/test_init.py (new file)
@@ -0,0 +1,238 @@
+"""Tests for SNTP time configuration validation."""
+
+from __future__ import annotations
+
+import logging
+from typing import Any
+
+import pytest
+
+from esphome import config_validation as cv
+from esphome.components.sntp.time import CONF_SNTP, _sntp_final_validate
+from esphome.const import CONF_ID, CONF_PLATFORM, CONF_SERVERS, CONF_TIME
+from esphome.core import ID
+import esphome.final_validate as fv
+
+
+@pytest.mark.parametrize(
+    ("time_configs", "expected_count", "expected_servers", "warning_messages"),
+    [
+        pytest.param(
+            [
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time", is_manual=False),
+                    CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
+                }
+            ],
+            1,
+            ["192.168.1.1", "pool.ntp.org"],
+            [],
+            id="single_instance_no_merge",
+        ),
+        pytest.param(
+            [
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time_1", is_manual=False),
+                    CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
+                },
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time_2", is_manual=False),
+                    CONF_SERVERS: ["192.168.1.2"],
+                },
+            ],
+            1,
+            ["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
+            ["Found and merged 2 SNTP time configurations into one instance"],
+            id="two_instances_merged",
+        ),
+        pytest.param(
+            [
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time_1", is_manual=False),
+                    CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
+                },
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time_2", is_manual=False),
+                    CONF_SERVERS: ["pool.ntp.org", "192.168.1.2"],
+                },
+            ],
+            1,
+            ["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
+            ["Found and merged 2 SNTP time configurations into one instance"],
+            id="deduplication_preserves_order",
+        ),
+        pytest.param(
+            [
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time_1", is_manual=False),
+                    CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
+                },
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time_2", is_manual=False),
+                    CONF_SERVERS: ["192.168.1.2", "pool2.ntp.org"],
+                },
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time_3", is_manual=False),
+                    CONF_SERVERS: ["pool3.ntp.org"],
+                },
+            ],
+            1,
+            ["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
+            [
+                "SNTP supports maximum 3 servers. Dropped excess server(s): ['pool2.ntp.org', 'pool3.ntp.org']",
+                "Found and merged 3 SNTP time configurations into one instance",
+            ],
+            id="three_instances_drops_excess_servers",
+        ),
+        pytest.param(
+            [
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time_1", is_manual=False),
+                    CONF_SERVERS: [
+                        "192.168.1.1",
+                        "pool.ntp.org",
+                        "pool.ntp.org",
+                        "192.168.1.1",
+                    ],
+                },
+                {
+                    CONF_PLATFORM: CONF_SNTP,
+                    CONF_ID: ID("sntp_time_2", is_manual=False),
+                    CONF_SERVERS: ["pool.ntp.org", "192.168.1.2"],
+                },
+            ],
+            1,
+            ["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
+            ["Found and merged 2 SNTP time configurations into one instance"],
+            id="deduplication_multiple_duplicates",
+        ),
+    ],
+)
+def test_sntp_instance_merging(
+    time_configs: list[dict[str, Any]],
+    expected_count: int,
+    expected_servers: list[str],
+    warning_messages: list[str],
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test SNTP instance merging behavior."""
+    # Create a mock full config with time configs
+    full_conf = {CONF_TIME: time_configs.copy()}
+
+    # Set the context var
+    token = fv.full_config.set(full_conf)
+    try:
+        with caplog.at_level(logging.WARNING):
+            _sntp_final_validate({})
+
+        # Get the updated config
+        updated_conf = fv.full_config.get()
+
+        # Check if merging occurred
+        if len(time_configs) > 1:
+            # Verify only one SNTP instance remains
+            sntp_instances = [
+                tc
+                for tc in updated_conf[CONF_TIME]
+                if tc.get(CONF_PLATFORM) == CONF_SNTP
+            ]
+            assert len(sntp_instances) == expected_count
+
+            # Verify server list
+            assert sntp_instances[0][CONF_SERVERS] == expected_servers
+
+            # Verify warnings
+            for expected_msg in warning_messages:
+                assert any(
+                    expected_msg in record.message for record in caplog.records
+                ), f"Expected warning message '{expected_msg}' not found in log"
+        else:
+            # Single instance should not trigger merging or warnings
+            assert len(caplog.records) == 0
+            # Config should be unchanged
+            assert updated_conf[CONF_TIME] == time_configs
+    finally:
+        fv.full_config.reset(token)
+
+
+def test_sntp_inconsistent_manual_ids() -> None:
+    """Test that inconsistent manual IDs raise an error."""
+    # Create configs with manual IDs that are inconsistent
+    time_configs = [
+        {
+            CONF_PLATFORM: CONF_SNTP,
+            CONF_ID: ID("sntp_time_1", is_manual=True),
+            CONF_SERVERS: ["192.168.1.1"],
+        },
+        {
+            CONF_PLATFORM: CONF_SNTP,
+            CONF_ID: ID("sntp_time_2", is_manual=True),
+            CONF_SERVERS: ["192.168.1.2"],
+        },
+    ]
+
+    full_conf = {CONF_TIME: time_configs}
+
+    token = fv.full_config.set(full_conf)
+    try:
+        with pytest.raises(
+            cv.Invalid,
+            match="Found multiple SNTP configurations but id is inconsistent",
+        ):
+            _sntp_final_validate({})
+    finally:
+        fv.full_config.reset(token)
+
+
+def test_sntp_with_other_time_platforms(caplog: pytest.LogCaptureFixture) -> None:
+    """Test that SNTP merging doesn't affect other time platforms."""
+    time_configs = [
+        {
+            CONF_PLATFORM: CONF_SNTP,
+            CONF_ID: ID("sntp_time_1", is_manual=False),
+            CONF_SERVERS: ["192.168.1.1"],
+        },
+        {
+            CONF_PLATFORM: "homeassistant",
+            CONF_ID: ID("homeassistant_time", is_manual=False),
+        },
+        {
+            CONF_PLATFORM: CONF_SNTP,
+            CONF_ID: ID("sntp_time_2", is_manual=False),
+            CONF_SERVERS: ["192.168.1.2"],
+        },
+    ]
+
+    full_conf = {CONF_TIME: time_configs.copy()}
+
+    token = fv.full_config.set(full_conf)
+    try:
+        with caplog.at_level(logging.WARNING):
+            _sntp_final_validate({})
+
+        updated_conf = fv.full_config.get()
+
+        # Should have 2 time platforms: 1 merged SNTP + 1 homeassistant
+        assert len(updated_conf[CONF_TIME]) == 2
+
+        # Find the platforms
+        platforms = {tc[CONF_PLATFORM] for tc in updated_conf[CONF_TIME]}
+        assert platforms == {CONF_SNTP, "homeassistant"}
+
+        # Verify SNTP was merged
+        sntp_instances = [
+            tc for tc in updated_conf[CONF_TIME] if tc[CONF_PLATFORM] == CONF_SNTP
+        ]
+        assert len(sntp_instances) == 1
+        assert sntp_instances[0][CONF_SERVERS] == ["192.168.1.1", "192.168.1.2"]
+    finally:
+        fv.full_config.reset(token)
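The expectations encoded in the tests above (order-preserving de-duplication of the combined server lists, a hard cap of three servers with a warning naming the dropped extras, and a single surviving instance) can be summarised in a short sketch. This is an illustrative model of the rules the tests assert, with hypothetical names; it is not the code in the SNTP component itself.

# Illustrative model of the merge rules asserted above; not the SNTP component code.
import logging

_LOGGER = logging.getLogger(__name__)
MAX_SNTP_SERVERS = 3  # SNTP supports a maximum of 3 servers


def merge_sntp_servers(server_lists: list[list[str]]) -> list[str]:
    """Concatenate server lists, de-duplicate preserving first occurrence, cap at 3."""
    merged: list[str] = []
    for servers in server_lists:
        for server in servers:
            if server not in merged:
                merged.append(server)
    if len(merged) > MAX_SNTP_SERVERS:
        dropped = merged[MAX_SNTP_SERVERS:]
        _LOGGER.warning(
            "SNTP supports maximum %d servers. Dropped excess server(s): %s",
            MAX_SNTP_SERVERS,
            dropped,
        )
        merged = merged[:MAX_SNTP_SERVERS]
    return merged


# Mirrors the "three_instances_drops_excess_servers" case above.
assert merge_sntp_servers(
    [["192.168.1.1", "pool.ntp.org"], ["192.168.1.2", "pool2.ntp.org"], ["pool3.ntp.org"]]
) == ["192.168.1.1", "pool.ntp.org", "192.168.1.2"]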
27 tests/components/esp32/test.esp32-p4-idf.yaml (new file)
@@ -0,0 +1,27 @@
+esp32:
+  variant: esp32p4
+  flash_size: 32MB
+  cpu_frequency: 400MHz
+  framework:
+    type: esp-idf
+    advanced:
+      enable_idf_experimental_features: yes
+
+ota:
+  platform: esphome
+
+wifi:
+  ssid: MySSID
+  password: password1
+
+esp32_hosted:
+  variant: ESP32C6
+  slot: 1
+  active_high: true
+  reset_pin: GPIO15
+  cmd_pin: GPIO13
+  clk_pin: GPIO12
+  d0_pin: GPIO11
+  d1_pin: GPIO10
+  d2_pin: GPIO9
+  d3_pin: GPIO8