mirror of https://github.com/esphome/esphome.git synced 2025-11-05 09:31:54 +00:00

Compare commits

...

27 Commits

Author SHA1 Message Date
Jesse Hills
00155989af Merge pull request #11703 from esphome/bump-2025.10.4
2025.10.4
2025-11-04 16:04:04 +13:00
Jesse Hills
a3583da17d Bump version to 2025.10.4 2025-11-04 11:25:33 +13:00
Clyde Stubbs
0f6fd91304 [sdl] Fix keymappings (#11635) 2025-11-04 11:25:33 +13:00
Clyde Stubbs
2f5f1da16f [lvgl] Fix event for binary sensor (#11636) 2025-11-04 11:25:33 +13:00
Clyde Stubbs
51745d1d5e [image] Catch and report svg load errors (#11619) 2025-11-04 11:25:33 +13:00
J. Nick Koston
fecc8399a5 [lvgl] Fix nested lambdas in automations unable to access parameters (#11583)
Co-authored-by: clydebarrow <2366188+clydebarrow@users.noreply.github.com>
2025-11-04 11:25:33 +13:00
Clyde Stubbs
db395a662d [mipi_rgb] Fix rotation with custom model (#11585) 2025-11-04 11:25:33 +13:00
Anton Sergunov
641dd24b21 Fix the LibreTiny bug with UART pin setup (#11518) 2025-11-04 11:25:32 +13:00
Keith Burzinski
57f2e32b00 [uart] Fix order of initialization calls (#11510) 2025-11-04 11:25:32 +13:00
Jesse Hills
6a478b9070 Merge pull request #11506 from esphome/bump-2025.10.3
2025.10.3
2025-10-24 14:08:12 +13:00
Jesse Hills
a32a1d11fb Bump version to 2025.10.3 2025-10-24 07:51:38 +13:00
Markus
daeb8ef88c [core] handle mixed IP and DNS addresses correctly in resolve_ip_address (#11503)
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
2025-10-24 07:51:38 +13:00
Anton Sergunov
febee437d6 [uart] Make rx pin respect pullup and pulldown settings (#9248) 2025-10-24 07:51:38 +13:00
Peter Zich
de2f475dbd [hdc1080] Make HDC1080_CMD_CONFIGURATION failure a warning (and log it) (#11355)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-24 07:51:38 +13:00
Jesse Hills
ebc0f5f7c9 Merge pull request #11387 from esphome/bump-2025.10.2
2025.10.2
2025-10-20 13:42:48 +13:00
J. Nick Koston
87ca8784ef [openthread] Backport address resolution support to prevent OTA crash (#11312)
Co-authored-by: Daniel Stiner <danstiner@gmail.com>
2025-10-20 10:12:56 +13:00
Jesse Hills
a186c1062f Bump version to 2025.10.2 2025-10-20 10:06:43 +13:00
Jonathan Swoboda
ea38237f29 [esp32] Fix OTA rollback (#11300)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-20 10:06:43 +13:00
J. Nick Koston
6aff1394ad [core] Fix IndexError when OTA devices cannot be resolved (#11311) 2025-10-20 10:06:43 +13:00
Spectre5
0e34d1b64d Change all temperature offsets to temperature_delta (#11347) 2025-10-20 10:06:43 +13:00
tomaszduda23
1483cee0fb [dashboard] fix migration to Path (#11342)
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
2025-10-20 10:06:43 +13:00
J. Nick Koston
8c1bd2fd85 [dashboard] Fix binary download with packages using secrets after Path migration (#11313) 2025-10-20 10:06:43 +13:00
Daniel Stiner
ea609dc0f6 [const] Add CONF_OPENTHREAD (#11318) 2025-10-20 10:06:42 +13:00
Jonathan Swoboda
913095f6be [esp32] Reduce tx power on Arduino (#11304) 2025-10-20 10:06:42 +13:00
Jonathan Swoboda
bb24ad4a30 [htu21d] Revert register address change (#11291) 2025-10-20 10:06:42 +13:00
Jonathan Swoboda
0d612fecfc [core] Add ESP32 ROM functions to reserved ids (#11293) 2025-10-20 10:06:42 +13:00
J. Nick Koston
9c235b4140 [datetime] Fix DateTimeStateTrigger compilation when time component is not used (#11287) 2025-10-20 10:06:42 +13:00
38 changed files with 968 additions and 386 deletions

View File

@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2025.10.1
PROJECT_NUMBER = 2025.10.4
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@@ -185,7 +185,9 @@ def choose_upload_log_host(
else:
resolved.append(device)
if not resolved:
_LOGGER.error("All specified devices: %s could not be resolved.", defaults)
raise EsphomeError(
f"All specified devices {defaults} could not be resolved. Is the device connected to the network?"
)
return resolved
# No devices specified, show interactive chooser

View File

@@ -41,7 +41,7 @@ CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(BME680BSECComponent),
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
cv.Optional(CONF_IAQ_MODE, default="STATIC"): cv.enum(
IAQ_MODE_OPTIONS, upper=True
),
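A small worked example of why this hunk (and the matching ones further down) swaps cv.temperature for cv.temperature_delta on offset options — a sketch, assuming the usual semantics of those validator names, where an absolute conversion shifts Fahrenheit values by the freezing point while a delta conversion only applies the scale factor:

# Converting a "4°F" offset as an absolute temperature shifts it by 32°F;
# converting it as a delta applies only the 5/9 scale factor.
def f_to_c_absolute(f):
    return (f - 32.0) * 5.0 / 9.0

def f_to_c_delta(f):
    return f * 5.0 / 9.0

print(f_to_c_absolute(4.0))  # -15.56 °C  (wrong for an offset)
print(f_to_c_delta(4.0))     #   2.22 °C  (the intended difference)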

View File

@@ -139,7 +139,7 @@ CONFIG_SCHEMA_BASE = (
cv.Optional(CONF_SUPPLY_VOLTAGE, default="3.3V"): cv.enum(
VOLTAGE_OPTIONS, upper=True
),
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
cv.Optional(
CONF_STATE_SAVE_INTERVAL, default="6hours"
): cv.positive_time_period_minutes,

View File

@@ -30,14 +30,12 @@ class DateTimeBase : public EntityBase {
#endif
};
#ifdef USE_TIME
class DateTimeStateTrigger : public Trigger<ESPTime> {
public:
explicit DateTimeStateTrigger(DateTimeBase *parent) {
parent->add_on_state_callback([this, parent]() { this->trigger(parent->state_as_esptime()); });
}
};
#endif
} // namespace datetime
} // namespace esphome

View File

@@ -790,6 +790,7 @@ async def to_code(config):
add_idf_sdkconfig_option("CONFIG_AUTOSTART_ARDUINO", True)
add_idf_sdkconfig_option("CONFIG_MBEDTLS_PSK_MODES", True)
add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True)
add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True)
cg.add_build_flag("-Wno-nonnull-compare")

View File

@@ -6,6 +6,7 @@
#include <freertos/FreeRTOS.h>
#include <freertos/task.h>
#include <esp_idf_version.h>
#include <esp_ota_ops.h>
#include <esp_task_wdt.h>
#include <esp_timer.h>
#include <soc/rtc.h>
@@ -52,6 +53,16 @@ void arch_init() {
disableCore1WDT();
#endif
#endif
// If the bootloader was compiled with CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE the current
// partition will get rolled back unless it is marked as valid.
esp_ota_img_states_t state;
const esp_partition_t *running = esp_ota_get_running_partition();
if (esp_ota_get_state_partition(running, &state) == ESP_OK) {
if (state == ESP_OTA_IMG_PENDING_VERIFY) {
esp_ota_mark_app_valid_cancel_rollback();
}
}
}
void IRAM_ATTR HOT arch_feed_wdt() { esp_task_wdt_reset(); }

View File

@@ -16,7 +16,8 @@ void HDC1080Component::setup() {
// if configuration fails - there is a problem
if (this->write_register(HDC1080_CMD_CONFIGURATION, config, 2) != i2c::ERROR_OK) {
this->mark_failed();
ESP_LOGW(TAG, "Failed to configure HDC1080");
this->status_set_warning();
return;
}
}

View File

@@ -9,8 +9,8 @@ static const char *const TAG = "htu21d";
static const uint8_t HTU21D_ADDRESS = 0x40;
static const uint8_t HTU21D_REGISTER_RESET = 0xFE;
static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xE3;
static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xE5;
static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xF3;
static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xF5;
static const uint8_t HTU21D_WRITERHT_REG_CMD = 0xE6; /**< Write RH/T User Register 1 */
static const uint8_t HTU21D_REGISTER_STATUS = 0xE7;
static const uint8_t HTU21D_WRITEHEATER_REG_CMD = 0x51; /**< Write Heater Control Register */

View File

@@ -671,18 +671,33 @@ async def write_image(config, all_frames=False):
resize = config.get(CONF_RESIZE)
if is_svg_file(path):
# Local import so use of non-SVG files needn't require cairosvg installed
from pyexpat import ExpatError
from xml.etree.ElementTree import ParseError
from cairosvg import svg2png
from cairosvg.helpers import PointError
if not resize:
resize = (None, None)
with open(path, "rb") as file:
image = svg2png(
file_obj=file,
output_width=resize[0],
output_height=resize[1],
)
image = Image.open(io.BytesIO(image))
width, height = image.size
try:
with open(path, "rb") as file:
image = svg2png(
file_obj=file,
output_width=resize[0],
output_height=resize[1],
)
image = Image.open(io.BytesIO(image))
width, height = image.size
except (
ValueError,
ParseError,
IndexError,
ExpatError,
AttributeError,
TypeError,
PointError,
) as e:
raise core.EsphomeError(f"Could not load SVG image {path}: {e}") from e
else:
image = Image.open(path)
width, height = image.size

View File

@@ -31,7 +31,7 @@ async def to_code(config):
lvgl_static.add_event_cb(
widget.obj,
await pressed_ctx.get_lambda(),
LV_EVENT.PRESSING,
LV_EVENT.PRESSED,
LV_EVENT.RELEASED,
)
)

View File

@@ -5,6 +5,7 @@ Constants already defined in esphome.const are not duplicated here and must be i
"""
import logging
from typing import TYPE_CHECKING, Any
from esphome import codegen as cg, config_validation as cv
from esphome.const import CONF_ITEMS
@@ -12,6 +13,7 @@ from esphome.core import ID, Lambda
from esphome.cpp_generator import LambdaExpression, MockObj
from esphome.cpp_types import uint32
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
from esphome.types import Expression, SafeExpType
from .helpers import requires_component
@@ -42,7 +44,13 @@ def static_cast(type, value):
def call_lambda(lamb: LambdaExpression):
expr = lamb.content.strip()
if expr.startswith("return") and expr.endswith(";"):
return expr[6:][:-1].strip()
return expr[6:-1].strip()
# If lambda has parameters, call it with those parameter names
# Parameter names come from hardcoded component code (like "x", "it", "event")
# not from user input, so they're safe to use directly
if lamb.parameters and lamb.parameters.parameters:
param_names = ", ".join(str(param.id) for param in lamb.parameters.parameters)
return f"{lamb}({param_names})"
return f"{lamb}()"
@@ -65,10 +73,20 @@ class LValidator:
return cv.returning_lambda(value)
return self.validator(value)
async def process(self, value, args=()):
async def process(
self, value: Any, args: list[tuple[SafeExpType, str]] | None = None
) -> Expression:
if value is None:
return None
if isinstance(value, Lambda):
# Local import to avoid circular import
from .lvcode import CodeContext, LambdaContext
if TYPE_CHECKING:
# CodeContext does not have get_automation_parameters
# so we need to assert the type here
assert isinstance(CodeContext.code_context, LambdaContext)
args = args or CodeContext.code_context.get_automation_parameters()
return cg.RawExpression(
call_lambda(
await cg.process_lambda(value, args, return_type=self.rtype)
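The hunk above changes call_lambda() so that a nested lambda is invoked with the enclosing automation's parameter names instead of always being called with no arguments. A minimal, self-contained sketch of that string-building logic, using hypothetical stand-in classes in place of esphome.cpp_generator's LambdaExpression:

class FakeParam:
    def __init__(self, id):
        self.id = id

class FakeParams:
    def __init__(self, parameters):
        self.parameters = parameters

class FakeLambda:
    """Stand-in for a generated C++ lambda expression (illustration only)."""
    def __init__(self, content, parameters):
        self.content = content
        self.parameters = FakeParams(parameters)
    def __str__(self):
        params = ", ".join(f"auto {p.id}" for p in self.parameters.parameters)
        return f"[=]({params}) {{ {self.content} }}"

def call_lambda(lamb):
    expr = lamb.content.strip()
    if expr.startswith("return") and expr.endswith(";"):
        return expr[6:-1].strip()
    if lamb.parameters and lamb.parameters.parameters:
        param_names = ", ".join(str(p.id) for p in lamb.parameters.parameters)
        return f"{lamb}({param_names})"
    return f"{lamb}()"

# A nested lambda that uses the automation's `x` parameter is now emitted as a
# call that forwards `x`, e.g. `[=](auto x) { ESP_LOGD("t", "%.0f", x); }(x)`.
print(call_lambda(FakeLambda('ESP_LOGD("t", "%.0f", x);', [FakeParam("x")])))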

View File

@@ -1,3 +1,5 @@
from typing import TYPE_CHECKING, Any
import esphome.codegen as cg
from esphome.components import image
from esphome.components.color import CONF_HEX, ColorStruct, from_rgbw
@@ -17,6 +19,7 @@ from esphome.cpp_generator import MockObj
from esphome.cpp_types import ESPTime, int32, uint32
from esphome.helpers import cpp_string_escape
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
from esphome.types import Expression, SafeExpType
from . import types as ty
from .defines import (
@@ -388,11 +391,23 @@ class TextValidator(LValidator):
return value
return super().__call__(value)
async def process(self, value, args=()):
async def process(
self, value: Any, args: list[tuple[SafeExpType, str]] | None = None
) -> Expression:
# Local import to avoid circular import at module level
from .lvcode import CodeContext, LambdaContext
if TYPE_CHECKING:
# CodeContext does not have get_automation_parameters
# so we need to assert the type here
assert isinstance(CodeContext.code_context, LambdaContext)
args = args or CodeContext.code_context.get_automation_parameters()
if isinstance(value, dict):
if format_str := value.get(CONF_FORMAT):
args = [str(x) for x in value[CONF_ARGS]]
arg_expr = cg.RawExpression(",".join(args))
str_args = [str(x) for x in value[CONF_ARGS]]
arg_expr = cg.RawExpression(",".join(str_args))
format_str = cpp_string_escape(format_str)
return literal(f"str_sprintf({format_str}, {arg_expr}).c_str()")
if time_format := value.get(CONF_TIME_FORMAT):

View File

@@ -164,6 +164,9 @@ class LambdaContext(CodeContext):
code_text.append(text)
return code_text
def get_automation_parameters(self) -> list[tuple[SafeExpType, str]]:
return self.parameters
async def __aenter__(self):
await super().__aenter__()
add_line_marks(self.where)
@@ -178,9 +181,8 @@ class LvContext(LambdaContext):
added_lambda_count = 0
def __init__(self, args=None):
self.args = args or LVGL_COMP_ARG
super().__init__(parameters=self.args)
def __init__(self):
super().__init__(parameters=LVGL_COMP_ARG)
async def __aexit__(self, exc_type, exc_val, exc_tb):
await super().__aexit__(exc_type, exc_val, exc_tb)
@@ -189,6 +191,11 @@ class LvContext(LambdaContext):
cg.add(expression)
return expression
def get_automation_parameters(self) -> list[tuple[SafeExpType, str]]:
# When generating automations, we don't want the `lv_component` parameter to be passed
# to the lambda.
return []
def __call__(self, *args):
return self.add(*args)

View File

@@ -5,7 +5,6 @@ from ..defines import CONF_WIDGET
from ..lvcode import (
API_EVENT,
EVENT_ARG,
LVGL_COMP_ARG,
UPDATE_EVENT,
LambdaContext,
LvContext,
@@ -30,7 +29,7 @@ async def to_code(config):
await wait_for_widgets()
async with LambdaContext(EVENT_ARG) as lamb:
lv_add(sensor.publish_state(widget.get_value()))
async with LvContext(LVGL_COMP_ARG):
async with LvContext():
lv_add(
lvgl_static.add_event_cb(
widget.obj,

View File

@@ -384,6 +384,18 @@ class DriverChip:
transform[CONF_TRANSFORM] = True
return transform
def swap_xy_schema(self):
uses_swap = self.get_default(CONF_SWAP_XY, None) != cv.UNDEFINED
def validator(value):
if value:
raise cv.Invalid("Axis swapping not supported by this model")
return cv.boolean(value)
if uses_swap:
return {cv.Required(CONF_SWAP_XY): cv.boolean}
return {cv.Optional(CONF_SWAP_XY, default=False): validator}
def add_madctl(self, sequence: list, config: dict):
# Add the MADCTL command to the sequence based on the configuration.
use_flip = config.get(CONF_USE_AXIS_FLIPS)

View File

@@ -46,6 +46,7 @@ from esphome.const import (
CONF_DATA_RATE,
CONF_DC_PIN,
CONF_DIMENSIONS,
CONF_DISABLED,
CONF_ENABLE_PIN,
CONF_GREEN,
CONF_HSYNC_PIN,
@@ -117,16 +118,16 @@ def data_pin_set(length):
def model_schema(config):
model = MODELS[config[CONF_MODEL].upper()]
if transforms := model.transforms:
transform = cv.Schema({cv.Required(x): cv.boolean for x in transforms})
for x in (CONF_SWAP_XY, CONF_MIRROR_X, CONF_MIRROR_Y):
if x not in transforms:
transform = transform.extend(
{cv.Optional(x): cv.invalid(f"{x} not supported by this model")}
)
else:
transform = cv.invalid("This model does not support transforms")
transform = cv.Any(
cv.Schema(
{
cv.Required(CONF_MIRROR_X): cv.boolean,
cv.Required(CONF_MIRROR_Y): cv.boolean,
**model.swap_xy_schema(),
}
),
cv.one_of(CONF_DISABLED, lower=True),
)
# RPI model does not use an init sequence, indicates with empty list
if model.initsequence is None:
# Custom model requires an init sequence
@@ -135,12 +136,16 @@ def model_schema(config):
else:
iseqconf = cv.Optional(CONF_INIT_SEQUENCE)
uses_spi = CONF_INIT_SEQUENCE in config or len(model.initsequence) != 0
swap_xy = config.get(CONF_TRANSFORM, {}).get(CONF_SWAP_XY, False)
# Dimensions are optional if the model has a default width and the swap_xy transform is not overridden
cv_dimensions = (
cv.Optional if model.get_default(CONF_WIDTH) and not swap_xy else cv.Required
# Dimensions are optional if the model has a default width and the x-y transform is not overridden
transform_config = config.get(CONF_TRANSFORM, {})
is_swapped = (
isinstance(transform_config, dict)
and transform_config.get(CONF_SWAP_XY, False) is True
)
cv_dimensions = (
cv.Optional if model.get_default(CONF_WIDTH) and not is_swapped else cv.Required
)
pixel_modes = (PIXEL_MODE_16BIT, PIXEL_MODE_18BIT, "16", "18")
schema = display.FULL_DISPLAY_SCHEMA.extend(
{
@@ -157,7 +162,7 @@ def model_schema(config):
model.option(CONF_PIXEL_MODE, PIXEL_MODE_16BIT): cv.one_of(
*pixel_modes, lower=True
),
model.option(CONF_TRANSFORM, cv.UNDEFINED): transform,
cv.Optional(CONF_TRANSFORM): transform,
cv.Required(CONF_MODEL): cv.one_of(model.name, upper=True),
model.option(CONF_INVERT_COLORS, False): cv.boolean,
model.option(CONF_USE_AXIS_FLIPS, True): cv.boolean,
@@ -270,7 +275,6 @@ async def to_code(config):
cg.add(var.set_vsync_front_porch(config[CONF_VSYNC_FRONT_PORCH]))
cg.add(var.set_pclk_inverted(config[CONF_PCLK_INVERTED]))
cg.add(var.set_pclk_frequency(config[CONF_PCLK_FREQUENCY]))
index = 0
dpins = []
if CONF_RED in config[CONF_DATA_PINS]:
red_pins = config[CONF_DATA_PINS][CONF_RED]

View File

@@ -131,19 +131,6 @@ def denominator(config):
) from StopIteration
def swap_xy_schema(model):
uses_swap = model.get_default(CONF_SWAP_XY, None) != cv.UNDEFINED
def validator(value):
if value:
raise cv.Invalid("Axis swapping not supported by this model")
return cv.boolean(value)
if uses_swap:
return {cv.Required(CONF_SWAP_XY): cv.boolean}
return {cv.Optional(CONF_SWAP_XY, default=False): validator}
def model_schema(config):
model = MODELS[config[CONF_MODEL]]
bus_mode = config[CONF_BUS_MODE]
@@ -152,7 +139,7 @@ def model_schema(config):
{
cv.Required(CONF_MIRROR_X): cv.boolean,
cv.Required(CONF_MIRROR_Y): cv.boolean,
**swap_xy_schema(model),
**model.swap_xy_schema(),
}
),
cv.one_of(CONF_DISABLED, lower=True),

View File

@@ -81,7 +81,7 @@ CONFIG_SCHEMA = (
cv.int_range(min=0, max=0xFFFF, max_included=False),
),
cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION): cv.pressure,
cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature,
cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature_delta,
cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION_SOURCE): cv.use_id(
sensor.Sensor
),

View File

@@ -12,241 +12,256 @@ CODEOWNERS = ["@bdm310"]
STATE_ARG = "state"
SDL_KEYMAP = {
"SDLK_UNKNOWN": 0,
"SDLK_FIRST": 0,
"SDLK_BACKSPACE": 8,
"SDLK_TAB": 9,
"SDLK_CLEAR": 12,
"SDLK_RETURN": 13,
"SDLK_PAUSE": 19,
"SDLK_ESCAPE": 27,
"SDLK_SPACE": 32,
"SDLK_EXCLAIM": 33,
"SDLK_QUOTEDBL": 34,
"SDLK_HASH": 35,
"SDLK_DOLLAR": 36,
"SDLK_AMPERSAND": 38,
"SDLK_QUOTE": 39,
"SDLK_LEFTPAREN": 40,
"SDLK_RIGHTPAREN": 41,
"SDLK_ASTERISK": 42,
"SDLK_PLUS": 43,
"SDLK_COMMA": 44,
"SDLK_MINUS": 45,
"SDLK_PERIOD": 46,
"SDLK_SLASH": 47,
"SDLK_0": 48,
"SDLK_1": 49,
"SDLK_2": 50,
"SDLK_3": 51,
"SDLK_4": 52,
"SDLK_5": 53,
"SDLK_6": 54,
"SDLK_7": 55,
"SDLK_8": 56,
"SDLK_9": 57,
"SDLK_COLON": 58,
"SDLK_SEMICOLON": 59,
"SDLK_LESS": 60,
"SDLK_EQUALS": 61,
"SDLK_GREATER": 62,
"SDLK_QUESTION": 63,
"SDLK_AT": 64,
"SDLK_LEFTBRACKET": 91,
"SDLK_BACKSLASH": 92,
"SDLK_RIGHTBRACKET": 93,
"SDLK_CARET": 94,
"SDLK_UNDERSCORE": 95,
"SDLK_BACKQUOTE": 96,
"SDLK_a": 97,
"SDLK_b": 98,
"SDLK_c": 99,
"SDLK_d": 100,
"SDLK_e": 101,
"SDLK_f": 102,
"SDLK_g": 103,
"SDLK_h": 104,
"SDLK_i": 105,
"SDLK_j": 106,
"SDLK_k": 107,
"SDLK_l": 108,
"SDLK_m": 109,
"SDLK_n": 110,
"SDLK_o": 111,
"SDLK_p": 112,
"SDLK_q": 113,
"SDLK_r": 114,
"SDLK_s": 115,
"SDLK_t": 116,
"SDLK_u": 117,
"SDLK_v": 118,
"SDLK_w": 119,
"SDLK_x": 120,
"SDLK_y": 121,
"SDLK_z": 122,
"SDLK_DELETE": 127,
"SDLK_WORLD_0": 160,
"SDLK_WORLD_1": 161,
"SDLK_WORLD_2": 162,
"SDLK_WORLD_3": 163,
"SDLK_WORLD_4": 164,
"SDLK_WORLD_5": 165,
"SDLK_WORLD_6": 166,
"SDLK_WORLD_7": 167,
"SDLK_WORLD_8": 168,
"SDLK_WORLD_9": 169,
"SDLK_WORLD_10": 170,
"SDLK_WORLD_11": 171,
"SDLK_WORLD_12": 172,
"SDLK_WORLD_13": 173,
"SDLK_WORLD_14": 174,
"SDLK_WORLD_15": 175,
"SDLK_WORLD_16": 176,
"SDLK_WORLD_17": 177,
"SDLK_WORLD_18": 178,
"SDLK_WORLD_19": 179,
"SDLK_WORLD_20": 180,
"SDLK_WORLD_21": 181,
"SDLK_WORLD_22": 182,
"SDLK_WORLD_23": 183,
"SDLK_WORLD_24": 184,
"SDLK_WORLD_25": 185,
"SDLK_WORLD_26": 186,
"SDLK_WORLD_27": 187,
"SDLK_WORLD_28": 188,
"SDLK_WORLD_29": 189,
"SDLK_WORLD_30": 190,
"SDLK_WORLD_31": 191,
"SDLK_WORLD_32": 192,
"SDLK_WORLD_33": 193,
"SDLK_WORLD_34": 194,
"SDLK_WORLD_35": 195,
"SDLK_WORLD_36": 196,
"SDLK_WORLD_37": 197,
"SDLK_WORLD_38": 198,
"SDLK_WORLD_39": 199,
"SDLK_WORLD_40": 200,
"SDLK_WORLD_41": 201,
"SDLK_WORLD_42": 202,
"SDLK_WORLD_43": 203,
"SDLK_WORLD_44": 204,
"SDLK_WORLD_45": 205,
"SDLK_WORLD_46": 206,
"SDLK_WORLD_47": 207,
"SDLK_WORLD_48": 208,
"SDLK_WORLD_49": 209,
"SDLK_WORLD_50": 210,
"SDLK_WORLD_51": 211,
"SDLK_WORLD_52": 212,
"SDLK_WORLD_53": 213,
"SDLK_WORLD_54": 214,
"SDLK_WORLD_55": 215,
"SDLK_WORLD_56": 216,
"SDLK_WORLD_57": 217,
"SDLK_WORLD_58": 218,
"SDLK_WORLD_59": 219,
"SDLK_WORLD_60": 220,
"SDLK_WORLD_61": 221,
"SDLK_WORLD_62": 222,
"SDLK_WORLD_63": 223,
"SDLK_WORLD_64": 224,
"SDLK_WORLD_65": 225,
"SDLK_WORLD_66": 226,
"SDLK_WORLD_67": 227,
"SDLK_WORLD_68": 228,
"SDLK_WORLD_69": 229,
"SDLK_WORLD_70": 230,
"SDLK_WORLD_71": 231,
"SDLK_WORLD_72": 232,
"SDLK_WORLD_73": 233,
"SDLK_WORLD_74": 234,
"SDLK_WORLD_75": 235,
"SDLK_WORLD_76": 236,
"SDLK_WORLD_77": 237,
"SDLK_WORLD_78": 238,
"SDLK_WORLD_79": 239,
"SDLK_WORLD_80": 240,
"SDLK_WORLD_81": 241,
"SDLK_WORLD_82": 242,
"SDLK_WORLD_83": 243,
"SDLK_WORLD_84": 244,
"SDLK_WORLD_85": 245,
"SDLK_WORLD_86": 246,
"SDLK_WORLD_87": 247,
"SDLK_WORLD_88": 248,
"SDLK_WORLD_89": 249,
"SDLK_WORLD_90": 250,
"SDLK_WORLD_91": 251,
"SDLK_WORLD_92": 252,
"SDLK_WORLD_93": 253,
"SDLK_WORLD_94": 254,
"SDLK_WORLD_95": 255,
"SDLK_KP0": 256,
"SDLK_KP1": 257,
"SDLK_KP2": 258,
"SDLK_KP3": 259,
"SDLK_KP4": 260,
"SDLK_KP5": 261,
"SDLK_KP6": 262,
"SDLK_KP7": 263,
"SDLK_KP8": 264,
"SDLK_KP9": 265,
"SDLK_KP_PERIOD": 266,
"SDLK_KP_DIVIDE": 267,
"SDLK_KP_MULTIPLY": 268,
"SDLK_KP_MINUS": 269,
"SDLK_KP_PLUS": 270,
"SDLK_KP_ENTER": 271,
"SDLK_KP_EQUALS": 272,
"SDLK_UP": 273,
"SDLK_DOWN": 274,
"SDLK_RIGHT": 275,
"SDLK_LEFT": 276,
"SDLK_INSERT": 277,
"SDLK_HOME": 278,
"SDLK_END": 279,
"SDLK_PAGEUP": 280,
"SDLK_PAGEDOWN": 281,
"SDLK_F1": 282,
"SDLK_F2": 283,
"SDLK_F3": 284,
"SDLK_F4": 285,
"SDLK_F5": 286,
"SDLK_F6": 287,
"SDLK_F7": 288,
"SDLK_F8": 289,
"SDLK_F9": 290,
"SDLK_F10": 291,
"SDLK_F11": 292,
"SDLK_F12": 293,
"SDLK_F13": 294,
"SDLK_F14": 295,
"SDLK_F15": 296,
"SDLK_NUMLOCK": 300,
"SDLK_CAPSLOCK": 301,
"SDLK_SCROLLOCK": 302,
"SDLK_RSHIFT": 303,
"SDLK_LSHIFT": 304,
"SDLK_RCTRL": 305,
"SDLK_LCTRL": 306,
"SDLK_RALT": 307,
"SDLK_LALT": 308,
"SDLK_RMETA": 309,
"SDLK_LMETA": 310,
"SDLK_LSUPER": 311,
"SDLK_RSUPER": 312,
"SDLK_MODE": 313,
"SDLK_COMPOSE": 314,
"SDLK_HELP": 315,
"SDLK_PRINT": 316,
"SDLK_SYSREQ": 317,
"SDLK_BREAK": 318,
"SDLK_MENU": 319,
"SDLK_POWER": 320,
"SDLK_EURO": 321,
"SDLK_UNDO": 322,
}
SDL_KeyCode = cg.global_ns.enum("SDL_KeyCode")
SDL_KEYS = (
"SDLK_UNKNOWN",
"SDLK_RETURN",
"SDLK_ESCAPE",
"SDLK_BACKSPACE",
"SDLK_TAB",
"SDLK_SPACE",
"SDLK_EXCLAIM",
"SDLK_QUOTEDBL",
"SDLK_HASH",
"SDLK_PERCENT",
"SDLK_DOLLAR",
"SDLK_AMPERSAND",
"SDLK_QUOTE",
"SDLK_LEFTPAREN",
"SDLK_RIGHTPAREN",
"SDLK_ASTERISK",
"SDLK_PLUS",
"SDLK_COMMA",
"SDLK_MINUS",
"SDLK_PERIOD",
"SDLK_SLASH",
"SDLK_0",
"SDLK_1",
"SDLK_2",
"SDLK_3",
"SDLK_4",
"SDLK_5",
"SDLK_6",
"SDLK_7",
"SDLK_8",
"SDLK_9",
"SDLK_COLON",
"SDLK_SEMICOLON",
"SDLK_LESS",
"SDLK_EQUALS",
"SDLK_GREATER",
"SDLK_QUESTION",
"SDLK_AT",
"SDLK_LEFTBRACKET",
"SDLK_BACKSLASH",
"SDLK_RIGHTBRACKET",
"SDLK_CARET",
"SDLK_UNDERSCORE",
"SDLK_BACKQUOTE",
"SDLK_a",
"SDLK_b",
"SDLK_c",
"SDLK_d",
"SDLK_e",
"SDLK_f",
"SDLK_g",
"SDLK_h",
"SDLK_i",
"SDLK_j",
"SDLK_k",
"SDLK_l",
"SDLK_m",
"SDLK_n",
"SDLK_o",
"SDLK_p",
"SDLK_q",
"SDLK_r",
"SDLK_s",
"SDLK_t",
"SDLK_u",
"SDLK_v",
"SDLK_w",
"SDLK_x",
"SDLK_y",
"SDLK_z",
"SDLK_CAPSLOCK",
"SDLK_F1",
"SDLK_F2",
"SDLK_F3",
"SDLK_F4",
"SDLK_F5",
"SDLK_F6",
"SDLK_F7",
"SDLK_F8",
"SDLK_F9",
"SDLK_F10",
"SDLK_F11",
"SDLK_F12",
"SDLK_PRINTSCREEN",
"SDLK_SCROLLLOCK",
"SDLK_PAUSE",
"SDLK_INSERT",
"SDLK_HOME",
"SDLK_PAGEUP",
"SDLK_DELETE",
"SDLK_END",
"SDLK_PAGEDOWN",
"SDLK_RIGHT",
"SDLK_LEFT",
"SDLK_DOWN",
"SDLK_UP",
"SDLK_NUMLOCKCLEAR",
"SDLK_KP_DIVIDE",
"SDLK_KP_MULTIPLY",
"SDLK_KP_MINUS",
"SDLK_KP_PLUS",
"SDLK_KP_ENTER",
"SDLK_KP_1",
"SDLK_KP_2",
"SDLK_KP_3",
"SDLK_KP_4",
"SDLK_KP_5",
"SDLK_KP_6",
"SDLK_KP_7",
"SDLK_KP_8",
"SDLK_KP_9",
"SDLK_KP_0",
"SDLK_KP_PERIOD",
"SDLK_APPLICATION",
"SDLK_POWER",
"SDLK_KP_EQUALS",
"SDLK_F13",
"SDLK_F14",
"SDLK_F15",
"SDLK_F16",
"SDLK_F17",
"SDLK_F18",
"SDLK_F19",
"SDLK_F20",
"SDLK_F21",
"SDLK_F22",
"SDLK_F23",
"SDLK_F24",
"SDLK_EXECUTE",
"SDLK_HELP",
"SDLK_MENU",
"SDLK_SELECT",
"SDLK_STOP",
"SDLK_AGAIN",
"SDLK_UNDO",
"SDLK_CUT",
"SDLK_COPY",
"SDLK_PASTE",
"SDLK_FIND",
"SDLK_MUTE",
"SDLK_VOLUMEUP",
"SDLK_VOLUMEDOWN",
"SDLK_KP_COMMA",
"SDLK_KP_EQUALSAS400",
"SDLK_ALTERASE",
"SDLK_SYSREQ",
"SDLK_CANCEL",
"SDLK_CLEAR",
"SDLK_PRIOR",
"SDLK_RETURN2",
"SDLK_SEPARATOR",
"SDLK_OUT",
"SDLK_OPER",
"SDLK_CLEARAGAIN",
"SDLK_CRSEL",
"SDLK_EXSEL",
"SDLK_KP_00",
"SDLK_KP_000",
"SDLK_THOUSANDSSEPARATOR",
"SDLK_DECIMALSEPARATOR",
"SDLK_CURRENCYUNIT",
"SDLK_CURRENCYSUBUNIT",
"SDLK_KP_LEFTPAREN",
"SDLK_KP_RIGHTPAREN",
"SDLK_KP_LEFTBRACE",
"SDLK_KP_RIGHTBRACE",
"SDLK_KP_TAB",
"SDLK_KP_BACKSPACE",
"SDLK_KP_A",
"SDLK_KP_B",
"SDLK_KP_C",
"SDLK_KP_D",
"SDLK_KP_E",
"SDLK_KP_F",
"SDLK_KP_XOR",
"SDLK_KP_POWER",
"SDLK_KP_PERCENT",
"SDLK_KP_LESS",
"SDLK_KP_GREATER",
"SDLK_KP_AMPERSAND",
"SDLK_KP_DBLAMPERSAND",
"SDLK_KP_VERTICALBAR",
"SDLK_KP_DBLVERTICALBAR",
"SDLK_KP_COLON",
"SDLK_KP_HASH",
"SDLK_KP_SPACE",
"SDLK_KP_AT",
"SDLK_KP_EXCLAM",
"SDLK_KP_MEMSTORE",
"SDLK_KP_MEMRECALL",
"SDLK_KP_MEMCLEAR",
"SDLK_KP_MEMADD",
"SDLK_KP_MEMSUBTRACT",
"SDLK_KP_MEMMULTIPLY",
"SDLK_KP_MEMDIVIDE",
"SDLK_KP_PLUSMINUS",
"SDLK_KP_CLEAR",
"SDLK_KP_CLEARENTRY",
"SDLK_KP_BINARY",
"SDLK_KP_OCTAL",
"SDLK_KP_DECIMAL",
"SDLK_KP_HEXADECIMAL",
"SDLK_LCTRL",
"SDLK_LSHIFT",
"SDLK_LALT",
"SDLK_LGUI",
"SDLK_RCTRL",
"SDLK_RSHIFT",
"SDLK_RALT",
"SDLK_RGUI",
"SDLK_MODE",
"SDLK_AUDIONEXT",
"SDLK_AUDIOPREV",
"SDLK_AUDIOSTOP",
"SDLK_AUDIOPLAY",
"SDLK_AUDIOMUTE",
"SDLK_MEDIASELECT",
"SDLK_WWW",
"SDLK_MAIL",
"SDLK_CALCULATOR",
"SDLK_COMPUTER",
"SDLK_AC_SEARCH",
"SDLK_AC_HOME",
"SDLK_AC_BACK",
"SDLK_AC_FORWARD",
"SDLK_AC_STOP",
"SDLK_AC_REFRESH",
"SDLK_AC_BOOKMARKS",
"SDLK_BRIGHTNESSDOWN",
"SDLK_BRIGHTNESSUP",
"SDLK_DISPLAYSWITCH",
"SDLK_KBDILLUMTOGGLE",
"SDLK_KBDILLUMDOWN",
"SDLK_KBDILLUMUP",
"SDLK_EJECT",
"SDLK_SLEEP",
"SDLK_APP1",
"SDLK_APP2",
"SDLK_AUDIOREWIND",
"SDLK_AUDIOFASTFORWARD",
"SDLK_SOFTLEFT",
"SDLK_SOFTRIGHT",
"SDLK_CALL",
"SDLK_ENDCALL",
)
SDL_KEYMAP = {key: getattr(SDL_KeyCode, key) for key in SDL_KEYS}
CONFIG_SCHEMA = (
binary_sensor.binary_sensor_schema(BinarySensor)

View File

@@ -56,6 +56,13 @@ uint32_t ESP8266UartComponent::get_config() {
}
void ESP8266UartComponent::setup() {
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
// Use Arduino HardwareSerial UARTs if all used pins match the ones
// preconfigured by the platform. For example if RX disabled but TX pin
// is 1 we still want to use Serial.

View File

@@ -6,6 +6,9 @@
#include "esphome/core/defines.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#include "esphome/core/gpio.h"
#include "driver/gpio.h"
#include "soc/gpio_num.h"
#ifdef USE_LOGGER
#include "esphome/components/logger/logger.h"
@@ -96,23 +99,48 @@ void IDFUARTComponent::setup() {
}
void IDFUARTComponent::load_settings(bool dump_config) {
uart_config_t uart_config = this->get_config_();
esp_err_t err = uart_param_config(this->uart_num_, &uart_config);
esp_err_t err;
if (uart_is_driver_installed(this->uart_num_)) {
err = uart_driver_delete(this->uart_num_);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_driver_delete failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}
}
err = uart_driver_install(this->uart_num_, // UART number
this->rx_buffer_size_, // RX ring buffer size
0, // TX ring buffer size. If zero, driver will not use a TX buffer and TX function will
// block task until all data has been sent out
20, // event queue size/depth
&this->uart_event_queue_, // event queue
0 // Flags used to allocate the interrupt
);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_param_config failed: %s", esp_err_to_name(err));
ESP_LOGW(TAG, "uart_driver_install failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;
int8_t rx = this->rx_pin_ != nullptr ? this->rx_pin_->get_pin() : -1;
int8_t flow_control = this->flow_control_pin_ != nullptr ? this->flow_control_pin_->get_pin() : -1;
uint32_t invert = 0;
if (this->tx_pin_ != nullptr && this->tx_pin_->is_inverted())
if (this->tx_pin_ != nullptr && this->tx_pin_->is_inverted()) {
invert |= UART_SIGNAL_TXD_INV;
if (this->rx_pin_ != nullptr && this->rx_pin_->is_inverted())
}
if (this->rx_pin_ != nullptr && this->rx_pin_->is_inverted()) {
invert |= UART_SIGNAL_RXD_INV;
}
err = uart_set_line_inverse(this->uart_num_, invert);
if (err != ESP_OK) {
@@ -128,26 +156,6 @@ void IDFUARTComponent::load_settings(bool dump_config) {
return;
}
if (uart_is_driver_installed(this->uart_num_)) {
uart_driver_delete(this->uart_num_);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_driver_delete failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}
}
err = uart_driver_install(this->uart_num_, /* UART RX ring buffer size. */ this->rx_buffer_size_,
/* UART TX ring buffer size. If set to zero, driver will not use TX buffer, TX function will
block task until all data have been sent out.*/
0,
/* UART event queue size/depth. */ 20, &(this->uart_event_queue_),
/* Flags used to allocate the interrupt. */ 0);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_driver_install failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}
err = uart_set_rx_full_threshold(this->uart_num_, this->rx_full_threshold_);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_set_rx_full_threshold failed: %s", esp_err_to_name(err));
@@ -163,24 +171,32 @@ void IDFUARTComponent::load_settings(bool dump_config) {
}
auto mode = this->flow_control_pin_ != nullptr ? UART_MODE_RS485_HALF_DUPLEX : UART_MODE_UART;
err = uart_set_mode(this->uart_num_, mode);
err = uart_set_mode(this->uart_num_, mode); // per docs, must be called only after uart_driver_install()
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_set_mode failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}
uart_config_t uart_config = this->get_config_();
err = uart_param_config(this->uart_num_, &uart_config);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_param_config failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}
if (dump_config) {
ESP_LOGCONFIG(TAG, "UART %u was reloaded.", this->uart_num_);
ESP_LOGCONFIG(TAG, "Reloaded UART %u", this->uart_num_);
this->dump_config();
}
}
void IDFUARTComponent::dump_config() {
ESP_LOGCONFIG(TAG, "UART Bus %u:", this->uart_num_);
LOG_PIN(" TX Pin: ", tx_pin_);
LOG_PIN(" RX Pin: ", rx_pin_);
LOG_PIN(" Flow Control Pin: ", flow_control_pin_);
LOG_PIN(" TX Pin: ", this->tx_pin_);
LOG_PIN(" RX Pin: ", this->rx_pin_);
LOG_PIN(" Flow Control Pin: ", this->flow_control_pin_);
if (this->rx_pin_ != nullptr) {
ESP_LOGCONFIG(TAG,
" RX Buffer Size: %u\n"

View File

@@ -51,28 +51,53 @@ void LibreTinyUARTComponent::setup() {
bool tx_inverted = tx_pin_ != nullptr && tx_pin_->is_inverted();
bool rx_inverted = rx_pin_ != nullptr && rx_pin_->is_inverted();
auto shouldFallbackToSoftwareSerial = [&]() -> bool {
auto hasFlags = [](InternalGPIOPin *pin, const gpio::Flags mask) -> bool {
return pin && pin->get_flags() & mask != gpio::Flags::FLAG_NONE;
};
if (hasFlags(this->tx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN) ||
hasFlags(this->rx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN)) {
#if LT_ARD_HAS_SOFTSERIAL
ESP_LOGI(TAG, "Pins has flags set. Using Software Serial");
return true;
#else
ESP_LOGW(TAG, "Pin flags are set but not supported for hardware serial. Ignoring");
#endif
}
return false;
};
if (false)
return;
#if LT_HW_UART0
else if ((tx_pin == -1 || tx_pin == PIN_SERIAL0_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL0_RX)) {
else if ((tx_pin == -1 || tx_pin == PIN_SERIAL0_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL0_RX) &&
!shouldFallbackToSoftwareSerial()) {
this->serial_ = &Serial0;
this->hardware_idx_ = 0;
}
#endif
#if LT_HW_UART1
else if ((tx_pin == -1 || tx_pin == PIN_SERIAL1_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL1_RX)) {
else if ((tx_pin == -1 || tx_pin == PIN_SERIAL1_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL1_RX) &&
!shouldFallbackToSoftwareSerial()) {
this->serial_ = &Serial1;
this->hardware_idx_ = 1;
}
#endif
#if LT_HW_UART2
else if ((tx_pin == -1 || tx_pin == PIN_SERIAL2_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL2_RX)) {
else if ((tx_pin == -1 || tx_pin == PIN_SERIAL2_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL2_RX) &&
!shouldFallbackToSoftwareSerial()) {
this->serial_ = &Serial2;
this->hardware_idx_ = 2;
}
#endif
else {
#if LT_ARD_HAS_SOFTSERIAL
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
this->serial_ = new SoftwareSerial(rx_pin, tx_pin, rx_inverted || tx_inverted);
#else
this->serial_ = &Serial;

View File

@@ -52,6 +52,13 @@ uint16_t RP2040UartComponent::get_config() {
}
void RP2040UartComponent::setup() {
if (this->rx_pin_) {
this->rx_pin_->setup();
}
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
this->tx_pin_->setup();
}
uint16_t config = get_config();
constexpr uint32_t valid_tx_uart_0 = __bitset({0, 12, 16, 28});

View File

@@ -244,6 +244,20 @@ RESERVED_IDS = [
"uart0",
"uart1",
"uart2",
# ESP32 ROM functions
"crc16_be",
"crc16_le",
"crc32_be",
"crc32_le",
"crc8_be",
"crc8_le",
"dbg_state",
"debug_timer",
"one_bits",
"recv_packet",
"send_packet",
"check_pos",
"software_reset",
]

View File

@@ -4,7 +4,7 @@ from enum import Enum
from esphome.enum import StrEnum
__version__ = "2025.10.1"
__version__ = "2025.10.4"
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
VALID_SUBSTITUTIONS_CHARACTERS = (
@@ -696,6 +696,7 @@ CONF_OPEN_DRAIN = "open_drain"
CONF_OPEN_DRAIN_INTERRUPT = "open_drain_interrupt"
CONF_OPEN_DURATION = "open_duration"
CONF_OPEN_ENDSTOP = "open_endstop"
CONF_OPENTHREAD = "openthread"
CONF_OPERATION = "operation"
CONF_OPTIMISTIC = "optimistic"
CONF_OPTION = "option"

View File

@@ -11,6 +11,7 @@ from esphome.const import (
CONF_COMMENT,
CONF_ESPHOME,
CONF_ETHERNET,
CONF_OPENTHREAD,
CONF_PORT,
CONF_USE_ADDRESS,
CONF_WEB_SERVER,
@@ -641,6 +642,9 @@ class EsphomeCore:
if CONF_ETHERNET in self.config:
return self.config[CONF_ETHERNET][CONF_USE_ADDRESS]
if CONF_OPENTHREAD in self.config:
return f"{self.name}.local"
return None
@property

View File

@@ -10,6 +10,10 @@ from esphome.helpers import get_bool_env
from .util.password import password_hash
# Sentinel file name used for CORE.config_path when dashboard initializes.
# This ensures .parent returns the config directory instead of root.
_DASHBOARD_SENTINEL_FILE = "___DASHBOARD_SENTINEL___.yaml"
class DashboardSettings:
"""Settings for the dashboard."""
@@ -48,7 +52,12 @@ class DashboardSettings:
self.config_dir = Path(args.configuration)
self.absolute_config_dir = self.config_dir.resolve()
self.verbose = args.verbose
CORE.config_path = self.config_dir / "."
# Set to a sentinel file so .parent gives us the config directory.
# Previously this was `os.path.join(self.config_dir, ".")` which worked because
# os.path.dirname("/config/.") returns "/config", but Path("/config/.").parent
# normalizes to Path("/config") first, then .parent returns Path("/"), breaking
# secret resolution. Using a sentinel file ensures .parent gives the correct directory.
CORE.config_path = self.config_dir / _DASHBOARD_SENTINEL_FILE
@property
def relative_url(self) -> str:
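The comment above describes the Path-vs-os.path difference that motivated the sentinel file. A quick demonstration of that behavior with plain pathlib/os.path (POSIX paths assumed):

import os
from pathlib import Path

# os.path.dirname keeps the trailing "." segment, so the old join worked:
print(os.path.dirname("/config/."))  # /config

# Path() normalizes the "." away at construction, so .parent climbs too far:
print(Path("/config/.").parent)      # /

# With a sentinel file name, .parent lands back on the config directory:
print(Path("/config/___DASHBOARD_SENTINEL___.yaml").parent)  # /config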

View File

@@ -1058,7 +1058,8 @@ class DownloadBinaryRequestHandler(BaseHandler):
"download",
f"{storage_json.name}-{file_name}",
)
path = storage_json.firmware_bin_path.with_name(file_name)
path = storage_json.firmware_bin_path.parent.joinpath(file_name)
if not path.is_file():
args = ["esphome", "idedata", settings.rel_path(configuration)]
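The one-line change above matters for firmware files that live in a subdirectory (the nRF52 zephyr/zephyr.uf2 case covered by the tests further down): Path.with_name() rejects names containing a separator, while joining on the parent directory does not. A short demonstration:

from pathlib import Path

firmware = Path(".esphome/build/nrf52-device/firmware.bin")

try:
    firmware.with_name("zephyr/zephyr.uf2")
except ValueError as err:
    print(err)  # Invalid name 'zephyr/zephyr.uf2'

print(firmware.parent.joinpath("zephyr/zephyr.uf2"))
# .esphome/build/nrf52-device/zephyr/zephyr.uf2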

View File

@@ -224,36 +224,37 @@ def resolve_ip_address(
return res
# Process hosts
cached_addresses: list[str] = []
uncached_hosts: list[str] = []
has_cache = address_cache is not None
for h in hosts:
if is_ip_address(h):
if has_cache:
# If we have a cache, treat IPs as cached
cached_addresses.append(h)
else:
# If no cache, pass IPs through to resolver with hostnames
uncached_hosts.append(h)
_add_ip_addresses_to_addrinfo([h], port, res)
elif address_cache and (cached := address_cache.get_addresses(h)):
# Found in cache
cached_addresses.extend(cached)
_add_ip_addresses_to_addrinfo(cached, port, res)
else:
# Not cached, need to resolve
if address_cache and address_cache.has_cache():
_LOGGER.info("Host %s not in cache, will need to resolve", h)
uncached_hosts.append(h)
# Process cached addresses (includes direct IPs and cached lookups)
_add_ip_addresses_to_addrinfo(cached_addresses, port, res)
# If we have uncached hosts (only non-IP hostnames), resolve them
if uncached_hosts:
from aioesphomeapi.host_resolver import AddrInfo as AioAddrInfo
from esphome.core import EsphomeError
from esphome.resolver import AsyncResolver
resolver = AsyncResolver(uncached_hosts, port)
addr_infos = resolver.resolve()
addr_infos: list[AioAddrInfo] = []
try:
addr_infos = resolver.resolve()
except EsphomeError as err:
if not res:
# No pre-resolved addresses available, DNS resolution is fatal
raise
_LOGGER.info("%s (using %d already resolved IP addresses)", err, len(res))
# Convert aioesphomeapi AddrInfo to our format
for addr_info in addr_infos:
sockaddr = addr_info.sockaddr
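The hunk above partitions hosts into literal IPs, cache hits, and names that still need DNS, and only treats a DNS failure as fatal when nothing was resolved at all. A minimal sketch of that pattern (not the real esphome.helpers code; resolve_dns and cache are placeholders):

from ipaddress import ip_address

def resolve_mixed(hosts, resolve_dns, cache=None):
    resolved, pending = [], []
    for host in hosts:
        try:
            ip_address(host)        # literal IP: use it directly, no lookup
            resolved.append(host)
            continue
        except ValueError:
            pass
        if cache and (hit := cache.get(host)):
            resolved.extend(hit)    # served from the address cache
        else:
            pending.append(host)    # hostname that still needs DNS
    if pending:
        try:
            resolved.extend(resolve_dns(pending))
        except OSError as err:
            if not resolved:
                raise               # nothing usable at all -> fatal
            print(f"{err} (using {len(resolved)} already resolved addresses)")
    return resolved

# resolve_mixed(["192.168.1.100", "test.local"], my_resolver) keeps the literal IP
# even if "test.local" fails to resolve, matching the updated test expectations.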

View File

@@ -52,6 +52,19 @@ number:
widget: spinbox_id
id: lvgl_spinbox_number
name: LVGL Spinbox Number
- platform: template
id: test_brightness
name: "Test Brightness"
min_value: 0
max_value: 255
step: 1
optimistic: true
# Test lambda in automation accessing x parameter directly
# This is a real-world pattern from user configs
on_value:
- lambda: !lambda |-
// Direct use of x parameter in automation
ESP_LOGD("test", "Brightness: %.0f", x);
light:
- platform: lvgl
@@ -110,3 +123,21 @@ text:
platform: lvgl
widget: hello_label
mode: text
text_sensor:
- platform: template
id: test_text_sensor
name: "Test Text Sensor"
# Test nested lambdas in LVGL actions can access automation parameters
on_value:
- lvgl.label.update:
id: hello_label
text: !lambda return x.c_str();
- lvgl.label.update:
id: hello_label
text: !lambda |-
// Test complex lambda with conditionals accessing x parameter
if (x == "*") {
return "WILDCARD";
}
return x.c_str();

View File

@@ -257,7 +257,30 @@ lvgl:
text: "Hello shiny day"
text_color: 0xFFFFFF
align: bottom_mid
- label:
id: setup_lambda_label
# Test lambda in widget property during setup (LvContext)
# Should NOT receive lv_component parameter
text: !lambda |-
char buf[32];
snprintf(buf, sizeof(buf), "Setup: %d", 42);
return std::string(buf);
align: top_mid
text_font: space16
- label:
id: chip_info_label
# Test complex setup lambda (real-world pattern)
# Should NOT receive lv_component parameter
text: !lambda |-
// Test conditional compilation and string formatting
char buf[64];
#ifdef USE_ESP_IDF
snprintf(buf, sizeof(buf), "IDF: v%d.%d", ESP_IDF_VERSION_MAJOR, ESP_IDF_VERSION_MINOR);
#else
snprintf(buf, sizeof(buf), "Arduino");
#endif
return std::string(buf);
align: top_left
- obj:
align: center
arc_opa: COVER

View File

@@ -14,10 +14,10 @@ display:
binary_sensor:
- platform: sdl
id: key_up
key: SDLK_a
key: SDLK_UP
- platform: sdl
id: key_down
key: SDLK_d
key: SDLK_DOWN
- platform: sdl
id: key_enter
key: SDLK_s
key: SDLK_RETURN

View File

@@ -2,11 +2,13 @@
from __future__ import annotations
from argparse import Namespace
from pathlib import Path
import tempfile
import pytest
from esphome.core import CORE
from esphome.dashboard.settings import DashboardSettings
@@ -159,3 +161,63 @@ def test_rel_path_with_numeric_args(dashboard_settings: DashboardSettings) -> No
result = dashboard_settings.rel_path("123", "456.789")
expected = dashboard_settings.config_dir / "123" / "456.789"
assert result == expected
def test_config_path_parent_resolves_to_config_dir(tmp_path: Path) -> None:
"""Test that CORE.config_path.parent resolves to config_dir after parse_args.
This is a regression test for issue #11280 where binary download failed
when using packages with secrets after the Path migration in 2025.10.0.
The issue was that after switching from os.path to Path:
- Before: os.path.dirname("/config/.") → "/config"
- After: Path("/config/.").parent → Path("/") (normalized first!)
The fix uses a sentinel file so .parent returns the correct directory:
- Fixed: Path("/config/___DASHBOARD_SENTINEL___.yaml").parent → Path("/config")
"""
# Create test directory structure with secrets and packages
config_dir = tmp_path / "config"
config_dir.mkdir()
# Create secrets.yaml with obviously fake test values
secrets_file = config_dir / "secrets.yaml"
secrets_file.write_text(
"wifi_ssid: TEST-DUMMY-SSID\n"
"wifi_password: not-a-real-password-just-for-testing\n"
)
# Create package file that uses secrets
package_file = config_dir / "common.yaml"
package_file.write_text(
"wifi:\n ssid: !secret wifi_ssid\n password: !secret wifi_password\n"
)
# Create main device config that includes the package
device_config = config_dir / "test-device.yaml"
device_config.write_text(
"esphome:\n name: test-device\n\npackages:\n common: !include common.yaml\n"
)
# Set up dashboard settings with our test config directory
settings = DashboardSettings()
args = Namespace(
configuration=str(config_dir),
password=None,
username=None,
ha_addon=False,
verbose=False,
)
settings.parse_args(args)
# Verify that CORE.config_path.parent correctly points to the config directory
# This is critical for secret resolution in yaml_util.py which does:
# main_config_dir = CORE.config_path.parent
# main_secret_yml = main_config_dir / "secrets.yaml"
assert CORE.config_path.parent == config_dir.resolve()
assert (CORE.config_path.parent / "secrets.yaml").exists()
assert (CORE.config_path.parent / "common.yaml").exists()
# Verify that CORE.config_path itself uses the sentinel file
assert CORE.config_path.name == "___DASHBOARD_SENTINEL___.yaml"
assert not CORE.config_path.exists() # Sentinel file doesn't actually exist

View File

@@ -1,5 +1,6 @@
from __future__ import annotations
from argparse import Namespace
import asyncio
from collections.abc import Generator
from contextlib import asynccontextmanager
@@ -17,6 +18,8 @@ from tornado.ioloop import IOLoop
from tornado.testing import bind_unused_port
from tornado.websocket import WebSocketClientConnection, websocket_connect
from esphome import yaml_util
from esphome.core import CORE
from esphome.dashboard import web_server
from esphome.dashboard.const import DashboardEvent
from esphome.dashboard.core import DASHBOARD
@@ -32,6 +35,26 @@ from esphome.zeroconf import DiscoveredImport
from .common import get_fixture_path
def get_build_path(base_path: Path, device_name: str) -> Path:
"""Get the build directory path for a device.
This is a test helper that constructs the standard ESPHome build directory
structure. Note: This helper does NOT perform path traversal sanitization
because it's only used in tests where we control the inputs. The actual
web_server.py code handles sanitization in DownloadBinaryRequestHandler.get()
via file_name.replace("..", "").lstrip("/").
Args:
base_path: The base temporary path (typically tmp_path from pytest)
device_name: The name of the device (should not contain path separators
in production use, but tests may use it for specific scenarios)
Returns:
Path to the build directory (.esphome/build/device_name)
"""
return base_path / ".esphome" / "build" / device_name
class DashboardTestHelper:
def __init__(self, io_loop: IOLoop, client: AsyncHTTPClient, port: int) -> None:
self.io_loop = io_loop
@@ -414,6 +437,180 @@ async def test_download_binary_handler_idedata_fallback(
assert response.body == b"bootloader content"
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
async def test_download_binary_handler_subdirectory_file(
dashboard: DashboardTestHelper,
tmp_path: Path,
mock_storage_json: MagicMock,
) -> None:
"""Test the DownloadBinaryRequestHandler.get with file in subdirectory (nRF52 case).
This is a regression test for issue #11343 where the Path migration broke
downloads for nRF52 firmware files in subdirectories like 'zephyr/zephyr.uf2'.
The issue was that with_name() doesn't accept path separators:
- Before: path = storage_json.firmware_bin_path.with_name(file_name)
ValueError: Invalid name 'zephyr/zephyr.uf2'
- After: path = storage_json.firmware_bin_path.parent.joinpath(file_name)
Works correctly with subdirectory paths
"""
# Create a fake nRF52 build structure with firmware in subdirectory
build_dir = get_build_path(tmp_path, "nrf52-device")
zephyr_dir = build_dir / "zephyr"
zephyr_dir.mkdir(parents=True)
# Create the main firmware binary (would be in build root)
firmware_file = build_dir / "firmware.bin"
firmware_file.write_bytes(b"main firmware")
# Create the UF2 file in zephyr subdirectory (nRF52 specific)
uf2_file = zephyr_dir / "zephyr.uf2"
uf2_file.write_bytes(b"nRF52 UF2 firmware content")
# Mock storage JSON
mock_storage = Mock()
mock_storage.name = "nrf52-device"
mock_storage.firmware_bin_path = firmware_file
mock_storage_json.load.return_value = mock_storage
# Request the UF2 file with subdirectory path
response = await dashboard.fetch(
"/download.bin?configuration=nrf52-device.yaml&file=zephyr/zephyr.uf2",
method="GET",
)
assert response.code == 200
assert response.body == b"nRF52 UF2 firmware content"
assert response.headers["Content-Type"] == "application/octet-stream"
assert "attachment" in response.headers["Content-Disposition"]
# Download name should be device-name + full file path
assert "nrf52-device-zephyr/zephyr.uf2" in response.headers["Content-Disposition"]
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
async def test_download_binary_handler_subdirectory_file_url_encoded(
dashboard: DashboardTestHelper,
tmp_path: Path,
mock_storage_json: MagicMock,
) -> None:
"""Test the DownloadBinaryRequestHandler.get with URL-encoded subdirectory path.
Verifies that URL-encoded paths (e.g., zephyr%2Fzephyr.uf2) are correctly
decoded and handled, and that custom download names work with subdirectories.
"""
# Create a fake build structure with firmware in subdirectory
build_dir = get_build_path(tmp_path, "test")
zephyr_dir = build_dir / "zephyr"
zephyr_dir.mkdir(parents=True)
firmware_file = build_dir / "firmware.bin"
firmware_file.write_bytes(b"content")
uf2_file = zephyr_dir / "zephyr.uf2"
uf2_file.write_bytes(b"content")
# Mock storage JSON
mock_storage = Mock()
mock_storage.name = "test_device"
mock_storage.firmware_bin_path = firmware_file
mock_storage_json.load.return_value = mock_storage
# Request with URL-encoded path and custom download name
response = await dashboard.fetch(
"/download.bin?configuration=test.yaml&file=zephyr%2Fzephyr.uf2&download=custom_name.bin",
method="GET",
)
assert response.code == 200
assert "custom_name.bin" in response.headers["Content-Disposition"]
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
@pytest.mark.parametrize(
"attack_path",
[
pytest.param("../../../secrets.yaml", id="basic_traversal"),
pytest.param("..%2F..%2F..%2Fsecrets.yaml", id="url_encoded"),
pytest.param("zephyr/../../../secrets.yaml", id="traversal_with_prefix"),
pytest.param("/etc/passwd", id="absolute_path"),
pytest.param("//etc/passwd", id="double_slash_absolute"),
pytest.param("....//secrets.yaml", id="multiple_dots"),
],
)
async def test_download_binary_handler_path_traversal_protection(
dashboard: DashboardTestHelper,
tmp_path: Path,
mock_storage_json: MagicMock,
attack_path: str,
) -> None:
"""Test that DownloadBinaryRequestHandler prevents path traversal attacks.
Verifies that attempts to use '..' in file paths are sanitized to prevent
accessing files outside the build directory. Tests multiple attack vectors.
"""
# Create build structure
build_dir = get_build_path(tmp_path, "test")
build_dir.mkdir(parents=True)
firmware_file = build_dir / "firmware.bin"
firmware_file.write_bytes(b"firmware content")
# Create a sensitive file outside the build directory that should NOT be accessible
sensitive_file = tmp_path / "secrets.yaml"
sensitive_file.write_bytes(b"secret: my_secret_password")
# Mock storage JSON
mock_storage = Mock()
mock_storage.name = "test_device"
mock_storage.firmware_bin_path = firmware_file
mock_storage_json.load.return_value = mock_storage
# Attempt path traversal attack - should be blocked
with pytest.raises(HTTPClientError) as exc_info:
await dashboard.fetch(
f"/download.bin?configuration=test.yaml&file={attack_path}",
method="GET",
)
# Should get 404 (file not found after sanitization) or 500 (idedata fails)
assert exc_info.value.code in (404, 500)
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
async def test_download_binary_handler_multiple_subdirectory_levels(
dashboard: DashboardTestHelper,
tmp_path: Path,
mock_storage_json: MagicMock,
) -> None:
"""Test downloading files from multiple subdirectory levels.
Verifies that joinpath correctly handles multi-level paths like 'build/output/firmware.bin'.
"""
# Create nested directory structure
build_dir = get_build_path(tmp_path, "test")
nested_dir = build_dir / "build" / "output"
nested_dir.mkdir(parents=True)
firmware_file = build_dir / "firmware.bin"
firmware_file.write_bytes(b"main")
nested_file = nested_dir / "firmware.bin"
nested_file.write_bytes(b"nested firmware content")
# Mock storage JSON
mock_storage = Mock()
mock_storage.name = "test_device"
mock_storage.firmware_bin_path = firmware_file
mock_storage_json.load.return_value = mock_storage
response = await dashboard.fetch(
"/download.bin?configuration=test.yaml&file=build/output/firmware.bin",
method="GET",
)
assert response.code == 200
assert response.body == b"nested firmware content"
@pytest.mark.asyncio
async def test_edit_request_handler_post_invalid_file(
dashboard: DashboardTestHelper,
@@ -1302,3 +1499,71 @@ async def test_dashboard_subscriber_refresh_event(
# Give it a moment to clean up
await asyncio.sleep(0.01)
@pytest.mark.asyncio
async def test_dashboard_yaml_loading_with_packages_and_secrets(
tmp_path: Path,
) -> None:
"""Test dashboard YAML loading with packages referencing secrets.
This is a regression test for issue #11280 where binary download failed
when using packages with secrets after the Path migration in 2025.10.0.
This test verifies that CORE.config_path initialization in the dashboard
allows yaml_util.load_yaml() to correctly resolve secrets from packages.
"""
# Create test directory structure with secrets and packages
config_dir = tmp_path / "config"
config_dir.mkdir()
# Create secrets.yaml with obviously fake test values
secrets_file = config_dir / "secrets.yaml"
secrets_file.write_text(
"wifi_ssid: TEST-DUMMY-SSID\n"
"wifi_password: not-a-real-password-just-for-testing\n"
)
# Create package file that uses secrets
package_file = config_dir / "common.yaml"
package_file.write_text(
"wifi:\n ssid: !secret wifi_ssid\n password: !secret wifi_password\n"
)
# Create main device config that includes the package
device_config = config_dir / "test-download-secrets.yaml"
device_config.write_text(
"esphome:\n name: test-download-secrets\n platform: ESP32\n board: esp32dev\n\n"
"packages:\n common: !include common.yaml\n"
)
# Initialize DASHBOARD settings with our test config directory
# This is what sets CORE.config_path - the critical code path for the bug
args = Namespace(
configuration=str(config_dir),
password=None,
username=None,
ha_addon=False,
verbose=False,
)
DASHBOARD.settings.parse_args(args)
# With the fix: CORE.config_path should be config_dir / "___DASHBOARD_SENTINEL___.yaml"
# so CORE.config_path.parent would be config_dir
# Without the fix: CORE.config_path is config_dir / "." which normalizes to config_dir
# so CORE.config_path.parent would be tmp_path (the parent of config_dir)
# The fix ensures CORE.config_path.parent points to config_dir
assert CORE.config_path.parent == config_dir.resolve(), (
f"CORE.config_path.parent should point to config_dir. "
f"Got {CORE.config_path.parent}, expected {config_dir.resolve()}. "
f"CORE.config_path is {CORE.config_path}"
)
# Now load the YAML with packages that reference secrets
# This is where the bug would manifest - yaml_util.load_yaml would fail
# to find secrets.yaml because CORE.config_path.parent pointed to the wrong place
config = yaml_util.load_yaml(device_config)
# If we get here, secret resolution worked!
assert "esphome" in config
assert config["esphome"]["name"] == "test-download-secrets"

View File

@@ -570,6 +570,13 @@ class TestEsphomeCore:
assert target.address == "4.3.2.1"
def test_address__openthread(self, target):
target.name = "test-device"
target.config = {}
target.config[const.CONF_OPENTHREAD] = {}
assert target.address == "test-device.local"
def test_is_esp32(self, target):
target.data[const.KEY_CORE] = {const.KEY_TARGET_PLATFORM: "esp32"}

View File

@@ -454,9 +454,27 @@ def test_resolve_ip_address_mixed_list() -> None:
# Mix of IP and hostname - should use async resolver
result = helpers.resolve_ip_address(["192.168.1.100", "test.local"], 6053)
assert len(result) == 2
assert result[0][4][0] == "192.168.1.100"
assert result[1][4][0] == "192.168.1.200"
MockResolver.assert_called_once_with(["test.local"], 6053)
mock_resolver.resolve.assert_called_once()
def test_resolve_ip_address_mixed_list_fail() -> None:
"""Test resolving a mix of IPs and hostnames with resolve failed."""
with patch("esphome.resolver.AsyncResolver") as MockResolver:
mock_resolver = MockResolver.return_value
mock_resolver.resolve.side_effect = EsphomeError(
"Error resolving IP address: [test.local]"
)
# Mix of IP and hostname - should use async resolver
result = helpers.resolve_ip_address(["192.168.1.100", "test.local"], 6053)
assert len(result) == 1
assert result[0][4][0] == "192.168.1.200"
MockResolver.assert_called_once_with(["192.168.1.100", "test.local"], 6053)
assert result[0][4][0] == "192.168.1.100"
MockResolver.assert_called_once_with(["test.local"], 6053)
mock_resolver.resolve.assert_called_once()

View File

@@ -321,12 +321,14 @@ def test_choose_upload_log_host_with_serial_device_no_ports(
) -> None:
"""Test SERIAL device when no serial ports are found."""
setup_core()
result = choose_upload_log_host(
default="SERIAL",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
with pytest.raises(
EsphomeError, match="All specified devices .* could not be resolved"
):
choose_upload_log_host(
default="SERIAL",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert "No serial ports found, skipping SERIAL device" in caplog.text
@@ -367,12 +369,14 @@ def test_choose_upload_log_host_with_ota_device_with_api_config() -> None:
"""Test OTA device when API is configured (no upload without OTA in config)."""
setup_core(config={CONF_API: {}}, address="192.168.1.100")
result = choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
with pytest.raises(
EsphomeError, match="All specified devices .* could not be resolved"
):
choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
def test_choose_upload_log_host_with_ota_device_with_api_config_logging() -> None:
@@ -405,12 +409,14 @@ def test_choose_upload_log_host_with_ota_device_no_fallback() -> None:
"""Test OTA device with no valid fallback options."""
setup_core()
result = choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
with pytest.raises(
EsphomeError, match="All specified devices .* could not be resolved"
):
choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
@pytest.mark.usefixtures("mock_choose_prompt")
@@ -615,21 +621,19 @@ def test_choose_upload_log_host_empty_defaults_list() -> None:
@pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging")
def test_choose_upload_log_host_all_devices_unresolved(
caplog: pytest.LogCaptureFixture,
) -> None:
def test_choose_upload_log_host_all_devices_unresolved() -> None:
"""Test when all specified devices cannot be resolved."""
setup_core()
result = choose_upload_log_host(
default=["SERIAL", "OTA"],
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
assert (
"All specified devices: ['SERIAL', 'OTA'] could not be resolved." in caplog.text
)
with pytest.raises(
EsphomeError,
match=r"All specified devices \['SERIAL', 'OTA'\] could not be resolved",
):
choose_upload_log_host(
default=["SERIAL", "OTA"],
check_default=None,
purpose=Purpose.UPLOADING,
)
@pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging")
@@ -762,12 +766,14 @@ def test_choose_upload_log_host_no_address_with_ota_config() -> None:
"""Test OTA device when OTA is configured but no address is set."""
setup_core(config={CONF_OTA: {}})
result = choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
with pytest.raises(
EsphomeError, match="All specified devices .* could not be resolved"
):
choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
@dataclass