Mirror of https://github.com/esphome/esphome.git (synced 2026-02-11 10:12:38 +00:00)

Compare commits: broken_tar...2025.12.0b (10 commits)
| Author | SHA1 | Date |
|---|---|---|
| | a3a2a6d965 | |
| | 26770e09dc | |
| | 9f2693ead5 | |
| | 3642399460 | |
| | 3a6edbc2c7 | |
| | 608f834eaa | |
| | 5919355d18 | |
| | 1e23b10eed | |
| | ad0218fd40 | |
| | 87142efbb4 | |
16  .github/workflows/ci.yml  vendored
@@ -959,13 +959,13 @@ jobs:
- memory-impact-comment
if: always()
steps:
- name: Success
if: ${{ !(contains(needs.*.result, 'failure')) }}
run: exit 0
- name: Failure
if: ${{ contains(needs.*.result, 'failure') }}
- name: Check job results
env:
JSON_DOC: ${{ toJSON(needs) }}
NEEDS_JSON: ${{ toJSON(needs) }}
run: |
echo $JSON_DOC | jq
exit 1
# memory-impact-target-branch is allowed to fail without blocking CI.
# This job builds the target branch (dev/beta/release) which may fail because:
# 1. The target branch has a build issue independent of this PR
# 2. This PR fixes a build issue on the target branch
# In either case, we only care that the PR branch builds successfully.
echo "$NEEDS_JSON" | jq -e 'del(.["memory-impact-target-branch"]) | all(.result != "failure")'
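For reference, the replacement check drops the `memory-impact-target-branch` entry from the `needs` context and fails only if any of the remaining jobs reported `failure`. A minimal Python sketch of the same rule, applied to a hypothetical `needs` payload (job names and results are illustrative, not part of the workflow):

```python
import json

# Hypothetical payload shaped like GitHub Actions' toJSON(needs) output.
needs_json = json.dumps({
    "memory-impact-target-branch": {"result": "failure"},  # exempt from gating
    "memory-impact-comment": {"result": "success"},
    "some-other-job": {"result": "success"},
})

def ci_ok(needs: dict) -> bool:
    # Same rule as the jq filter: ignore the target-branch build,
    # then require that no remaining job ended in "failure".
    gated = {k: v for k, v in needs.items() if k != "memory-impact-target-branch"}
    return all(job["result"] != "failure" for job in gated.values())

print(ci_ok(json.loads(needs_json)))  # True: only the exempt job failed
```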
2  Doxyfile
@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.

PROJECT_NUMBER = 2025.12.0-dev
PROJECT_NUMBER = 2025.12.0b1

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -41,6 +41,7 @@ AUTO_LOAD = ["split_buffer"]
DEPENDENCIES = ["spi"]

CONF_INIT_SEQUENCE_ID = "init_sequence_id"
CONF_MINIMUM_UPDATE_INTERVAL = "minimum_update_interval"

epaper_spi_ns = cg.esphome_ns.namespace("epaper_spi")
EPaperBase = epaper_spi_ns.class_(
@@ -71,6 +72,9 @@ TRANSFORM_OPTIONS = {CONF_MIRROR_X, CONF_MIRROR_Y, CONF_SWAP_XY}
def model_schema(config):
model = MODELS[config[CONF_MODEL]]
class_name = epaper_spi_ns.class_(model.class_name, EPaperBase)
minimum_update_interval = update_interval(
model.get_default(CONF_MINIMUM_UPDATE_INTERVAL, "1s")
)
cv_dimensions = cv.Optional if model.get_default(CONF_WIDTH) else cv.Required
return (
display.FULL_DISPLAY_SCHEMA.extend(
@@ -90,9 +94,9 @@ def model_schema(config):
{
cv.Optional(CONF_ROTATION, default=0): validate_rotation,
cv.Required(CONF_MODEL): cv.one_of(model.name, upper=True),
cv.Optional(
CONF_UPDATE_INTERVAL, default=cv.UNDEFINED
): update_interval,
cv.Optional(CONF_UPDATE_INTERVAL, default=cv.UNDEFINED): cv.All(
update_interval, cv.Range(min=minimum_update_interval)
),
cv.Optional(CONF_TRANSFORM): cv.Schema(
{
cv.Required(CONF_MIRROR_X): cv.boolean,
@@ -153,9 +157,8 @@ def _final_validate(config):
else:
# If no drawing methods are configured, and LVGL is not enabled, show a test card
config[CONF_SHOW_TEST_CARD] = True
config[CONF_UPDATE_INTERVAL] = core.TimePeriod(
seconds=60
).total_milliseconds
elif CONF_UPDATE_INTERVAL not in config:
config[CONF_UPDATE_INTERVAL] = update_interval("1min")
return config
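The schema change above validates `update_interval` against a per-model minimum (`cv.All(update_interval, cv.Range(min=minimum_update_interval))`). A standalone sketch of the same pattern with plain voluptuous; the model table and the 30-second minimum are illustrative, not taken from the ESPHome schema helpers:

```python
import voluptuous as vol

# Illustrative per-model minimum update intervals, in seconds.
MODEL_MINIMUMS = {"spectra-e6": 30.0, "generic": 1.0}

def model_schema(model: str) -> vol.Schema:
    minimum = MODEL_MINIMUMS.get(model, 1.0)
    return vol.Schema({
        # update_interval must be at least the model's minimum
        vol.Optional("update_interval", default=minimum): vol.All(
            vol.Coerce(float), vol.Range(min=minimum)
        ),
    })

schema = model_schema("spectra-e6")
print(schema({"update_interval": 60}))  # accepted
try:
    schema({"update_interval": 5})      # rejected: below the 30 s minimum
except vol.Invalid as err:
    print("rejected:", err)
```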
@@ -286,7 +286,7 @@ void EPaperBase::initialise_() {
* @param y
* @return false if the coordinates are out of bounds
*/
bool EPaperBase::rotate_coordinates_(int &x, int &y) const {
bool EPaperBase::rotate_coordinates_(int &x, int &y) {
if (!this->get_clipping().inside(x, y))
return false;
if (this->transform_ & SWAP_XY)
@@ -297,6 +297,10 @@ bool EPaperBase::rotate_coordinates_(int &x, int &y) const {
y = this->height_ - y - 1;
if (x >= this->width_ || y >= this->height_ || x < 0 || y < 0)
return false;
this->x_low_ = clamp_at_most(this->x_low_, x);
this->x_high_ = clamp_at_least(this->x_high_, x + 1);
this->y_low_ = clamp_at_most(this->y_low_, y);
this->y_high_ = clamp_at_least(this->y_high_, y + 1);
return true;
}

@@ -319,10 +323,6 @@ void HOT EPaperBase::draw_pixel_at(int x, int y, Color color) {
} else {
this->buffer_[byte_position] = original | pixel_bit;
}
this->x_low_ = clamp_at_most(this->x_low_, x);
this->x_high_ = clamp_at_least(this->x_high_, x + 1);
this->y_low_ = clamp_at_most(this->y_low_, y);
this->y_high_ = clamp_at_least(this->y_high_, y + 1);
}

void EPaperBase::dump_config() {

@@ -106,7 +106,7 @@ class EPaperBase : public Display,
void initialise_();
void wait_for_idle_(bool should_wait);
bool init_buffer_(size_t buffer_length);
bool rotate_coordinates_(int &x, int &y) const;
bool rotate_coordinates_(int &x, int &y);

/**
* Methods that must be implemented by concrete classes to control the display
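The .cpp and .h hunks above drop the `const` qualifier from `rotate_coordinates_` and move the partial-refresh window bookkeeping (`x_low_`/`x_high_`/`y_low_`/`y_high_`) out of `draw_pixel_at` and into `rotate_coordinates_`, so the window grows from already-rotated coordinates. A minimal sketch of that bounding-window idea, with stand-ins for `clamp_at_most`/`clamp_at_least`:

```python
def clamp_at_most(current: int, value: int) -> int:
    return min(current, value)   # stand-in for the C++ helper

def clamp_at_least(current: int, value: int) -> int:
    return max(current, value)   # stand-in for the C++ helper

class RefreshWindow:
    """Tracks the bounding box of pixels touched since the last flush."""

    def __init__(self, width: int, height: int) -> None:
        # Start "empty": low bounds sit above high bounds until a pixel is marked.
        self.x_low, self.y_low = width, height
        self.x_high = self.y_high = 0

    def mark(self, x: int, y: int) -> None:
        self.x_low = clamp_at_most(self.x_low, x)
        self.x_high = clamp_at_least(self.x_high, x + 1)
        self.y_low = clamp_at_most(self.y_low, y)
        self.y_high = clamp_at_least(self.y_high, y + 1)

window = RefreshWindow(200, 100)
window.mark(10, 20)
window.mark(50, 5)
print(window.x_low, window.x_high, window.y_low, window.y_high)  # 10 51 5 21
```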
@@ -4,8 +4,8 @@ from . import EpaperModel


class SpectraE6(EpaperModel):
def __init__(self, name, class_name="EPaperSpectraE6", **kwargs):
super().__init__(name, class_name, **kwargs)
def __init__(self, name, class_name="EPaperSpectraE6", **defaults):
super().__init__(name, class_name, **defaults)

# fmt: off
def get_init_sequence(self, config: dict):
@@ -30,7 +30,7 @@ class SpectraE6(EpaperModel):
return self.defaults.get(key, fallback)


spectra_e6 = SpectraE6("spectra-e6")
spectra_e6 = SpectraE6("spectra-e6", minimum_update_interval="30s")

spectra_e6_7p3 = spectra_e6.extend(
"7.3in-Spectra-E6",
@@ -498,12 +498,12 @@ void LvglComponent::setup() {
buf_bytes /= MIN_BUFFER_FRAC;
buffer = lv_custom_mem_alloc(buf_bytes); // NOLINT
}
this->buffer_frac_ = frac;
if (buffer == nullptr) {
this->status_set_error(LOG_STR("Memory allocation failure"));
this->mark_failed();
return;
}
this->buffer_frac_ = frac;
lv_disp_draw_buf_init(&this->draw_buf_, buffer, nullptr, buffer_pixels);
this->disp_drv_.hor_res = display->get_width();
this->disp_drv_.ver_res = display->get_height();
@@ -24,7 +24,7 @@ from esphome.components.mipi import (
CONF_VSYNC_BACK_PORCH,
CONF_VSYNC_FRONT_PORCH,
CONF_VSYNC_PULSE_WIDTH,
MODE_BGR,
MODE_RGB,
PIXEL_MODE_16BIT,
PIXEL_MODE_18BIT,
DriverChip,
@@ -157,7 +157,7 @@ def model_schema(config):
model.option(CONF_ENABLE_PIN, cv.UNDEFINED): cv.ensure_list(
pins.gpio_output_pin_schema
),
model.option(CONF_COLOR_ORDER, MODE_BGR): cv.enum(COLOR_ORDERS, upper=True),
model.option(CONF_COLOR_ORDER, MODE_RGB): cv.enum(COLOR_ORDERS, upper=True),
model.option(CONF_DRAW_ROUNDING, 2): power_of_two,
model.option(CONF_PIXEL_MODE, PIXEL_MODE_16BIT): cv.one_of(
*pixel_modes, lower=True
@@ -280,14 +285,9 @@ async def to_code(config):
red_pins = config[CONF_DATA_PINS][CONF_RED]
green_pins = config[CONF_DATA_PINS][CONF_GREEN]
blue_pins = config[CONF_DATA_PINS][CONF_BLUE]
if config[CONF_COLOR_ORDER] == "BGR":
dpins.extend(red_pins)
dpins.extend(green_pins)
dpins.extend(blue_pins)
else:
dpins.extend(blue_pins)
dpins.extend(green_pins)
dpins.extend(red_pins)
dpins.extend(blue_pins)
dpins.extend(green_pins)
dpins.extend(red_pins)
# swap bytes to match big-endian format
dpins = dpins[8:16] + dpins[0:8]
else:
@@ -371,17 +371,10 @@ void MipiRgb::dump_config() {
get_pin_name(this->de_pin_).c_str(), get_pin_name(this->pclk_pin_).c_str(),
get_pin_name(this->hsync_pin_).c_str(), get_pin_name(this->vsync_pin_).c_str());

if (this->madctl_ & MADCTL_BGR) {
this->dump_pins_(8, 13, "Blue", 0);
this->dump_pins_(13, 16, "Green", 0);
this->dump_pins_(0, 3, "Green", 3);
this->dump_pins_(3, 8, "Red", 0);
} else {
this->dump_pins_(8, 13, "Red", 0);
this->dump_pins_(13, 16, "Green", 0);
this->dump_pins_(0, 3, "Green", 3);
this->dump_pins_(3, 8, "Blue", 0);
}
this->dump_pins_(8, 13, "Blue", 0);
this->dump_pins_(13, 16, "Green", 0);
this->dump_pins_(0, 3, "Green", 3);
this->dump_pins_(3, 8, "Red", 0);
}

} // namespace mipi_rgb
@@ -7,7 +7,6 @@ ST7701S(
"T-PANEL-S3",
width=480,
height=480,
color_order="BGR",
invert_colors=False,
swap_xy=UNDEFINED,
spi_mode="MODE3",
@@ -56,7 +55,6 @@ t_rgb = ST7701S(
"T-RGB-2.1",
width=480,
height=480,
color_order="BGR",
pixel_mode="18bit",
invert_colors=False,
swap_xy=UNDEFINED,

@@ -82,7 +82,6 @@ st7701s.extend(
"MAKERFABS-4",
width=480,
height=480,
color_order="RGB",
invert_colors=True,
pixel_mode="18bit",
cs_pin=1,
@@ -1,13 +1,13 @@
from esphome.components.mipi import DriverChip
from esphome.components.mipi import DriverChip, delay
from esphome.config_validation import UNDEFINED

from .st7701s import st7701s

# fmt: off
wave_4_3 = DriverChip(
"ESP32-S3-TOUCH-LCD-4.3",
swap_xy=UNDEFINED,
initsequence=(),
color_order="RGB",
width=800,
height=480,
pclk_frequency="16MHz",
@@ -55,10 +55,9 @@ wave_4_3.extend(
)

st7701s.extend(
"WAVESHARE-4-480x480",
"WAVESHARE-4-480X480",
data_rate="2MHz",
spi_mode="MODE3",
color_order="BGR",
pixel_mode="18bit",
width=480,
height=480,
@@ -76,3 +75,72 @@ st7701s.extend(
"blue": [5, 45, 48, 47, 21],
},
)

st7701s.extend(
"WAVESHARE-3.16-320X820",
width=320,
height=820,
de_pin=40,
hsync_pin=38,
vsync_pin=39,
pclk_pin=41,
cs_pin={
"number": 0,
"ignore_strapping_warning": True,
},
pclk_frequency="18MHz",
reset_pin=16,
hsync_back_porch=30,
hsync_front_porch=30,
hsync_pulse_width=6,
vsync_back_porch=20,
vsync_front_porch=20,
vsync_pulse_width=40,
data_pins={
"red": [17, 46, 3, 8, 18],
"green": [14, 13, 12, 11, 10, 9],
"blue": [21, 5, 45, 48, 47],
},
initsequence=(
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x13),
(0xEF, 0x08),
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x10),
(0xC0, 0xE5, 0x02),
(0xC1, 0x15, 0x0A),
(0xC2, 0x07, 0x02),
(0xCC, 0x10),
(0xB0, 0x00, 0x08, 0x51, 0x0D, 0xCE, 0x06, 0x00, 0x08, 0x08, 0x24, 0x05, 0xD0, 0x0F, 0x6F, 0x36, 0x1F),
(0xB1, 0x00, 0x10, 0x4F, 0x0C, 0x11, 0x05, 0x00, 0x07, 0x07, 0x18, 0x02, 0xD3, 0x11, 0x6E, 0x34, 0x1F),
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x11),
(0xB0, 0x4D),
(0xB1, 0x37),
(0xB2, 0x87),
(0xB3, 0x80),
(0xB5, 0x4A),
(0xB7, 0x85),
(0xB8, 0x21),
(0xB9, 0x00, 0x13),
(0xC0, 0x09),
(0xC1, 0x78),
(0xC2, 0x78),
(0xD0, 0x88),
(0xE0, 0x80, 0x00, 0x02),
(0xE1, 0x0F, 0xA0, 0x00, 0x00, 0x10, 0xA0, 0x00, 0x00, 0x00, 0x60, 0x60),
(0xE2, 0x30, 0x30, 0x60, 0x60, 0x45, 0xA0, 0x00, 0x00, 0x46, 0xA0, 0x00, 0x00, 0x00),
(0xE3, 0x00, 0x00, 0x33, 0x33),
(0xE4, 0x44, 0x44),
(0xE5, 0x0F, 0x4A, 0xA0, 0xA0, 0x11, 0x4A, 0xA0, 0xA0, 0x13, 0x4A, 0xA0, 0xA0, 0x15, 0x4A, 0xA0, 0xA0),
(0xE6, 0x00, 0x00, 0x33, 0x33),
(0xE7, 0x44, 0x44),
(0xE8, 0x10, 0x4A, 0xA0, 0xA0, 0x12, 0x4A, 0xA0, 0xA0, 0x14, 0x4A, 0xA0, 0xA0, 0x16, 0x4A, 0xA0, 0xA0),
(0xEB, 0x02, 0x00, 0x4E, 0x4E, 0xEE, 0x44, 0x00),
(0xED, 0xFF, 0xFF, 0x04, 0x56, 0x72, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x27, 0x65, 0x40, 0xFF, 0xFF),
(0xEF, 0x08, 0x08, 0x08, 0x40, 0x3F, 0x64),
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x13),
(0xE8, 0x00, 0x0E),
(0xE8, 0x00, 0x0C),
delay(10),
(0xE8, 0x00, 0x00),
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x00),
)
)
@@ -1,5 +1,9 @@
from collections import UserDict
from collections.abc import Callable
from functools import reduce
import logging
from pathlib import Path
from typing import Any

from esphome import git, yaml_util
from esphome.components.substitutions.jinja import has_jinja
@@ -15,6 +19,7 @@ from esphome.const import (
CONF_PATH,
CONF_REF,
CONF_REFRESH,
CONF_SUBSTITUTIONS,
CONF_URL,
CONF_USERNAME,
CONF_VARS,
@@ -27,32 +32,43 @@ _LOGGER = logging.getLogger(__name__)
DOMAIN = CONF_PACKAGES


def valid_package_contents(package_config: dict):
"""Validates that a package_config that will be merged looks as much as possible to a valid config
to fail early on obvious mistakes."""
if isinstance(package_config, dict):
if CONF_URL in package_config:
# If a URL key is found, then make sure the config conforms to a remote package schema:
return REMOTE_PACKAGE_SCHEMA(package_config)
def validate_has_jinja(value: Any):
if not isinstance(value, str) or not has_jinja(value):
raise cv.Invalid("string does not contain Jinja syntax")
return value

# Validate manually since Voluptuous would regenerate dicts and lose metadata
# such as ESPHomeDataBase
for k, v in package_config.items():
if not isinstance(k, str):
raise cv.Invalid("Package content keys must be strings")
if isinstance(v, (dict, list, Remove)):
continue  # e.g. script: [], psram: !remove, logger: {level: debug}
if v is None:
continue  # e.g. web_server:
if isinstance(v, str) and has_jinja(v):
# e.g: remote package shorthand:
# package_name: github://esphome/repo/file.yaml@${ branch }
continue

raise cv.Invalid("Invalid component content in package definition")
return package_config
def valid_package_contents(allow_jinja: bool = True) -> Callable[[Any], dict]:
"""Returns a validator that checks if a package_config that will be merged looks as
much as possible to a valid config to fail early on obvious mistakes."""

raise cv.Invalid("Package contents must be a dict")
def validator(package_config: dict) -> dict:
if isinstance(package_config, dict):
if CONF_URL in package_config:
# If a URL key is found, then make sure the config conforms to a remote package schema:
return REMOTE_PACKAGE_SCHEMA(package_config)

# Validate manually since Voluptuous would regenerate dicts and lose metadata
# such as ESPHomeDataBase
for k, v in package_config.items():
if not isinstance(k, str):
raise cv.Invalid("Package content keys must be strings")
if isinstance(v, (dict, list, Remove)):
continue  # e.g. script: [], psram: !remove, logger: {level: debug}
if v is None:
continue  # e.g. web_server:
if allow_jinja and isinstance(v, str) and has_jinja(v):
# e.g: remote package shorthand:
# package_name: github://esphome/repo/file.yaml@${ branch }, or:
# switch: ${ expression that evals to a switch }
continue

raise cv.Invalid("Invalid component content in package definition")
return package_config

raise cv.Invalid("Package contents must be a dict")

return validator


def expand_file_to_files(config: dict):
@@ -142,7 +158,10 @@ REMOTE_PACKAGE_SCHEMA = cv.All(
PACKAGE_SCHEMA = cv.Any(  # A package definition is either:
validate_source_shorthand,  # A git URL shorthand string that expands to a remote package schema, or
REMOTE_PACKAGE_SCHEMA,  # a valid remote package schema, or
valid_package_contents,  # Something that at least looks like an actual package, e.g. {wifi:{ssid: xxx}}
validate_has_jinja,  # a Jinja string that may resolve to a package, or
valid_package_contents(
allow_jinja=True
),  # Something that at least looks like an actual package, e.g. {wifi:{ssid: xxx}}
# which will have to be fully validated later as per each component's schema.
)
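Condensed, the rules the new `valid_package_contents(allow_jinja=...)` factory enforces are: keys must be strings; values may be dicts, lists, `!remove` markers, or `None`; bare strings are only accepted when they contain Jinja and `allow_jinja` is set; anything else is rejected. A standalone sketch of that logic (the `has_jinja` check here is a simplified stand-in, and remote `url:` packages are omitted):

```python
from collections.abc import Callable
from typing import Any

def has_jinja(value: str) -> bool:
    # Simplified stand-in for esphome.components.substitutions.jinja.has_jinja
    return "${" in value or "{%" in value

def valid_package_contents(allow_jinja: bool = True) -> Callable[[Any], dict]:
    def validator(package_config: Any) -> dict:
        if not isinstance(package_config, dict):
            raise ValueError("Package contents must be a dict")
        for key, value in package_config.items():
            if not isinstance(key, str):
                raise ValueError("Package content keys must be strings")
            if value is None or isinstance(value, (dict, list)):
                continue  # e.g. web_server:, script: [], logger: {level: debug}
            if allow_jinja and isinstance(value, str) and has_jinja(value):
                continue  # e.g. switch: ${ expression that evals to a switch }
            raise ValueError("Invalid component content in package definition")
        return package_config
    return validator

strict = valid_package_contents(allow_jinja=False)
print(strict({"wifi": {"ssid": "xxx"}, "web_server": None}))  # accepted
try:
    strict({"switch": "${ some_switches }"})  # rejected when Jinja is disallowed
except ValueError as err:
    print("rejected:", err)
```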
@@ -235,32 +254,84 @@ def _process_remote_package(config: dict, skip_update: bool = False) -> dict:
return {"packages": packages}


def _process_package(package_config, config, skip_update: bool = False):
recursive_package = package_config
if CONF_URL in package_config:
package_config = _process_remote_package(package_config, skip_update)
if isinstance(package_config, dict):
recursive_package = do_packages_pass(package_config, skip_update)
return merge_config(recursive_package, config)


def do_packages_pass(config: dict, skip_update: bool = False):
def _walk_packages(
config: dict, callback: Callable[[dict], dict], validate_deprecated: bool = True
) -> dict:
if CONF_PACKAGES not in config:
return config
packages = config[CONF_PACKAGES]
with cv.prepend_path(CONF_PACKAGES):

# The following block and `validate_deprecated` parameter can be safely removed
# once single-package deprecation is effective
if validate_deprecated:
packages = CONFIG_SCHEMA(packages)

with cv.prepend_path(CONF_PACKAGES):
if isinstance(packages, dict):
for package_name, package_config in reversed(packages.items()):
with cv.prepend_path(package_name):
config = _process_package(package_config, config, skip_update)
package_config = callback(package_config)
packages[package_name] = _walk_packages(package_config, callback)
elif isinstance(packages, list):
for package_config in reversed(packages):
config = _process_package(package_config, config, skip_update)
for idx in reversed(range(len(packages))):
with cv.prepend_path(idx):
package_config = callback(packages[idx])
packages[idx] = _walk_packages(package_config, callback)
else:
raise cv.Invalid(
f"Packages must be a key to value mapping or list, got {type(packages)} instead"
)

del config[CONF_PACKAGES]
config[CONF_PACKAGES] = packages
return config


def do_packages_pass(config: dict, skip_update: bool = False) -> dict:
"""Processes, downloads and validates all packages in the config.
Also extracts and merges all substitutions found in packages into the main config substitutions.
"""
if CONF_PACKAGES not in config:
return config

substitutions = UserDict(config.pop(CONF_SUBSTITUTIONS, {}))

def process_package_callback(package_config: dict) -> dict:
"""This will be called for each package found in the config."""
package_config = PACKAGE_SCHEMA(package_config)
if isinstance(package_config, str):
return package_config  # Jinja string, skip processing
if CONF_URL in package_config:
package_config = _process_remote_package(package_config, skip_update)
# Extract substitutions from the package and merge them into the main substitutions:
substitutions.data = merge_config(
package_config.pop(CONF_SUBSTITUTIONS, {}), substitutions.data
)
return package_config

_walk_packages(config, process_package_callback)

if substitutions:
config[CONF_SUBSTITUTIONS] = substitutions.data

return config


def merge_packages(config: dict) -> dict:
"""Merges all packages into the main config and removes the `packages:` key."""
if CONF_PACKAGES not in config:
return config

# Build flat list of all package configs to merge in priority order:
merge_list: list[dict] = []

validate_package = valid_package_contents(allow_jinja=False)

def process_package_callback(package_config: dict) -> dict:
"""This will be called for each package found in the config."""
merge_list.append(validate_package(package_config))
return package_config

_walk_packages(config, process_package_callback, validate_deprecated=False)
# Merge all packages into the main config:
config = reduce(lambda new, old: merge_config(old, new), merge_list, config)
del config[CONF_PACKAGES]
return config
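Downstream, the intended call order is the one the updated tests exercise later in this diff: `do_packages_pass` validates packages, resolves remote ones, and hoists their `substitutions:` into the top-level config; the substitution pass runs; then `merge_packages` flattens every package into the main config and drops the `packages:` key. A minimal sketch of that flow on a toy config (assumes an ESPHome development checkout on the import path; the package contents are arbitrary examples):

```python
from esphome.components.packages import do_packages_pass, merge_packages

config = {
    "packages": {
        "base": {
            "substitutions": {"log_level": "DEBUG"},
            "logger": {"level": "DEBUG"},
        },
    },
    "sensor": [{"platform": "template", "id": "sensor_top"}],
}

config = do_packages_pass(config)  # hoists substitutions, fetches remote packages
# ... the substitution pass would normally run here ...
config = merge_packages(config)    # merges package contents, removes packages:

print(config["logger"])            # {'level': 'DEBUG'}
print(config["substitutions"])     # {'log_level': 'DEBUG'}
```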
@@ -1012,14 +1012,20 @@ def validate_config(

CORE.raw_config = config

# 1.1. Resolve !extend and !remove and check for REPLACEME
# 1.1. Merge packages
if CONF_PACKAGES in config:
from esphome.components.packages import merge_packages

config = merge_packages(config)

# 1.2. Resolve !extend and !remove and check for REPLACEME
# After this step, there will not be any Extend or Remove values in the config anymore
try:
resolve_extend_remove(config)
except vol.Invalid as err:
result.add_error(err)

# 1.2. Load external_components
# 1.3. Load external_components
if CONF_EXTERNAL_COMPONENTS in config:
from esphome.components.external_components import do_external_components_pass
@@ -4,7 +4,7 @@ from enum import Enum

from esphome.enum import StrEnum

__version__ = "2025.12.0-dev"
__version__ = "2025.12.0b1"

ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
VALID_SUBSTITUTIONS_CHARACTERS = (
@@ -12,7 +12,7 @@ platformio==6.1.18 # When updating platformio, also update /docker/Dockerfile
esptool==5.1.0
click==8.1.7
esphome-dashboard==20251013.0
aioesphomeapi==43.1.0
aioesphomeapi==43.2.1
zeroconf==0.148.0
puremagic==1.30
ruamel.yaml==0.18.16 # dashboard_import
@@ -87,6 +87,7 @@ ISOLATED_COMPONENTS = {
"neopixelbus": "RMT type conflict with ESP32 Arduino/ESP-IDF headers (enum vs struct rmt_channel_t)",
"packages": "cannot merge packages",
"tinyusb": "Conflicts with usb_host component - cannot be used together",
"usb_cdc_acm": "Depends on tinyusb which conflicts with usb_host",
}
@@ -215,6 +215,20 @@ def prepare_symbol_changes_data(
}


def format_components_str(components: list[str]) -> str:
"""Format a list of components for display.

Args:
components: List of component names

Returns:
Formatted string with backtick-quoted component names
"""
if len(components) == 1:
return f"`{components[0]}`"
return ", ".join(f"`{c}`" for c in sorted(components))


def prepare_component_breakdown_data(
target_analysis: dict | None, pr_analysis: dict | None
) -> list[tuple[str, int, int, int]] | None:
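A quick usage note for the helper added above (function body copied from the hunk; the component names are arbitrary examples):

```python
def format_components_str(components: list[str]) -> str:
    # Copied from the diff above.
    if len(components) == 1:
        return f"`{components[0]}`"
    return ", ".join(f"`{c}`" for c in sorted(components))

print(format_components_str(["wifi"]))                # `wifi`
print(format_components_str(["wifi", "api", "ota"]))  # `api`, `ota`, `wifi`
```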
@@ -316,11 +330,10 @@ def create_comment_body(
}

# Format components list
context["components_str"] = format_components_str(components)
if len(components) == 1:
context["components_str"] = f"`{components[0]}`"
context["config_note"] = "a representative test configuration"
else:
context["components_str"] = ", ".join(f"`{c}`" for c in sorted(components))
context["config_note"] = (
f"a merged configuration with {len(components)} components"
)
@@ -502,6 +515,43 @@ def post_or_update_comment(pr_number: str, comment_body: str) -> None:
print("Comment posted/updated successfully", file=sys.stderr)


def create_target_unavailable_comment(
pr_data: dict,
) -> str:
"""Create a comment body when target branch data is unavailable.

This happens when the target branch (dev/beta/release) fails to build.
This can occur because:
1. The target branch has a build issue independent of this PR
2. This PR fixes a build issue on the target branch
In either case, we only care that the PR branch builds successfully.

Args:
pr_data: Dictionary with PR branch analysis results

Returns:
Formatted comment body
"""
components = pr_data.get("components", [])
platform = pr_data.get("platform", "unknown")
pr_ram = pr_data.get("ram_bytes", 0)
pr_flash = pr_data.get("flash_bytes", 0)

env = Environment(
loader=FileSystemLoader(TEMPLATE_DIR),
trim_blocks=True,
lstrip_blocks=True,
)
template = env.get_template("ci_memory_impact_target_unavailable.j2")
return template.render(
comment_marker=COMMENT_MARKER,
components_str=format_components_str(components),
platform=platform,
pr_ram=format_bytes(pr_ram),
pr_flash=format_bytes(pr_flash),
)


def main() -> int:
"""Main entry point."""
parser = argparse.ArgumentParser(
@@ -523,15 +573,25 @@ def main() -> int:

# Load analysis JSON files (all data comes from JSON for security)
target_data: dict | None = load_analysis_json(args.target_json)
if not target_data:
print("Error: Failed to load target analysis JSON", file=sys.stderr)
sys.exit(1)

pr_data: dict | None = load_analysis_json(args.pr_json)

# PR data is required - if the PR branch can't build, that's a real error
if not pr_data:
print("Error: Failed to load PR analysis JSON", file=sys.stderr)
sys.exit(1)

# Target data is optional - target branch (dev) may fail to build because:
# 1. The target branch has a build issue independent of this PR
# 2. This PR fixes a build issue on the target branch
if not target_data:
print(
"Warning: Target branch analysis unavailable, posting limited comment",
file=sys.stderr,
)
comment_body = create_target_unavailable_comment(pr_data)
post_or_update_comment(args.pr_number, comment_body)
return 0

# Extract detailed analysis if available
target_analysis: dict | None = None
pr_analysis: dict | None = None
19  script/templates/ci_memory_impact_target_unavailable.j2  Normal file
@@ -0,0 +1,19 @@
{{ comment_marker }}
## Memory Impact Analysis

**Components:** {{ components_str }}
**Platform:** `{{ platform }}`

| Metric | This PR |
|--------|---------|
| **RAM** | {{ pr_ram }} |
| **Flash** | {{ pr_flash }} |

> ⚠️ **Target branch comparison unavailable** - The target branch failed to build.
> This can happen when the target branch has a build issue, or when this PR fixes a build issue on the target branch.
> The PR branch compiled successfully with the memory usage shown above.

---
> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation).

*This analysis runs automatically when components change.*
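A standalone sketch of rendering this new template with jinja2, mirroring the `Environment` setup in `create_target_unavailable_comment` above; the marker and metric values are illustrative, not the real `COMMENT_MARKER` or CI output:

```python
from jinja2 import Environment, FileSystemLoader

# Assumes the template lives under script/templates/ as added in this diff.
env = Environment(
    loader=FileSystemLoader("script/templates"),
    trim_blocks=True,
    lstrip_blocks=True,
)
template = env.get_template("ci_memory_impact_target_unavailable.j2")
print(template.render(
    comment_marker="<!-- illustrative-memory-impact-marker -->",
    components_str="`api`, `wifi`",
    platform="esp32-idf",
    pr_ram="41.2 KB",
    pr_flash="512.3 KB",
))
```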
@@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch

import pytest

from esphome.components.packages import CONFIG_SCHEMA, do_packages_pass
from esphome.components.packages import CONFIG_SCHEMA, do_packages_pass, merge_packages
from esphome.config import resolve_extend_remove
from esphome.config_helpers import Extend, Remove
import esphome.config_validation as cv
@@ -27,6 +27,7 @@ from esphome.const import (
CONF_REFRESH,
CONF_SENSOR,
CONF_SSID,
CONF_SUBSTITUTIONS,
CONF_UPDATE_INTERVAL,
CONF_URL,
CONF_VARS,
@@ -68,11 +69,12 @@ def fixture_basic_esphome():
def packages_pass(config):
"""Wrapper around packages_pass that also resolves Extend and Remove."""
config = do_packages_pass(config)
config = merge_packages(config)
resolve_extend_remove(config)
return config

def test_package_unused(basic_esphome, basic_wifi):
def test_package_unused(basic_esphome, basic_wifi) -> None:
"""
Ensures do_package_pass does not change a config if packages aren't used.
"""
@@ -82,7 +84,7 @@ def test_package_unused(basic_esphome, basic_wifi):
assert actual == config


def test_package_invalid_dict(basic_esphome, basic_wifi):
def test_package_invalid_dict(basic_esphome, basic_wifi) -> None:
"""
If a url: key is present, it's expected to be well-formed remote package spec. Ensure an error is raised if not.
Any other simple dict passed as a package will be merged as usual but may fail later validation.
@@ -107,7 +109,7 @@ def test_package_invalid_dict(basic_esphome, basic_wifi):
],
],
)
def test_package_shorthand(packages):
def test_package_shorthand(packages) -> None:
CONFIG_SCHEMA(packages)


@@ -133,12 +135,12 @@ def test_package_shorthand(packages):
[3],
],
)
def test_package_invalid(packages):
def test_package_invalid(packages) -> None:
with pytest.raises(cv.Invalid):
CONFIG_SCHEMA(packages)


def test_package_include(basic_wifi, basic_esphome):
def test_package_include(basic_wifi, basic_esphome) -> None:
"""
Tests the simple case where an independent config present in a package is added to the top-level config as is.

@@ -159,7 +161,7 @@ def test_single_package(
basic_esphome,
basic_wifi,
caplog: pytest.LogCaptureFixture,
):
) -> None:
"""
Tests the simple case where a single package is added to the top-level config as is.
In this test, the CONF_WIFI config is expected to be simply added to the top-level config.
@@ -179,7 +181,7 @@ def test_single_package(
assert "This method for including packages will go away in 2026.7.0" in caplog.text


def test_package_append(basic_wifi, basic_esphome):
def test_package_append(basic_wifi, basic_esphome) -> None:
"""
Tests the case where a key is present in both a package and top-level config.

@@ -204,7 +206,7 @@ def test_package_append(basic_wifi, basic_esphome):
assert actual == expected


def test_package_override(basic_wifi, basic_esphome):
def test_package_override(basic_wifi, basic_esphome) -> None:
"""
Ensures that the top-level configuration takes precedence over duplicate keys defined in a package.

@@ -228,7 +230,7 @@ def test_package_override(basic_wifi, basic_esphome):
assert actual == expected


def test_multiple_package_order():
def test_multiple_package_order() -> None:
"""
Ensures that mutiple packages are merged in order.
"""
@@ -257,7 +259,7 @@ def test_multiple_package_order():
assert actual == expected


def test_package_list_merge():
def test_package_list_merge() -> None:
"""
Ensures lists defined in both a package and the top-level config are merged correctly
"""
@@ -313,7 +315,7 @@ def test_package_list_merge():
assert actual == expected


def test_package_list_merge_by_id():
def test_package_list_merge_by_id() -> None:
"""
Ensures that components with matching IDs are merged correctly.

@@ -391,7 +393,7 @@ def test_package_list_merge_by_id():
assert actual == expected


def test_package_merge_by_id_with_list():
def test_package_merge_by_id_with_list() -> None:
"""
Ensures that components with matching IDs are merged correctly when their configuration contains lists.

@@ -430,7 +432,7 @@ def test_package_merge_by_id_with_list():
assert actual == expected


def test_package_merge_by_missing_id():
def test_package_merge_by_missing_id() -> None:
"""
Ensures that a validation error is thrown when trying to extend a missing ID.
"""
@@ -466,7 +468,7 @@ def test_package_merge_by_missing_id():
assert error_raised


def test_package_list_remove_by_id():
def test_package_list_remove_by_id() -> None:
"""
Ensures that components with matching IDs are removed correctly.

@@ -517,7 +519,7 @@ def test_package_list_remove_by_id():
assert actual == expected


def test_multiple_package_list_remove_by_id():
def test_multiple_package_list_remove_by_id() -> None:
"""
Ensures that components with matching IDs are removed correctly.

@@ -563,7 +565,7 @@ def test_multiple_package_list_remove_by_id():
assert actual == expected


def test_package_dict_remove_by_id(basic_wifi, basic_esphome):
def test_package_dict_remove_by_id(basic_wifi, basic_esphome) -> None:
"""
Ensures that components with missing IDs are removed from dict.
Ensures that the top-level configuration takes precedence over duplicate keys defined in a package.
@@ -584,7 +586,7 @@ def test_package_dict_remove_by_id(basic_wifi, basic_esphome):
assert actual == expected


def test_package_remove_by_missing_id():
def test_package_remove_by_missing_id() -> None:
"""
Ensures that components with missing IDs are not merged.
"""
@@ -632,7 +634,7 @@ def test_package_remove_by_missing_id():
@patch("esphome.git.clone_or_update")
def test_remote_packages_with_files_list(
mock_clone_or_update, mock_is_file, mock_load_yaml
):
) -> None:
"""
Ensures that packages are loaded as mixed list of dictionary and strings
"""
@@ -704,7 +706,7 @@ def test_remote_packages_with_files_list(
@patch("esphome.git.clone_or_update")
def test_remote_packages_with_files_and_vars(
mock_clone_or_update, mock_is_file, mock_load_yaml
):
) -> None:
"""
Ensures that packages are loaded as mixed list of dictionary and strings with vars
"""
@@ -793,3 +795,199 @@ def test_remote_packages_with_files_and_vars(

actual = packages_pass(config)
assert actual == expected

def test_packages_merge_substitutions() -> None:
"""
Tests that substitutions from packages in a complex package hierarchy
are extracted and merged into the top-level config.
"""
config = {
CONF_SUBSTITUTIONS: {
"a": 1,
"b": 2,
"c": 3,
},
CONF_PACKAGES: {
"package1": {
"logger": {
"level": "DEBUG",
},
CONF_PACKAGES: [
{
CONF_SUBSTITUTIONS: {
"a": 10,
"e": 5,
},
"sensor": [
{"platform": "template", "id": "sensor1"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor2"},
],
},
"package2": {
"logger": {
"level": "VERBOSE",
},
},
"package3": {
CONF_PACKAGES: [
{
CONF_PACKAGES: [
{
CONF_SUBSTITUTIONS: {
"b": 20,
"d": 4,
},
"sensor": [
{"platform": "template", "id": "sensor3"},
],
},
],
CONF_SUBSTITUTIONS: {
"b": 20,
"d": 6,
},
"sensor": [
{"platform": "template", "id": "sensor4"},
],
},
],
},
},
}

expected = {
CONF_SUBSTITUTIONS: {"a": 1, "e": 5, "b": 2, "d": 6, "c": 3},
CONF_PACKAGES: {
"package1": {
"logger": {
"level": "DEBUG",
},
CONF_PACKAGES: [
{
"sensor": [
{"platform": "template", "id": "sensor1"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor2"},
],
},
"package2": {
"logger": {
"level": "VERBOSE",
},
},
"package3": {
CONF_PACKAGES: [
{
CONF_PACKAGES: [
{
"sensor": [
{"platform": "template", "id": "sensor3"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor4"},
],
},
],
},
},
}

actual = do_packages_pass(config)
assert actual == expected


def test_package_merge() -> None:
"""
Tests that all packages are merged into the top-level config.
"""
config = {
CONF_SUBSTITUTIONS: {"a": 1, "e": 5, "b": 2, "d": 6, "c": 3},
CONF_PACKAGES: {
"package1": {
"logger": {
"level": "DEBUG",
},
CONF_PACKAGES: [
{
"sensor": [
{"platform": "template", "id": "sensor1"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor2"},
],
},
"package2": {
"logger": {
"level": "VERBOSE",
},
},
"package3": {
CONF_PACKAGES: [
{
CONF_PACKAGES: [
{
"sensor": [
{"platform": "template", "id": "sensor3"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor4"},
],
},
],
},
},
}
expected = {
"sensor": [
{"platform": "template", "id": "sensor1"},
{"platform": "template", "id": "sensor2"},
{"platform": "template", "id": "sensor3"},
{"platform": "template", "id": "sensor4"},
],
"logger": {"level": "VERBOSE"},
CONF_SUBSTITUTIONS: {"a": 1, "e": 5, "b": 2, "d": 6, "c": 3},
}
actual = merge_packages(config)

assert actual == expected


@pytest.mark.parametrize(
"invalid_package",
[
6,
"some string",
["some string"],
None,
True,
{"some_component": 8},
{3: 2},
{"some_component": r"${unevaluated expression}"},
],
)
def test_package_merge_invalid(invalid_package) -> None:
"""
Tests that trying to merge an invalid package raises an error.
"""
config = {
CONF_PACKAGES: {
"some_package": invalid_package,
},
}

with pytest.raises(cv.Invalid):
merge_packages(config)
@@ -2,6 +2,6 @@ substitutions:
enable_rx_pin: GPIO13

packages:
uart: !include ../../test_build_components/common/uart_1200_none_2stopbits/esp32-idf.yaml
uart_1200_none_2stopbits: !include ../../test_build_components/common/uart_1200_none_2stopbits/esp32-idf.yaml

<<: !include common.yaml

@@ -2,6 +2,6 @@ substitutions:
enable_rx_pin: GPIO15

packages:
uart: !include ../../test_build_components/common/uart_1200_none_2stopbits/esp8266-ard.yaml
uart_1200_none_2stopbits: !include ../../test_build_components/common/uart_1200_none_2stopbits/esp8266-ard.yaml

<<: !include common.yaml

@@ -2,6 +2,6 @@ substitutions:
enable_rx_pin: GPIO3

packages:
uart: !include ../../test_build_components/common/uart_1200_none_2stopbits/rp2040-ard.yaml
uart_1200_none_2stopbits: !include ../../test_build_components/common/uart_1200_none_2stopbits/rp2040-ard.yaml

<<: !include common.yaml
@@ -52,6 +52,7 @@ void CustomAPIDeviceComponent::on_service_with_arrays(std::vector<bool> bool_arr
}
}

// NOLINTNEXTLINE(performance-unnecessary-value-param)
void CustomAPIDeviceComponent::on_ha_state_changed(std::string entity_id, std::string state) {
ESP_LOGI(TAG, "Home Assistant state changed for %s: %s", entity_id.c_str(), state.c_str());
ESP_LOGI(TAG, "This subscription uses std::string API for backward compatibility");

@@ -24,6 +24,7 @@ class CustomAPIDeviceComponent : public Component, public CustomAPIDevice {
std::vector<float> float_array, std::vector<std::string> string_array);

// Test Home Assistant state subscription with std::string API
// NOLINTNEXTLINE(performance-unnecessary-value-param)
void on_ha_state_changed(std::string entity_id, std::string state);
};
@@ -0,0 +1,43 @@
fancy_component: &id001
- id: component9
value: 9
some_component:
- id: component1
value: 1
- id: component2
value: 2
- id: component3
value: 3
- id: component4
value: 4
- id: component5
value: 79
power: 200
- id: component6
value: 6
- id: component7
value: 7
switch: &id002
- platform: gpio
id: switch1
pin: 12
- platform: gpio
id: switch2
pin: 13
display:
- platform: ili9xxx
dimensions:
width: 100
height: 480
substitutions:
extended_component: component5
package_options:
alternative_package:
alternative_component:
- id: component8
value: 8
fancy_package:
fancy_component: *id001
pin: 12
some_switches: *id002
package_selection: fancy_package
@@ -0,0 +1,61 @@
substitutions:
package_options:
alternative_package:
alternative_component:
- id: component8
value: 8
fancy_package:
fancy_component:
- id: component9
value: 9

pin: 12
some_switches:
- platform: gpio
id: switch1
pin: ${pin}
- platform: gpio
id: switch2
pin: ${pin+1}

package_selection: fancy_package

packages:
- ${ package_options[package_selection] }
- some_component:
- id: component1
value: 1
- some_component:
- id: component2
value: 2
- switch: ${ some_switches }
- packages:
package_with_defaults: !include
file: display.yaml
vars:
native_width: 100
high_dpi: false
my_package:
packages:
- packages:
special_package:
substitutions:
extended_component: component5
some_component:
- id: component3
value: 3
some_component:
- id: component4
value: 4
- id: !extend ${ extended_component }
power: 200
value: 79
some_component:
- id: component5
value: 5

some_component:
- id: component6
value: 6
- id: component7
value: 7
@@ -8,7 +8,7 @@ import pytest

from esphome import config as config_module, yaml_util
from esphome.components import substitutions
from esphome.components.packages import do_packages_pass
from esphome.components.packages import do_packages_pass, merge_packages
from esphome.config import resolve_extend_remove
from esphome.config_helpers import merge_config
from esphome.const import CONF_SUBSTITUTIONS
@@ -74,6 +74,8 @@ def verify_database(value: Any, path: str = "") -> str | None:
return None
if isinstance(value, dict):
for k, v in value.items():
if path == "" and k == CONF_SUBSTITUTIONS:
return None  # ignore substitutions key at top level since it is merged.
key_result = verify_database(k, f"{path}/{k}")
if key_result is not None:
return key_result
@@ -144,6 +146,8 @@ def test_substitutions_fixtures(

substitutions.do_substitution_pass(config, command_line_substitutions)

config = merge_packages(config)

resolve_extend_remove(config)
verify_database_result = verify_database(config)
if verify_database_result is not None: