mirror of https://github.com/esphome/esphome.git synced 2025-11-01 23:51:47 +00:00

Compare commits


38 Commits

Author SHA1 Message Date
Jesse Hills
d2df232706 Merge pull request #10763 from esphome/bump-2025.9.0
2025.9.0
2025-09-17 18:51:21 +12:00
Jesse Hills
404e679e66 Bump version to 2025.9.0 2025-09-17 11:02:12 +12:00
Jesse Hills
8d401ad05a Merge pull request #10761 from esphome/bump-2025.9.0b4
2025.9.0b4
2025-09-17 10:50:15 +12:00
Jesse Hills
e542816f7d Bump version to 2025.9.0b4 2025-09-17 09:22:54 +12:00
J. Nick Koston
12cadf0a04 [core] Fix clean build files to properly clear PlatformIO cache (#10754) 2025-09-17 09:22:54 +12:00
J. Nick Koston
adc3d3127d [wizard] Fix KeyError when running wizard with empty OTA password (#10753) 2025-09-17 09:22:54 +12:00
J. Nick Koston
61ab682099 Add additional coverage for util and writer (#10683) 2025-09-17 09:22:54 +12:00
Jesse Hills
c05b7cca5e Merge pull request #10752 from esphome/bump-2025.9.0b3
2025.9.0b3
2025-09-17 00:15:06 +12:00
Jesse Hills
6ac395da6d Bump version to 2025.9.0b3 2025-09-16 20:35:23 +12:00
jokujossai
54616ae1b4 [ade7880] fix channel a voltage registry (#10750) 2025-09-16 20:35:23 +12:00
jokujossai
e33dcda907 [mqtt] fix publish payload length when payload contains null characters (#10744) 2025-09-16 20:35:23 +12:00
J. Nick Koston
04c1b90e57 [ethernet] Conditionally compile PHY-specific code to reduce flash usage (#10747) 2025-09-16 20:35:23 +12:00
J. Nick Koston
ddb8fedef7 [dashboard] Fix archive handler to properly delete build folders using correct path (#10724) 2025-09-16 20:35:23 +12:00
J. Nick Koston
04f4f79cb4 [select] Use const references to avoid unnecessary vector copies (#10741) 2025-09-16 20:35:23 +12:00
J. Nick Koston
8890071360 Add additional test coverage ahead of Path conversion (#10700) 2025-09-16 20:35:23 +12:00
J. Nick Koston
4b3a997a8e Improve coverage for various core modules (#10663) 2025-09-16 20:35:23 +12:00
Jesse Hills
2a4ab6a811 Merge pull request #10725 from esphome/bump-2025.9.0b2
2025.9.0b2
2025-09-15 18:28:51 +12:00
Jesse Hills
971de64494 Bump version to 2025.9.0b2 2025-09-15 12:34:56 +12:00
J. Nick Koston
926fdcbecd [esp32_ble] Optimize BLE hex formatting to eliminate sprintf dependency (#10714)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-09-15 12:34:56 +12:00
J. Nick Koston
6b147312cd [wifi] Optimize WiFi MAC formatting to eliminate sprintf dependency (#10715)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-09-15 12:34:56 +12:00
J. Nick Koston
2d9152d9b9 [md5] Optimize MD5::get_hex() to eliminate sprintf dependency (#10710) 2025-09-15 12:34:56 +12:00
dependabot[bot]
24f9550ce5 Bump aioesphomeapi from 40.2.0 to 40.2.1 (#10721)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-15 12:34:56 +12:00
Big Mike
3427aaab8c ina2xx should be total increasing for energy sensor (#10711) 2025-09-15 12:34:56 +12:00
J. Nick Koston
4e17d14acc [scheduler] Fix timing accumulation in scheduler causing incorrect execution measurements (#10719) 2025-09-15 12:34:56 +12:00
J. Nick Koston
1750f02ef3 [api] Optimize HelloResponse server_info to reduce memory usage (#10701) 2025-09-15 12:34:56 +12:00
J. Nick Koston
ae158179bd [api] Revert unneeded GetTime bidirectional support added in #9790 (#10702) 2025-09-15 12:34:55 +12:00
J. Nick Koston
c601494779 [core] Optimize MAC address formatting to eliminate sprintf dependency (#10713) 2025-09-15 12:34:55 +12:00
J. Nick Koston
646f4e66be [ethernet] Fix permanent component failure from undocumented ESP_FAIL in IPv6 setup (#10708) 2025-09-15 12:34:55 +12:00
dependabot[bot]
5b5e5c213c Bump aioesphomeapi from 40.1.0 to 40.2.0 (#10703)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-15 12:34:55 +12:00
Markus
46235684b1 [core] fix upload to device via MQTT IP lookup (e.g. when mDNS is disable) (#10632)
Co-authored-by: J. Nick Koston <nick@koston.org>
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
Co-authored-by: J. Nick Koston <nick+github@koston.org>
2025-09-15 12:34:55 +12:00
J. Nick Koston
5b702a1efa Add additional dashboard and main tests (#10688)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-15 12:34:55 +12:00
J. Nick Koston
56e9fd2e38 [tests] Add upload_program and show_logs test coverage to prevent regressions (#10684) 2025-09-15 12:34:55 +12:00
J. Nick Koston
65f15a706f Add some more coverage for dashboard web_server (#10682) 2025-09-15 12:34:55 +12:00
J. Nick Koston
eee64cc3a6 Add comprehensive tests for choose_upload_log_host to prevent regressions (#10679)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
2025-09-15 12:34:55 +12:00
J. Nick Koston
f43fb3c3a3 [core] Add millisecond precision to logging timestamps (#10677) 2025-09-15 12:34:55 +12:00
rwrozelle
79b0025fe6 Openthread Fix Factory Reset (#9281)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-09-15 12:34:55 +12:00
Jesse Hills
c6a039a72f [adc] Fix FILTER_SOURCE_FILES location (#10673) 2025-09-15 12:34:54 +12:00
esphomebot
6f1fa094c2 Update webserver local assets to 20250910-110003 (#10668) 2025-09-15 12:34:54 +12:00
72 changed files with 2232 additions and 1392 deletions

View File

@@ -11,7 +11,7 @@ ci:
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.13.0
rev: v0.12.12
hooks:
# Run the linter.
- id: ruff

View File

@@ -139,7 +139,6 @@ esphome/components/ens160_base/* @latonita @vincentscode
esphome/components/ens160_i2c/* @latonita
esphome/components/ens160_spi/* @latonita
esphome/components/ens210/* @itn3rd77
esphome/components/epdiy/* @jesserockz
esphome/components/es7210/* @kahrendt
esphome/components/es7243e/* @kbx81
esphome/components/es8156/* @kbx81

View File

@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2025.10.0-dev
PROJECT_NUMBER = 2025.9.0
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@@ -113,7 +113,7 @@ void ADE7880::update() {
if (this->channel_a_ != nullptr) {
auto *chan = this->channel_a_;
this->update_sensor_from_s24zp_register16_(chan->current, AIRMS, [](float val) { return val / 100000.0f; });
this->update_sensor_from_s24zp_register16_(chan->voltage, BVRMS, [](float val) { return val / 10000.0f; });
this->update_sensor_from_s24zp_register16_(chan->voltage, AVRMS, [](float val) { return val / 10000.0f; });
this->update_sensor_from_s24zp_register16_(chan->active_power, AWATT, [](float val) { return val / 100.0f; });
this->update_sensor_from_s24zp_register16_(chan->apparent_power, AVA, [](float val) { return val / 100.0f; });
this->update_sensor_from_s16_register16_(chan->power_factor, APF,

View File

@@ -27,9 +27,6 @@ service APIConnection {
rpc subscribe_logs (SubscribeLogsRequest) returns (void) {}
rpc subscribe_homeassistant_services (SubscribeHomeassistantServicesRequest) returns (void) {}
rpc subscribe_home_assistant_states (SubscribeHomeAssistantStatesRequest) returns (void) {}
rpc get_time (GetTimeRequest) returns (GetTimeResponse) {
option (needs_authentication) = false;
}
rpc execute_service (ExecuteServiceRequest) returns (void) {}
rpc noise_encryption_set_key (NoiseEncryptionSetKeyRequest) returns (NoiseEncryptionSetKeyResponse) {}
@@ -809,12 +806,12 @@ message HomeAssistantStateResponse {
// ==================== IMPORT TIME ====================
message GetTimeRequest {
option (id) = 36;
option (source) = SOURCE_BOTH;
option (source) = SOURCE_SERVER;
}
message GetTimeResponse {
option (id) = 37;
option (source) = SOURCE_BOTH;
option (source) = SOURCE_CLIENT;
option (no_delay) = true;
fixed32 epoch_seconds = 1;

View File

@@ -42,6 +42,8 @@ static constexpr uint8_t MAX_PING_RETRIES = 60;
static constexpr uint16_t PING_RETRY_INTERVAL = 1000;
static constexpr uint32_t KEEPALIVE_DISCONNECT_TIMEOUT = (KEEPALIVE_TIMEOUT_MS * 5) / 2;
static constexpr auto ESPHOME_VERSION_REF = StringRef::from_lit(ESPHOME_VERSION);
static const char *const TAG = "api.connection";
#ifdef USE_CAMERA
static const int CAMERA_STOP_STREAM = 5000;
@@ -1081,12 +1083,6 @@ void APIConnection::on_get_time_response(const GetTimeResponse &value) {
}
#endif
bool APIConnection::send_get_time_response(const GetTimeRequest &msg) {
GetTimeResponse resp;
resp.epoch_seconds = ::time(nullptr);
return this->send_message(resp, GetTimeResponse::MESSAGE_TYPE);
}
#ifdef USE_BLUETOOTH_PROXY
void APIConnection::subscribe_bluetooth_le_advertisements(const SubscribeBluetoothLEAdvertisementsRequest &msg) {
bluetooth_proxy::global_bluetooth_proxy->subscribe_api_connection(this, msg.flags);
@@ -1376,9 +1372,8 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) {
HelloResponse resp;
resp.api_version_major = 1;
resp.api_version_minor = 12;
// Temporary string for concatenation - will be valid during send_message call
std::string server_info = App.get_name() + " (esphome v" ESPHOME_VERSION ")";
resp.set_server_info(StringRef(server_info));
// Send only the version string - the client only logs this for debugging and doesn't use it otherwise
resp.set_server_info(ESPHOME_VERSION_REF);
resp.set_name(StringRef(App.get_name()));
#ifdef USE_API_PASSWORD
@@ -1425,8 +1420,6 @@ bool APIConnection::send_device_info_response(const DeviceInfoRequest &msg) {
std::string mac_address = get_mac_address_pretty();
resp.set_mac_address(StringRef(mac_address));
// Compile-time StringRef constants
static constexpr auto ESPHOME_VERSION_REF = StringRef::from_lit(ESPHOME_VERSION);
resp.set_esphome_version(ESPHOME_VERSION_REF);
resp.set_compilation_time(App.get_compilation_time_ref());

View File

@@ -219,7 +219,6 @@ class APIConnection final : public APIServerConnection {
#ifdef USE_API_HOMEASSISTANT_STATES
void subscribe_home_assistant_states(const SubscribeHomeAssistantStatesRequest &msg) override;
#endif
bool send_get_time_response(const GetTimeRequest &msg) override;
#ifdef USE_API_SERVICES
void execute_service(const ExecuteServiceRequest &msg) override;
#endif

View File

@@ -921,14 +921,6 @@ bool GetTimeResponse::decode_32bit(uint32_t field_id, Proto32Bit value) {
}
return true;
}
void GetTimeResponse::encode(ProtoWriteBuffer buffer) const {
buffer.encode_fixed32(1, this->epoch_seconds);
buffer.encode_string(2, this->timezone_ref_);
}
void GetTimeResponse::calculate_size(ProtoSize &size) const {
size.add_fixed32(1, this->epoch_seconds);
size.add_length(1, this->timezone_ref_.size());
}
#ifdef USE_API_SERVICES
void ListEntitiesServicesArgument::encode(ProtoWriteBuffer buffer) const {
buffer.encode_string(1, this->name_ref_);

View File

@@ -1180,10 +1180,6 @@ class GetTimeResponse final : public ProtoDecodableMessage {
#endif
uint32_t epoch_seconds{0};
std::string timezone{};
StringRef timezone_ref_{};
void set_timezone(const StringRef &ref) { this->timezone_ref_ = ref; }
void encode(ProtoWriteBuffer buffer) const override;
void calculate_size(ProtoSize &size) const override;
#ifdef HAS_PROTO_MESSAGE_DUMP
void dump_to(std::string &out) const override;
#endif

View File

@@ -1113,13 +1113,7 @@ void GetTimeRequest::dump_to(std::string &out) const { out.append("GetTimeReques
void GetTimeResponse::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "GetTimeResponse");
dump_field(out, "epoch_seconds", this->epoch_seconds);
out.append(" timezone: ");
if (!this->timezone_ref_.empty()) {
out.append("'").append(this->timezone_ref_.c_str()).append("'");
} else {
out.append("'").append(this->timezone).append("'");
}
out.append("\n");
dump_field(out, "timezone", this->timezone);
}
#ifdef USE_API_SERVICES
void ListEntitiesServicesArgument::dump_to(std::string &out) const {

View File

@@ -160,15 +160,6 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
break;
}
#endif
case GetTimeRequest::MESSAGE_TYPE: {
GetTimeRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
ESP_LOGVV(TAG, "on_get_time_request: %s", msg.dump().c_str());
#endif
this->on_get_time_request(msg);
break;
}
case GetTimeResponse::MESSAGE_TYPE: {
GetTimeResponse msg;
msg.decode(msg_data, msg_size);
@@ -656,11 +647,6 @@ void APIServerConnection::on_subscribe_home_assistant_states_request(const Subsc
}
}
#endif
void APIServerConnection::on_get_time_request(const GetTimeRequest &msg) {
if (this->check_connection_setup_() && !this->send_get_time_response(msg)) {
this->on_fatal_error();
}
}
#ifdef USE_API_SERVICES
void APIServerConnection::on_execute_service_request(const ExecuteServiceRequest &msg) {
if (this->check_authenticated_()) {

View File

@@ -71,7 +71,7 @@ class APIServerConnectionBase : public ProtoService {
#ifdef USE_API_HOMEASSISTANT_STATES
virtual void on_home_assistant_state_response(const HomeAssistantStateResponse &value){};
#endif
virtual void on_get_time_request(const GetTimeRequest &value){};
virtual void on_get_time_response(const GetTimeResponse &value){};
#ifdef USE_API_SERVICES
@@ -226,7 +226,6 @@ class APIServerConnection : public APIServerConnectionBase {
#ifdef USE_API_HOMEASSISTANT_STATES
virtual void subscribe_home_assistant_states(const SubscribeHomeAssistantStatesRequest &msg) = 0;
#endif
virtual bool send_get_time_response(const GetTimeRequest &msg) = 0;
#ifdef USE_API_SERVICES
virtual void execute_service(const ExecuteServiceRequest &msg) = 0;
#endif
@@ -348,7 +347,6 @@ class APIServerConnection : public APIServerConnectionBase {
#ifdef USE_API_HOMEASSISTANT_STATES
void on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &msg) override;
#endif
void on_get_time_request(const GetTimeRequest &msg) override;
#ifdef USE_API_SERVICES
void on_execute_service_request(const ExecuteServiceRequest &msg) override;
#endif

View File

@@ -130,7 +130,9 @@ class BluetoothProxy final : public esp32_ble_tracker::ESPBTDeviceListener, publ
std::string get_bluetooth_mac_address_pretty() {
const uint8_t *mac = esp_bt_dev_get_address();
return str_snprintf("%02X:%02X:%02X:%02X:%02X:%02X", 17, mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
char buf[18];
format_mac_addr_upper(mac, buf);
return std::string(buf);
}
protected:

View File

@@ -1,107 +0,0 @@
import esphome.codegen as cg
from esphome.components import display, esp32
import esphome.config_validation as cv
from esphome.const import (
CONF_FULL_UPDATE_EVERY,
CONF_ID,
CONF_LAMBDA,
CONF_MODEL,
CONF_PAGES,
)
from esphome.cpp_generator import MockObj
CODEOWNERS = ["@jesserockz"]
DEPENDENCIES = ["esp32", "psram"]
CONF_POWER_OFF_DELAY_ENABLED = "power_off_delay_enabled"
epdiy_ns = cg.esphome_ns.namespace("epdiy")
EPDiyDisplay = epdiy_ns.class_("EPDiyDisplay", display.Display)
class EpdBoardDefinition(MockObj):
def __str__(self):
return f"&{self.base}"
class EpdDisplay_t(MockObj):
def __str__(self):
return f"&{self.base}"
EpdInitOptions = cg.global_ns.enum("EpdInitOptions")
class Model:
def __init__(
self,
*,
board_definition: MockObj,
display_t: MockObj,
init_options: MockObj,
width: int,
height: int,
vcom_mv: int = 0,
):
self.board_definition = board_definition
self.display_t = display_t
self.init_options = init_options
self.width = width
self.height = height
self.vcom_mv = vcom_mv
MODELS: dict[str, Model] = {
"lilygo_t5_4.7": Model(
board_definition=EpdBoardDefinition("epd_board_lilygo_t5_47"),
display_t=EpdDisplay_t("ED047TC2"),
init_options=(EpdInitOptions.EPD_LUT_64K, EpdInitOptions.EPD_FEED_QUEUE_8),
width=960,
height=540,
),
}
CONFIG_SCHEMA = cv.All(
display.FULL_DISPLAY_SCHEMA.extend(
{
cv.GenerateID(): cv.declare_id(EPDiyDisplay),
cv.Required(CONF_MODEL): cv.one_of(*MODELS.keys()),
cv.Optional(CONF_FULL_UPDATE_EVERY, default=10): cv.uint32_t,
cv.Optional(CONF_POWER_OFF_DELAY_ENABLED, default=False): cv.boolean,
}
).extend(cv.polling_component_schema("60s")),
cv.has_at_most_one_key(CONF_PAGES, CONF_LAMBDA),
cv.only_with_esp_idf, # When trying to add library via platformio it breaks, using as an idf component works fine
)
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await display.register_display(var, config)
model = MODELS[config[CONF_MODEL]]
cg.add(
var.set_model_details(
model.board_definition,
model.display_t,
cg.RawExpression(
f"static_cast<EpdInitOptions>({'|'.join(str(o) for o in model.init_options)})"
),
model.vcom_mv,
)
)
if CONF_LAMBDA in config:
lambda_ = await cg.process_lambda(
config[CONF_LAMBDA], [(display.DisplayRef, "it")], return_type=cg.void
)
cg.add(var.set_writer(lambda_))
cg.add(var.set_power_off_delay_enabled(config[CONF_POWER_OFF_DELAY_ENABLED]))
esp32.add_idf_component(
name="vroland/epdiy",
repo="https://github.com/vroland/epdiy",
ref="c61e9e923ce2418150d54f88cea5d196cdc40c54",
)

View File

@@ -1,76 +0,0 @@
#include "epdiy_display.h"
#include "esphome/core/application.h"
#ifdef USE_ESP_IDF
#include "esphome/core/log.h"
namespace esphome::epdiy {
static const char *const TAG = "epdiy";
static constexpr uint8_t TEMPERATURE = 23; // default temperature for e-paper displays
float EPDiyDisplay::get_setup_priority() const { return esphome::setup_priority::LATE; }
void EPDiyDisplay::setup() {
epd_init(this->board_definition_, this->display_t_, this->init_options_);
if (this->vcom_mv_ != 0) {
epd_set_vcom(this->vcom_mv_);
}
this->state_ = epd_hl_init(nullptr);
this->framebuffer_ = epd_hl_get_framebuffer(&this->state_);
}
void EPDiyDisplay::update() {
this->do_update_();
this->defer([this]() { this->flush_screen_changes_(); });
}
void EPDiyDisplay::fill(Color color) {
if (color == display::COLOR_OFF) {
memset(this->framebuffer_, 0xFF, this->get_buffer_length());
epd_poweron();
epd_hl_update_screen(&this->state_, MODE_GC16, TEMPERATURE);
epd_clear();
epd_poweroff();
App.feed_wdt();
} else {
Display::fill(color);
}
}
void EPDiyDisplay::flush_screen_changes_() {
epd_poweron();
epd_hl_update_screen(&this->state_, MODE_GC16, TEMPERATURE);
memset(this->state_.back_fb, 0xFF, this->get_buffer_length());
uint16_t delay = 0;
if (this->power_off_delay_enabled_) {
delay = 700;
}
this->set_timeout("poweroff", delay, []() { epd_poweroff(); });
}
void EPDiyDisplay::on_shutdown() {
epd_poweroff();
epd_deinit();
}
void HOT EPDiyDisplay::draw_pixel_at(int x, int y, Color color) {
if (color.red == 255 && color.green == 255 && color.blue == 255) {
epd_draw_pixel(x, y, 0, this->framebuffer_);
} else {
int col = (0.2126 * color.red) + (0.7152 * color.green) + (0.0722 * color.blue);
int cl = 255 - col;
epd_draw_pixel(x, y, cl, this->framebuffer_);
}
}
} // namespace esphome::epdiy
#endif // USE_ESP_IDF

View File

@@ -1,63 +0,0 @@
#pragma once
#ifdef USE_ESP_IDF
#include "esphome/core/component.h"
#include "esphome/components/display/display_buffer.h"
#include "esphome/components/display/display_color_utils.h"
#include "esphome/core/hal.h"
#include "esphome/core/version.h"
#include "epd_display.h"
#include "epd_highlevel.h"
namespace esphome::epdiy {
class EPDiyDisplay : public display::Display {
public:
float get_setup_priority() const override;
void setup() override;
void update() override;
void on_shutdown() override;
display::DisplayType get_display_type() override { return display::DisplayType::DISPLAY_TYPE_GRAYSCALE; }
int get_width_internal() override { return this->display_t_->width; };
int get_height_internal() override { return this->display_t_->height; };
size_t get_buffer_length() const { return this->display_t_->width / 2 * this->display_t_->height; }
void set_power_off_delay_enabled(bool power_off_delay_enabled) {
this->power_off_delay_enabled_ = power_off_delay_enabled;
}
void set_model_details(const EpdBoardDefinition *board_definition, const EpdDisplay_t *display_t,
enum EpdInitOptions init_options, uint16_t vcom) {
this->board_definition_ = board_definition;
this->display_t_ = display_t;
this->init_options_ = init_options;
this->vcom_mv_ = vcom;
}
void fill(Color color) override;
void draw_pixel_at(int x, int y, Color color) override;
protected:
void flush_screen_changes_();
EpdiyHighlevelState state_;
uint8_t *framebuffer_;
const EpdBoardDefinition *board_definition_;
const EpdDisplay_t *display_t_;
enum EpdInitOptions init_options_;
uint16_t vcom_mv_;
bool power_off_delay_enabled_;
};
} // namespace esphome::epdiy
#endif // USE_ESP_IDF

View File

@@ -353,7 +353,6 @@ SUPPORTED_PLATFORMIO_ESP_IDF_5X = [
# pioarduino versions that don't require a release number
# List based on https://github.com/pioarduino/esp-idf/releases
SUPPORTED_PIOARDUINO_ESP_IDF_5X = [
cv.Version(5, 5, 1),
cv.Version(5, 5, 0),
cv.Version(5, 4, 2),
cv.Version(5, 4, 1),

View File

@@ -7,6 +7,7 @@
#include <cstdio>
#include <cinttypes>
#include "esphome/core/log.h"
#include "esphome/core/helpers.h"
namespace esphome::esp32_ble {
@@ -169,22 +170,42 @@ bool ESPBTUUID::operator==(const ESPBTUUID &uuid) const {
}
esp_bt_uuid_t ESPBTUUID::get_uuid() const { return this->uuid_; }
std::string ESPBTUUID::to_string() const {
char buf[40]; // Enough for 128-bit UUID with dashes
char *pos = buf;
switch (this->uuid_.len) {
case ESP_UUID_LEN_16:
return str_snprintf("0x%02X%02X", 6, this->uuid_.uuid.uuid16 >> 8, this->uuid_.uuid.uuid16 & 0xff);
*pos++ = '0';
*pos++ = 'x';
*pos++ = format_hex_pretty_char(this->uuid_.uuid.uuid16 >> 12);
*pos++ = format_hex_pretty_char((this->uuid_.uuid.uuid16 >> 8) & 0x0F);
*pos++ = format_hex_pretty_char((this->uuid_.uuid.uuid16 >> 4) & 0x0F);
*pos++ = format_hex_pretty_char(this->uuid_.uuid.uuid16 & 0x0F);
*pos = '\0';
return std::string(buf);
case ESP_UUID_LEN_32:
return str_snprintf("0x%02" PRIX32 "%02" PRIX32 "%02" PRIX32 "%02" PRIX32, 10, (this->uuid_.uuid.uuid32 >> 24),
(this->uuid_.uuid.uuid32 >> 16 & 0xff), (this->uuid_.uuid.uuid32 >> 8 & 0xff),
this->uuid_.uuid.uuid32 & 0xff);
*pos++ = '0';
*pos++ = 'x';
for (int shift = 28; shift >= 0; shift -= 4) {
*pos++ = format_hex_pretty_char((this->uuid_.uuid.uuid32 >> shift) & 0x0F);
}
*pos = '\0';
return std::string(buf);
default:
case ESP_UUID_LEN_128:
std::string buf;
// Format: XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
for (int8_t i = 15; i >= 0; i--) {
buf += str_snprintf("%02X", 2, this->uuid_.uuid.uuid128[i]);
if (i == 6 || i == 8 || i == 10 || i == 12)
buf += "-";
uint8_t byte = this->uuid_.uuid.uuid128[i];
*pos++ = format_hex_pretty_char(byte >> 4);
*pos++ = format_hex_pretty_char(byte & 0x0F);
if (i == 12 || i == 10 || i == 8 || i == 6) {
*pos++ = '-';
}
}
return buf;
*pos = '\0';
return std::string(buf);
}
return "";
}

View File

@@ -31,12 +31,13 @@ void ESP32BLEBeacon::dump_config() {
char uuid[37];
char *bpos = uuid;
for (int8_t ii = 0; ii < 16; ++ii) {
bpos += sprintf(bpos, "%02X", this->uuid_[ii]);
*bpos++ = format_hex_pretty_char(this->uuid_[ii] >> 4);
*bpos++ = format_hex_pretty_char(this->uuid_[ii] & 0x0F);
if (ii == 3 || ii == 5 || ii == 7 || ii == 9) {
bpos += sprintf(bpos, "-");
*bpos++ = '-';
}
}
uuid[36] = '\0';
*bpos = '\0';
ESP_LOGCONFIG(TAG,
" UUID: %s, Major: %u, Minor: %u, Min Interval: %ums, Max Interval: %ums, Measured Power: %d"
", TX Power: %ddBm",

View File

@@ -43,6 +43,13 @@ void BLEClientBase::setup() {
void BLEClientBase::set_state(espbt::ClientState st) {
ESP_LOGV(TAG, "[%d] [%s] Set state %d", this->connection_index_, this->address_str_.c_str(), (int) st);
ESPBTClient::set_state(st);
if (st == espbt::ClientState::READY_TO_CONNECT) {
// Enable loop for state processing
this->enable_loop();
// Connect immediately instead of waiting for next loop
this->connect();
}
}
void BLEClientBase::loop() {
@@ -58,8 +65,8 @@ void BLEClientBase::loop() {
}
this->set_state(espbt::ClientState::IDLE);
}
// If idle, we can disable the loop as connect()
// will enable it again when a connection is needed.
// If its idle, we can disable the loop as set_state
// will enable it again when we need to connect.
else if (this->state_ == espbt::ClientState::IDLE) {
this->disable_loop();
}
@@ -101,20 +108,9 @@ bool BLEClientBase::parse_device(const espbt::ESPBTDevice &device) {
#endif
void BLEClientBase::connect() {
// Prevent duplicate connection attempts
if (this->state_ == espbt::ClientState::CONNECTING || this->state_ == espbt::ClientState::CONNECTED ||
this->state_ == espbt::ClientState::ESTABLISHED) {
ESP_LOGW(TAG, "[%d] [%s] Connection already in progress, state=%s", this->connection_index_,
this->address_str_.c_str(), espbt::client_state_to_string(this->state_));
return;
}
ESP_LOGI(TAG, "[%d] [%s] 0x%02x Connecting", this->connection_index_, this->address_str_.c_str(),
this->remote_addr_type_);
this->paired_ = false;
// Enable loop for state processing
this->enable_loop();
// Immediately transition to CONNECTING to prevent duplicate connection attempts
this->set_state(espbt::ClientState::CONNECTING);
// Determine connection parameters based on connection type
if (this->connection_type_ == espbt::ConnectionType::V3_WITHOUT_CACHE) {
@@ -172,7 +168,7 @@ void BLEClientBase::unconditional_disconnect() {
this->log_gattc_warning_("esp_ble_gattc_close", err);
}
if (this->state_ == espbt::ClientState::DISCOVERED) {
if (this->state_ == espbt::ClientState::READY_TO_CONNECT || this->state_ == espbt::ClientState::DISCOVERED) {
this->set_address(0);
this->set_state(espbt::ClientState::IDLE);
} else {
@@ -216,6 +212,8 @@ void BLEClientBase::handle_connection_result_(esp_err_t ret) {
if (ret) {
this->log_gattc_warning_("esp_ble_gattc_open", ret);
this->set_state(espbt::ClientState::IDLE);
} else {
this->set_state(espbt::ClientState::CONNECTING);
}
}

View File

@@ -60,11 +60,14 @@ class BLEClientBase : public espbt::ESPBTClient, public Component {
if (address == 0) {
this->address_str_ = "";
} else {
this->address_str_ =
str_snprintf("%02X:%02X:%02X:%02X:%02X:%02X", 17, (uint8_t) (this->address_ >> 40) & 0xff,
(uint8_t) (this->address_ >> 32) & 0xff, (uint8_t) (this->address_ >> 24) & 0xff,
(uint8_t) (this->address_ >> 16) & 0xff, (uint8_t) (this->address_ >> 8) & 0xff,
(uint8_t) (this->address_ >> 0) & 0xff);
char buf[18];
uint8_t mac[6] = {
(uint8_t) ((this->address_ >> 40) & 0xff), (uint8_t) ((this->address_ >> 32) & 0xff),
(uint8_t) ((this->address_ >> 24) & 0xff), (uint8_t) ((this->address_ >> 16) & 0xff),
(uint8_t) ((this->address_ >> 8) & 0xff), (uint8_t) ((this->address_ >> 0) & 0xff),
};
format_mac_addr_upper(mac, buf);
this->address_str_ = buf;
}
}
const std::string &address_str() const { return this->address_str_; }

View File

@@ -51,6 +51,8 @@ const char *client_state_to_string(ClientState state) {
return "IDLE";
case ClientState::DISCOVERED:
return "DISCOVERED";
case ClientState::READY_TO_CONNECT:
return "READY_TO_CONNECT";
case ClientState::CONNECTING:
return "CONNECTING";
case ClientState::CONNECTED:
@@ -605,9 +607,8 @@ void ESPBTDevice::parse_adv_(const uint8_t *payload, uint8_t len) {
}
std::string ESPBTDevice::address_str() const {
char mac[24];
snprintf(mac, sizeof(mac), "%02X:%02X:%02X:%02X:%02X:%02X", this->address_[0], this->address_[1], this->address_[2],
this->address_[3], this->address_[4], this->address_[5]);
char mac[18];
format_mac_addr_upper(this->address_, mac);
return mac;
}
@@ -793,7 +794,7 @@ void ESP32BLETracker::try_promote_discovered_clients_() {
#ifdef USE_ESP32_BLE_SOFTWARE_COEXISTENCE
this->update_coex_preference_(true);
#endif
client->connect();
client->set_state(ClientState::READY_TO_CONNECT);
break;
}
}

View File

@@ -159,6 +159,8 @@ enum class ClientState : uint8_t {
IDLE,
// Device advertisement found.
DISCOVERED,
// Device is discovered and the scanner is stopped
READY_TO_CONNECT,
// Connection in progress.
CONNECTING,
// Initial connection established.
@@ -311,6 +313,7 @@ class ESP32BLETracker : public Component,
counts.discovered++;
break;
case ClientState::CONNECTING:
case ClientState::READY_TO_CONNECT:
counts.connecting++;
break;
default:

View File

@@ -77,6 +77,13 @@ ETHERNET_TYPES = {
"DM9051": EthernetType.ETHERNET_TYPE_DM9051,
}
# PHY types that need compile-time defines for conditional compilation
_PHY_TYPE_TO_DEFINE = {
"KSZ8081": "USE_ETHERNET_KSZ8081",
"KSZ8081RNA": "USE_ETHERNET_KSZ8081",
# Add other PHY types here only if they need conditional compilation
}
SPI_ETHERNET_TYPES = ["W5500", "DM9051"]
SPI_ETHERNET_DEFAULT_POLLING_INTERVAL = TimePeriodMilliseconds(milliseconds=10)
@@ -345,6 +352,10 @@ async def to_code(config):
if CONF_MANUAL_IP in config:
cg.add(var.set_manual_ip(manual_ip(config[CONF_MANUAL_IP])))
# Add compile-time define for PHY types with specific code
if phy_define := _PHY_TYPE_TO_DEFINE.get(config[CONF_TYPE]):
cg.add_define(phy_define)
cg.add_define("USE_ETHERNET")
# Disable WiFi when using Ethernet to save memory

View File

@@ -229,10 +229,12 @@ void EthernetComponent::setup() {
ESPHL_ERROR_CHECK(err, "ETH driver install error");
#ifndef USE_ETHERNET_SPI
#ifdef USE_ETHERNET_KSZ8081
if (this->type_ == ETHERNET_TYPE_KSZ8081RNA && this->clk_mode_ == EMAC_CLK_OUT) {
// KSZ8081RNA default is incorrect. It expects a 25MHz clock instead of the 50MHz we provide.
this->ksz8081_set_clock_reference_(mac);
}
#endif // USE_ETHERNET_KSZ8081
for (const auto &phy_register : this->phy_registers_) {
this->write_phy_register_(mac, phy_register);
@@ -300,6 +302,7 @@ void EthernetComponent::loop() {
this->state_ = EthernetComponentState::CONNECTING;
this->start_connect_();
} else {
this->finish_connect_();
// When connected and stable, disable the loop to save CPU cycles
this->disable_loop();
}
@@ -486,10 +489,35 @@ void EthernetComponent::got_ip6_event_handler(void *arg, esp_event_base_t event_
}
#endif /* USE_NETWORK_IPV6 */
void EthernetComponent::finish_connect_() {
#if USE_NETWORK_IPV6
// Retry IPv6 link-local setup if it failed during initial connect
// This handles the case where min_ipv6_addr_count is NOT set (or is 0),
// allowing us to reach CONNECTED state with just IPv4.
// If IPv6 setup failed in start_connect_() because the interface wasn't ready:
// - Bootup timing issues (#10281)
// - Cable unplugged/network interruption (#10705)
// We can now retry since we're in CONNECTED state and the interface is definitely up.
if (!this->ipv6_setup_done_) {
esp_err_t err = esp_netif_create_ip6_linklocal(this->eth_netif_);
if (err == ESP_OK) {
ESP_LOGD(TAG, "IPv6 link-local address created (retry succeeded)");
}
// Always set the flag to prevent continuous retries
// If IPv6 setup fails here with the interface up and stable, it's
// likely a persistent issue (IPv6 disabled at router, hardware
// limitation, etc.) that won't be resolved by further retries.
// The device continues to work with IPv4.
this->ipv6_setup_done_ = true;
}
#endif /* USE_NETWORK_IPV6 */
}
void EthernetComponent::start_connect_() {
global_eth_component->got_ipv4_address_ = false;
#if USE_NETWORK_IPV6
global_eth_component->ipv6_count_ = 0;
this->ipv6_setup_done_ = false;
#endif /* USE_NETWORK_IPV6 */
this->connect_begin_ = millis();
this->status_set_warning(LOG_STR("waiting for IP configuration"));
@@ -545,9 +573,27 @@ void EthernetComponent::start_connect_() {
}
}
#if USE_NETWORK_IPV6
// Attempt to create IPv6 link-local address
// We MUST attempt this here, not just in finish_connect_(), because with
// min_ipv6_addr_count set, the component won't reach CONNECTED state without IPv6.
// However, this may fail with ESP_FAIL if the interface is not up yet:
// - At bootup when link isn't ready (#10281)
// - After disconnection/cable unplugged (#10705)
// We'll retry in finish_connect_() if it fails here.
err = esp_netif_create_ip6_linklocal(this->eth_netif_);
if (err != ESP_OK) {
ESPHL_ERROR_CHECK(err, "Enable IPv6 link local failed");
if (err == ESP_ERR_ESP_NETIF_INVALID_PARAMS) {
// This is a programming error, not a transient failure
ESPHL_ERROR_CHECK(err, "esp_netif_create_ip6_linklocal invalid parameters");
} else {
// ESP_FAIL means the interface isn't up yet
// This is expected and non-fatal, happens in multiple scenarios:
// - During reconnection after network interruptions (#10705)
// - At bootup when the link isn't ready yet (#10281)
// We'll retry once we reach CONNECTED state and the interface is up
ESP_LOGW(TAG, "esp_netif_create_ip6_linklocal failed: %s", esp_err_to_name(err));
// Don't mark component as failed - this is a transient error
}
}
#endif /* USE_NETWORK_IPV6 */
@@ -638,7 +684,9 @@ void EthernetComponent::get_eth_mac_address_raw(uint8_t *mac) {
std::string EthernetComponent::get_eth_mac_address_pretty() {
uint8_t mac[6];
get_eth_mac_address_raw(mac);
return str_snprintf("%02X:%02X:%02X:%02X:%02X:%02X", 17, mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
char buf[18];
format_mac_addr_upper(mac, buf);
return std::string(buf);
}
eth_duplex_t EthernetComponent::get_duplex_mode() {
@@ -675,6 +723,7 @@ bool EthernetComponent::powerdown() {
#ifndef USE_ETHERNET_SPI
#ifdef USE_ETHERNET_KSZ8081
constexpr uint8_t KSZ80XX_PC2R_REG_ADDR = 0x1F;
void EthernetComponent::ksz8081_set_clock_reference_(esp_eth_mac_t *mac) {
@@ -703,6 +752,7 @@ void EthernetComponent::ksz8081_set_clock_reference_(esp_eth_mac_t *mac) {
ESP_LOGVV(TAG, "KSZ8081 PHY Control 2: %s", format_hex_pretty((u_int8_t *) &phy_control_2, 2).c_str());
}
}
#endif // USE_ETHERNET_KSZ8081
void EthernetComponent::write_phy_register_(esp_eth_mac_t *mac, PHYRegister register_data) {
esp_err_t err;
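
The IPv6 handling above amounts to: attempt link-local setup in start_connect_(), retry exactly once in finish_connect_() when the interface is known to be up, and never fail the component on the transient ESP_FAIL. A minimal sketch of that retry-once pattern, using hypothetical stand-ins rather than the real esp_netif API:

#include <cstdio>

// Hypothetical stand-in for esp_netif_create_ip6_linklocal(): fails while the
// link is not up yet (the transient case discussed in the comments above).
static bool g_link_up = false;
static bool try_create_ip6_linklocal() { return g_link_up; }

struct EthSketch {
  bool ipv6_setup_done = false;

  void start_connect() {
    ipv6_setup_done = false;
    // First attempt; may fail transiently at bootup or after a cable pull (non-fatal).
    if (!try_create_ip6_linklocal())
      std::puts("IPv6 link-local deferred; interface not up yet");
  }

  void finish_connect() {
    // Runs once the component is CONNECTED: retry exactly once.
    if (!ipv6_setup_done) {
      if (try_create_ip6_linklocal())
        std::puts("IPv6 link-local address created (retry succeeded)");
      ipv6_setup_done = true;  // never retry again; IPv4 keeps working regardless
    }
  }
};

int main() {
  EthSketch eth;
  eth.start_connect();   // link not ready yet -> deferred
  g_link_up = true;      // interface comes up
  eth.finish_connect();  // retry succeeds
}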

View File

@@ -102,9 +102,12 @@ class EthernetComponent : public Component {
#endif /* LWIP_IPV6 */
void start_connect_();
void finish_connect_();
void dump_connect_params_();
#ifdef USE_ETHERNET_KSZ8081
/// @brief Set `RMII Reference Clock Select` bit for KSZ8081.
void ksz8081_set_clock_reference_(esp_eth_mac_t *mac);
#endif
/// @brief Set arbitratry PHY registers from config.
void write_phy_register_(esp_eth_mac_t *mac, PHYRegister register_data);
@@ -144,6 +147,7 @@ class EthernetComponent : public Component {
bool got_ipv4_address_{false};
#if LWIP_IPV6
uint8_t ipv6_count_{0};
bool ipv6_setup_done_{false};
#endif /* LWIP_IPV6 */
// Pointers at the end (naturally aligned)

View File

@@ -18,6 +18,7 @@ from esphome.const import (
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_VOLTAGE,
STATE_CLASS_MEASUREMENT,
STATE_CLASS_TOTAL_INCREASING,
UNIT_AMPERE,
UNIT_CELSIUS,
UNIT_VOLT,
@@ -162,7 +163,7 @@ INA2XX_SCHEMA = cv.Schema(
unit_of_measurement=UNIT_WATT_HOURS,
accuracy_decimals=8,
device_class=DEVICE_CLASS_ENERGY,
state_class=STATE_CLASS_MEASUREMENT,
state_class=STATE_CLASS_TOTAL_INCREASING,
),
key=CONF_NAME,
),
@@ -170,7 +171,8 @@ INA2XX_SCHEMA = cv.Schema(
sensor.sensor_schema(
unit_of_measurement=UNIT_JOULE,
accuracy_decimals=8,
state_class=STATE_CLASS_MEASUREMENT,
device_class=DEVICE_CLASS_ENERGY,
state_class=STATE_CLASS_TOTAL_INCREASING,
),
key=CONF_NAME,
),

View File

@@ -1,4 +1,3 @@
#include <cstdio>
#include <cstring>
#include "md5.h"
#ifdef USE_MD5
@@ -44,7 +43,9 @@ void MD5Digest::get_bytes(uint8_t *output) { memcpy(output, this->digest_, 16);
void MD5Digest::get_hex(char *output) {
for (size_t i = 0; i < 16; i++) {
sprintf(output + i * 2, "%02x", this->digest_[i]);
uint8_t byte = this->digest_[i];
output[i * 2] = format_hex_char(byte >> 4);
output[i * 2 + 1] = format_hex_char(byte & 0x0F);
}
}

View File

@@ -491,7 +491,7 @@ bool MQTTClientComponent::publish(const std::string &topic, const std::string &p
bool MQTTClientComponent::publish(const std::string &topic, const char *payload, size_t payload_length, uint8_t qos,
bool retain) {
return publish({.topic = topic, .payload = payload, .qos = qos, .retain = retain});
return publish({.topic = topic, .payload = std::string(payload, payload_length), .qos = qos, .retain = retain});
}
bool MQTTClientComponent::publish(const MQTTMessage &message) {
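
Context for the one-line fix above: constructing std::string from a bare const char * stops at the first NUL byte, so binary MQTT payloads were silently truncated; passing the explicit length copies every byte. A minimal illustration:

#include <cstdio>
#include <string>

int main() {
  const char payload[] = {'a', 'b', '\0', 'c', 'd'};
  std::string truncated(payload);  // length 2: construction stops at the embedded NUL
  std::string full(payload, 5);    // length 5: all bytes preserved, NUL included
  std::printf("%zu %zu\n", truncated.size(), full.size());  // prints "2 5"
}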

View File

@@ -270,7 +270,6 @@ void PacketTransport::add_binary_data_(uint8_t key, const char *id, bool data) {
auto len = 1 + 1 + 1 + strlen(id);
if (len + this->header_.size() + this->data_.size() > this->get_max_packet_size()) {
this->flush_();
this->init_data_();
}
add(this->data_, key);
add(this->data_, (uint8_t) data);
@@ -285,7 +284,6 @@ void PacketTransport::add_data_(uint8_t key, const char *id, uint32_t data) {
auto len = 4 + 1 + 1 + strlen(id);
if (len + this->header_.size() + this->data_.size() > this->get_max_packet_size()) {
this->flush_();
this->init_data_();
}
add(this->data_, key);
add(this->data_, data);

View File

@@ -196,8 +196,8 @@ FILTER_SOURCE_FILES = filter_source_files_from_platform(
PlatformFramework.ESP32_ARDUINO,
PlatformFramework.ESP32_IDF,
},
"remote_receiver.cpp": {
PlatformFramework.ESP8266_ARDUINO,
"remote_receiver_esp8266.cpp": {PlatformFramework.ESP8266_ARDUINO},
"remote_receiver_libretiny.cpp": {
PlatformFramework.BK72XX_ARDUINO,
PlatformFramework.RTL87XX_ARDUINO,
PlatformFramework.LN882X_ARDUINO,

View File

@@ -3,12 +3,12 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#if defined(USE_LIBRETINY) || defined(USE_ESP8266)
#ifdef USE_ESP8266
namespace esphome {
namespace remote_receiver {
static const char *const TAG = "remote_receiver";
static const char *const TAG = "remote_receiver.esp8266";
void IRAM_ATTR HOT RemoteReceiverComponentStore::gpio_intr(RemoteReceiverComponentStore *arg) {
const uint32_t now = micros();

View File

@@ -0,0 +1,125 @@
#include "remote_receiver.h"
#include "esphome/core/hal.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#ifdef USE_LIBRETINY
namespace esphome {
namespace remote_receiver {
static const char *const TAG = "remote_receiver.libretiny";
void IRAM_ATTR HOT RemoteReceiverComponentStore::gpio_intr(RemoteReceiverComponentStore *arg) {
const uint32_t now = micros();
// If the lhs is 1 (rising edge) we should write to an uneven index and vice versa
const uint32_t next = (arg->buffer_write_at + 1) % arg->buffer_size;
const bool level = arg->pin.digital_read();
if (level != next % 2)
return;
// If next is buffer_read, we have hit an overflow
if (next == arg->buffer_read_at)
return;
const uint32_t last_change = arg->buffer[arg->buffer_write_at];
const uint32_t time_since_change = now - last_change;
if (time_since_change <= arg->filter_us)
return;
arg->buffer[arg->buffer_write_at = next] = now;
}
void RemoteReceiverComponent::setup() {
this->pin_->setup();
auto &s = this->store_;
s.filter_us = this->filter_us_;
s.pin = this->pin_->to_isr();
s.buffer_size = this->buffer_size_;
this->high_freq_.start();
if (s.buffer_size % 2 != 0) {
// Make sure divisible by two. This way, we know that every 0bxxx0 index is a space and every 0bxxx1 index is a mark
s.buffer_size++;
}
s.buffer = new uint32_t[s.buffer_size];
void *buf = (void *) s.buffer;
memset(buf, 0, s.buffer_size * sizeof(uint32_t));
// First index is a space.
if (this->pin_->digital_read()) {
s.buffer_write_at = s.buffer_read_at = 1;
} else {
s.buffer_write_at = s.buffer_read_at = 0;
}
this->pin_->attach_interrupt(RemoteReceiverComponentStore::gpio_intr, &this->store_, gpio::INTERRUPT_ANY_EDGE);
}
void RemoteReceiverComponent::dump_config() {
ESP_LOGCONFIG(TAG, "Remote Receiver:");
LOG_PIN(" Pin: ", this->pin_);
if (this->pin_->digital_read()) {
ESP_LOGW(TAG, "Remote Receiver Signal starts with a HIGH value. Usually this means you have to "
"invert the signal using 'inverted: True' in the pin schema!");
}
ESP_LOGCONFIG(TAG,
" Buffer Size: %u\n"
" Tolerance: %u%s\n"
" Filter out pulses shorter than: %u us\n"
" Signal is done after %u us of no changes",
this->buffer_size_, this->tolerance_,
(this->tolerance_mode_ == remote_base::TOLERANCE_MODE_TIME) ? " us" : "%", this->filter_us_,
this->idle_us_);
}
void RemoteReceiverComponent::loop() {
auto &s = this->store_;
// copy write at to local variables, as it's volatile
const uint32_t write_at = s.buffer_write_at;
const uint32_t dist = (s.buffer_size + write_at - s.buffer_read_at) % s.buffer_size;
// signals must at least one rising and one leading edge
if (dist <= 1)
return;
const uint32_t now = micros();
if (now - s.buffer[write_at] < this->idle_us_) {
// The last change was fewer than the configured idle time ago.
return;
}
ESP_LOGVV(TAG, "read_at=%u write_at=%u dist=%u now=%u end=%u", s.buffer_read_at, write_at, dist, now,
s.buffer[write_at]);
// Skip first value, it's from the previous idle level
s.buffer_read_at = (s.buffer_read_at + 1) % s.buffer_size;
uint32_t prev = s.buffer_read_at;
s.buffer_read_at = (s.buffer_read_at + 1) % s.buffer_size;
const uint32_t reserve_size = 1 + (s.buffer_size + write_at - s.buffer_read_at) % s.buffer_size;
this->temp_.clear();
this->temp_.reserve(reserve_size);
int32_t multiplier = s.buffer_read_at % 2 == 0 ? 1 : -1;
for (uint32_t i = 0; prev != write_at; i++) {
int32_t delta = s.buffer[s.buffer_read_at] - s.buffer[prev];
if (uint32_t(delta) >= this->idle_us_) {
// already found a space longer than idle. There must have been two pulses
break;
}
ESP_LOGVV(TAG, " i=%u buffer[%u]=%u - buffer[%u]=%u -> %d", i, s.buffer_read_at, s.buffer[s.buffer_read_at], prev,
s.buffer[prev], multiplier * delta);
this->temp_.push_back(multiplier * delta);
prev = s.buffer_read_at;
s.buffer_read_at = (s.buffer_read_at + 1) % s.buffer_size;
multiplier *= -1;
}
s.buffer_read_at = (s.buffer_size + s.buffer_read_at - 1) % s.buffer_size;
this->temp_.push_back(this->idle_us_ * multiplier);
this->call_listeners_dumpers_();
}
} // namespace remote_receiver
} // namespace esphome
#endif
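
The new LibreTiny receiver reuses the parity-indexed ring buffer of the ESP8266 version: the buffer size is forced even so that even slots always hold falling-edge (space) timestamps and odd slots rising-edge (mark) timestamps, and an edge whose level disagrees with the parity of the next slot is dropped as a glitch. A stripped-down sketch of that invariant (overflow and filter_us handling omitted; names are illustrative):

#include <cstdint>
#include <cstdio>

// Parity-indexed ring buffer: odd slots = rising edges, even slots = falling edges.
struct EdgeBuffer {
  static constexpr uint32_t SIZE = 8;  // must be even so the parity rule survives wrap-around
  uint32_t buffer[SIZE] = {};
  uint32_t write_at = 0;

  // Record an edge timestamp; reject edges whose level does not match the
  // parity of the next slot (e.g. two rising edges in a row from a glitch).
  bool record(bool level, uint32_t now_us) {
    const uint32_t next = (write_at + 1) % SIZE;
    if (level != (next % 2 == 1))
      return false;  // level/parity mismatch -> drop
    buffer[write_at = next] = now_us;
    return true;
  }
};

int main() {
  EdgeBuffer buf;
  std::printf("%d\n", buf.record(true, 100));   // rising edge into odd slot 1 -> accepted (1)
  std::printf("%d\n", buf.record(true, 150));   // rising again, next slot 2 is even -> rejected (0)
  std::printf("%d\n", buf.record(false, 200));  // falling edge into even slot 2 -> accepted (1)
}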

View File

@@ -131,8 +131,8 @@ FILTER_SOURCE_FILES = filter_source_files_from_platform(
PlatformFramework.ESP32_ARDUINO,
PlatformFramework.ESP32_IDF,
},
"remote_transmitter.cpp": {
PlatformFramework.ESP8266_ARDUINO,
"remote_transmitter_esp8266.cpp": {PlatformFramework.ESP8266_ARDUINO},
"remote_transmitter_libretiny.cpp": {
PlatformFramework.BK72XX_ARDUINO,
PlatformFramework.RTL87XX_ARDUINO,
PlatformFramework.LN882X_ARDUINO,

View File

@@ -2,107 +2,10 @@
#include "esphome/core/log.h"
#include "esphome/core/application.h"
#if defined(USE_LIBRETINY) || defined(USE_ESP8266)
namespace esphome {
namespace remote_transmitter {
static const char *const TAG = "remote_transmitter";
void RemoteTransmitterComponent::setup() {
this->pin_->setup();
this->pin_->digital_write(false);
}
void RemoteTransmitterComponent::dump_config() {
ESP_LOGCONFIG(TAG,
"Remote Transmitter:\n"
" Carrier Duty: %u%%",
this->carrier_duty_percent_);
LOG_PIN(" Pin: ", this->pin_);
}
void RemoteTransmitterComponent::calculate_on_off_time_(uint32_t carrier_frequency, uint32_t *on_time_period,
uint32_t *off_time_period) {
if (carrier_frequency == 0) {
*on_time_period = 0;
*off_time_period = 0;
return;
}
uint32_t period = (1000000UL + carrier_frequency / 2) / carrier_frequency; // round(1000000/freq)
period = std::max(uint32_t(1), period);
*on_time_period = (period * this->carrier_duty_percent_) / 100;
*off_time_period = period - *on_time_period;
}
void RemoteTransmitterComponent::await_target_time_() {
const uint32_t current_time = micros();
if (this->target_time_ == 0) {
this->target_time_ = current_time;
} else if ((int32_t) (this->target_time_ - current_time) > 0) {
delayMicroseconds(this->target_time_ - current_time);
}
}
void RemoteTransmitterComponent::mark_(uint32_t on_time, uint32_t off_time, uint32_t usec) {
this->await_target_time_();
this->pin_->digital_write(true);
const uint32_t target = this->target_time_ + usec;
if (this->carrier_duty_percent_ < 100 && (on_time > 0 || off_time > 0)) {
while (true) { // Modulate with carrier frequency
this->target_time_ += on_time;
if ((int32_t) (this->target_time_ - target) >= 0)
break;
this->await_target_time_();
this->pin_->digital_write(false);
this->target_time_ += off_time;
if ((int32_t) (this->target_time_ - target) >= 0)
break;
this->await_target_time_();
this->pin_->digital_write(true);
}
}
this->target_time_ = target;
}
void RemoteTransmitterComponent::space_(uint32_t usec) {
this->await_target_time_();
this->pin_->digital_write(false);
this->target_time_ += usec;
}
void RemoteTransmitterComponent::digital_write(bool value) { this->pin_->digital_write(value); }
void RemoteTransmitterComponent::send_internal(uint32_t send_times, uint32_t send_wait) {
ESP_LOGD(TAG, "Sending remote code");
uint32_t on_time, off_time;
this->calculate_on_off_time_(this->temp_.get_carrier_frequency(), &on_time, &off_time);
this->target_time_ = 0;
this->transmit_trigger_->trigger();
for (uint32_t i = 0; i < send_times; i++) {
InterruptLock lock;
for (int32_t item : this->temp_.get_data()) {
if (item > 0) {
const auto length = uint32_t(item);
this->mark_(on_time, off_time, length);
} else {
const auto length = uint32_t(-item);
this->space_(length);
}
App.feed_wdt();
}
this->await_target_time_(); // wait for duration of last pulse
this->pin_->digital_write(false);
if (i + 1 < send_times)
this->target_time_ += send_wait;
}
this->complete_trigger_->trigger();
}
} // namespace remote_transmitter
} // namespace esphome
#endif

View File

@@ -0,0 +1,107 @@
#include "remote_transmitter.h"
#include "esphome/core/log.h"
#include "esphome/core/application.h"
#ifdef USE_ESP8266
namespace esphome {
namespace remote_transmitter {
static const char *const TAG = "remote_transmitter";
void RemoteTransmitterComponent::setup() {
this->pin_->setup();
this->pin_->digital_write(false);
}
void RemoteTransmitterComponent::dump_config() {
ESP_LOGCONFIG(TAG,
"Remote Transmitter:\n"
" Carrier Duty: %u%%",
this->carrier_duty_percent_);
LOG_PIN(" Pin: ", this->pin_);
}
void RemoteTransmitterComponent::calculate_on_off_time_(uint32_t carrier_frequency, uint32_t *on_time_period,
uint32_t *off_time_period) {
if (carrier_frequency == 0) {
*on_time_period = 0;
*off_time_period = 0;
return;
}
uint32_t period = (1000000UL + carrier_frequency / 2) / carrier_frequency; // round(1000000/freq)
period = std::max(uint32_t(1), period);
*on_time_period = (period * this->carrier_duty_percent_) / 100;
*off_time_period = period - *on_time_period;
}
void RemoteTransmitterComponent::await_target_time_() {
const uint32_t current_time = micros();
if (this->target_time_ == 0) {
this->target_time_ = current_time;
} else if ((int32_t) (this->target_time_ - current_time) > 0) {
delayMicroseconds(this->target_time_ - current_time);
}
}
void RemoteTransmitterComponent::mark_(uint32_t on_time, uint32_t off_time, uint32_t usec) {
this->await_target_time_();
this->pin_->digital_write(true);
const uint32_t target = this->target_time_ + usec;
if (this->carrier_duty_percent_ < 100 && (on_time > 0 || off_time > 0)) {
while (true) { // Modulate with carrier frequency
this->target_time_ += on_time;
if ((int32_t) (this->target_time_ - target) >= 0)
break;
this->await_target_time_();
this->pin_->digital_write(false);
this->target_time_ += off_time;
if ((int32_t) (this->target_time_ - target) >= 0)
break;
this->await_target_time_();
this->pin_->digital_write(true);
}
}
this->target_time_ = target;
}
void RemoteTransmitterComponent::space_(uint32_t usec) {
this->await_target_time_();
this->pin_->digital_write(false);
this->target_time_ += usec;
}
void RemoteTransmitterComponent::digital_write(bool value) { this->pin_->digital_write(value); }
void RemoteTransmitterComponent::send_internal(uint32_t send_times, uint32_t send_wait) {
ESP_LOGD(TAG, "Sending remote code");
uint32_t on_time, off_time;
this->calculate_on_off_time_(this->temp_.get_carrier_frequency(), &on_time, &off_time);
this->target_time_ = 0;
this->transmit_trigger_->trigger();
for (uint32_t i = 0; i < send_times; i++) {
for (int32_t item : this->temp_.get_data()) {
if (item > 0) {
const auto length = uint32_t(item);
this->mark_(on_time, off_time, length);
} else {
const auto length = uint32_t(-item);
this->space_(length);
}
App.feed_wdt();
}
this->await_target_time_(); // wait for duration of last pulse
this->pin_->digital_write(false);
if (i + 1 < send_times)
this->target_time_ += send_wait;
}
this->complete_trigger_->trigger();
}
} // namespace remote_transmitter
} // namespace esphome
#endif
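
calculate_on_off_time_() rounds 1,000,000 / carrier_frequency to a whole-microsecond period and then splits it by the configured duty cycle. A quick worked check for a typical 38 kHz IR carrier at 50% duty (the specific numbers are illustrative, not taken from this diff):

#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t carrier_frequency = 38000;  // 38 kHz, a common IR carrier
  const uint32_t duty_percent = 50;

  uint32_t period = (1000000UL + carrier_frequency / 2) / carrier_frequency;  // round(1e6 / 38000) = 26 us
  uint32_t on_time = (period * duty_percent) / 100;                           // 13 us high
  uint32_t off_time = period - on_time;                                       // 13 us low

  std::printf("period=%u on=%u off=%u\n", period, on_time, off_time);  // period=26 on=13 off=13
}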

View File

@@ -0,0 +1,110 @@
#include "remote_transmitter.h"
#include "esphome/core/log.h"
#include "esphome/core/application.h"
#ifdef USE_LIBRETINY
namespace esphome {
namespace remote_transmitter {
static const char *const TAG = "remote_transmitter";
void RemoteTransmitterComponent::setup() {
this->pin_->setup();
this->pin_->digital_write(false);
}
void RemoteTransmitterComponent::dump_config() {
ESP_LOGCONFIG(TAG,
"Remote Transmitter:\n"
" Carrier Duty: %u%%",
this->carrier_duty_percent_);
LOG_PIN(" Pin: ", this->pin_);
}
void RemoteTransmitterComponent::calculate_on_off_time_(uint32_t carrier_frequency, uint32_t *on_time_period,
uint32_t *off_time_period) {
if (carrier_frequency == 0) {
*on_time_period = 0;
*off_time_period = 0;
return;
}
uint32_t period = (1000000UL + carrier_frequency / 2) / carrier_frequency; // round(1000000/freq)
period = std::max(uint32_t(1), period);
*on_time_period = (period * this->carrier_duty_percent_) / 100;
*off_time_period = period - *on_time_period;
}
void RemoteTransmitterComponent::await_target_time_() {
const uint32_t current_time = micros();
if (this->target_time_ == 0) {
this->target_time_ = current_time;
} else {
while ((int32_t) (this->target_time_ - micros()) > 0) {
// busy loop that ensures micros is constantly called
}
}
}
void RemoteTransmitterComponent::mark_(uint32_t on_time, uint32_t off_time, uint32_t usec) {
this->await_target_time_();
this->pin_->digital_write(true);
const uint32_t target = this->target_time_ + usec;
if (this->carrier_duty_percent_ < 100 && (on_time > 0 || off_time > 0)) {
while (true) { // Modulate with carrier frequency
this->target_time_ += on_time;
if ((int32_t) (this->target_time_ - target) >= 0)
break;
this->await_target_time_();
this->pin_->digital_write(false);
this->target_time_ += off_time;
if ((int32_t) (this->target_time_ - target) >= 0)
break;
this->await_target_time_();
this->pin_->digital_write(true);
}
}
this->target_time_ = target;
}
void RemoteTransmitterComponent::space_(uint32_t usec) {
this->await_target_time_();
this->pin_->digital_write(false);
this->target_time_ += usec;
}
void RemoteTransmitterComponent::digital_write(bool value) { this->pin_->digital_write(value); }
void RemoteTransmitterComponent::send_internal(uint32_t send_times, uint32_t send_wait) {
ESP_LOGD(TAG, "Sending remote code");
uint32_t on_time, off_time;
this->calculate_on_off_time_(this->temp_.get_carrier_frequency(), &on_time, &off_time);
this->target_time_ = 0;
this->transmit_trigger_->trigger();
for (uint32_t i = 0; i < send_times; i++) {
InterruptLock lock;
for (int32_t item : this->temp_.get_data()) {
if (item > 0) {
const auto length = uint32_t(item);
this->mark_(on_time, off_time, length);
} else {
const auto length = uint32_t(-item);
this->space_(length);
}
App.feed_wdt();
}
this->await_target_time_(); // wait for duration of last pulse
this->pin_->digital_write(false);
if (i + 1 < send_times)
this->target_time_ += send_wait;
}
this->complete_trigger_->trigger();
}
} // namespace remote_transmitter
} // namespace esphome
#endif

View File

@@ -28,12 +28,12 @@ bool Select::has_option(const std::string &option) const { return this->index_of
bool Select::has_index(size_t index) const { return index < this->size(); }
size_t Select::size() const {
auto options = traits.get_options();
const auto &options = traits.get_options();
return options.size();
}
optional<size_t> Select::index_of(const std::string &option) const {
auto options = traits.get_options();
const auto &options = traits.get_options();
auto it = std::find(options.begin(), options.end(), option);
if (it == options.end()) {
return {};
@@ -51,7 +51,7 @@ optional<size_t> Select::active_index() const {
optional<std::string> Select::at(size_t index) const {
if (this->has_index(index)) {
auto options = traits.get_options();
const auto &options = traits.get_options();
return options.at(index);
} else {
return {};

View File

@@ -45,7 +45,7 @@ void SelectCall::perform() {
auto *parent = this->parent_;
const auto *name = parent->get_name().c_str();
const auto &traits = parent->traits;
auto options = traits.get_options();
const auto &options = traits.get_options();
if (this->operation_ == SELECT_OP_NONE) {
ESP_LOGW(TAG, "'%s' - SelectCall performed without selecting an operation", name);
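
The const-reference change above avoids copying the options vector, and every std::string in it, each time an option is looked up; it pays off assuming SelectTraits::get_options() hands back a reference rather than a temporary. A generic illustration with hypothetical types:

#include <cstdio>
#include <string>
#include <vector>

struct Traits {
  std::vector<std::string> options{"Off", "Low", "High"};
  const std::vector<std::string> &get_options() const { return options; }
};

int main() {
  Traits traits;

  auto copy = traits.get_options();        // deep-copies the vector and all of its strings
  const auto &ref = traits.get_options();  // binds to the existing vector, no allocation

  std::printf("copy shares storage: %d, ref shares storage: %d\n",
              copy.data() == traits.options.data(),  // 0 -> separate buffer
              ref.data() == traits.options.data());  // 1 -> same buffer
}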

View File

@@ -593,7 +593,7 @@ void WiFiComponent::check_scanning_finished() {
for (auto &res : this->scan_result_) {
char bssid_s[18];
auto bssid = res.get_bssid();
sprintf(bssid_s, "%02X:%02X:%02X:%02X:%02X:%02X", bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]);
format_mac_addr_upper(bssid.data(), bssid_s);
if (res.get_matches()) {
ESP_LOGI(TAG, "- '%s' %s" LOG_SECRET("(%s) ") "%s", res.get_ssid().c_str(),

View File

@@ -1,6 +1,7 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include "esphome/components/text_sensor/text_sensor.h"
#include "esphome/components/wifi/wifi_component.h"
#ifdef USE_WIFI
@@ -106,8 +107,8 @@ class BSSIDWiFiInfo : public PollingComponent, public text_sensor::TextSensor {
wifi::bssid_t bssid = wifi::global_wifi_component->wifi_bssid();
if (memcmp(bssid.data(), last_bssid_.data(), 6) != 0) {
std::copy(bssid.begin(), bssid.end(), last_bssid_.begin());
char buf[30];
sprintf(buf, "%02X:%02X:%02X:%02X:%02X:%02X", bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]);
char buf[18];
format_mac_addr_upper(bssid.data(), buf);
this->publish_state(buf);
}
}

View File

@@ -4,7 +4,7 @@ from enum import Enum
from esphome.enum import StrEnum
__version__ = "2025.10.0-dev"
__version__ = "2025.9.0"
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
VALID_SUBSTITUTIONS_CHARACTERS = (

View File

@@ -175,6 +175,7 @@
#ifdef USE_ARDUINO
#define USE_ARDUINO_VERSION_CODE VERSION_CODE(3, 2, 1)
#define USE_ETHERNET
#define USE_ETHERNET_KSZ8081
#endif
#ifdef USE_ESP_IDF

View File

@@ -255,23 +255,22 @@ size_t parse_hex(const char *str, size_t length, uint8_t *data, size_t count) {
}
std::string format_mac_address_pretty(const uint8_t *mac) {
return str_snprintf("%02X:%02X:%02X:%02X:%02X:%02X", 17, mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
char buf[18];
format_mac_addr_upper(mac, buf);
return std::string(buf);
}
static char format_hex_char(uint8_t v) { return v >= 10 ? 'a' + (v - 10) : '0' + v; }
std::string format_hex(const uint8_t *data, size_t length) {
std::string ret;
ret.resize(length * 2);
for (size_t i = 0; i < length; i++) {
ret[2 * i] = format_hex_char((data[i] & 0xF0) >> 4);
ret[2 * i] = format_hex_char(data[i] >> 4);
ret[2 * i + 1] = format_hex_char(data[i] & 0x0F);
}
return ret;
}
std::string format_hex(const std::vector<uint8_t> &data) { return format_hex(data.data(), data.size()); }
static char format_hex_pretty_char(uint8_t v) { return v >= 10 ? 'A' + (v - 10) : '0' + v; }
// Shared implementation for uint8_t and string hex formatting
static std::string format_hex_pretty_uint8(const uint8_t *data, size_t length, char separator, bool show_length) {
if (data == nullptr || length == 0)
@@ -280,7 +279,7 @@ static std::string format_hex_pretty_uint8(const uint8_t *data, size_t length, c
uint8_t multiple = separator ? 3 : 2; // 3 if separator is not \0, 2 otherwise
ret.resize(multiple * length - (separator ? 1 : 0));
for (size_t i = 0; i < length; i++) {
ret[multiple * i] = format_hex_pretty_char((data[i] & 0xF0) >> 4);
ret[multiple * i] = format_hex_pretty_char(data[i] >> 4);
ret[multiple * i + 1] = format_hex_pretty_char(data[i] & 0x0F);
if (separator && i != length - 1)
ret[multiple * i + 2] = separator;
@@ -591,7 +590,9 @@ bool HighFrequencyLoopRequester::is_high_frequency() { return num_requests > 0;
std::string get_mac_address() {
uint8_t mac[6];
get_mac_address_raw(mac);
return str_snprintf("%02x%02x%02x%02x%02x%02x", 12, mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
char buf[13];
format_mac_addr_lower_no_sep(mac, buf);
return std::string(buf);
}
std::string get_mac_address_pretty() {

View File

@@ -380,6 +380,35 @@ template<typename T, enable_if_t<std::is_unsigned<T>::value, int> = 0> optional<
return parse_hex<T>(str.c_str(), str.length());
}
/// Convert a nibble (0-15) to lowercase hex char
inline char format_hex_char(uint8_t v) { return v >= 10 ? 'a' + (v - 10) : '0' + v; }
/// Convert a nibble (0-15) to uppercase hex char (used for pretty printing)
/// This always uses uppercase (A-F) for pretty/human-readable output
inline char format_hex_pretty_char(uint8_t v) { return v >= 10 ? 'A' + (v - 10) : '0' + v; }
/// Format MAC address as XX:XX:XX:XX:XX:XX (uppercase)
inline void format_mac_addr_upper(const uint8_t *mac, char *output) {
for (size_t i = 0; i < 6; i++) {
uint8_t byte = mac[i];
output[i * 3] = format_hex_pretty_char(byte >> 4);
output[i * 3 + 1] = format_hex_pretty_char(byte & 0x0F);
if (i < 5)
output[i * 3 + 2] = ':';
}
output[17] = '\0';
}
/// Format MAC address as xxxxxxxxxxxxxx (lowercase, no separators)
inline void format_mac_addr_lower_no_sep(const uint8_t *mac, char *output) {
for (size_t i = 0; i < 6; i++) {
uint8_t byte = mac[i];
output[i * 2] = format_hex_char(byte >> 4);
output[i * 2 + 1] = format_hex_char(byte & 0x0F);
}
output[12] = '\0';
}
/// Format the six-byte array \p mac into a MAC address.
std::string format_mac_address_pretty(const uint8_t mac[6]);
/// Format the byte array \p data of length \p len in lowercased hex.
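
A small sketch of the two new buffer-based helpers; the include path and the esphome namespace are again assumptions, and the MAC bytes are illustrative. The destination buffers need 17 + 1 and 12 + 1 characters, matching the null terminators written above:

#include <cstdint>
#include <cstdio>
#include "esphome/core/helpers.h"  // assumed header path for the inline helpers above

int main() {
  const uint8_t mac[6] = {0xDE, 0xAD, 0xBE, 0xEF, 0x00, 0x42};
  char pretty[18];   // "DE:AD:BE:EF:00:42" plus trailing '\0'
  char compact[13];  // "deadbeef0042" plus trailing '\0'
  esphome::format_mac_addr_upper(mac, pretty);
  esphome::format_mac_addr_lower_no_sep(mac, compact);
  std::printf("%s\n%s\n", pretty, compact);
  return 0;
}

Writing into caller-provided buffers is what lets format_mac_address_pretty() and get_mac_address() above drop their str_snprintf()-based formatting.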

View File

@@ -345,7 +345,7 @@ void HOT Scheduler::call(uint32_t now) {
// Execute callback without holding lock to prevent deadlocks
// if the callback tries to call defer() again
if (!this->should_skip_item_(item.get())) {
this->execute_item_(item.get(), now);
now = this->execute_item_(item.get(), now);
}
// Recycle the defer item after execution
this->recycle_item_(std::move(item));
@@ -483,7 +483,7 @@ void HOT Scheduler::call(uint32_t now) {
// Warning: During callback(), a lot of stuff can happen, including:
// - timeouts/intervals get added, potentially invalidating vector pointers
// - timeouts/intervals get cancelled
this->execute_item_(item.get(), now);
now = this->execute_item_(item.get(), now);
LockGuard guard{this->lock_};
@@ -568,11 +568,11 @@ void HOT Scheduler::pop_raw_() {
}
// Helper to execute a scheduler item
void HOT Scheduler::execute_item_(SchedulerItem *item, uint32_t now) {
uint32_t HOT Scheduler::execute_item_(SchedulerItem *item, uint32_t now) {
App.set_current_component(item->component);
WarnIfComponentBlockingGuard guard{item->component, now};
item->callback();
guard.finish();
return guard.finish();
}
// Common implementation for cancel operations
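
A standalone sketch of the pattern, not ESPHome's actual scheduler types: the execute helper re-reads the clock after running the callback and returns that value, so the caller's now variable already accounts for time spent inside the callback instead of charging it to the next item. now_ms() is a stand-in for the scheduler's millisecond clock:

#include <chrono>
#include <cstdint>
#include <cstdio>
#include <functional>
#include <vector>

// Stand-in for the scheduler's millisecond clock.
static uint32_t now_ms() {
  using namespace std::chrono;
  return static_cast<uint32_t>(
      duration_cast<milliseconds>(steady_clock::now().time_since_epoch()).count());
}

// Runs one item and hands back the refreshed timestamp, mirroring
// now = this->execute_item_(item.get(), now); in the hunk above.
static uint32_t execute_item(const std::function<void()> &callback, uint32_t start) {
  callback();
  uint32_t end = now_ms();
  std::printf("item took %u ms\n", static_cast<unsigned>(end - start));
  return end;
}

int main() {
  std::vector<std::function<void()>> items{[] { /* work */ }, [] { /* work */ }};
  uint32_t now = now_ms();
  for (const auto &item : items)
    now = execute_item(item, now);  // each measurement starts from the previous item's end
  return 0;
}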

View File

@@ -254,7 +254,7 @@ class Scheduler {
}
// Helper to execute a scheduler item
void execute_item_(SchedulerItem *item, uint32_t now);
uint32_t execute_item_(SchedulerItem *item, uint32_t now);
// Helper to check if item should be skipped
bool should_skip_item_(SchedulerItem *item) const {

View File

@@ -1038,12 +1038,9 @@ class ArchiveRequestHandler(BaseHandler):
shutil.move(config_file, os.path.join(archive_path, configuration))
storage_json = StorageJSON.load(storage_path)
if storage_json is not None:
if storage_json is not None and storage_json.build_path:
# Delete build folder (if exists)
name = storage_json.name
build_folder = os.path.join(settings.config_dir, name)
if build_folder is not None:
shutil.rmtree(build_folder, os.path.join(archive_path, name))
shutil.rmtree(storage_json.build_path, ignore_errors=True)
class UnArchiveRequestHandler(BaseHandler):

View File

@@ -19,6 +19,3 @@ dependencies:
- if: "target in [esp32h2, esp32p4]"
zorxx/multipart-parser:
version: 1.0.1
vroland/epdiy:
git: https://github.com/vroland/epdiy.git
version: c61e9e923ce2418150d54f88cea5d196cdc40c54

View File

@@ -1,6 +1,7 @@
import os
import random
import string
from typing import Literal, NotRequired, TypedDict, Unpack
import unicodedata
import voluptuous as vol
@@ -103,11 +104,25 @@ HARDWARE_BASE_CONFIGS = {
}
def sanitize_double_quotes(value):
def sanitize_double_quotes(value: str) -> str:
return value.replace("\\", "\\\\").replace('"', '\\"')
def wizard_file(**kwargs):
class WizardFileKwargs(TypedDict):
"""Keyword arguments for wizard_file function."""
name: str
platform: Literal["ESP8266", "ESP32", "RP2040", "BK72XX", "LN882X", "RTL87XX"]
board: str
ssid: NotRequired[str]
psk: NotRequired[str]
password: NotRequired[str]
ota_password: NotRequired[str]
api_encryption_key: NotRequired[str]
friendly_name: NotRequired[str]
def wizard_file(**kwargs: Unpack[WizardFileKwargs]) -> str:
letters = string.ascii_letters + string.digits
ap_name_base = kwargs["name"].replace("_", " ").title()
ap_name = f"{ap_name_base} Fallback Hotspot"
@@ -180,7 +195,25 @@ captive_portal:
return config
def wizard_write(path, **kwargs):
class WizardWriteKwargs(TypedDict):
"""Keyword arguments for wizard_write function."""
name: str
type: Literal["basic", "empty", "upload"]
# Required for "basic" type
board: NotRequired[str]
platform: NotRequired[str]
ssid: NotRequired[str]
psk: NotRequired[str]
password: NotRequired[str]
ota_password: NotRequired[str]
api_encryption_key: NotRequired[str]
friendly_name: NotRequired[str]
# Required for "upload" type
file_text: NotRequired[str]
def wizard_write(path: str, **kwargs: Unpack[WizardWriteKwargs]) -> bool:
from esphome.components.bk72xx import boards as bk72xx_boards
from esphome.components.esp32 import boards as esp32_boards
from esphome.components.esp8266 import boards as esp8266_boards
@@ -237,14 +270,14 @@ def wizard_write(path, **kwargs):
if get_bool_env(ENV_QUICKWIZARD):
def sleep(time):
def sleep(time: float) -> None:
pass
else:
from time import sleep
def safe_print_step(step, big):
def safe_print_step(step: int, big: str) -> None:
safe_print()
safe_print()
safe_print(f"============= STEP {step} =============")
@@ -253,14 +286,14 @@ def safe_print_step(step, big):
sleep(0.25)
def default_input(text, default):
def default_input(text: str, default: str) -> str:
safe_print()
safe_print(f"Press ENTER for default ({default})")
return safe_input(text.format(default)) or default
# From https://stackoverflow.com/a/518232/8924614
def strip_accents(value):
def strip_accents(value: str) -> str:
return "".join(
c
for c in unicodedata.normalize("NFD", str(value))
@@ -268,7 +301,7 @@ def strip_accents(value):
)
def wizard(path):
def wizard(path: str) -> int:
from esphome.components.bk72xx import boards as bk72xx_boards
from esphome.components.esp32 import boards as esp32_boards
from esphome.components.esp8266 import boards as esp8266_boards
@@ -509,6 +542,7 @@ def wizard(path):
ssid=ssid,
psk=psk,
password=password,
type="basic",
):
return 1
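
A hypothetical direct call that matches the WizardWriteKwargs shape above; the module path, target file, and all values here are illustrative rather than taken from this change:

from esphome.wizard import wizard_write  # assumed import location

# Writes a minimal "basic" configuration; every keyword below is one of the
# WizardWriteKwargs fields defined above.
wizard_write(
    "/tmp/example_device.yaml",
    type="basic",
    name="example_device",
    platform="ESP32",
    board="esp32dev",
    ssid="MyNetwork",
    psk="correct-horse-battery",
    password="1234",
)

With Unpack[WizardWriteKwargs], a type checker can flag a missing type= or an unknown keyword at a call site like this, which is the point of the TypedDict definitions.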

View File

@@ -315,6 +315,19 @@ def clean_build():
_LOGGER.info("Deleting %s", dependencies_lock)
os.remove(dependencies_lock)
# Clean PlatformIO cache to resolve CMake compiler detection issues
# This helps when toolchain paths change or get corrupted
try:
from platformio.project.helpers import get_project_cache_dir
except ImportError:
# PlatformIO is not available, skip cache cleaning
pass
else:
cache_dir = get_project_cache_dir()
if cache_dir and cache_dir.strip() and os.path.isdir(cache_dir):
_LOGGER.info("Deleting PlatformIO cache %s", cache_dir)
shutil.rmtree(cache_dir)
GITIGNORE_CONTENT = """# Gitignore settings for ESPHome
# This is an example and may include too much for your use-case.

View File

@@ -12,7 +12,7 @@ platformio==6.1.18 # When updating platformio, also update /docker/Dockerfile
esptool==5.0.2
click==8.1.7
esphome-dashboard==20250904.0
aioesphomeapi==40.1.0
aioesphomeapi==40.2.1
zeroconf==0.147.2
puremagic==1.30
ruamel.yaml==0.18.15 # dashboard_import

View File

@@ -1,6 +1,6 @@
pylint==3.3.8
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
ruff==0.13.0 # also change in .pre-commit-config.yaml when updating
ruff==0.12.12 # also change in .pre-commit-config.yaml when updating
pyupgrade==3.20.0 # also change in .pre-commit-config.yaml when updating
pre-commit
@@ -8,7 +8,7 @@ pre-commit
pytest==8.4.2
pytest-cov==7.0.0
pytest-mock==3.15.0
pytest-asyncio==1.2.0
pytest-asyncio==1.1.0
pytest-xdist==3.8.0
asyncmock==0.4.2
hypothesis==6.92.1

View File

@@ -1,203 +0,0 @@
"""Tests for dashboard entries Path-related functionality."""
from __future__ import annotations
from pathlib import Path
import tempfile
from unittest.mock import MagicMock
import pytest
import pytest_asyncio
from esphome.core import CORE
from esphome.dashboard.entries import DashboardEntries, DashboardEntry
def create_cache_key() -> tuple[int, int, float, int]:
"""Helper to create a valid DashboardCacheKeyType."""
return (0, 0, 0.0, 0)
@pytest.fixture(autouse=True)
def setup_core():
"""Set up CORE for testing."""
with tempfile.TemporaryDirectory() as tmpdir:
CORE.config_path = str(Path(tmpdir) / "test.yaml")
yield
CORE.reset()
@pytest.fixture
def mock_settings() -> MagicMock:
"""Create mock dashboard settings."""
settings = MagicMock()
settings.config_dir = "/test/config"
settings.absolute_config_dir = Path("/test/config")
return settings
@pytest_asyncio.fixture
async def dashboard_entries(mock_settings: MagicMock) -> DashboardEntries:
"""Create a DashboardEntries instance for testing."""
return DashboardEntries(mock_settings)
def test_dashboard_entry_path_initialization() -> None:
"""Test DashboardEntry initializes with path correctly."""
test_path = "/test/config/device.yaml"
cache_key = create_cache_key()
entry = DashboardEntry(test_path, cache_key)
assert entry.path == test_path
assert entry.cache_key == cache_key
def test_dashboard_entry_path_with_absolute_path() -> None:
"""Test DashboardEntry handles absolute paths."""
# Use a truly absolute path for the platform
test_path = Path.cwd() / "absolute" / "path" / "to" / "config.yaml"
cache_key = create_cache_key()
entry = DashboardEntry(str(test_path), cache_key)
assert entry.path == str(test_path)
assert Path(entry.path).is_absolute()
def test_dashboard_entry_path_with_relative_path() -> None:
"""Test DashboardEntry handles relative paths."""
test_path = "configs/device.yaml"
cache_key = create_cache_key()
entry = DashboardEntry(test_path, cache_key)
assert entry.path == test_path
assert not Path(entry.path).is_absolute()
@pytest.mark.asyncio
async def test_dashboard_entries_get_by_path(
dashboard_entries: DashboardEntries,
) -> None:
"""Test getting entry by path."""
test_path = "/test/config/device.yaml"
entry = DashboardEntry(test_path, create_cache_key())
dashboard_entries._entries[test_path] = entry
result = dashboard_entries.get(test_path)
assert result == entry
@pytest.mark.asyncio
async def test_dashboard_entries_get_nonexistent_path(
dashboard_entries: DashboardEntries,
) -> None:
"""Test getting non-existent entry returns None."""
result = dashboard_entries.get("/nonexistent/path.yaml")
assert result is None
@pytest.mark.asyncio
async def test_dashboard_entries_path_normalization(
dashboard_entries: DashboardEntries,
) -> None:
"""Test that paths are handled consistently."""
path1 = "/test/config/device.yaml"
entry = DashboardEntry(path1, create_cache_key())
dashboard_entries._entries[path1] = entry
result = dashboard_entries.get(path1)
assert result == entry
@pytest.mark.asyncio
async def test_dashboard_entries_path_with_spaces(
dashboard_entries: DashboardEntries,
) -> None:
"""Test handling paths with spaces."""
test_path = "/test/config/my device.yaml"
entry = DashboardEntry(test_path, create_cache_key())
dashboard_entries._entries[test_path] = entry
result = dashboard_entries.get(test_path)
assert result == entry
assert result.path == test_path
@pytest.mark.asyncio
async def test_dashboard_entries_path_with_special_chars(
dashboard_entries: DashboardEntries,
) -> None:
"""Test handling paths with special characters."""
test_path = "/test/config/device-01_test.yaml"
entry = DashboardEntry(test_path, create_cache_key())
dashboard_entries._entries[test_path] = entry
result = dashboard_entries.get(test_path)
assert result == entry
def test_dashboard_entries_windows_path() -> None:
"""Test handling Windows-style paths."""
test_path = r"C:\Users\test\esphome\device.yaml"
cache_key = create_cache_key()
entry = DashboardEntry(test_path, cache_key)
assert entry.path == test_path
@pytest.mark.asyncio
async def test_dashboard_entries_path_to_cache_key_mapping(
dashboard_entries: DashboardEntries,
) -> None:
"""Test internal entries storage with paths and cache keys."""
path1 = "/test/config/device1.yaml"
path2 = "/test/config/device2.yaml"
entry1 = DashboardEntry(path1, create_cache_key())
entry2 = DashboardEntry(path2, (1, 1, 1.0, 1))
dashboard_entries._entries[path1] = entry1
dashboard_entries._entries[path2] = entry2
assert path1 in dashboard_entries._entries
assert path2 in dashboard_entries._entries
assert dashboard_entries._entries[path1].cache_key == create_cache_key()
assert dashboard_entries._entries[path2].cache_key == (1, 1, 1.0, 1)
def test_dashboard_entry_path_property() -> None:
"""Test that path property returns expected value."""
test_path = "/test/config/device.yaml"
entry = DashboardEntry(test_path, create_cache_key())
assert entry.path == test_path
assert isinstance(entry.path, str)
@pytest.mark.asyncio
async def test_dashboard_entries_all_returns_entries_with_paths(
dashboard_entries: DashboardEntries,
) -> None:
"""Test that all() returns entries with their paths intact."""
paths = [
"/test/config/device1.yaml",
"/test/config/device2.yaml",
"/test/config/subfolder/device3.yaml",
]
for path in paths:
entry = DashboardEntry(path, create_cache_key())
dashboard_entries._entries[path] = entry
all_entries = dashboard_entries.async_all()
assert len(all_entries) == len(paths)
retrieved_paths = [entry.path for entry in all_entries]
assert set(retrieved_paths) == set(paths)

View File

@@ -1,168 +0,0 @@
"""Tests for dashboard settings Path-related functionality."""
from __future__ import annotations
import os
from pathlib import Path
import tempfile
import pytest
from esphome.dashboard.settings import DashboardSettings
@pytest.fixture
def dashboard_settings(tmp_path: Path) -> DashboardSettings:
"""Create DashboardSettings instance with temp directory."""
settings = DashboardSettings()
# Resolve symlinks to ensure paths match
resolved_dir = tmp_path.resolve()
settings.config_dir = str(resolved_dir)
settings.absolute_config_dir = resolved_dir
return settings
def test_rel_path_simple(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path with simple relative path."""
result = dashboard_settings.rel_path("config.yaml")
expected = str(Path(dashboard_settings.config_dir) / "config.yaml")
assert result == expected
def test_rel_path_multiple_components(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path with multiple path components."""
result = dashboard_settings.rel_path("subfolder", "device", "config.yaml")
expected = str(
Path(dashboard_settings.config_dir) / "subfolder" / "device" / "config.yaml"
)
assert result == expected
def test_rel_path_with_dots(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path prevents directory traversal."""
# This should raise ValueError as it tries to go outside config_dir
with pytest.raises(ValueError):
dashboard_settings.rel_path("..", "outside.yaml")
def test_rel_path_absolute_path_within_config(
dashboard_settings: DashboardSettings,
) -> None:
"""Test rel_path with absolute path that's within config dir."""
internal_path = dashboard_settings.absolute_config_dir / "internal.yaml"
internal_path.touch()
result = dashboard_settings.rel_path("internal.yaml")
expected = str(Path(dashboard_settings.config_dir) / "internal.yaml")
assert result == expected
def test_rel_path_absolute_path_outside_config(
dashboard_settings: DashboardSettings,
) -> None:
"""Test rel_path with absolute path outside config dir raises error."""
outside_path = "/tmp/outside/config.yaml"
with pytest.raises(ValueError):
dashboard_settings.rel_path(outside_path)
def test_rel_path_empty_args(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path with no arguments returns config_dir."""
result = dashboard_settings.rel_path()
assert result == dashboard_settings.config_dir
def test_rel_path_with_pathlib_path(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path works with Path objects as arguments."""
path_obj = Path("subfolder") / "config.yaml"
result = dashboard_settings.rel_path(path_obj)
expected = str(Path(dashboard_settings.config_dir) / "subfolder" / "config.yaml")
assert result == expected
def test_rel_path_normalizes_slashes(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path normalizes path separators."""
# os.path.join normalizes slashes on Windows but preserves them on Unix
# Test that providing components separately gives same result
result1 = dashboard_settings.rel_path("folder", "subfolder", "file.yaml")
result2 = dashboard_settings.rel_path("folder", "subfolder", "file.yaml")
assert result1 == result2
# Also test that the result is as expected
expected = os.path.join(
dashboard_settings.config_dir, "folder", "subfolder", "file.yaml"
)
assert result1 == expected
def test_rel_path_handles_spaces(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path handles paths with spaces."""
result = dashboard_settings.rel_path("my folder", "my config.yaml")
expected = str(Path(dashboard_settings.config_dir) / "my folder" / "my config.yaml")
assert result == expected
def test_rel_path_handles_special_chars(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path handles paths with special characters."""
result = dashboard_settings.rel_path("device-01_test", "config.yaml")
expected = str(
Path(dashboard_settings.config_dir) / "device-01_test" / "config.yaml"
)
assert result == expected
def test_config_dir_as_path_property(dashboard_settings: DashboardSettings) -> None:
"""Test that config_dir can be accessed and used with Path operations."""
config_path = Path(dashboard_settings.config_dir)
assert config_path.exists()
assert config_path.is_dir()
assert config_path.is_absolute()
def test_absolute_config_dir_property(dashboard_settings: DashboardSettings) -> None:
"""Test absolute_config_dir is a Path object."""
assert isinstance(dashboard_settings.absolute_config_dir, Path)
assert dashboard_settings.absolute_config_dir.exists()
assert dashboard_settings.absolute_config_dir.is_dir()
assert dashboard_settings.absolute_config_dir.is_absolute()
def test_rel_path_symlink_inside_config(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path with symlink that points inside config dir."""
target = dashboard_settings.absolute_config_dir / "target.yaml"
target.touch()
symlink = dashboard_settings.absolute_config_dir / "link.yaml"
symlink.symlink_to(target)
result = dashboard_settings.rel_path("link.yaml")
expected = str(Path(dashboard_settings.config_dir) / "link.yaml")
assert result == expected
def test_rel_path_symlink_outside_config(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path with symlink that points outside config dir."""
with tempfile.NamedTemporaryFile(suffix=".yaml") as tmp:
symlink = dashboard_settings.absolute_config_dir / "external_link.yaml"
symlink.symlink_to(tmp.name)
with pytest.raises(ValueError):
dashboard_settings.rel_path("external_link.yaml")
def test_rel_path_with_none_arg(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path handles None arguments gracefully."""
result = dashboard_settings.rel_path("None")
expected = str(Path(dashboard_settings.config_dir) / "None")
assert result == expected
def test_rel_path_with_numeric_args(dashboard_settings: DashboardSettings) -> None:
"""Test rel_path handles numeric arguments."""
result = dashboard_settings.rel_path("123", "456.789")
expected = str(Path(dashboard_settings.config_dir) / "123" / "456.789")
assert result == expected

View File

@@ -589,7 +589,7 @@ async def test_archive_request_handler_post(
mock_ext_storage_path: MagicMock,
tmp_path: Path,
) -> None:
"""Test ArchiveRequestHandler.post method."""
"""Test ArchiveRequestHandler.post method without storage_json."""
# Set up temp directories
config_dir = Path(get_fixture_path("conf"))
@@ -616,6 +616,97 @@ async def test_archive_request_handler_post(
).read_text() == "esphome:\n name: test_archive\n"
@pytest.mark.asyncio
async def test_archive_handler_with_build_folder(
dashboard: DashboardTestHelper,
mock_archive_storage_path: MagicMock,
mock_ext_storage_path: MagicMock,
mock_dashboard_settings: MagicMock,
mock_storage_json: MagicMock,
tmp_path: Path,
) -> None:
"""Test ArchiveRequestHandler.post with storage_json and build folder."""
config_dir = tmp_path / "config"
config_dir.mkdir()
archive_dir = tmp_path / "archive"
archive_dir.mkdir()
build_dir = tmp_path / "build"
build_dir.mkdir()
configuration = "test_device.yaml"
test_config = config_dir / configuration
test_config.write_text("esphome:\n name: test_device\n")
build_folder = build_dir / "test_device"
build_folder.mkdir()
(build_folder / "firmware.bin").write_text("binary content")
(build_folder / ".pioenvs").mkdir()
mock_dashboard_settings.config_dir = str(config_dir)
mock_dashboard_settings.rel_path.return_value = str(test_config)
mock_archive_storage_path.return_value = str(archive_dir)
mock_storage = MagicMock()
mock_storage.name = "test_device"
mock_storage.build_path = str(build_folder)
mock_storage_json.load.return_value = mock_storage
response = await dashboard.fetch(
"/archive",
method="POST",
body=f"configuration={configuration}",
headers={"Content-Type": "application/x-www-form-urlencoded"},
)
assert response.code == 200
assert not test_config.exists()
assert (archive_dir / configuration).exists()
assert not build_folder.exists()
assert not (archive_dir / "test_device").exists()
@pytest.mark.asyncio
async def test_archive_handler_no_build_folder(
dashboard: DashboardTestHelper,
mock_archive_storage_path: MagicMock,
mock_ext_storage_path: MagicMock,
mock_dashboard_settings: MagicMock,
mock_storage_json: MagicMock,
tmp_path: Path,
) -> None:
"""Test ArchiveRequestHandler.post with storage_json but no build folder."""
config_dir = tmp_path / "config"
config_dir.mkdir()
archive_dir = tmp_path / "archive"
archive_dir.mkdir()
configuration = "test_device.yaml"
test_config = config_dir / configuration
test_config.write_text("esphome:\n name: test_device\n")
mock_dashboard_settings.config_dir = str(config_dir)
mock_dashboard_settings.rel_path.return_value = str(test_config)
mock_archive_storage_path.return_value = str(archive_dir)
mock_storage = MagicMock()
mock_storage.name = "test_device"
mock_storage.build_path = None
mock_storage_json.load.return_value = mock_storage
response = await dashboard.fetch(
"/archive",
method="POST",
body=f"configuration={configuration}",
headers={"Content-Type": "application/x-www-form-urlencoded"},
)
assert response.code == 200
assert not test_config.exists()
assert (archive_dir / configuration).exists()
assert not (archive_dir / "test_device").exists()
@pytest.mark.skipif(os.name == "nt", reason="Unix sockets are not supported on Windows")
@pytest.mark.usefixtures("mock_trash_storage_path", "mock_archive_storage_path")
def test_start_web_server_with_unix_socket(tmp_path: Path) -> None:

View File

@@ -1,230 +0,0 @@
"""Tests for dashboard web_server Path-related functionality."""
from __future__ import annotations
import gzip
import os
from pathlib import Path
from unittest.mock import MagicMock, patch
from esphome.dashboard import web_server
def test_get_base_frontend_path_production() -> None:
"""Test get_base_frontend_path in production mode."""
mock_module = MagicMock()
mock_module.where.return_value = "/usr/local/lib/esphome_dashboard"
with (
patch.dict(os.environ, {}, clear=True),
patch.dict("sys.modules", {"esphome_dashboard": mock_module}),
):
result = web_server.get_base_frontend_path()
assert result == "/usr/local/lib/esphome_dashboard"
mock_module.where.assert_called_once()
def test_get_base_frontend_path_dev_mode() -> None:
"""Test get_base_frontend_path in development mode."""
test_path = "/home/user/esphome/dashboard"
with patch.dict(os.environ, {"ESPHOME_DASHBOARD_DEV": test_path}):
result = web_server.get_base_frontend_path()
# The function uses os.path.abspath which doesn't resolve symlinks
# We need to match that behavior
# The actual function adds "/" to the path, so we simulate that
test_path_with_slash = test_path if test_path.endswith("/") else test_path + "/"
expected = os.path.abspath(
os.path.join(os.getcwd(), test_path_with_slash, "esphome_dashboard")
)
assert result == expected
def test_get_base_frontend_path_dev_mode_with_trailing_slash() -> None:
"""Test get_base_frontend_path in dev mode with trailing slash."""
test_path = "/home/user/esphome/dashboard/"
with patch.dict(os.environ, {"ESPHOME_DASHBOARD_DEV": test_path}):
result = web_server.get_base_frontend_path()
# The function uses os.path.abspath which doesn't resolve symlinks
expected = os.path.abspath(str(Path.cwd() / test_path / "esphome_dashboard"))
assert result == expected
def test_get_base_frontend_path_dev_mode_relative_path() -> None:
"""Test get_base_frontend_path with relative dev path."""
test_path = "./dashboard"
with patch.dict(os.environ, {"ESPHOME_DASHBOARD_DEV": test_path}):
result = web_server.get_base_frontend_path()
# The function uses os.path.abspath which doesn't resolve symlinks
# We need to match that behavior
# The actual function adds "/" to the path, so we simulate that
test_path_with_slash = test_path if test_path.endswith("/") else test_path + "/"
expected = os.path.abspath(
os.path.join(os.getcwd(), test_path_with_slash, "esphome_dashboard")
)
assert result == expected
assert Path(result).is_absolute()
def test_get_static_path_single_component() -> None:
"""Test get_static_path with single path component."""
with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base:
mock_base.return_value = "/base/frontend"
result = web_server.get_static_path("file.js")
assert result == os.path.join("/base/frontend", "static", "file.js")
def test_get_static_path_multiple_components() -> None:
"""Test get_static_path with multiple path components."""
with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base:
mock_base.return_value = "/base/frontend"
result = web_server.get_static_path("js", "esphome", "index.js")
assert result == os.path.join(
"/base/frontend", "static", "js", "esphome", "index.js"
)
def test_get_static_path_empty_args() -> None:
"""Test get_static_path with no arguments."""
with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base:
mock_base.return_value = "/base/frontend"
result = web_server.get_static_path()
assert result == os.path.join("/base/frontend", "static")
def test_get_static_path_with_pathlib_path() -> None:
"""Test get_static_path with Path objects."""
with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base:
mock_base.return_value = "/base/frontend"
path_obj = Path("js") / "app.js"
result = web_server.get_static_path(str(path_obj))
assert result == os.path.join("/base/frontend", "static", "js", "app.js")
def test_get_static_file_url_production() -> None:
"""Test get_static_file_url in production mode."""
web_server.get_static_file_url.cache_clear()
mock_module = MagicMock()
mock_file = MagicMock()
mock_file.read.return_value = b"test content"
mock_file.__enter__ = MagicMock(return_value=mock_file)
mock_file.__exit__ = MagicMock(return_value=None)
with (
patch.dict(os.environ, {}, clear=True),
patch.dict("sys.modules", {"esphome_dashboard": mock_module}),
patch("esphome.dashboard.web_server.get_static_path") as mock_get_path,
patch("esphome.dashboard.web_server.open", create=True, return_value=mock_file),
):
mock_get_path.return_value = "/fake/path/js/app.js"
result = web_server.get_static_file_url("js/app.js")
assert result.startswith("./static/js/app.js?hash=")
def test_get_static_file_url_dev_mode() -> None:
"""Test get_static_file_url in development mode."""
with patch.dict(os.environ, {"ESPHOME_DASHBOARD_DEV": "/dev/path"}):
web_server.get_static_file_url.cache_clear()
result = web_server.get_static_file_url("js/app.js")
assert result == "./static/js/app.js"
def test_get_static_file_url_index_js_special_case() -> None:
"""Test get_static_file_url replaces index.js with entrypoint."""
web_server.get_static_file_url.cache_clear()
mock_module = MagicMock()
mock_module.entrypoint.return_value = "main.js"
with (
patch.dict(os.environ, {}, clear=True),
patch.dict("sys.modules", {"esphome_dashboard": mock_module}),
):
result = web_server.get_static_file_url("js/esphome/index.js")
assert result == "./static/js/esphome/main.js"
def test_load_file_path(tmp_path: Path) -> None:
"""Test loading a file."""
test_file = tmp_path / "test.txt"
test_file.write_bytes(b"test content")
with open(test_file, "rb") as f:
content = f.read()
assert content == b"test content"
def test_load_file_compressed_path(tmp_path: Path) -> None:
"""Test loading a compressed file."""
test_file = tmp_path / "test.txt.gz"
with gzip.open(test_file, "wb") as gz:
gz.write(b"compressed content")
with gzip.open(test_file, "rb") as gz:
content = gz.read()
assert content == b"compressed content"
def test_path_normalization_in_static_path() -> None:
"""Test that paths are normalized correctly."""
with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base:
mock_base.return_value = "/base/frontend"
# Test with separate components
result1 = web_server.get_static_path("js", "app.js")
result2 = web_server.get_static_path("js", "app.js")
assert result1 == result2
assert result1 == os.path.join("/base/frontend", "static", "js", "app.js")
def test_windows_path_handling() -> None:
"""Test handling of Windows-style paths."""
with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base:
mock_base.return_value = r"C:\Program Files\esphome\frontend"
result = web_server.get_static_path("js", "app.js")
# os.path.join should handle this correctly on the platform
expected = os.path.join(
r"C:\Program Files\esphome\frontend", "static", "js", "app.js"
)
assert result == expected
def test_path_with_special_characters() -> None:
"""Test paths with special characters."""
with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base:
mock_base.return_value = "/base/frontend"
result = web_server.get_static_path("js-modules", "app_v1.0.js")
assert result == os.path.join(
"/base/frontend", "static", "js-modules", "app_v1.0.js"
)
def test_path_with_spaces() -> None:
"""Test paths with spaces."""
with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base:
mock_base.return_value = "/base/my frontend"
result = web_server.get_static_path("my js", "my app.js")
assert result == os.path.join(
"/base/my frontend", "static", "my js", "my app.js"
)

View File

@@ -9,8 +9,10 @@ not be part of a unit test suite.
"""
from collections.abc import Generator
from pathlib import Path
import sys
from unittest.mock import Mock, patch
import pytest
@@ -43,3 +45,45 @@ def setup_core(tmp_path: Path) -> Path:
"""Set up CORE with test paths."""
CORE.config_path = str(tmp_path / "test.yaml")
return tmp_path
@pytest.fixture
def mock_write_file_if_changed() -> Generator[Mock, None, None]:
"""Mock write_file_if_changed for storage_json."""
with patch("esphome.storage_json.write_file_if_changed") as mock:
yield mock
@pytest.fixture
def mock_copy_file_if_changed() -> Generator[Mock, None, None]:
"""Mock copy_file_if_changed for core.config."""
with patch("esphome.core.config.copy_file_if_changed") as mock:
yield mock
@pytest.fixture
def mock_run_platformio_cli() -> Generator[Mock, None, None]:
"""Mock run_platformio_cli for platformio_api."""
with patch("esphome.platformio_api.run_platformio_cli") as mock:
yield mock
@pytest.fixture
def mock_run_platformio_cli_run() -> Generator[Mock, None, None]:
"""Mock run_platformio_cli_run for platformio_api."""
with patch("esphome.platformio_api.run_platformio_cli_run") as mock:
yield mock
@pytest.fixture
def mock_decode_pc() -> Generator[Mock, None, None]:
"""Mock _decode_pc for platformio_api."""
with patch("esphome.platformio_api._decode_pc") as mock:
yield mock
@pytest.fixture
def mock_run_external_command() -> Generator[Mock, None, None]:
"""Mock run_external_command for platformio_api."""
with patch("esphome.platformio_api.run_external_command") as mock:
yield mock

View File

@@ -1,15 +1,34 @@
"""Unit tests for core config functionality including areas and devices."""
from collections.abc import Callable
import os
from pathlib import Path
import types
from typing import Any
from unittest.mock import MagicMock, Mock, patch
import pytest
from esphome import config_validation as cv, core
from esphome.const import CONF_AREA, CONF_AREAS, CONF_DEVICES
from esphome.core import config
from esphome.core.config import Area, validate_area_config
from esphome.const import (
CONF_AREA,
CONF_AREAS,
CONF_BUILD_PATH,
CONF_DEVICES,
CONF_ESPHOME,
CONF_NAME,
CONF_NAME_ADD_MAC_SUFFIX,
KEY_CORE,
)
from esphome.core import CORE, config
from esphome.core.config import (
Area,
preload_core_config,
valid_include,
valid_project_name,
validate_area_config,
validate_hostname,
)
from .common import load_config_from_fixture
@@ -245,3 +264,316 @@ def test_add_platform_defines_priority() -> None:
f"_add_platform_defines priority ({config._add_platform_defines.priority}) must be lower than "
f"globals priority ({globals_to_code.priority}) to fix issue #10431 (sensor count bug with lambdas)"
)
def test_valid_include_with_angle_brackets() -> None:
"""Test valid_include accepts angle bracket includes."""
assert valid_include("<ArduinoJson.h>") == "<ArduinoJson.h>"
def test_valid_include_with_valid_file(tmp_path: Path) -> None:
"""Test valid_include accepts valid include files."""
CORE.config_path = str(tmp_path / "test.yaml")
include_file = tmp_path / "include.h"
include_file.touch()
assert valid_include(str(include_file)) == str(include_file)
def test_valid_include_with_valid_directory(tmp_path: Path) -> None:
"""Test valid_include accepts valid directories."""
CORE.config_path = str(tmp_path / "test.yaml")
include_dir = tmp_path / "includes"
include_dir.mkdir()
assert valid_include(str(include_dir)) == str(include_dir)
def test_valid_include_invalid_extension(tmp_path: Path) -> None:
"""Test valid_include rejects files with invalid extensions."""
CORE.config_path = str(tmp_path / "test.yaml")
invalid_file = tmp_path / "file.txt"
invalid_file.touch()
with pytest.raises(cv.Invalid, match="Include has invalid file extension"):
valid_include(str(invalid_file))
def test_valid_project_name_valid() -> None:
"""Test valid_project_name accepts valid project names."""
assert valid_project_name("esphome.my_project") == "esphome.my_project"
def test_valid_project_name_no_namespace() -> None:
"""Test valid_project_name rejects names without namespace."""
with pytest.raises(cv.Invalid, match="project name needs to have a namespace"):
valid_project_name("my_project")
def test_valid_project_name_multiple_dots() -> None:
"""Test valid_project_name rejects names with multiple dots."""
with pytest.raises(cv.Invalid, match="project name needs to have a namespace"):
valid_project_name("esphome.my.project")
def test_validate_hostname_valid() -> None:
"""Test validate_hostname accepts valid hostnames."""
config = {CONF_NAME: "my-device", CONF_NAME_ADD_MAC_SUFFIX: False}
assert validate_hostname(config) == config
def test_validate_hostname_too_long() -> None:
"""Test validate_hostname rejects hostnames that are too long."""
config = {
CONF_NAME: "a" * 32, # 32 chars, max is 31
CONF_NAME_ADD_MAC_SUFFIX: False,
}
with pytest.raises(cv.Invalid, match="Hostnames can only be 31 characters long"):
validate_hostname(config)
def test_validate_hostname_too_long_with_mac_suffix() -> None:
"""Test validate_hostname accounts for MAC suffix length."""
config = {
CONF_NAME: "a" * 25, # 25 chars, max is 24 with MAC suffix
CONF_NAME_ADD_MAC_SUFFIX: True,
}
with pytest.raises(cv.Invalid, match="Hostnames can only be 24 characters long"):
validate_hostname(config)
def test_validate_hostname_with_underscore(caplog) -> None:
"""Test validate_hostname warns about underscores."""
config = {CONF_NAME: "my_device", CONF_NAME_ADD_MAC_SUFFIX: False}
assert validate_hostname(config) == config
assert (
"Using the '_' (underscore) character in the hostname is discouraged"
in caplog.text
)
def test_preload_core_config_basic(setup_core: Path) -> None:
"""Test preload_core_config sets basic CORE attributes."""
config = {
CONF_ESPHOME: {
CONF_NAME: "test_device",
},
"esp32": {},
}
result = {}
platform = preload_core_config(config, result)
assert CORE.name == "test_device"
assert platform == "esp32"
assert KEY_CORE in CORE.data
assert CONF_BUILD_PATH in config[CONF_ESPHOME]
# Verify default build path is "build/<device_name>"
build_path = config[CONF_ESPHOME][CONF_BUILD_PATH]
assert build_path.endswith(os.path.join("build", "test_device"))
def test_preload_core_config_with_build_path(setup_core: Path) -> None:
"""Test preload_core_config uses provided build path."""
config = {
CONF_ESPHOME: {
CONF_NAME: "test_device",
CONF_BUILD_PATH: "/custom/build/path",
},
"esp8266": {},
}
result = {}
platform = preload_core_config(config, result)
assert config[CONF_ESPHOME][CONF_BUILD_PATH] == "/custom/build/path"
assert platform == "esp8266"
def test_preload_core_config_env_build_path(setup_core: Path) -> None:
"""Test preload_core_config uses ESPHOME_BUILD_PATH env var."""
config = {
CONF_ESPHOME: {
CONF_NAME: "test_device",
},
"rp2040": {},
}
result = {}
with patch.dict(os.environ, {"ESPHOME_BUILD_PATH": "/env/build"}):
platform = preload_core_config(config, result)
assert CONF_BUILD_PATH in config[CONF_ESPHOME]
assert "test_device" in config[CONF_ESPHOME][CONF_BUILD_PATH]
# Verify it uses the env var path with device name appended
build_path = config[CONF_ESPHOME][CONF_BUILD_PATH]
expected_path = os.path.join("/env/build", "test_device")
assert build_path == expected_path or build_path == expected_path.replace(
"/", os.sep
)
assert platform == "rp2040"
def test_preload_core_config_no_platform(setup_core: Path) -> None:
"""Test preload_core_config raises when no platform is specified."""
config = {
CONF_ESPHOME: {
CONF_NAME: "test_device",
},
}
result = {}
# Mock _is_target_platform to avoid expensive component loading
with patch("esphome.core.config._is_target_platform") as mock_is_platform:
# Return True for known platforms
mock_is_platform.side_effect = lambda name: name in [
"esp32",
"esp8266",
"rp2040",
]
with pytest.raises(cv.Invalid, match="Platform missing"):
preload_core_config(config, result)
def test_preload_core_config_multiple_platforms(setup_core: Path) -> None:
"""Test preload_core_config raises when multiple platforms are specified."""
config = {
CONF_ESPHOME: {
CONF_NAME: "test_device",
},
"esp32": {},
"esp8266": {},
}
result = {}
# Mock _is_target_platform to avoid expensive component loading
with patch("esphome.core.config._is_target_platform") as mock_is_platform:
# Return True for known platforms
mock_is_platform.side_effect = lambda name: name in [
"esp32",
"esp8266",
"rp2040",
]
with pytest.raises(cv.Invalid, match="Found multiple target platform blocks"):
preload_core_config(config, result)
def test_include_file_header(tmp_path: Path, mock_copy_file_if_changed: Mock) -> None:
"""Test include_file adds include statement for header files."""
src_file = tmp_path / "source.h"
src_file.write_text("// Header content")
CORE.build_path = str(tmp_path / "build")
with patch("esphome.core.config.cg") as mock_cg:
# Mock RawStatement to capture the text
mock_raw_statement = MagicMock()
mock_raw_statement.text = ""
def raw_statement_side_effect(text):
mock_raw_statement.text = text
return mock_raw_statement
mock_cg.RawStatement.side_effect = raw_statement_side_effect
config.include_file(str(src_file), "test.h")
mock_copy_file_if_changed.assert_called_once()
mock_cg.add_global.assert_called_once()
# Check that include statement was added
assert '#include "test.h"' in mock_raw_statement.text
def test_include_file_cpp(tmp_path: Path, mock_copy_file_if_changed: Mock) -> None:
"""Test include_file does not add include for cpp files."""
src_file = tmp_path / "source.cpp"
src_file.write_text("// CPP content")
CORE.build_path = str(tmp_path / "build")
with patch("esphome.core.config.cg") as mock_cg:
config.include_file(str(src_file), "test.cpp")
mock_copy_file_if_changed.assert_called_once()
# Should not add include statement for .cpp files
mock_cg.add_global.assert_not_called()
def test_get_usable_cpu_count() -> None:
"""Test get_usable_cpu_count returns CPU count."""
count = config.get_usable_cpu_count()
assert isinstance(count, int)
assert count > 0
def test_get_usable_cpu_count_with_process_cpu_count() -> None:
"""Test get_usable_cpu_count uses process_cpu_count when available."""
# Test with process_cpu_count (Python 3.13+)
# Create a mock os module with process_cpu_count
mock_os = types.SimpleNamespace(process_cpu_count=lambda: 8, cpu_count=lambda: 4)
with patch("esphome.core.config.os", mock_os):
# When process_cpu_count exists, it should be used
count = config.get_usable_cpu_count()
assert count == 8
# Test fallback to cpu_count when process_cpu_count not available
mock_os_no_process = types.SimpleNamespace(cpu_count=lambda: 4)
with patch("esphome.core.config.os", mock_os_no_process):
count = config.get_usable_cpu_count()
assert count == 4
def test_list_target_platforms(tmp_path: Path) -> None:
"""Test _list_target_platforms returns available platforms."""
# Create mock components directory structure
components_dir = tmp_path / "components"
components_dir.mkdir()
# Create platform and non-platform directories with __init__.py
platforms = ["esp32", "esp8266", "rp2040", "libretiny", "host"]
non_platforms = ["sensor"]
for component in platforms + non_platforms:
component_dir = components_dir / component
component_dir.mkdir()
(component_dir / "__init__.py").touch()
# Create a file (not a directory)
(components_dir / "README.md").touch()
# Create a directory without __init__.py
(components_dir / "no_init").mkdir()
# Mock Path(__file__).parents[1] to return our tmp_path
with patch("esphome.core.config.Path") as mock_path:
mock_file_path = MagicMock()
mock_file_path.parents = [MagicMock(), tmp_path]
mock_path.return_value = mock_file_path
platforms = config._list_target_platforms()
assert isinstance(platforms, list)
# Should include platform components
assert "esp32" in platforms
assert "esp8266" in platforms
assert "rp2040" in platforms
assert "libretiny" in platforms
assert "host" in platforms
# Should not include non-platform components
assert "sensor" not in platforms
assert "README.md" not in platforms
assert "no_init" not in platforms
def test_is_target_platform() -> None:
"""Test _is_target_platform identifies valid platforms."""
assert config._is_target_platform("esp32") is True
assert config._is_target_platform("esp8266") is True
assert config._is_target_platform("rp2040") is True
assert config._is_target_platform("invalid_platform") is False
assert config._is_target_platform("api") is False # Component but not platform

View File

@@ -1,3 +0,0 @@
# This file should be ignored
platform: template
name: "Hidden Sensor"

View File

@@ -1 +0,0 @@
This is not a YAML file and should be ignored

View File

@@ -1,4 +0,0 @@
platform: template
name: "Sensor 1"
lambda: |-
return 42.0;

View File

@@ -1,4 +0,0 @@
platform: template
name: "Sensor 2"
lambda: |-
return 100.0;

View File

@@ -1,4 +0,0 @@
platform: template
name: "Sensor 3 in subdir"
lambda: |-
return 200.0;

View File

@@ -1,4 +0,0 @@
test_secret: "my_secret_value"
another_secret: "another_value"
wifi_password: "super_secret_wifi"
api_key: "0123456789abcdef"

View File

@@ -1,17 +0,0 @@
esphome:
name: test_device
platform: ESP32
board: esp32dev
wifi:
ssid: "TestNetwork"
password: !secret wifi_password
api:
encryption:
key: !secret api_key
sensor:
- platform: template
name: "Test Sensor"
id: !secret test_secret

View File

@@ -1,10 +1,16 @@
"""Tests for platformio_api.py path functions."""
import json
import os
from pathlib import Path
from unittest.mock import patch
import shutil
from types import SimpleNamespace
from unittest.mock import MagicMock, Mock, patch
import pytest
from esphome import platformio_api
from esphome.core import CORE
from esphome.core import CORE, EsphomeError
def test_idedata_firmware_elf_path(setup_core: Path) -> None:
@@ -104,7 +110,9 @@ def test_flash_image_dataclass() -> None:
assert image.offset == "0x10000"
def test_load_idedata_returns_dict(setup_core: Path) -> None:
def test_load_idedata_returns_dict(
setup_core: Path, mock_run_platformio_cli_run
) -> None:
"""Test _load_idedata returns parsed idedata dict when successful."""
CORE.build_path = str(setup_core / "build" / "test")
CORE.name = "test"
@@ -118,12 +126,511 @@ def test_load_idedata_returns_dict(setup_core: Path) -> None:
idedata_path.parent.mkdir(parents=True, exist_ok=True)
idedata_path.write_text('{"prog_path": "/test/firmware.elf"}')
with patch("esphome.platformio_api.run_platformio_cli_run") as mock_run:
mock_run.return_value = '{"prog_path": "/test/firmware.elf"}'
mock_run_platformio_cli_run.return_value = '{"prog_path": "/test/firmware.elf"}'
config = {"name": "test"}
result = platformio_api._load_idedata(config)
config = {"name": "test"}
result = platformio_api._load_idedata(config)
assert result is not None
assert isinstance(result, dict)
assert result["prog_path"] == "/test/firmware.elf"
def test_load_idedata_uses_cache_when_valid(
setup_core: Path, mock_run_platformio_cli_run: Mock
) -> None:
"""Test _load_idedata uses cached data when unchanged."""
CORE.build_path = str(setup_core / "build" / "test")
CORE.name = "test"
# Create platformio.ini
platformio_ini = setup_core / "build" / "test" / "platformio.ini"
platformio_ini.parent.mkdir(parents=True, exist_ok=True)
platformio_ini.write_text("content")
# Create idedata cache file that's newer
idedata_path = setup_core / ".esphome" / "idedata" / "test.json"
idedata_path.parent.mkdir(parents=True, exist_ok=True)
idedata_path.write_text('{"prog_path": "/cached/firmware.elf"}')
# Make idedata newer than platformio.ini
platformio_ini_mtime = platformio_ini.stat().st_mtime
os.utime(idedata_path, (platformio_ini_mtime + 1, platformio_ini_mtime + 1))
config = {"name": "test"}
result = platformio_api._load_idedata(config)
# Should not call _run_idedata since cache is valid
mock_run_platformio_cli_run.assert_not_called()
assert result["prog_path"] == "/cached/firmware.elf"
def test_load_idedata_regenerates_when_platformio_ini_newer(
setup_core: Path, mock_run_platformio_cli_run: Mock
) -> None:
"""Test _load_idedata regenerates when platformio.ini is newer."""
CORE.build_path = str(setup_core / "build" / "test")
CORE.name = "test"
# Create idedata cache file first
idedata_path = setup_core / ".esphome" / "idedata" / "test.json"
idedata_path.parent.mkdir(parents=True, exist_ok=True)
idedata_path.write_text('{"prog_path": "/old/firmware.elf"}')
# Create platformio.ini that's newer
idedata_mtime = idedata_path.stat().st_mtime
platformio_ini = setup_core / "build" / "test" / "platformio.ini"
platformio_ini.parent.mkdir(parents=True, exist_ok=True)
platformio_ini.write_text("content")
# Make platformio.ini newer than idedata
os.utime(platformio_ini, (idedata_mtime + 1, idedata_mtime + 1))
# Mock platformio to return new data
new_data = {"prog_path": "/new/firmware.elf"}
mock_run_platformio_cli_run.return_value = json.dumps(new_data)
config = {"name": "test"}
result = platformio_api._load_idedata(config)
# Should call _run_idedata since platformio.ini is newer
mock_run_platformio_cli_run.assert_called_once()
assert result["prog_path"] == "/new/firmware.elf"
def test_load_idedata_regenerates_on_corrupted_cache(
setup_core: Path, mock_run_platformio_cli_run: Mock
) -> None:
"""Test _load_idedata regenerates when cache file is corrupted."""
CORE.build_path = str(setup_core / "build" / "test")
CORE.name = "test"
# Create platformio.ini
platformio_ini = setup_core / "build" / "test" / "platformio.ini"
platformio_ini.parent.mkdir(parents=True, exist_ok=True)
platformio_ini.write_text("content")
# Create corrupted idedata cache file
idedata_path = setup_core / ".esphome" / "idedata" / "test.json"
idedata_path.parent.mkdir(parents=True, exist_ok=True)
idedata_path.write_text('{"prog_path": invalid json')
# Make idedata newer so it would be used if valid
platformio_ini_mtime = platformio_ini.stat().st_mtime
os.utime(idedata_path, (platformio_ini_mtime + 1, platformio_ini_mtime + 1))
# Mock platformio to return new data
new_data = {"prog_path": "/new/firmware.elf"}
mock_run_platformio_cli_run.return_value = json.dumps(new_data)
config = {"name": "test"}
result = platformio_api._load_idedata(config)
# Should call _run_idedata since cache is corrupted
mock_run_platformio_cli_run.assert_called_once()
assert result["prog_path"] == "/new/firmware.elf"
def test_run_idedata_parses_json_from_output(
setup_core: Path, mock_run_platformio_cli_run: Mock
) -> None:
"""Test _run_idedata extracts JSON from platformio output."""
config = {"name": "test"}
expected_data = {
"prog_path": "/path/to/firmware.elf",
"cc_path": "/path/to/gcc",
"extra": {"flash_images": []},
}
# Simulate platformio output with JSON embedded
mock_run_platformio_cli_run.return_value = (
f"Some preamble\n{json.dumps(expected_data)}\nSome postamble"
)
result = platformio_api._run_idedata(config)
assert result == expected_data
def test_run_idedata_raises_on_no_json(
setup_core: Path, mock_run_platformio_cli_run: Mock
) -> None:
"""Test _run_idedata raises EsphomeError when no JSON found."""
config = {"name": "test"}
mock_run_platformio_cli_run.return_value = "No JSON in this output"
with pytest.raises(EsphomeError):
platformio_api._run_idedata(config)
def test_run_idedata_raises_on_invalid_json(
setup_core: Path, mock_run_platformio_cli_run: Mock
) -> None:
"""Test _run_idedata raises on malformed JSON."""
config = {"name": "test"}
mock_run_platformio_cli_run.return_value = '{"invalid": json"}'
# The ValueError from json.loads is re-raised
with pytest.raises(ValueError):
platformio_api._run_idedata(config)
def test_run_platformio_cli_sets_environment_variables(
setup_core: Path, mock_run_external_command: Mock
) -> None:
"""Test run_platformio_cli sets correct environment variables."""
CORE.build_path = str(setup_core / "build" / "test")
with patch.dict(os.environ, {}, clear=False):
mock_run_external_command.return_value = 0
platformio_api.run_platformio_cli("test", "arg")
# Check environment variables were set
assert os.environ["PLATFORMIO_FORCE_COLOR"] == "true"
assert (
setup_core / "build" / "test"
in Path(os.environ["PLATFORMIO_BUILD_DIR"]).parents
or Path(os.environ["PLATFORMIO_BUILD_DIR"]) == setup_core / "build" / "test"
)
assert "PLATFORMIO_LIBDEPS_DIR" in os.environ
assert "PYTHONWARNINGS" in os.environ
# Check command was called correctly
mock_run_external_command.assert_called_once()
args = mock_run_external_command.call_args[0]
assert "platformio" in args
assert "test" in args
assert "arg" in args
def test_run_platformio_cli_run_builds_command(
setup_core: Path, mock_run_platformio_cli: Mock
) -> None:
"""Test run_platformio_cli_run builds correct command."""
CORE.build_path = str(setup_core / "build" / "test")
mock_run_platformio_cli.return_value = 0
config = {"name": "test"}
platformio_api.run_platformio_cli_run(config, True, "extra", "args")
mock_run_platformio_cli.assert_called_once_with(
"run", "-d", CORE.build_path, "-v", "extra", "args"
)
def test_run_compile(setup_core: Path, mock_run_platformio_cli_run: Mock) -> None:
"""Test run_compile with process limit."""
from esphome.const import CONF_COMPILE_PROCESS_LIMIT, CONF_ESPHOME
CORE.build_path = str(setup_core / "build" / "test")
config = {CONF_ESPHOME: {CONF_COMPILE_PROCESS_LIMIT: 4}}
mock_run_platformio_cli_run.return_value = 0
platformio_api.run_compile(config, verbose=True)
mock_run_platformio_cli_run.assert_called_once_with(config, True, "-j4")
def test_get_idedata_caches_result(
setup_core: Path, mock_run_platformio_cli_run: Mock
) -> None:
"""Test get_idedata caches result in CORE.data."""
from esphome.const import KEY_CORE
CORE.build_path = str(setup_core / "build" / "test")
CORE.name = "test"
CORE.data[KEY_CORE] = {}
# Create platformio.ini to avoid regeneration
platformio_ini = setup_core / "build" / "test" / "platformio.ini"
platformio_ini.parent.mkdir(parents=True, exist_ok=True)
platformio_ini.write_text("content")
# Mock platformio to return data
idedata = {"prog_path": "/test/firmware.elf"}
mock_run_platformio_cli_run.return_value = json.dumps(idedata)
config = {"name": "test"}
# First call should load and cache
result1 = platformio_api.get_idedata(config)
mock_run_platformio_cli_run.assert_called_once()
# Second call should use cache from CORE.data
result2 = platformio_api.get_idedata(config)
mock_run_platformio_cli_run.assert_called_once() # Still only called once
assert result1 is result2
assert isinstance(result1, platformio_api.IDEData)
assert result1.firmware_elf_path == "/test/firmware.elf"
def test_idedata_addr2line_path_windows(setup_core: Path) -> None:
"""Test IDEData.addr2line_path on Windows."""
raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "C:\\tools\\gcc.exe"}
idedata = platformio_api.IDEData(raw_data)
result = idedata.addr2line_path
assert result == "C:\\tools\\addr2line.exe"
def test_idedata_addr2line_path_unix(setup_core: Path) -> None:
"""Test IDEData.addr2line_path on Unix."""
raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "/usr/bin/gcc"}
idedata = platformio_api.IDEData(raw_data)
result = idedata.addr2line_path
assert result == "/usr/bin/addr2line"
def test_patch_structhash(setup_core: Path) -> None:
"""Test patch_structhash monkey patches platformio functions."""
# Create simple namespace objects to act as modules
mock_cli = SimpleNamespace()
mock_helpers = SimpleNamespace()
mock_run = SimpleNamespace(cli=mock_cli, helpers=mock_helpers)
# Mock platformio modules
with patch.dict(
"sys.modules",
{
"platformio.run.cli": mock_cli,
"platformio.run.helpers": mock_helpers,
"platformio.run": mock_run,
"platformio.project.helpers": MagicMock(),
"platformio.fs": MagicMock(),
"platformio": MagicMock(),
},
):
# Call patch_structhash
platformio_api.patch_structhash()
# Verify both modules had clean_build_dir patched
# Check that clean_build_dir was set on both modules
assert hasattr(mock_cli, "clean_build_dir")
assert hasattr(mock_helpers, "clean_build_dir")
# Verify they got the same function assigned
assert mock_cli.clean_build_dir is mock_helpers.clean_build_dir
# Verify it's a real function (not a Mock)
assert callable(mock_cli.clean_build_dir)
assert mock_cli.clean_build_dir.__name__ == "patched_clean_build_dir"
def test_patched_clean_build_dir_removes_outdated(setup_core: Path) -> None:
"""Test patched_clean_build_dir removes build dir when platformio.ini is newer."""
build_dir = setup_core / "build"
build_dir.mkdir()
platformio_ini = setup_core / "platformio.ini"
platformio_ini.write_text("config")
# Make platformio.ini newer than build_dir
build_mtime = build_dir.stat().st_mtime
os.utime(platformio_ini, (build_mtime + 1, build_mtime + 1))
# Track if directory was removed
removed_paths: list[str] = []
def track_rmtree(path: str) -> None:
removed_paths.append(path)
shutil.rmtree(path)
# Create mock modules that patch_structhash expects
mock_cli = SimpleNamespace()
mock_helpers = SimpleNamespace()
mock_project_helpers = MagicMock()
mock_project_helpers.get_project_dir.return_value = str(setup_core)
mock_fs = SimpleNamespace(rmtree=track_rmtree)
with patch.dict(
"sys.modules",
{
"platformio": SimpleNamespace(fs=mock_fs),
"platformio.fs": mock_fs,
"platformio.project.helpers": mock_project_helpers,
"platformio.run": SimpleNamespace(cli=mock_cli, helpers=mock_helpers),
"platformio.run.cli": mock_cli,
"platformio.run.helpers": mock_helpers,
},
):
# Call patch_structhash to install the patched function
platformio_api.patch_structhash()
# Call the patched function
mock_helpers.clean_build_dir(str(build_dir), [])
# Verify directory was removed and recreated
assert len(removed_paths) == 1
assert removed_paths[0] == str(build_dir)
assert build_dir.exists() # makedirs recreated it
def test_patched_clean_build_dir_keeps_updated(setup_core: Path) -> None:
"""Test patched_clean_build_dir keeps build dir when it's up to date."""
build_dir = setup_core / "build"
build_dir.mkdir()
test_file = build_dir / "test.txt"
test_file.write_text("test content")
platformio_ini = setup_core / "platformio.ini"
platformio_ini.write_text("config")
# Make build_dir newer than platformio.ini
ini_mtime = platformio_ini.stat().st_mtime
os.utime(build_dir, (ini_mtime + 1, ini_mtime + 1))
# Track if rmtree is called
removed_paths: list[str] = []
def track_rmtree(path: str) -> None:
removed_paths.append(path)
# Create mock modules
mock_cli = SimpleNamespace()
mock_helpers = SimpleNamespace()
mock_project_helpers = MagicMock()
mock_project_helpers.get_project_dir.return_value = str(setup_core)
mock_fs = SimpleNamespace(rmtree=track_rmtree)
with patch.dict(
"sys.modules",
{
"platformio": SimpleNamespace(fs=mock_fs),
"platformio.fs": mock_fs,
"platformio.project.helpers": mock_project_helpers,
"platformio.run": SimpleNamespace(cli=mock_cli, helpers=mock_helpers),
"platformio.run.cli": mock_cli,
"platformio.run.helpers": mock_helpers,
},
):
# Call patch_structhash to install the patched function
platformio_api.patch_structhash()
# Call the patched function
mock_helpers.clean_build_dir(str(build_dir), [])
# Verify rmtree was NOT called
assert len(removed_paths) == 0
# Verify directory and file still exist
assert build_dir.exists()
assert test_file.exists()
assert test_file.read_text() == "test content"
def test_patched_clean_build_dir_creates_missing(setup_core: Path) -> None:
"""Test patched_clean_build_dir creates build dir when it doesn't exist."""
build_dir = setup_core / "build"
platformio_ini = setup_core / "platformio.ini"
platformio_ini.write_text("config")
# Ensure build_dir doesn't exist
assert not build_dir.exists()
# Track if rmtree is called
removed_paths: list[str] = []
def track_rmtree(path: str) -> None:
removed_paths.append(path)
# Create mock modules
mock_cli = SimpleNamespace()
mock_helpers = SimpleNamespace()
mock_project_helpers = MagicMock()
mock_project_helpers.get_project_dir.return_value = str(setup_core)
mock_fs = SimpleNamespace(rmtree=track_rmtree)
with patch.dict(
"sys.modules",
{
"platformio": SimpleNamespace(fs=mock_fs),
"platformio.fs": mock_fs,
"platformio.project.helpers": mock_project_helpers,
"platformio.run": SimpleNamespace(cli=mock_cli, helpers=mock_helpers),
"platformio.run.cli": mock_cli,
"platformio.run.helpers": mock_helpers,
},
):
# Call patch_structhash to install the patched function
platformio_api.patch_structhash()
# Call the patched function
mock_helpers.clean_build_dir(str(build_dir), [])
# Verify rmtree was NOT called
assert len(removed_paths) == 0
# Verify directory was created
assert build_dir.exists()
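# Minimal sketch (an assumption inferred from the three tests above, not copied
# from platformio_api.py) of the behaviour the patched clean_build_dir is
# expected to have: rebuild only when platformio.ini is newer than the build
# directory, and create the directory when it is missing.
import os
import shutil
from os.path import getmtime, isdir, join


def sketch_clean_build_dir(build_dir: str, project_dir: str) -> None:
    platformio_ini = join(project_dir, "platformio.ini")
    if not isdir(build_dir):
        os.makedirs(build_dir)  # missing build dir: create it, never call rmtree
    elif getmtime(platformio_ini) > getmtime(build_dir):
        shutil.rmtree(build_dir)  # platformio.ini changed after the build dir...
        os.makedirs(build_dir)  # ...so wipe it and recreate an empty one
    # otherwise the build dir is up to date and is left untouched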
def test_process_stacktrace_esp8266_exception(setup_core: Path, caplog) -> None:
"""Test process_stacktrace handles ESP8266 exceptions."""
config = {"name": "test"}
# Test exception type parsing
line = "Exception (28):"
backtrace_state = False
result = platformio_api.process_stacktrace(config, line, backtrace_state)
assert "Access to invalid address: LOAD (wild pointer?)" in caplog.text
assert result is False
def test_process_stacktrace_esp8266_backtrace(
setup_core: Path, mock_decode_pc: Mock
) -> None:
"""Test process_stacktrace handles ESP8266 multi-line backtrace."""
config = {"name": "test"}
# Start of backtrace
line1 = ">>>stack>>>"
state = platformio_api.process_stacktrace(config, line1, False)
assert state is True
# Backtrace content with addresses
line2 = "40201234 40205678"
state = platformio_api.process_stacktrace(config, line2, state)
assert state is True
assert mock_decode_pc.call_count == 2
# End of backtrace
line3 = "<<<stack<<<"
state = platformio_api.process_stacktrace(config, line3, state)
assert state is False
def test_process_stacktrace_esp32_backtrace(
setup_core: Path, mock_decode_pc: Mock
) -> None:
"""Test process_stacktrace handles ESP32 single-line backtrace."""
config = {"name": "test"}
line = "Backtrace: 0x40081234:0x3ffb1234 0x40085678:0x3ffb5678"
state = platformio_api.process_stacktrace(config, line, False)
# Should decode both addresses
assert mock_decode_pc.call_count == 2
mock_decode_pc.assert_any_call(config, "40081234")
mock_decode_pc.assert_any_call(config, "40085678")
assert state is False
def test_process_stacktrace_bad_alloc(
setup_core: Path, mock_decode_pc: Mock, caplog
) -> None:
"""Test process_stacktrace handles bad alloc messages."""
config = {"name": "test"}
line = "last failed alloc call: 40201234(512)"
state = platformio_api.process_stacktrace(config, line, False)
assert "Memory allocation of 512 bytes failed at 40201234" in caplog.text
mock_decode_pc.assert_called_once_with(config, "40201234")
assert state is False
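# Hypothetical helper (illustration only) showing the kind of extraction the
# ESP32 backtrace test above relies on: take the PC half of each PC:SP pair.
import re

ESP32_BACKTRACE_RE = re.compile(r"0x([0-9a-fA-F]{8}):0x[0-9a-fA-F]{8}")


def extract_esp32_pcs(line: str) -> list[str]:
    return ESP32_BACKTRACE_RE.findall(line)


assert extract_esp32_pcs(
    "Backtrace: 0x40081234:0x3ffb1234 0x40085678:0x3ffb5678"
) == ["40081234", "40085678"]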


@@ -1,12 +1,15 @@
"""Tests for storage_json.py path functions."""
from datetime import datetime
import json
from pathlib import Path
import sys
from unittest.mock import patch
from unittest.mock import MagicMock, Mock, patch
import pytest
from esphome import storage_json
from esphome.const import CONF_DISABLED, CONF_MDNS
from esphome.core import CORE
@@ -115,7 +118,9 @@ def test_storage_json_firmware_bin_path_property(setup_core: Path) -> None:
assert storage.firmware_bin_path == "/path/to/firmware.bin"
def test_storage_json_save_creates_directory(setup_core: Path, tmp_path: Path) -> None:
def test_storage_json_save_creates_directory(
setup_core: Path, tmp_path: Path, mock_write_file_if_changed: Mock
) -> None:
"""Test StorageJSON.save creates storage directory if it doesn't exist."""
storage_dir = tmp_path / "new_data" / "storage"
storage_file = storage_dir / "test.json"
@@ -139,11 +144,10 @@ def test_storage_json_save_creates_directory(setup_core: Path, tmp_path: Path) -
no_mdns=False,
)
with patch("esphome.storage_json.write_file_if_changed") as mock_write:
storage.save(str(storage_file))
mock_write.assert_called_once()
call_args = mock_write.call_args[0]
assert call_args[0] == str(storage_file)
storage.save(str(storage_file))
mock_write_file_if_changed.assert_called_once()
call_args = mock_write_file_if_changed.call_args[0]
assert call_args[0] == str(storage_file)
def test_storage_json_from_wizard(setup_core: Path) -> None:
@@ -180,3 +184,477 @@ def test_storage_paths_with_ha_addon(mock_is_ha_addon: bool, tmp_path: Path) ->
result = storage_json.esphome_storage_path()
expected = str(Path("/data") / "esphome.json")
assert result == expected
def test_storage_json_as_dict() -> None:
"""Test StorageJSON.as_dict returns correct dictionary."""
storage = storage_json.StorageJSON(
storage_version=1,
name="test_device",
friendly_name="Test Device",
comment="Test comment",
esphome_version="2024.1.0",
src_version=1,
address="192.168.1.100",
web_port=80,
target_platform="ESP32",
build_path="/path/to/build",
firmware_bin_path="/path/to/firmware.bin",
loaded_integrations={"wifi", "api", "ota"},
loaded_platforms={"sensor", "binary_sensor"},
no_mdns=True,
framework="arduino",
core_platform="esp32",
)
result = storage.as_dict()
assert result["storage_version"] == 1
assert result["name"] == "test_device"
assert result["friendly_name"] == "Test Device"
assert result["comment"] == "Test comment"
assert result["esphome_version"] == "2024.1.0"
assert result["src_version"] == 1
assert result["address"] == "192.168.1.100"
assert result["web_port"] == 80
assert result["esp_platform"] == "ESP32"
assert result["build_path"] == "/path/to/build"
assert result["firmware_bin_path"] == "/path/to/firmware.bin"
assert "api" in result["loaded_integrations"]
assert "wifi" in result["loaded_integrations"]
assert "ota" in result["loaded_integrations"]
assert result["loaded_integrations"] == sorted(
["wifi", "api", "ota"]
) # Should be sorted
assert "sensor" in result["loaded_platforms"]
assert result["loaded_platforms"] == sorted(
["sensor", "binary_sensor"]
) # Should be sorted
assert result["no_mdns"] is True
assert result["framework"] == "arduino"
assert result["core_platform"] == "esp32"
def test_storage_json_to_json() -> None:
"""Test StorageJSON.to_json returns valid JSON string."""
storage = storage_json.StorageJSON(
storage_version=1,
name="test",
friendly_name="Test",
comment=None,
esphome_version="2024.1.0",
src_version=None,
address="test.local",
web_port=None,
target_platform="ESP8266",
build_path=None,
firmware_bin_path=None,
loaded_integrations=set(),
loaded_platforms=set(),
no_mdns=False,
)
json_str = storage.to_json()
# Should be valid JSON
parsed = json.loads(json_str)
assert parsed["name"] == "test"
assert parsed["storage_version"] == 1
# Should end with newline
assert json_str.endswith("\n")
def test_storage_json_save(tmp_path: Path) -> None:
"""Test StorageJSON.save writes file correctly."""
storage = storage_json.StorageJSON(
storage_version=1,
name="test",
friendly_name="Test",
comment=None,
esphome_version="2024.1.0",
src_version=None,
address="test.local",
web_port=None,
target_platform="ESP32",
build_path=None,
firmware_bin_path=None,
loaded_integrations=set(),
loaded_platforms=set(),
no_mdns=False,
)
save_path = tmp_path / "test.json"
with patch("esphome.storage_json.write_file_if_changed") as mock_write:
storage.save(str(save_path))
mock_write.assert_called_once_with(str(save_path), storage.to_json())
def test_storage_json_from_esphome_core(setup_core: Path) -> None:
"""Test StorageJSON.from_esphome_core creates correct storage object."""
# Mock CORE object
mock_core = MagicMock()
mock_core.name = "my_device"
mock_core.friendly_name = "My Device"
mock_core.comment = "A test device"
mock_core.address = "192.168.1.50"
mock_core.web_port = 8080
mock_core.target_platform = "esp32"
mock_core.is_esp32 = True
mock_core.build_path = "/build/my_device"
mock_core.firmware_bin = "/build/my_device/firmware.bin"
mock_core.loaded_integrations = {"wifi", "api"}
mock_core.loaded_platforms = {"sensor"}
mock_core.config = {CONF_MDNS: {CONF_DISABLED: True}}
mock_core.target_framework = "esp-idf"
with patch("esphome.components.esp32.get_esp32_variant") as mock_variant:
mock_variant.return_value = "ESP32-C3"
result = storage_json.StorageJSON.from_esphome_core(mock_core, old=None)
assert result.name == "my_device"
assert result.friendly_name == "My Device"
assert result.comment == "A test device"
assert result.address == "192.168.1.50"
assert result.web_port == 8080
assert result.target_platform == "ESP32-C3"
assert result.build_path == "/build/my_device"
assert result.firmware_bin_path == "/build/my_device/firmware.bin"
assert result.loaded_integrations == {"wifi", "api"}
assert result.loaded_platforms == {"sensor"}
assert result.no_mdns is True
assert result.framework == "esp-idf"
assert result.core_platform == "esp32"
def test_storage_json_from_esphome_core_mdns_enabled(setup_core: Path) -> None:
"""Test from_esphome_core with mDNS enabled."""
mock_core = MagicMock()
mock_core.name = "test"
mock_core.friendly_name = "Test"
mock_core.comment = None
mock_core.address = "test.local"
mock_core.web_port = None
mock_core.target_platform = "esp8266"
mock_core.is_esp32 = False
mock_core.build_path = "/build"
mock_core.firmware_bin = "/build/firmware.bin"
mock_core.loaded_integrations = set()
mock_core.loaded_platforms = set()
mock_core.config = {} # No MDNS config means enabled
mock_core.target_framework = "arduino"
result = storage_json.StorageJSON.from_esphome_core(mock_core, old=None)
assert result.no_mdns is False
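# Illustration only (not from_esphome_core itself): the mDNS flag derivation
# these two tests assume -- no_mdns is True only when the config explicitly
# disables mdns.
from esphome.const import CONF_DISABLED, CONF_MDNS


def sketch_no_mdns(config: dict) -> bool:
    return bool(config.get(CONF_MDNS, {}).get(CONF_DISABLED, False))


assert sketch_no_mdns({CONF_MDNS: {CONF_DISABLED: True}}) is True
assert sketch_no_mdns({}) is False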
def test_storage_json_load_valid_file(tmp_path: Path) -> None:
"""Test StorageJSON.load with valid JSON file."""
storage_data = {
"storage_version": 1,
"name": "loaded_device",
"friendly_name": "Loaded Device",
"comment": "Loaded from file",
"esphome_version": "2024.1.0",
"src_version": 2,
"address": "10.0.0.1",
"web_port": 8080,
"esp_platform": "ESP32",
"build_path": "/loaded/build",
"firmware_bin_path": "/loaded/firmware.bin",
"loaded_integrations": ["wifi", "api"],
"loaded_platforms": ["sensor"],
"no_mdns": True,
"framework": "arduino",
"core_platform": "esp32",
}
file_path = tmp_path / "storage.json"
file_path.write_text(json.dumps(storage_data))
result = storage_json.StorageJSON.load(str(file_path))
assert result is not None
assert result.name == "loaded_device"
assert result.friendly_name == "Loaded Device"
assert result.comment == "Loaded from file"
assert result.esphome_version == "2024.1.0"
assert result.src_version == 2
assert result.address == "10.0.0.1"
assert result.web_port == 8080
assert result.target_platform == "ESP32"
assert result.build_path == "/loaded/build"
assert result.firmware_bin_path == "/loaded/firmware.bin"
assert result.loaded_integrations == {"wifi", "api"}
assert result.loaded_platforms == {"sensor"}
assert result.no_mdns is True
assert result.framework == "arduino"
assert result.core_platform == "esp32"
def test_storage_json_load_invalid_file(tmp_path: Path) -> None:
"""Test StorageJSON.load with invalid JSON file."""
file_path = tmp_path / "invalid.json"
file_path.write_text("not valid json{")
result = storage_json.StorageJSON.load(str(file_path))
assert result is None
def test_storage_json_load_nonexistent_file() -> None:
"""Test StorageJSON.load with non-existent file."""
result = storage_json.StorageJSON.load("/nonexistent/file.json")
assert result is None
def test_storage_json_equality() -> None:
"""Test StorageJSON equality comparison."""
storage1 = storage_json.StorageJSON(
storage_version=1,
name="test",
friendly_name="Test",
comment=None,
esphome_version="2024.1.0",
src_version=1,
address="test.local",
web_port=80,
target_platform="ESP32",
build_path="/build",
firmware_bin_path="/firmware.bin",
loaded_integrations={"wifi"},
loaded_platforms=set(),
no_mdns=False,
)
storage2 = storage_json.StorageJSON(
storage_version=1,
name="test",
friendly_name="Test",
comment=None,
esphome_version="2024.1.0",
src_version=1,
address="test.local",
web_port=80,
target_platform="ESP32",
build_path="/build",
firmware_bin_path="/firmware.bin",
loaded_integrations={"wifi"},
loaded_platforms=set(),
no_mdns=False,
)
storage3 = storage_json.StorageJSON(
storage_version=1,
name="different", # Different name
friendly_name="Test",
comment=None,
esphome_version="2024.1.0",
src_version=1,
address="test.local",
web_port=80,
target_platform="ESP32",
build_path="/build",
firmware_bin_path="/firmware.bin",
loaded_integrations={"wifi"},
loaded_platforms=set(),
no_mdns=False,
)
assert storage1 == storage2
assert storage1 != storage3
assert storage1 != "not a storage object"
def test_esphome_storage_json_as_dict() -> None:
"""Test EsphomeStorageJSON.as_dict returns correct dictionary."""
storage = storage_json.EsphomeStorageJSON(
storage_version=1,
cookie_secret="secret123",
last_update_check="2024-01-15T10:30:00",
remote_version="2024.1.1",
)
result = storage.as_dict()
assert result["storage_version"] == 1
assert result["cookie_secret"] == "secret123"
assert result["last_update_check"] == "2024-01-15T10:30:00"
assert result["remote_version"] == "2024.1.1"
def test_esphome_storage_json_last_update_check_property() -> None:
"""Test EsphomeStorageJSON.last_update_check property."""
storage = storage_json.EsphomeStorageJSON(
storage_version=1,
cookie_secret="secret",
last_update_check="2024-01-15T10:30:00",
remote_version=None,
)
# Test getter
result = storage.last_update_check
assert isinstance(result, datetime)
assert result.year == 2024
assert result.month == 1
assert result.day == 15
assert result.hour == 10
assert result.minute == 30
# Test setter
new_date = datetime(2024, 2, 20, 15, 45, 30)
storage.last_update_check = new_date
assert storage.last_update_check_str == "2024-02-20T15:45:30"
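# For reference (standard library behaviour, not the storage implementation):
# the ISO-8601 round-trip these property assertions rely on.
from datetime import datetime

assert datetime.fromisoformat("2024-01-15T10:30:00").isoformat() == "2024-01-15T10:30:00"
assert datetime(2024, 2, 20, 15, 45, 30).isoformat() == "2024-02-20T15:45:30"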
def test_esphome_storage_json_last_update_check_invalid() -> None:
"""Test EsphomeStorageJSON.last_update_check with invalid date."""
storage = storage_json.EsphomeStorageJSON(
storage_version=1,
cookie_secret="secret",
last_update_check="invalid date",
remote_version=None,
)
result = storage.last_update_check
assert result is None
def test_esphome_storage_json_to_json() -> None:
"""Test EsphomeStorageJSON.to_json returns valid JSON string."""
storage = storage_json.EsphomeStorageJSON(
storage_version=1,
cookie_secret="mysecret",
last_update_check="2024-01-15T10:30:00",
remote_version="2024.1.1",
)
json_str = storage.to_json()
# Should be valid JSON
parsed = json.loads(json_str)
assert parsed["cookie_secret"] == "mysecret"
assert parsed["storage_version"] == 1
# Should end with newline
assert json_str.endswith("\n")
def test_esphome_storage_json_save(tmp_path: Path) -> None:
"""Test EsphomeStorageJSON.save writes file correctly."""
storage = storage_json.EsphomeStorageJSON(
storage_version=1,
cookie_secret="secret",
last_update_check=None,
remote_version=None,
)
save_path = tmp_path / "esphome.json"
with patch("esphome.storage_json.write_file_if_changed") as mock_write:
storage.save(str(save_path))
mock_write.assert_called_once_with(str(save_path), storage.to_json())
def test_esphome_storage_json_load_valid_file(tmp_path: Path) -> None:
"""Test EsphomeStorageJSON.load with valid JSON file."""
storage_data = {
"storage_version": 1,
"cookie_secret": "loaded_secret",
"last_update_check": "2024-01-20T14:30:00",
"remote_version": "2024.1.2",
}
file_path = tmp_path / "esphome.json"
file_path.write_text(json.dumps(storage_data))
result = storage_json.EsphomeStorageJSON.load(str(file_path))
assert result is not None
assert result.storage_version == 1
assert result.cookie_secret == "loaded_secret"
assert result.last_update_check_str == "2024-01-20T14:30:00"
assert result.remote_version == "2024.1.2"
def test_esphome_storage_json_load_invalid_file(tmp_path: Path) -> None:
"""Test EsphomeStorageJSON.load with invalid JSON file."""
file_path = tmp_path / "invalid.json"
file_path.write_text("not valid json{")
result = storage_json.EsphomeStorageJSON.load(str(file_path))
assert result is None
def test_esphome_storage_json_load_nonexistent_file() -> None:
"""Test EsphomeStorageJSON.load with non-existent file."""
result = storage_json.EsphomeStorageJSON.load("/nonexistent/file.json")
assert result is None
def test_esphome_storage_json_get_default() -> None:
"""Test EsphomeStorageJSON.get_default creates default storage."""
with patch("esphome.storage_json.os.urandom") as mock_urandom:
# Mock urandom to return predictable bytes
mock_urandom.return_value = b"test" * 16 # 64 bytes
result = storage_json.EsphomeStorageJSON.get_default()
assert result.storage_version == 1
assert len(result.cookie_secret) == 128 # 64 bytes hex = 128 chars
assert result.last_update_check is None
assert result.remote_version is None
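# Sanity check of the size relationship the assertion above depends on:
# hex-encoding N bytes always yields 2*N characters.
import binascii

assert len(binascii.hexlify(b"test" * 16).decode()) == 128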
def test_esphome_storage_json_equality() -> None:
"""Test EsphomeStorageJSON equality comparison."""
storage1 = storage_json.EsphomeStorageJSON(
storage_version=1,
cookie_secret="secret",
last_update_check="2024-01-15T10:30:00",
remote_version="2024.1.1",
)
storage2 = storage_json.EsphomeStorageJSON(
storage_version=1,
cookie_secret="secret",
last_update_check="2024-01-15T10:30:00",
remote_version="2024.1.1",
)
storage3 = storage_json.EsphomeStorageJSON(
storage_version=1,
cookie_secret="different", # Different secret
last_update_check="2024-01-15T10:30:00",
remote_version="2024.1.1",
)
assert storage1 == storage2
assert storage1 != storage3
assert storage1 != "not a storage object"
def test_storage_json_load_legacy_esphomeyaml_version(tmp_path: Path) -> None:
"""Test loading storage with legacy esphomeyaml_version field."""
storage_data = {
"storage_version": 1,
"name": "legacy_device",
"friendly_name": "Legacy Device",
"esphomeyaml_version": "1.14.0", # Legacy field name
"address": "legacy.local",
"esp_platform": "ESP8266",
}
file_path = tmp_path / "legacy.json"
file_path.write_text(json.dumps(storage_data))
result = storage_json.StorageJSON.load(str(file_path))
assert result is not None
assert result.esphome_version == "1.14.0" # Should map to esphome_version
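# Minimal sketch (an assumption about the loader, not its actual code) of the
# legacy-key fallback this test checks: prefer "esphome_version" and fall back
# to the old "esphomeyaml_version" key.
def sketch_read_version(storage: dict) -> str | None:
    return storage.get("esphome_version", storage.get("esphomeyaml_version"))


assert sketch_read_version({"esphomeyaml_version": "1.14.0"}) == "1.14.0"
assert sketch_read_version({"esphome_version": "2024.1.0"}) == "2024.1.0"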


@@ -1,9 +1,12 @@
"""Tests for the wizard.py file."""
import os
from pathlib import Path
from typing import Any
from unittest.mock import MagicMock
import pytest
from pytest import MonkeyPatch
from esphome.components.bk72xx.boards import BK72XX_BOARD_PINS
from esphome.components.esp32.boards import ESP32_BOARD_PINS
@@ -15,7 +18,7 @@ import esphome.wizard as wz
@pytest.fixture
def default_config():
def default_config() -> dict[str, Any]:
return {
"type": "basic",
"name": "test-name",
@@ -28,7 +31,7 @@ def default_config():
@pytest.fixture
def wizard_answers():
def wizard_answers() -> list[str]:
return [
"test-node", # Name of the node
"ESP8266", # platform
@@ -53,7 +56,9 @@ def test_sanitize_quotes_replaces_with_escaped_char():
assert output_str == '\\"key\\": \\"value\\"'
def test_config_file_fallback_ap_includes_descriptive_name(default_config):
def test_config_file_fallback_ap_includes_descriptive_name(
default_config: dict[str, Any],
):
"""
The fallback AP should include the node name and a descriptive suffix
"""
@@ -67,7 +72,9 @@ def test_config_file_fallback_ap_includes_descriptive_name(default_config):
assert 'ssid: "Test Node Fallback Hotspot"' in config
def test_config_file_fallback_ap_name_less_than_32_chars(default_config):
def test_config_file_fallback_ap_name_less_than_32_chars(
default_config: dict[str, Any],
):
"""
The fallback AP name must be less than 32 chars.
Since it is composed of the node name and "Fallback Hotspot", this can be too long and needs truncating
@@ -82,7 +89,7 @@ def test_config_file_fallback_ap_name_less_than_32_chars(default_config):
assert 'ssid: "A Very Long Name For This Node"' in config
def test_config_file_should_include_ota(default_config):
def test_config_file_should_include_ota(default_config: dict[str, Any]):
"""
The Over-The-Air update should be enabled by default
"""
@@ -95,7 +102,9 @@ def test_config_file_should_include_ota(default_config):
assert "ota:" in config
def test_config_file_should_include_ota_when_password_set(default_config):
def test_config_file_should_include_ota_when_password_set(
default_config: dict[str, Any],
):
"""
The Over-The-Air update should be enabled when a password is set
"""
@@ -109,7 +118,9 @@ def test_config_file_should_include_ota_when_password_set(default_config):
assert "ota:" in config
def test_wizard_write_sets_platform(default_config, tmp_path, monkeypatch):
def test_wizard_write_sets_platform(
default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
):
"""
If the platform is not explicitly set, use "ESP8266" if the board is one of the ESP8266 boards
"""
@@ -126,7 +137,7 @@ def test_wizard_write_sets_platform(default_config, tmp_path, monkeypatch):
assert "esp8266:" in generated_config
def test_wizard_empty_config(tmp_path, monkeypatch):
def test_wizard_empty_config(tmp_path: Path, monkeypatch: MonkeyPatch):
"""
The wizard should be able to create an empty configuration
"""
@@ -146,7 +157,7 @@ def test_wizard_empty_config(tmp_path, monkeypatch):
assert generated_config == ""
def test_wizard_upload_config(tmp_path, monkeypatch):
def test_wizard_upload_config(tmp_path: Path, monkeypatch: MonkeyPatch):
"""
The wizard should be able to import a base64-encoded configuration
"""
@@ -168,7 +179,7 @@ def test_wizard_upload_config(tmp_path, monkeypatch):
def test_wizard_write_defaults_platform_from_board_esp8266(
default_config, tmp_path, monkeypatch
default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
):
"""
If the platform is not explicitly set, use "ESP8266" if the board is one of the ESP8266 boards
@@ -189,7 +200,7 @@ def test_wizard_write_defaults_platform_from_board_esp8266(
def test_wizard_write_defaults_platform_from_board_esp32(
default_config, tmp_path, monkeypatch
default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
):
"""
If the platform is not explicitly set, use "ESP32" if the board is one of the ESP32 boards
@@ -210,7 +221,7 @@ def test_wizard_write_defaults_platform_from_board_esp32(
def test_wizard_write_defaults_platform_from_board_bk72xx(
default_config, tmp_path, monkeypatch
default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
):
"""
If the platform is not explicitly set, use "BK72XX" if the board is one of BK72XX boards
@@ -231,7 +242,7 @@ def test_wizard_write_defaults_platform_from_board_bk72xx(
def test_wizard_write_defaults_platform_from_board_ln882x(
default_config, tmp_path, monkeypatch
default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
):
"""
If the platform is not explicitly set, use "LN882X" if the board is one of LN882X boards
@@ -252,7 +263,7 @@ def test_wizard_write_defaults_platform_from_board_ln882x(
def test_wizard_write_defaults_platform_from_board_rtl87xx(
default_config, tmp_path, monkeypatch
default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
):
"""
If the platform is not explicitly set, use "RTL87XX" if the board is one of RTL87XX boards
@@ -272,7 +283,7 @@ def test_wizard_write_defaults_platform_from_board_rtl87xx(
assert "rtl87xx:" in generated_config
def test_safe_print_step_prints_step_number_and_description(monkeypatch):
def test_safe_print_step_prints_step_number_and_description(monkeypatch: MonkeyPatch):
"""
The safe_print_step function prints the step number and the passed description
"""
@@ -296,7 +307,7 @@ def test_safe_print_step_prints_step_number_and_description(monkeypatch):
assert any(f"STEP {step_num}" in arg for arg in all_args)
def test_default_input_uses_default_if_no_input_supplied(monkeypatch):
def test_default_input_uses_default_if_no_input_supplied(monkeypatch: MonkeyPatch):
"""
The default_input() function should return the supplied default value if the user doesn't enter anything
"""
@@ -312,7 +323,7 @@ def test_default_input_uses_default_if_no_input_supplied(monkeypatch):
assert retval == default_string
def test_default_input_uses_user_supplied_value(monkeypatch):
def test_default_input_uses_user_supplied_value(monkeypatch: MonkeyPatch):
"""
The default_input() function should return the value that the user enters
"""
@@ -376,7 +387,9 @@ def test_wizard_rejects_existing_files(tmpdir):
assert retval == 2
def test_wizard_accepts_default_answers_esp8266(tmpdir, monkeypatch, wizard_answers):
def test_wizard_accepts_default_answers_esp8266(
tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
"""
The wizard should accept the given default answers for esp8266
"""
@@ -396,7 +409,9 @@ def test_wizard_accepts_default_answers_esp8266(tmpdir, monkeypatch, wizard_answ
assert retval == 0
def test_wizard_accepts_default_answers_esp32(tmpdir, monkeypatch, wizard_answers):
def test_wizard_accepts_default_answers_esp32(
tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
"""
The wizard should accept the given default answers for esp32
"""
@@ -418,7 +433,9 @@ def test_wizard_accepts_default_answers_esp32(tmpdir, monkeypatch, wizard_answer
assert retval == 0
def test_wizard_offers_better_node_name(tmpdir, monkeypatch, wizard_answers):
def test_wizard_offers_better_node_name(
tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
"""
When the node name does not conform, a better alternative is offered
* Removes special chars
@@ -449,7 +466,9 @@ def test_wizard_offers_better_node_name(tmpdir, monkeypatch, wizard_answers):
assert wz.default_input.call_args.args[1] == expected_name
def test_wizard_requires_correct_platform(tmpdir, monkeypatch, wizard_answers):
def test_wizard_requires_correct_platform(
tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
"""
When the platform is neither esp32 nor esp8266, the wizard should reject it
"""
@@ -471,7 +490,9 @@ def test_wizard_requires_correct_platform(tmpdir, monkeypatch, wizard_answers):
assert retval == 0
def test_wizard_requires_correct_board(tmpdir, monkeypatch, wizard_answers):
def test_wizard_requires_correct_board(
tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
"""
When the board is not a valid esp8266 board, the wizard should reject it
"""
@@ -493,7 +514,9 @@ def test_wizard_requires_correct_board(tmpdir, monkeypatch, wizard_answers):
assert retval == 0
def test_wizard_requires_valid_ssid(tmpdir, monkeypatch, wizard_answers):
def test_wizard_requires_valid_ssid(
tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
"""
When the SSID is not valid, the wizard should reject it
"""
@@ -515,7 +538,9 @@ def test_wizard_requires_valid_ssid(tmpdir, monkeypatch, wizard_answers):
assert retval == 0
def test_wizard_write_protects_existing_config(tmpdir, default_config, monkeypatch):
def test_wizard_write_protects_existing_config(
tmpdir, default_config: dict[str, Any], monkeypatch: MonkeyPatch
):
"""
The wizard_write function should not overwrite existing config files and return False
"""


@@ -349,6 +349,14 @@ def test_clean_build(
dependencies_lock = tmp_path / "dependencies.lock"
dependencies_lock.write_text("lock file")
# Create PlatformIO cache directory
platformio_cache_dir = tmp_path / ".platformio" / ".cache"
platformio_cache_dir.mkdir(parents=True)
(platformio_cache_dir / "downloads").mkdir()
(platformio_cache_dir / "http").mkdir()
(platformio_cache_dir / "tmp").mkdir()
(platformio_cache_dir / "downloads" / "package.tar.gz").write_text("package")
# Setup mocks
mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
@@ -358,21 +366,30 @@ def test_clean_build(
assert pioenvs_dir.exists()
assert piolibdeps_dir.exists()
assert dependencies_lock.exists()
assert platformio_cache_dir.exists()
# Call the function
with caplog.at_level("INFO"):
clean_build()
# Mock PlatformIO's get_project_cache_dir
with patch(
"platformio.project.helpers.get_project_cache_dir"
) as mock_get_cache_dir:
mock_get_cache_dir.return_value = str(platformio_cache_dir)
# Call the function
with caplog.at_level("INFO"):
clean_build()
# Verify all were removed
assert not pioenvs_dir.exists()
assert not piolibdeps_dir.exists()
assert not dependencies_lock.exists()
assert not platformio_cache_dir.exists()
# Verify logging
assert "Deleting" in caplog.text
assert ".pioenvs" in caplog.text
assert ".piolibdeps" in caplog.text
assert "dependencies.lock" in caplog.text
assert "PlatformIO cache" in caplog.text
@patch("esphome.writer.CORE")
@@ -446,6 +463,86 @@ def test_clean_build_nothing_exists(
assert not dependencies_lock.exists()
@patch("esphome.writer.CORE")
def test_clean_build_platformio_not_available(
mock_core: MagicMock,
tmp_path: Path,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test clean_build when PlatformIO is not available."""
# Create directory structure and files
pioenvs_dir = tmp_path / ".pioenvs"
pioenvs_dir.mkdir()
piolibdeps_dir = tmp_path / ".piolibdeps"
piolibdeps_dir.mkdir()
dependencies_lock = tmp_path / "dependencies.lock"
dependencies_lock.write_text("lock file")
# Setup mocks
mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
mock_core.relative_build_path.return_value = str(dependencies_lock)
# Verify all exist before
assert pioenvs_dir.exists()
assert piolibdeps_dir.exists()
assert dependencies_lock.exists()
# Mock import error for platformio
with (
patch.dict("sys.modules", {"platformio.project.helpers": None}),
caplog.at_level("INFO"),
):
# Call the function
clean_build()
# Verify standard paths were removed but no cache cleaning attempted
assert not pioenvs_dir.exists()
assert not piolibdeps_dir.exists()
assert not dependencies_lock.exists()
# Verify no cache logging
assert "PlatformIO cache" not in caplog.text
@patch("esphome.writer.CORE")
def test_clean_build_empty_cache_dir(
mock_core: MagicMock,
tmp_path: Path,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test clean_build when get_project_cache_dir returns empty/whitespace."""
# Create directory structure and files
pioenvs_dir = tmp_path / ".pioenvs"
pioenvs_dir.mkdir()
# Setup mocks
mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
mock_core.relative_piolibdeps_path.return_value = str(tmp_path / ".piolibdeps")
mock_core.relative_build_path.return_value = str(tmp_path / "dependencies.lock")
# Verify pioenvs exists before
assert pioenvs_dir.exists()
# Mock PlatformIO's get_project_cache_dir to return whitespace
with patch(
"platformio.project.helpers.get_project_cache_dir"
) as mock_get_cache_dir:
mock_get_cache_dir.return_value = " " # Whitespace only
# Call the function
with caplog.at_level("INFO"):
clean_build()
# Verify pioenvs was removed
assert not pioenvs_dir.exists()
# Verify no cache cleaning was attempted due to empty string
assert "PlatformIO cache" not in caplog.text
@patch("esphome.writer.CORE")
def test_write_gitignore_creates_new_file(
mock_core: MagicMock,


@@ -1,26 +1,9 @@
from pathlib import Path
import shutil
from unittest.mock import patch
import pytest
from esphome import core, yaml_util
from esphome import yaml_util
from esphome.components import substitutions
from esphome.core import EsphomeError
from esphome.util import OrderedDict
@pytest.fixture(autouse=True)
def clear_secrets_cache() -> None:
"""Clear the secrets cache before each test."""
yaml_util._SECRET_VALUES.clear()
yaml_util._SECRET_CACHE.clear()
yield
yaml_util._SECRET_VALUES.clear()
yaml_util._SECRET_CACHE.clear()
def test_include_with_vars(fixture_path: Path) -> None:
def test_include_with_vars(fixture_path):
yaml_file = fixture_path / "yaml_util" / "includetest.yaml"
actual = yaml_util.load_yaml(yaml_file)
@@ -79,202 +62,3 @@ def test_parsing_with_custom_loader(fixture_path):
assert loader_calls[0].endswith("includes/included.yaml")
assert loader_calls[1].endswith("includes/list.yaml")
assert loader_calls[2].endswith("includes/scalar.yaml")
def test_construct_secret_simple(fixture_path: Path) -> None:
"""Test loading a YAML file with !secret tags."""
yaml_file = fixture_path / "yaml_util" / "test_secret.yaml"
actual = yaml_util.load_yaml(yaml_file)
# Check that secrets were properly loaded
assert actual["wifi"]["password"] == "super_secret_wifi"
assert actual["api"]["encryption"]["key"] == "0123456789abcdef"
assert actual["sensor"][0]["id"] == "my_secret_value"
def test_construct_secret_missing(fixture_path: Path, tmp_path: Path) -> None:
"""Test that missing secrets raise proper errors."""
# Create a YAML file with a secret that doesn't exist
test_yaml = tmp_path / "test.yaml"
test_yaml.write_text("""
esphome:
name: test
wifi:
password: !secret nonexistent_secret
""")
# Create an empty secrets file
secrets_yaml = tmp_path / "secrets.yaml"
secrets_yaml.write_text("some_other_secret: value")
with pytest.raises(EsphomeError, match="Secret 'nonexistent_secret' not defined"):
yaml_util.load_yaml(str(test_yaml))
def test_construct_secret_no_secrets_file(tmp_path: Path) -> None:
"""Test that missing secrets.yaml file raises proper error."""
# Create a YAML file with a secret but no secrets.yaml
test_yaml = tmp_path / "test.yaml"
test_yaml.write_text("""
wifi:
password: !secret some_secret
""")
# Mock CORE.config_path to avoid NoneType error
with (
patch.object(core.CORE, "config_path", str(tmp_path / "main.yaml")),
pytest.raises(EsphomeError, match="secrets.yaml"),
):
yaml_util.load_yaml(str(test_yaml))
def test_construct_secret_fallback_to_main_config_dir(
fixture_path: Path, tmp_path: Path
) -> None:
"""Test fallback to main config directory for secrets."""
# Create a subdirectory with a YAML file that uses secrets
subdir = tmp_path / "subdir"
subdir.mkdir()
test_yaml = subdir / "test.yaml"
test_yaml.write_text("""
wifi:
password: !secret test_secret
""")
# Create secrets.yaml in the main directory
main_secrets = tmp_path / "secrets.yaml"
main_secrets.write_text("test_secret: main_secret_value")
# Mock CORE.config_path to point to main directory
with patch.object(core.CORE, "config_path", str(tmp_path / "main.yaml")):
actual = yaml_util.load_yaml(str(test_yaml))
assert actual["wifi"]["password"] == "main_secret_value"
def test_construct_include_dir_named(fixture_path: Path, tmp_path: Path) -> None:
"""Test !include_dir_named directive."""
# Copy fixture directory to temporary location
src_dir = fixture_path / "yaml_util"
dst_dir = tmp_path / "yaml_util"
shutil.copytree(src_dir, dst_dir)
# Create test YAML that uses include_dir_named
test_yaml = dst_dir / "test_include_named.yaml"
test_yaml.write_text("""
sensor: !include_dir_named named_dir
""")
actual = yaml_util.load_yaml(str(test_yaml))
actual_sensor = actual["sensor"]
# Check that files were loaded with their names as keys
assert isinstance(actual_sensor, OrderedDict)
assert "sensor1" in actual_sensor
assert "sensor2" in actual_sensor
assert "sensor3" in actual_sensor # Files from subdirs are included with basename
# Check content of loaded files
assert actual_sensor["sensor1"]["platform"] == "template"
assert actual_sensor["sensor1"]["name"] == "Sensor 1"
assert actual_sensor["sensor2"]["platform"] == "template"
assert actual_sensor["sensor2"]["name"] == "Sensor 2"
# Check that subdirectory files are included with their basename
assert actual_sensor["sensor3"]["platform"] == "template"
assert actual_sensor["sensor3"]["name"] == "Sensor 3 in subdir"
# Check that hidden files and non-YAML files are not included
assert ".hidden" not in actual_sensor
assert "not_yaml" not in actual_sensor
def test_construct_include_dir_named_empty_dir(tmp_path: Path) -> None:
"""Test !include_dir_named with empty directory."""
# Create empty directory
empty_dir = tmp_path / "empty_dir"
empty_dir.mkdir()
test_yaml = tmp_path / "test.yaml"
test_yaml.write_text("""
sensor: !include_dir_named empty_dir
""")
actual = yaml_util.load_yaml(str(test_yaml))
# Should return empty OrderedDict
assert isinstance(actual["sensor"], OrderedDict)
assert len(actual["sensor"]) == 0
def test_construct_include_dir_named_with_dots(tmp_path: Path) -> None:
"""Test that include_dir_named ignores files starting with dots."""
# Create directory with various files
test_dir = tmp_path / "test_dir"
test_dir.mkdir()
# Create visible file
visible_file = test_dir / "visible.yaml"
visible_file.write_text("key: visible_value")
# Create hidden file
hidden_file = test_dir / ".hidden.yaml"
hidden_file.write_text("key: hidden_value")
# Create hidden directory with files
hidden_dir = test_dir / ".hidden_dir"
hidden_dir.mkdir()
hidden_subfile = hidden_dir / "subfile.yaml"
hidden_subfile.write_text("key: hidden_subfile_value")
test_yaml = tmp_path / "test.yaml"
test_yaml.write_text("""
test: !include_dir_named test_dir
""")
actual = yaml_util.load_yaml(str(test_yaml))
# Should only include visible file
assert "visible" in actual["test"]
assert actual["test"]["visible"]["key"] == "visible_value"
# Should not include hidden files or directories
assert ".hidden" not in actual["test"]
assert ".hidden_dir" not in actual["test"]
def test_find_files_recursive(fixture_path: Path, tmp_path: Path) -> None:
"""Test that _find_files works recursively through include_dir_named."""
# Copy fixture directory to temporary location
src_dir = fixture_path / "yaml_util"
dst_dir = tmp_path / "yaml_util"
shutil.copytree(src_dir, dst_dir)
# This indirectly tests _find_files by using include_dir_named
test_yaml = dst_dir / "test_include_recursive.yaml"
test_yaml.write_text("""
all_sensors: !include_dir_named named_dir
""")
actual = yaml_util.load_yaml(str(test_yaml))
# Should find sensor1.yaml, sensor2.yaml, and subdir/sensor3.yaml (all flattened)
assert len(actual["all_sensors"]) == 3
assert "sensor1" in actual["all_sensors"]
assert "sensor2" in actual["all_sensors"]
assert "sensor3" in actual["all_sensors"]
def test_secret_values_tracking(fixture_path: Path) -> None:
"""Test that secret values are properly tracked for dumping."""
yaml_file = fixture_path / "yaml_util" / "test_secret.yaml"
yaml_util.load_yaml(yaml_file)
# Check that secret values are tracked
assert "super_secret_wifi" in yaml_util._SECRET_VALUES
assert yaml_util._SECRET_VALUES["super_secret_wifi"] == "wifi_password"
assert "0123456789abcdef" in yaml_util._SECRET_VALUES
assert yaml_util._SECRET_VALUES["0123456789abcdef"] == "api_key"