Merge branch 'buildinfo' into integration
@@ -518,10 +518,49 @@ def compile_program(args: ArgsProtocol, config: ConfigType) -> int:
    rc = platformio_api.run_compile(config, CORE.verbose)
    if rc != 0:
        return rc

    # Check if firmware was rebuilt and emit build_info + create manifest
    _check_and_emit_build_info()

    idedata = platformio_api.get_idedata(config)
    return 0 if idedata is not None else 1


def _check_and_emit_build_info() -> None:
    """Check if firmware was rebuilt and emit build_info."""
    import json

    firmware_path = CORE.firmware_bin
    build_info_json_path = CORE.relative_build_path("build_info.json")

    # Check if both files exist
    if not firmware_path.exists() or not build_info_json_path.exists():
        return

    # Check if firmware is newer than build_info (indicating a relink occurred)
    if firmware_path.stat().st_mtime <= build_info_json_path.stat().st_mtime:
        return

    # Read build_info from JSON
    try:
        with open(build_info_json_path, encoding="utf-8") as f:
            build_info = json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        _LOGGER.debug("Failed to read build_info: %s", e)
        return

    config_hash = build_info.get("config_hash")
    build_time_str = build_info.get("build_time_str")

    if config_hash is None or build_time_str is None:
        return

    # Emit build_info with human-readable time
    _LOGGER.info(
        "Build Info: config_hash=0x%08x build_time_str=%s", config_hash, build_time_str
    )


def upload_using_esptool(
    config: ConfigType, port: str, file: str, speed: int
) -> str | int:
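Editor's note: the JSON file consumed by _check_and_emit_build_info() is produced by build scripts elsewhere in this branch and is not shown in this hunk. A minimal illustrative sketch of a file that would satisfy the two .get() calls above, with purely hypothetical values and path:

# --- Illustrative sketch (not part of this commit) ---
import json
from pathlib import Path

build_dir = Path(".esphome/build/example")  # hypothetical build directory
build_dir.mkdir(parents=True, exist_ok=True)
build_info = {
    "config_hash": 0x1A2B3C4D,                 # integer, logged as 0x%08x above
    "build_time_str": "2025-01-01 12:00:00",   # hypothetical human-readable build time
}
(build_dir / "build_info.json").write_text(json.dumps(build_info), encoding="utf-8")
# The log line only fires when firmware.bin has a newer mtime than build_info.json,
# i.e. when the linker actually produced a new binary.
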
@@ -227,7 +227,7 @@ CONFIG_SCHEMA = cv.All(
        {
            cv.GenerateID(): cv.declare_id(ADE7880),
            cv.Optional(CONF_FREQUENCY, default="50Hz"): cv.All(
                cv.frequency, cv.Range(min=45.0, max=66.0)
                cv.frequency, cv.float_range(min=45.0, max=66.0)
            ),
            cv.Optional(CONF_IRQ0_PIN): pins.internal_gpio_input_pin_schema,
            cv.Required(CONF_IRQ1_PIN): pins.internal_gpio_input_pin_schema,
@@ -1472,7 +1472,10 @@ bool APIConnection::send_device_info_response(const DeviceInfoRequest &msg) {

  resp.set_esphome_version(ESPHOME_VERSION_REF);

  resp.set_compilation_time(App.get_compilation_time_ref());
  // Stack buffer for build time string
  char build_time_str[Application::BUILD_TIME_STR_SIZE];
  App.get_build_time_string(build_time_str);
  resp.set_compilation_time(StringRef(build_time_str));

  // Manufacturer string - define once, handle ESP8266 PROGMEM separately
#if defined(USE_ESP8266) || defined(USE_ESP32)
@@ -11,6 +11,7 @@ CODEOWNERS = ["@neffs", "@kbx81"]

AUTO_LOAD = ["bme68x_bsec2"]
DEPENDENCIES = ["i2c"]
MULTI_CONF = True

bme68x_bsec2_i2c_ns = cg.esphome_ns.namespace("bme68x_bsec2_i2c")
BME68xBSEC2I2CComponent = bme68x_bsec2_i2c_ns.class_(
@@ -165,7 +165,7 @@ CONFIG_MAP = {
    CONF_OUTPUT_POWER: cv.float_range(min=-30.0, max=11.0),
    CONF_RX_ATTENUATION: cv.enum(RX_ATTENUATION, upper=False),
    CONF_DC_BLOCKING_FILTER: cv.boolean,
    CONF_FREQUENCY: cv.All(cv.frequency, cv.float_range(min=300000000, max=928000000)),
    CONF_FREQUENCY: cv.All(cv.frequency, cv.float_range(min=300.0e6, max=928.0e6)),
    CONF_IF_FREQUENCY: cv.All(cv.frequency, cv.float_range(min=25000, max=788000)),
    CONF_FILTER_BANDWIDTH: cv.All(cv.frequency, cv.float_range(min=58000, max=812000)),
    CONF_CHANNEL: cv.uint8_t,
@@ -99,11 +99,11 @@ CC1101Component::CC1101Component() {
  this->state_.FS_AUTOCAL = 1;

  // Default Settings
  this->set_frequency(433920);
  this->set_if_frequency(153);
  this->set_filter_bandwidth(203);
  this->set_frequency(433920000);
  this->set_if_frequency(153000);
  this->set_filter_bandwidth(203000);
  this->set_channel(0);
  this->set_channel_spacing(200);
  this->set_channel_spacing(200000);
  this->set_symbol_rate(5000);
  this->set_sync_mode(SyncMode::SYNC_MODE_NONE);
  this->set_carrier_sense_above_threshold(true);
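Editor's note: cv.frequency normalises unit-suffixed strings to a float in Hz before any range check runs, which is why both the schema bounds and the C++ defaults above move from kHz-style values to plain Hz. A short illustration, assuming an installed esphome checkout:

# --- Illustrative sketch (not part of this commit) ---
import esphome.config_validation as cv

frequency = cv.All(cv.frequency, cv.float_range(min=300.0e6, max=928.0e6))
print(frequency("433.92MHz"))  # expected: 433920000.0 (Hz)
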
@@ -117,9 +117,7 @@ CONF_MIN_HUMIDITY = "min_humidity"
CONF_MAX_HUMIDITY = "max_humidity"
CONF_TARGET_HUMIDITY = "target_humidity"

visual_temperature = cv.float_with_unit(
    "visual_temperature", "(°C|° C|°|C|°K|° K|K|°F|° F|F)?"
)
visual_temperature = cv.float_with_unit("visual_temperature", "(°|(° ?)?[CKF])?")


VISUAL_TEMPERATURE_STEP_SCHEMA = cv.Schema(
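Editor's note: the condensed unit pattern is easier to trust once checked; a quick standalone check with plain re (not the cv.float_with_unit wrapper) that the new alternation still covers every suffix listed in the old pattern:

# --- Illustrative sketch (not part of this commit) ---
import re

unit = re.compile(r"(°|(° ?)?[CKF])?")
for suffix in ("°C", "° C", "°", "C", "°K", "° K", "K", "°F", "° F", "F", ""):
    assert unit.fullmatch(suffix), suffix
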
@@ -1,4 +1,4 @@
from esphome import automation, pins
from esphome import automation, core, pins
import esphome.codegen as cg
from esphome.components import esp32, time
from esphome.components.esp32 import (
@@ -23,16 +23,20 @@ from esphome.const import (
    CONF_MINUTE,
    CONF_MODE,
    CONF_NUMBER,
    CONF_PIN,
    CONF_PINS,
    CONF_RUN_DURATION,
    CONF_SECOND,
    CONF_SLEEP_DURATION,
    CONF_TIME_ID,
    CONF_WAKEUP_PIN,
    PLATFORM_BK72XX,
    PLATFORM_ESP32,
    PLATFORM_ESP8266,
    PlatformFramework,
)
from esphome.core import CORE
from esphome.types import ConfigType

WAKEUP_PINS = {
    VARIANT_ESP32: [
@@ -113,7 +117,7 @@ WAKEUP_PINS = {
}


def validate_pin_number(value):
def validate_pin_number_esp32(value: ConfigType) -> ConfigType:
    valid_pins = WAKEUP_PINS.get(get_esp32_variant(), WAKEUP_PINS[VARIANT_ESP32])
    if value[CONF_NUMBER] not in valid_pins:
        raise cv.Invalid(
@@ -122,6 +126,51 @@ def validate_pin_number(value):
    return value


def validate_pin_number(value: ConfigType) -> ConfigType:
    if not CORE.is_esp32:
        return value
    return validate_pin_number_esp32(value)


def validate_wakeup_pin(
    value: ConfigType | list[ConfigType],
) -> list[ConfigType]:
    if not isinstance(value, list):
        processed_pins: list[ConfigType] = [{CONF_PIN: value}]
    else:
        processed_pins = list(value)

    for i, pin_config in enumerate(processed_pins):
        # now validate each item
        validated_pin = WAKEUP_PIN_SCHEMA(pin_config)
        validate_pin_number(validated_pin[CONF_PIN])
        processed_pins[i] = validated_pin

    return processed_pins


def validate_config(config: ConfigType) -> ConfigType:
    # right now only BK72XX supports the list format for wakeup pins
    if CORE.is_bk72xx:
        if CONF_WAKEUP_PIN_MODE in config:
            wakeup_pins = config.get(CONF_WAKEUP_PIN, [])
            if len(wakeup_pins) > 1:
                raise cv.Invalid(
                    "You need to remove the global wakeup_pin_mode and define it per pin"
                )
            if wakeup_pins:
                wakeup_pins[0][CONF_WAKEUP_PIN_MODE] = config.pop(CONF_WAKEUP_PIN_MODE)
    elif (
        isinstance(config.get(CONF_WAKEUP_PIN), list)
        and len(config[CONF_WAKEUP_PIN]) > 1
    ):
        raise cv.Invalid(
            "Your platform does not support providing multiple entries in wakeup_pin"
        )

    return config


def _validate_ex1_wakeup_mode(value):
    if value == "ALL_LOW":
        esp32.only_on_variant(supported=[VARIANT_ESP32], msg_prefix="ALL_LOW")(value)
@@ -141,6 +190,15 @@ def _validate_ex1_wakeup_mode(value):
    return value


def _validate_sleep_duration(value: core.TimePeriod) -> core.TimePeriod:
    if not CORE.is_bk72xx:
        return value
    max_duration = core.TimePeriod(hours=36)
    if value > max_duration:
        raise cv.Invalid("sleep duration cannot be more than 36 hours on BK72XX")
    return value


deep_sleep_ns = cg.esphome_ns.namespace("deep_sleep")
DeepSleepComponent = deep_sleep_ns.class_("DeepSleepComponent", cg.Component)
EnterDeepSleepAction = deep_sleep_ns.class_("EnterDeepSleepAction", automation.Action)
@@ -186,6 +244,13 @@ WAKEUP_CAUSES_SCHEMA = cv.Schema(
    }
)

WAKEUP_PIN_SCHEMA = cv.Schema(
    {
        cv.Required(CONF_PIN): pins.internal_gpio_input_pin_schema,
        cv.Optional(CONF_WAKEUP_PIN_MODE): cv.enum(WAKEUP_PIN_MODES, upper=True),
    }
)

CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
@@ -194,14 +259,15 @@ CONFIG_SCHEMA = cv.All(
                cv.All(cv.only_on_esp32, WAKEUP_CAUSES_SCHEMA),
                cv.positive_time_period_milliseconds,
            ),
            cv.Optional(CONF_SLEEP_DURATION): cv.positive_time_period_milliseconds,
            cv.Optional(CONF_WAKEUP_PIN): cv.All(
                cv.only_on_esp32,
                pins.internal_gpio_input_pin_schema,
                validate_pin_number,
            cv.Optional(CONF_SLEEP_DURATION): cv.All(
                cv.positive_time_period_milliseconds,
                _validate_sleep_duration,
            ),
            cv.Optional(CONF_WAKEUP_PIN): validate_wakeup_pin,
            cv.Optional(CONF_WAKEUP_PIN_MODE): cv.All(
                cv.only_on_esp32, cv.enum(WAKEUP_PIN_MODES, upper=True)
                cv.only_on([PLATFORM_ESP32, PLATFORM_BK72XX]),
                cv.enum(WAKEUP_PIN_MODES, upper=True),
            ),
            cv.Optional(CONF_ESP32_EXT1_WAKEUP): cv.All(
                cv.only_on_esp32,
@@ -212,7 +278,8 @@ CONFIG_SCHEMA = cv.All(
                cv.Schema(
                    {
                        cv.Required(CONF_PINS): cv.ensure_list(
                            pins.internal_gpio_input_pin_schema, validate_pin_number
                            pins.internal_gpio_input_pin_schema,
                            validate_pin_number_esp32,
                        ),
                        cv.Required(CONF_MODE): cv.All(
                            cv.enum(EXT1_WAKEUP_MODES, upper=True),
@@ -238,7 +305,8 @@ CONFIG_SCHEMA = cv.All(
            ),
        }
    ).extend(cv.COMPONENT_SCHEMA),
    cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266]),
    cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266, PLATFORM_BK72XX]),
    validate_config,
)

@@ -249,8 +317,21 @@ async def to_code(config):
    if CONF_SLEEP_DURATION in config:
        cg.add(var.set_sleep_duration(config[CONF_SLEEP_DURATION]))
    if CONF_WAKEUP_PIN in config:
        pin = await cg.gpio_pin_expression(config[CONF_WAKEUP_PIN])
        cg.add(var.set_wakeup_pin(pin))
        pins_as_list = config.get(CONF_WAKEUP_PIN, [])
        if CORE.is_bk72xx:
            cg.add(var.init_wakeup_pins_(len(pins_as_list)))
            for item in pins_as_list:
                cg.add(
                    var.add_wakeup_pin(
                        await cg.gpio_pin_expression(item[CONF_PIN]),
                        item.get(
                            CONF_WAKEUP_PIN_MODE, WakeupPinMode.WAKEUP_PIN_MODE_IGNORE
                        ),
                    )
                )
        else:
            pin = await cg.gpio_pin_expression(pins_as_list[0][CONF_PIN])
            cg.add(var.set_wakeup_pin(pin))
    if CONF_WAKEUP_PIN_MODE in config:
        cg.add(var.set_wakeup_pin_mode(config[CONF_WAKEUP_PIN_MODE]))
    if CONF_RUN_DURATION in config:
@@ -305,7 +386,10 @@ DEEP_SLEEP_ENTER_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.Exclusive(CONF_SLEEP_DURATION, "time"): cv.templatable(
                cv.positive_time_period_milliseconds
                cv.All(
                    cv.positive_time_period_milliseconds,
                    _validate_sleep_duration,
                )
            ),
            # Only on ESP32 due to how long the RTC on ESP8266 can stay asleep
            cv.Exclusive(CONF_UNTIL, "time"): cv.All(
@@ -363,5 +447,6 @@ FILTER_SOURCE_FILES = filter_source_files_from_platform(
            PlatformFramework.ESP32_IDF,
        },
        "deep_sleep_esp8266.cpp": {PlatformFramework.ESP8266_ARDUINO},
        "deep_sleep_bk72xx.cpp": {PlatformFramework.BK72XX_ARDUINO},
    }
)
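Editor's note: the wakeup-pin handling above accepts either a single pin or a list. A plain-Python sketch of the normalisation validate_wakeup_pin performs, with the schema and pin-number checks stubbed out (function and key names here are illustrative, not esphome APIs):

# --- Illustrative sketch (not part of this commit) ---
def normalize_wakeup_pins(value):
    # A single pin entry is wrapped into a one-element list of {"pin": ...} dicts,
    # then every entry is validated individually (WAKEUP_PIN_SCHEMA + pin check above).
    if not isinstance(value, list):
        value = [{"pin": value}]
    return [validate_entry(entry) for entry in value]


def validate_entry(entry):
    # Stand-in for WAKEUP_PIN_SCHEMA(pin_config) and validate_pin_number(...)
    assert "pin" in entry
    return entry


print(normalize_wakeup_pins("GPIO0"))             # -> [{'pin': 'GPIO0'}]
print(normalize_wakeup_pins([{"pin": "GPIO0"}]))  # a list passes through item by item
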
esphome/components/deep_sleep/deep_sleep_bk72xx.cpp  (new file, 64 lines)
@@ -0,0 +1,64 @@
#ifdef USE_BK72XX

#include "deep_sleep_component.h"
#include "esphome/core/log.h"

namespace esphome::deep_sleep {

static const char *const TAG = "deep_sleep.bk72xx";

optional<uint32_t> DeepSleepComponent::get_run_duration_() const { return this->run_duration_; }

void DeepSleepComponent::dump_config_platform_() {
  for (const WakeUpPinItem &item : this->wakeup_pins_) {
    LOG_PIN(" Wakeup Pin: ", item.wakeup_pin);
  }
}

bool DeepSleepComponent::pin_prevents_sleep_(WakeUpPinItem &pinItem) const {
  return (pinItem.wakeup_pin_mode == WAKEUP_PIN_MODE_KEEP_AWAKE && pinItem.wakeup_pin != nullptr &&
          !this->sleep_duration_.has_value() && (pinItem.wakeup_level == get_real_pin_state_(*pinItem.wakeup_pin)));
}

bool DeepSleepComponent::prepare_to_sleep_() {
  if (wakeup_pins_.size() > 0) {
    for (WakeUpPinItem &item : this->wakeup_pins_) {
      if (pin_prevents_sleep_(item)) {
        // Defer deep sleep until inactive
        if (!this->next_enter_deep_sleep_) {
          this->status_set_warning();
          ESP_LOGV(TAG, "Waiting for pin to switch state to enter deep sleep...");
        }
        this->next_enter_deep_sleep_ = true;
        return false;
      }
    }
  }
  return true;
}

void DeepSleepComponent::deep_sleep_() {
  for (WakeUpPinItem &item : this->wakeup_pins_) {
    if (item.wakeup_pin_mode == WAKEUP_PIN_MODE_INVERT_WAKEUP) {
      if (item.wakeup_level == get_real_pin_state_(*item.wakeup_pin)) {
        item.wakeup_level = !item.wakeup_level;
      }
    }
    ESP_LOGI(TAG, "Wake-up on P%u %s (%d)", item.wakeup_pin->get_pin(), item.wakeup_level ? "HIGH" : "LOW",
             static_cast<int32_t>(item.wakeup_pin_mode));
  }

  if (this->sleep_duration_.has_value())
    lt_deep_sleep_config_timer((*this->sleep_duration_ / 1000) & 0xFFFFFFFF);

  for (WakeUpPinItem &item : this->wakeup_pins_) {
    lt_deep_sleep_config_gpio(1 << item.wakeup_pin->get_pin(), item.wakeup_level);
    lt_deep_sleep_keep_floating_gpio(1 << item.wakeup_pin->get_pin(), true);
  }

  lt_deep_sleep_enter();
}

}  // namespace esphome::deep_sleep

#endif  // USE_BK72XX
@@ -19,7 +19,7 @@
|
||||
namespace esphome {
|
||||
namespace deep_sleep {
|
||||
|
||||
#ifdef USE_ESP32
|
||||
#if defined(USE_ESP32) || defined(USE_BK72XX)
|
||||
|
||||
/** The values of this enum define what should be done if deep sleep is set up with a wakeup pin on the ESP32
|
||||
* and the scenario occurs that the wakeup pin is already in the wakeup state.
|
||||
@@ -33,7 +33,17 @@ enum WakeupPinMode {
|
||||
*/
|
||||
WAKEUP_PIN_MODE_INVERT_WAKEUP,
|
||||
};
|
||||
#endif
|
||||
|
||||
#if defined(USE_BK72XX)
|
||||
struct WakeUpPinItem {
|
||||
InternalGPIOPin *wakeup_pin;
|
||||
WakeupPinMode wakeup_pin_mode;
|
||||
bool wakeup_level;
|
||||
};
|
||||
#endif // USE_BK72XX
|
||||
|
||||
#ifdef USE_ESP32
|
||||
#if defined(USE_ESP32) && !defined(USE_ESP32_VARIANT_ESP32C2) && !defined(USE_ESP32_VARIANT_ESP32C3)
|
||||
struct Ext1Wakeup {
|
||||
uint64_t mask;
|
||||
@@ -75,6 +85,13 @@ class DeepSleepComponent : public Component {
|
||||
void set_wakeup_pin_mode(WakeupPinMode wakeup_pin_mode);
|
||||
#endif // USE_ESP32
|
||||
|
||||
#if defined(USE_BK72XX)
|
||||
void init_wakeup_pins_(size_t capacity) { this->wakeup_pins_.init(capacity); }
|
||||
void add_wakeup_pin(InternalGPIOPin *wakeup_pin, WakeupPinMode wakeup_pin_mode) {
|
||||
this->wakeup_pins_.emplace_back(WakeUpPinItem{wakeup_pin, wakeup_pin_mode, !wakeup_pin->is_inverted()});
|
||||
}
|
||||
#endif // USE_BK72XX
|
||||
|
||||
#if defined(USE_ESP32)
|
||||
#if !defined(USE_ESP32_VARIANT_ESP32C2) && !defined(USE_ESP32_VARIANT_ESP32C3)
|
||||
void set_ext1_wakeup(Ext1Wakeup ext1_wakeup);
|
||||
@@ -114,7 +131,17 @@ class DeepSleepComponent : public Component {
|
||||
bool prepare_to_sleep_();
|
||||
void deep_sleep_();
|
||||
|
||||
#ifdef USE_BK72XX
|
||||
bool pin_prevents_sleep_(WakeUpPinItem &pinItem) const;
|
||||
bool get_real_pin_state_(InternalGPIOPin &pin) const { return (pin.digital_read() ^ pin.is_inverted()); }
|
||||
#endif // USE_BK72XX
|
||||
|
||||
optional<uint64_t> sleep_duration_;
|
||||
|
||||
#ifdef USE_BK72XX
|
||||
FixedVector<WakeUpPinItem> wakeup_pins_;
|
||||
#endif // USE_BK72XX
|
||||
|
||||
#ifdef USE_ESP32
|
||||
InternalGPIOPin *wakeup_pin_;
|
||||
WakeupPinMode wakeup_pin_mode_{WAKEUP_PIN_MODE_IGNORE};
|
||||
@@ -124,8 +151,10 @@ class DeepSleepComponent : public Component {
|
||||
#endif
|
||||
|
||||
optional<bool> touch_wakeup_;
|
||||
|
||||
optional<WakeupCauseToRunDuration> wakeup_cause_to_run_duration_;
|
||||
#endif // USE_ESP32
|
||||
|
||||
optional<uint32_t> run_duration_;
|
||||
bool next_enter_deep_sleep_{false};
|
||||
bool prevent_{false};
|
||||
|
||||
@@ -13,6 +13,7 @@ from esphome.const import (
|
||||
CONF_ADVANCED,
|
||||
CONF_BOARD,
|
||||
CONF_COMPONENTS,
|
||||
CONF_DISABLED,
|
||||
CONF_ESPHOME,
|
||||
CONF_FRAMEWORK,
|
||||
CONF_IGNORE_EFUSE_CUSTOM_MAC,
|
||||
@@ -24,6 +25,7 @@ from esphome.const import (
|
||||
CONF_PLATFORMIO_OPTIONS,
|
||||
CONF_REF,
|
||||
CONF_REFRESH,
|
||||
CONF_SAFE_MODE,
|
||||
CONF_SOURCE,
|
||||
CONF_TYPE,
|
||||
CONF_VARIANT,
|
||||
@@ -81,6 +83,7 @@ CONF_ASSERTION_LEVEL = "assertion_level"
|
||||
CONF_COMPILER_OPTIMIZATION = "compiler_optimization"
|
||||
CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES = "enable_idf_experimental_features"
|
||||
CONF_ENABLE_LWIP_ASSERT = "enable_lwip_assert"
|
||||
CONF_ENABLE_OTA_ROLLBACK = "enable_ota_rollback"
|
||||
CONF_EXECUTE_FROM_PSRAM = "execute_from_psram"
|
||||
CONF_RELEASE = "release"
|
||||
|
||||
@@ -118,8 +121,8 @@ ARDUINO_ALLOWED_VARIANTS = [
|
||||
]
|
||||
|
||||
|
||||
def get_cpu_frequencies(*frequencies):
|
||||
return [str(x) + "MHZ" for x in frequencies]
|
||||
def get_cpu_frequencies(*frequencies: int) -> list[str]:
|
||||
return [f"{frequency}MHZ" for frequency in frequencies]
|
||||
|
||||
|
||||
CPU_FREQUENCIES = {
|
||||
@@ -136,7 +139,7 @@ CPU_FREQUENCIES = {
|
||||
}
|
||||
|
||||
# Make sure no variant is missed here when a new one is added.
|
||||
assert all(v in CPU_FREQUENCIES for v in VARIANTS)
|
||||
assert all(variant in CPU_FREQUENCIES for variant in VARIANTS)
|
||||
|
||||
FULL_CPU_FREQUENCIES = set(itertools.chain.from_iterable(CPU_FREQUENCIES.values()))
|
||||
|
||||
@@ -250,10 +253,10 @@ def add_idf_sdkconfig_option(name: str, value: SdkconfigValueType):
|
||||
def add_idf_component(
|
||||
*,
|
||||
name: str,
|
||||
repo: str = None,
|
||||
ref: str = None,
|
||||
path: str = None,
|
||||
refresh: TimePeriod = None,
|
||||
repo: str | None = None,
|
||||
ref: str | None = None,
|
||||
path: str | None = None,
|
||||
refresh: TimePeriod | None = None,
|
||||
components: list[str] | None = None,
|
||||
submodules: list[str] | None = None,
|
||||
):
|
||||
@@ -334,7 +337,7 @@ def _format_framework_espidf_version(ver: cv.Version, release: str) -> str:
|
||||
return f"pioarduino/framework-espidf@https://github.com/pioarduino/esp-idf/releases/download/v{str(ver)}/esp-idf-v{str(ver)}.{ext}"
|
||||
|
||||
|
||||
def _is_framework_url(source: str) -> str:
|
||||
def _is_framework_url(source: str) -> bool:
|
||||
# platformio accepts many URL schemes for framework repositories and archives including http, https, git, file, and symlink
|
||||
import urllib.parse
|
||||
|
||||
@@ -571,6 +574,13 @@ def final_validate(config):
|
||||
path=[CONF_FLASH_SIZE],
|
||||
)
|
||||
)
|
||||
if advanced[CONF_ENABLE_OTA_ROLLBACK]:
|
||||
safe_mode_config = full_config.get(CONF_SAFE_MODE)
|
||||
if safe_mode_config is None or safe_mode_config.get(CONF_DISABLED, False):
|
||||
_LOGGER.warning(
|
||||
"OTA rollback requires safe_mode, disabling rollback support"
|
||||
)
|
||||
advanced[CONF_ENABLE_OTA_ROLLBACK] = False
|
||||
if errs:
|
||||
raise cv.MultipleInvalid(errs)
|
||||
|
||||
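Editor's note: the final_validate hunk above gates enable_ota_rollback on safe_mode. A standalone sketch of that guard, plain Python with dict keys standing in for the CONF_* constants used in the diff:

# --- Illustrative sketch (not part of this commit) ---
def resolve_ota_rollback(enable_ota_rollback: bool, safe_mode_config: dict | None) -> bool:
    # Rollback support is silently disabled when safe_mode is absent or explicitly
    # disabled, because safe_mode is what later marks the new app partition as valid.
    if not enable_ota_rollback:
        return False
    if safe_mode_config is None or safe_mode_config.get("disabled", False):
        print("OTA rollback requires safe_mode, disabling rollback support")
        return False
    return True

assert resolve_ota_rollback(True, {"disabled": False}) is True
assert resolve_ota_rollback(True, None) is False
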
@@ -705,6 +715,7 @@ FRAMEWORK_SCHEMA = cv.Schema(
|
||||
cv.Optional(CONF_LOOP_TASK_STACK_SIZE, default=8192): cv.int_range(
|
||||
min=8192, max=32768
|
||||
),
|
||||
cv.Optional(CONF_ENABLE_OTA_ROLLBACK, default=True): cv.boolean,
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
|
||||
@@ -985,15 +996,6 @@ async def to_code(config):
|
||||
cg.add_platformio_option("framework", "arduino, espidf")
|
||||
cg.add_build_flag("-DUSE_ARDUINO")
|
||||
cg.add_build_flag("-DUSE_ESP32_FRAMEWORK_ARDUINO")
|
||||
cg.add_platformio_option(
|
||||
"board_build.embed_txtfiles",
|
||||
[
|
||||
"managed_components/espressif__esp_insights/server_certs/https_server.crt",
|
||||
"managed_components/espressif__esp_rainmaker/server_certs/rmaker_mqtt_server.crt",
|
||||
"managed_components/espressif__esp_rainmaker/server_certs/rmaker_claim_service_server.crt",
|
||||
"managed_components/espressif__esp_rainmaker/server_certs/rmaker_ota_server.crt",
|
||||
],
|
||||
)
|
||||
cg.add_define(
|
||||
"USE_ARDUINO_VERSION_CODE",
|
||||
cg.RawExpression(
|
||||
@@ -1176,6 +1178,11 @@ async def to_code(config):
|
||||
"CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH", True
|
||||
)
|
||||
|
||||
# Enable OTA rollback support
|
||||
if advanced[CONF_ENABLE_OTA_ROLLBACK]:
|
||||
add_idf_sdkconfig_option("CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE", True)
|
||||
cg.add_define("USE_OTA_ROLLBACK")
|
||||
|
||||
cg.add_define("ESPHOME_LOOP_TASK_STACK_SIZE", advanced[CONF_LOOP_TASK_STACK_SIZE])
|
||||
|
||||
cg.add_define(
|
||||
@@ -1205,7 +1212,7 @@ APP_PARTITION_SIZES = {
|
||||
}
|
||||
|
||||
|
||||
def get_arduino_partition_csv(flash_size):
|
||||
def get_arduino_partition_csv(flash_size: str):
|
||||
app_partition_size = APP_PARTITION_SIZES[flash_size]
|
||||
eeprom_partition_size = 0x1000 # 4 KB
|
||||
spiffs_partition_size = 0xF000 # 60 KB
|
||||
@@ -1225,7 +1232,7 @@ spiffs, data, spiffs, 0x{spiffs_partition_start:X}, 0x{spiffs_partition_size:
|
||||
"""
|
||||
|
||||
|
||||
def get_idf_partition_csv(flash_size):
|
||||
def get_idf_partition_csv(flash_size: str):
|
||||
app_partition_size = APP_PARTITION_SIZES[flash_size]
|
||||
|
||||
return f"""\
|
||||
|
||||
@@ -38,15 +38,11 @@ void arch_init() {
|
||||
// Enable the task watchdog only on the loop task (from which we're currently running)
|
||||
esp_task_wdt_add(nullptr);
|
||||
|
||||
// If the bootloader was compiled with CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE the current
|
||||
// partition will get rolled back unless it is marked as valid.
|
||||
esp_ota_img_states_t state;
|
||||
const esp_partition_t *running = esp_ota_get_running_partition();
|
||||
if (esp_ota_get_state_partition(running, &state) == ESP_OK) {
|
||||
if (state == ESP_OTA_IMG_PENDING_VERIFY) {
|
||||
esp_ota_mark_app_valid_cancel_rollback();
|
||||
}
|
||||
}
|
||||
// Handle OTA rollback: mark partition valid immediately unless USE_OTA_ROLLBACK is enabled,
|
||||
// in which case safe_mode will mark it valid after confirming successful boot.
|
||||
#ifndef USE_OTA_ROLLBACK
|
||||
esp_ota_mark_app_valid_cancel_rollback();
|
||||
#endif
|
||||
}
|
||||
void IRAM_ATTR HOT arch_feed_wdt() { esp_task_wdt_reset(); }
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import json # noqa: E402
|
||||
import os # noqa: E402
|
||||
import pathlib # noqa: E402
|
||||
import shutil # noqa: E402
|
||||
from glob import glob # noqa: E402
|
||||
|
||||
|
||||
def merge_factory_bin(source, target, env):
|
||||
@@ -126,3 +127,14 @@ def esp32_copy_ota_bin(source, target, env):
|
||||
# Run merge first, then ota copy second
|
||||
env.AddPostAction("$BUILD_DIR/${PROGNAME}.bin", merge_factory_bin) # noqa: F821
|
||||
env.AddPostAction("$BUILD_DIR/${PROGNAME}.bin", esp32_copy_ota_bin) # noqa: F821
|
||||
|
||||
# Find server certificates in managed components and generate .S files.
|
||||
# Workaround for PlatformIO not processing target_add_binary_data() from managed component CMakeLists.
|
||||
project_dir = env.subst("$PROJECT_DIR")
|
||||
managed_components = os.path.join(project_dir, "managed_components")
|
||||
if os.path.isdir(managed_components):
|
||||
for cert_file in glob(os.path.join(managed_components, "**/server_certs/*.crt"), recursive=True):
|
||||
try:
|
||||
env.FileToAsm(cert_file, FILE_TYPE="TEXT")
|
||||
except Exception as e:
|
||||
print(f"Error processing {os.path.basename(cert_file)}: {e}")
|
||||
|
||||
@@ -3,7 +3,7 @@ import logging
|
||||
from esphome import automation, pins
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import i2c
|
||||
from esphome.components.esp32 import add_idf_component
|
||||
from esphome.components.esp32 import add_idf_component, add_idf_sdkconfig_option
|
||||
from esphome.components.psram import DOMAIN as psram_domain
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
@@ -186,7 +186,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
{
|
||||
cv.Required(CONF_PIN): pins.internal_gpio_input_pin_number,
|
||||
cv.Optional(CONF_FREQUENCY, default="20MHz"): cv.All(
|
||||
cv.frequency, cv.Range(min=8e6, max=20e6)
|
||||
cv.frequency, cv.float_range(min=8e6, max=20e6)
|
||||
),
|
||||
}
|
||||
),
|
||||
@@ -352,6 +352,8 @@ async def to_code(config):
|
||||
cg.add_define("USE_CAMERA")
|
||||
|
||||
add_idf_component(name="espressif/esp32-camera", ref="2.1.1")
|
||||
add_idf_sdkconfig_option("CONFIG_SCCB_HARDWARE_I2C_DRIVER_NEW", True)
|
||||
add_idf_sdkconfig_option("CONFIG_SCCB_HARDWARE_I2C_DRIVER_LEGACY", False)
|
||||
|
||||
for conf in config.get(CONF_ON_STREAM_START, []):
|
||||
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
|
||||
|
||||
@@ -16,7 +16,7 @@ def valid_pwm_pin(value):
|
||||
esp8266_pwm_ns = cg.esphome_ns.namespace("esp8266_pwm")
|
||||
ESP8266PWM = esp8266_pwm_ns.class_("ESP8266PWM", output.FloatOutput, cg.Component)
|
||||
SetFrequencyAction = esp8266_pwm_ns.class_("SetFrequencyAction", automation.Action)
|
||||
validate_frequency = cv.All(cv.frequency, cv.Range(min=1.0e-6))
|
||||
validate_frequency = cv.All(cv.frequency, cv.float_range(min=1.0e-6))
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
output.FLOAT_OUTPUT_SCHEMA.extend(
|
||||
|
||||
@@ -69,9 +69,6 @@ def validate_url(value):
|
||||
def validate_ssl_verification(config):
|
||||
error_message = ""
|
||||
|
||||
if CORE.is_esp32 and not CORE.using_esp_idf and config[CONF_VERIFY_SSL]:
|
||||
error_message = "ESPHome supports certificate verification only via ESP-IDF"
|
||||
|
||||
if CORE.is_rp2040 and config[CONF_VERIFY_SSL]:
|
||||
error_message = "ESPHome does not support certificate verification on RP2040"
|
||||
|
||||
@@ -93,9 +90,9 @@ def validate_ssl_verification(config):
|
||||
def _declare_request_class(value):
|
||||
if CORE.is_host:
|
||||
return cv.declare_id(HttpRequestHost)(value)
|
||||
if CORE.using_esp_idf:
|
||||
if CORE.is_esp32:
|
||||
return cv.declare_id(HttpRequestIDF)(value)
|
||||
if CORE.is_esp8266 or CORE.is_esp32 or CORE.is_rp2040:
|
||||
if CORE.is_esp8266 or CORE.is_rp2040:
|
||||
return cv.declare_id(HttpRequestArduino)(value)
|
||||
return NotImplementedError
|
||||
|
||||
@@ -121,11 +118,11 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.positive_not_null_time_period,
|
||||
cv.positive_time_period_milliseconds,
|
||||
),
|
||||
cv.SplitDefault(CONF_BUFFER_SIZE_RX, esp32_idf=512): cv.All(
|
||||
cv.uint16_t, cv.only_with_esp_idf
|
||||
cv.SplitDefault(CONF_BUFFER_SIZE_RX, esp32=512): cv.All(
|
||||
cv.uint16_t, cv.only_on_esp32
|
||||
),
|
||||
cv.SplitDefault(CONF_BUFFER_SIZE_TX, esp32_idf=512): cv.All(
|
||||
cv.uint16_t, cv.only_with_esp_idf
|
||||
cv.SplitDefault(CONF_BUFFER_SIZE_TX, esp32=512): cv.All(
|
||||
cv.uint16_t, cv.only_on_esp32
|
||||
),
|
||||
cv.Optional(CONF_CA_CERTIFICATE_PATH): cv.All(
|
||||
cv.file_,
|
||||
@@ -158,25 +155,20 @@ async def to_code(config):
|
||||
cg.add(var.set_watchdog_timeout(timeout_ms))
|
||||
|
||||
if CORE.is_esp32:
|
||||
if CORE.using_esp_idf:
|
||||
cg.add(var.set_buffer_size_rx(config[CONF_BUFFER_SIZE_RX]))
|
||||
cg.add(var.set_buffer_size_tx(config[CONF_BUFFER_SIZE_TX]))
|
||||
cg.add(var.set_buffer_size_rx(config[CONF_BUFFER_SIZE_RX]))
|
||||
cg.add(var.set_buffer_size_tx(config[CONF_BUFFER_SIZE_TX]))
|
||||
|
||||
esp32.add_idf_sdkconfig_option(
|
||||
"CONFIG_MBEDTLS_CERTIFICATE_BUNDLE",
|
||||
config.get(CONF_VERIFY_SSL),
|
||||
)
|
||||
esp32.add_idf_sdkconfig_option(
|
||||
"CONFIG_ESP_TLS_INSECURE",
|
||||
not config.get(CONF_VERIFY_SSL),
|
||||
)
|
||||
esp32.add_idf_sdkconfig_option(
|
||||
"CONFIG_ESP_TLS_SKIP_SERVER_CERT_VERIFY",
|
||||
not config.get(CONF_VERIFY_SSL),
|
||||
)
|
||||
else:
|
||||
cg.add_library("NetworkClientSecure", None)
|
||||
cg.add_library("HTTPClient", None)
|
||||
if config.get(CONF_VERIFY_SSL):
|
||||
esp32.add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True)
|
||||
|
||||
esp32.add_idf_sdkconfig_option(
|
||||
"CONFIG_ESP_TLS_INSECURE",
|
||||
not config.get(CONF_VERIFY_SSL),
|
||||
)
|
||||
esp32.add_idf_sdkconfig_option(
|
||||
"CONFIG_ESP_TLS_SKIP_SERVER_CERT_VERIFY",
|
||||
not config.get(CONF_VERIFY_SSL),
|
||||
)
|
||||
if CORE.is_esp8266:
|
||||
cg.add_library("ESP8266HTTPClient", None)
|
||||
if CORE.is_rp2040 and CORE.using_arduino:
|
||||
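Editor's note: the http_request to_code hunk above drops the IDF-only branch and maps verify_ssl onto three ESP-IDF sdkconfig switches. A standalone sketch of that mapping, with option names copied from the diff and a dict standing in for the esp32.add_idf_sdkconfig_option calls:

# --- Illustrative sketch (not part of this commit) ---
def ssl_sdkconfig_options(verify_ssl: bool) -> dict[str, bool]:
    options = {}
    if verify_ssl:
        options["CONFIG_MBEDTLS_CERTIFICATE_BUNDLE"] = True
    options["CONFIG_ESP_TLS_INSECURE"] = not verify_ssl
    options["CONFIG_ESP_TLS_SKIP_SERVER_CERT_VERIFY"] = not verify_ssl
    return options

print(ssl_sdkconfig_options(True))   # certificate bundle on, insecure flags off
print(ssl_sdkconfig_options(False))  # no bundle, insecure flags on
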
@@ -327,13 +319,15 @@ FILTER_SOURCE_FILES = filter_source_files_from_platform(
|
||||
{
|
||||
"http_request_host.cpp": {PlatformFramework.HOST_NATIVE},
|
||||
"http_request_arduino.cpp": {
|
||||
PlatformFramework.ESP32_ARDUINO,
|
||||
PlatformFramework.ESP8266_ARDUINO,
|
||||
PlatformFramework.RP2040_ARDUINO,
|
||||
PlatformFramework.BK72XX_ARDUINO,
|
||||
PlatformFramework.RTL87XX_ARDUINO,
|
||||
PlatformFramework.LN882X_ARDUINO,
|
||||
},
|
||||
"http_request_idf.cpp": {PlatformFramework.ESP32_IDF},
|
||||
"http_request_idf.cpp": {
|
||||
PlatformFramework.ESP32_ARDUINO,
|
||||
PlatformFramework.ESP32_IDF,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -4,8 +4,7 @@
|
||||
|
||||
#include <cinttypes>
|
||||
|
||||
namespace esphome {
|
||||
namespace http_request {
|
||||
namespace esphome::http_request {
|
||||
|
||||
static const char *const TAG = "http_request";
|
||||
|
||||
@@ -42,5 +41,4 @@ std::string HttpContainer::get_response_header(const std::string &header_name) {
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace http_request
|
||||
} // namespace esphome
|
||||
} // namespace esphome::http_request
|
||||
|
||||
@@ -15,8 +15,7 @@
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace http_request {
|
||||
namespace esphome::http_request {
|
||||
|
||||
struct Header {
|
||||
std::string name;
|
||||
@@ -305,5 +304,4 @@ template<typename... Ts> class HttpRequestSendAction : public Action<Ts...> {
|
||||
size_t max_response_buffer_size_{SIZE_MAX};
|
||||
};
|
||||
|
||||
} // namespace http_request
|
||||
} // namespace esphome
|
||||
} // namespace esphome::http_request
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#include "http_request_arduino.h"
|
||||
|
||||
#ifdef USE_ARDUINO
|
||||
#if defined(USE_ARDUINO) && !defined(USE_ESP32)
|
||||
|
||||
#include "esphome/components/network/util.h"
|
||||
#include "esphome/components/watchdog/watchdog.h"
|
||||
@@ -9,8 +9,7 @@
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace http_request {
|
||||
namespace esphome::http_request {
|
||||
|
||||
static const char *const TAG = "http_request.arduino";
|
||||
|
||||
@@ -75,8 +74,6 @@ std::shared_ptr<HttpContainer> HttpRequestArduino::perform(const std::string &ur
|
||||
container->client_.setInsecure();
|
||||
}
|
||||
bool status = container->client_.begin(url.c_str());
|
||||
#elif defined(USE_ESP32)
|
||||
bool status = container->client_.begin(url.c_str());
|
||||
#endif
|
||||
|
||||
App.feed_wdt();
|
||||
@@ -90,9 +87,6 @@ std::shared_ptr<HttpContainer> HttpRequestArduino::perform(const std::string &ur
|
||||
|
||||
container->client_.setReuse(true);
|
||||
container->client_.setTimeout(this->timeout_);
|
||||
#if defined(USE_ESP32)
|
||||
container->client_.setConnectTimeout(this->timeout_);
|
||||
#endif
|
||||
|
||||
if (this->useragent_ != nullptr) {
|
||||
container->client_.setUserAgent(this->useragent_);
|
||||
@@ -177,7 +171,6 @@ void HttpContainerArduino::end() {
|
||||
this->client_.end();
|
||||
}
|
||||
|
||||
} // namespace http_request
|
||||
} // namespace esphome
|
||||
} // namespace esphome::http_request
|
||||
|
||||
#endif // USE_ARDUINO
|
||||
#endif // USE_ARDUINO && !USE_ESP32
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
|
||||
#include "http_request.h"
|
||||
|
||||
#ifdef USE_ARDUINO
|
||||
#if defined(USE_ARDUINO) && !defined(USE_ESP32)
|
||||
|
||||
#if defined(USE_ESP32) || defined(USE_RP2040)
|
||||
#if defined(USE_RP2040)
|
||||
#include <HTTPClient.h>
|
||||
#include <WiFiClient.h>
|
||||
#endif
|
||||
@@ -15,8 +15,7 @@
|
||||
#endif
|
||||
#endif
|
||||
|
||||
namespace esphome {
|
||||
namespace http_request {
|
||||
namespace esphome::http_request {
|
||||
|
||||
class HttpRequestArduino;
|
||||
class HttpContainerArduino : public HttpContainer {
|
||||
@@ -36,7 +35,6 @@ class HttpRequestArduino : public HttpRequestComponent {
|
||||
const std::set<std::string> &collect_headers) override;
|
||||
};
|
||||
|
||||
} // namespace http_request
|
||||
} // namespace esphome
|
||||
} // namespace esphome::http_request
|
||||
|
||||
#endif // USE_ARDUINO
|
||||
#endif // USE_ARDUINO && !USE_ESP32
|
||||
|
||||
@@ -12,8 +12,7 @@
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace http_request {
|
||||
namespace esphome::http_request {
|
||||
|
||||
static const char *const TAG = "http_request.host";
|
||||
|
||||
@@ -139,7 +138,6 @@ void HttpContainerHost::end() {
|
||||
this->bytes_read_ = 0;
|
||||
}
|
||||
|
||||
} // namespace http_request
|
||||
} // namespace esphome
|
||||
} // namespace esphome::http_request
|
||||
|
||||
#endif // USE_HOST
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
#ifdef USE_HOST
|
||||
#include "http_request.h"
|
||||
namespace esphome {
|
||||
namespace http_request {
|
||||
|
||||
namespace esphome::http_request {
|
||||
|
||||
class HttpRequestHost;
|
||||
class HttpContainerHost : public HttpContainer {
|
||||
@@ -27,7 +27,6 @@ class HttpRequestHost : public HttpRequestComponent {
|
||||
const char *ca_path_{};
|
||||
};
|
||||
|
||||
} // namespace http_request
|
||||
} // namespace esphome
|
||||
} // namespace esphome::http_request
|
||||
|
||||
#endif // USE_HOST
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#include "http_request_idf.h"
|
||||
|
||||
#ifdef USE_ESP_IDF
|
||||
#ifdef USE_ESP32
|
||||
|
||||
#include "esphome/components/network/util.h"
|
||||
#include "esphome/components/watchdog/watchdog.h"
|
||||
@@ -14,8 +14,7 @@
|
||||
|
||||
#include "esp_task_wdt.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace http_request {
|
||||
namespace esphome::http_request {
|
||||
|
||||
static const char *const TAG = "http_request.idf";
|
||||
|
||||
@@ -245,7 +244,6 @@ void HttpContainerIDF::feed_wdt() {
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace http_request
|
||||
} // namespace esphome
|
||||
} // namespace esphome::http_request
|
||||
|
||||
#endif // USE_ESP_IDF
|
||||
#endif // USE_ESP32
|
||||
|
||||
@@ -2,15 +2,14 @@
|
||||
|
||||
#include "http_request.h"
|
||||
|
||||
#ifdef USE_ESP_IDF
|
||||
#ifdef USE_ESP32
|
||||
|
||||
#include <esp_event.h>
|
||||
#include <esp_http_client.h>
|
||||
#include <esp_netif.h>
|
||||
#include <esp_tls.h>
|
||||
|
||||
namespace esphome {
|
||||
namespace http_request {
|
||||
namespace esphome::http_request {
|
||||
|
||||
class HttpContainerIDF : public HttpContainer {
|
||||
public:
|
||||
@@ -48,7 +47,6 @@ class HttpRequestIDF : public HttpRequestComponent {
|
||||
static esp_err_t http_event_handler(esp_http_client_event_t *evt);
|
||||
};
|
||||
|
||||
} // namespace http_request
|
||||
} // namespace esphome
|
||||
} // namespace esphome::http_request
|
||||
|
||||
#endif // USE_ESP_IDF
|
||||
#endif // USE_ESP32
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from typing import Any
|
||||
|
||||
from esphome import pins
|
||||
from esphome import automation, pins
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import display
|
||||
from esphome.components.esp32 import add_idf_component
|
||||
@@ -17,6 +17,8 @@ from esphome.const import (
|
||||
CONF_OE_PIN,
|
||||
CONF_UPDATE_INTERVAL,
|
||||
)
|
||||
from esphome.core import ID
|
||||
from esphome.cpp_generator import MockObj, TemplateArgsType
|
||||
import esphome.final_validate as fv
|
||||
from esphome.types import ConfigType
|
||||
|
||||
@@ -135,6 +137,7 @@ CLOCK_SPEEDS = {
|
||||
HUB75Display = hub75_ns.class_("HUB75Display", cg.PollingComponent, display.Display)
|
||||
Hub75Config = cg.global_ns.struct("Hub75Config")
|
||||
Hub75Pins = cg.global_ns.struct("Hub75Pins")
|
||||
SetBrightnessAction = hub75_ns.class_("SetBrightnessAction", automation.Action)
|
||||
|
||||
|
||||
def _merge_board_pins(config: ConfigType) -> ConfigType:
|
||||
@@ -576,3 +579,27 @@ async def to_code(config: ConfigType) -> None:
|
||||
config[CONF_LAMBDA], [(display.DisplayRef, "it")], return_type=cg.void
|
||||
)
|
||||
cg.add(var.set_writer(lambda_))
|
||||
|
||||
|
||||
@automation.register_action(
|
||||
"hub75.set_brightness",
|
||||
SetBrightnessAction,
|
||||
cv.maybe_simple_value(
|
||||
{
|
||||
cv.GenerateID(): cv.use_id(HUB75Display),
|
||||
cv.Required(CONF_BRIGHTNESS): cv.templatable(cv.int_range(min=0, max=255)),
|
||||
},
|
||||
key=CONF_BRIGHTNESS,
|
||||
),
|
||||
)
|
||||
async def hub75_set_brightness_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
var = cg.new_Pvariable(action_id, template_arg)
|
||||
await cg.register_parented(var, config[CONF_ID])
|
||||
template_ = await cg.templatable(config[CONF_BRIGHTNESS], args, cg.uint8)
|
||||
cg.add(var.set_brightness(template_))
|
||||
return var
|
||||
|
||||
@@ -179,7 +179,7 @@ void HOT HUB75Display::draw_pixels_at(int x_start, int y_start, int w, int h, co
|
||||
}
|
||||
}
|
||||
|
||||
void HUB75Display::set_brightness(int brightness) {
|
||||
void HUB75Display::set_brightness(uint8_t brightness) {
|
||||
this->brightness_ = brightness;
|
||||
this->enabled_ = (brightness > 0);
|
||||
if (this->driver_ != nullptr) {
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
#include <utility>
|
||||
|
||||
#include "esphome/components/display/display_buffer.h"
|
||||
#include "esphome/core/automation.h"
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/hal.h"
|
||||
#include "esphome/core/log.h"
|
||||
@@ -34,7 +35,7 @@ class HUB75Display : public display::Display {
|
||||
display::ColorBitness bitness, bool big_endian, int x_offset, int y_offset, int x_pad) override;
|
||||
|
||||
// Brightness control (runtime mutable)
|
||||
void set_brightness(int brightness);
|
||||
void set_brightness(uint8_t brightness);
|
||||
|
||||
protected:
|
||||
// Display internal methods
|
||||
@@ -46,10 +47,17 @@ class HUB75Display : public display::Display {
|
||||
Hub75Config config_; // Immutable configuration
|
||||
|
||||
// Runtime state (mutable)
|
||||
int brightness_{128};
|
||||
uint8_t brightness_{128};
|
||||
bool enabled_{false};
|
||||
};
|
||||
|
||||
template<typename... Ts> class SetBrightnessAction : public Action<Ts...>, public Parented<HUB75Display> {
|
||||
public:
|
||||
TEMPLATABLE_VALUE(uint8_t, brightness)
|
||||
|
||||
void play(const Ts &...x) override { this->parent_->set_brightness(this->brightness_.value(x...)); }
|
||||
};
|
||||
|
||||
} // namespace esphome::hub75
|
||||
|
||||
#endif
|
||||
|
||||
@@ -121,7 +121,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
nrf52="100kHz",
|
||||
): cv.All(
|
||||
cv.frequency,
|
||||
cv.Range(min=0, min_included=False),
|
||||
cv.float_range(min=0, min_included=False),
|
||||
),
|
||||
cv.Optional(CONF_TIMEOUT): cv.All(
|
||||
cv.only_with_framework(["arduino", "esp-idf"]),
|
||||
|
||||
@@ -45,10 +45,12 @@ CONFIG_SCHEMA = output.FLOAT_OUTPUT_SCHEMA.extend(
|
||||
{
|
||||
cv.Required(CONF_ID): cv.declare_id(LEDCOutput),
|
||||
cv.Required(CONF_PIN): pins.internal_gpio_output_pin_schema,
|
||||
cv.Optional(CONF_FREQUENCY, default="1kHz"): cv.frequency,
|
||||
cv.Optional(CONF_FREQUENCY, default="1kHz"): cv.All(
|
||||
cv.frequency, cv.float_range(min=0, min_included=False)
|
||||
),
|
||||
cv.Optional(CONF_CHANNEL): cv.int_range(min=0, max=15),
|
||||
cv.Optional(CONF_PHASE_ANGLE): cv.All(
|
||||
cv.only_with_esp_idf, cv.angle, cv.float_range(min=0.0, max=360.0)
|
||||
cv.angle, cv.float_range(min=0.0, max=360.0)
|
||||
),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA)
|
||||
|
||||
@@ -14,7 +14,9 @@ CONFIG_SCHEMA = output.FLOAT_OUTPUT_SCHEMA.extend(
|
||||
{
|
||||
cv.Required(CONF_ID): cv.declare_id(LibreTinyPWM),
|
||||
cv.Required(CONF_PIN): pins.internal_gpio_output_pin_schema,
|
||||
cv.Optional(CONF_FREQUENCY, default="1kHz"): cv.frequency,
|
||||
cv.Optional(CONF_FREQUENCY, default="1kHz"): cv.All(
|
||||
cv.frequency, cv.float_range(min=0, min_included=False)
|
||||
),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA)
|
||||
|
||||
|
||||
@@ -241,9 +241,12 @@ CONFIG_SCHEMA = cv.All(
|
||||
CONF_HARDWARE_UART,
|
||||
esp8266=UART0,
|
||||
esp32=UART0,
|
||||
esp32_c2=UART0,
|
||||
esp32_c3=USB_SERIAL_JTAG,
|
||||
esp32_c5=USB_SERIAL_JTAG,
|
||||
esp32_c6=USB_SERIAL_JTAG,
|
||||
esp32_c61=USB_SERIAL_JTAG,
|
||||
esp32_h2=USB_SERIAL_JTAG,
|
||||
esp32_p4=USB_SERIAL_JTAG,
|
||||
esp32_s2=USB_CDC,
|
||||
esp32_s3=USB_SERIAL_JTAG,
|
||||
|
||||
@@ -233,11 +233,11 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Optional(CONF_PASSWORD, default=""): cv.string,
|
||||
cv.Optional(CONF_CLEAN_SESSION, default=False): cv.boolean,
|
||||
cv.Optional(CONF_CLIENT_ID): cv.string,
|
||||
cv.SplitDefault(CONF_IDF_SEND_ASYNC, esp32_idf=False): cv.All(
|
||||
cv.boolean, cv.only_with_esp_idf
|
||||
cv.SplitDefault(CONF_IDF_SEND_ASYNC, esp32=False): cv.All(
|
||||
cv.boolean, cv.only_on_esp32
|
||||
),
|
||||
cv.Optional(CONF_CERTIFICATE_AUTHORITY): cv.All(
|
||||
cv.string, cv.only_with_esp_idf
|
||||
cv.string, cv.only_on_esp32
|
||||
),
|
||||
cv.Inclusive(CONF_CLIENT_CERTIFICATE, "cert-key-pair"): cv.All(
|
||||
cv.string, cv.only_on_esp32
|
||||
@@ -245,8 +245,8 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Inclusive(CONF_CLIENT_CERTIFICATE_KEY, "cert-key-pair"): cv.All(
|
||||
cv.string, cv.only_on_esp32
|
||||
),
|
||||
cv.SplitDefault(CONF_SKIP_CERT_CN_CHECK, esp32_idf=False): cv.All(
|
||||
cv.boolean, cv.only_with_esp_idf
|
||||
cv.SplitDefault(CONF_SKIP_CERT_CN_CHECK, esp32=False): cv.All(
|
||||
cv.boolean, cv.only_on_esp32
|
||||
),
|
||||
cv.Optional(CONF_DISCOVERY, default=True): cv.Any(
|
||||
cv.boolean, cv.one_of("CLEAN", upper=True)
|
||||
|
||||
@@ -154,7 +154,15 @@ bool MQTTComponent::send_discovery_() {
|
||||
device_info[MQTT_DEVICE_MANUFACTURER] =
|
||||
model == nullptr ? ESPHOME_PROJECT_NAME : std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME);
|
||||
#else
|
||||
device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_VERSION " (" + App.get_compilation_time_ref() + ")";
|
||||
static const char ver_fmt[] PROGMEM = ESPHOME_VERSION " (config hash 0x%08" PRIx32 ")";
|
||||
#ifdef USE_ESP8266
|
||||
char fmt_buf[sizeof(ver_fmt)];
|
||||
strcpy_P(fmt_buf, ver_fmt);
|
||||
const char *fmt = fmt_buf;
|
||||
#else
|
||||
const char *fmt = ver_fmt;
|
||||
#endif
|
||||
device_info[MQTT_DEVICE_SW_VERSION] = str_sprintf(fmt, App.get_config_hash());
|
||||
device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD;
|
||||
#if defined(USE_ESP8266) || defined(USE_ESP32)
|
||||
device_info[MQTT_DEVICE_MANUFACTURER] = "Espressif";
|
||||
|
||||
@@ -13,14 +13,16 @@ CONF_SEND_TO_NEXTION = "send_to_nextion"
|
||||
|
||||
FILTER_SOURCE_FILES = filter_source_files_from_platform(
|
||||
{
|
||||
"nextion_upload_arduino.cpp": {
|
||||
"nextion_upload_esp32.cpp": {
|
||||
PlatformFramework.ESP32_ARDUINO,
|
||||
PlatformFramework.ESP32_IDF,
|
||||
},
|
||||
"nextion_upload_arduino.cpp": {
|
||||
PlatformFramework.ESP8266_ARDUINO,
|
||||
PlatformFramework.RP2040_ARDUINO,
|
||||
PlatformFramework.BK72XX_ARDUINO,
|
||||
PlatformFramework.RTL87XX_ARDUINO,
|
||||
PlatformFramework.LN882X_ARDUINO,
|
||||
},
|
||||
"nextion_upload_idf.cpp": {PlatformFramework.ESP32_IDF},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -154,14 +154,11 @@ async def to_code(config):
|
||||
cg.add_define("USE_NEXTION_TFT_UPLOAD")
|
||||
cg.add(var.set_tft_url(config[CONF_TFT_URL]))
|
||||
if CORE.is_esp32:
|
||||
if CORE.using_arduino:
|
||||
cg.add_library("NetworkClientSecure", None)
|
||||
cg.add_library("HTTPClient", None)
|
||||
esp32.add_idf_sdkconfig_option("CONFIG_ESP_TLS_INSECURE", True)
|
||||
esp32.add_idf_sdkconfig_option(
|
||||
"CONFIG_ESP_TLS_SKIP_SERVER_CERT_VERIFY", True
|
||||
)
|
||||
elif CORE.is_esp8266 and CORE.using_arduino:
|
||||
elif CORE.is_esp8266:
|
||||
cg.add_library("ESP8266HTTPClient", None)
|
||||
|
||||
if CONF_TOUCH_SLEEP_TIMEOUT in config:
|
||||
|
||||
@@ -13,17 +13,12 @@
|
||||
#include "esphome/components/display/display_color_utils.h"
|
||||
|
||||
#ifdef USE_NEXTION_TFT_UPLOAD
|
||||
#ifdef USE_ARDUINO
|
||||
#ifdef USE_ESP32
|
||||
#include <HTTPClient.h>
|
||||
#endif // USE_ESP32
|
||||
#ifdef USE_ESP8266
|
||||
#include <esp_http_client.h>
|
||||
#elif defined(USE_ESP8266)
|
||||
#include <ESP8266HTTPClient.h>
|
||||
#include <WiFiClientSecure.h>
|
||||
#endif // USE_ESP8266
|
||||
#elif defined(USE_ESP_IDF)
|
||||
#include <esp_http_client.h>
|
||||
#endif // ARDUINO vs USE_ESP_IDF
|
||||
#endif // USE_ESP32 vs USE_ESP8266
|
||||
#endif // USE_NEXTION_TFT_UPLOAD
|
||||
|
||||
namespace esphome {
|
||||
@@ -1078,7 +1073,7 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
|
||||
|
||||
#ifdef USE_NEXTION_TFT_UPLOAD
|
||||
/**
|
||||
* Set the tft file URL. https seems problematic with Arduino..
|
||||
* Set the tft file URL.
|
||||
*/
|
||||
void set_tft_url(const std::string &tft_url) { this->tft_url_ = tft_url; }
|
||||
|
||||
@@ -1422,16 +1417,7 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
|
||||
uint32_t original_baud_rate_ = 0;
|
||||
bool upload_first_chunk_sent_ = false;
|
||||
|
||||
#ifdef USE_ARDUINO
|
||||
/**
|
||||
* will request chunk_size chunks from the web server
|
||||
* and send each to the nextion
|
||||
* @param HTTPClient http_client HTTP client handler.
|
||||
* @param int range_start Position of next byte to transfer.
|
||||
* @return position of last byte transferred, -1 for failure.
|
||||
*/
|
||||
int upload_by_chunks_(HTTPClient &http_client, uint32_t &range_start);
|
||||
#elif defined(USE_ESP_IDF)
|
||||
#ifdef USE_ESP32
|
||||
/**
|
||||
* will request 4096 bytes chunks from the web server
|
||||
* and send each to Nextion
|
||||
@@ -1440,7 +1426,16 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
|
||||
* @return position of last byte transferred, -1 for failure.
|
||||
*/
|
||||
int upload_by_chunks_(esp_http_client_handle_t http_client, uint32_t &range_start);
|
||||
#endif // USE_ARDUINO vs USE_ESP_IDF
|
||||
#elif defined(USE_ARDUINO)
|
||||
/**
|
||||
* will request chunk_size chunks from the web server
|
||||
* and send each to the nextion
|
||||
* @param HTTPClient http_client HTTP client handler.
|
||||
* @param int range_start Position of next byte to transfer.
|
||||
* @return position of last byte transferred, -1 for failure.
|
||||
*/
|
||||
int upload_by_chunks_(HTTPClient &http_client, uint32_t &range_start);
|
||||
#endif // USE_ESP32 vs USE_ARDUINO
|
||||
|
||||
/**
|
||||
* Ends the upload process, restart Nextion and, if successful,
|
||||
@@ -1450,12 +1445,6 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
|
||||
*/
|
||||
bool upload_end_(bool successful);
|
||||
|
||||
/**
|
||||
* Returns the ESP Free Heap memory. This is framework independent.
|
||||
* @return Free Heap in bytes.
|
||||
*/
|
||||
uint32_t get_free_heap_();
|
||||
|
||||
#endif // USE_NEXTION_TFT_UPLOAD
|
||||
|
||||
bool check_connect_();
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#include "nextion.h"
|
||||
|
||||
#ifdef USE_NEXTION_TFT_UPLOAD
|
||||
#ifdef USE_ARDUINO
|
||||
#ifndef USE_ESP32
|
||||
|
||||
#include <cinttypes>
|
||||
#include "esphome/components/network/util.h"
|
||||
@@ -10,10 +10,6 @@
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/util.h"
|
||||
|
||||
#ifdef USE_ESP32
|
||||
#include <esp_heap_caps.h>
|
||||
#endif
|
||||
|
||||
namespace esphome {
|
||||
namespace nextion {
|
||||
static const char *const TAG = "nextion.upload.arduino";
|
||||
@@ -21,23 +17,17 @@ static const char *const TAG = "nextion.upload.arduino";
|
||||
// Followed guide
|
||||
// https://unofficialnextion.com/t/nextion-upload-protocol-v1-2-the-fast-one/1044/2
|
||||
|
||||
inline uint32_t Nextion::get_free_heap_() {
|
||||
#if defined(USE_ESP32)
|
||||
return heap_caps_get_free_size(MALLOC_CAP_INTERNAL);
|
||||
#elif defined(USE_ESP8266)
|
||||
return EspClass::getFreeHeap();
|
||||
#endif // USE_ESP32 vs USE_ESP8266
|
||||
}
|
||||
|
||||
int Nextion::upload_by_chunks_(HTTPClient &http_client, uint32_t &range_start) {
|
||||
uint32_t range_size = this->tft_size_ - range_start;
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, this->get_free_heap_());
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, EspClass::getFreeHeap());
|
||||
uint32_t range_end = ((upload_first_chunk_sent_ or this->tft_size_ < 4096) ? this->tft_size_ : 4096) - 1;
|
||||
ESP_LOGD(TAG, "Range start: %" PRIu32, range_start);
|
||||
if (range_size <= 0 or range_end <= range_start) {
|
||||
ESP_LOGD(TAG, "Range end: %" PRIu32, range_end);
|
||||
ESP_LOGD(TAG, "Range size: %" PRIu32, range_size);
|
||||
ESP_LOGE(TAG, "Invalid range");
|
||||
ESP_LOGD(TAG,
|
||||
"Range end: %" PRIu32 "\n"
|
||||
"Range size: %" PRIu32,
|
||||
range_end, range_size);
|
||||
return -1;
|
||||
}
|
||||
|
||||
@@ -95,14 +85,8 @@ int Nextion::upload_by_chunks_(HTTPClient &http_client, uint32_t &range_start) {
|
||||
this->recv_ret_string_(recv_string, upload_first_chunk_sent_ ? 500 : 5000, true);
|
||||
this->content_length_ -= read_len;
|
||||
const float upload_percentage = 100.0f * (this->tft_size_ - this->content_length_) / this->tft_size_;
|
||||
#if defined(USE_ESP32) && defined(USE_PSRAM)
|
||||
ESP_LOGD(TAG, "Upload: %0.2f%% (%" PRIu32 " left, heap: %" PRIu32 "+%" PRIu32 ")", upload_percentage,
|
||||
this->content_length_, static_cast<uint32_t>(heap_caps_get_free_size(MALLOC_CAP_INTERNAL)),
|
||||
static_cast<uint32_t>(heap_caps_get_free_size(MALLOC_CAP_SPIRAM)));
|
||||
#else
|
||||
ESP_LOGD(TAG, "Upload: %0.2f%% (%" PRIu32 " left, heap: %" PRIu32 ")", upload_percentage, this->content_length_,
|
||||
this->get_free_heap_());
|
||||
#endif
|
||||
EspClass::getFreeHeap());
|
||||
upload_first_chunk_sent_ = true;
|
||||
if (recv_string[0] == 0x08 && recv_string.size() == 5) { // handle partial upload request
|
||||
ESP_LOGD(TAG, "Recv: [%s]",
|
||||
@@ -148,9 +132,11 @@ int Nextion::upload_by_chunks_(HTTPClient &http_client, uint32_t &range_start) {
|
||||
}
|
||||
|
||||
bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
ESP_LOGD(TAG, "TFT upload requested");
|
||||
ESP_LOGD(TAG, "Exit reparse: %s", YESNO(exit_reparse));
|
||||
ESP_LOGD(TAG, "URL: %s", this->tft_url_.c_str());
|
||||
ESP_LOGD(TAG,
|
||||
"TFT upload requested\n"
|
||||
"Exit reparse: %s\n"
|
||||
"URL: %s",
|
||||
YESNO(exit_reparse), this->tft_url_.c_str());
|
||||
|
||||
if (this->connection_state_.is_updating_) {
|
||||
ESP_LOGW(TAG, "Upload in progress");
|
||||
@@ -180,15 +166,14 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
ESP_LOGD(TAG, "Baud rate: %" PRIu32, baud_rate);
|
||||
|
||||
// Define the configuration for the HTTP client
|
||||
ESP_LOGV(TAG, "Init HTTP client");
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, this->get_free_heap_());
|
||||
ESP_LOGV(TAG,
|
||||
"Init HTTP client\n"
|
||||
"Heap: %" PRIu32,
|
||||
EspClass::getFreeHeap());
|
||||
HTTPClient http_client;
|
||||
http_client.setTimeout(15000); // Yes 15 seconds.... Helps 8266s along
|
||||
|
||||
bool begin_status = false;
|
||||
#ifdef USE_ESP32
|
||||
begin_status = http_client.begin(this->tft_url_.c_str());
|
||||
#endif
|
||||
#ifdef USE_ESP8266
|
||||
#if USE_ARDUINO_VERSION_CODE >= VERSION_CODE(2, 7, 0)
|
||||
http_client.setFollowRedirects(HTTPC_STRICT_FOLLOW_REDIRECTS);
|
||||
@@ -256,22 +241,24 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
this->send_command_("sleep=0");
|
||||
this->send_command_("dim=100");
|
||||
delay(250); // NOLINT
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, this->get_free_heap_());
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, EspClass::getFreeHeap());
|
||||
|
||||
App.feed_wdt();
|
||||
char command[128];
|
||||
// Tells the Nextion the content length of the tft file and baud rate it will be sent at
|
||||
// Once the Nextion accepts the command it will wait until the file is successfully uploaded
|
||||
// If it fails for any reason a power cycle of the display will be needed
|
||||
sprintf(command, "whmi-wris %d,%d,1", this->content_length_, baud_rate);
|
||||
snprintf(command, sizeof(command), "whmi-wris %" PRIu32 ",%" PRIu32 ",1", this->content_length_, baud_rate);
|
||||
|
||||
// Clear serial receive buffer
|
||||
ESP_LOGV(TAG, "Clear RX buffer");
|
||||
this->reset_(false);
|
||||
delay(250); // NOLINT
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, this->get_free_heap_());
|
||||
|
||||
ESP_LOGV(TAG, "Upload cmd: %s", command);
|
||||
ESP_LOGV(TAG,
|
||||
"Heap: %" PRIu32 "\n"
|
||||
"Upload cmd: %s",
|
||||
EspClass::getFreeHeap(), command);
|
||||
this->send_command_(command);
|
||||
|
||||
if (baud_rate != this->original_baud_rate_) {
|
||||
@@ -290,7 +277,7 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
ESP_LOGD(TAG, "Upload resp: [%s] %zu B",
|
||||
format_hex_pretty(reinterpret_cast<const uint8_t *>(response.data()), response.size()).c_str(),
|
||||
response.length());
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, this->get_free_heap_());
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, EspClass::getFreeHeap());
|
||||
|
||||
if (response.find(0x05) != std::string::npos) {
|
||||
ESP_LOGV(TAG, "Upload prep done");
|
||||
@@ -302,10 +289,12 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
return this->upload_end_(false);
|
||||
}
|
||||
|
||||
ESP_LOGD(TAG, "Upload TFT:");
|
||||
ESP_LOGD(TAG, " URL: %s", this->tft_url_.c_str());
|
||||
ESP_LOGD(TAG, " Size: %d bytes", this->content_length_);
|
||||
ESP_LOGD(TAG, " Heap: %" PRIu32, this->get_free_heap_());
|
||||
ESP_LOGD(TAG,
|
||||
"Upload TFT:\n"
|
||||
" URL: %s\n"
|
||||
" Size: %d bytes\n"
|
||||
" Heap: %" PRIu32,
|
||||
this->tft_url_.c_str(), this->content_length_, EspClass::getFreeHeap());
|
||||
|
||||
// Proceed with the content download as before
|
||||
|
||||
@@ -322,7 +311,7 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
return this->upload_end_(false);
|
||||
}
|
||||
App.feed_wdt();
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32 " left: %" PRIu32, this->get_free_heap_(), this->content_length_);
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32 " left: %" PRIu32, EspClass::getFreeHeap(), this->content_length_);
|
||||
}
|
||||
|
||||
ESP_LOGD(TAG, "Upload complete");
|
||||
@@ -356,5 +345,5 @@ WiFiClient *Nextion::get_wifi_client_() {
|
||||
} // namespace nextion
|
||||
} // namespace esphome
|
||||
|
||||
#endif // USE_ARDUINO
|
||||
#endif // NOT USE_ESP32
|
||||
#endif // USE_NEXTION_TFT_UPLOAD
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#include "nextion.h"
|
||||
|
||||
#ifdef USE_NEXTION_TFT_UPLOAD
|
||||
#ifdef USE_ESP_IDF
|
||||
#ifdef USE_ESP32
|
||||
|
||||
#include <esp_heap_caps.h>
|
||||
#include <esp_http_client.h>
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
namespace esphome {
|
||||
namespace nextion {
|
||||
static const char *const TAG = "nextion.upload.idf";
|
||||
static const char *const TAG = "nextion.upload.esp32";
|
||||
|
||||
// Followed guide
|
||||
// https://unofficialnextion.com/t/nextion-upload-protocol-v1-2-the-fast-one/1044/2
|
||||
@@ -25,8 +25,10 @@ int Nextion::upload_by_chunks_(esp_http_client_handle_t http_client, uint32_t &r
|
||||
uint32_t range_end = ((upload_first_chunk_sent_ or this->tft_size_ < 4096) ? this->tft_size_ : 4096) - 1;
|
||||
ESP_LOGD(TAG, "Range start: %" PRIu32, range_start);
|
||||
if (range_size <= 0 or range_end <= range_start) {
|
||||
ESP_LOGD(TAG, "Range end: %" PRIu32, range_end);
|
||||
ESP_LOGD(TAG, "Range size: %" PRIu32, range_size);
|
||||
ESP_LOGD(TAG,
|
||||
"Range end: %" PRIu32 "\n"
|
||||
"Range size: %" PRIu32,
|
||||
range_end, range_size);
|
||||
ESP_LOGE(TAG, "Invalid range");
|
||||
return -1;
|
||||
}
|
||||
@@ -151,9 +153,11 @@ int Nextion::upload_by_chunks_(esp_http_client_handle_t http_client, uint32_t &r
|
||||
}
|
||||
|
||||
bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
ESP_LOGD(TAG, "TFT upload requested");
|
||||
ESP_LOGD(TAG, "Exit reparse: %s", YESNO(exit_reparse));
|
||||
ESP_LOGD(TAG, "URL: %s", this->tft_url_.c_str());
|
||||
ESP_LOGD(TAG,
|
||||
"TFT upload requested\n"
|
||||
"Exit reparse: %s\n"
|
||||
"URL: %s",
|
||||
YESNO(exit_reparse), this->tft_url_.c_str());
|
||||
|
||||
if (this->connection_state_.is_updating_) {
|
||||
ESP_LOGW(TAG, "Upload in progress");
|
||||
@@ -183,8 +187,10 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
ESP_LOGD(TAG, "Baud rate: %" PRIu32, baud_rate);
|
||||
|
||||
// Define the configuration for the HTTP client
|
||||
ESP_LOGV(TAG, "Init HTTP client");
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, esp_get_free_heap_size());
|
||||
ESP_LOGV(TAG,
|
||||
"Init HTTP client\n"
|
||||
"Heap: %" PRIu32,
|
||||
esp_get_free_heap_size());
|
||||
esp_http_client_config_t config = {
|
||||
.url = this->tft_url_.c_str(),
|
||||
.cert_pem = nullptr,
|
||||
@@ -208,8 +214,10 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
}
|
||||
|
||||
// Perform the HTTP request
|
||||
ESP_LOGV(TAG, "Check connection");
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, esp_get_free_heap_size());
|
||||
ESP_LOGV(TAG,
|
||||
"Check connection\n"
|
||||
"Heap: %" PRIu32,
|
||||
esp_get_free_heap_size());
|
||||
err = esp_http_client_perform(http_client);
|
||||
if (err != ESP_OK) {
|
||||
ESP_LOGE(TAG, "HTTP failed: %s", esp_err_to_name(err));
|
||||
@@ -218,8 +226,10 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
}
|
||||
|
||||
// Check the HTTP Status Code
|
||||
ESP_LOGV(TAG, "Check status");
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32, esp_get_free_heap_size());
|
||||
ESP_LOGV(TAG,
|
||||
"Check status\n"
|
||||
"Heap: %" PRIu32,
|
||||
esp_get_free_heap_size());
|
||||
int status_code = esp_http_client_get_status_code(http_client);
|
||||
if (status_code != 200 && status_code != 206) {
|
||||
return this->upload_end_(false);
|
||||
@@ -255,7 +265,7 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
// Tells the Nextion the content length of the tft file and baud rate it will be sent at
|
||||
// Once the Nextion accepts the command it will wait until the file is successfully uploaded
|
||||
// If it fails for any reason a power cycle of the display will be needed
|
||||
sprintf(command, "whmi-wris %" PRIu32 ",%" PRIu32 ",1", this->content_length_, baud_rate);
|
||||
snprintf(command, sizeof(command), "whmi-wris %" PRIu32 ",%" PRIu32 ",1", this->content_length_, baud_rate);
|
||||
|
||||
// Clear serial receive buffer
|
||||
ESP_LOGV(TAG, "Clear RX buffer");
|
||||
@@ -300,10 +310,12 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
return this->upload_end_(false);
|
||||
}
|
||||
|
||||
ESP_LOGD(TAG, "Uploading TFT:");
|
||||
ESP_LOGD(TAG, " URL: %s", this->tft_url_.c_str());
|
||||
ESP_LOGD(TAG, " Size: %" PRIu32 " bytes", this->content_length_);
|
||||
ESP_LOGD(TAG, " Heap: %" PRIu32, esp_get_free_heap_size());
|
||||
ESP_LOGD(TAG,
|
||||
"Uploading TFT:\n"
|
||||
" URL: %s\n"
|
||||
" Size: %" PRIu32 " bytes\n"
|
||||
" Heap: %" PRIu32,
|
||||
this->tft_url_.c_str(), this->content_length_, esp_get_free_heap_size());
|
||||
|
||||
// Proceed with the content download as before
|
||||
|
||||
@@ -324,9 +336,8 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
ESP_LOGV(TAG, "Heap: %" PRIu32 " left: %" PRIu32, esp_get_free_heap_size(), this->content_length_);
|
||||
}
|
||||
|
||||
ESP_LOGD(TAG, "TFT upload complete");
|
||||
|
||||
ESP_LOGD(TAG, "Close HTTP");
|
||||
ESP_LOGD(TAG, "TFT upload complete\n"
|
||||
"Close HTTP");
|
||||
esp_http_client_close(http_client);
|
||||
esp_http_client_cleanup(http_client);
|
||||
ESP_LOGV(TAG, "Connection closed");
|
||||
@@ -336,5 +347,5 @@ bool Nextion::upload_tft(uint32_t baud_rate, bool exit_reparse) {
|
||||
} // namespace nextion
|
||||
} // namespace esphome
|
||||
|
||||
#endif // USE_ESP_IDF
|
||||
#endif // USE_ESP32
|
||||
#endif // USE_NEXTION_TFT_UPLOAD
|
||||
@@ -91,7 +91,7 @@ def set_sdkconfig_options(config):
|
||||
add_idf_sdkconfig_option("CONFIG_OPENTHREAD_SRP_CLIENT", True)
|
||||
add_idf_sdkconfig_option("CONFIG_OPENTHREAD_SRP_CLIENT_MAX_SERVICES", 5)
|
||||
|
||||
# TODO: Add suport for synchronized sleepy end devices (SSED)
|
||||
# TODO: Add support for synchronized sleepy end devices (SSED)
|
||||
add_idf_sdkconfig_option(f"CONFIG_OPENTHREAD_{config.get(CONF_DEVICE_TYPE)}", True)
|
||||
|
||||
|
||||
@@ -102,7 +102,7 @@ OpenThreadSrpComponent = openthread_ns.class_("OpenThreadSrpComponent", cg.Compo
|
||||
_CONNECTION_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.Optional(CONF_PAN_ID): cv.hex_int,
|
||||
cv.Optional(CONF_CHANNEL): cv.int_,
|
||||
cv.Optional(CONF_CHANNEL): cv.int_range(min=11, max=26),
|
||||
cv.Optional(CONF_NETWORK_KEY): cv.hex_int,
|
||||
cv.Optional(CONF_EXT_PAN_ID): cv.hex_int,
|
||||
cv.Optional(CONF_NETWORK_NAME): cv.string_strict,
|
||||
|
||||
@@ -21,8 +21,7 @@
|
||||
|
||||
static const char *const TAG = "openthread";
|
||||
|
||||
namespace esphome {
|
||||
namespace openthread {
|
||||
namespace esphome::openthread {
|
||||
|
||||
OpenThreadComponent *global_openthread_component = // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
|
||||
nullptr; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
|
||||
@@ -275,7 +274,5 @@ const char *OpenThreadComponent::get_use_address() const { return this->use_addr
|
||||
|
||||
void OpenThreadComponent::set_use_address(const char *use_address) { this->use_address_ = use_address; }
|
||||
|
||||
} // namespace openthread
|
||||
} // namespace esphome
|
||||
|
||||
} // namespace esphome::openthread
|
||||
#endif
|
||||
|
||||
@@ -13,8 +13,7 @@
|
||||
#include <optional>
|
||||
#include <vector>
|
||||
|
||||
namespace esphome {
|
||||
namespace openthread {
|
||||
namespace esphome::openthread {
|
||||
|
||||
class InstanceLock;
|
||||
|
||||
@@ -91,6 +90,5 @@ class InstanceLock {
|
||||
InstanceLock() {}
|
||||
};
|
||||
|
||||
} // namespace openthread
|
||||
} // namespace esphome
|
||||
} // namespace esphome::openthread
|
||||
#endif
|
||||
|
||||
@@ -24,8 +24,7 @@
|
||||
|
||||
static const char *const TAG = "openthread";
|
||||
|
||||
namespace esphome {
|
||||
namespace openthread {
|
||||
namespace esphome::openthread {
|
||||
|
||||
void OpenThreadComponent::setup() {
|
||||
// Used eventfds:
|
||||
@@ -209,6 +208,5 @@ otInstance *InstanceLock::get_instance() { return esp_openthread_get_instance();
|
||||
|
||||
InstanceLock::~InstanceLock() { esp_openthread_lock_release(); }
|
||||
|
||||
} // namespace openthread
|
||||
} // namespace esphome
|
||||
} // namespace esphome::openthread
|
||||
#endif
|
||||
|
||||
@@ -3,8 +3,7 @@
|
||||
#ifdef USE_OPENTHREAD
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace openthread_info {
|
||||
namespace esphome::openthread_info {
|
||||
|
||||
static const char *const TAG = "openthread_info";
|
||||
|
||||
@@ -19,6 +18,5 @@ void NetworkKeyOpenThreadInfo::dump_config() { LOG_TEXT_SENSOR("", "Network Key"
|
||||
void PanIdOpenThreadInfo::dump_config() { LOG_TEXT_SENSOR("", "PAN ID", this); }
|
||||
void ExtPanIdOpenThreadInfo::dump_config() { LOG_TEXT_SENSOR("", "Extended PAN ID", this); }
|
||||
|
||||
} // namespace openthread_info
|
||||
} // namespace esphome
|
||||
} // namespace esphome::openthread_info
|
||||
#endif
|
||||
|
||||
@@ -5,8 +5,7 @@
|
||||
#include "esphome/core/component.h"
|
||||
#ifdef USE_OPENTHREAD
|
||||
|
||||
namespace esphome {
|
||||
namespace openthread_info {
|
||||
namespace esphome::openthread_info {
|
||||
|
||||
using esphome::openthread::InstanceLock;
|
||||
|
||||
@@ -213,6 +212,5 @@ class ExtPanIdOpenThreadInfo : public DatasetOpenThreadInfo, public text_sensor:
|
||||
std::array<uint8_t, 8> last_extpanid_{};
|
||||
};
|
||||
|
||||
} // namespace openthread_info
|
||||
} // namespace esphome
|
||||
} // namespace esphome::openthread_info
|
||||
#endif
|
||||
|
||||
@@ -38,7 +38,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(PCA9685Output),
|
||||
cv.Optional(CONF_FREQUENCY): cv.All(
|
||||
cv.frequency, cv.Range(min=23.84, max=1525.88)
|
||||
cv.frequency, cv.float_range(min=23.84, max=1525.88)
|
||||
),
|
||||
cv.Optional(CONF_EXTERNAL_CLOCK_INPUT, default=False): cv.boolean,
|
||||
cv.Optional(CONF_PHASE_BALANCER, default="linear"): cv.enum(
|
||||
|
||||
@@ -7,10 +7,10 @@ from esphome.const import (
|
||||
CONF_UPDATE_INTERVAL,
|
||||
DEVICE_CLASS_PM25,
|
||||
ICON_BLUR,
|
||||
SCHEDULER_DONT_RUN,
|
||||
STATE_CLASS_MEASUREMENT,
|
||||
UNIT_MICROGRAMS_PER_CUBIC_METER,
|
||||
)
|
||||
from esphome.core import TimePeriodMilliseconds
|
||||
|
||||
CODEOWNERS = ["@habbie"]
|
||||
DEPENDENCIES = ["uart"]
|
||||
@@ -41,16 +41,12 @@ CONFIG_SCHEMA = cv.All(
|
||||
|
||||
|
||||
def validate_interval_uart(config):
|
||||
require_tx = False
|
||||
|
||||
interval = config.get(CONF_UPDATE_INTERVAL)
|
||||
|
||||
if isinstance(interval, TimePeriodMilliseconds):
|
||||
# 'never' is encoded as a very large int, not as a TimePeriodMilliseconds objects
|
||||
require_tx = True
|
||||
|
||||
uart.final_validate_device_schema(
|
||||
"pm1006", baud_rate=9600, require_rx=True, require_tx=require_tx
|
||||
"pm1006",
|
||||
baud_rate=9600,
|
||||
require_rx=True,
|
||||
require_tx=interval.total_milliseconds != SCHEDULER_DONT_RUN,
|
||||
)(config)
|
||||
|
||||
|
||||
|
||||
@@ -63,9 +63,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Optional(
|
||||
CONF_BUFFER_DURATION, default="100ms"
|
||||
): cv.positive_time_period_milliseconds,
|
||||
cv.SplitDefault(CONF_TASK_STACK_IN_PSRAM, esp32_idf=False): cv.All(
|
||||
cv.boolean, cv.only_with_esp_idf
|
||||
),
|
||||
cv.Optional(CONF_TASK_STACK_IN_PSRAM, default=False): cv.boolean,
|
||||
cv.Optional(CONF_FILTERS, default=16): cv.int_range(min=2, max=1024),
|
||||
cv.Optional(CONF_TAPS, default=16): _validate_taps,
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ DEPENDENCIES = ["rp2040"]
|
||||
rp2040_pwm_ns = cg.esphome_ns.namespace("rp2040_pwm")
|
||||
RP2040PWM = rp2040_pwm_ns.class_("RP2040PWM", output.FloatOutput, cg.Component)
|
||||
SetFrequencyAction = rp2040_pwm_ns.class_("SetFrequencyAction", automation.Action)
|
||||
validate_frequency = cv.All(cv.frequency, cv.Range(min=1.0e-6))
|
||||
validate_frequency = cv.All(cv.frequency, cv.float_range(min=1.0e-6))
|
||||
|
||||
CONFIG_SCHEMA = output.FLOAT_OUTPUT_SCHEMA.extend(
|
||||
{
|
||||
|
||||
@@ -9,6 +9,10 @@
|
||||
#include <cinttypes>
|
||||
#include <cstdio>
|
||||
|
||||
#ifdef USE_OTA_ROLLBACK
|
||||
#include <esp_ota_ops.h>
|
||||
#endif
|
||||
|
||||
namespace esphome {
|
||||
namespace safe_mode {
|
||||
|
||||
@@ -32,6 +36,14 @@ void SafeModeComponent::dump_config() {
|
||||
ESP_LOGW(TAG, "SAFE MODE IS ACTIVE");
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef USE_OTA_ROLLBACK
|
||||
const esp_partition_t *last_invalid = esp_ota_get_last_invalid_partition();
|
||||
if (last_invalid != nullptr) {
|
||||
ESP_LOGW(TAG, "OTA rollback detected! Rolled back from partition '%s'", last_invalid->label);
|
||||
ESP_LOGW(TAG, "The device reset before the boot was marked successful");
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
float SafeModeComponent::get_setup_priority() const { return setup_priority::AFTER_WIFI; }
|
||||
@@ -42,6 +54,10 @@ void SafeModeComponent::loop() {
|
||||
ESP_LOGI(TAG, "Boot seems successful; resetting boot loop counter");
|
||||
this->clean_rtc();
|
||||
this->boot_successful_ = true;
|
||||
#ifdef USE_OTA_ROLLBACK
|
||||
// Mark OTA partition as valid to prevent rollback
|
||||
esp_ota_mark_app_valid_cancel_rollback();
|
||||
#endif
|
||||
// Disable loop since we no longer need to check
|
||||
this->disable_loop();
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#include "sen5x.h"
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/hal.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/log.h"
|
||||
@@ -154,10 +155,10 @@ void SEN5XComponent::setup() {
|
||||
if (this->voc_sensor_ && this->store_baseline_) {
|
||||
uint32_t combined_serial =
|
||||
encode_uint24(this->serial_number_[0], this->serial_number_[1], this->serial_number_[2]);
|
||||
// Hash with compilation time and serial number
|
||||
// Hash with config hash, version, and serial number
|
||||
// This ensures the baseline storage is cleared after OTA
|
||||
// Serial numbers are unique to each sensor, so mulitple sensors can be used without conflict
|
||||
uint32_t hash = fnv1_hash(App.get_compilation_time_ref() + std::to_string(combined_serial));
|
||||
// Serial numbers are unique to each sensor, so multiple sensors can be used without conflict
|
||||
uint32_t hash = fnv1a_hash_extend(App.get_config_version_hash(), std::to_string(combined_serial));
|
||||
this->pref_ = global_preferences->make_preference<Sen5xBaselines>(hash, true);
|
||||
|
||||
if (this->pref_.load(&this->voc_baselines_storage_)) {
|
||||
|
||||
@@ -72,10 +72,10 @@ void SGP30Component::setup() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Hash with compilation time and serial number
|
||||
// Hash with config hash, version, and serial number
|
||||
// This ensures the baseline storage is cleared after OTA
|
||||
// Serial numbers are unique to each sensor, so mulitple sensors can be used without conflict
|
||||
uint32_t hash = fnv1_hash(App.get_compilation_time_ref() + std::to_string(this->serial_number_));
|
||||
// Serial numbers are unique to each sensor, so multiple sensors can be used without conflict
|
||||
uint32_t hash = fnv1a_hash_extend(App.get_config_version_hash(), std::to_string(this->serial_number_));
|
||||
this->pref_ = global_preferences->make_preference<SGP30Baselines>(hash, true);
|
||||
|
||||
if (this->store_baseline_ && this->pref_.load(&this->baselines_storage_)) {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#include "sgp4x.h"
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/hal.h"
|
||||
#include <cinttypes>
|
||||
@@ -56,10 +57,10 @@ void SGP4xComponent::setup() {
|
||||
ESP_LOGD(TAG, "Version 0x%0X", featureset);
|
||||
|
||||
if (this->store_baseline_) {
|
||||
// Hash with compilation time and serial number
|
||||
// Hash with config hash, version, and serial number
|
||||
// This ensures the baseline storage is cleared after OTA
|
||||
// Serial numbers are unique to each sensor, so mulitple sensors can be used without conflict
|
||||
uint32_t hash = fnv1_hash(App.get_compilation_time_ref() + std::to_string(this->serial_number_));
|
||||
// Serial numbers are unique to each sensor, so multiple sensors can be used without conflict
|
||||
uint32_t hash = fnv1a_hash_extend(App.get_config_version_hash(), std::to_string(this->serial_number_));
|
||||
this->pref_ = global_preferences->make_preference<SGP4xBaselines>(hash, true);
|
||||
|
||||
if (this->pref_.load(&this->voc_baselines_storage_)) {
|
||||
|
||||
@@ -272,10 +272,11 @@ def validate_spi_config(config):
|
||||
|
||||
# Given an SPI index, convert to a string that represents the C++ object for it.
|
||||
def get_spi_interface(index):
|
||||
if CORE.using_esp_idf:
|
||||
platform = get_target_platform()
|
||||
if platform == PLATFORM_ESP32:
|
||||
# ESP32 uses ESP-IDF SPI driver for both Arduino and IDF frameworks
|
||||
return ["SPI2_HOST", "SPI3_HOST"][index]
|
||||
# Arduino code follows
|
||||
platform = get_target_platform()
|
||||
if platform == PLATFORM_RP2040:
|
||||
return ["&SPI", "&SPI1"][index]
|
||||
if index == 0:
|
||||
@@ -356,7 +357,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
async def to_code(configs):
|
||||
cg.add_define("USE_SPI")
|
||||
cg.add_global(spi_ns.using)
|
||||
if CORE.using_arduino:
|
||||
if CORE.using_arduino and not CORE.is_esp32:
|
||||
cg.add_library("SPI", None)
|
||||
for spi in configs:
|
||||
var = cg.new_Pvariable(spi[CONF_ID])
|
||||
@@ -447,13 +448,15 @@ def final_validate_device_schema(name: str, *, require_mosi: bool, require_miso:
|
||||
FILTER_SOURCE_FILES = filter_source_files_from_platform(
|
||||
{
|
||||
"spi_arduino.cpp": {
|
||||
PlatformFramework.ESP32_ARDUINO,
|
||||
PlatformFramework.ESP8266_ARDUINO,
|
||||
PlatformFramework.RP2040_ARDUINO,
|
||||
PlatformFramework.BK72XX_ARDUINO,
|
||||
PlatformFramework.RTL87XX_ARDUINO,
|
||||
PlatformFramework.LN882X_ARDUINO,
|
||||
},
|
||||
"spi_esp_idf.cpp": {PlatformFramework.ESP32_IDF},
|
||||
"spi_esp_idf.cpp": {
|
||||
PlatformFramework.ESP32_ARDUINO,
|
||||
PlatformFramework.ESP32_IDF,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -2,8 +2,7 @@
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/application.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace spi {
|
||||
namespace esphome::spi {
|
||||
|
||||
const char *const TAG = "spi";
|
||||
|
||||
@@ -119,5 +118,4 @@ uint16_t SPIDelegateBitBash::transfer_(uint16_t data, size_t num_bits) {
|
||||
return out_data;
|
||||
}
|
||||
|
||||
} // namespace spi
|
||||
} // namespace esphome
|
||||
} // namespace esphome::spi
|
||||
|
||||
@@ -7,7 +7,13 @@
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#ifdef USE_ARDUINO
|
||||
#ifdef USE_ESP32
|
||||
|
||||
#include "driver/spi_master.h"
|
||||
|
||||
using SPIInterface = spi_host_device_t;
|
||||
|
||||
#elif defined(USE_ARDUINO)
|
||||
|
||||
#include <SPI.h>
|
||||
|
||||
@@ -17,26 +23,16 @@ using SPIInterface = SPIClassRP2040 *;
|
||||
using SPIInterface = SPIClass *;
|
||||
#endif
|
||||
|
||||
#endif
|
||||
#elif defined(CLANG_TIDY)
|
||||
|
||||
#ifdef USE_ESP_IDF
|
||||
using SPIInterface = void *; // Stub for platforms without SPI (e.g., Zephyr)
|
||||
|
||||
#include "driver/spi_master.h"
|
||||
|
||||
using SPIInterface = spi_host_device_t;
|
||||
|
||||
#endif // USE_ESP_IDF
|
||||
|
||||
#ifdef USE_ZEPHYR
|
||||
// TODO supprse clang-tidy. Remove after SPI driver for nrf52 is added.
|
||||
using SPIInterface = void *;
|
||||
#endif
|
||||
#endif // USE_ESP32 / USE_ARDUINO
|
||||
|
||||
/**
|
||||
* Implementation of SPI Controller mode.
|
||||
*/
|
||||
namespace esphome {
|
||||
namespace spi {
|
||||
namespace esphome::spi {
|
||||
|
||||
/// The bit-order for SPI devices. This defines how the data read from and written to the device is interpreted.
|
||||
enum SPIBitOrder {
|
||||
@@ -509,5 +505,4 @@ class SPIDevice : public SPIClient {
|
||||
template<size_t N> void transfer_array(std::array<uint8_t, N> &data) { this->transfer_array(data.data(), N); }
|
||||
};
|
||||
|
||||
} // namespace spi
|
||||
} // namespace esphome
|
||||
} // namespace esphome::spi
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
#include "spi.h"
|
||||
#include <vector>
|
||||
|
||||
namespace esphome {
|
||||
namespace spi {
|
||||
#ifdef USE_ARDUINO
|
||||
namespace esphome::spi {
|
||||
#if defined(USE_ARDUINO) && !defined(USE_ESP32)
|
||||
|
||||
static const char *const TAG = "spi-esp-arduino";
|
||||
class SPIDelegateHw : public SPIDelegate {
|
||||
@@ -101,6 +100,5 @@ SPIBus *SPIComponent::get_bus(SPIInterface interface, GPIOPin *clk, GPIOPin *sdo
|
||||
return new SPIBusHw(clk, sdo, sdi, interface);
|
||||
}
|
||||
|
||||
#endif // USE_ARDUINO
|
||||
} // namespace spi
|
||||
} // namespace esphome
|
||||
#endif // USE_ARDUINO && !USE_ESP32
|
||||
} // namespace esphome::spi
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
#include "spi.h"
|
||||
#include <vector>
|
||||
|
||||
namespace esphome {
|
||||
namespace spi {
|
||||
namespace esphome::spi {
|
||||
|
||||
#ifdef USE_ESP_IDF
|
||||
#ifdef USE_ESP32
|
||||
static const char *const TAG = "spi-esp-idf";
|
||||
static const size_t MAX_TRANSFER_SIZE = 4092; // dictated by ESP-IDF API.
|
||||
|
||||
@@ -266,6 +265,5 @@ SPIBus *SPIComponent::get_bus(SPIInterface interface, GPIOPin *clk, GPIOPin *sdo
|
||||
return new SPIBusHw(clk, sdo, sdi, interface, data_pins);
|
||||
}
|
||||
|
||||
#endif
|
||||
} // namespace spi
|
||||
} // namespace esphome
|
||||
#endif // USE_ESP32
|
||||
} // namespace esphome::spi
|
||||
|
||||
@@ -199,9 +199,13 @@ CONFIG_SCHEMA = (
|
||||
cv.Optional(CONF_CRC_INITIAL, default=0x1D0F): cv.All(
|
||||
cv.hex_int, cv.Range(min=0, max=0xFFFF)
|
||||
),
|
||||
cv.Optional(CONF_DEVIATION, default=5000): cv.int_range(min=0, max=100000),
|
||||
cv.Optional(CONF_DEVIATION, default="5kHz"): cv.All(
|
||||
cv.frequency, cv.float_range(min=0, max=100000)
|
||||
),
|
||||
cv.Required(CONF_DIO1_PIN): pins.gpio_input_pin_schema,
|
||||
cv.Required(CONF_FREQUENCY): cv.int_range(min=137000000, max=1020000000),
|
||||
cv.Required(CONF_FREQUENCY): cv.All(
|
||||
cv.frequency, cv.float_range(min=137.0e6, max=1020.0e6)
|
||||
),
|
||||
cv.Required(CONF_HW_VERSION): cv.one_of(
|
||||
"sx1261", "sx1262", "sx1268", "llcc68", lower=True
|
||||
),
|
||||
|
||||
@@ -196,9 +196,13 @@ CONFIG_SCHEMA = (
|
||||
cv.Optional(CONF_BITSYNC): cv.boolean,
|
||||
cv.Optional(CONF_CODING_RATE, default="CR_4_5"): cv.enum(CODING_RATE),
|
||||
cv.Optional(CONF_CRC_ENABLE, default=False): cv.boolean,
|
||||
cv.Optional(CONF_DEVIATION, default=5000): cv.int_range(min=0, max=100000),
|
||||
cv.Optional(CONF_DEVIATION, default="5kHz"): cv.All(
|
||||
cv.frequency, cv.float_range(min=0, max=100000)
|
||||
),
|
||||
cv.Optional(CONF_DIO0_PIN): pins.internal_gpio_input_pin_schema,
|
||||
cv.Required(CONF_FREQUENCY): cv.int_range(min=137000000, max=1020000000),
|
||||
cv.Required(CONF_FREQUENCY): cv.All(
|
||||
cv.frequency, cv.float_range(min=137.0e6, max=1020.0e6)
|
||||
),
|
||||
cv.Required(CONF_MODULATION): cv.enum(MOD),
|
||||
cv.Optional(CONF_ON_PACKET): automation.validate_automation(single=True),
|
||||
cv.Optional(CONF_PA_PIN, default="BOOST"): cv.enum(PA_PIN),
|
||||
|
||||
@@ -4,8 +4,7 @@
|
||||
|
||||
#include <cinttypes>
|
||||
|
||||
namespace esphome {
|
||||
namespace time {
|
||||
namespace esphome::time {
|
||||
|
||||
static const char *const TAG = "automation";
|
||||
static const int MAX_TIMESTAMP_DRIFT = 900; // how far can the clock drift before we consider
|
||||
@@ -92,5 +91,4 @@ SyncTrigger::SyncTrigger(RealTimeClock *rtc) : rtc_(rtc) {
|
||||
rtc->add_on_time_sync_callback([this]() { this->trigger(); });
|
||||
}
|
||||
|
||||
} // namespace time
|
||||
} // namespace esphome
|
||||
} // namespace esphome::time
|
||||
|
||||
@@ -8,8 +8,7 @@
|
||||
|
||||
#include <vector>
|
||||
|
||||
namespace esphome {
|
||||
namespace time {
|
||||
namespace esphome::time {
|
||||
|
||||
class CronTrigger : public Trigger<>, public Component {
|
||||
public:
|
||||
@@ -48,5 +47,4 @@ class SyncTrigger : public Trigger<>, public Component {
|
||||
protected:
|
||||
RealTimeClock *rtc_;
|
||||
};
|
||||
} // namespace time
|
||||
} // namespace esphome
|
||||
} // namespace esphome::time
|
||||
|
||||
@@ -17,8 +17,7 @@
|
||||
|
||||
#include <cinttypes>
|
||||
|
||||
namespace esphome {
|
||||
namespace time {
|
||||
namespace esphome::time {
|
||||
|
||||
static const char *const TAG = "time";
|
||||
|
||||
@@ -78,5 +77,4 @@ void RealTimeClock::apply_timezone_() {
|
||||
}
|
||||
#endif
|
||||
|
||||
} // namespace time
|
||||
} // namespace esphome
|
||||
} // namespace esphome::time
|
||||
|
||||
@@ -7,8 +7,7 @@
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/time.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace time {
|
||||
namespace esphome::time {
|
||||
|
||||
/// The RealTimeClock class exposes common timekeeping functions via the device's local real-time clock.
|
||||
///
|
||||
@@ -75,5 +74,4 @@ template<typename... Ts> class TimeHasTimeCondition : public Condition<Ts...> {
|
||||
RealTimeClock *parent_;
|
||||
};
|
||||
|
||||
} // namespace time
|
||||
} // namespace esphome
|
||||
} // namespace esphome::time
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
#include "esphome/core/gpio.h"
|
||||
#include "driver/gpio.h"
|
||||
#include "soc/gpio_num.h"
|
||||
#include "soc/uart_pins.h"
|
||||
|
||||
#ifdef USE_LOGGER
|
||||
#include "esphome/components/logger/logger.h"
|
||||
@@ -139,6 +140,22 @@ void IDFUARTComponent::load_settings(bool dump_config) {
|
||||
return;
|
||||
}
|
||||
|
||||
int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;
|
||||
int8_t rx = this->rx_pin_ != nullptr ? this->rx_pin_->get_pin() : -1;
|
||||
int8_t flow_control = this->flow_control_pin_ != nullptr ? this->flow_control_pin_->get_pin() : -1;
|
||||
|
||||
// Workaround for ESP-IDF issue: https://github.com/espressif/esp-idf/issues/17459
|
||||
// Commit 9ed617fb17 removed gpio_func_sel() calls from uart_set_pin(), which breaks
|
||||
// UART on default UART0 pins that may have residual state from boot console.
|
||||
// Reset these pins before configuring UART to ensure they're in a clean state.
|
||||
if (tx == U0TXD_GPIO_NUM || tx == U0RXD_GPIO_NUM) {
|
||||
gpio_reset_pin(static_cast<gpio_num_t>(tx));
|
||||
}
|
||||
if (rx == U0TXD_GPIO_NUM || rx == U0RXD_GPIO_NUM) {
|
||||
gpio_reset_pin(static_cast<gpio_num_t>(rx));
|
||||
}
|
||||
|
||||
// Setup pins after reset to preserve open drain/pullup/pulldown flags
|
||||
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
|
||||
if (!pin) {
|
||||
return;
|
||||
@@ -154,10 +171,6 @@ void IDFUARTComponent::load_settings(bool dump_config) {
|
||||
setup_pin_if_needed(this->tx_pin_);
|
||||
}
|
||||
|
||||
int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;
|
||||
int8_t rx = this->rx_pin_ != nullptr ? this->rx_pin_->get_pin() : -1;
|
||||
int8_t flow_control = this->flow_control_pin_ != nullptr ? this->flow_control_pin_->get_pin() : -1;
|
||||
|
||||
uint32_t invert = 0;
|
||||
if (this->tx_pin_ != nullptr && this->tx_pin_->is_inverted()) {
|
||||
invert |= UART_SIGNAL_TXD_INV;
|
||||
|
||||
@@ -29,6 +29,9 @@ UpdateInfo = update_ns.struct("UpdateInfo")
|
||||
PerformAction = update_ns.class_(
|
||||
"PerformAction", automation.Action, cg.Parented.template(UpdateEntity)
|
||||
)
|
||||
CheckAction = update_ns.class_(
|
||||
"CheckAction", automation.Action, cg.Parented.template(UpdateEntity)
|
||||
)
|
||||
IsAvailableCondition = update_ns.class_(
|
||||
"IsAvailableCondition", automation.Condition, cg.Parented.template(UpdateEntity)
|
||||
)
|
||||
@@ -143,6 +146,21 @@ async def update_perform_action_to_code(config, action_id, template_arg, args):
|
||||
return var
|
||||
|
||||
|
||||
@automation.register_action(
|
||||
"update.check",
|
||||
CheckAction,
|
||||
automation.maybe_simple_id(
|
||||
{
|
||||
cv.GenerateID(): cv.use_id(UpdateEntity),
|
||||
}
|
||||
),
|
||||
)
|
||||
async def update_check_action_to_code(config, action_id, template_arg, args):
|
||||
var = cg.new_Pvariable(action_id, template_arg)
|
||||
await cg.register_parented(var, config[CONF_ID])
|
||||
return var
|
||||
|
||||
|
||||
@automation.register_condition(
|
||||
"update.is_available",
|
||||
IsAvailableCondition,
|
||||
|
||||
@@ -14,6 +14,11 @@ template<typename... Ts> class PerformAction : public Action<Ts...>, public Pare
|
||||
void play(const Ts &...x) override { this->parent_->perform(this->force_.value(x...)); }
|
||||
};
|
||||
|
||||
template<typename... Ts> class CheckAction : public Action<Ts...>, public Parented<UpdateEntity> {
|
||||
public:
|
||||
void play(const Ts &...x) override { this->parent_->check(); }
|
||||
};
|
||||
|
||||
template<typename... Ts> class IsAvailableCondition : public Condition<Ts...>, public Parented<UpdateEntity> {
|
||||
public:
|
||||
bool check(const Ts &...x) override { return this->parent_->state == UPDATE_STATE_AVAILABLE; }
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
#include "version_text_sensor.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/version.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/progmem.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace version {
|
||||
@@ -10,11 +11,26 @@ namespace version {
|
||||
static const char *const TAG = "version.text_sensor";
|
||||
|
||||
void VersionTextSensor::setup() {
|
||||
if (this->hide_timestamp_) {
|
||||
this->publish_state(ESPHOME_VERSION);
|
||||
} else {
|
||||
this->publish_state(str_sprintf(ESPHOME_VERSION " %s", App.get_compilation_time_ref().c_str()));
|
||||
static const char PREFIX[] PROGMEM = ESPHOME_VERSION " (config hash 0x";
|
||||
static const char BUILT_STR[] PROGMEM = ", built ";
|
||||
// Buffer size: PREFIX + 8 hex chars + BUILT_STR + BUILD_TIME_STR_SIZE + ")" + null
|
||||
constexpr size_t buf_size = sizeof(PREFIX) + 8 + sizeof(BUILT_STR) + esphome::Application::BUILD_TIME_STR_SIZE + 2;
|
||||
char version_str[buf_size];
|
||||
|
||||
ESPHOME_strncpy_P(version_str, PREFIX, sizeof(version_str));
|
||||
|
||||
size_t len = strlen(version_str);
|
||||
snprintf(version_str + len, sizeof(version_str) - len, "%08" PRIx32, App.get_config_hash());
|
||||
|
||||
if (!this->hide_timestamp_) {
|
||||
size_t len = strlen(version_str);
|
||||
ESPHOME_strncat_P(version_str, BUILT_STR, sizeof(version_str) - len - 1);
|
||||
ESPHOME_strncat_P(version_str, ESPHOME_BUILD_TIME_STR, sizeof(version_str) - strlen(version_str) - 1);
|
||||
}
|
||||
|
||||
strncat(version_str, ")", sizeof(version_str) - strlen(version_str) - 1);
|
||||
version_str[sizeof(version_str) - 1] = '\0';
|
||||
this->publish_state(version_str);
|
||||
}
|
||||
float VersionTextSensor::get_setup_priority() const { return setup_priority::DATA; }
|
||||
void VersionTextSensor::set_hide_timestamp(bool hide_timestamp) { this->hide_timestamp_ = hide_timestamp; }
|
||||
|
||||
@@ -376,7 +376,7 @@ void WiFiComponent::start() {
|
||||
get_mac_address_pretty_into_buffer(mac_s));
|
||||
this->last_connected_ = millis();
|
||||
|
||||
uint32_t hash = this->has_sta() ? fnv1_hash(App.get_compilation_time_ref().c_str()) : 88491487UL;
|
||||
uint32_t hash = this->has_sta() ? App.get_config_version_hash() : 88491487UL;
|
||||
|
||||
this->pref_ = global_preferences->make_preference<wifi::SavedWifiSettings>(hash, true);
|
||||
#ifdef USE_WIFI_FAST_CONNECT
|
||||
|
||||
@@ -608,6 +608,8 @@ class EsphomeCore:
self.current_component: str | None = None
# Address cache for DNS and mDNS lookups from command line arguments
self.address_cache: AddressCache | None = None
# Cached config hash (computed lazily)
self._config_hash: int | None = None

def reset(self):
from esphome.pins import PIN_SCHEMA_REGISTRY
@@ -636,6 +638,7 @@ class EsphomeCore:
self.unique_ids = {}
self.current_component = None
self.address_cache = None
self._config_hash = None
PIN_SCHEMA_REGISTRY.reset()

@contextmanager
@@ -685,6 +688,21 @@ class EsphomeCore:

return None

@property
def config_hash(self) -> int:
"""Get the FNV-1a 32-bit hash of the config.

The hash is computed lazily and cached for performance.
Uses sort_keys=True to ensure deterministic ordering.
"""
if self._config_hash is None:
from esphome import yaml_util
from esphome.helpers import fnv1a_32bit_hash

config_str = yaml_util.dump(self.config, show_secrets=True, sort_keys=True)
self._config_hash = fnv1a_32bit_hash(config_str)
return self._config_hash

@property
def config_dir(self) -> Path:
if self.config_path.is_dir():

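For reference, a minimal Python sketch of the hashing behind the config_hash property above, assuming fnv1a_32bit_hash in esphome.helpers is a plain 32-bit FNV-1a over the UTF-8 bytes of the sorted YAML dump (the helper itself is not shown in this diff):

# Illustrative only; the real helper lives in esphome.helpers and may differ in detail.
def fnv1a_32bit_hash(text: str) -> int:
    hash_ = 2166136261  # FNV-1a offset basis
    for byte in text.encode("utf-8"):
        hash_ ^= byte
        hash_ = (hash_ * 16777619) & 0xFFFFFFFF  # FNV prime, clamped to 32 bits
    return hash_

# Usage sketch mirroring the property above:
# config_str = yaml_util.dump(CORE.config, show_secrets=True, sort_keys=True)
# print(f"config hash: 0x{fnv1a_32bit_hash(config_str):08x}")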
@@ -1,5 +1,12 @@
#include "esphome/core/application.h"
#include "esphome/core/build_info_data.h"
#include "esphome/core/log.h"
#include "esphome/core/progmem.h"
#include <cstring>

#ifdef USE_ESP8266
#include <pgmspace.h>
#endif
#include "esphome/core/version.h"
#include "esphome/core/hal.h"
#include <algorithm>
@@ -191,7 +198,9 @@ void Application::loop() {

if (this->dump_config_at_ < this->components_.size()) {
if (this->dump_config_at_ == 0) {
ESP_LOGI(TAG, "ESPHome version " ESPHOME_VERSION " compiled on %s", this->compilation_time_);
char build_time_str[Application::BUILD_TIME_STR_SIZE];
this->get_build_time_string(build_time_str);
ESP_LOGI(TAG, "ESPHome version " ESPHOME_VERSION " compiled on %s", build_time_str);
#ifdef ESPHOME_PROJECT_NAME
ESP_LOGI(TAG, "Project " ESPHOME_PROJECT_NAME " version " ESPHOME_PROJECT_VERSION);
#endif
@@ -711,4 +720,9 @@ void Application::wake_loop_threadsafe() {
}
#endif // defined(USE_SOCKET_SELECT_SUPPORT) && defined(USE_WAKE_LOOP_THREADSAFE)

void Application::get_build_time_string(std::span<char, BUILD_TIME_STR_SIZE> buffer) {
ESPHOME_strncpy_P(buffer.data(), ESPHOME_BUILD_TIME_STR, buffer.size());
buffer[buffer.size() - 1] = '\0';
}

} // namespace esphome

@@ -1,9 +1,12 @@
|
||||
#pragma once
|
||||
|
||||
#include <algorithm>
|
||||
#include <ctime>
|
||||
#include <limits>
|
||||
#include <span>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include "esphome/core/build_info_data.h"
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/core/hal.h"
|
||||
@@ -11,6 +14,7 @@
|
||||
#include "esphome/core/preferences.h"
|
||||
#include "esphome/core/scheduler.h"
|
||||
#include "esphome/core/string_ref.h"
|
||||
#include "esphome/core/version.h"
|
||||
|
||||
#ifdef USE_DEVICES
|
||||
#include "esphome/core/device.h"
|
||||
@@ -101,7 +105,7 @@ static const uint32_t TEARDOWN_TIMEOUT_REBOOT_MS = 1000; // 1 second for quick
|
||||
class Application {
|
||||
public:
|
||||
void pre_setup(const std::string &name, const std::string &friendly_name, const char *comment,
|
||||
const char *compilation_time, bool name_add_mac_suffix) {
|
||||
bool name_add_mac_suffix) {
|
||||
arch_init();
|
||||
this->name_add_mac_suffix_ = name_add_mac_suffix;
|
||||
if (name_add_mac_suffix) {
|
||||
@@ -121,7 +125,6 @@ class Application {
|
||||
this->friendly_name_ = friendly_name;
|
||||
}
|
||||
this->comment_ = comment;
|
||||
this->compilation_time_ = compilation_time;
|
||||
}
|
||||
|
||||
#ifdef USE_DEVICES
|
||||
@@ -261,9 +264,30 @@ class Application {

bool is_name_add_mac_suffix_enabled() const { return this->name_add_mac_suffix_; }

std::string get_compilation_time() const { return this->compilation_time_; }
/// Get the compilation time as StringRef (for API usage)
StringRef get_compilation_time_ref() const { return StringRef(this->compilation_time_); }
/// Size of buffer required for build time string (including null terminator)
static constexpr size_t BUILD_TIME_STR_SIZE = 26;

/// Get the config hash as a 32-bit integer
constexpr uint32_t get_config_hash() { return ESPHOME_CONFIG_HASH; }

/// Get the config hash extended with ESPHome version
constexpr uint32_t get_config_version_hash() { return fnv1a_hash_extend(ESPHOME_CONFIG_HASH, ESPHOME_VERSION); }

/// Get the build time as a Unix timestamp
constexpr time_t get_build_time() { return ESPHOME_BUILD_TIME; }

/// Copy the build time string into the provided buffer
/// Buffer must be BUILD_TIME_STR_SIZE bytes (compile-time enforced)
void get_build_time_string(std::span<char, BUILD_TIME_STR_SIZE> buffer);

/// Get the build time as a string (deprecated, use get_build_time_string() instead)
// Remove before 2026.7.0
ESPDEPRECATED("Use get_build_time_string() instead. Removed in 2026.7.0", "2026.1.0")
std::string get_compilation_time() {
char buf[BUILD_TIME_STR_SIZE];
this->get_build_time_string(buf);
return std::string(buf);
}

/// Get the cached time in milliseconds from when the current component started its loop execution
inline uint32_t IRAM_ATTR HOT get_loop_component_start_time() const { return this->loop_component_start_time_; }
@@ -478,7 +502,6 @@ class Application {
// Pointer-sized members first
Component *current_component_{nullptr};
const char *comment_{nullptr};
const char *compilation_time_{nullptr};

// std::vector (3 pointers each: begin, end, capacity)
// Partitioned vector design for looping components

esphome/core/build_info_data.h (new file, 10 lines)
@@ -0,0 +1,10 @@
#pragma once

// This file is not used by the runtime, instead, a version is generated during
// compilation with the actual build info values.
//
// This file is only used by static analyzers and IDEs.

#define ESPHOME_CONFIG_HASH 0x12345678U // NOLINT
#define ESPHOME_BUILD_TIME 1700000000 // NOLINT
static const char ESPHOME_BUILD_TIME_STR[] = "2024-01-01 00:00:00 +0000";
@@ -501,7 +501,6 @@ async def to_code(config: ConfigType) -> None:
config[CONF_NAME],
config[CONF_FRIENDLY_NAME],
config.get(CONF_COMMENT, ""),
cg.RawExpression('__DATE__ ", " __TIME__'),
config[CONF_NAME_ADD_MAC_SUFFIX],
)
)

@@ -170,6 +170,7 @@
// ESP32-specific feature flags
#ifdef USE_ESP32
#define USE_ESPHOME_TASK_LOG_BUFFER
#define USE_OTA_ROLLBACK

#define USE_BLUETOOTH_PROXY
#define BLUETOOTH_PROXY_MAX_CONNECTIONS 3

@@ -155,17 +155,6 @@ uint32_t fnv1_hash(const char *str) {
return hash;
}

// FNV-1a hash - preferred for new code
uint32_t fnv1a_hash_extend(uint32_t hash, const char *str) {
if (str) {
while (*str) {
hash ^= *str++;
hash *= FNV1_PRIME;
}
}
return hash;
}

float random_float() { return static_cast<float>(random_uint32()) / static_cast<float>(UINT32_MAX); }

// Strings

@@ -391,12 +391,20 @@ constexpr uint32_t FNV1_OFFSET_BASIS = 2166136261UL;
constexpr uint32_t FNV1_PRIME = 16777619UL;

/// Extend a FNV-1a hash with additional string data.
uint32_t fnv1a_hash_extend(uint32_t hash, const char *str);
constexpr uint32_t fnv1a_hash_extend(uint32_t hash, const char *str) {
if (str) {
while (*str) {
hash ^= *str++;
hash *= FNV1_PRIME;
}
}
return hash;
}
inline uint32_t fnv1a_hash_extend(uint32_t hash, const std::string &str) {
return fnv1a_hash_extend(hash, str.c_str());
}
/// Calculate a FNV-1a hash of \p str.
inline uint32_t fnv1a_hash(const char *str) { return fnv1a_hash_extend(FNV1_OFFSET_BASIS, str); }
constexpr uint32_t fnv1a_hash(const char *str) { return fnv1a_hash_extend(FNV1_OFFSET_BASIS, str); }
inline uint32_t fnv1a_hash(const std::string &str) { return fnv1a_hash(str.c_str()); }

/// Return a random 32-bit unsigned integer.

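A small hedged check of the property these constexpr helpers rely on: because FNV-1a folds bytes left to right, extending a hash is equivalent to hashing the concatenation, which is what get_config_version_hash() exploits when it extends ESPHOME_CONFIG_HASH with the version string. Sketched in Python (not C++) so it stays self-contained; the version string used is illustrative.

FNV1_OFFSET_BASIS = 2166136261
FNV1_PRIME = 16777619

def fnv1a_extend(hash_: int, text: str) -> int:
    # Same per-byte fold as the constexpr C++ above, clamped to 32 bits.
    for byte in text.encode("utf-8"):
        hash_ = ((hash_ ^ byte) * FNV1_PRIME) & 0xFFFFFFFF
    return hash_

def fnv1a(text: str) -> int:
    return fnv1a_extend(FNV1_OFFSET_BASIS, text)

# Extending a hash equals hashing the concatenation.
assert fnv1a_extend(fnv1a("config"), "2026.1.0") == fnv1a("config2026.1.0")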
@@ -9,8 +9,10 @@
#define ESPHOME_F(string_literal) F(string_literal)
#define ESPHOME_PGM_P PGM_P
#define ESPHOME_strncpy_P strncpy_P
#define ESPHOME_strncat_P strncat_P
#else
#define ESPHOME_F(string_literal) (string_literal)
#define ESPHOME_PGM_P const char *
#define ESPHOME_strncpy_P strncpy
#define ESPHOME_strncat_P strncat
#endif

@@ -424,9 +424,13 @@ def write_file_if_changed(path: Path, text: str) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
def copy_file_if_changed(src: Path, dst: Path) -> None:
|
||||
def copy_file_if_changed(src: Path, dst: Path) -> bool:
|
||||
"""Copy file from src to dst if contents differ.
|
||||
|
||||
Returns True if file was copied, False if files already matched.
|
||||
"""
|
||||
if file_compare(src, dst):
|
||||
return
|
||||
return False
|
||||
dst.parent.mkdir(parents=True, exist_ok=True)
|
||||
try:
|
||||
shutil.copyfile(src, dst)
|
||||
@@ -441,11 +445,12 @@ def copy_file_if_changed(src: Path, dst: Path) -> None:
|
||||
with suppress(OSError):
|
||||
os.unlink(dst)
|
||||
shutil.copyfile(src, dst)
|
||||
return
|
||||
return True
|
||||
|
||||
from esphome.core import EsphomeError
|
||||
|
||||
raise EsphomeError(f"Error copying file {src} to {dst}: {err}") from err
|
||||
return True
|
||||
|
||||
|
||||
def list_starts_with(list_, sub):
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
from collections.abc import Callable
|
||||
import importlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import time
|
||||
from types import TracebackType
|
||||
|
||||
from esphome import loader
|
||||
@@ -23,6 +25,7 @@ from esphome.helpers import (
|
||||
is_ha_addon,
|
||||
read_file,
|
||||
walk_files,
|
||||
write_file,
|
||||
write_file_if_changed,
|
||||
)
|
||||
from esphome.storage_json import StorageJSON, storage_path
|
||||
@@ -173,6 +176,7 @@ VERSION_H_FORMAT = """\
|
||||
"""
|
||||
DEFINES_H_TARGET = "esphome/core/defines.h"
|
||||
VERSION_H_TARGET = "esphome/core/version.h"
|
||||
BUILD_INFO_DATA_H_TARGET = "esphome/core/build_info_data.h"
|
||||
ESPHOME_README_TXT = """
|
||||
THIS DIRECTORY IS AUTO-GENERATED, DO NOT MODIFY
|
||||
|
||||
@@ -206,10 +210,16 @@ def copy_src_tree():
|
||||
include_s = "\n".join(include_l)
|
||||
|
||||
source_files_copy = source_files_map.copy()
|
||||
ignore_targets = [Path(x) for x in (DEFINES_H_TARGET, VERSION_H_TARGET)]
|
||||
ignore_targets = [
|
||||
Path(x) for x in (DEFINES_H_TARGET, VERSION_H_TARGET, BUILD_INFO_DATA_H_TARGET)
|
||||
]
|
||||
for t in ignore_targets:
|
||||
source_files_copy.pop(t)
|
||||
source_files_copy.pop(t, None)
|
||||
|
||||
# Files to exclude from sources_changed tracking (generated files)
|
||||
generated_files = {Path("esphome/core/build_info_data.h")}
|
||||
|
||||
sources_changed = False
|
||||
for fname in walk_files(CORE.relative_src_path("esphome")):
|
||||
p = Path(fname)
|
||||
if p.suffix not in SOURCE_FILE_EXTENSIONS:
|
||||
@@ -223,28 +233,80 @@ def copy_src_tree():
|
||||
if target not in source_files_copy:
|
||||
# Source file removed, delete target
|
||||
p.unlink()
|
||||
if target not in generated_files:
|
||||
sources_changed = True
|
||||
else:
|
||||
src_file = source_files_copy.pop(target)
|
||||
with src_file.path() as src_path:
|
||||
copy_file_if_changed(src_path, p)
|
||||
if copy_file_if_changed(src_path, p) and target not in generated_files:
|
||||
sources_changed = True
|
||||
|
||||
# Now copy new files
|
||||
for target, src_file in source_files_copy.items():
|
||||
dst_path = CORE.relative_src_path(*target.parts)
|
||||
with src_file.path() as src_path:
|
||||
copy_file_if_changed(src_path, dst_path)
|
||||
if (
|
||||
copy_file_if_changed(src_path, dst_path)
|
||||
and target not in generated_files
|
||||
):
|
||||
sources_changed = True
|
||||
|
||||
# Finally copy defines
|
||||
write_file_if_changed(
|
||||
if write_file_if_changed(
|
||||
CORE.relative_src_path("esphome", "core", "defines.h"), generate_defines_h()
|
||||
)
|
||||
):
|
||||
sources_changed = True
|
||||
write_file_if_changed(CORE.relative_build_path("README.txt"), ESPHOME_README_TXT)
|
||||
write_file_if_changed(
|
||||
if write_file_if_changed(
|
||||
CORE.relative_src_path("esphome.h"), ESPHOME_H_FORMAT.format(include_s)
|
||||
)
|
||||
write_file_if_changed(
|
||||
):
|
||||
sources_changed = True
|
||||
if write_file_if_changed(
|
||||
CORE.relative_src_path("esphome", "core", "version.h"), generate_version_h()
|
||||
):
|
||||
sources_changed = True
|
||||
|
||||
# Generate new build_info files if needed
|
||||
build_info_data_h_path = CORE.relative_src_path(
|
||||
"esphome", "core", "build_info_data.h"
|
||||
)
|
||||
build_info_json_path = CORE.relative_build_path("build_info.json")
|
||||
config_hash, build_time, build_time_str = get_build_info()
|
||||
|
||||
# Defensively force a rebuild if the build_info files don't exist, or if
|
||||
# there was a config change which didn't actually cause a source change
|
||||
if not build_info_data_h_path.exists():
|
||||
sources_changed = True
|
||||
else:
|
||||
try:
|
||||
existing = json.loads(build_info_json_path.read_text(encoding="utf-8"))
|
||||
if (
|
||||
existing.get("config_hash") != config_hash
|
||||
or existing.get("esphome_version") != __version__
|
||||
):
|
||||
sources_changed = True
|
||||
except (json.JSONDecodeError, KeyError, OSError):
|
||||
sources_changed = True
|
||||
|
||||
# Write build_info header and JSON metadata
|
||||
if sources_changed:
|
||||
write_file(
|
||||
build_info_data_h_path,
|
||||
generate_build_info_data_h(config_hash, build_time, build_time_str),
|
||||
)
|
||||
write_file(
|
||||
build_info_json_path,
|
||||
json.dumps(
|
||||
{
|
||||
"config_hash": config_hash,
|
||||
"build_time": build_time,
|
||||
"build_time_str": build_time_str,
|
||||
"esphome_version": __version__,
|
||||
},
|
||||
indent=2,
|
||||
)
|
||||
+ "\n",
|
||||
)
|
||||
|
||||
platform = "esphome.components." + CORE.target_platform
|
||||
try:
|
||||
@@ -270,6 +332,35 @@ def generate_version_h():
)


def get_build_info() -> tuple[int, int, str]:
"""Calculate build_info values from current config.

Returns:
Tuple of (config_hash, build_time, build_time_str)
"""
config_hash = CORE.config_hash
build_time = int(time.time())
build_time_str = time.strftime("%Y-%m-%d %H:%M:%S %z", time.localtime(build_time))
return config_hash, build_time, build_time_str


def generate_build_info_data_h(
config_hash: int, build_time: int, build_time_str: str
) -> str:
"""Generate build_info_data.h header with config hash and build time."""
return f"""#pragma once
// Auto-generated build_info data
#define ESPHOME_CONFIG_HASH 0x{config_hash:08x}U // NOLINT
#define ESPHOME_BUILD_TIME {build_time} // NOLINT
#ifdef USE_ESP8266
#include <pgmspace.h>
static const char ESPHOME_BUILD_TIME_STR[] PROGMEM = "{build_time_str}";
#else
static const char ESPHOME_BUILD_TIME_STR[] = "{build_time_str}";
#endif
"""


def write_cpp(code_s):
path = CORE.relative_src_path("main.cpp")
if path.is_file():

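A usage sketch of the two helpers above, mirroring what copy_src_tree() does when sources have changed. The import location of the helpers is an assumption, since the diff does not name the module being edited (it appears to be esphome/writer.py).

from esphome.core import CORE
from esphome.helpers import write_file
from esphome.writer import generate_build_info_data_h, get_build_info  # assumed module path

# Compute the build metadata and render the generated header.
config_hash, build_time, build_time_str = get_build_info()
header_text = generate_build_info_data_h(config_hash, build_time, build_time_str)

# Write it next to the other generated core headers (defines.h, version.h).
write_file(
    CORE.relative_src_path("esphome", "core", "build_info_data.h"),
    header_text,
)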
@@ -1,6 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from contextlib import suppress
|
||||
import functools
|
||||
import inspect
|
||||
from io import BytesIO, TextIOBase, TextIOWrapper
|
||||
@@ -501,13 +502,17 @@ def _load_yaml_internal_with_type(
|
||||
loader.dispose()
|
||||
|
||||
|
||||
def dump(dict_, show_secrets=False):
|
||||
def dump(dict_, show_secrets=False, sort_keys=False):
|
||||
"""Dump YAML to a string and remove null."""
|
||||
if show_secrets:
|
||||
_SECRET_VALUES.clear()
|
||||
_SECRET_CACHE.clear()
|
||||
return yaml.dump(
|
||||
dict_, default_flow_style=False, allow_unicode=True, Dumper=ESPHomeDumper
|
||||
dict_,
|
||||
default_flow_style=False,
|
||||
allow_unicode=True,
|
||||
Dumper=ESPHomeDumper,
|
||||
sort_keys=sort_keys,
|
||||
)
|
||||
|
||||
|
||||
@@ -543,6 +548,9 @@ class ESPHomeDumper(yaml.SafeDumper):
|
||||
best_style = True
|
||||
if hasattr(mapping, "items"):
|
||||
mapping = list(mapping.items())
|
||||
if self.sort_keys:
|
||||
with suppress(TypeError):
|
||||
mapping = sorted(mapping)
|
||||
for item_key, item_value in mapping:
|
||||
node_key = self.represent_data(item_key)
|
||||
node_value = self.represent_data(item_value)
|
||||
|
||||
@@ -253,19 +253,31 @@ def main():
|
||||
print(f"Split {args.split_at}/{args.split_num}: checking {len(files)} files")
|
||||
|
||||
# Print file count before adding header file
|
||||
print(f"\nTotal files to check: {len(files)}")
|
||||
print(f"\nTotal cpp files to check: {len(files)}")
|
||||
|
||||
# Add header file for checking (before early exit check)
|
||||
if args.all_headers and args.split_at in (None, 1):
|
||||
# When --changed is used, only include changed headers instead of all headers
|
||||
if args.changed:
|
||||
all_headers = [
|
||||
os.path.relpath(p, cwd) for p in git_ls_files(["esphome/**/*.h"])
|
||||
]
|
||||
changed_headers = filter_changed(all_headers)
|
||||
if changed_headers:
|
||||
build_all_include(changed_headers)
|
||||
files.insert(0, temp_header_file)
|
||||
else:
|
||||
print("No changed headers to check")
|
||||
else:
|
||||
build_all_include()
|
||||
files.insert(0, temp_header_file)
|
||||
print(f"Added all-include header file, new total: {len(files)}")
|
||||
|
||||
# Early exit if no files to check
|
||||
if not files:
|
||||
print("No files to check - exiting early")
|
||||
return 0
|
||||
|
||||
# Only build header file if we have actual files to check
|
||||
if args.all_headers and args.split_at in (None, 1):
|
||||
build_all_include()
|
||||
files.insert(0, temp_header_file)
|
||||
print(f"Added all-include header file, new total: {len(files)}")
|
||||
|
||||
# Print final file list before loading idedata
|
||||
print_file_list(files, "Final files to process:")
|
||||
|
||||
|
||||
@@ -156,22 +156,25 @@ def print_error_for_file(file: str | Path, body: str | None) -> None:
|
||||
print()
|
||||
|
||||
|
||||
def build_all_include() -> None:
|
||||
# Build a cpp file that includes all header files in this repo.
|
||||
# Otherwise header-only integrations would not be tested by clang-tidy
|
||||
def build_all_include(header_files: list[str] | None = None) -> None:
|
||||
# Build a cpp file that includes header files for clang-tidy to check.
|
||||
# If header_files is provided, only include those headers.
|
||||
# Otherwise, include all header files in the esphome directory.
|
||||
|
||||
# Use git ls-files to find all .h files in the esphome directory
|
||||
# This is much faster than walking the filesystem
|
||||
cmd = ["git", "ls-files", "esphome/**/*.h"]
|
||||
proc = subprocess.run(cmd, capture_output=True, text=True, check=True)
|
||||
if header_files is None:
|
||||
# Use git ls-files to find all .h files in the esphome directory
|
||||
# This is much faster than walking the filesystem
|
||||
cmd = ["git", "ls-files", "esphome/**/*.h"]
|
||||
proc = subprocess.run(cmd, capture_output=True, text=True, check=True)
|
||||
|
||||
# Process git output - git already returns paths relative to repo root
|
||||
headers = [
|
||||
f'#include "{include_p}"'
|
||||
for line in proc.stdout.strip().split("\n")
|
||||
if (include_p := line.replace(os.path.sep, "/"))
|
||||
]
|
||||
# Process git output - git already returns paths relative to repo root
|
||||
header_files = [
|
||||
line.replace(os.path.sep, "/")
|
||||
for line in proc.stdout.strip().split("\n")
|
||||
if line
|
||||
]
|
||||
|
||||
headers = [f'#include "{h}"' for h in header_files]
|
||||
headers.sort()
|
||||
headers.append("")
|
||||
content = "\n".join(headers)
|
||||
|
||||
tests/components/deep_sleep/test.bk72xx-ard.yaml (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
deep_sleep:
|
||||
run_duration: 30s
|
||||
sleep_duration: 12h
|
||||
wakeup_pin:
|
||||
- pin:
|
||||
number: P6
|
||||
- pin: P7
|
||||
wakeup_pin_mode: KEEP_AWAKE
|
||||
- pin:
|
||||
number: P10
|
||||
inverted: true
|
||||
wakeup_pin_mode: INVERT_WAKEUP
|
||||
|
||||
<<: !include common.yaml
|
||||
@@ -3,6 +3,7 @@ esp32:
|
||||
framework:
|
||||
type: esp-idf
|
||||
advanced:
|
||||
enable_ota_rollback: true
|
||||
enable_lwip_mdns_queries: true
|
||||
enable_lwip_bridge_interface: true
|
||||
disable_libc_locks_in_iram: false # Test explicit opt-out of RAM optimization
|
||||
|
||||
tests/components/hub75/common.yaml (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
esphome:
|
||||
on_boot:
|
||||
# Test simple value
|
||||
- hub75.set_brightness: 200
|
||||
|
||||
# Test templatable value
|
||||
- hub75.set_brightness: !lambda 'return 100;'
|
||||
|
||||
# Test with explicit ID
|
||||
- hub75.set_brightness:
|
||||
id: my_hub75
|
||||
brightness: 50
|
||||
@@ -1,8 +1,3 @@
|
||||
esp32:
|
||||
board: esp32dev
|
||||
framework:
|
||||
type: esp-idf
|
||||
|
||||
display:
|
||||
- platform: hub75
|
||||
id: my_hub75
|
||||
@@ -37,3 +32,5 @@ display:
|
||||
then:
|
||||
lambda: |-
|
||||
ESP_LOGD("display", "1 -> 2");
|
||||
|
||||
<<: !include common.yaml
|
||||
|
||||
@@ -1,8 +1,3 @@
|
||||
esp32:
|
||||
board: esp32-s3-devkitc-1
|
||||
framework:
|
||||
type: esp-idf
|
||||
|
||||
display:
|
||||
- platform: hub75
|
||||
id: hub75_display_board
|
||||
@@ -24,3 +19,5 @@ display:
|
||||
then:
|
||||
lambda: |-
|
||||
ESP_LOGD("display", "1 -> 2");
|
||||
|
||||
<<: !include common.yaml
|
||||
|
||||
@@ -1,8 +1,3 @@
|
||||
esp32:
|
||||
board: esp32-s3-devkitc-1
|
||||
framework:
|
||||
type: esp-idf
|
||||
|
||||
display:
|
||||
- platform: hub75
|
||||
id: my_hub75
|
||||
@@ -37,3 +32,5 @@ display:
|
||||
then:
|
||||
lambda: |-
|
||||
ESP_LOGD("display", "1 -> 2");
|
||||
|
||||
<<: !include common.yaml
|
||||
|
||||
@@ -9,6 +9,8 @@ esphome:
|
||||
update.is_available:
|
||||
then:
|
||||
- logger.log: "Update available"
|
||||
else:
|
||||
- update.check:
|
||||
- update.perform:
|
||||
force_update: true
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
using namespace esphome;
|
||||
|
||||
void setup() {
|
||||
App.pre_setup("livingroom", "LivingRoom", "comment", __DATE__ ", " __TIME__, false);
|
||||
App.pre_setup("livingroom", "LivingRoom", "comment", false);
|
||||
auto *log = new logger::Logger(115200, 512); // NOLINT
|
||||
log->pre_setup();
|
||||
log->set_uart_selection(logger::UART_SELECTION_UART0);
|
||||
|
||||
tests/integration/fixtures/build_info.yaml (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
esphome:
|
||||
name: build-info-test
|
||||
host:
|
||||
api:
|
||||
logger:
|
||||
|
||||
text_sensor:
|
||||
- platform: template
|
||||
name: "Config Hash"
|
||||
id: config_hash_sensor
|
||||
update_interval: 100ms
|
||||
lambda: |-
|
||||
char buf[16];
|
||||
snprintf(buf, sizeof(buf), "0x%08x", App.get_config_hash());
|
||||
return std::string(buf);
|
||||
- platform: template
|
||||
name: "Build Time"
|
||||
id: build_time_sensor
|
||||
update_interval: 100ms
|
||||
lambda: |-
|
||||
char buf[32];
|
||||
snprintf(buf, sizeof(buf), "%ld", (long)App.get_build_time());
|
||||
return std::string(buf);
|
||||
- platform: template
|
||||
name: "Build Time String"
|
||||
id: build_time_str_sensor
|
||||
update_interval: 100ms
|
||||
lambda: |-
|
||||
char buf[Application::BUILD_TIME_STR_SIZE];
|
||||
App.get_build_time_string(buf);
|
||||
return std::string(buf);
|
||||
tests/integration/test_build_info.py (new file, 117 lines)
@@ -0,0 +1,117 @@
"""Integration test for build_info values."""

from __future__ import annotations

import asyncio
from datetime import datetime
import re
import time

from aioesphomeapi import EntityState, TextSensorState
import pytest

from .types import APIClientConnectedFactory, RunCompiledFunction


@pytest.mark.asyncio
async def test_build_info(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test that build_info values are sane."""
    async with run_compiled(yaml_config), api_client_connected() as client:
        device_info = await client.device_info()
        assert device_info is not None
        assert device_info.name == "build-info-test"

        # Verify compilation_time from device_info is present and parseable
        # The format is ISO 8601 with timezone: "YYYY-MM-DD HH:MM:SS +ZZZZ"
        compilation_time = device_info.compilation_time
        assert compilation_time is not None

        # Validate the ISO format: "YYYY-MM-DD HH:MM:SS +ZZZZ"
        parsed = datetime.strptime(compilation_time, "%Y-%m-%d %H:%M:%S %z")
        assert parsed.year >= time.localtime().tm_year

        # Get entities
        entities, _ = await client.list_entities_services()

        # Find our text sensors by object_id
        config_hash_entity = next(
            (e for e in entities if e.object_id == "config_hash"), None
        )
        build_time_entity = next(
            (e for e in entities if e.object_id == "build_time"), None
        )
        build_time_str_entity = next(
            (e for e in entities if e.object_id == "build_time_string"), None
        )

        assert config_hash_entity is not None, "Config Hash sensor not found"
        assert build_time_entity is not None, "Build Time sensor not found"
        assert build_time_str_entity is not None, "Build Time String sensor not found"

        # Wait for all three text sensors to have valid states
        loop = asyncio.get_running_loop()
        states: dict[int, TextSensorState] = {}
        all_received = loop.create_future()
        expected_keys = {
            config_hash_entity.key,
            build_time_entity.key,
            build_time_str_entity.key,
        }

        def on_state(state: EntityState) -> None:
            if isinstance(state, TextSensorState) and not state.missing_state:
                states[state.key] = state
                if expected_keys <= states.keys() and not all_received.done():
                    all_received.set_result(True)

        client.subscribe_states(on_state)

        try:
            await asyncio.wait_for(all_received, timeout=5.0)
        except TimeoutError:
            pytest.fail(
                f"Timeout waiting for text sensor states. Got: {list(states.keys())}"
            )

        config_hash_state = states[config_hash_entity.key]
        build_time_state = states[build_time_entity.key]
        build_time_str_state = states[build_time_str_entity.key]

        # Validate config_hash format (0x followed by 8 hex digits)
        config_hash = config_hash_state.state
        assert re.match(r"^0x[0-9a-f]{8}$", config_hash), (
            f"config_hash should be 0x followed by 8 hex digits, got: {config_hash}"
        )

        # Validate build_time is a reasonable Unix timestamp
        build_time = int(build_time_state.state)
        current_time = int(time.time())
        # Build time should be within last hour and not in the future
        assert build_time <= current_time, (
            f"build_time {build_time} should not be in the future (current: {current_time})"
        )
        assert build_time > current_time - 3600, (
            f"build_time {build_time} should be within the last hour"
        )

        # Validate build_time_str matches the new ISO format
        build_time_str = build_time_str_state.state
        # Format: "YYYY-MM-DD HH:MM:SS +ZZZZ"
        parsed_build_time = datetime.strptime(build_time_str, "%Y-%m-%d %H:%M:%S %z")
        assert parsed_build_time.year >= time.localtime().tm_year

        # Verify build_time_str matches what we get from build_time timestamp
        expected_str = time.strftime("%Y-%m-%d %H:%M:%S %z", time.localtime(build_time))
        assert build_time_str == expected_str, (
            f"build_time_str '{build_time_str}' should match timestamp '{expected_str}'"
        )

        # Verify compilation_time matches build_time_str (they should be the same)
        assert compilation_time == build_time_str, (
            f"compilation_time '{compilation_time}' should match "
            f"build_time_str '{build_time_str}'"
        )
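As a side note on the "%Y-%m-%d %H:%M:%S %z" format asserted above: it round-trips cleanly between the text-sensor timestamp and the formatted string. The snippet below is an illustration only (a standalone Python sketch, not part of the test suite; the sample value 1700000000 / "2023-11-14 22:13:20 +0000" is taken from the unit tests further down):

from datetime import datetime, timezone

# Round-trip the "%Y-%m-%d %H:%M:%S %z" format used by the tests.
build_time = 1700000000
build_time_str = datetime.fromtimestamp(build_time, tz=timezone.utc).strftime(
    "%Y-%m-%d %H:%M:%S %z"
)  # "2023-11-14 22:13:20 +0000"
parsed = datetime.strptime(build_time_str, "%Y-%m-%d %H:%M:%S %z")
assert int(parsed.timestamp()) == build_time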
@@ -58,6 +58,7 @@ def mock_write_file_if_changed() -> Generator[Mock, None, None]:
def mock_copy_file_if_changed() -> Generator[Mock, None, None]:
    """Mock copy_file_if_changed for core.config."""
    with patch("esphome.core.config.copy_file_if_changed") as mock:
        mock.return_value = True
        yield mock

@@ -892,3 +892,74 @@ async def test_add_includes_overwrites_existing_files(
    mock_copy_file_if_changed.assert_called_once_with(
        include_file, CORE.build_path / "src" / "header.h"
    )


def test_config_hash_returns_int() -> None:
    """Test that config_hash returns an integer."""
    CORE.reset()
    CORE.config = {"esphome": {"name": "test"}}
    assert isinstance(CORE.config_hash, int)


def test_config_hash_is_cached() -> None:
    """Test that config_hash is computed once and cached."""
    CORE.reset()
    CORE.config = {"esphome": {"name": "test"}}

    # First access computes the hash
    hash1 = CORE.config_hash

    # Modify config (without resetting cache)
    CORE.config = {"esphome": {"name": "different"}}

    # Second access returns cached value
    hash2 = CORE.config_hash

    assert hash1 == hash2


def test_config_hash_reset_clears_cache() -> None:
    """Test that reset() clears the cached config_hash."""
    CORE.reset()
    CORE.config = {"esphome": {"name": "test"}}
    hash1 = CORE.config_hash

    # Reset clears the cache
    CORE.reset()
    CORE.config = {"esphome": {"name": "different"}}

    hash2 = CORE.config_hash

    # After reset, hash should be recomputed
    assert hash1 != hash2


def test_config_hash_deterministic_key_order() -> None:
    """Test that config_hash is deterministic regardless of key insertion order."""
    CORE.reset()
    # Create two configs with same content but different key order
    config1 = {"z_key": 1, "a_key": 2, "nested": {"z_nested": "z", "a_nested": "a"}}
    config2 = {"a_key": 2, "z_key": 1, "nested": {"a_nested": "a", "z_nested": "z"}}

    CORE.config = config1
    hash1 = CORE.config_hash

    CORE.reset()
    CORE.config = config2
    hash2 = CORE.config_hash

    # Hashes should be equal because keys are sorted during serialization
    assert hash1 == hash2


def test_config_hash_different_for_different_configs() -> None:
    """Test that different configs produce different hashes."""
    CORE.reset()
    CORE.config = {"esphome": {"name": "test1"}}
    hash1 = CORE.config_hash

    CORE.reset()
    CORE.config = {"esphome": {"name": "test2"}}
    hash2 = CORE.config_hash

    assert hash1 != hash2
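The key-order test above relies on the config being serialized with sorted keys before hashing, and the integration test expects the result to print as eight hex digits. As a rough sketch only (the helper name, the JSON serialization, and the choice of CRC32 are assumptions for illustration, not the actual CORE.config_hash implementation), a 32-bit hash with those properties could look like:

import json
import zlib


def config_hash_sketch(config: dict) -> int:
    """Illustrative 32-bit config hash, independent of key insertion order."""
    # Sorting keys during serialization makes {"a": 1, "z": 2} and {"z": 2, "a": 1} hash equal.
    serialized = json.dumps(config, sort_keys=True, default=str)
    return zlib.crc32(serialized.encode("utf-8")) & 0xFFFFFFFF


# Both orderings produce the same value, printable with "0x%08x".
assert config_hash_sketch({"a": 1, "z": 2}) == config_hash_sketch({"z": 2, "a": 1})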
@@ -4,9 +4,11 @@ from __future__ import annotations

from collections.abc import Generator
from dataclasses import dataclass
import json
import logging
from pathlib import Path
import re
import time
from typing import Any
from unittest.mock import MagicMock, Mock, patch

@@ -22,6 +24,7 @@ from esphome.__main__ import (
    command_rename,
    command_update_all,
    command_wizard,
    compile_program,
    detect_external_components,
    get_port_type,
    has_ip_address,
@@ -2605,3 +2608,197 @@ def test_command_analyze_memory_no_idedata(

    assert result == 1
    assert "Failed to get IDE data for memory analysis" in caplog.text


@pytest.fixture
def mock_compile_build_info_run_compile() -> Generator[Mock]:
    """Mock platformio_api.run_compile for build_info tests."""
    with patch("esphome.platformio_api.run_compile", return_value=0) as mock:
        yield mock


@pytest.fixture
def mock_compile_build_info_get_idedata() -> Generator[Mock]:
    """Mock platformio_api.get_idedata for build_info tests."""
    mock_idedata = MagicMock()
    with patch("esphome.platformio_api.get_idedata", return_value=mock_idedata) as mock:
        yield mock


def _setup_build_info_test(
    tmp_path: Path,
    *,
    create_firmware: bool = True,
    create_build_info: bool = True,
    build_info_content: str | None = None,
    firmware_first: bool = False,
) -> tuple[Path, Path]:
    """Set up build directory structure for build_info tests.

    Args:
        tmp_path: Temporary directory path.
        create_firmware: Whether to create firmware.bin file.
        create_build_info: Whether to create build_info.json file.
        build_info_content: Custom content for build_info.json, or None for default.
        firmware_first: If True, create firmware before build_info (makes firmware older).

    Returns:
        Tuple of (build_info_path, firmware_path).
    """
    setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")

    build_path = tmp_path / ".esphome" / "build" / "test_device"
    pioenvs_path = build_path / ".pioenvs" / "test_device"
    pioenvs_path.mkdir(parents=True, exist_ok=True)

    build_info_path = build_path / "build_info.json"
    firmware_path = pioenvs_path / "firmware.bin"

    default_build_info = json.dumps(
        {
            "config_hash": 0x12345678,
            "build_time": int(time.time()),
            "build_time_str": "Dec 13 2025, 12:00:00",
            "esphome_version": "2025.1.0",
        }
    )

    def create_build_info_file() -> None:
        if create_build_info:
            content = (
                build_info_content
                if build_info_content is not None
                else default_build_info
            )
            build_info_path.write_text(content)

    def create_firmware_file() -> None:
        if create_firmware:
            firmware_path.write_bytes(b"fake firmware")

    if firmware_first:
        create_firmware_file()
        time.sleep(0.01)  # Ensure different timestamps
        create_build_info_file()
    else:
        create_build_info_file()
        time.sleep(0.01)  # Ensure different timestamps
        create_firmware_file()

    return build_info_path, firmware_path


def test_compile_program_emits_build_info_when_firmware_rebuilt(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program logs build_info when firmware is rebuilt."""
    _setup_build_info_test(tmp_path, firmware_first=False)

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info: config_hash=0x12345678" in caplog.text


def test_compile_program_no_build_info_when_firmware_not_rebuilt(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when firmware wasn't rebuilt."""
    _setup_build_info_test(tmp_path, firmware_first=True)

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text


def test_compile_program_no_build_info_when_firmware_missing(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when firmware.bin doesn't exist."""
    _setup_build_info_test(tmp_path, create_firmware=False)

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text


def test_compile_program_no_build_info_when_json_missing(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when build_info.json doesn't exist."""
    _setup_build_info_test(tmp_path, create_build_info=False)

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text


def test_compile_program_no_build_info_when_json_invalid(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when build_info.json is invalid."""
    _setup_build_info_test(tmp_path, build_info_content="not valid json {{{")

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.DEBUG):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text


def test_compile_program_no_build_info_when_json_missing_keys(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when build_info.json is missing required keys."""
    _setup_build_info_test(
        tmp_path, build_info_content=json.dumps({"build_time": 1234567890})
    )

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text
@@ -1,6 +1,10 @@
"""Test writer module functionality."""

from collections.abc import Callable
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime
import json
import os
from pathlib import Path
import stat
@@ -20,6 +24,9 @@ from esphome.writer import (
    clean_all,
    clean_build,
    clean_cmake_cache,
    copy_src_tree,
    generate_build_info_data_h,
    get_build_info,
    storage_should_clean,
    update_storage_json,
    write_cpp,
@@ -1166,3 +1173,721 @@ def test_clean_build_reraises_for_other_errors(
    finally:
        # Cleanup - restore write permission so tmp_path cleanup works
        os.chmod(subdir, stat.S_IRWXU)


# Tests for get_build_info()


@patch("esphome.writer.CORE")
def test_get_build_info_new_build(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info returns new build_time when no existing build_info.json."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    assert isinstance(build_time, int)
    assert build_time > 0
    assert isinstance(build_time_str, str)
    # Verify build_time_str format matches expected pattern
    assert len(build_time_str) >= 19  # e.g., "2025-12-15 16:27:44 +0000"


@patch("esphome.writer.CORE")
def test_get_build_info_always_returns_current_time(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info always returns current build_time."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    # Create existing build_info.json with matching config_hash and version
    existing_build_time = 1700000000
    existing_build_time_str = "2023-11-14 22:13:20 +0000"
    build_info_path.write_text(
        json.dumps(
            {
                "config_hash": 0x12345678,
                "build_time": existing_build_time,
                "build_time_str": existing_build_time_str,
                "esphome_version": "2025.1.0-dev",
            }
        )
    )

    with patch("esphome.writer.__version__", "2025.1.0-dev"):
        config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    # get_build_info now always returns current time
    assert build_time != existing_build_time
    assert build_time > existing_build_time
    assert build_time_str != existing_build_time_str


@patch("esphome.writer.CORE")
def test_get_build_info_config_changed(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info returns new build_time when config hash changed."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0xABCDEF00  # Different from existing

    # Create existing build_info.json with different config_hash
    existing_build_time = 1700000000
    build_info_path.write_text(
        json.dumps(
            {
                "config_hash": 0x12345678,  # Different
                "build_time": existing_build_time,
                "build_time_str": "2023-11-14 22:13:20 +0000",
                "esphome_version": "2025.1.0-dev",
            }
        )
    )

    with patch("esphome.writer.__version__", "2025.1.0-dev"):
        config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0xABCDEF00
    assert build_time != existing_build_time  # New time generated
    assert build_time > existing_build_time


@patch("esphome.writer.CORE")
def test_get_build_info_version_changed(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info returns new build_time when ESPHome version changed."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    # Create existing build_info.json with different version
    existing_build_time = 1700000000
    build_info_path.write_text(
        json.dumps(
            {
                "config_hash": 0x12345678,
                "build_time": existing_build_time,
                "build_time_str": "2023-11-14 22:13:20 +0000",
                "esphome_version": "2024.12.0",  # Old version
            }
        )
    )

    with patch("esphome.writer.__version__", "2025.1.0-dev"):  # New version
        config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    assert build_time != existing_build_time  # New time generated
    assert build_time > existing_build_time


@patch("esphome.writer.CORE")
def test_get_build_info_invalid_json(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info handles invalid JSON gracefully."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    # Create invalid JSON file
    build_info_path.write_text("not valid json {{{")

    config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    assert isinstance(build_time, int)
    assert build_time > 0


@patch("esphome.writer.CORE")
def test_get_build_info_missing_keys(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info handles missing keys gracefully."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    # Create JSON with missing keys
    build_info_path.write_text(json.dumps({"config_hash": 0x12345678}))

    with patch("esphome.writer.__version__", "2025.1.0-dev"):
        config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    assert isinstance(build_time, int)
    assert build_time > 0


@patch("esphome.writer.CORE")
def test_get_build_info_build_time_str_format(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info returns correctly formatted build_time_str."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    config_hash, build_time, build_time_str = get_build_info()

    # Verify the format matches "%Y-%m-%d %H:%M:%S %z"
    # e.g., "2025-12-15 16:27:44 +0000"
    parsed = datetime.strptime(build_time_str, "%Y-%m-%d %H:%M:%S %z")
    assert parsed.year >= 2024


def test_generate_build_info_data_h_format() -> None:
    """Test generate_build_info_data_h produces correct header content."""
    config_hash = 0x12345678
    build_time = 1700000000
    build_time_str = "2023-11-14 22:13:20 +0000"

    result = generate_build_info_data_h(config_hash, build_time, build_time_str)

    assert "#pragma once" in result
    assert "#define ESPHOME_CONFIG_HASH 0x12345678U" in result
    assert "#define ESPHOME_BUILD_TIME 1700000000" in result
    assert 'ESPHOME_BUILD_TIME_STR[] = "2023-11-14 22:13:20 +0000"' in result


def test_generate_build_info_data_h_esp8266_progmem() -> None:
    """Test generate_build_info_data_h includes PROGMEM for ESP8266."""
    result = generate_build_info_data_h(0xABCDEF01, 1700000000, "test")

    # Should have ESP8266 PROGMEM conditional
    assert "#ifdef USE_ESP8266" in result
    assert "#include <pgmspace.h>" in result
    assert "PROGMEM" in result


def test_generate_build_info_data_h_hash_formatting() -> None:
    """Test generate_build_info_data_h formats hash with leading zeros."""
    # Test with small hash value that needs leading zeros
    result = generate_build_info_data_h(0x00000001, 0, "test")
    assert "#define ESPHOME_CONFIG_HASH 0x00000001U" in result

    # Test with larger hash value
    result = generate_build_info_data_h(0xFFFFFFFF, 0, "test")
    assert "#define ESPHOME_CONFIG_HASH 0xffffffffU" in result
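The assertions above pin down the shape of the generated header: a pragma, two defines, the time string, and an ESP8266 PROGMEM conditional. As an illustration only (the exact template is an assumption; only the asserted fragments are taken from these tests, and this is not the actual generate_build_info_data_h implementation), a generator consistent with them could look like:

def generate_build_info_data_h_sketch(
    config_hash: int, build_time: int, build_time_str: str
) -> str:
    """Illustrative header generator consistent with the assertions above."""
    # "{:08x}" keeps leading zeros and lowercase hex; the trailing U marks an unsigned literal.
    return (
        "#pragma once\n"
        f"#define ESPHOME_CONFIG_HASH 0x{config_hash:08x}U\n"
        f"#define ESPHOME_BUILD_TIME {build_time}\n"
        "#ifdef USE_ESP8266\n"
        "#include <pgmspace.h>\n"
        f'static const char ESPHOME_BUILD_TIME_STR[] PROGMEM = "{build_time_str}";\n'
        "#else\n"
        f'static const char ESPHOME_BUILD_TIME_STR[] = "{build_time_str}";\n'
        "#endif\n"
    )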
@patch("esphome.writer.CORE")
|
||||
@patch("esphome.writer.iter_components")
|
||||
@patch("esphome.writer.walk_files")
|
||||
def test_copy_src_tree_writes_build_info_files(
|
||||
mock_walk_files: MagicMock,
|
||||
mock_iter_components: MagicMock,
|
||||
mock_core: MagicMock,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""Test copy_src_tree writes build_info_data.h and build_info.json."""
|
||||
# Setup directory structure
|
||||
src_path = tmp_path / "src"
|
||||
src_path.mkdir()
|
||||
esphome_core_path = src_path / "esphome" / "core"
|
||||
esphome_core_path.mkdir(parents=True)
|
||||
build_path = tmp_path / "build"
|
||||
build_path.mkdir()
|
||||
|
||||
# Create mock source files for defines.h and version.h
|
||||
mock_defines_h = esphome_core_path / "defines.h"
|
||||
mock_defines_h.write_text("// mock defines.h")
|
||||
mock_version_h = esphome_core_path / "version.h"
|
||||
mock_version_h.write_text("// mock version.h")
|
||||
|
||||
# Create mock FileResource that returns our temp files
|
||||
@dataclass(frozen=True)
|
||||
class MockFileResource:
|
||||
package: str
|
||||
resource: str
|
||||
_path: Path
|
||||
|
||||
@contextmanager
|
||||
def path(self):
|
||||
yield self._path
|
||||
|
||||
# Create mock resources for defines.h and version.h (required by copy_src_tree)
|
||||
mock_resources = [
|
||||
MockFileResource(
|
||||
package="esphome.core",
|
||||
resource="defines.h",
|
||||
_path=mock_defines_h,
|
||||
),
|
||||
MockFileResource(
|
||||
package="esphome.core",
|
||||
resource="version.h",
|
||||
_path=mock_version_h,
|
||||
),
|
||||
]
|
||||
|
||||
# Create mock component with resources
|
||||
mock_component = MagicMock()
|
||||
mock_component.resources = mock_resources
|
||||
|
||||
# Setup mocks
|
||||
mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
|
||||
mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
|
||||
mock_core.defines = []
|
||||
mock_core.config_hash = 0xDEADBEEF
|
||||
mock_core.target_platform = "test_platform"
|
||||
mock_core.config = {}
|
||||
mock_iter_components.return_value = [("core", mock_component)]
|
||||
mock_walk_files.return_value = []
|
||||
|
||||
# Create mock module without copy_files attribute (causes AttributeError which is caught)
|
||||
mock_module = MagicMock(spec=[]) # Empty spec = no copy_files attribute
|
||||
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"),
|
||||
patch("esphome.writer.importlib.import_module", return_value=mock_module),
|
||||
):
|
||||
copy_src_tree()
|
||||
|
||||
# Verify build_info_data.h was written
|
||||
build_info_h_path = esphome_core_path / "build_info_data.h"
|
||||
assert build_info_h_path.exists()
|
||||
build_info_h_content = build_info_h_path.read_text()
|
||||
assert "#define ESPHOME_CONFIG_HASH 0xdeadbeefU" in build_info_h_content
|
||||
assert "#define ESPHOME_BUILD_TIME" in build_info_h_content
|
||||
assert "ESPHOME_BUILD_TIME_STR" in build_info_h_content
|
||||
|
||||
# Verify build_info.json was written
|
||||
build_info_json_path = build_path / "build_info.json"
|
||||
assert build_info_json_path.exists()
|
||||
build_info_json = json.loads(build_info_json_path.read_text())
|
||||
assert build_info_json["config_hash"] == 0xDEADBEEF
|
||||
assert "build_time" in build_info_json
|
||||
assert "build_time_str" in build_info_json
|
||||
assert build_info_json["esphome_version"] == "2025.1.0-dev"
|
||||
|
||||
|
||||
@patch("esphome.writer.CORE")
|
||||
@patch("esphome.writer.iter_components")
|
||||
@patch("esphome.writer.walk_files")
|
||||
def test_copy_src_tree_detects_config_hash_change(
|
||||
mock_walk_files: MagicMock,
|
||||
mock_iter_components: MagicMock,
|
||||
mock_core: MagicMock,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""Test copy_src_tree detects when config_hash changes."""
|
||||
# Setup directory structure
|
||||
src_path = tmp_path / "src"
|
||||
src_path.mkdir()
|
||||
esphome_core_path = src_path / "esphome" / "core"
|
||||
esphome_core_path.mkdir(parents=True)
|
||||
build_path = tmp_path / "build"
|
||||
build_path.mkdir()
|
||||
|
||||
# Create existing build_info.json with different config_hash
|
||||
build_info_json_path = build_path / "build_info.json"
|
||||
build_info_json_path.write_text(
|
||||
json.dumps(
|
||||
{
|
||||
"config_hash": 0x12345678, # Different from current
|
||||
"build_time": 1700000000,
|
||||
"build_time_str": "2023-11-14 22:13:20 +0000",
|
||||
"esphome_version": "2025.1.0-dev",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
# Create existing build_info_data.h
|
||||
build_info_h_path = esphome_core_path / "build_info_data.h"
|
||||
build_info_h_path.write_text("// old build_info_data.h")
|
||||
|
||||
# Setup mocks
|
||||
mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
|
||||
mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
|
||||
mock_core.defines = []
|
||||
mock_core.config_hash = 0xDEADBEEF # Different from existing
|
||||
mock_core.target_platform = "test_platform"
|
||||
mock_core.config = {}
|
||||
mock_iter_components.return_value = []
|
||||
mock_walk_files.return_value = []
|
||||
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"),
|
||||
patch("esphome.writer.importlib.import_module") as mock_import,
|
||||
):
|
||||
mock_import.side_effect = AttributeError
|
||||
copy_src_tree()
|
||||
|
||||
# Verify build_info files were updated due to config_hash change
|
||||
assert build_info_h_path.exists()
|
||||
new_content = build_info_h_path.read_text()
|
||||
assert "0xdeadbeef" in new_content.lower()
|
||||
|
||||
new_json = json.loads(build_info_json_path.read_text())
|
||||
assert new_json["config_hash"] == 0xDEADBEEF
|
||||
|
||||
|
||||
@patch("esphome.writer.CORE")
|
||||
@patch("esphome.writer.iter_components")
|
||||
@patch("esphome.writer.walk_files")
|
||||
def test_copy_src_tree_detects_version_change(
|
||||
mock_walk_files: MagicMock,
|
||||
mock_iter_components: MagicMock,
|
||||
mock_core: MagicMock,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""Test copy_src_tree detects when esphome_version changes."""
|
||||
# Setup directory structure
|
||||
src_path = tmp_path / "src"
|
||||
src_path.mkdir()
|
||||
esphome_core_path = src_path / "esphome" / "core"
|
||||
esphome_core_path.mkdir(parents=True)
|
||||
build_path = tmp_path / "build"
|
||||
build_path.mkdir()
|
||||
|
||||
# Create existing build_info.json with different version
|
||||
build_info_json_path = build_path / "build_info.json"
|
||||
build_info_json_path.write_text(
|
||||
json.dumps(
|
||||
{
|
||||
"config_hash": 0xDEADBEEF,
|
||||
"build_time": 1700000000,
|
||||
"build_time_str": "2023-11-14 22:13:20 +0000",
|
||||
"esphome_version": "2024.12.0", # Old version
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
# Create existing build_info_data.h
|
||||
build_info_h_path = esphome_core_path / "build_info_data.h"
|
||||
build_info_h_path.write_text("// old build_info_data.h")
|
||||
|
||||
# Setup mocks
|
||||
mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
|
||||
mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
|
||||
mock_core.defines = []
|
||||
mock_core.config_hash = 0xDEADBEEF
|
||||
mock_core.target_platform = "test_platform"
|
||||
mock_core.config = {}
|
||||
mock_iter_components.return_value = []
|
||||
mock_walk_files.return_value = []
|
||||
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"), # New version
|
||||
patch("esphome.writer.importlib.import_module") as mock_import,
|
||||
):
|
||||
mock_import.side_effect = AttributeError
|
||||
copy_src_tree()
|
||||
|
||||
# Verify build_info files were updated due to version change
|
||||
assert build_info_h_path.exists()
|
||||
new_json = json.loads(build_info_json_path.read_text())
|
||||
assert new_json["esphome_version"] == "2025.1.0-dev"
|
||||
|
||||
|
||||
@patch("esphome.writer.CORE")
|
||||
@patch("esphome.writer.iter_components")
|
||||
@patch("esphome.writer.walk_files")
|
||||
def test_copy_src_tree_handles_invalid_build_info_json(
|
||||
mock_walk_files: MagicMock,
|
||||
mock_iter_components: MagicMock,
|
||||
mock_core: MagicMock,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""Test copy_src_tree handles invalid build_info.json gracefully."""
|
||||
# Setup directory structure
|
||||
src_path = tmp_path / "src"
|
||||
src_path.mkdir()
|
||||
esphome_core_path = src_path / "esphome" / "core"
|
||||
esphome_core_path.mkdir(parents=True)
|
||||
build_path = tmp_path / "build"
|
||||
build_path.mkdir()
|
||||
|
||||
# Create invalid build_info.json
|
||||
build_info_json_path = build_path / "build_info.json"
|
||||
build_info_json_path.write_text("invalid json {{{")
|
||||
|
||||
# Create existing build_info_data.h
|
||||
build_info_h_path = esphome_core_path / "build_info_data.h"
|
||||
build_info_h_path.write_text("// old build_info_data.h")
|
||||
|
||||
# Setup mocks
|
||||
mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
|
||||
mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
|
||||
mock_core.defines = []
|
||||
mock_core.config_hash = 0xDEADBEEF
|
||||
mock_core.target_platform = "test_platform"
|
||||
mock_core.config = {}
|
||||
mock_iter_components.return_value = []
|
||||
mock_walk_files.return_value = []
|
||||
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"),
|
||||
patch("esphome.writer.importlib.import_module") as mock_import,
|
||||
):
|
||||
mock_import.side_effect = AttributeError
|
||||
copy_src_tree()
|
||||
|
||||
# Verify build_info files were created despite invalid JSON
|
||||
assert build_info_h_path.exists()
|
||||
new_json = json.loads(build_info_json_path.read_text())
|
||||
assert new_json["config_hash"] == 0xDEADBEEF
|
||||
|
||||
|
||||
@patch("esphome.writer.CORE")
|
||||
@patch("esphome.writer.iter_components")
|
||||
@patch("esphome.writer.walk_files")
|
||||
def test_copy_src_tree_build_info_timestamp_behavior(
|
||||
mock_walk_files: MagicMock,
|
||||
mock_iter_components: MagicMock,
|
||||
mock_core: MagicMock,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""Test build_info behaviour: regenerated on change, preserved when unchanged."""
|
||||
# Setup directory structure
|
||||
src_path = tmp_path / "src"
|
||||
src_path.mkdir()
|
||||
esphome_core_path = src_path / "esphome" / "core"
|
||||
esphome_core_path.mkdir(parents=True)
|
||||
esphome_components_path = src_path / "esphome" / "components"
|
||||
esphome_components_path.mkdir(parents=True)
|
||||
build_path = tmp_path / "build"
|
||||
build_path.mkdir()
|
||||
|
||||
# Create a source file
|
||||
source_file = tmp_path / "source" / "test.cpp"
|
||||
source_file.parent.mkdir()
|
||||
source_file.write_text("// version 1")
|
||||
|
||||
# Create destination file in build tree
|
||||
dest_file = esphome_components_path / "test.cpp"
|
||||
|
||||
# Create mock FileResource
|
||||
@dataclass(frozen=True)
|
||||
class MockFileResource:
|
||||
package: str
|
||||
resource: str
|
||||
_path: Path
|
||||
|
||||
@contextmanager
|
||||
def path(self):
|
||||
yield self._path
|
||||
|
||||
mock_resources = [
|
||||
MockFileResource(
|
||||
package="esphome.components",
|
||||
resource="test.cpp",
|
||||
_path=source_file,
|
||||
),
|
||||
]
|
||||
|
||||
mock_component = MagicMock()
|
||||
mock_component.resources = mock_resources
|
||||
|
||||
# Setup mocks
|
||||
mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
|
||||
mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
|
||||
mock_core.defines = []
|
||||
mock_core.config_hash = 0xDEADBEEF
|
||||
mock_core.target_platform = "test_platform"
|
||||
mock_core.config = {}
|
||||
mock_iter_components.return_value = [("test", mock_component)]
|
||||
|
||||
build_info_json_path = build_path / "build_info.json"
|
||||
|
||||
# First run: initial setup, should create build_info
|
||||
mock_walk_files.return_value = []
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"),
|
||||
patch("esphome.writer.importlib.import_module") as mock_import,
|
||||
):
|
||||
mock_import.side_effect = AttributeError
|
||||
copy_src_tree()
|
||||
|
||||
# Manually set an old timestamp for testing
|
||||
old_timestamp = 1700000000
|
||||
old_timestamp_str = "2023-11-14 22:13:20 +0000"
|
||||
build_info_json_path.write_text(
|
||||
json.dumps(
|
||||
{
|
||||
"config_hash": 0xDEADBEEF,
|
||||
"build_time": old_timestamp,
|
||||
"build_time_str": old_timestamp_str,
|
||||
"esphome_version": "2025.1.0-dev",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
# Second run: no changes, should NOT regenerate build_info
|
||||
mock_walk_files.return_value = [str(dest_file)]
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"),
|
||||
patch("esphome.writer.importlib.import_module") as mock_import,
|
||||
):
|
||||
mock_import.side_effect = AttributeError
|
||||
copy_src_tree()
|
||||
|
||||
second_json = json.loads(build_info_json_path.read_text())
|
||||
second_timestamp = second_json["build_time"]
|
||||
|
||||
# Verify timestamp was NOT changed
|
||||
assert second_timestamp == old_timestamp, (
|
||||
f"build_info should not be regenerated when no files change: "
|
||||
f"{old_timestamp} != {second_timestamp}"
|
||||
)
|
||||
|
||||
# Third run: change source file, should regenerate build_info with new timestamp
|
||||
source_file.write_text("// version 2")
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"),
|
||||
patch("esphome.writer.importlib.import_module") as mock_import,
|
||||
):
|
||||
mock_import.side_effect = AttributeError
|
||||
copy_src_tree()
|
||||
|
||||
third_json = json.loads(build_info_json_path.read_text())
|
||||
third_timestamp = third_json["build_time"]
|
||||
|
||||
# Verify timestamp WAS changed
|
||||
assert third_timestamp != old_timestamp, (
|
||||
f"build_info should be regenerated when source file changes: "
|
||||
f"{old_timestamp} == {third_timestamp}"
|
||||
)
|
||||
assert third_timestamp > old_timestamp
|
||||
|
||||
|
||||
@patch("esphome.writer.CORE")
|
||||
@patch("esphome.writer.iter_components")
|
||||
@patch("esphome.writer.walk_files")
|
||||
def test_copy_src_tree_detects_removed_source_file(
|
||||
mock_walk_files: MagicMock,
|
||||
mock_iter_components: MagicMock,
|
||||
mock_core: MagicMock,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""Test copy_src_tree detects when a non-generated source file is removed."""
|
||||
# Setup directory structure
|
||||
src_path = tmp_path / "src"
|
||||
src_path.mkdir()
|
||||
esphome_components_path = src_path / "esphome" / "components"
|
||||
esphome_components_path.mkdir(parents=True)
|
||||
build_path = tmp_path / "build"
|
||||
build_path.mkdir()
|
||||
|
||||
# Create an existing source file in the build tree
|
||||
existing_file = esphome_components_path / "test.cpp"
|
||||
existing_file.write_text("// test file")
|
||||
|
||||
# Setup mocks - no components, so the file should be removed
|
||||
mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
|
||||
mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
|
||||
mock_core.defines = []
|
||||
mock_core.config_hash = 0xDEADBEEF
|
||||
mock_core.target_platform = "test_platform"
|
||||
mock_core.config = {}
|
||||
mock_iter_components.return_value = [] # No components = file should be removed
|
||||
mock_walk_files.return_value = [str(existing_file)]
|
||||
|
||||
# Create existing build_info.json
|
||||
build_info_json_path = build_path / "build_info.json"
|
||||
old_timestamp = 1700000000
|
||||
build_info_json_path.write_text(
|
||||
json.dumps(
|
||||
{
|
||||
"config_hash": 0xDEADBEEF,
|
||||
"build_time": old_timestamp,
|
||||
"build_time_str": "2023-11-14 22:13:20 +0000",
|
||||
"esphome_version": "2025.1.0-dev",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"),
|
||||
patch("esphome.writer.importlib.import_module") as mock_import,
|
||||
):
|
||||
mock_import.side_effect = AttributeError
|
||||
copy_src_tree()
|
||||
|
||||
# Verify file was removed
|
||||
assert not existing_file.exists()
|
||||
|
||||
# Verify build_info was regenerated due to source file removal
|
||||
new_json = json.loads(build_info_json_path.read_text())
|
||||
assert new_json["build_time"] != old_timestamp
|
||||
|
||||
|
||||
@patch("esphome.writer.CORE")
|
||||
@patch("esphome.writer.iter_components")
|
||||
@patch("esphome.writer.walk_files")
|
||||
def test_copy_src_tree_ignores_removed_generated_file(
|
||||
mock_walk_files: MagicMock,
|
||||
mock_iter_components: MagicMock,
|
||||
mock_core: MagicMock,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""Test copy_src_tree doesn't mark sources_changed when only generated file removed."""
|
||||
# Setup directory structure
|
||||
src_path = tmp_path / "src"
|
||||
src_path.mkdir()
|
||||
esphome_core_path = src_path / "esphome" / "core"
|
||||
esphome_core_path.mkdir(parents=True)
|
||||
build_path = tmp_path / "build"
|
||||
build_path.mkdir()
|
||||
|
||||
# Create existing build_info_data.h (a generated file)
|
||||
build_info_h = esphome_core_path / "build_info_data.h"
|
||||
build_info_h.write_text("// old generated file")
|
||||
|
||||
# Setup mocks
|
||||
mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
|
||||
mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
|
||||
mock_core.defines = []
|
||||
mock_core.config_hash = 0xDEADBEEF
|
||||
mock_core.target_platform = "test_platform"
|
||||
mock_core.config = {}
|
||||
mock_iter_components.return_value = []
|
||||
# walk_files returns the generated file, but it's not in source_files_copy
|
||||
mock_walk_files.return_value = [str(build_info_h)]
|
||||
|
||||
# Create existing build_info.json with old timestamp
|
||||
build_info_json_path = build_path / "build_info.json"
|
||||
old_timestamp = 1700000000
|
||||
build_info_json_path.write_text(
|
||||
json.dumps(
|
||||
{
|
||||
"config_hash": 0xDEADBEEF,
|
||||
"build_time": old_timestamp,
|
||||
"build_time_str": "2023-11-14 22:13:20 +0000",
|
||||
"esphome_version": "2025.1.0-dev",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"),
|
||||
patch("esphome.writer.importlib.import_module") as mock_import,
|
||||
):
|
||||
mock_import.side_effect = AttributeError
|
||||
copy_src_tree()
|
||||
|
||||
# Verify build_info_data.h was regenerated (not removed)
|
||||
assert build_info_h.exists()
|
||||
|
||||
# Note: build_info.json will have a new timestamp because get_build_info()
|
||||
# always returns current time. The key test is that the old build_info_data.h
|
||||
# file was removed and regenerated, not that it triggered sources_changed.
|
||||
new_json = json.loads(build_info_json_path.read_text())
|
||||
assert new_json["config_hash"] == 0xDEADBEEF
|
||||
|
||||
@@ -278,3 +278,31 @@ def test_secret_values_tracking(fixture_path: Path) -> None:
    assert yaml_util._SECRET_VALUES["super_secret_wifi"] == "wifi_password"
    assert "0123456789abcdef" in yaml_util._SECRET_VALUES
    assert yaml_util._SECRET_VALUES["0123456789abcdef"] == "api_key"


def test_dump_sort_keys() -> None:
    """Test that dump with sort_keys=True produces sorted output."""
    # Create a dict with unsorted keys
    data = {
        "zebra": 1,
        "alpha": 2,
        "nested": {
            "z_key": "z_value",
            "a_key": "a_value",
        },
    }

    # Without sort_keys, keys are in insertion order
    unsorted = yaml_util.dump(data, sort_keys=False)
    lines_unsorted = unsorted.strip().split("\n")
    # First key should be "zebra" (insertion order)
    assert lines_unsorted[0].startswith("zebra:")

    # With sort_keys, keys are alphabetically sorted
    sorted_dump = yaml_util.dump(data, sort_keys=True)
    lines_sorted = sorted_dump.strip().split("\n")
    # First key should be "alpha" (alphabetical order)
    assert lines_sorted[0].startswith("alpha:")
    # nested keys should also be sorted
    assert "a_key:" in sorted_dump
    assert sorted_dump.index("a_key:") < sorted_dump.index("z_key:")