Mirror of https://github.com/esphome/esphome.git (synced 2025-11-17 07:15:48 +00:00)

Compare commits: light_loop ... number_nam (22 commits)
| SHA1 |
|---|
| 23be236133 |
| 10bdb47eae |
| aa097a2fe6 |
| 3b860e784c |
| 96ee38759d |
| 986d3c8f13 |
| 320120883c |
| 4fc4da6ed2 |
| 6f4042f401 |
| ea2b4c3e25 |
| fc546ca3f6 |
| 6b158e760d |
| 5710cab972 |
| eb759efb3d |
| 1df996601d |
| c32891ec02 |
| 2bf6d48fcf |
| e49a943cf7 |
| 67524e14ee |
| 2290eb0dd2 |
| 0afcf67c32 |
| 952bdfaac2 |
.github/workflows/codeql.yml (vendored, 4 changes)
@@ -58,7 +58,7 @@ jobs:

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@ jobs:
exit 1

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
category: "/language:${{matrix.language}}"
@@ -11,7 +11,7 @@ ci:
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.4
rev: v0.14.5
hooks:
# Run the linter.
- id: ruff
@@ -15,6 +15,11 @@ from . import (
class MemoryAnalyzerCLI(MemoryAnalyzer):
"""Memory analyzer with CLI-specific report generation."""

# Symbol size threshold for detailed analysis
SYMBOL_SIZE_THRESHOLD: int = (
100  # Show symbols larger than this in detailed analysis
)

# Column width constants
COL_COMPONENT: int = 29
COL_FLASH_TEXT: int = 14
@@ -191,14 +196,21 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%"
)

# Top 15 largest core symbols
# All core symbols above threshold
lines.append("")
lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:")
sorted_core_symbols = sorted(
self._esphome_core_symbols, key=lambda x: x[2], reverse=True
)
large_core_symbols = [
(symbol, demangled, size)
for symbol, demangled, size in sorted_core_symbols
if size > self.SYMBOL_SIZE_THRESHOLD
]

for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]):
lines.append(
f"{_COMPONENT_CORE} Symbols > {self.SYMBOL_SIZE_THRESHOLD} B ({len(large_core_symbols)} symbols):"
)
for i, (symbol, demangled, size) in enumerate(large_core_symbols):
lines.append(f"{i + 1}. {demangled} ({size:,} B)")

lines.append("=" * self.TABLE_WIDTH)
@@ -268,13 +280,15 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
lines.append(f"Total size: {comp_mem.flash_total:,} B")
lines.append("")

# Show all symbols > 100 bytes for better visibility
# Show all symbols above threshold for better visibility
large_symbols = [
(sym, dem, size) for sym, dem, size in sorted_symbols if size > 100
(sym, dem, size)
for sym, dem, size in sorted_symbols
if size > self.SYMBOL_SIZE_THRESHOLD
]

lines.append(
f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):"
f"{comp_name} Symbols > {self.SYMBOL_SIZE_THRESHOLD} B ({len(large_symbols)} symbols):"
)
for i, (symbol, demangled, size) in enumerate(large_symbols):
lines.append(f"{i + 1}. {demangled} ({size:,} B)")
@@ -1,12 +1,11 @@
#include "automation.h"
#include "esphome/core/log.h"

namespace esphome {
namespace binary_sensor {
namespace esphome::binary_sensor {

static const char *const TAG = "binary_sensor.automation";

void binary_sensor::MultiClickTrigger::on_state_(bool state) {
void MultiClickTrigger::on_state_(bool state) {
// Handle duplicate events
if (state == this->last_state_) {
return;
@@ -67,7 +66,7 @@ void binary_sensor::MultiClickTrigger::on_state_(bool state) {

*this->at_index_ = *this->at_index_ + 1;
}
void binary_sensor::MultiClickTrigger::schedule_cooldown_() {
void MultiClickTrigger::schedule_cooldown_() {
ESP_LOGV(TAG, "Multi Click: Invalid length of press, starting cooldown of %" PRIu32 " ms", this->invalid_cooldown_);
this->is_in_cooldown_ = true;
this->set_timeout("cooldown", this->invalid_cooldown_, [this]() {
@@ -79,7 +78,7 @@ void binary_sensor::MultiClickTrigger::schedule_cooldown_() {
this->cancel_timeout("is_valid");
this->cancel_timeout("is_not_valid");
}
void binary_sensor::MultiClickTrigger::schedule_is_valid_(uint32_t min_length) {
void MultiClickTrigger::schedule_is_valid_(uint32_t min_length) {
if (min_length == 0) {
this->is_valid_ = true;
return;
@@ -90,19 +89,19 @@ void binary_sensor::MultiClickTrigger::schedule_is_valid_(uint32_t min_length) {
this->is_valid_ = true;
});
}
void binary_sensor::MultiClickTrigger::schedule_is_not_valid_(uint32_t max_length) {
void MultiClickTrigger::schedule_is_not_valid_(uint32_t max_length) {
this->set_timeout("is_not_valid", max_length, [this]() {
ESP_LOGV(TAG, "Multi Click: You waited too long to %s.", this->parent_->state ? "RELEASE" : "PRESS");
this->is_valid_ = false;
this->schedule_cooldown_();
});
}
void binary_sensor::MultiClickTrigger::cancel() {
void MultiClickTrigger::cancel() {
ESP_LOGV(TAG, "Multi Click: Sequence explicitly cancelled.");
this->is_valid_ = false;
this->schedule_cooldown_();
}
void binary_sensor::MultiClickTrigger::trigger_() {
void MultiClickTrigger::trigger_() {
ESP_LOGV(TAG, "Multi Click: Hooray, multi click is valid. Triggering!");
this->at_index_.reset();
this->cancel_timeout("trigger");
@@ -118,5 +117,4 @@ bool match_interval(uint32_t min_length, uint32_t max_length, uint32_t length) {
return length >= min_length && length <= max_length;
}
}
} // namespace binary_sensor
} // namespace esphome
} // namespace esphome::binary_sensor
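The file above is the first of several in this comparison that collapse the old two-level namespace blocks into a single C++17 nested namespace definition; the same pattern repeats in the binary_sensor and cover headers below. A minimal sketch of the before/after shape, using a hypothetical some_component namespace rather than a file from this diff:

// Before: pre-C++17 style, two nested namespace blocks and two closing braces.
namespace esphome {
namespace some_component {
class Example {};
}  // namespace some_component
}  // namespace esphome

// After: C++17 nested namespace definition, one block and one closing brace.
namespace esphome::some_component {
class Example {};
}  // namespace esphome::some_component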
@@ -9,8 +9,7 @@
#include "esphome/core/helpers.h"
#include "esphome/components/binary_sensor/binary_sensor.h"

namespace esphome {
namespace binary_sensor {
namespace esphome::binary_sensor {

struct MultiClickTriggerEvent {
bool state;
@@ -172,5 +171,4 @@ template<typename... Ts> class BinarySensorInvalidateAction : public Action<Ts..
BinarySensor *sensor_;
};

} // namespace binary_sensor
} // namespace esphome
} // namespace esphome::binary_sensor
@@ -3,9 +3,7 @@
#include "esphome/core/controller_registry.h"
#include "esphome/core/log.h"

namespace esphome {

namespace binary_sensor {
namespace esphome::binary_sensor {

static const char *const TAG = "binary_sensor";

@@ -63,6 +61,4 @@ void BinarySensor::add_filters(std::initializer_list<Filter *> filters) {
}
bool BinarySensor::is_status_binary_sensor() const { return false; }

} // namespace binary_sensor

} // namespace esphome
} // namespace esphome::binary_sensor
@@ -6,9 +6,7 @@

#include <initializer_list>

namespace esphome {

namespace binary_sensor {
namespace esphome::binary_sensor {

class BinarySensor;
void log_binary_sensor(const char *tag, const char *prefix, const char *type, BinarySensor *obj);
@@ -70,5 +68,4 @@ class BinarySensorInitiallyOff : public BinarySensor {
bool has_state() const override { return true; }
};

} // namespace binary_sensor
} // namespace esphome
} // namespace esphome::binary_sensor
@@ -2,9 +2,7 @@

#include "binary_sensor.h"

namespace esphome {

namespace binary_sensor {
namespace esphome::binary_sensor {

static const char *const TAG = "sensor.filter";

@@ -132,6 +130,4 @@ optional<bool> SettleFilter::new_value(bool value) {

float SettleFilter::get_setup_priority() const { return setup_priority::HARDWARE; }

} // namespace binary_sensor

} // namespace esphome
} // namespace esphome::binary_sensor
@@ -4,9 +4,7 @@
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"

namespace esphome {

namespace binary_sensor {
namespace esphome::binary_sensor {

class BinarySensor;

@@ -139,6 +137,4 @@ class SettleFilter : public Filter, public Component {
bool steady_{true};
};

} // namespace binary_sensor

} // namespace esphome
} // namespace esphome::binary_sensor
@@ -4,8 +4,7 @@
#include "esphome/core/automation.h"
#include "cover.h"

namespace esphome {
namespace cover {
namespace esphome::cover {

template<typename... Ts> class OpenAction : public Action<Ts...> {
public:
@@ -131,5 +130,4 @@ class CoverClosedTrigger : public Trigger<> {
}
};

} // namespace cover
} // namespace esphome
} // namespace esphome::cover
@@ -6,8 +6,7 @@

#include "esphome/core/log.h"

namespace esphome {
namespace cover {
namespace esphome::cover {

static const char *const TAG = "cover";

@@ -212,5 +211,4 @@ void CoverRestoreState::apply(Cover *cover) {
cover->publish_state();
}

} // namespace cover
} // namespace esphome
} // namespace esphome::cover
@@ -7,8 +7,7 @@

#include "cover_traits.h"

namespace esphome {
namespace cover {
namespace esphome::cover {

const extern float COVER_OPEN;
const extern float COVER_CLOSED;
@@ -157,5 +156,4 @@ class Cover : public EntityBase, public EntityBase_DeviceClass {
ESPPreferenceObject rtc_;
};

} // namespace cover
} // namespace esphome
} // namespace esphome::cover
@@ -1,7 +1,6 @@
#pragma once

namespace esphome {
namespace cover {
namespace esphome::cover {

class CoverTraits {
public:
@@ -26,5 +25,4 @@ class CoverTraits {
bool supports_stop_{false};
};

} // namespace cover
} // namespace esphome
} // namespace esphome::cover
@@ -381,8 +381,9 @@ PLATFORM_VERSION_LOOKUP = {
|
||||
}
|
||||
|
||||
|
||||
def _check_versions(value):
|
||||
value = value.copy()
|
||||
def _check_versions(config):
|
||||
config = config.copy()
|
||||
value = config[CONF_FRAMEWORK]
|
||||
|
||||
if value[CONF_VERSION] in PLATFORM_VERSION_LOOKUP:
|
||||
if CONF_SOURCE in value or CONF_PLATFORM_VERSION in value:
|
||||
@@ -447,7 +448,7 @@ def _check_versions(value):
|
||||
"If there are connectivity or build issues please remove the manual version."
|
||||
)
|
||||
|
||||
return value
|
||||
return config
|
||||
|
||||
|
||||
def _parse_platform_version(value):
|
||||
@@ -497,6 +498,8 @@ def final_validate(config):
|
||||
from esphome.components.psram import DOMAIN as PSRAM_DOMAIN
|
||||
|
||||
errs = []
|
||||
conf_fw = config[CONF_FRAMEWORK]
|
||||
advanced = conf_fw[CONF_ADVANCED]
|
||||
full_config = fv.full_config.get()
|
||||
if pio_options := full_config[CONF_ESPHOME].get(CONF_PLATFORMIO_OPTIONS):
|
||||
pio_flash_size_key = "board_upload.flash_size"
|
||||
@@ -513,22 +516,14 @@ def final_validate(config):
|
||||
f"Please specify {CONF_FLASH_SIZE} within esp32 configuration only"
|
||||
)
|
||||
)
|
||||
if (
|
||||
config[CONF_VARIANT] != VARIANT_ESP32
|
||||
and CONF_ADVANCED in (conf_fw := config[CONF_FRAMEWORK])
|
||||
and CONF_IGNORE_EFUSE_MAC_CRC in conf_fw[CONF_ADVANCED]
|
||||
):
|
||||
if config[CONF_VARIANT] != VARIANT_ESP32 and advanced[CONF_IGNORE_EFUSE_MAC_CRC]:
|
||||
errs.append(
|
||||
cv.Invalid(
|
||||
f"'{CONF_IGNORE_EFUSE_MAC_CRC}' is not supported on {config[CONF_VARIANT]}",
|
||||
path=[CONF_FRAMEWORK, CONF_ADVANCED, CONF_IGNORE_EFUSE_MAC_CRC],
|
||||
)
|
||||
)
|
||||
if (
|
||||
config.get(CONF_FRAMEWORK, {})
|
||||
.get(CONF_ADVANCED, {})
|
||||
.get(CONF_EXECUTE_FROM_PSRAM)
|
||||
):
|
||||
if advanced[CONF_EXECUTE_FROM_PSRAM]:
|
||||
if config[CONF_VARIANT] != VARIANT_ESP32S3:
|
||||
errs.append(
|
||||
cv.Invalid(
|
||||
@@ -544,6 +539,17 @@ def final_validate(config):
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
config[CONF_FLASH_SIZE] == "32MB"
|
||||
and "ota" in full_config
|
||||
and not advanced[CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES]
|
||||
):
|
||||
errs.append(
|
||||
cv.Invalid(
|
||||
f"OTA with 32MB flash requires '{CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES}' to be set in the '{CONF_ADVANCED}' section of the esp32 configuration",
|
||||
path=[CONF_FLASH_SIZE],
|
||||
)
|
||||
)
|
||||
if errs:
|
||||
raise cv.MultipleInvalid(errs)
|
||||
|
||||
@@ -598,89 +604,74 @@ def _validate_idf_component(config: ConfigType) -> ConfigType:
|
||||
|
||||
FRAMEWORK_ESP_IDF = "esp-idf"
|
||||
FRAMEWORK_ARDUINO = "arduino"
|
||||
FRAMEWORK_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.Optional(CONF_TYPE, default=FRAMEWORK_ARDUINO): cv.one_of(
|
||||
FRAMEWORK_ESP_IDF, FRAMEWORK_ARDUINO
|
||||
),
|
||||
cv.Optional(CONF_VERSION, default="recommended"): cv.string_strict,
|
||||
cv.Optional(CONF_RELEASE): cv.string_strict,
|
||||
cv.Optional(CONF_SOURCE): cv.string_strict,
|
||||
cv.Optional(CONF_PLATFORM_VERSION): _parse_platform_version,
|
||||
cv.Optional(CONF_SDKCONFIG_OPTIONS, default={}): {
|
||||
cv.string_strict: cv.string_strict
|
||||
},
|
||||
cv.Optional(CONF_LOG_LEVEL, default="ERROR"): cv.one_of(
|
||||
*LOG_LEVELS_IDF, upper=True
|
||||
),
|
||||
cv.Optional(CONF_ADVANCED, default={}): cv.Schema(
|
||||
{
|
||||
cv.Optional(CONF_ASSERTION_LEVEL): cv.one_of(
|
||||
*ASSERTION_LEVELS, upper=True
|
||||
),
|
||||
cv.Optional(CONF_COMPILER_OPTIMIZATION, default="SIZE"): cv.one_of(
|
||||
*COMPILER_OPTIMIZATIONS, upper=True
|
||||
),
|
||||
cv.Optional(CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES): cv.boolean,
|
||||
cv.Optional(CONF_ENABLE_LWIP_ASSERT, default=True): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_IGNORE_EFUSE_CUSTOM_MAC, default=False
|
||||
): cv.boolean,
|
||||
cv.Optional(CONF_IGNORE_EFUSE_MAC_CRC): cv.boolean,
|
||||
# DHCP server is needed for WiFi AP mode. When WiFi component is used,
|
||||
# it will handle disabling DHCP server when AP is not configured.
|
||||
# Default to false (disabled) when WiFi is not used.
|
||||
cv.OnlyWithout(
|
||||
CONF_ENABLE_LWIP_DHCP_SERVER, "wifi", default=False
|
||||
): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_ENABLE_LWIP_MDNS_QUERIES, default=True
|
||||
): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_ENABLE_LWIP_BRIDGE_INTERFACE, default=False
|
||||
): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING, default=True
|
||||
): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY, default=True
|
||||
): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_DISABLE_LIBC_LOCKS_IN_IRAM, default=True
|
||||
): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_DISABLE_VFS_SUPPORT_TERMIOS, default=True
|
||||
): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_DISABLE_VFS_SUPPORT_SELECT, default=True
|
||||
): cv.boolean,
|
||||
cv.Optional(CONF_DISABLE_VFS_SUPPORT_DIR, default=True): cv.boolean,
|
||||
cv.Optional(CONF_EXECUTE_FROM_PSRAM): cv.boolean,
|
||||
cv.Optional(CONF_LOOP_TASK_STACK_SIZE, default=8192): cv.int_range(
|
||||
min=8192, max=32768
|
||||
),
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
|
||||
cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.Required(CONF_NAME): cv.string_strict,
|
||||
cv.Optional(CONF_SOURCE): cv.git_ref,
|
||||
cv.Optional(CONF_REF): cv.string,
|
||||
cv.Optional(CONF_PATH): cv.string,
|
||||
cv.Optional(CONF_REFRESH): cv.All(
|
||||
cv.string, cv.source_refresh
|
||||
),
|
||||
}
|
||||
),
|
||||
_validate_idf_component,
|
||||
)
|
||||
),
|
||||
}
|
||||
),
|
||||
_check_versions,
|
||||
FRAMEWORK_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.Optional(CONF_TYPE): cv.one_of(FRAMEWORK_ESP_IDF, FRAMEWORK_ARDUINO),
|
||||
cv.Optional(CONF_VERSION, default="recommended"): cv.string_strict,
|
||||
cv.Optional(CONF_RELEASE): cv.string_strict,
|
||||
cv.Optional(CONF_SOURCE): cv.string_strict,
|
||||
cv.Optional(CONF_PLATFORM_VERSION): _parse_platform_version,
|
||||
cv.Optional(CONF_SDKCONFIG_OPTIONS, default={}): {
|
||||
cv.string_strict: cv.string_strict
|
||||
},
|
||||
cv.Optional(CONF_LOG_LEVEL, default="ERROR"): cv.one_of(
|
||||
*LOG_LEVELS_IDF, upper=True
|
||||
),
|
||||
cv.Optional(CONF_ADVANCED, default={}): cv.Schema(
|
||||
{
|
||||
cv.Optional(CONF_ASSERTION_LEVEL): cv.one_of(
|
||||
*ASSERTION_LEVELS, upper=True
|
||||
),
|
||||
cv.Optional(CONF_COMPILER_OPTIMIZATION, default="SIZE"): cv.one_of(
|
||||
*COMPILER_OPTIMIZATIONS, upper=True
|
||||
),
|
||||
cv.Optional(
|
||||
CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES, default=False
|
||||
): cv.boolean,
|
||||
cv.Optional(CONF_ENABLE_LWIP_ASSERT, default=True): cv.boolean,
|
||||
cv.Optional(CONF_IGNORE_EFUSE_CUSTOM_MAC, default=False): cv.boolean,
|
||||
cv.Optional(CONF_IGNORE_EFUSE_MAC_CRC, default=False): cv.boolean,
|
||||
# DHCP server is needed for WiFi AP mode. When WiFi component is used,
|
||||
# it will handle disabling DHCP server when AP is not configured.
|
||||
# Default to false (disabled) when WiFi is not used.
|
||||
cv.OnlyWithout(
|
||||
CONF_ENABLE_LWIP_DHCP_SERVER, "wifi", default=False
|
||||
): cv.boolean,
|
||||
cv.Optional(CONF_ENABLE_LWIP_MDNS_QUERIES, default=True): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_ENABLE_LWIP_BRIDGE_INTERFACE, default=False
|
||||
): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING, default=True
|
||||
): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY, default=True
|
||||
): cv.boolean,
|
||||
cv.Optional(CONF_DISABLE_LIBC_LOCKS_IN_IRAM, default=True): cv.boolean,
|
||||
cv.Optional(CONF_DISABLE_VFS_SUPPORT_TERMIOS, default=True): cv.boolean,
|
||||
cv.Optional(CONF_DISABLE_VFS_SUPPORT_SELECT, default=True): cv.boolean,
|
||||
cv.Optional(CONF_DISABLE_VFS_SUPPORT_DIR, default=True): cv.boolean,
|
||||
cv.Optional(CONF_EXECUTE_FROM_PSRAM, default=False): cv.boolean,
|
||||
cv.Optional(CONF_LOOP_TASK_STACK_SIZE, default=8192): cv.int_range(
|
||||
min=8192, max=32768
|
||||
),
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
|
||||
cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.Required(CONF_NAME): cv.string_strict,
|
||||
cv.Optional(CONF_SOURCE): cv.git_ref,
|
||||
cv.Optional(CONF_REF): cv.string,
|
||||
cv.Optional(CONF_PATH): cv.string,
|
||||
cv.Optional(CONF_REFRESH): cv.All(cv.string, cv.source_refresh),
|
||||
}
|
||||
),
|
||||
_validate_idf_component,
|
||||
)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -743,11 +734,11 @@ def _show_framework_migration_message(name: str, variant: str) -> None:
|
||||
|
||||
|
||||
def _set_default_framework(config):
|
||||
config = config.copy()
|
||||
if CONF_FRAMEWORK not in config:
|
||||
config = config.copy()
|
||||
|
||||
variant = config[CONF_VARIANT]
|
||||
config[CONF_FRAMEWORK] = FRAMEWORK_SCHEMA({})
|
||||
if CONF_TYPE not in config[CONF_FRAMEWORK]:
|
||||
variant = config[CONF_VARIANT]
|
||||
if variant in ARDUINO_ALLOWED_VARIANTS:
|
||||
config[CONF_FRAMEWORK][CONF_TYPE] = FRAMEWORK_ARDUINO
|
||||
_show_framework_migration_message(
|
||||
@@ -787,6 +778,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
),
|
||||
_detect_variant,
|
||||
_set_default_framework,
|
||||
_check_versions,
|
||||
set_core_data,
|
||||
cv.has_at_least_one_key(CONF_BOARD, CONF_VARIANT),
|
||||
)
|
||||
@@ -805,9 +797,7 @@ def _configure_lwip_max_sockets(conf: dict) -> None:
|
||||
from esphome.components.socket import KEY_SOCKET_CONSUMERS
|
||||
|
||||
# Check if user manually specified CONFIG_LWIP_MAX_SOCKETS
|
||||
user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get(
|
||||
"CONFIG_LWIP_MAX_SOCKETS"
|
||||
)
|
||||
user_max_sockets = conf[CONF_SDKCONFIG_OPTIONS].get("CONFIG_LWIP_MAX_SOCKETS")
|
||||
|
||||
socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {})
|
||||
total_sockets = sum(socket_consumers.values())
|
||||
@@ -977,23 +967,18 @@ async def to_code(config):
|
||||
# WiFi component handles its own optimization when AP mode is not used
|
||||
# When using Arduino with Ethernet, DHCP server functions must be available
|
||||
# for the Network library to compile, even if not actively used
|
||||
if (
|
||||
CONF_ENABLE_LWIP_DHCP_SERVER in advanced
|
||||
and not advanced[CONF_ENABLE_LWIP_DHCP_SERVER]
|
||||
and not (
|
||||
conf[CONF_TYPE] == FRAMEWORK_ARDUINO
|
||||
and "ethernet" in CORE.loaded_integrations
|
||||
)
|
||||
if advanced.get(CONF_ENABLE_LWIP_DHCP_SERVER) is False and not (
|
||||
conf[CONF_TYPE] == FRAMEWORK_ARDUINO and "ethernet" in CORE.loaded_integrations
|
||||
):
|
||||
add_idf_sdkconfig_option("CONFIG_LWIP_DHCPS", False)
|
||||
if not advanced.get(CONF_ENABLE_LWIP_MDNS_QUERIES, True):
|
||||
if not advanced[CONF_ENABLE_LWIP_MDNS_QUERIES]:
|
||||
add_idf_sdkconfig_option("CONFIG_LWIP_DNS_SUPPORT_MDNS_QUERIES", False)
|
||||
if not advanced.get(CONF_ENABLE_LWIP_BRIDGE_INTERFACE, False):
|
||||
if not advanced[CONF_ENABLE_LWIP_BRIDGE_INTERFACE]:
|
||||
add_idf_sdkconfig_option("CONFIG_LWIP_BRIDGEIF_MAX_PORTS", 0)
|
||||
|
||||
_configure_lwip_max_sockets(conf)
|
||||
|
||||
if advanced.get(CONF_EXECUTE_FROM_PSRAM, False):
|
||||
if advanced[CONF_EXECUTE_FROM_PSRAM]:
|
||||
add_idf_sdkconfig_option("CONFIG_SPIRAM_FETCH_INSTRUCTIONS", True)
|
||||
add_idf_sdkconfig_option("CONFIG_SPIRAM_RODATA", True)
|
||||
|
||||
@@ -1004,23 +989,22 @@ async def to_code(config):
|
||||
# - select() on 4 sockets: ~190μs (Arduino/core locking) vs ~235μs (ESP-IDF default)
|
||||
# - Up to 200% slower under load when all operations queue through tcpip_thread
|
||||
# Enabling this makes ESP-IDF socket performance match Arduino framework.
|
||||
if advanced.get(CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING, True):
|
||||
if advanced[CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING]:
|
||||
add_idf_sdkconfig_option("CONFIG_LWIP_TCPIP_CORE_LOCKING", True)
|
||||
if advanced.get(CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY, True):
|
||||
if advanced[CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY]:
|
||||
add_idf_sdkconfig_option("CONFIG_LWIP_CHECK_THREAD_SAFETY", True)
|
||||
|
||||
# Disable placing libc locks in IRAM to save RAM
|
||||
# This is safe for ESPHome since no IRAM ISRs (interrupts that run while cache is disabled)
|
||||
# use libc lock APIs. Saves approximately 1.3KB (1,356 bytes) of IRAM.
|
||||
if advanced.get(CONF_DISABLE_LIBC_LOCKS_IN_IRAM, True):
|
||||
if advanced[CONF_DISABLE_LIBC_LOCKS_IN_IRAM]:
|
||||
add_idf_sdkconfig_option("CONFIG_LIBC_LOCKS_PLACE_IN_IRAM", False)
|
||||
|
||||
# Disable VFS support for termios (terminal I/O functions)
|
||||
# ESPHome doesn't use termios functions on ESP32 (only used in host UART driver).
|
||||
# Saves approximately 1.8KB of flash when disabled (default).
|
||||
add_idf_sdkconfig_option(
|
||||
"CONFIG_VFS_SUPPORT_TERMIOS",
|
||||
not advanced.get(CONF_DISABLE_VFS_SUPPORT_TERMIOS, True),
|
||||
"CONFIG_VFS_SUPPORT_TERMIOS", not advanced[CONF_DISABLE_VFS_SUPPORT_TERMIOS]
|
||||
)
|
||||
|
||||
# Disable VFS support for select() with file descriptors
|
||||
@@ -1034,8 +1018,7 @@ async def to_code(config):
|
||||
else:
|
||||
# No component needs it - allow user to control (default: disabled)
|
||||
add_idf_sdkconfig_option(
|
||||
"CONFIG_VFS_SUPPORT_SELECT",
|
||||
not advanced.get(CONF_DISABLE_VFS_SUPPORT_SELECT, True),
|
||||
"CONFIG_VFS_SUPPORT_SELECT", not advanced[CONF_DISABLE_VFS_SUPPORT_SELECT]
|
||||
)
|
||||
|
||||
# Disable VFS support for directory functions (opendir, readdir, mkdir, etc.)
|
||||
@@ -1048,8 +1031,7 @@ async def to_code(config):
|
||||
else:
|
||||
# No component needs it - allow user to control (default: disabled)
|
||||
add_idf_sdkconfig_option(
|
||||
"CONFIG_VFS_SUPPORT_DIR",
|
||||
not advanced.get(CONF_DISABLE_VFS_SUPPORT_DIR, True),
|
||||
"CONFIG_VFS_SUPPORT_DIR", not advanced[CONF_DISABLE_VFS_SUPPORT_DIR]
|
||||
)
|
||||
|
||||
cg.add_platformio_option("board_build.partitions", "partitions.csv")
|
||||
@@ -1063,7 +1045,7 @@ async def to_code(config):
|
||||
add_idf_sdkconfig_option(flag, assertion_level == key)
|
||||
|
||||
add_idf_sdkconfig_option("CONFIG_COMPILER_OPTIMIZATION_DEFAULT", False)
|
||||
compiler_optimization = advanced.get(CONF_COMPILER_OPTIMIZATION)
|
||||
compiler_optimization = advanced[CONF_COMPILER_OPTIMIZATION]
|
||||
for key, flag in COMPILER_OPTIMIZATIONS.items():
|
||||
add_idf_sdkconfig_option(flag, compiler_optimization == key)
|
||||
|
||||
@@ -1072,18 +1054,20 @@ async def to_code(config):
|
||||
conf[CONF_ADVANCED][CONF_ENABLE_LWIP_ASSERT],
|
||||
)
|
||||
|
||||
if advanced.get(CONF_IGNORE_EFUSE_MAC_CRC):
|
||||
if advanced[CONF_IGNORE_EFUSE_MAC_CRC]:
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_MAC_IGNORE_MAC_CRC_ERROR", True)
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_PHY_CALIBRATION_AND_DATA_STORAGE", False)
|
||||
if advanced.get(CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES):
|
||||
if advanced[CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES]:
|
||||
_LOGGER.warning(
|
||||
"Using experimental features in ESP-IDF may result in unexpected failures."
|
||||
)
|
||||
add_idf_sdkconfig_option("CONFIG_IDF_EXPERIMENTAL_FEATURES", True)
|
||||
if config[CONF_FLASH_SIZE] == "32MB":
|
||||
add_idf_sdkconfig_option(
|
||||
"CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH", True
|
||||
)
|
||||
|
||||
cg.add_define(
|
||||
"ESPHOME_LOOP_TASK_STACK_SIZE", advanced.get(CONF_LOOP_TASK_STACK_SIZE)
|
||||
)
|
||||
cg.add_define("ESPHOME_LOOP_TASK_STACK_SIZE", advanced[CONF_LOOP_TASK_STACK_SIZE])
|
||||
|
||||
cg.add_define(
|
||||
"USE_ESP_IDF_VERSION_CODE",
|
||||
|
||||
@@ -36,7 +36,6 @@ from esphome.const import (
|
||||
CONF_WEIGHT,
|
||||
)
|
||||
from esphome.core import CORE, HexInt
|
||||
from esphome.helpers import cpp_string_escape
|
||||
from esphome.types import ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -50,7 +49,6 @@ font_ns = cg.esphome_ns.namespace("font")
|
||||
|
||||
Font = font_ns.class_("Font")
|
||||
Glyph = font_ns.class_("Glyph")
|
||||
GlyphData = font_ns.struct("GlyphData")
|
||||
|
||||
CONF_BPP = "bpp"
|
||||
CONF_EXTRAS = "extras"
|
||||
@@ -463,7 +461,7 @@ FONT_SCHEMA = cv.Schema(
|
||||
)
|
||||
),
|
||||
cv.GenerateID(CONF_RAW_DATA_ID): cv.declare_id(cg.uint8),
|
||||
cv.GenerateID(CONF_RAW_GLYPH_ID): cv.declare_id(GlyphData),
|
||||
cv.GenerateID(CONF_RAW_GLYPH_ID): cv.declare_id(Glyph),
|
||||
},
|
||||
)
|
||||
|
||||
@@ -488,6 +486,8 @@ class GlyphInfo:
|
||||
|
||||
|
||||
def glyph_to_glyphinfo(glyph, font, size, bpp):
|
||||
# Convert to 32 bit unicode codepoint
|
||||
glyph = ord(glyph)
|
||||
scale = 256 // (1 << bpp)
|
||||
if not font.is_scalable:
|
||||
sizes = [pt_to_px(x.size) for x in font.available_sizes]
|
||||
@@ -583,22 +583,15 @@ async def to_code(config):
|
||||
|
||||
# Create the glyph table that points to data in the above array.
|
||||
glyph_initializer = [
|
||||
cg.StructInitializer(
|
||||
GlyphData,
|
||||
(
|
||||
"a_char",
|
||||
cg.RawExpression(f"(const uint8_t *){cpp_string_escape(x.glyph)}"),
|
||||
),
|
||||
(
|
||||
"data",
|
||||
cg.RawExpression(f"{str(prog_arr)} + {str(y - len(x.bitmap_data))}"),
|
||||
),
|
||||
("advance", x.advance),
|
||||
("offset_x", x.offset_x),
|
||||
("offset_y", x.offset_y),
|
||||
("width", x.width),
|
||||
("height", x.height),
|
||||
)
|
||||
[
|
||||
x.glyph,
|
||||
prog_arr + (y - len(x.bitmap_data)),
|
||||
x.advance,
|
||||
x.offset_x,
|
||||
x.offset_y,
|
||||
x.width,
|
||||
x.height,
|
||||
]
|
||||
for (x, y) in zip(
|
||||
glyph_args, list(accumulate([len(x.bitmap_data) for x in glyph_args]))
|
||||
)
|
||||
|
||||
@@ -6,133 +6,245 @@
|
||||
|
||||
namespace esphome {
|
||||
namespace font {
|
||||
|
||||
static const char *const TAG = "font";
|
||||
|
||||
const uint8_t *Glyph::get_char() const { return this->glyph_data_->a_char; }
|
||||
// Compare the char at the string position with this char.
|
||||
// Return true if this char is less than or equal the other.
|
||||
bool Glyph::compare_to(const uint8_t *str) const {
|
||||
// 1 -> this->char_
|
||||
// 2 -> str
|
||||
for (uint32_t i = 0;; i++) {
|
||||
if (this->glyph_data_->a_char[i] == '\0')
|
||||
return true;
|
||||
if (str[i] == '\0')
|
||||
return false;
|
||||
if (this->glyph_data_->a_char[i] > str[i])
|
||||
return false;
|
||||
if (this->glyph_data_->a_char[i] < str[i])
|
||||
return true;
|
||||
#ifdef USE_LVGL_FONT
|
||||
const uint8_t *Font::get_glyph_bitmap(const lv_font_t *font, uint32_t unicode_letter) {
|
||||
auto *fe = (Font *) font->dsc;
|
||||
const auto *gd = fe->get_glyph_data_(unicode_letter);
|
||||
if (gd == nullptr) {
|
||||
return nullptr;
|
||||
}
|
||||
// this should not happen
|
||||
return false;
|
||||
}
|
||||
int Glyph::match_length(const uint8_t *str) const {
|
||||
for (uint32_t i = 0;; i++) {
|
||||
if (this->glyph_data_->a_char[i] == '\0')
|
||||
return i;
|
||||
if (str[i] != this->glyph_data_->a_char[i])
|
||||
return 0;
|
||||
}
|
||||
// this should not happen
|
||||
return 0;
|
||||
}
|
||||
void Glyph::scan_area(int *x1, int *y1, int *width, int *height) const {
|
||||
*x1 = this->glyph_data_->offset_x;
|
||||
*y1 = this->glyph_data_->offset_y;
|
||||
*width = this->glyph_data_->width;
|
||||
*height = this->glyph_data_->height;
|
||||
return gd->data;
|
||||
}
|
||||
|
||||
Font::Font(const GlyphData *data, int data_nr, int baseline, int height, int descender, int xheight, int capheight,
|
||||
bool Font::get_glyph_dsc_cb(const lv_font_t *font, lv_font_glyph_dsc_t *dsc, uint32_t unicode_letter, uint32_t next) {
|
||||
auto *fe = (Font *) font->dsc;
|
||||
const auto *gd = fe->get_glyph_data_(unicode_letter);
|
||||
if (gd == nullptr) {
|
||||
return false;
|
||||
}
|
||||
dsc->adv_w = gd->advance;
|
||||
dsc->ofs_x = gd->offset_x;
|
||||
dsc->ofs_y = fe->height_ - gd->height - gd->offset_y - fe->lv_font_.base_line;
|
||||
dsc->box_w = gd->width;
|
||||
dsc->box_h = gd->height;
|
||||
dsc->is_placeholder = 0;
|
||||
dsc->bpp = fe->get_bpp();
|
||||
return true;
|
||||
}
|
||||
|
||||
const Glyph *Font::get_glyph_data_(uint32_t unicode_letter) {
|
||||
if (unicode_letter == this->last_letter_ && this->last_letter_ != 0)
|
||||
return this->last_data_;
|
||||
auto *glyph = this->find_glyph(unicode_letter);
|
||||
if (glyph == nullptr) {
|
||||
return nullptr;
|
||||
}
|
||||
this->last_data_ = glyph;
|
||||
this->last_letter_ = unicode_letter;
|
||||
return glyph;
|
||||
}
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Attempt to extract a 32 bit Unicode codepoint from a UTF-8 string.
|
||||
* If successful, return the codepoint and set the length to the number of bytes read.
|
||||
* If the end of the string has been reached and a valid codepoint has not been found, return 0 and set the length to
|
||||
* 0.
|
||||
*
|
||||
* @param utf8_str The input string
|
||||
* @param length Pointer to length storage
|
||||
* @return The extracted code point
|
||||
*/
|
||||
static uint32_t extract_unicode_codepoint(const char *utf8_str, size_t *length) {
|
||||
// Safely cast to uint8_t* for correct bitwise operations on bytes
|
||||
const uint8_t *current = reinterpret_cast<const uint8_t *>(utf8_str);
|
||||
uint32_t code_point = 0;
|
||||
uint8_t c1 = *current++;
|
||||
|
||||
// check for end of string
|
||||
if (c1 == 0) {
|
||||
*length = 0;
|
||||
return 0;
|
||||
}
|
||||
|
||||
// --- 1-Byte Sequence: 0xxxxxxx (ASCII) ---
|
||||
if (c1 < 0x80) {
|
||||
// Valid ASCII byte.
|
||||
code_point = c1;
|
||||
// Optimization: No need to check for continuation bytes.
|
||||
}
|
||||
// --- 2-Byte Sequence: 110xxxxx 10xxxxxx ---
|
||||
else if ((c1 & 0xE0) == 0xC0) {
|
||||
uint8_t c2 = *current++;
|
||||
|
||||
// Error Check 1: Check if c2 is a valid continuation byte (10xxxxxx)
|
||||
if ((c2 & 0xC0) != 0x80) {
|
||||
*length = 0;
|
||||
return 0;
|
||||
}
|
||||
|
||||
code_point = (c1 & 0x1F) << 6;
|
||||
code_point |= (c2 & 0x3F);
|
||||
|
||||
// Error Check 2: Overlong check (2-byte must be > 0x7F)
|
||||
if (code_point <= 0x7F) {
|
||||
*length = 0;
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
// --- 3-Byte Sequence: 1110xxxx 10xxxxxx 10xxxxxx ---
|
||||
else if ((c1 & 0xF0) == 0xE0) {
|
||||
uint8_t c2 = *current++;
|
||||
uint8_t c3 = *current++;
|
||||
|
||||
// Error Check 1: Check continuation bytes
|
||||
if (((c2 & 0xC0) != 0x80) || ((c3 & 0xC0) != 0x80)) {
|
||||
*length = 0;
|
||||
return 0;
|
||||
}
|
||||
|
||||
code_point = (c1 & 0x0F) << 12;
|
||||
code_point |= (c2 & 0x3F) << 6;
|
||||
code_point |= (c3 & 0x3F);
|
||||
|
||||
// Error Check 2: Overlong check (3-byte must be > 0x7FF)
|
||||
// Also check for surrogates (0xD800-0xDFFF)
|
||||
if (code_point <= 0x7FF || (code_point >= 0xD800 && code_point <= 0xDFFF)) {
|
||||
*length = 0;
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
// --- 4-Byte Sequence: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx ---
|
||||
else if ((c1 & 0xF8) == 0xF0) {
|
||||
uint8_t c2 = *current++;
|
||||
uint8_t c3 = *current++;
|
||||
uint8_t c4 = *current++;
|
||||
|
||||
// Error Check 1: Check continuation bytes
|
||||
if (((c2 & 0xC0) != 0x80) || ((c3 & 0xC0) != 0x80) || ((c4 & 0xC0) != 0x80)) {
|
||||
*length = 0;
|
||||
return 0;
|
||||
}
|
||||
|
||||
code_point = (c1 & 0x07) << 18;
|
||||
code_point |= (c2 & 0x3F) << 12;
|
||||
code_point |= (c3 & 0x3F) << 6;
|
||||
code_point |= (c4 & 0x3F);
|
||||
|
||||
// Error Check 2: Overlong check (4-byte must be > 0xFFFF)
|
||||
// Also check for valid Unicode range (must be <= 0x10FFFF)
|
||||
if (code_point <= 0xFFFF || code_point > 0x10FFFF) {
|
||||
*length = 0;
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
// --- Invalid leading byte (e.g., 10xxxxxx or 11111xxx) ---
|
||||
else {
|
||||
*length = 0;
|
||||
return 0;
|
||||
}
|
||||
*length = current - reinterpret_cast<const uint8_t *>(utf8_str);
|
||||
return code_point;
|
||||
}
|
||||
|
||||
Font::Font(const Glyph *data, int data_nr, int baseline, int height, int descender, int xheight, int capheight,
|
||||
uint8_t bpp)
|
||||
: baseline_(baseline),
|
||||
: glyphs_(ConstVector(data, data_nr)),
|
||||
baseline_(baseline),
|
||||
height_(height),
|
||||
descender_(descender),
|
||||
linegap_(height - baseline - descender),
|
||||
xheight_(xheight),
|
||||
capheight_(capheight),
|
||||
bpp_(bpp) {
|
||||
glyphs_.reserve(data_nr);
|
||||
for (int i = 0; i < data_nr; ++i)
|
||||
glyphs_.emplace_back(&data[i]);
|
||||
#ifdef USE_LVGL_FONT
|
||||
this->lv_font_.dsc = this;
|
||||
this->lv_font_.line_height = this->get_height();
|
||||
this->lv_font_.base_line = this->lv_font_.line_height - this->get_baseline();
|
||||
this->lv_font_.get_glyph_dsc = get_glyph_dsc_cb;
|
||||
this->lv_font_.get_glyph_bitmap = get_glyph_bitmap;
|
||||
this->lv_font_.subpx = LV_FONT_SUBPX_NONE;
|
||||
this->lv_font_.underline_position = -1;
|
||||
this->lv_font_.underline_thickness = 1;
|
||||
#endif
|
||||
}
|
||||
int Font::match_next_glyph(const uint8_t *str, int *match_length) {
|
||||
|
||||
const Glyph *Font::find_glyph(uint32_t codepoint) const {
|
||||
int lo = 0;
|
||||
int hi = this->glyphs_.size() - 1;
|
||||
while (lo != hi) {
|
||||
int mid = (lo + hi + 1) / 2;
|
||||
if (this->glyphs_[mid].compare_to(str)) {
|
||||
if (this->glyphs_[mid].is_less_or_equal(codepoint)) {
|
||||
lo = mid;
|
||||
} else {
|
||||
hi = mid - 1;
|
||||
}
|
||||
}
|
||||
*match_length = this->glyphs_[lo].match_length(str);
|
||||
if (*match_length <= 0)
|
||||
return -1;
|
||||
return lo;
|
||||
auto *result = &this->glyphs_[lo];
|
||||
if (result->code_point == codepoint)
|
||||
return result;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
#ifdef USE_DISPLAY
|
||||
void Font::measure(const char *str, int *width, int *x_offset, int *baseline, int *height) {
|
||||
*baseline = this->baseline_;
|
||||
*height = this->height_;
|
||||
int i = 0;
|
||||
int min_x = 0;
|
||||
bool has_char = false;
|
||||
int x = 0;
|
||||
while (str[i] != '\0') {
|
||||
int match_length;
|
||||
int glyph_n = this->match_next_glyph((const uint8_t *) str + i, &match_length);
|
||||
if (glyph_n < 0) {
|
||||
for (;;) {
|
||||
size_t length;
|
||||
auto code_point = extract_unicode_codepoint(str, &length);
|
||||
if (length == 0)
|
||||
break;
|
||||
str += length;
|
||||
auto *glyph = this->find_glyph(code_point);
|
||||
if (glyph == nullptr) {
|
||||
// Unknown char, skip
|
||||
if (!this->get_glyphs().empty())
|
||||
x += this->get_glyphs()[0].glyph_data_->advance;
|
||||
i++;
|
||||
if (!this->glyphs_.empty())
|
||||
x += this->glyphs_[0].advance;
|
||||
continue;
|
||||
}
|
||||
|
||||
const Glyph &glyph = this->glyphs_[glyph_n];
|
||||
if (!has_char) {
|
||||
min_x = glyph.glyph_data_->offset_x;
|
||||
min_x = glyph->offset_x;
|
||||
} else {
|
||||
min_x = std::min(min_x, x + glyph.glyph_data_->offset_x);
|
||||
min_x = std::min(min_x, x + glyph->offset_x);
|
||||
}
|
||||
x += glyph.glyph_data_->advance;
|
||||
x += glyph->advance;
|
||||
|
||||
i += match_length;
|
||||
has_char = true;
|
||||
}
|
||||
*x_offset = min_x;
|
||||
*width = x - min_x;
|
||||
}
|
||||
|
||||
void Font::print(int x_start, int y_start, display::Display *display, Color color, const char *text, Color background) {
|
||||
int i = 0;
|
||||
int x_at = x_start;
|
||||
int scan_x1, scan_y1, scan_width, scan_height;
|
||||
while (text[i] != '\0') {
|
||||
int match_length;
|
||||
int glyph_n = this->match_next_glyph((const uint8_t *) text + i, &match_length);
|
||||
if (glyph_n < 0) {
|
||||
for (;;) {
|
||||
size_t length;
|
||||
auto code_point = extract_unicode_codepoint(text, &length);
|
||||
if (length == 0)
|
||||
break;
|
||||
text += length;
|
||||
auto *glyph = this->find_glyph(code_point);
|
||||
if (glyph == nullptr) {
|
||||
// Unknown char, skip
|
||||
ESP_LOGW(TAG, "Encountered character without representation in font: '%c'", text[i]);
|
||||
if (!this->get_glyphs().empty()) {
|
||||
uint8_t glyph_width = this->get_glyphs()[0].glyph_data_->advance;
|
||||
display->filled_rectangle(x_at, y_start, glyph_width, this->height_, color);
|
||||
ESP_LOGW(TAG, "Codepoint 0x%08" PRIx32 " not found in font", code_point);
|
||||
if (!this->glyphs_.empty()) {
|
||||
uint8_t glyph_width = this->glyphs_[0].advance;
|
||||
display->rectangle(x_at, y_start, glyph_width, this->height_, color);
|
||||
x_at += glyph_width;
|
||||
}
|
||||
|
||||
i++;
|
||||
continue;
|
||||
}
|
||||
|
||||
const Glyph &glyph = this->get_glyphs()[glyph_n];
|
||||
glyph.scan_area(&scan_x1, &scan_y1, &scan_width, &scan_height);
|
||||
|
||||
const uint8_t *data = glyph.glyph_data_->data;
|
||||
const int max_x = x_at + scan_x1 + scan_width;
|
||||
const int max_y = y_start + scan_y1 + scan_height;
|
||||
const uint8_t *data = glyph->data;
|
||||
const int max_x = x_at + glyph->offset_x + glyph->width;
|
||||
const int max_y = y_start + glyph->offset_y + glyph->height;
|
||||
|
||||
uint8_t bitmask = 0;
|
||||
uint8_t pixel_data = 0;
|
||||
@@ -145,10 +257,10 @@ void Font::print(int x_start, int y_start, display::Display *display, Color colo
|
||||
auto b_g = (float) background.g;
|
||||
auto b_b = (float) background.b;
|
||||
auto b_w = (float) background.w;
|
||||
for (int glyph_y = y_start + scan_y1; glyph_y != max_y; glyph_y++) {
|
||||
for (int glyph_x = x_at + scan_x1; glyph_x != max_x; glyph_x++) {
|
||||
for (int glyph_y = y_start + glyph->offset_y; glyph_y != max_y; glyph_y++) {
|
||||
for (int glyph_x = x_at + glyph->offset_x; glyph_x != max_x; glyph_x++) {
|
||||
uint8_t pixel = 0;
|
||||
for (int bit_num = 0; bit_num != this->bpp_; bit_num++) {
|
||||
for (uint8_t bit_num = 0; bit_num != this->bpp_; bit_num++) {
|
||||
if (bitmask == 0) {
|
||||
pixel_data = progmem_read_byte(data++);
|
||||
bitmask = 0x80;
|
||||
@@ -168,12 +280,9 @@ void Font::print(int x_start, int y_start, display::Display *display, Color colo
|
||||
}
|
||||
}
|
||||
}
|
||||
x_at += glyph.glyph_data_->advance;
|
||||
|
||||
i += match_length;
|
||||
x_at += glyph->advance;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
} // namespace font
|
||||
} // namespace esphome
|
||||
|
||||
@@ -6,14 +6,30 @@
|
||||
#ifdef USE_DISPLAY
|
||||
#include "esphome/components/display/display.h"
|
||||
#endif
|
||||
#ifdef USE_LVGL_FONT
|
||||
#include <lvgl.h>
|
||||
#endif
|
||||
|
||||
namespace esphome {
|
||||
namespace font {
|
||||
|
||||
class Font;
|
||||
|
||||
struct GlyphData {
|
||||
const uint8_t *a_char;
|
||||
class Glyph {
|
||||
public:
|
||||
constexpr Glyph(uint32_t code_point, const uint8_t *data, int advance, int offset_x, int offset_y, int width,
|
||||
int height)
|
||||
: code_point(code_point),
|
||||
data(data),
|
||||
advance(advance),
|
||||
offset_x(offset_x),
|
||||
offset_y(offset_y),
|
||||
width(width),
|
||||
height(height) {}
|
||||
|
||||
bool is_less_or_equal(uint32_t other) const { return this->code_point <= other; }
|
||||
|
||||
const uint32_t code_point;
|
||||
const uint8_t *data;
|
||||
int advance;
|
||||
int offset_x;
|
||||
@@ -22,26 +38,6 @@ struct GlyphData {
|
||||
int height;
|
||||
};
|
||||
|
||||
class Glyph {
|
||||
public:
|
||||
Glyph(const GlyphData *data) : glyph_data_(data) {}
|
||||
|
||||
const uint8_t *get_char() const;
|
||||
|
||||
bool compare_to(const uint8_t *str) const;
|
||||
|
||||
int match_length(const uint8_t *str) const;
|
||||
|
||||
void scan_area(int *x1, int *y1, int *width, int *height) const;
|
||||
|
||||
const GlyphData *get_glyph_data() const { return this->glyph_data_; }
|
||||
|
||||
protected:
|
||||
friend Font;
|
||||
|
||||
const GlyphData *glyph_data_;
|
||||
};
|
||||
|
||||
class Font
|
||||
#ifdef USE_DISPLAY
|
||||
: public display::BaseFont
|
||||
@@ -50,8 +46,8 @@ class Font
|
||||
public:
|
||||
/** Construct the font with the given glyphs.
|
||||
*
|
||||
* @param data A vector of glyphs, must be sorted lexicographically.
|
||||
* @param data_nr The number of glyphs in data.
|
||||
* @param data A list of glyphs, must be sorted lexicographically.
|
||||
* @param data_nr The number of glyphs
|
||||
* @param baseline The y-offset from the top of the text to the baseline.
|
||||
* @param height The y-offset from the top of the text to the bottom.
|
||||
* @param descender The y-offset from the baseline to the lowest stroke in the font (e.g. from letters like g or p).
|
||||
@@ -59,10 +55,10 @@ class Font
|
||||
* @param capheight The height of capital letters, usually measured at the "X" glyph.
|
||||
* @param bpp The bits per pixel used for this font. Used to read data out of the glyph bitmaps.
|
||||
*/
|
||||
Font(const GlyphData *data, int data_nr, int baseline, int height, int descender, int xheight, int capheight,
|
||||
Font(const Glyph *data, int data_nr, int baseline, int height, int descender, int xheight, int capheight,
|
||||
uint8_t bpp = 1);
|
||||
|
||||
int match_next_glyph(const uint8_t *str, int *match_length);
|
||||
const Glyph *find_glyph(uint32_t codepoint) const;
|
||||
|
||||
#ifdef USE_DISPLAY
|
||||
void print(int x_start, int y_start, display::Display *display, Color color, const char *text,
|
||||
@@ -77,11 +73,14 @@ class Font
|
||||
inline int get_xheight() { return this->xheight_; }
|
||||
inline int get_capheight() { return this->capheight_; }
|
||||
inline int get_bpp() { return this->bpp_; }
|
||||
#ifdef USE_LVGL_FONT
|
||||
const lv_font_t *get_lv_font() const { return &this->lv_font_; }
|
||||
#endif
|
||||
|
||||
const std::vector<Glyph, RAMAllocator<Glyph>> &get_glyphs() const { return glyphs_; }
|
||||
const ConstVector<Glyph> &get_glyphs() const { return glyphs_; }
|
||||
|
||||
protected:
|
||||
std::vector<Glyph, RAMAllocator<Glyph>> glyphs_;
|
||||
ConstVector<Glyph> glyphs_;
|
||||
int baseline_;
|
||||
int height_;
|
||||
int descender_;
|
||||
@@ -89,6 +88,14 @@ class Font
|
||||
int xheight_;
|
||||
int capheight_;
|
||||
uint8_t bpp_; // bits per pixel
|
||||
#ifdef USE_LVGL_FONT
|
||||
lv_font_t lv_font_{};
|
||||
static const uint8_t *get_glyph_bitmap(const lv_font_t *font, uint32_t unicode_letter);
|
||||
static bool get_glyph_dsc_cb(const lv_font_t *font, lv_font_glyph_dsc_t *dsc, uint32_t unicode_letter, uint32_t next);
|
||||
const Glyph *get_glyph_data_(uint32_t unicode_letter);
|
||||
uint32_t last_letter_{};
|
||||
const Glyph *last_data_{};
|
||||
#endif
|
||||
};
|
||||
|
||||
} // namespace font
|
||||
|
||||
@@ -31,35 +31,83 @@ CONFIG_SCHEMA = cv.Schema(
|
||||
cv.GenerateID(CONF_LD2410_ID): cv.use_id(LD2410Component),
|
||||
cv.Optional(CONF_MOVING_DISTANCE): sensor.sensor_schema(
|
||||
device_class=DEVICE_CLASS_DISTANCE,
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_SIGNAL,
|
||||
unit_of_measurement=UNIT_CENTIMETER,
|
||||
),
|
||||
cv.Optional(CONF_STILL_DISTANCE): sensor.sensor_schema(
|
||||
device_class=DEVICE_CLASS_DISTANCE,
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_SIGNAL,
|
||||
unit_of_measurement=UNIT_CENTIMETER,
|
||||
),
|
||||
cv.Optional(CONF_MOVING_ENERGY): sensor.sensor_schema(
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_MOTION_SENSOR,
|
||||
unit_of_measurement=UNIT_PERCENT,
|
||||
),
|
||||
cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_FLASH,
|
||||
unit_of_measurement=UNIT_PERCENT,
|
||||
),
|
||||
cv.Optional(CONF_LIGHT): sensor.sensor_schema(
|
||||
device_class=DEVICE_CLASS_ILLUMINANCE,
|
||||
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_LIGHTBULB,
|
||||
),
|
||||
cv.Optional(CONF_DETECTION_DISTANCE): sensor.sensor_schema(
|
||||
device_class=DEVICE_CLASS_DISTANCE,
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_SIGNAL,
|
||||
unit_of_measurement=UNIT_CENTIMETER,
|
||||
),
|
||||
@@ -73,7 +121,13 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
|
||||
cv.Optional(CONF_MOVE_ENERGY): sensor.sensor_schema(
|
||||
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
|
||||
filters=[
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_MOTION_SENSOR,
|
||||
unit_of_measurement=UNIT_PERCENT,
|
||||
@@ -81,7 +135,13 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
|
||||
cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
|
||||
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
|
||||
filters=[
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_FLASH,
|
||||
unit_of_measurement=UNIT_PERCENT,
|
||||
|
||||
@@ -31,36 +31,84 @@ CONFIG_SCHEMA = cv.Schema(
|
||||
cv.GenerateID(CONF_LD2412_ID): cv.use_id(LD2412Component),
|
||||
cv.Optional(CONF_DETECTION_DISTANCE): sensor.sensor_schema(
|
||||
device_class=DEVICE_CLASS_DISTANCE,
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_SIGNAL,
|
||||
unit_of_measurement=UNIT_CENTIMETER,
|
||||
),
|
||||
cv.Optional(CONF_LIGHT): sensor.sensor_schema(
|
||||
device_class=DEVICE_CLASS_ILLUMINANCE,
|
||||
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_LIGHTBULB,
|
||||
unit_of_measurement=UNIT_EMPTY, # No standard unit for this light sensor
|
||||
),
|
||||
cv.Optional(CONF_MOVING_DISTANCE): sensor.sensor_schema(
|
||||
device_class=DEVICE_CLASS_DISTANCE,
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_SIGNAL,
|
||||
unit_of_measurement=UNIT_CENTIMETER,
|
||||
),
|
||||
cv.Optional(CONF_MOVING_ENERGY): sensor.sensor_schema(
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_MOTION_SENSOR,
|
||||
unit_of_measurement=UNIT_PERCENT,
|
||||
),
|
||||
cv.Optional(CONF_STILL_DISTANCE): sensor.sensor_schema(
|
||||
device_class=DEVICE_CLASS_DISTANCE,
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_SIGNAL,
|
||||
unit_of_measurement=UNIT_CENTIMETER,
|
||||
),
|
||||
cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
|
||||
filters=[{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}],
|
||||
filters=[
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_FLASH,
|
||||
unit_of_measurement=UNIT_PERCENT,
|
||||
),
|
||||
@@ -74,7 +122,13 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
|
||||
cv.Optional(CONF_MOVE_ENERGY): sensor.sensor_schema(
|
||||
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
|
||||
filters=[
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_MOTION_SENSOR,
|
||||
unit_of_measurement=UNIT_PERCENT,
|
||||
@@ -82,7 +136,13 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
|
||||
cv.Optional(CONF_STILL_ENERGY): sensor.sensor_schema(
|
||||
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
|
||||
filters=[
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)}
|
||||
{
|
||||
"timeout": {
|
||||
"timeout": cv.TimePeriod(milliseconds=1000),
|
||||
"value": "last",
|
||||
}
|
||||
},
|
||||
{"throttle_with_priority": cv.TimePeriod(milliseconds=1000)},
|
||||
],
|
||||
icon=ICON_FLASH,
|
||||
unit_of_measurement=UNIT_PERCENT,
|
||||
|
||||
@@ -52,8 +52,10 @@ static void log_invalid_parameter(const char *name, const LogString *message) {
}

static const LogString *color_mode_to_human(ColorMode color_mode) {
if (color_mode == ColorMode::UNKNOWN)
return LOG_STR("Unknown");
if (color_mode == ColorMode::ON_OFF)
return LOG_STR("On/Off");
if (color_mode == ColorMode::BRIGHTNESS)
return LOG_STR("Brightness");
if (color_mode == ColorMode::WHITE)
return LOG_STR("White");
if (color_mode == ColorMode::COLOR_TEMPERATURE)
@@ -68,7 +70,7 @@ static const LogString *color_mode_to_human(ColorMode color_mode) {
return LOG_STR("RGB + cold/warm white");
if (color_mode == ColorMode::RGB_COLOR_TEMPERATURE)
return LOG_STR("RGB + color temperature");
return LOG_STR("");
return LOG_STR("Unknown");
}

// Helper to log percentage values
@@ -52,15 +52,7 @@ from .schemas import (
from .styles import add_top_layer, styles_to_code, theme_to_code
from .touchscreens import touchscreen_schema, touchscreens_to_code
from .trigger import add_on_boot_triggers, generate_triggers
from .types import (
FontEngine,
IdleTrigger,
PlainTrigger,
lv_font_t,
lv_group_t,
lv_style_t,
lvgl_ns,
)
from .types import IdleTrigger, PlainTrigger, lv_font_t, lv_group_t, lv_style_t, lvgl_ns
from .widgets import (
LvScrActType,
Widget,
@@ -244,7 +236,6 @@ async def to_code(configs):
cg.add_global(lvgl_ns.using)
for font in helpers.esphome_fonts_used:
await cg.get_variable(font)
cg.new_Pvariable(ID(f"{font}_engine", True, type=FontEngine), MockObj(font))
default_font = config_0[df.CONF_DEFAULT_FONT]
if not lvalid.is_lv_font(default_font):
add_define(
@@ -256,7 +247,8 @@ async def to_code(configs):
type=lv_font_t.operator("ptr").operator("const"),
)
cg.new_variable(
globfont_id, MockObj(await lvalid.lv_font.process(default_font))
globfont_id,
MockObj(await lvalid.lv_font.process(default_font), "->").get_lv_font(),
)
add_define("LV_FONT_DEFAULT", df.DEFAULT_ESPHOME_FONT)
else:
@@ -1,76 +0,0 @@
|
||||
#include "lvgl_esphome.h"
|
||||
|
||||
#ifdef USE_LVGL_FONT
|
||||
namespace esphome {
|
||||
namespace lvgl {
|
||||
|
||||
static const uint8_t *get_glyph_bitmap(const lv_font_t *font, uint32_t unicode_letter) {
|
||||
auto *fe = (FontEngine *) font->dsc;
|
||||
const auto *gd = fe->get_glyph_data(unicode_letter);
|
||||
if (gd == nullptr)
|
||||
return nullptr;
|
||||
// esph_log_d(TAG, "Returning bitmap @ %X", (uint32_t)gd->data);
|
||||
|
||||
return gd->data;
|
||||
}
|
||||
|
||||
static bool get_glyph_dsc_cb(const lv_font_t *font, lv_font_glyph_dsc_t *dsc, uint32_t unicode_letter, uint32_t next) {
|
||||
auto *fe = (FontEngine *) font->dsc;
|
||||
const auto *gd = fe->get_glyph_data(unicode_letter);
|
||||
if (gd == nullptr)
|
||||
return false;
|
||||
dsc->adv_w = gd->advance;
|
||||
dsc->ofs_x = gd->offset_x;
|
||||
dsc->ofs_y = fe->height - gd->height - gd->offset_y - fe->baseline;
|
||||
dsc->box_w = gd->width;
|
||||
dsc->box_h = gd->height;
|
||||
dsc->is_placeholder = 0;
|
||||
dsc->bpp = fe->bpp;
|
||||
return true;
|
||||
}
|
||||
|
||||
FontEngine::FontEngine(font::Font *esp_font) : font_(esp_font) {
|
||||
this->bpp = esp_font->get_bpp();
|
||||
this->lv_font_.dsc = this;
|
||||
this->lv_font_.line_height = this->height = esp_font->get_height();
|
||||
this->lv_font_.base_line = this->baseline = this->lv_font_.line_height - esp_font->get_baseline();
|
||||
this->lv_font_.get_glyph_dsc = get_glyph_dsc_cb;
|
||||
this->lv_font_.get_glyph_bitmap = get_glyph_bitmap;
|
||||
this->lv_font_.subpx = LV_FONT_SUBPX_NONE;
|
||||
this->lv_font_.underline_position = -1;
|
||||
this->lv_font_.underline_thickness = 1;
|
||||
}
|
||||
|
||||
const lv_font_t *FontEngine::get_lv_font() { return &this->lv_font_; }
|
||||
|
||||
const font::GlyphData *FontEngine::get_glyph_data(uint32_t unicode_letter) {
|
||||
if (unicode_letter == last_letter_)
|
||||
return this->last_data_;
|
||||
uint8_t unicode[5];
|
||||
memset(unicode, 0, sizeof unicode);
|
||||
if (unicode_letter > 0xFFFF) {
|
||||
unicode[0] = 0xF0 + ((unicode_letter >> 18) & 0x7);
|
||||
unicode[1] = 0x80 + ((unicode_letter >> 12) & 0x3F);
|
||||
unicode[2] = 0x80 + ((unicode_letter >> 6) & 0x3F);
|
||||
unicode[3] = 0x80 + (unicode_letter & 0x3F);
|
||||
} else if (unicode_letter > 0x7FF) {
|
||||
unicode[0] = 0xE0 + ((unicode_letter >> 12) & 0xF);
|
||||
unicode[1] = 0x80 + ((unicode_letter >> 6) & 0x3F);
|
||||
unicode[2] = 0x80 + (unicode_letter & 0x3F);
|
||||
} else if (unicode_letter > 0x7F) {
|
||||
unicode[0] = 0xC0 + ((unicode_letter >> 6) & 0x1F);
|
||||
unicode[1] = 0x80 + (unicode_letter & 0x3F);
|
||||
} else {
|
||||
unicode[0] = unicode_letter;
|
||||
}
|
||||
int match_length;
|
||||
int glyph_n = this->font_->match_next_glyph(unicode, &match_length);
|
||||
if (glyph_n < 0)
|
||||
return nullptr;
|
||||
this->last_data_ = this->font_->get_glyphs()[glyph_n].get_glyph_data();
|
||||
this->last_letter_ = unicode_letter;
|
||||
return this->last_data_;
|
||||
}
|
||||
} // namespace lvgl
|
||||
} // namespace esphome
|
||||
#endif // USES_LVGL_FONT
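An illustrative aside, not part of the changeset: the deleted FontEngine::get_glyph_data() above re-encodes the LVGL unicode codepoint into a UTF-8 byte sequence before asking the ESPHome font for a matching glyph. A minimal Python sketch of the same bit manipulation, cross-checked against the standard library encoder (the function name is made up for illustration):

def codepoint_to_utf8(cp: int) -> bytes:
    # Mirrors the branch structure of the removed C++ code: 1- to 4-byte sequences.
    if cp > 0xFFFF:
        return bytes([0xF0 | ((cp >> 18) & 0x07),
                      0x80 | ((cp >> 12) & 0x3F),
                      0x80 | ((cp >> 6) & 0x3F),
                      0x80 | (cp & 0x3F)])
    if cp > 0x7FF:
        return bytes([0xE0 | ((cp >> 12) & 0x0F),
                      0x80 | ((cp >> 6) & 0x3F),
                      0x80 | (cp & 0x3F)])
    if cp > 0x7F:
        return bytes([0xC0 | ((cp >> 6) & 0x1F),
                      0x80 | (cp & 0x3F)])
    return bytes([cp])

# Sanity check against the built-in encoder.
for cp in (0x41, 0xE9, 0x20AC, 0x1F600):
    assert codepoint_to_utf8(cp) == chr(cp).encode("utf-8")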
|
||||
@@ -493,6 +493,7 @@ class LvFont(LValidator):
|
||||
return LV_FONTS
|
||||
if is_lv_font(value):
|
||||
return lv_builtin_font(value)
|
||||
add_lv_use("font")
|
||||
fontval = cv.use_id(Font)(value)
|
||||
esphome_fonts_used.add(fontval)
|
||||
return requires_component("font")(fontval)
|
||||
@@ -502,7 +503,9 @@ class LvFont(LValidator):
|
||||
async def process(self, value, args=()):
|
||||
if is_lv_font(value):
|
||||
return literal(f"&lv_font_{value}")
|
||||
return literal(f"{value}_engine->get_lv_font()")
|
||||
if isinstance(value, str):
|
||||
return literal(f"{value}")
|
||||
return await super().process(value, args)
|
||||
|
||||
|
||||
lv_font = LvFont()
|
||||
|
||||
@@ -50,6 +50,14 @@ static const display::ColorBitness LV_BITNESS = display::ColorBitness::COLOR_BIT
|
||||
static const display::ColorBitness LV_BITNESS = display::ColorBitness::COLOR_BITNESS_332;
|
||||
#endif // LV_COLOR_DEPTH
|
||||
|
||||
#ifdef USE_LVGL_FONT
|
||||
inline void lv_obj_set_style_text_font(lv_obj_t *obj, const font::Font *font, lv_style_selector_t part) {
|
||||
lv_obj_set_style_text_font(obj, font->get_lv_font(), part);
|
||||
}
|
||||
inline void lv_style_set_text_font(lv_style_t *style, const font::Font *font) {
|
||||
lv_style_set_text_font(style, font->get_lv_font());
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_LVGL_IMAGE
|
||||
// Shortcut / overload, so that the source of an image can easily be updated
|
||||
// from within a lambda.
|
||||
@@ -134,24 +142,6 @@ template<typename... Ts> class ObjUpdateAction : public Action<Ts...> {
|
||||
protected:
|
||||
std::function<void(Ts...)> lamb_;
|
||||
};
|
||||
#ifdef USE_LVGL_FONT
|
||||
class FontEngine {
|
||||
public:
|
||||
FontEngine(font::Font *esp_font);
|
||||
const lv_font_t *get_lv_font();
|
||||
|
||||
const font::GlyphData *get_glyph_data(uint32_t unicode_letter);
|
||||
uint16_t baseline{};
|
||||
uint16_t height{};
|
||||
uint8_t bpp{};
|
||||
|
||||
protected:
|
||||
font::Font *font_{};
|
||||
uint32_t last_letter_{};
|
||||
const font::GlyphData *last_data_{};
|
||||
lv_font_t lv_font_{};
|
||||
};
|
||||
#endif // USE_LVGL_FONT
|
||||
#ifdef USE_LVGL_ANIMIMG
|
||||
void lv_animimg_stop(lv_obj_t *obj);
|
||||
#endif // USE_LVGL_ANIMIMG
|
||||
|
||||
@@ -45,7 +45,6 @@ lv_coord_t = cg.global_ns.namespace("lv_coord_t")
|
||||
lv_event_code_t = cg.global_ns.enum("lv_event_code_t")
|
||||
lv_indev_type_t = cg.global_ns.enum("lv_indev_type_t")
|
||||
lv_key_t = cg.global_ns.enum("lv_key_t")
|
||||
FontEngine = lvgl_ns.class_("FontEngine")
|
||||
PlainTrigger = esphome_ns.class_("Trigger<>", automation.Trigger.template())
|
||||
DrawEndTrigger = esphome_ns.class_(
|
||||
"Trigger<uint32_t, uint32_t>", automation.Trigger.template(cg.uint32, cg.uint32)
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
#include "automation.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace number {
|
||||
namespace esphome::number {
|
||||
|
||||
static const char *const TAG = "number.automation";
|
||||
|
||||
@@ -52,5 +51,4 @@ void ValueRangeTrigger::on_state_(float state) {
|
||||
this->rtc_.save(&in_range);
|
||||
}
|
||||
|
||||
} // namespace number
|
||||
} // namespace esphome
|
||||
} // namespace esphome::number
|
||||
|
||||
@@ -4,8 +4,7 @@
|
||||
#include "esphome/core/automation.h"
|
||||
#include "esphome/core/component.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace number {
|
||||
namespace esphome::number {
|
||||
|
||||
class NumberStateTrigger : public Trigger<float> {
|
||||
public:
|
||||
@@ -91,5 +90,4 @@ template<typename... Ts> class NumberInRangeCondition : public Condition<Ts...>
|
||||
float max_{NAN};
|
||||
};
|
||||
|
||||
} // namespace number
|
||||
} // namespace esphome
|
||||
} // namespace esphome::number
|
||||
|
||||
@@ -3,8 +3,7 @@
|
||||
#include "esphome/core/controller_registry.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace number {
|
||||
namespace esphome::number {
|
||||
|
||||
static const char *const TAG = "number";
|
||||
|
||||
@@ -43,5 +42,4 @@ void Number::add_on_state_callback(std::function<void(float)> &&callback) {
|
||||
this->state_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
} // namespace number
|
||||
} // namespace esphome
|
||||
} // namespace esphome::number
|
||||
|
||||
@@ -6,8 +6,7 @@
|
||||
#include "number_call.h"
|
||||
#include "number_traits.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace number {
|
||||
namespace esphome::number {
|
||||
|
||||
class Number;
|
||||
void log_number(const char *tag, const char *prefix, const char *type, Number *obj);
|
||||
@@ -53,5 +52,4 @@ class Number : public EntityBase {
|
||||
CallbackManager<void(float)> state_callback_;
|
||||
};
|
||||
|
||||
} // namespace number
|
||||
} // namespace esphome
|
||||
} // namespace esphome::number
|
||||
|
||||
@@ -2,8 +2,7 @@
|
||||
#include "number.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace number {
|
||||
namespace esphome::number {
|
||||
|
||||
static const char *const TAG = "number";
|
||||
|
||||
@@ -125,5 +124,4 @@ void NumberCall::perform() {
|
||||
this->parent_->control(target_value);
|
||||
}
|
||||
|
||||
} // namespace number
|
||||
} // namespace esphome
|
||||
} // namespace esphome::number
|
||||
|
||||
@@ -4,8 +4,7 @@
|
||||
#include "esphome/core/log.h"
|
||||
#include "number_traits.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace number {
|
||||
namespace esphome::number {
|
||||
|
||||
class Number;
|
||||
|
||||
@@ -44,5 +43,4 @@ class NumberCall {
|
||||
bool cycle_;
|
||||
};
|
||||
|
||||
} // namespace number
|
||||
} // namespace esphome
|
||||
} // namespace esphome::number
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
#include "esphome/core/log.h"
|
||||
#include "number_traits.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace number {
|
||||
namespace esphome::number {
|
||||
|
||||
static const char *const TAG = "number";
|
||||
|
||||
} // namespace number
|
||||
} // namespace esphome
|
||||
} // namespace esphome::number
|
||||
|
||||
@@ -3,8 +3,7 @@
|
||||
#include "esphome/core/entity_base.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace number {
|
||||
namespace esphome::number {
|
||||
|
||||
enum NumberMode : uint8_t {
|
||||
NUMBER_MODE_AUTO = 0,
|
||||
@@ -35,5 +34,4 @@ class NumberTraits : public EntityBase_DeviceClass, public EntityBase_UnitOfMeas
|
||||
NumberMode mode_{NUMBER_MODE_AUTO};
|
||||
};
|
||||
|
||||
} // namespace number
|
||||
} // namespace esphome
|
||||
} // namespace esphome::number
|
||||
|
||||
@@ -1,9 +1,14 @@
|
||||
import logging
|
||||
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import time as time_
|
||||
from esphome.config_helpers import merge_config
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ID,
|
||||
CONF_PLATFORM,
|
||||
CONF_SERVERS,
|
||||
CONF_TIME,
|
||||
PLATFORM_BK72XX,
|
||||
PLATFORM_ESP32,
|
||||
PLATFORM_ESP8266,
|
||||
@@ -12,13 +17,74 @@ from esphome.const import (
|
||||
PLATFORM_RTL87XX,
|
||||
)
|
||||
from esphome.core import CORE
|
||||
import esphome.final_validate as fv
|
||||
from esphome.types import ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ["network"]
|
||||
|
||||
CONF_SNTP = "sntp"
|
||||
|
||||
sntp_ns = cg.esphome_ns.namespace("sntp")
|
||||
SNTPComponent = sntp_ns.class_("SNTPComponent", time_.RealTimeClock)
|
||||
|
||||
DEFAULT_SERVERS = ["0.pool.ntp.org", "1.pool.ntp.org", "2.pool.ntp.org"]
|
||||
|
||||
|
||||
def _sntp_final_validate(config: ConfigType) -> None:
|
||||
"""Merge multiple SNTP instances into one, similar to OTA merging behavior."""
|
||||
full_conf = fv.full_config.get()
|
||||
time_confs = full_conf.get(CONF_TIME, [])
|
||||
|
||||
sntp_configs: list[ConfigType] = []
|
||||
other_time_configs: list[ConfigType] = []
|
||||
|
||||
for time_conf in time_confs:
|
||||
if time_conf.get(CONF_PLATFORM) == CONF_SNTP:
|
||||
sntp_configs.append(time_conf)
|
||||
else:
|
||||
other_time_configs.append(time_conf)
|
||||
|
||||
if len(sntp_configs) <= 1:
|
||||
return
|
||||
|
||||
# Merge all SNTP configs into the first one
|
||||
merged = sntp_configs[0]
|
||||
for sntp_conf in sntp_configs[1:]:
|
||||
# Validate that IDs are consistent if manually specified
|
||||
if merged[CONF_ID].is_manual and sntp_conf[CONF_ID].is_manual:
|
||||
raise cv.Invalid(
|
||||
f"Found multiple SNTP configurations but {CONF_ID} is inconsistent"
|
||||
)
|
||||
merged = merge_config(merged, sntp_conf)
|
||||
|
||||
# Deduplicate servers while preserving order
|
||||
servers = merged[CONF_SERVERS]
|
||||
unique_servers = list(dict.fromkeys(servers))
|
||||
|
||||
# Warn if we're dropping servers due to 3-server limit
|
||||
if len(unique_servers) > 3:
|
||||
dropped = unique_servers[3:]
|
||||
unique_servers = unique_servers[:3]
|
||||
_LOGGER.warning(
|
||||
"SNTP supports maximum 3 servers. Dropped excess server(s): %s",
|
||||
dropped,
|
||||
)
|
||||
|
||||
merged[CONF_SERVERS] = unique_servers
|
||||
|
||||
_LOGGER.warning(
|
||||
"Found and merged %d SNTP time configurations into one instance",
|
||||
len(sntp_configs),
|
||||
)
|
||||
|
||||
# Replace time configs with merged SNTP + other time platforms
|
||||
other_time_configs.append(merged)
|
||||
full_conf[CONF_TIME] = other_time_configs
|
||||
fv.full_config.set(full_conf)
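An illustrative aside, not part of the changeset: the merge above flattens the combined server lists, removes duplicates while preserving order, and keeps at most three servers. A small standalone Python sketch of that behaviour (the function name is illustrative):

def merge_sntp_servers(*server_lists: list[str], limit: int = 3) -> tuple[list[str], list[str]]:
    """Return (kept, dropped): order-preserving dedup, capped at `limit` servers."""
    combined = [s for servers in server_lists for s in servers]
    unique = list(dict.fromkeys(combined))  # dict preserves insertion order
    return unique[:limit], unique[limit:]

kept, dropped = merge_sntp_servers(
    ["192.168.1.1", "pool.ntp.org"], ["pool.ntp.org", "192.168.1.2", "pool2.ntp.org"]
)
assert kept == ["192.168.1.1", "pool.ntp.org", "192.168.1.2"]
assert dropped == ["pool2.ntp.org"]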
|
||||
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
time_.TIME_SCHEMA.extend(
|
||||
{
|
||||
@@ -40,6 +106,8 @@ CONFIG_SCHEMA = cv.All(
|
||||
),
|
||||
)
|
||||
|
||||
FINAL_VALIDATE_SCHEMA = _sntp_final_validate
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
servers = config[CONF_SERVERS]
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from logging import getLogger
|
||||
import math
|
||||
import re
|
||||
|
||||
@@ -35,6 +36,8 @@ from esphome.core import CORE, ID
|
||||
import esphome.final_validate as fv
|
||||
from esphome.yaml_util import make_data_base
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
uart_ns = cg.esphome_ns.namespace("uart")
|
||||
UARTComponent = uart_ns.class_("UARTComponent")
|
||||
@@ -130,6 +133,21 @@ def validate_host_config(config):
|
||||
return config
|
||||
|
||||
|
||||
def validate_rx_buffer_size(config):
|
||||
if CORE.is_esp32:
|
||||
# ESP32 UART hardware FIFO is 128 bytes (LP UART is 16 bytes, but we use 128 as safe minimum)
|
||||
# rx_buffer_size must be greater than the hardware FIFO length
|
||||
min_buffer_size = 128
|
||||
if config[CONF_RX_BUFFER_SIZE] <= min_buffer_size:
|
||||
_LOGGER.warning(
|
||||
"UART rx_buffer_size (%d bytes) is too small and must be greater than the hardware "
|
||||
"FIFO size (%d bytes). The buffer size will be automatically adjusted at runtime.",
|
||||
config[CONF_RX_BUFFER_SIZE],
|
||||
min_buffer_size,
|
||||
)
|
||||
return config
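An illustrative aside, not part of the changeset: this validator only warns at configuration time; on ESP32 the IDF implementation (see the uart_component_esp_idf.cpp hunk further down) later doubles a too-small buffer from the FIFO length at runtime. A rough Python sketch of those numbers, assuming the 128-byte minimum used above:

# Illustrative only: 128 B mirrors the minimum FIFO size assumed by the validator.
HW_FIFO_LEN = 128

def effective_rx_buffer(requested: int) -> int:
    # Mirrors the runtime adjustment: too-small buffers become twice the FIFO length.
    if requested <= HW_FIFO_LEN:
        print(f"warning: rx_buffer_size ({requested} B) <= FIFO ({HW_FIFO_LEN} B); adjusting")
        return HW_FIFO_LEN * 2
    return requested

assert effective_rx_buffer(64) == 256
assert effective_rx_buffer(512) == 512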
|
||||
|
||||
|
||||
def _uart_declare_type(value):
|
||||
if CORE.is_esp8266:
|
||||
return cv.declare_id(ESP8266UartComponent)(value)
|
||||
@@ -247,6 +265,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
).extend(cv.COMPONENT_SCHEMA),
|
||||
cv.has_at_least_one_key(CONF_TX_PIN, CONF_RX_PIN, CONF_PORT),
|
||||
validate_host_config,
|
||||
validate_rx_buffer_size,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -56,11 +56,19 @@ uint32_t ESP8266UartComponent::get_config() {
|
||||
}
|
||||
|
||||
void ESP8266UartComponent::setup() {
|
||||
if (this->rx_pin_) {
|
||||
this->rx_pin_->setup();
|
||||
}
|
||||
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
|
||||
this->tx_pin_->setup();
|
||||
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
|
||||
if (!pin) {
|
||||
return;
|
||||
}
|
||||
const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
|
||||
if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
|
||||
pin->setup();
|
||||
}
|
||||
};
|
||||
|
||||
setup_pin_if_needed(this->rx_pin_);
|
||||
if (this->rx_pin_ != this->tx_pin_) {
|
||||
setup_pin_if_needed(this->tx_pin_);
|
||||
}
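An illustrative aside, not part of the changeset: the new setup_pin_if_needed helper only reconfigures a pin when one of the open-drain, pull-up or pull-down flags is actually set, leaving pins at their defaults untouched. A rough Python analogue of the flag check using enum.Flag (all names here are illustrative, not ESPHome APIs):

from enum import Flag, auto

class GpioFlags(Flag):
    NONE = 0
    OPEN_DRAIN = auto()
    PULLUP = auto()
    PULLDOWN = auto()

SPECIAL = GpioFlags.OPEN_DRAIN | GpioFlags.PULLUP | GpioFlags.PULLDOWN

def needs_setup(flags: GpioFlags) -> bool:
    # Equivalent of (pin->get_flags() & mask) != gpio::Flags::FLAG_NONE
    return bool(flags & SPECIAL)

assert needs_setup(GpioFlags.PULLUP)
assert not needs_setup(GpioFlags.NONE)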
|
||||
|
||||
// Use Arduino HardwareSerial UARTs if all used pins match the ones
|
||||
|
||||
@@ -91,6 +91,16 @@ void IDFUARTComponent::setup() {
|
||||
this->uart_num_ = static_cast<uart_port_t>(next_uart_num++);
|
||||
this->lock_ = xSemaphoreCreateMutex();
|
||||
|
||||
#if (SOC_UART_LP_NUM >= 1)
|
||||
size_t fifo_len = ((this->uart_num_ < SOC_UART_HP_NUM) ? SOC_UART_FIFO_LEN : SOC_LP_UART_FIFO_LEN);
|
||||
#else
|
||||
size_t fifo_len = SOC_UART_FIFO_LEN;
|
||||
#endif
|
||||
if (this->rx_buffer_size_ <= fifo_len) {
|
||||
ESP_LOGW(TAG, "rx_buffer_size is too small, must be greater than %zu", fifo_len);
|
||||
this->rx_buffer_size_ = fifo_len * 2;
|
||||
}
|
||||
|
||||
xSemaphoreTake(this->lock_, portMAX_DELAY);
|
||||
|
||||
this->load_settings(false);
|
||||
@@ -123,11 +133,19 @@ void IDFUARTComponent::load_settings(bool dump_config) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this->rx_pin_) {
|
||||
this->rx_pin_->setup();
|
||||
}
|
||||
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
|
||||
this->tx_pin_->setup();
|
||||
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
|
||||
if (!pin) {
|
||||
return;
|
||||
}
|
||||
const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
|
||||
if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
|
||||
pin->setup();
|
||||
}
|
||||
};
|
||||
|
||||
setup_pin_if_needed(this->rx_pin_);
|
||||
if (this->rx_pin_ != this->tx_pin_) {
|
||||
setup_pin_if_needed(this->tx_pin_);
|
||||
}
|
||||
|
||||
int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;
|
||||
@@ -237,8 +255,12 @@ void IDFUARTComponent::set_rx_timeout(size_t rx_timeout) {
|
||||
|
||||
void IDFUARTComponent::write_array(const uint8_t *data, size_t len) {
|
||||
xSemaphoreTake(this->lock_, portMAX_DELAY);
|
||||
uart_write_bytes(this->uart_num_, data, len);
|
||||
int32_t write_len = uart_write_bytes(this->uart_num_, data, len);
|
||||
xSemaphoreGive(this->lock_);
|
||||
if (write_len != (int32_t) len) {
|
||||
ESP_LOGW(TAG, "uart_write_bytes failed: %d != %zu", write_len, len);
|
||||
this->mark_failed();
|
||||
}
|
||||
#ifdef USE_UART_DEBUGGER
|
||||
for (size_t i = 0; i < len; i++) {
|
||||
this->debug_callback_.call(UART_DIRECTION_TX, data[i]);
|
||||
@@ -267,6 +289,7 @@ bool IDFUARTComponent::peek_byte(uint8_t *data) {
|
||||
|
||||
bool IDFUARTComponent::read_array(uint8_t *data, size_t len) {
|
||||
size_t length_to_read = len;
|
||||
int32_t read_len = 0;
|
||||
if (!this->check_read_timeout_(len))
|
||||
return false;
|
||||
xSemaphoreTake(this->lock_, portMAX_DELAY);
|
||||
@@ -277,25 +300,31 @@ bool IDFUARTComponent::read_array(uint8_t *data, size_t len) {
|
||||
this->has_peek_ = false;
|
||||
}
|
||||
if (length_to_read > 0)
|
||||
uart_read_bytes(this->uart_num_, data, length_to_read, 20 / portTICK_PERIOD_MS);
|
||||
read_len = uart_read_bytes(this->uart_num_, data, length_to_read, 20 / portTICK_PERIOD_MS);
|
||||
xSemaphoreGive(this->lock_);
|
||||
#ifdef USE_UART_DEBUGGER
|
||||
for (size_t i = 0; i < len; i++) {
|
||||
this->debug_callback_.call(UART_DIRECTION_RX, data[i]);
|
||||
}
|
||||
#endif
|
||||
return true;
|
||||
return read_len == (int32_t) length_to_read;
|
||||
}
|
||||
|
||||
int IDFUARTComponent::available() {
|
||||
size_t available;
|
||||
size_t available = 0;
|
||||
esp_err_t err;
|
||||
|
||||
xSemaphoreTake(this->lock_, portMAX_DELAY);
|
||||
uart_get_buffered_data_len(this->uart_num_, &available);
|
||||
if (this->has_peek_)
|
||||
available++;
|
||||
err = uart_get_buffered_data_len(this->uart_num_, &available);
|
||||
xSemaphoreGive(this->lock_);
|
||||
|
||||
if (err != ESP_OK) {
|
||||
ESP_LOGW(TAG, "uart_get_buffered_data_len failed: %s", esp_err_to_name(err));
|
||||
this->mark_failed();
|
||||
}
|
||||
if (this->has_peek_) {
|
||||
available++;
|
||||
}
|
||||
return available;
|
||||
}
|
||||
|
||||
|
||||
@@ -53,7 +53,7 @@ void LibreTinyUARTComponent::setup() {
|
||||
|
||||
auto shouldFallbackToSoftwareSerial = [&]() -> bool {
|
||||
auto hasFlags = [](InternalGPIOPin *pin, const gpio::Flags mask) -> bool {
|
||||
return pin && pin->get_flags() & mask != gpio::Flags::FLAG_NONE;
|
||||
return pin && (pin->get_flags() & mask) != gpio::Flags::FLAG_NONE;
|
||||
};
|
||||
if (hasFlags(this->tx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN) ||
|
||||
hasFlags(this->rx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN)) {
|
||||
|
||||
@@ -52,11 +52,19 @@ uint16_t RP2040UartComponent::get_config() {
|
||||
}
|
||||
|
||||
void RP2040UartComponent::setup() {
|
||||
if (this->rx_pin_) {
|
||||
this->rx_pin_->setup();
|
||||
}
|
||||
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
|
||||
this->tx_pin_->setup();
|
||||
auto setup_pin_if_needed = [](InternalGPIOPin *pin) {
|
||||
if (!pin) {
|
||||
return;
|
||||
}
|
||||
const auto mask = gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN;
|
||||
if ((pin->get_flags() & mask) != gpio::Flags::FLAG_NONE) {
|
||||
pin->setup();
|
||||
}
|
||||
};
|
||||
|
||||
setup_pin_if_needed(this->rx_pin_);
|
||||
if (this->rx_pin_ != this->tx_pin_) {
|
||||
setup_pin_if_needed(this->tx_pin_);
|
||||
}
|
||||
|
||||
uint16_t config = get_config();
|
||||
|
||||
@@ -1,10 +1,17 @@
|
||||
import logging
|
||||
|
||||
import esphome.codegen as cg
|
||||
from esphome.components.esp32 import add_idf_component
|
||||
from esphome.components.ota import BASE_OTA_SCHEMA, OTAComponent, ota_to_code
|
||||
from esphome.config_helpers import merge_config
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ID
|
||||
from esphome.const import CONF_ID, CONF_OTA, CONF_PLATFORM, CONF_WEB_SERVER
|
||||
from esphome.core import CORE, coroutine_with_priority
|
||||
from esphome.coroutine import CoroPriority
|
||||
import esphome.final_validate as fv
|
||||
from esphome.types import ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
DEPENDENCIES = ["network", "web_server_base"]
|
||||
@@ -12,6 +19,53 @@ DEPENDENCIES = ["network", "web_server_base"]
|
||||
web_server_ns = cg.esphome_ns.namespace("web_server")
|
||||
WebServerOTAComponent = web_server_ns.class_("WebServerOTAComponent", OTAComponent)
|
||||
|
||||
|
||||
def _web_server_ota_final_validate(config: ConfigType) -> None:
|
||||
"""Merge multiple web_server OTA instances into one.
|
||||
|
||||
Multiple web_server OTA instances register duplicate HTTP handlers for /update,
|
||||
causing undefined behavior. Merge them into a single instance.
|
||||
"""
|
||||
full_conf = fv.full_config.get()
|
||||
ota_confs = full_conf.get(CONF_OTA, [])
|
||||
|
||||
web_server_ota_configs: list[ConfigType] = []
|
||||
other_ota_configs: list[ConfigType] = []
|
||||
|
||||
for ota_conf in ota_confs:
|
||||
if ota_conf.get(CONF_PLATFORM) == CONF_WEB_SERVER:
|
||||
web_server_ota_configs.append(ota_conf)
|
||||
else:
|
||||
other_ota_configs.append(ota_conf)
|
||||
|
||||
if len(web_server_ota_configs) <= 1:
|
||||
return
|
||||
|
||||
# Merge all web_server OTA configs into the first one
|
||||
merged = web_server_ota_configs[0]
|
||||
for ota_conf in web_server_ota_configs[1:]:
|
||||
# Validate that IDs are consistent if manually specified
|
||||
if (
|
||||
merged[CONF_ID].is_manual
|
||||
and ota_conf[CONF_ID].is_manual
|
||||
and merged[CONF_ID] != ota_conf[CONF_ID]
|
||||
):
|
||||
raise cv.Invalid(
|
||||
f"Found multiple web_server OTA configurations but {CONF_ID} is inconsistent"
|
||||
)
|
||||
merged = merge_config(merged, ota_conf)
|
||||
|
||||
_LOGGER.warning(
|
||||
"Found and merged %d web_server OTA configurations into one instance",
|
||||
len(web_server_ota_configs),
|
||||
)
|
||||
|
||||
# Replace OTA configs with merged web_server + other OTA platforms
|
||||
other_ota_configs.append(merged)
|
||||
full_conf[CONF_OTA] = other_ota_configs
|
||||
fv.full_config.set(full_conf)
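An illustrative aside, not part of the changeset: the merge above is rejected only when both instances carry manually chosen IDs that differ; auto-generated IDs always merge. A compact Python sketch of that rule (plain strings and booleans stand in for ESPHome's ID objects):

def ids_conflict(a_id: str, a_manual: bool, b_id: str, b_manual: bool) -> bool:
    # Mirrors: both IDs manual AND different -> reject the merge.
    return a_manual and b_manual and a_id != b_id

assert not ids_conflict("ota_web_1", False, "ota_web_2", False)   # auto IDs: merge
assert not ids_conflict("ota_web", True, "ota_web", True)         # same manual ID: merge
assert ids_conflict("ota_web_1", True, "ota_web_2", True)         # inconsistent: error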
|
||||
|
||||
|
||||
CONFIG_SCHEMA = (
|
||||
cv.Schema(
|
||||
{
|
||||
@@ -22,6 +76,8 @@ CONFIG_SCHEMA = (
|
||||
.extend(cv.COMPONENT_SCHEMA)
|
||||
)
|
||||
|
||||
FINAL_VALIDATE_SCHEMA = _web_server_ota_final_validate
|
||||
|
||||
|
||||
@coroutine_with_priority(CoroPriority.WEB_SERVER_OTA)
|
||||
async def to_code(config):
|
||||
|
||||
@@ -489,10 +489,18 @@ AsyncEventSourceResponse::AsyncEventSourceResponse(const AsyncWebServerRequest *
|
||||
|
||||
void AsyncEventSourceResponse::destroy(void *ptr) {
|
||||
auto *rsp = static_cast<AsyncEventSourceResponse *>(ptr);
|
||||
ESP_LOGD(TAG, "Event source connection closed (fd: %d)", rsp->fd_.load());
|
||||
// Mark as dead by setting fd to 0 - will be cleaned up in the main loop
|
||||
rsp->fd_.store(0);
|
||||
// Note: We don't delete or remove from set here to avoid race conditions
|
||||
int fd = rsp->fd_.exchange(0); // Atomically get and clear fd
|
||||
|
||||
if (fd > 0) {
|
||||
ESP_LOGD(TAG, "Event source connection closed (fd: %d)", fd);
|
||||
// Immediately shut down the socket to prevent lwIP from delivering more data
|
||||
// This prevents "recv_tcp: recv for wrong pcb!" assertions when the TCP stack
|
||||
// tries to deliver queued data after the session is marked as dead
|
||||
// See: https://github.com/esphome/esphome/issues/11936
|
||||
shutdown(fd, SHUT_RDWR);
|
||||
// Note: We don't close() the socket - httpd owns it and will close it
|
||||
}
|
||||
// Session will be cleaned up in the main loop to avoid race conditions
|
||||
}
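An illustrative aside, not part of the changeset: the important change above is that the descriptor is read and cleared in a single atomic exchange, so only one caller can ever observe the live fd and issue the shutdown. A simplified Python sketch of the same get-and-clear idiom, with a lock standing in for std::atomic (illustrative only):

import threading

class EventSourceSession:
    def __init__(self, fd: int) -> None:
        self._fd = fd
        self._lock = threading.Lock()

    def destroy(self) -> None:
        with self._lock:                     # stands in for the atomic exchange
            fd, self._fd = self._fd, 0
        if fd > 0:
            print(f"closing connection (fd: {fd})")
            # the real code calls shutdown(fd, SHUT_RDWR) here; httpd still owns close()

session = EventSourceSession(fd=7)
session.destroy()
session.destroy()  # second call sees fd == 0 and does nothing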
|
||||
|
||||
// helper for allowing only unique entries in the queue
|
||||
|
||||
@@ -12,7 +12,6 @@ from esphome.components.network import (
|
||||
from esphome.components.psram import is_guaranteed as psram_is_guaranteed
|
||||
from esphome.config_helpers import filter_source_files_from_platform
|
||||
import esphome.config_validation as cv
|
||||
from esphome.config_validation import only_with_esp_idf
|
||||
from esphome.const import (
|
||||
CONF_AP,
|
||||
CONF_BSSID,
|
||||
@@ -352,7 +351,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
single=True
|
||||
),
|
||||
cv.Optional(CONF_USE_PSRAM): cv.All(
|
||||
only_with_esp_idf, cv.requires_component("psram"), cv.boolean
|
||||
cv.only_on_esp32, cv.requires_component("psram"), cv.boolean
|
||||
),
|
||||
}
|
||||
),
|
||||
|
||||
@@ -111,6 +111,23 @@ template<> constexpr int64_t byteswap(int64_t n) { return __builtin_bswap64(n);
|
||||
/// @name Container utilities
|
||||
///@{
|
||||
|
||||
/// Lightweight read-only view over a const array stored in RODATA (will typically be in flash memory)
|
||||
/// Avoids copying data from flash to RAM by keeping a pointer to the flash data.
|
||||
/// Similar to std::span but with minimal overhead for embedded systems.
|
||||
|
||||
template<typename T> class ConstVector {
|
||||
public:
|
||||
constexpr ConstVector(const T *data, size_t size) : data_(data), size_(size) {}
|
||||
|
||||
const constexpr T &operator[](size_t i) const { return data_[i]; }
|
||||
constexpr size_t size() const { return size_; }
|
||||
constexpr bool empty() const { return size_ == 0; }
|
||||
|
||||
protected:
|
||||
const T *data_;
|
||||
size_t size_;
|
||||
};
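An illustrative aside, not part of the changeset: ConstVector above is essentially a read-only (pointer, length) view with no copy and no bounds checking. A rough Python analogy of the same idea, purely for readers less familiar with C++ (the real type is the template above):

class ConstView:
    """Read-only view over an existing sequence; no copy is made."""

    def __init__(self, data, size):
        self._data = data
        self._size = size

    def __getitem__(self, i):
        return self._data[i]

    def __len__(self):
        return self._size

    def empty(self):
        return self._size == 0

table = (10, 20, 30)          # stands in for a const array kept in flash
view = ConstView(table, len(table))
assert view[1] == 20 and len(view) == 3 and not view.empty()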
|
||||
|
||||
/// Minimal static vector - saves memory by avoiding std::vector overhead
|
||||
template<typename T, size_t N> class StaticVector {
|
||||
public:
|
||||
|
||||
@@ -609,13 +609,12 @@ uint64_t Scheduler::millis_64_(uint32_t now) {
|
||||
if (now < last && (last - now) > HALF_MAX_UINT32) {
|
||||
this->millis_major_++;
|
||||
major++;
|
||||
this->last_millis_ = now;
|
||||
#ifdef ESPHOME_DEBUG_SCHEDULER
|
||||
ESP_LOGD(TAG, "Detected true 32-bit rollover at %" PRIu32 "ms (was %" PRIu32 ")", now, last);
|
||||
#endif /* ESPHOME_DEBUG_SCHEDULER */
|
||||
}
|
||||
|
||||
// Only update if time moved forward
|
||||
if (now > last) {
|
||||
} else if (now > last) {
|
||||
// Only update if time moved forward
|
||||
this->last_millis_ = now;
|
||||
}
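An illustrative aside, not part of the changeset: the restructured branch writes last_millis_ exactly once per call, either when a genuine 32-bit wrap is detected or when time simply moved forward. A small Python sketch of extending a wrapping 32-bit millisecond counter to 64 bits under that rule (the epoch bookkeeping is simplified relative to the real scheduler):

HALF_MAX_UINT32 = 0x8000_0000
TWO_POW_32 = 0x1_0000_0000

class Millis64:
    def __init__(self) -> None:
        self.major = 0        # number of 32-bit rollovers observed
        self.last = 0         # last 32-bit reading

    def update(self, now: int) -> int:
        if now < self.last and (self.last - now) > HALF_MAX_UINT32:
            self.major += 1   # true rollover: counter wrapped past 2^32
            self.last = now
        elif now > self.last:
            self.last = now   # otherwise only move forward
        return self.major * TWO_POW_32 + self.last

m = Millis64()
assert m.update(0xFFFF_FFF0) == 0xFFFF_FFF0
assert m.update(0x10) == 0x1_0000_0010   # wrapped: counted as a new epoch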
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
pylint==4.0.2
pylint==4.0.3
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
ruff==0.14.4 # also change in .pre-commit-config.yaml when updating
ruff==0.14.5 # also change in .pre-commit-config.yaml when updating
pyupgrade==3.21.1 # also change in .pre-commit-config.yaml when updating
pre-commit

@@ -1,6 +1,18 @@
|
||||
"""Tests for the web_server OTA platform."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from esphome import config_validation as cv
|
||||
from esphome.components.web_server.ota import _web_server_ota_final_validate
|
||||
from esphome.const import CONF_ID, CONF_OTA, CONF_PLATFORM, CONF_WEB_SERVER
|
||||
from esphome.core import ID
|
||||
import esphome.final_validate as fv
|
||||
|
||||
|
||||
def test_web_server_ota_generated(generate_main: Callable[[str], str]) -> None:
|
||||
@@ -100,3 +112,144 @@ def test_web_server_ota_esp8266(generate_main: Callable[[str], str]) -> None:
|
||||
# Check web server OTA component is present
|
||||
assert "WebServerOTAComponent" in main_cpp
|
||||
assert "web_server::WebServerOTAComponent" in main_cpp
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("ota_configs", "expected_count", "warning_expected"),
|
||||
[
|
||||
pytest.param(
|
||||
[
|
||||
{
|
||||
CONF_PLATFORM: CONF_WEB_SERVER,
|
||||
CONF_ID: ID("ota_web", is_manual=False),
|
||||
}
|
||||
],
|
||||
1,
|
||||
False,
|
||||
id="single_instance_no_merge",
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
{
|
||||
CONF_PLATFORM: CONF_WEB_SERVER,
|
||||
CONF_ID: ID("ota_web_1", is_manual=False),
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_WEB_SERVER,
|
||||
CONF_ID: ID("ota_web_2", is_manual=False),
|
||||
},
|
||||
],
|
||||
1,
|
||||
True,
|
||||
id="two_instances_merged",
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
{
|
||||
CONF_PLATFORM: CONF_WEB_SERVER,
|
||||
CONF_ID: ID("ota_web_1", is_manual=False),
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: "esphome",
|
||||
CONF_ID: ID("ota_esphome", is_manual=False),
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_WEB_SERVER,
|
||||
CONF_ID: ID("ota_web_2", is_manual=False),
|
||||
},
|
||||
],
|
||||
2,
|
||||
True,
|
||||
id="mixed_platforms_web_server_merged",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_web_server_ota_instance_merging(
|
||||
ota_configs: list[dict[str, Any]],
|
||||
expected_count: int,
|
||||
warning_expected: bool,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test web_server OTA instance merging behavior."""
|
||||
full_conf = {CONF_OTA: ota_configs.copy()}
|
||||
|
||||
token = fv.full_config.set(full_conf)
|
||||
try:
|
||||
with caplog.at_level(logging.WARNING):
|
||||
_web_server_ota_final_validate({})
|
||||
|
||||
updated_conf = fv.full_config.get()
|
||||
|
||||
# Verify total number of OTA platforms
|
||||
assert len(updated_conf[CONF_OTA]) == expected_count
|
||||
|
||||
# Verify warning
|
||||
if warning_expected:
|
||||
assert any(
|
||||
"Found and merged" in record.message
|
||||
and "web_server OTA" in record.message
|
||||
for record in caplog.records
|
||||
), "Expected merge warning not found in log"
|
||||
else:
|
||||
assert len(caplog.records) == 0, "Unexpected warnings logged"
|
||||
finally:
|
||||
fv.full_config.reset(token)
|
||||
|
||||
|
||||
def test_web_server_ota_consistent_manual_ids(
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test that consistent manual IDs can be merged successfully."""
|
||||
ota_configs = [
|
||||
{
|
||||
CONF_PLATFORM: CONF_WEB_SERVER,
|
||||
CONF_ID: ID("ota_web", is_manual=True),
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_WEB_SERVER,
|
||||
CONF_ID: ID("ota_web", is_manual=True),
|
||||
},
|
||||
]
|
||||
|
||||
full_conf = {CONF_OTA: ota_configs}
|
||||
|
||||
token = fv.full_config.set(full_conf)
|
||||
try:
|
||||
with caplog.at_level(logging.WARNING):
|
||||
_web_server_ota_final_validate({})
|
||||
|
||||
updated_conf = fv.full_config.get()
|
||||
assert len(updated_conf[CONF_OTA]) == 1
|
||||
assert updated_conf[CONF_OTA][0][CONF_ID].id == "ota_web"
|
||||
assert any(
|
||||
"Found and merged" in record.message and "web_server OTA" in record.message
|
||||
for record in caplog.records
|
||||
)
|
||||
finally:
|
||||
fv.full_config.reset(token)
|
||||
|
||||
|
||||
def test_web_server_ota_inconsistent_manual_ids() -> None:
|
||||
"""Test that inconsistent manual IDs raise an error."""
|
||||
ota_configs = [
|
||||
{
|
||||
CONF_PLATFORM: CONF_WEB_SERVER,
|
||||
CONF_ID: ID("ota_web_1", is_manual=True),
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_WEB_SERVER,
|
||||
CONF_ID: ID("ota_web_2", is_manual=True),
|
||||
},
|
||||
]
|
||||
|
||||
full_conf = {CONF_OTA: ota_configs}
|
||||
|
||||
token = fv.full_config.set(full_conf)
|
||||
try:
|
||||
with pytest.raises(
|
||||
cv.Invalid,
|
||||
match="Found multiple web_server OTA configurations but id is inconsistent",
|
||||
):
|
||||
_web_server_ota_final_validate({})
|
||||
finally:
|
||||
fv.full_config.reset(token)
|
||||
|
||||
1
tests/component_tests/sntp/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Tests for SNTP component."""
|
||||
22
tests/component_tests/sntp/config/sntp_test.yaml
Normal file
@@ -0,0 +1,22 @@
|
||||
esphome:
|
||||
name: sntp-test
|
||||
|
||||
esp32:
|
||||
board: esp32dev
|
||||
framework:
|
||||
type: esp-idf
|
||||
|
||||
wifi:
|
||||
ssid: "testssid"
|
||||
password: "testpassword"
|
||||
|
||||
# Test multiple SNTP instances that should be merged
|
||||
time:
|
||||
- platform: sntp
|
||||
servers:
|
||||
- 192.168.1.1
|
||||
- pool.ntp.org
|
||||
- platform: sntp
|
||||
servers:
|
||||
- pool.ntp.org
|
||||
- 192.168.1.2
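An illustrative aside, not part of the changeset: given the merge logic in esphome/components/sntp/time.py above, this fixture should collapse to a single SNTP instance with servers 192.168.1.1, pool.ntp.org, 192.168.1.2 (duplicates removed, order preserved), matching the deduplication_preserves_order case in the test file below.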
|
||||
238
tests/component_tests/sntp/test_init.py
Normal file
@@ -0,0 +1,238 @@
|
||||
"""Tests for SNTP time configuration validation."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from esphome import config_validation as cv
|
||||
from esphome.components.sntp.time import CONF_SNTP, _sntp_final_validate
|
||||
from esphome.const import CONF_ID, CONF_PLATFORM, CONF_SERVERS, CONF_TIME
|
||||
from esphome.core import ID
|
||||
import esphome.final_validate as fv
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("time_configs", "expected_count", "expected_servers", "warning_messages"),
|
||||
[
|
||||
pytest.param(
|
||||
[
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time", is_manual=False),
|
||||
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
|
||||
}
|
||||
],
|
||||
1,
|
||||
["192.168.1.1", "pool.ntp.org"],
|
||||
[],
|
||||
id="single_instance_no_merge",
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_1", is_manual=False),
|
||||
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_2", is_manual=False),
|
||||
CONF_SERVERS: ["192.168.1.2"],
|
||||
},
|
||||
],
|
||||
1,
|
||||
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
|
||||
["Found and merged 2 SNTP time configurations into one instance"],
|
||||
id="two_instances_merged",
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_1", is_manual=False),
|
||||
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_2", is_manual=False),
|
||||
CONF_SERVERS: ["pool.ntp.org", "192.168.1.2"],
|
||||
},
|
||||
],
|
||||
1,
|
||||
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
|
||||
["Found and merged 2 SNTP time configurations into one instance"],
|
||||
id="deduplication_preserves_order",
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_1", is_manual=False),
|
||||
CONF_SERVERS: ["192.168.1.1", "pool.ntp.org"],
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_2", is_manual=False),
|
||||
CONF_SERVERS: ["192.168.1.2", "pool2.ntp.org"],
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_3", is_manual=False),
|
||||
CONF_SERVERS: ["pool3.ntp.org"],
|
||||
},
|
||||
],
|
||||
1,
|
||||
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
|
||||
[
|
||||
"SNTP supports maximum 3 servers. Dropped excess server(s): ['pool2.ntp.org', 'pool3.ntp.org']",
|
||||
"Found and merged 3 SNTP time configurations into one instance",
|
||||
],
|
||||
id="three_instances_drops_excess_servers",
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_1", is_manual=False),
|
||||
CONF_SERVERS: [
|
||||
"192.168.1.1",
|
||||
"pool.ntp.org",
|
||||
"pool.ntp.org",
|
||||
"192.168.1.1",
|
||||
],
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_2", is_manual=False),
|
||||
CONF_SERVERS: ["pool.ntp.org", "192.168.1.2"],
|
||||
},
|
||||
],
|
||||
1,
|
||||
["192.168.1.1", "pool.ntp.org", "192.168.1.2"],
|
||||
["Found and merged 2 SNTP time configurations into one instance"],
|
||||
id="deduplication_multiple_duplicates",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_sntp_instance_merging(
|
||||
time_configs: list[dict[str, Any]],
|
||||
expected_count: int,
|
||||
expected_servers: list[str],
|
||||
warning_messages: list[str],
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test SNTP instance merging behavior."""
|
||||
# Create a mock full config with time configs
|
||||
full_conf = {CONF_TIME: time_configs.copy()}
|
||||
|
||||
# Set the context var
|
||||
token = fv.full_config.set(full_conf)
|
||||
try:
|
||||
with caplog.at_level(logging.WARNING):
|
||||
_sntp_final_validate({})
|
||||
|
||||
# Get the updated config
|
||||
updated_conf = fv.full_config.get()
|
||||
|
||||
# Check if merging occurred
|
||||
if len(time_configs) > 1:
|
||||
# Verify only one SNTP instance remains
|
||||
sntp_instances = [
|
||||
tc
|
||||
for tc in updated_conf[CONF_TIME]
|
||||
if tc.get(CONF_PLATFORM) == CONF_SNTP
|
||||
]
|
||||
assert len(sntp_instances) == expected_count
|
||||
|
||||
# Verify server list
|
||||
assert sntp_instances[0][CONF_SERVERS] == expected_servers
|
||||
|
||||
# Verify warnings
|
||||
for expected_msg in warning_messages:
|
||||
assert any(
|
||||
expected_msg in record.message for record in caplog.records
|
||||
), f"Expected warning message '{expected_msg}' not found in log"
|
||||
else:
|
||||
# Single instance should not trigger merging or warnings
|
||||
assert len(caplog.records) == 0
|
||||
# Config should be unchanged
|
||||
assert updated_conf[CONF_TIME] == time_configs
|
||||
finally:
|
||||
fv.full_config.reset(token)
|
||||
|
||||
|
||||
def test_sntp_inconsistent_manual_ids() -> None:
|
||||
"""Test that inconsistent manual IDs raise an error."""
|
||||
# Create configs with manual IDs that are inconsistent
|
||||
time_configs = [
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_1", is_manual=True),
|
||||
CONF_SERVERS: ["192.168.1.1"],
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_2", is_manual=True),
|
||||
CONF_SERVERS: ["192.168.1.2"],
|
||||
},
|
||||
]
|
||||
|
||||
full_conf = {CONF_TIME: time_configs}
|
||||
|
||||
token = fv.full_config.set(full_conf)
|
||||
try:
|
||||
with pytest.raises(
|
||||
cv.Invalid,
|
||||
match="Found multiple SNTP configurations but id is inconsistent",
|
||||
):
|
||||
_sntp_final_validate({})
|
||||
finally:
|
||||
fv.full_config.reset(token)
|
||||
|
||||
|
||||
def test_sntp_with_other_time_platforms(caplog: pytest.LogCaptureFixture) -> None:
|
||||
"""Test that SNTP merging doesn't affect other time platforms."""
|
||||
time_configs = [
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_1", is_manual=False),
|
||||
CONF_SERVERS: ["192.168.1.1"],
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: "homeassistant",
|
||||
CONF_ID: ID("homeassistant_time", is_manual=False),
|
||||
},
|
||||
{
|
||||
CONF_PLATFORM: CONF_SNTP,
|
||||
CONF_ID: ID("sntp_time_2", is_manual=False),
|
||||
CONF_SERVERS: ["192.168.1.2"],
|
||||
},
|
||||
]
|
||||
|
||||
full_conf = {CONF_TIME: time_configs.copy()}
|
||||
|
||||
token = fv.full_config.set(full_conf)
|
||||
try:
|
||||
with caplog.at_level(logging.WARNING):
|
||||
_sntp_final_validate({})
|
||||
|
||||
updated_conf = fv.full_config.get()
|
||||
|
||||
# Should have 2 time platforms: 1 merged SNTP + 1 homeassistant
|
||||
assert len(updated_conf[CONF_TIME]) == 2
|
||||
|
||||
# Find the platforms
|
||||
platforms = {tc[CONF_PLATFORM] for tc in updated_conf[CONF_TIME]}
|
||||
assert platforms == {CONF_SNTP, "homeassistant"}
|
||||
|
||||
# Verify SNTP was merged
|
||||
sntp_instances = [
|
||||
tc for tc in updated_conf[CONF_TIME] if tc[CONF_PLATFORM] == CONF_SNTP
|
||||
]
|
||||
assert len(sntp_instances) == 1
|
||||
assert sntp_instances[0][CONF_SERVERS] == ["192.168.1.1", "192.168.1.2"]
|
||||
finally:
|
||||
fv.full_config.reset(token)
|
||||
27
tests/components/esp32/test.esp32-p4-idf.yaml
Normal file
@@ -0,0 +1,27 @@
|
||||
esp32:
|
||||
variant: esp32p4
|
||||
flash_size: 32MB
|
||||
cpu_frequency: 400MHz
|
||||
framework:
|
||||
type: esp-idf
|
||||
advanced:
|
||||
enable_idf_experimental_features: yes
|
||||
|
||||
ota:
|
||||
platform: esphome
|
||||
|
||||
wifi:
|
||||
ssid: MySSID
|
||||
password: password1
|
||||
|
||||
esp32_hosted:
|
||||
variant: ESP32C6
|
||||
slot: 1
|
||||
active_high: true
|
||||
reset_pin: GPIO15
|
||||
cmd_pin: GPIO13
|
||||
clk_pin: GPIO12
|
||||
d0_pin: GPIO11
|
||||
d1_pin: GPIO10
|
||||
d2_pin: GPIO9
|
||||
d3_pin: GPIO8
|
||||
@@ -76,7 +76,7 @@ lvgl:
|
||||
line_width: 8
|
||||
line_rounded: true
|
||||
- id: date_style
|
||||
text_font: roboto10
|
||||
text_font: !lambda return id(roboto10);
|
||||
align: center
|
||||
text_color: !lambda return color_id2;
|
||||
bg_opa: cover
|
||||
@@ -267,7 +267,7 @@ lvgl:
|
||||
snprintf(buf, sizeof(buf), "Setup: %d", 42);
|
||||
return std::string(buf);
|
||||
align: top_mid
|
||||
text_font: space16
|
||||
text_font: !lambda return id(space16);
|
||||
- label:
|
||||
id: chip_info_label
|
||||
# Test complex setup lambda (real-world pattern)
|
||||
|
||||
@@ -18,6 +18,7 @@ touchscreen:
|
||||
|
||||
lvgl:
|
||||
- id: lvgl_0
|
||||
default_font: space16
|
||||
displays: sdl0
|
||||
- id: lvgl_1
|
||||
displays: sdl1
|
||||
@@ -39,3 +40,8 @@ lvgl:
|
||||
text: Click ME
|
||||
on_click:
|
||||
logger.log: Clicked
|
||||
|
||||
font:
|
||||
- file: "gfonts://Roboto"
|
||||
id: space16
|
||||
bpp: 4
|
||||
|
||||
150
tests/integration/fixtures/sensor_timeout_filter.yaml
Normal file
@@ -0,0 +1,150 @@
|
||||
esphome:
|
||||
name: test-timeout-filters
|
||||
|
||||
host:
|
||||
api:
|
||||
batch_delay: 0ms # Disable batching to receive all state updates
|
||||
logger:
|
||||
level: DEBUG
|
||||
|
||||
# Template sensors that we'll use to publish values
|
||||
sensor:
|
||||
- platform: template
|
||||
name: "Source Timeout Last"
|
||||
id: source_timeout_last
|
||||
accuracy_decimals: 1
|
||||
|
||||
- platform: template
|
||||
name: "Source Timeout Reset"
|
||||
id: source_timeout_reset
|
||||
accuracy_decimals: 1
|
||||
|
||||
- platform: template
|
||||
name: "Source Timeout Static"
|
||||
id: source_timeout_static
|
||||
accuracy_decimals: 1
|
||||
|
||||
- platform: template
|
||||
name: "Source Timeout Lambda"
|
||||
id: source_timeout_lambda
|
||||
accuracy_decimals: 1
|
||||
|
||||
# Test 1: TimeoutFilter - "last" mode (outputs last received value)
|
||||
- platform: copy
|
||||
source_id: source_timeout_last
|
||||
name: "Timeout Last Sensor"
|
||||
id: timeout_last_sensor
|
||||
filters:
|
||||
- timeout:
|
||||
timeout: 100ms
|
||||
value: last # Explicitly specify "last" mode to use TimeoutFilter class
|
||||
|
||||
# Test 2: TimeoutFilter - reset behavior (same filter, different source)
|
||||
- platform: copy
|
||||
source_id: source_timeout_reset
|
||||
name: "Timeout Reset Sensor"
|
||||
id: timeout_reset_sensor
|
||||
filters:
|
||||
- timeout:
|
||||
timeout: 100ms
|
||||
value: last # Explicitly specify "last" mode
|
||||
|
||||
# Test 3: TimeoutFilterConfigured - static value mode
|
||||
- platform: copy
|
||||
source_id: source_timeout_static
|
||||
name: "Timeout Static Sensor"
|
||||
id: timeout_static_sensor
|
||||
filters:
|
||||
- timeout:
|
||||
timeout: 100ms
|
||||
value: 99.9
|
||||
|
||||
# Test 4: TimeoutFilterConfigured - lambda mode
|
||||
- platform: copy
|
||||
source_id: source_timeout_lambda
|
||||
name: "Timeout Lambda Sensor"
|
||||
id: timeout_lambda_sensor
|
||||
filters:
|
||||
- timeout:
|
||||
timeout: 100ms
|
||||
value: !lambda "return -1.0;"
|
||||
|
||||
# Scripts to publish values with controlled timing
|
||||
script:
|
||||
# Test 1: Single value followed by timeout
|
||||
- id: test_timeout_last_script
|
||||
then:
|
||||
# Publish initial value
|
||||
- sensor.template.publish:
|
||||
id: source_timeout_last
|
||||
state: 42.0
|
||||
# Wait for timeout to fire (100ms + margin)
|
||||
- delay: 150ms
|
||||
|
||||
# Test 2: Multiple values before timeout (should reset timer)
|
||||
- id: test_timeout_reset_script
|
||||
then:
|
||||
# Publish first value
|
||||
- sensor.template.publish:
|
||||
id: source_timeout_reset
|
||||
state: 10.0
|
||||
# Wait 50ms (halfway to timeout)
|
||||
- delay: 50ms
|
||||
# Publish second value (resets timeout)
|
||||
- sensor.template.publish:
|
||||
id: source_timeout_reset
|
||||
state: 20.0
|
||||
# Wait 50ms (halfway to timeout again)
|
||||
- delay: 50ms
|
||||
# Publish third value (resets timeout)
|
||||
- sensor.template.publish:
|
||||
id: source_timeout_reset
|
||||
state: 30.0
|
||||
# Wait for timeout to fire (100ms + margin)
|
||||
- delay: 150ms
|
||||
|
||||
# Test 3: Static value timeout
|
||||
- id: test_timeout_static_script
|
||||
then:
|
||||
# Publish initial value
|
||||
- sensor.template.publish:
|
||||
id: source_timeout_static
|
||||
state: 55.5
|
||||
# Wait for timeout to fire
|
||||
- delay: 150ms
|
||||
|
||||
# Test 4: Lambda value timeout
|
||||
- id: test_timeout_lambda_script
|
||||
then:
|
||||
# Publish initial value
|
||||
- sensor.template.publish:
|
||||
id: source_timeout_lambda
|
||||
state: 77.7
|
||||
# Wait for timeout to fire
|
||||
- delay: 150ms
|
||||
|
||||
# Buttons to trigger each test scenario
|
||||
button:
|
||||
- platform: template
|
||||
name: "Test Timeout Last Button"
|
||||
id: test_timeout_last_button
|
||||
on_press:
|
||||
- script.execute: test_timeout_last_script
|
||||
|
||||
- platform: template
|
||||
name: "Test Timeout Reset Button"
|
||||
id: test_timeout_reset_button
|
||||
on_press:
|
||||
- script.execute: test_timeout_reset_script
|
||||
|
||||
- platform: template
|
||||
name: "Test Timeout Static Button"
|
||||
id: test_timeout_static_button
|
||||
on_press:
|
||||
- script.execute: test_timeout_static_script
|
||||
|
||||
- platform: template
|
||||
name: "Test Timeout Lambda Button"
|
||||
id: test_timeout_lambda_button
|
||||
on_press:
|
||||
- script.execute: test_timeout_lambda_script
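An illustrative aside, not part of the changeset: the four scenarios above reduce to one rule: every incoming value is forwarded and (re)arms a one-shot timer, and if the timer expires first the filter emits either the last value, a fixed value, or a lambda result. A compact Python simulation of the expected output sequences under that assumption (timestamps in milliseconds, all names illustrative):

def simulate_timeout_filter(events, timeout_ms, on_timeout):
    """events: list of (t_ms, value); on_timeout maps the last value to the emitted value."""
    out = []
    last_value = None
    deadline = None
    # Process the events plus one final observation point well past the last publish.
    horizon = (events[-1][0] if events else 0) + 10 * timeout_ms
    for t, value in events + [(horizon, None)]:
        if deadline is not None and t >= deadline and last_value is not None:
            out.append(on_timeout(last_value))   # timer fired before this event
            deadline = None
        if value is not None:
            out.append(value)                    # values always pass through
            last_value = value
            deadline = t + timeout_ms            # publishing re-arms the timer
    return out

# Test 1 / "last": single publish, then the timeout re-emits the same value.
assert simulate_timeout_filter([(0, 42.0)], 100, lambda v: v) == [42.0, 42.0]
# Test 2: publishes at 0/50/100 ms keep resetting the timer; the timeout emits 30.0 once.
assert simulate_timeout_filter([(0, 10.0), (50, 20.0), (100, 30.0)], 100, lambda v: v) == [10.0, 20.0, 30.0, 30.0]
# Test 3: static value 99.9 on timeout.
assert simulate_timeout_filter([(0, 55.5)], 100, lambda v: 99.9) == [55.5, 99.9]
# Test 4: lambda value -1.0 on timeout.
assert simulate_timeout_filter([(0, 77.7)], 100, lambda v: -1.0) == [77.7, -1.0]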
|
||||
185
tests/integration/test_sensor_timeout_filter.py
Normal file
@@ -0,0 +1,185 @@
|
||||
"""Test sensor timeout filter functionality."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from aioesphomeapi import EntityState, SensorState
|
||||
import pytest
|
||||
|
||||
from .state_utils import InitialStateHelper, build_key_to_entity_mapping
|
||||
from .types import APIClientConnectedFactory, RunCompiledFunction
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_sensor_timeout_filter(
|
||||
yaml_config: str,
|
||||
run_compiled: RunCompiledFunction,
|
||||
api_client_connected: APIClientConnectedFactory,
|
||||
) -> None:
|
||||
"""Test TimeoutFilter and TimeoutFilterConfigured with all modes."""
|
||||
loop = asyncio.get_running_loop()
|
||||
|
||||
# Track state changes for all sensors
|
||||
timeout_last_states: list[float] = []
|
||||
timeout_reset_states: list[float] = []
|
||||
timeout_static_states: list[float] = []
|
||||
timeout_lambda_states: list[float] = []
|
||||
|
||||
# Futures for each test scenario
|
||||
test1_complete = loop.create_future() # TimeoutFilter - last mode
|
||||
test2_complete = loop.create_future() # TimeoutFilter - reset behavior
|
||||
test3_complete = loop.create_future() # TimeoutFilterConfigured - static value
|
||||
test4_complete = loop.create_future() # TimeoutFilterConfigured - lambda
|
||||
|
||||
def on_state(state: EntityState) -> None:
|
||||
"""Track sensor state updates."""
|
||||
if not isinstance(state, SensorState):
|
||||
return
|
||||
|
||||
if state.missing_state:
|
||||
return
|
||||
|
||||
sensor_name = key_to_sensor.get(state.key)
|
||||
|
||||
# Test 1: TimeoutFilter - last mode
|
||||
if sensor_name == "timeout_last_sensor":
|
||||
timeout_last_states.append(state.state)
|
||||
# Expect 2 values: initial 42.0 + timeout fires with 42.0
|
||||
if len(timeout_last_states) >= 2 and not test1_complete.done():
|
||||
test1_complete.set_result(True)
|
||||
|
||||
# Test 2: TimeoutFilter - reset behavior
|
||||
elif sensor_name == "timeout_reset_sensor":
|
||||
timeout_reset_states.append(state.state)
|
||||
# Expect 4 values: 10.0, 20.0, 30.0, then timeout fires with 30.0
|
||||
if len(timeout_reset_states) >= 4 and not test2_complete.done():
|
||||
test2_complete.set_result(True)
|
||||
|
||||
# Test 3: TimeoutFilterConfigured - static value
|
||||
elif sensor_name == "timeout_static_sensor":
|
||||
timeout_static_states.append(state.state)
|
||||
# Expect 2 values: initial 55.5 + timeout fires with 99.9
|
||||
if len(timeout_static_states) >= 2 and not test3_complete.done():
|
||||
test3_complete.set_result(True)
|
||||
|
||||
# Test 4: TimeoutFilterConfigured - lambda
|
||||
elif sensor_name == "timeout_lambda_sensor":
|
||||
timeout_lambda_states.append(state.state)
|
||||
# Expect 2 values: initial 77.7 + timeout fires with -1.0
|
||||
if len(timeout_lambda_states) >= 2 and not test4_complete.done():
|
||||
test4_complete.set_result(True)
|
||||
|
||||
async with (
|
||||
run_compiled(yaml_config),
|
||||
api_client_connected() as client,
|
||||
):
|
||||
entities, services = await client.list_entities_services()
|
||||
|
||||
key_to_sensor = build_key_to_entity_mapping(
|
||||
entities,
|
||||
[
|
||||
"timeout_last_sensor",
|
||||
"timeout_reset_sensor",
|
||||
"timeout_static_sensor",
|
||||
"timeout_lambda_sensor",
|
||||
],
|
||||
)
|
||||
|
||||
initial_state_helper = InitialStateHelper(entities)
|
||||
client.subscribe_states(initial_state_helper.on_state_wrapper(on_state))
|
||||
|
||||
try:
|
||||
await initial_state_helper.wait_for_initial_states()
|
||||
except TimeoutError:
|
||||
pytest.fail("Timeout waiting for initial states")
|
||||
|
||||
# Helper to find buttons by object_id substring
|
||||
def find_button(object_id_substring: str) -> int:
|
||||
"""Find a button by object_id substring and return its key."""
|
||||
button = next(
|
||||
(e for e in entities if object_id_substring in e.object_id.lower()),
|
||||
None,
|
||||
)
|
||||
assert button is not None, f"Button '{object_id_substring}' not found"
|
||||
return button.key
|
||||
|
||||
# Find all test buttons
|
||||
test1_button_key = find_button("test_timeout_last_button")
|
||||
test2_button_key = find_button("test_timeout_reset_button")
|
||||
test3_button_key = find_button("test_timeout_static_button")
|
||||
test4_button_key = find_button("test_timeout_lambda_button")
|
||||
|
||||
# === Test 1: TimeoutFilter - last mode ===
|
||||
client.button_command(test1_button_key)
|
||||
try:
|
||||
await asyncio.wait_for(test1_complete, timeout=2.0)
|
||||
except TimeoutError:
|
||||
pytest.fail(f"Test 1 timeout. Received states: {timeout_last_states}")
|
||||
|
||||
assert len(timeout_last_states) == 2, (
|
||||
f"Test 1: Should have 2 states, got {len(timeout_last_states)}: {timeout_last_states}"
|
||||
)
|
||||
assert timeout_last_states[0] == pytest.approx(42.0), (
|
||||
f"Test 1: First state should be 42.0, got {timeout_last_states[0]}"
|
||||
)
|
||||
assert timeout_last_states[1] == pytest.approx(42.0), (
|
||||
f"Test 1: Timeout should output last value (42.0), got {timeout_last_states[1]}"
|
||||
)
|
||||
|
||||
# === Test 2: TimeoutFilter - reset behavior ===
|
||||
client.button_command(test2_button_key)
|
||||
try:
|
||||
await asyncio.wait_for(test2_complete, timeout=2.0)
|
||||
except TimeoutError:
|
||||
pytest.fail(f"Test 2 timeout. Received states: {timeout_reset_states}")
|
||||
|
||||
assert len(timeout_reset_states) == 4, (
|
||||
f"Test 2: Should have 4 states, got {len(timeout_reset_states)}: {timeout_reset_states}"
|
||||
)
|
||||
assert timeout_reset_states[0] == pytest.approx(10.0), (
|
||||
f"Test 2: First state should be 10.0, got {timeout_reset_states[0]}"
|
||||
)
|
||||
assert timeout_reset_states[1] == pytest.approx(20.0), (
|
||||
f"Test 2: Second state should be 20.0, got {timeout_reset_states[1]}"
|
||||
)
|
||||
assert timeout_reset_states[2] == pytest.approx(30.0), (
|
||||
f"Test 2: Third state should be 30.0, got {timeout_reset_states[2]}"
|
||||
)
|
||||
assert timeout_reset_states[3] == pytest.approx(30.0), (
|
||||
f"Test 2: Timeout should output last value (30.0), got {timeout_reset_states[3]}"
|
||||
)
|
||||
|
||||
# === Test 3: TimeoutFilterConfigured - static value ===
|
||||
client.button_command(test3_button_key)
|
||||
try:
|
||||
await asyncio.wait_for(test3_complete, timeout=2.0)
|
||||
except TimeoutError:
|
||||
pytest.fail(f"Test 3 timeout. Received states: {timeout_static_states}")
|
||||
|
||||
assert len(timeout_static_states) == 2, (
|
||||
f"Test 3: Should have 2 states, got {len(timeout_static_states)}: {timeout_static_states}"
|
||||
)
|
||||
assert timeout_static_states[0] == pytest.approx(55.5), (
|
||||
f"Test 3: First state should be 55.5, got {timeout_static_states[0]}"
|
||||
)
|
||||
assert timeout_static_states[1] == pytest.approx(99.9), (
|
||||
f"Test 3: Timeout should output configured value (99.9), got {timeout_static_states[1]}"
|
||||
)
|
||||
|
||||
# === Test 4: TimeoutFilterConfigured - lambda ===
|
||||
client.button_command(test4_button_key)
|
||||
try:
|
||||
await asyncio.wait_for(test4_complete, timeout=2.0)
|
||||
except TimeoutError:
|
||||
pytest.fail(f"Test 4 timeout. Received states: {timeout_lambda_states}")
|
||||
|
||||
assert len(timeout_lambda_states) == 2, (
|
||||
f"Test 4: Should have 2 states, got {len(timeout_lambda_states)}: {timeout_lambda_states}"
|
||||
)
|
||||
assert timeout_lambda_states[0] == pytest.approx(77.7), (
|
||||
f"Test 4: First state should be 77.7, got {timeout_lambda_states[0]}"
|
||||
)
|
||||
assert timeout_lambda_states[1] == pytest.approx(-1.0), (
|
||||
f"Test 4: Timeout should evaluate lambda (-1.0), got {timeout_lambda_states[1]}"
|
||||
)
|
||||