Mirror of https://github.com/esphome/esphome.git, synced 2025-11-20 00:35:44 +00:00
Merge remote-tracking branch 'upstream/dev' into integration
# Conflicts:
#	esphome/components/mqtt/mqtt_binary_sensor.cpp
#	esphome/components/mqtt/mqtt_component.cpp
#	esphome/components/mqtt/mqtt_cover.cpp
#	esphome/components/mqtt/mqtt_event.cpp
#	esphome/components/mqtt/mqtt_number.cpp
#	esphome/components/mqtt/mqtt_sensor.cpp
#	esphome/components/mqtt/mqtt_text_sensor.cpp
#	esphome/components/mqtt/mqtt_valve.cpp

@@ -3,6 +3,8 @@ import re
 from esphome import config_validation as cv
+from esphome.const import CONF_ARGS, CONF_FORMAT
 
+CONF_IF_NAN = "if_nan"
 
 lv_uses = {
     "USER_DATA",
     "LOG",
@@ -21,23 +23,48 @@ lv_fonts_used = set()
 esphome_fonts_used = set()
 lvgl_components_required = set()
 
 
-def validate_printf(value):
-    cfmt = r"""
+# noqa
+f_regex = re.compile(
+    r"""
     (                        # start of capture group 1
     %                        # literal "%"
-    (?:[-+0 #]{0,5})         # optional flags
+    [-+0 #]{0,5}             # optional flags
     (?:\d+|\*)?              # width
     (?:\.(?:\d+|\*))?        # precision
     (?:h|l|ll|w|I|I32|I64)?  # size
+    f                        # type
+    )
+    """,
+    flags=re.VERBOSE,
+)
+# noqa
+c_regex = re.compile(
+    r"""
+    (                        # start of capture group 1
+    %                        # literal "%"
+    [-+0 #]{0,5}             # optional flags
+    (?:\d+|\*)?              # width
+    (?:\.(?:\d+|\*))?        # precision
+    (?:h|l|ll|w|I|I32|I64)?  # size
     [cCdiouxXeEfgGaAnpsSZ]   # type
     )
-    """  # noqa
-    matches = re.findall(cfmt, value[CONF_FORMAT], flags=re.VERBOSE)
+    """,
+    flags=re.VERBOSE,
+)
+
+
+def validate_printf(value):
+    format_string = value[CONF_FORMAT]
+    matches = c_regex.findall(format_string)
     if len(matches) != len(value[CONF_ARGS]):
         raise cv.Invalid(
             f"Found {len(matches)} printf-patterns ({', '.join(matches)}), but {len(value[CONF_ARGS])} args were given!"
         )
+
+    if value.get(CONF_IF_NAN) and len(f_regex.findall(format_string)) != 1:
+        raise cv.Invalid(
+            "Use of 'if_nan' requires a single valid printf-pattern of type %f"
+        )
     return value

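A quick standalone sanity check of the two patterns above, using plain `re` outside esphome (the format strings are hypothetical configs, not cases from this PR):

    import re

    C_PATTERN = r"""
    (                        # start of capture group 1
    %                        # literal "%"
    [-+0 #]{0,5}             # optional flags
    (?:\d+|\*)?              # width
    (?:\.(?:\d+|\*))?        # precision
    (?:h|l|ll|w|I|I32|I64)?  # size
    [cCdiouxXeEfgGaAnpsSZ]   # type
    )
    """
    c_regex = re.compile(C_PATTERN, flags=re.VERBOSE)
    # f_regex is the same pattern with the type narrowed to "f":
    f_regex = re.compile(C_PATTERN.replace("[cCdiouxXeEfgGaAnpsSZ]", "f"), flags=re.VERBOSE)

    assert c_regex.findall("Spinbox value is %f") == ["%f"]  # one pattern, so one arg required
    assert f_regex.findall("value is %.1f now") == ["%.1f"]  # exactly one %f, so if_nan is allowed
    assert f_regex.findall("%d items, %s total") == []       # valid printf patterns, but none of type f

The last case is what the new check rejects: `if_nan` set without exactly one `%f`-type pattern to guard.
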
@@ -33,7 +33,13 @@ from .defines import (
     call_lambda,
     literal,
 )
-from .helpers import add_lv_use, esphome_fonts_used, lv_fonts_used, requires_component
+from .helpers import (
+    CONF_IF_NAN,
+    add_lv_use,
+    esphome_fonts_used,
+    lv_fonts_used,
+    requires_component,
+)
 from .types import lv_font_t, lv_gradient_t
 
 opacity_consts = LvConstant("LV_OPA_", "TRANSP", "COVER")

@@ -412,7 +418,13 @@ class TextValidator(LValidator):
             str_args = [str(x) for x in value[CONF_ARGS]]
             arg_expr = cg.RawExpression(",".join(str_args))
             format_str = cpp_string_escape(format_str)
-            return literal(f"str_sprintf({format_str}, {arg_expr}).c_str()")
+            sprintf_str = f"str_sprintf({format_str}, {arg_expr}).c_str()"
+            if nanval := value.get(CONF_IF_NAN):
+                nanval = cpp_string_escape(nanval)
+                return literal(
+                    f"(std::isfinite({arg_expr}) ? {sprintf_str} : {nanval})"
+                )
+            return literal(sprintf_str)
         if time_format := value.get(CONF_TIME_FORMAT):
             source = value[CONF_TIME]
             if isinstance(source, Lambda):

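For reference, the shape of the generated C++ expression, reproduced with a small hypothetical helper (the function name and plain-string arguments are illustrative, not esphome API):

    def render_text_literal(format_str: str, arg_expr: str, nanval: str | None = None) -> str:
        # Mirrors the branch above: wrap the sprintf call in a ternary when if_nan is set
        sprintf_str = f"str_sprintf({format_str}, {arg_expr}).c_str()"
        if nanval is not None:
            return f"(std::isfinite({arg_expr}) ? {sprintf_str} : {nanval})"
        return sprintf_str

    print(render_text_literal('"value is %.1f now"', "x", '"Value unknown"'))
    # (std::isfinite(x) ? str_sprintf("value is %.1f now", x).c_str() : "Value unknown")

Since `std::isfinite` is applied to the whole argument expression, the guard only makes sense for a single argument, which is what the single-`%f` rule in `validate_printf` enforces.
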
@@ -20,7 +20,7 @@ from esphome.core.config import StartupTrigger
 
 from . import defines as df, lv_validation as lvalid
 from .defines import CONF_TIME_FORMAT, LV_GRAD_DIR
-from .helpers import requires_component, validate_printf
+from .helpers import CONF_IF_NAN, requires_component, validate_printf
 from .layout import (
     FLEX_OBJ_SCHEMA,
     GRID_CELL_SCHEMA,

@@ -54,6 +54,7 @@ PRINTF_TEXT_SCHEMA = cv.All(
         {
             cv.Required(CONF_FORMAT): cv.string,
             cv.Optional(CONF_ARGS, default=list): cv.ensure_list(cv.lambda_),
+            cv.Optional(CONF_IF_NAN): cv.string,
         },
     ),
     validate_printf,

@@ -30,11 +30,12 @@ MQTTBinarySensorComponent::MQTTBinarySensorComponent(binary_sensor::BinarySensor
 }
 
 void MQTTBinarySensorComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
+  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   const auto device_class = this->binary_sensor_->get_device_class_ref();
   if (!device_class.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_DEVICE_CLASS] = device_class;
   }
+  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
   if (this->binary_sensor_->is_status_binary_sensor())
     root[MQTT_PAYLOAD_ON] = mqtt::global_mqtt_client->get_availability().payload_available;
   if (this->binary_sensor_->is_status_binary_sensor())

@@ -89,11 +89,12 @@ bool MQTTComponent::send_discovery_() {
 
   if (this->is_disabled_by_default_())
     root[MQTT_ENABLED_BY_DEFAULT] = false;
+  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   const auto icon_ref = this->get_icon_ref_();
   if (!icon_ref.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_ICON] = icon_ref;
   }
+  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 
   const auto entity_category = this->get_entity()->get_entity_category();
   switch (entity_category) {

@@ -67,11 +67,12 @@ void MQTTCoverComponent::dump_config() {
   }
 }
 void MQTTCoverComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
+  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   const auto device_class = this->cover_->get_device_class_ref();
   if (!device_class.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_DEVICE_CLASS] = device_class;
   }
+  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 
   auto traits = this->cover_->get_traits();
   if (traits.get_is_assumed_state()) {

@@ -21,11 +21,12 @@ void MQTTEventComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConf
   for (const auto &event_type : this->event_->get_event_types())
     event_types.add(event_type);
 
+  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   const auto device_class = this->event_->get_device_class_ref();
   if (!device_class.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_DEVICE_CLASS] = device_class;
   }
+  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 
   config.command_topic = false;
 }

@@ -44,9 +44,9 @@ void MQTTNumberComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCon
   root[MQTT_MIN] = traits.get_min_value();
   root[MQTT_MAX] = traits.get_max_value();
   root[MQTT_STEP] = traits.get_step();
+  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   const auto unit_of_measurement = this->number_->traits.get_unit_of_measurement_ref();
   if (!unit_of_measurement.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_UNIT_OF_MEASUREMENT] = unit_of_measurement;
   }
   switch (this->number_->traits.get_mode()) {

@@ -61,9 +61,9 @@ void MQTTNumberComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCon
   }
   const auto device_class = this->number_->traits.get_device_class_ref();
   if (!device_class.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_DEVICE_CLASS] = device_class;
   }
+  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 
   config.command_topic = true;
 }

@@ -44,17 +44,17 @@ void MQTTSensorComponent::set_expire_after(uint32_t expire_after) { this->expire
 void MQTTSensorComponent::disable_expire_after() { this->expire_after_ = 0; }
 
 void MQTTSensorComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
+  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   const auto device_class = this->sensor_->get_device_class_ref();
   if (!device_class.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_DEVICE_CLASS] = device_class;
   }
 
   const auto unit_of_measurement = this->sensor_->get_unit_of_measurement_ref();
   if (!unit_of_measurement.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_UNIT_OF_MEASUREMENT] = unit_of_measurement;
   }
+  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 
   if (this->get_expire_after() > 0)
     root[MQTT_EXPIRE_AFTER] = this->get_expire_after() / 1000;

@@ -15,11 +15,12 @@ using namespace esphome::text_sensor;
 
 MQTTTextSensor::MQTTTextSensor(TextSensor *sensor) : sensor_(sensor) {}
 void MQTTTextSensor::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
+  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   const auto device_class = this->sensor_->get_device_class_ref();
   if (!device_class.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_DEVICE_CLASS] = device_class;
   }
+  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
   config.command_topic = false;
 }
 void MQTTTextSensor::setup() {

@@ -49,11 +49,12 @@ void MQTTValveComponent::dump_config() {
   }
 }
 void MQTTValveComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
+  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   const auto device_class = this->valve_->get_device_class_ref();
   if (!device_class.empty()) {
-    // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
     root[MQTT_DEVICE_CLASS] = device_class;
   }
+  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 
   auto traits = this->valve_->get_traits();
   if (traits.get_is_assumed_state()) {

@@ -117,12 +117,8 @@ void HOT Scheduler::set_timer_common_(Component *component, SchedulerItem::Type
   item->set_name(name_cstr, !is_static_string);
   item->type = type;
   item->callback = std::move(func);
-  // Initialize remove to false (though it should already be from constructor)
-#ifdef ESPHOME_THREAD_MULTI_ATOMICS
-  item->remove.store(false, std::memory_order_relaxed);
-#else
-  item->remove = false;
-#endif
+  // Reset remove flag - recycled items may have been cancelled (remove=true) in previous use
+  this->set_item_removed_(item.get(), false);
   item->is_retry = is_retry;
 
 #ifndef ESPHOME_THREAD_SINGLE

@@ -153,21 +149,7 @@ void HOT Scheduler::set_timer_common_(Component *component, SchedulerItem::Type
   }
 
 #ifdef ESPHOME_DEBUG_SCHEDULER
-  // Validate static strings in debug mode
-  if (is_static_string && name_cstr != nullptr) {
-    validate_static_string(name_cstr);
-  }
-
-  // Debug logging
-  const char *type_str = (type == SchedulerItem::TIMEOUT) ? "timeout" : "interval";
-  if (type == SchedulerItem::TIMEOUT) {
-    ESP_LOGD(TAG, "set_%s(name='%s/%s', %s=%" PRIu32 ")", type_str, LOG_STR_ARG(item->get_source()),
-             name_cstr ? name_cstr : "(null)", type_str, delay);
-  } else {
-    ESP_LOGD(TAG, "set_%s(name='%s/%s', %s=%" PRIu32 ", offset=%" PRIu32 ")", type_str, LOG_STR_ARG(item->get_source()),
-             name_cstr ? name_cstr : "(null)", type_str, delay,
-             static_cast<uint32_t>(item->get_next_execution() - now));
-  }
+  this->debug_log_timer_(item.get(), is_static_string, name_cstr, type, delay, now);
 #endif /* ESPHOME_DEBUG_SCHEDULER */
 
   // For retries, check if there's a cancelled timeout first

@@ -787,4 +769,25 @@ void Scheduler::recycle_item_(std::unique_ptr<SchedulerItem> item) {
   // else: unique_ptr will delete the item when it goes out of scope
 }
 
+#ifdef ESPHOME_DEBUG_SCHEDULER
+void Scheduler::debug_log_timer_(const SchedulerItem *item, bool is_static_string, const char *name_cstr,
+                                 SchedulerItem::Type type, uint32_t delay, uint64_t now) {
+  // Validate static strings in debug mode
+  if (is_static_string && name_cstr != nullptr) {
+    validate_static_string(name_cstr);
+  }
+
+  // Debug logging
+  const char *type_str = (type == SchedulerItem::TIMEOUT) ? "timeout" : "interval";
+  if (type == SchedulerItem::TIMEOUT) {
+    ESP_LOGD(TAG, "set_%s(name='%s/%s', %s=%" PRIu32 ")", type_str, LOG_STR_ARG(item->get_source()),
+             name_cstr ? name_cstr : "(null)", type_str, delay);
+  } else {
+    ESP_LOGD(TAG, "set_%s(name='%s/%s', %s=%" PRIu32 ", offset=%" PRIu32 ")", type_str, LOG_STR_ARG(item->get_source()),
+             name_cstr ? name_cstr : "(null)", type_str, delay,
+             static_cast<uint32_t>(item->get_next_execution() - now));
+  }
+}
+#endif /* ESPHOME_DEBUG_SCHEDULER */
+
 }  // namespace esphome

@@ -266,6 +266,12 @@ class Scheduler {
   // Helper to perform full cleanup when too many items are cancelled
   void full_cleanup_removed_items_();
 
+#ifdef ESPHOME_DEBUG_SCHEDULER
+  // Helper for debug logging in set_timer_common_ - extracted to reduce code size
+  void debug_log_timer_(const SchedulerItem *item, bool is_static_string, const char *name_cstr,
+                        SchedulerItem::Type type, uint32_t delay, uint64_t now);
+#endif /* ESPHOME_DEBUG_SCHEDULER */
+
 #ifndef ESPHOME_THREAD_SINGLE
   // Helper to process defer queue - inline for performance in hot path
   inline void process_defer_queue_(uint32_t &now) {

@@ -367,6 +373,24 @@ class Scheduler {
 #endif
   }
 
+  // Helper to set item removal flag (platform-specific)
+  // For ESPHOME_THREAD_MULTI_NO_ATOMICS platforms, the caller must hold the scheduler lock before calling this
+  // function. Uses memory_order_release when setting to true (for cancellation synchronization),
+  // and memory_order_relaxed when setting to false (for initialization).
+  void set_item_removed_(SchedulerItem *item, bool removed) {
+#ifdef ESPHOME_THREAD_MULTI_ATOMICS
+    // Multi-threaded with atomics: use atomic store with appropriate ordering
+    // Release ordering when setting to true ensures cancellation is visible to other threads
+    // Relaxed ordering when setting to false is sufficient for initialization
+    item->remove.store(removed, removed ? std::memory_order_release : std::memory_order_relaxed);
+#else
+    // Single-threaded (ESPHOME_THREAD_SINGLE) or
+    // multi-threaded without atomics (ESPHOME_THREAD_MULTI_NO_ATOMICS): direct write
+    // For ESPHOME_THREAD_MULTI_NO_ATOMICS, caller MUST hold lock!
+    item->remove = removed;
+#endif
+  }
+
   // Helper to mark matching items in a container as removed
   // Returns the number of items marked for removal
   // IMPORTANT: Caller must hold the scheduler lock before calling this function.

@@ -383,15 +407,7 @@ class Scheduler {
         continue;
       if (this->matches_item_(item, component, name_cstr, type, match_retry)) {
-        // Mark item for removal (platform-specific)
-#ifdef ESPHOME_THREAD_MULTI_ATOMICS
-        // Multi-threaded with atomics: use atomic store
-        item->remove.store(true, std::memory_order_release);
-#else
-        // Single-threaded (ESPHOME_THREAD_SINGLE) or
-        // multi-threaded without atomics (ESPHOME_THREAD_MULTI_NO_ATOMICS): direct write
-        // For ESPHOME_THREAD_MULTI_NO_ATOMICS, caller MUST hold lock!
-        item->remove = true;
-#endif
+        this->set_item_removed_(item.get(), true);
         count++;
       }
     }

@@ -63,6 +63,7 @@ from helpers import (
     get_components_from_integration_fixtures,
     get_components_with_dependencies,
     get_cpp_changed_components,
+    get_target_branch,
     git_ls_files,
     parse_test_filename,
     root_path,

@@ -471,6 +472,20 @@ def detect_memory_impact_config(
     - platform: platform name for the merged build
     - use_merged_config: "true" (always use merged config)
     """
+    # Skip memory impact analysis for release* or beta* branches
+    # These branches typically contain many merged changes from dev, and building
+    # all components at once would produce nonsensical memory impact results.
+    # Memory impact analysis is most useful for focused PRs targeting dev.
+    target_branch = get_target_branch()
+    if target_branch and (
+        target_branch.startswith("release") or target_branch.startswith("beta")
+    ):
+        print(
+            f"Memory impact: Skipping analysis for target branch {target_branch} "
+            f"(would try to build all components at once, giving nonsensical results)",
+            file=sys.stderr,
+        )
+        return {"should_run": "false"}
+
     # Get actually changed files (not dependencies)
     files = changed_files(branch)

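The `startswith` checks also cover versioned branch names; a quick illustration (the versioned names are hypothetical):

    for branch in ("dev", "release", "release-2025.11", "beta", "beta-2025.11.0"):
        skip = branch.startswith("release") or branch.startswith("beta")
        print(f"{branch}: {'skip' if skip else 'run'}")
    # dev: run; every release*/beta* variant: skip
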
@@ -196,6 +196,20 @@ def splitlines_no_ends(string: str) -> list[str]:
     return [s.strip() for s in string.splitlines()]
 
 
+@cache
+def _get_github_event_data() -> dict | None:
+    """Read and parse GitHub event file (cached).
+
+    Returns:
+        Parsed event data dictionary, or None if not available
+    """
+    github_event_path = os.environ.get("GITHUB_EVENT_PATH")
+    if github_event_path and os.path.exists(github_event_path):
+        with open(github_event_path) as f:
+            return json.load(f)
+    return None
+
+
 def _get_pr_number_from_github_env() -> str | None:
     """Extract PR number from GitHub environment variables.
 

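One consequence of the `@cache` decorator: the event file is read at most once per process, so code that changes GITHUB_EVENT_PATH afterwards still sees the first result. That is why the test suite gains an autouse fixture (later in this diff) that clears these caches. A sketch of the stale-cache behavior, with hypothetical paths:

    import os

    os.environ["GITHUB_EVENT_PATH"] = "/tmp/event_a.json"
    helpers._get_github_event_data()              # reads and caches event_a.json
    os.environ["GITHUB_EVENT_PATH"] = "/tmp/event_b.json"
    helpers._get_github_event_data()              # still returns the cached event_a data
    helpers._get_github_event_data.cache_clear()
    helpers._get_github_event_data()              # now re-reads and picks up event_b.json
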
@@ -208,13 +222,30 @@ def _get_pr_number_from_github_env() -> str | None:
         return github_ref.split("/pull/")[1].split("/")[0]
 
     # Fallback to GitHub event file
-    github_event_path = os.environ.get("GITHUB_EVENT_PATH")
-    if github_event_path and os.path.exists(github_event_path):
-        with open(github_event_path) as f:
-            event_data = json.load(f)
-            pr_data = event_data.get("pull_request", {})
-            if pr_number := pr_data.get("number"):
-                return str(pr_number)
+    if event_data := _get_github_event_data():
+        pr_data = event_data.get("pull_request", {})
+        if pr_number := pr_data.get("number"):
+            return str(pr_number)
 
     return None
+
+
+def get_target_branch() -> str | None:
+    """Get the target branch from GitHub environment variables.
+
+    Returns:
+        Target branch name (e.g., "dev", "release", "beta"), or None if not in PR context
+    """
+    # First try GITHUB_BASE_REF (set for pull_request events)
+    if base_ref := os.environ.get("GITHUB_BASE_REF"):
+        return base_ref
+
+    # Fallback to GitHub event file
+    if event_data := _get_github_event_data():
+        pr_data = event_data.get("pull_request", {})
+        base_data = pr_data.get("base", {})
+        if ref := base_data.get("ref"):
+            return ref
+
+    return None

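A sketch of the two lookup paths in `get_target_branch`, assuming a fresh process (the values and event-file path are hypothetical):

    import json
    import os

    # Path 1: GITHUB_BASE_REF is set directly on pull_request events
    os.environ["GITHUB_BASE_REF"] = "dev"
    assert get_target_branch() == "dev"

    # Path 2: otherwise fall back to the (cached) event file
    del os.environ["GITHUB_BASE_REF"]
    os.environ["GITHUB_EVENT_PATH"] = "/tmp/event.json"
    with open("/tmp/event.json", "w") as f:
        json.dump({"pull_request": {"base": {"ref": "release"}}}, f)
    assert get_target_branch() == "release"  # detect_memory_impact_config() will then skip
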
@@ -1,11 +1,11 @@
 button:
   - platform: bl0940
-    bl0940_id: test_id
+    bl0940_id: bl0940_test_id
     name: Cal Reset
 
 sensor:
   - platform: bl0940
-    id: test_id
+    id: bl0940_test_id
     voltage:
       name: BL0940 Voltage
     current:

@@ -22,7 +22,7 @@ sensor:
 number:
   - platform: bl0940
     id: bl0940_number_id
-    bl0940_id: test_id
+    bl0940_id: bl0940_test_id
     current_calibration:
       name: Cal Current
       min_value: -5

@@ -726,6 +726,12 @@ lvgl:
           - logger.log:
               format: "Spinbox value is %f"
               args: [x]
+          - lvgl.label.update:
+              id: hello_label
+              text:
+                format: "value is %.1f now"
+                args: [x]
+                if_nan: "Value unknown"
           - button:
               styles: spin_button
               id: spin_down

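Why `if_nan` is worth having here: a `%f`-style conversion happily renders non-finite values, which is rarely what a display label should show. In Python terms:

    x = float("nan")
    print("value is %.1f now" % x)  # value is nan now
    print("Value unknown" if x != x else "value is %.1f now" % x)  # NaN compares unequal to itself
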
@@ -1,13 +1,13 @@
 sensor:
   - platform: nau7802
     i2c_id: i2c_bus
-    id: test_id
+    id: nau7802_test_id
     name: weight
     gain: 32
     ldo_voltage: "3.0v"
     samples_per_second: 10
     on_value:
       then:
-        - nau7802.calibrate_external_offset: test_id
-        - nau7802.calibrate_internal_offset: test_id
-        - nau7802.calibrate_gain: test_id
+        - nau7802.calibrate_external_offset: nau7802_test_id
+        - nau7802.calibrate_internal_offset: nau7802_test_id
+        - nau7802.calibrate_gain: nau7802_test_id

@@ -1240,3 +1240,73 @@ def test_detect_memory_impact_config_filters_incompatible_esp8266_on_esp32(
     )
 
     assert result["use_merged_config"] == "true"
+
+
+def test_detect_memory_impact_config_skips_release_branch(tmp_path: Path) -> None:
+    """Test that memory impact analysis is skipped for release* branches."""
+    # Create test directory structure with components that have tests
+    tests_dir = tmp_path / "tests" / "components"
+    wifi_dir = tests_dir / "wifi"
+    wifi_dir.mkdir(parents=True)
+    (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi")
+
+    with (
+        patch.object(determine_jobs, "root_path", str(tmp_path)),
+        patch.object(helpers, "root_path", str(tmp_path)),
+        patch.object(determine_jobs, "changed_files") as mock_changed_files,
+        patch.object(determine_jobs, "get_target_branch", return_value="release"),
+    ):
+        mock_changed_files.return_value = ["esphome/components/wifi/wifi.cpp"]
+        determine_jobs._component_has_tests.cache_clear()
+
+        result = determine_jobs.detect_memory_impact_config()
+
+        # Memory impact should be skipped for release branch
+        assert result["should_run"] == "false"
+
+
+def test_detect_memory_impact_config_skips_beta_branch(tmp_path: Path) -> None:
+    """Test that memory impact analysis is skipped for beta* branches."""
+    # Create test directory structure with components that have tests
+    tests_dir = tmp_path / "tests" / "components"
+    wifi_dir = tests_dir / "wifi"
+    wifi_dir.mkdir(parents=True)
+    (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi")
+
+    with (
+        patch.object(determine_jobs, "root_path", str(tmp_path)),
+        patch.object(helpers, "root_path", str(tmp_path)),
+        patch.object(determine_jobs, "changed_files") as mock_changed_files,
+        patch.object(determine_jobs, "get_target_branch", return_value="beta"),
+    ):
+        mock_changed_files.return_value = ["esphome/components/wifi/wifi.cpp"]
+        determine_jobs._component_has_tests.cache_clear()
+
+        result = determine_jobs.detect_memory_impact_config()
+
+        # Memory impact should be skipped for beta branch
+        assert result["should_run"] == "false"
+
+
+def test_detect_memory_impact_config_runs_for_dev_branch(tmp_path: Path) -> None:
+    """Test that memory impact analysis runs for dev branch."""
+    # Create test directory structure with components that have tests
+    tests_dir = tmp_path / "tests" / "components"
+    wifi_dir = tests_dir / "wifi"
+    wifi_dir.mkdir(parents=True)
+    (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi")
+
+    with (
+        patch.object(determine_jobs, "root_path", str(tmp_path)),
+        patch.object(helpers, "root_path", str(tmp_path)),
+        patch.object(determine_jobs, "changed_files") as mock_changed_files,
+        patch.object(determine_jobs, "get_target_branch", return_value="dev"),
+    ):
+        mock_changed_files.return_value = ["esphome/components/wifi/wifi.cpp"]
+        determine_jobs._component_has_tests.cache_clear()
+
+        result = determine_jobs.detect_memory_impact_config()
+
+        # Memory impact should run for dev branch
+        assert result["should_run"] == "true"
+        assert result["components"] == ["wifi"]

@@ -31,6 +31,13 @@ print_file_list = helpers.print_file_list
 get_all_dependencies = helpers.get_all_dependencies
 
 
+@pytest.fixture(autouse=True)
+def clear_helpers_cache() -> None:
+    """Clear cached functions before each test."""
+    helpers._get_github_event_data.cache_clear()
+    helpers._get_changed_files_github_actions.cache_clear()
+
+
 @pytest.mark.parametrize(
     ("github_ref", "expected_pr_number"),
     [