Merge remote-tracking branch 'upstream/dev' into integration
.github/workflows/auto-label-pr.yml (vendored, 15 changed lines)
@@ -416,7 +416,7 @@ jobs:
             }

             // Generate review messages
-            function generateReviewMessages(finalLabels) {
+            function generateReviewMessages(finalLabels, originalLabelCount) {
               const messages = [];
               const prAuthor = context.payload.pull_request.user.login;

@@ -430,15 +430,15 @@ jobs:
                 .reduce((sum, file) => sum + (file.deletions || 0), 0);
               const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);

-              const tooManyLabels = finalLabels.length > MAX_LABELS;
+              const tooManyLabels = originalLabelCount > MAX_LABELS;
               const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;

               let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;

               if (tooManyLabels && tooManyChanges) {
-                message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${finalLabels.length} different components/areas.`;
+                message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
               } else if (tooManyLabels) {
-                message += `This PR affects ${finalLabels.length} different components/areas.`;
+                message += `This PR affects ${originalLabelCount} different components/areas.`;
               } else {
                 message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
               }

@@ -466,8 +466,8 @@ jobs:
             }

             // Handle reviews
-            async function handleReviews(finalLabels) {
-              const reviewMessages = generateReviewMessages(finalLabels);
+            async function handleReviews(finalLabels, originalLabelCount) {
+              const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount);
               const hasReviewableLabels = finalLabels.some(label =>
                 ['too-big', 'needs-codeowners'].includes(label)
               );

@@ -627,6 +627,7 @@ jobs:

             // Handle too many labels (only for non-mega PRs)
             const tooManyLabels = finalLabels.length > MAX_LABELS;
+            const originalLabelCount = finalLabels.length;

             if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
               finalLabels = ['too-big'];

@@ -635,7 +636,7 @@ jobs:
             console.log('Computed labels:', finalLabels.join(', '));

             // Handle reviews
-            await handleReviews(finalLabels);
+            await handleReviews(finalLabels, originalLabelCount);

             // Apply labels
             if (finalLabels.length > 0) {
.github/workflows/ci.yml (vendored, 6 changed lines)
@@ -114,7 +114,7 @@ jobs:
       matrix:
         python-version:
           - "3.11"
-          - "3.14"
+          - "3.13"
         os:
           - ubuntu-latest
           - macOS-latest
@@ -123,9 +123,9 @@ jobs:
          # Minimize CI resource usage
          # by only running the Python version
          # version used for docker images on Windows and macOS
-          - python-version: "3.14"
+          - python-version: "3.13"
            os: windows-latest
-          - python-version: "3.14"
+          - python-version: "3.13"
            os: macOS-latest
    runs-on: ${{ matrix.os }}
    needs:
@@ -60,8 +60,6 @@ class FanCall {
     this->speed_ = speed;
     return *this;
   }
-  ESPDEPRECATED("set_speed() with string argument is deprecated, use integer argument instead.", "2021.9")
-  FanCall &set_speed(const char *legacy_speed);
   optional<int> get_speed() const { return this->speed_; }
   FanCall &set_direction(FanDirection direction) {
     this->direction_ = direction;
@@ -12,7 +12,6 @@ from esphome.const import (
     CONF_ON_ERROR,
     CONF_ON_RESPONSE,
     CONF_TIMEOUT,
-    CONF_TRIGGER_ID,
     CONF_URL,
     CONF_WATCHDOG_TIMEOUT,
     PLATFORM_HOST,
@@ -216,16 +215,8 @@ HTTP_REQUEST_ACTION_SCHEMA = cv.Schema(
            f"{CONF_VERIFY_SSL} has moved to the base component configuration."
        ),
        cv.Optional(CONF_CAPTURE_RESPONSE, default=False): cv.boolean,
-        cv.Optional(CONF_ON_RESPONSE): automation.validate_automation(
-            {cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(HttpRequestResponseTrigger)}
-        ),
-        cv.Optional(CONF_ON_ERROR): automation.validate_automation(
-            {
-                cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(
-                    automation.Trigger.template()
-                )
-            }
-        ),
+        cv.Optional(CONF_ON_RESPONSE): automation.validate_automation(single=True),
+        cv.Optional(CONF_ON_ERROR): automation.validate_automation(single=True),
        cv.Optional(CONF_MAX_RESPONSE_BUFFER_SIZE, default="1kB"): cv.validate_bytes,
    }
)
@@ -280,7 +271,12 @@ async def http_request_action_to_code(config, action_id, template_arg, args):
    template_ = await cg.templatable(config[CONF_URL], args, cg.std_string)
    cg.add(var.set_url(template_))
    cg.add(var.set_method(config[CONF_METHOD]))
-    cg.add(var.set_capture_response(config[CONF_CAPTURE_RESPONSE]))
+
+    capture_response = config[CONF_CAPTURE_RESPONSE]
+    if capture_response:
+        cg.add(var.set_capture_response(capture_response))
+        cg.add_define("USE_HTTP_REQUEST_RESPONSE")
+
    cg.add(var.set_max_response_buffer_size(config[CONF_MAX_RESPONSE_BUFFER_SIZE]))

    if CONF_BODY in config:
@@ -303,21 +299,26 @@ async def http_request_action_to_code(config, action_id, template_arg, args):
    for value in config.get(CONF_COLLECT_HEADERS, []):
        cg.add(var.add_collect_header(value))

-    for conf in config.get(CONF_ON_RESPONSE, []):
-        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID])
-        cg.add(var.register_response_trigger(trigger))
-        await automation.build_automation(
-            trigger,
-            [
-                (cg.std_shared_ptr.template(HttpContainer), "response"),
-                (cg.std_string_ref, "body"),
-                *args,
-            ],
-            conf,
-        )
-    for conf in config.get(CONF_ON_ERROR, []):
-        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID])
-        cg.add(var.register_error_trigger(trigger))
-        await automation.build_automation(trigger, [], conf)
+    if response_conf := config.get(CONF_ON_RESPONSE):
+        if capture_response:
+            await automation.build_automation(
+                var.get_success_trigger_with_response(),
+                [
+                    (cg.std_shared_ptr.template(HttpContainer), "response"),
+                    (cg.std_string_ref, "body"),
+                    *args,
+                ],
+                response_conf,
+            )
+        else:
+            await automation.build_automation(
+                var.get_success_trigger(),
+                [(cg.std_shared_ptr.template(HttpContainer), "response"), *args],
+                response_conf,
+            )
+
+    if error_conf := config.get(CONF_ON_ERROR):
+        await automation.build_automation(var.get_error_trigger(), args, error_conf)

    return var

@@ -183,7 +183,9 @@ template<typename... Ts> class HttpRequestSendAction : public Action<Ts...> {
  TEMPLATABLE_VALUE(std::string, url)
  TEMPLATABLE_VALUE(const char *, method)
  TEMPLATABLE_VALUE(std::string, body)
+#ifdef USE_HTTP_REQUEST_RESPONSE
  TEMPLATABLE_VALUE(bool, capture_response)
+#endif

  void add_request_header(const char *key, TemplatableValue<const char *, Ts...> value) {
    this->request_headers_.insert({key, value});
@@ -195,9 +197,14 @@ template<typename... Ts> class HttpRequestSendAction : public Action<Ts...> {

  void set_json(std::function<void(Ts..., JsonObject)> json_func) { this->json_func_ = json_func; }

-  void register_response_trigger(HttpRequestResponseTrigger *trigger) { this->response_triggers_.push_back(trigger); }
+#ifdef USE_HTTP_REQUEST_RESPONSE
+  Trigger<std::shared_ptr<HttpContainer>, std::string &, Ts...> *get_success_trigger_with_response() const {
+    return this->success_trigger_with_response_;
+  }
+#endif
+  Trigger<std::shared_ptr<HttpContainer>, Ts...> *get_success_trigger() const { return this->success_trigger_; }

-  void register_error_trigger(Trigger<> *trigger) { this->error_triggers_.push_back(trigger); }
+  Trigger<Ts...> *get_error_trigger() const { return this->error_trigger_; }

  void set_max_response_buffer_size(size_t max_response_buffer_size) {
    this->max_response_buffer_size_ = max_response_buffer_size;
@@ -228,17 +235,20 @@ template<typename... Ts> class HttpRequestSendAction : public Action<Ts...> {
    auto container = this->parent_->start(this->url_.value(x...), this->method_.value(x...), body, request_headers,
                                          this->collect_headers_);

+    auto captured_args = std::make_tuple(x...);
+
    if (container == nullptr) {
-      for (auto *trigger : this->error_triggers_)
-        trigger->trigger();
+      std::apply([this](Ts... captured_args_inner) { this->error_trigger_->trigger(captured_args_inner...); },
+                 captured_args);
      return;
    }

    size_t content_length = container->content_length;
    size_t max_length = std::min(content_length, this->max_response_buffer_size_);

-    std::string response_body;
+#ifdef USE_HTTP_REQUEST_RESPONSE
    if (this->capture_response_.value(x...)) {
+      std::string response_body;
      RAMAllocator<uint8_t> allocator;
      uint8_t *buf = allocator.allocate(max_length);
      if (buf != nullptr) {
@@ -253,19 +263,17 @@ template<typename... Ts> class HttpRequestSendAction : public Action<Ts...> {
        response_body.assign((char *) buf, read_index);
        allocator.deallocate(buf, max_length);
      }
-    }
-
-    if (this->response_triggers_.size() == 1) {
-      // if there is only one trigger, no need to copy the response body
-      this->response_triggers_[0]->process(container, response_body);
-    } else {
-      for (auto *trigger : this->response_triggers_) {
-        // with multiple triggers, pass a copy of the response body to each
-        // one so that modifications made in one trigger are not visible to
-        // the others
-        auto response_body_copy = std::string(response_body);
-        trigger->process(container, response_body_copy);
-      }
-    }
+      std::apply(
+          [this, &container, &response_body](Ts... captured_args_inner) {
+            this->success_trigger_with_response_->trigger(container, response_body, captured_args_inner...);
+          },
+          captured_args);
+    } else
+#endif
+    {
+      std::apply([this, &container](
+                     Ts... captured_args_inner) { this->success_trigger_->trigger(container, captured_args_inner...); },
+                 captured_args);
+    }
    container->end();
  }
@@ -283,8 +291,13 @@ template<typename... Ts> class HttpRequestSendAction : public Action<Ts...> {
  std::set<std::string> collect_headers_{"content-type", "content-length"};
  std::map<const char *, TemplatableValue<std::string, Ts...>> json_{};
  std::function<void(Ts..., JsonObject)> json_func_{nullptr};
-  std::vector<HttpRequestResponseTrigger *> response_triggers_{};
-  std::vector<Trigger<> *> error_triggers_{};
+#ifdef USE_HTTP_REQUEST_RESPONSE
+  Trigger<std::shared_ptr<HttpContainer>, std::string &, Ts...> *success_trigger_with_response_ =
+      new Trigger<std::shared_ptr<HttpContainer>, std::string &, Ts...>();
+#endif
+  Trigger<std::shared_ptr<HttpContainer>, Ts...> *success_trigger_ =
+      new Trigger<std::shared_ptr<HttpContainer>, Ts...>();
+  Trigger<Ts...> *error_trigger_ = new Trigger<Ts...>();

  size_t max_response_buffer_size_{SIZE_MAX};
};
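Taken together, the http_request changes above replace the registered trigger lists with a single success/error trigger pair and only compile the body-capturing path when capture_response is actually used (gated by USE_HTTP_REQUEST_RESPONSE). A minimal configuration sketch of the new single-automation on_response/on_error surface, assembled from the test YAML further down in this commit; the logger format string and JSON payload are illustrative only:

esphome:
  on_boot:
    then:
      - http_request.post:
          url: https://esphome.io
          json:
            key: value
          capture_response: true
          on_response:
            then:
              - logger.log:
                  format: "Captured response status: %d, Body: %s"
                  args:
                    - response->status_code
                    - body.c_str()
          on_error:
            logger.log: "Request failed"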
@@ -173,14 +173,34 @@ def uart_selection(value):
    raise NotImplementedError


-def validate_local_no_higher_than_global(value):
-    global_level = LOG_LEVEL_SEVERITY.index(value[CONF_LEVEL])
-    for tag, level in value.get(CONF_LOGS, {}).items():
-        if LOG_LEVEL_SEVERITY.index(level) > global_level:
-            raise cv.Invalid(
-                f"The configured log level for {tag} ({level}) must be no more severe than the global log level {value[CONF_LEVEL]}."
-            )
-    return value
+def validate_local_no_higher_than_global(config):
+    global_level = config[CONF_LEVEL]
+    global_level_index = LOG_LEVEL_SEVERITY.index(global_level)
+    errs = []
+    for tag, level in config.get(CONF_LOGS, {}).items():
+        if LOG_LEVEL_SEVERITY.index(level) > global_level_index:
+            errs.append(
+                cv.Invalid(
+                    f"The configured log level for {tag} ({level}) must not be less severe than the global log level ({global_level})",
+                    [CONF_LOGS, tag],
+                )
+            )
+    if errs:
+        raise cv.MultipleInvalid(errs)
+    return config
+
+
+def validate_initial_no_higher_than_global(config):
+    if initial_level := config.get(CONF_INITIAL_LEVEL):
+        global_level = config[CONF_LEVEL]
+        if LOG_LEVEL_SEVERITY.index(initial_level) > LOG_LEVEL_SEVERITY.index(
+            global_level
+        ):
+            raise cv.Invalid(
+                f"The initial log level ({initial_level}) must not be less severe than the global log level ({global_level})",
+                [CONF_INITIAL_LEVEL],
+            )
+    return config


 Logger = logger_ns.class_("Logger", cg.Component)
@@ -263,6 +283,7 @@ CONFIG_SCHEMA = cv.All(
        }
    ).extend(cv.COMPONENT_SCHEMA),
    validate_local_no_higher_than_global,
+    validate_initial_no_higher_than_global,
)
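The rewritten validators above collect every offending per-tag entry and raise them together as cv.MultipleInvalid with a path to the bad key, and the new validate_initial_no_higher_than_global applies the same rule to initial_level. A hedged sketch of a logger block that passes both checks; the tag names are illustrative:

logger:
  level: DEBUG
  initial_level: INFO   # must not be less severe (more verbose) than the global level above
  logs:
    wifi: WARN          # per-tag levels may only be equal to or quieter than the global level
    sensor: INFO        # a VERBOSE entry here would now be rejected at [logs, sensor]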
@@ -384,6 +384,18 @@ class DriverChip:
            transform[CONF_TRANSFORM] = True
        return transform

+    def swap_xy_schema(self):
+        uses_swap = self.get_default(CONF_SWAP_XY, None) != cv.UNDEFINED
+
+        def validator(value):
+            if value:
+                raise cv.Invalid("Axis swapping not supported by this model")
+            return cv.boolean(value)
+
+        if uses_swap:
+            return {cv.Required(CONF_SWAP_XY): cv.boolean}
+        return {cv.Optional(CONF_SWAP_XY, default=False): validator}
+
    def add_madctl(self, sequence: list, config: dict):
        # Add the MADCTL command to the sequence based on the configuration.
        use_flip = config.get(CONF_USE_AXIS_FLIPS)
@@ -46,6 +46,7 @@ from esphome.const import (
    CONF_DATA_RATE,
    CONF_DC_PIN,
    CONF_DIMENSIONS,
+    CONF_DISABLED,
    CONF_ENABLE_PIN,
    CONF_GREEN,
    CONF_HSYNC_PIN,
@@ -117,16 +118,16 @@ def data_pin_set(length):

 def model_schema(config):
    model = MODELS[config[CONF_MODEL].upper()]
    if transforms := model.transforms:
-        transform = cv.Schema({cv.Required(x): cv.boolean for x in transforms})
-        for x in (CONF_SWAP_XY, CONF_MIRROR_X, CONF_MIRROR_Y):
-            if x not in transforms:
-                transform = transform.extend(
-                    {cv.Optional(x): cv.invalid(f"{x} not supported by this model")}
-                )
+        transform = cv.Any(
+            cv.Schema(
+                {
+                    cv.Required(CONF_MIRROR_X): cv.boolean,
+                    cv.Required(CONF_MIRROR_Y): cv.boolean,
+                    **model.swap_xy_schema(),
+                }
+            ),
+            cv.one_of(CONF_DISABLED, lower=True),
+        )
    else:
        transform = cv.invalid("This model does not support transforms")

    # RPI model does not use an init sequence, indicates with empty list
    if model.initsequence is None:
        # Custom model requires an init sequence
@@ -135,12 +136,16 @@ def model_schema(config):
    else:
        iseqconf = cv.Optional(CONF_INIT_SEQUENCE)
    uses_spi = CONF_INIT_SEQUENCE in config or len(model.initsequence) != 0
-    swap_xy = config.get(CONF_TRANSFORM, {}).get(CONF_SWAP_XY, False)

-    # Dimensions are optional if the model has a default width and the swap_xy transform is not overridden
-    cv_dimensions = (
-        cv.Optional if model.get_default(CONF_WIDTH) and not swap_xy else cv.Required
+    # Dimensions are optional if the model has a default width and the x-y transform is not overridden
+    transform_config = config.get(CONF_TRANSFORM, {})
+    is_swapped = (
+        isinstance(transform_config, dict)
+        and transform_config.get(CONF_SWAP_XY, False) is True
+    )
+    cv_dimensions = (
+        cv.Optional if model.get_default(CONF_WIDTH) and not is_swapped else cv.Required
    )

    pixel_modes = (PIXEL_MODE_16BIT, PIXEL_MODE_18BIT, "16", "18")
    schema = display.FULL_DISPLAY_SCHEMA.extend(
        {
@@ -157,7 +162,7 @@ def model_schema(config):
            model.option(CONF_PIXEL_MODE, PIXEL_MODE_16BIT): cv.one_of(
                *pixel_modes, lower=True
            ),
-            model.option(CONF_TRANSFORM, cv.UNDEFINED): transform,
+            cv.Optional(CONF_TRANSFORM): transform,
            cv.Required(CONF_MODEL): cv.one_of(model.name, upper=True),
            model.option(CONF_INVERT_COLORS, False): cv.boolean,
            model.option(CONF_USE_AXIS_FLIPS, True): cv.boolean,
@@ -270,7 +275,6 @@ async def to_code(config):
    cg.add(var.set_vsync_front_porch(config[CONF_VSYNC_FRONT_PORCH]))
    cg.add(var.set_pclk_inverted(config[CONF_PCLK_INVERTED]))
    cg.add(var.set_pclk_frequency(config[CONF_PCLK_FREQUENCY]))
-    index = 0
    dpins = []
    if CONF_RED in config[CONF_DATA_PINS]:
        red_pins = config[CONF_DATA_PINS][CONF_RED]
@@ -131,19 +131,6 @@ def denominator(config):
            ) from StopIteration


-def swap_xy_schema(model):
-    uses_swap = model.get_default(CONF_SWAP_XY, None) != cv.UNDEFINED
-
-    def validator(value):
-        if value:
-            raise cv.Invalid("Axis swapping not supported by this model")
-        return cv.boolean(value)
-
-    if uses_swap:
-        return {cv.Required(CONF_SWAP_XY): cv.boolean}
-    return {cv.Optional(CONF_SWAP_XY, default=False): validator}
-
-
 def model_schema(config):
    model = MODELS[config[CONF_MODEL]]
    bus_mode = config[CONF_BUS_MODE]
@@ -152,7 +139,7 @@ def model_schema(config):
                {
                    cv.Required(CONF_MIRROR_X): cv.boolean,
                    cv.Required(CONF_MIRROR_Y): cv.boolean,
-                    **swap_xy_schema(model),
+                    **model.swap_xy_schema(),
                }
            ),
            cv.one_of(CONF_DISABLED, lower=True),
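With swap_xy_schema now a DriverChip method shared by the MIPI display platforms, models without axis-swap support only accept swap_xy: false, and the whole transform block may alternatively be set to disabled (cv.one_of(CONF_DISABLED, lower=True)). A hedged sketch of the accepted forms; the platform choice, model name and omitted required options (pins, dimensions) are assumptions for illustration only:

display:
  - platform: mipi_spi        # assumption: one of the MIPI platforms using model_schema()
    model: SOME_MODEL         # placeholder model name
    transform:
      mirror_x: true
      mirror_y: false
      swap_xy: false          # models without axis-swap support only accept false here
    # transform: disabled     # alternative: turn the transform block off entirely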
@@ -102,7 +102,7 @@ CONFIG_SCHEMA = cv.Any(
            str: PACKAGE_SCHEMA,
        }
    ),
-    cv.ensure_list(PACKAGE_SCHEMA),
+    [PACKAGE_SCHEMA],
)
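Replacing cv.ensure_list(PACKAGE_SCHEMA) with [PACKAGE_SCHEMA] keeps the plain-list form of packages: valid alongside the named-mapping form, and the new tests below pin down the github:// shorthand accepted in both. A sketch of the two equivalent styles, reusing the illustrative repository URLs from those tests:

# named mapping form
packages:
  upstream: github://esphome/non-existant-repo/file1.yml@main

# bare list form
packages:
  - github://esphome/non-existant-repo/file1.yml@main
  - github://esphome/non-existant-repo/other-folder/file1.yml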
@@ -80,19 +80,12 @@ void TemplateAlarmControlPanel::dump_config() {
 }

 void TemplateAlarmControlPanel::setup() {
-  switch (this->restore_mode_) {
-    case ALARM_CONTROL_PANEL_ALWAYS_DISARMED:
-      this->current_state_ = ACP_STATE_DISARMED;
-      break;
-    case ALARM_CONTROL_PANEL_RESTORE_DEFAULT_DISARMED: {
-      uint8_t value;
-      this->pref_ = global_preferences->make_preference<uint8_t>(this->get_preference_hash());
-      if (this->pref_.load(&value)) {
-        this->current_state_ = static_cast<alarm_control_panel::AlarmControlPanelState>(value);
-      } else {
-        this->current_state_ = ACP_STATE_DISARMED;
-      }
-      break;
-    }
-  }
+  if (this->restore_mode_ == ALARM_CONTROL_PANEL_RESTORE_DEFAULT_DISARMED) {
+    uint8_t value;
+    this->pref_ = global_preferences->make_preference<uint8_t>(this->get_preference_hash());
+    if (this->pref_.load(&value)) {
+      this->current_state_ = static_cast<alarm_control_panel::AlarmControlPanelState>(value);
+    } else {
+      this->current_state_ = ACP_STATE_DISARMED;
+    }
+  }
   this->desired_state_ = this->current_state_;
@@ -119,15 +112,15 @@ void TemplateAlarmControlPanel::loop() {
    this->publish_state(ACP_STATE_TRIGGERED);
    return;
  }
-  auto future_state = this->current_state_;
+  auto next_state = this->current_state_;
  // reset triggered if all clear
  if (this->current_state_ == ACP_STATE_TRIGGERED && this->trigger_time_ > 0 &&
      (millis() - this->last_update_) > this->trigger_time_) {
-    future_state = this->desired_state_;
+    next_state = this->desired_state_;
  }

-  bool delayed_sensor_not_ready = false;
-  bool instant_sensor_not_ready = false;
+  bool delayed_sensor_faulted = false;
+  bool instant_sensor_faulted = false;

#ifdef USE_BINARY_SENSOR
  // Test all of the sensors in the list regardless of the alarm panel state
@@ -144,7 +137,7 @@ void TemplateAlarmControlPanel::loop() {
        // Record the sensor state change
        this->sensor_data_[sensor_info.second.store_index].last_chime_state = sensor_info.first->state;
      }
-      // Check for triggered sensors
+      // Check for faulted sensors
      if (sensor_info.first->state) {  // Sensor triggered?
        // Skip if auto bypassed
        if (std::count(this->bypassed_sensor_indicies_.begin(), this->bypassed_sensor_indicies_.end(),
@@ -163,42 +156,41 @@ void TemplateAlarmControlPanel::loop() {
        }

        switch (sensor_info.second.type) {
-          case ALARM_SENSOR_TYPE_INSTANT:
-            instant_sensor_not_ready = true;
-            break;
          case ALARM_SENSOR_TYPE_INSTANT_ALWAYS:
-            instant_sensor_not_ready = true;
-            future_state = ACP_STATE_TRIGGERED;
+            next_state = ACP_STATE_TRIGGERED;
+            [[fallthrough]];
+          case ALARM_SENSOR_TYPE_INSTANT:
+            instant_sensor_faulted = true;
            break;
          case ALARM_SENSOR_TYPE_DELAYED_FOLLOWER:
            // Look to see if we are in the pending state
            if (this->current_state_ == ACP_STATE_PENDING) {
-              delayed_sensor_not_ready = true;
+              delayed_sensor_faulted = true;
            } else {
-              instant_sensor_not_ready = true;
+              instant_sensor_faulted = true;
            }
            break;
          case ALARM_SENSOR_TYPE_DELAYED:
          default:
-            delayed_sensor_not_ready = true;
+            delayed_sensor_faulted = true;
        }
      }
    }
-    // Update all sensors not ready flag
-    this->sensors_ready_ = ((!instant_sensor_not_ready) && (!delayed_sensor_not_ready));
+    // Update all sensors ready flag
+    bool sensors_ready = !(instant_sensor_faulted || delayed_sensor_faulted);

    // Call the ready state change callback if there was a change
-    if (this->sensors_ready_ != this->sensors_ready_last_) {
+    if (this->sensors_ready_ != sensors_ready) {
+      this->sensors_ready_ = sensors_ready;
      this->ready_callback_.call();
-      this->sensors_ready_last_ = this->sensors_ready_;
    }

#endif
-    if (this->is_state_armed(future_state) && (!this->sensors_ready_)) {
+    if (this->is_state_armed(next_state) && (!this->sensors_ready_)) {
      // Instant sensors
-      if (instant_sensor_not_ready) {
+      if (instant_sensor_faulted) {
        this->publish_state(ACP_STATE_TRIGGERED);
-      } else if (delayed_sensor_not_ready) {
+      } else if (delayed_sensor_faulted) {
        // Delayed sensors
        if ((this->pending_time_ > 0) && (this->current_state_ != ACP_STATE_TRIGGERED)) {
          this->publish_state(ACP_STATE_PENDING);
@@ -206,8 +198,8 @@ void TemplateAlarmControlPanel::loop() {
          this->publish_state(ACP_STATE_TRIGGERED);
        }
      }
-  } else if (future_state != this->current_state_) {
-    this->publish_state(future_state);
+  } else if (next_state != this->current_state_) {
+    this->publish_state(next_state);
  }
}
@@ -234,8 +226,6 @@ uint32_t TemplateAlarmControlPanel::get_supported_features() const {
  return features;
}

-bool TemplateAlarmControlPanel::get_requires_code() const { return !this->codes_.empty(); }
-
void TemplateAlarmControlPanel::arm_(optional<std::string> code, alarm_control_panel::AlarmControlPanelState state,
                                     uint32_t delay) {
  if (this->current_state_ != ACP_STATE_DISARMED) {
@@ -258,9 +248,9 @@ void TemplateAlarmControlPanel::arm_(optional<std::string> code, alarm_control_p
void TemplateAlarmControlPanel::bypass_before_arming() {
#ifdef USE_BINARY_SENSOR
  for (auto sensor_info : this->sensor_map_) {
-    // Check for sensors left on and set to bypass automatically and remove them from monitoring
+    // Check for faulted bypass_auto sensors and remove them from monitoring
    if ((sensor_info.second.flags & BINARY_SENSOR_MODE_BYPASS_AUTO) && (sensor_info.first->state)) {
-      ESP_LOGW(TAG, "'%s' is left on and will be automatically bypassed", sensor_info.first->get_name().c_str());
+      ESP_LOGW(TAG, "'%s' is faulted and will be automatically bypassed", sensor_info.first->get_name().c_str());
      this->bypassed_sensor_indicies_.push_back(sensor_info.second.store_index);
    }
  }
@@ -56,7 +56,7 @@ class TemplateAlarmControlPanel : public alarm_control_panel::AlarmControlPanel,
  void setup() override;
  void loop() override;
  uint32_t get_supported_features() const override;
-  bool get_requires_code() const override;
+  bool get_requires_code() const override { return !this->codes_.empty(); }
  bool get_requires_code_to_arm() const override { return this->requires_code_to_arm_; }
  bool get_all_sensors_ready() { return this->sensors_ready_; };
  void set_restore_mode(TemplateAlarmControlPanelRestoreMode restore_mode) { this->restore_mode_ = restore_mode; }
@@ -66,7 +66,8 @@ class TemplateAlarmControlPanel : public alarm_control_panel::AlarmControlPanel,
  /** Add a binary_sensor to the alarm_panel.
   *
   * @param sensor The BinarySensor instance.
-   * @param ignore_when_home if this should be ignored when armed_home mode
+   * @param flags The OR of BinarySensorFlags for the sensor.
+   * @param type The sensor type which determines its triggering behaviour.
   */
  void add_sensor(binary_sensor::BinarySensor *sensor, uint16_t flags = 0,
                  AlarmSensorType type = ALARM_SENSOR_TYPE_DELAYED);
@@ -121,7 +122,7 @@ class TemplateAlarmControlPanel : public alarm_control_panel::AlarmControlPanel,
 protected:
  void control(const alarm_control_panel::AlarmControlPanelCall &call) override;
#ifdef USE_BINARY_SENSOR
-  // This maps a binary sensor to its type and attribute bits
+  // This maps a binary sensor to its alarm specific info
  std::map<binary_sensor::BinarySensor *, SensorInfo> sensor_map_;
  // a list of automatically bypassed sensors
  std::vector<uint8_t> bypassed_sensor_indicies_;
@@ -147,7 +148,6 @@ class TemplateAlarmControlPanel : public alarm_control_panel::AlarmControlPanel,
  bool supports_arm_home_ = false;
  bool supports_arm_night_ = false;
  bool sensors_ready_ = false;
-  bool sensors_ready_last_ = false;
  uint8_t next_store_index_ = 0;
  // check if the code is valid
  bool is_code_valid_(optional<std::string> code);
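The setup() change above only reads stored state back when the restore mode asks for it, and get_requires_code() is now answered inline from the configured codes. A hedged configuration sketch of a template alarm panel exercising those options; the name, code and binary_sensor id are placeholders:

alarm_control_panel:
  - platform: template
    name: House Alarm
    codes:
      - "1234"                                # non-empty codes make get_requires_code() return true
    restore_mode: RESTORE_DEFAULT_DISARMED    # state is loaded from preferences on boot
    # restore_mode: ALWAYS_DISARMED           # alternative: always boot disarmed
    binary_sensors:
      - input: door_sensor                    # placeholder binary_sensor id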
@@ -446,7 +446,7 @@ async def uart_write_to_code(config, action_id, template_arg, args):
        templ = await cg.templatable(data, args, cg.std_vector.template(cg.uint8))
        cg.add(var.set_data_template(templ))
    else:
-        cg.add(var.set_data_static(data))
+        cg.add(var.set_data_static(cg.ArrayInitializer(*data)))
    return var

@@ -14,8 +14,12 @@ template<typename... Ts> class UARTWriteAction : public Action<Ts...>, public Pa
    this->data_func_ = func;
    this->static_ = false;
  }
-  void set_data_static(const std::vector<uint8_t> &data) {
-    this->data_static_ = data;
+  void set_data_static(std::vector<uint8_t> &&data) {
+    this->data_static_ = std::move(data);
+    this->static_ = true;
+  }
+  void set_data_static(std::initializer_list<uint8_t> data) {
+    this->data_static_ = std::vector<uint8_t>(data);
    this->static_ = true;
  }

@@ -33,4 +33,4 @@ async def to_code(config):
    data = config[CONF_DATA]
    if isinstance(data, bytes):
        data = [HexInt(x) for x in data]
-    cg.add(var.set_data(data))
+    cg.add(var.set_data(cg.ArrayInitializer(*data)))

@@ -11,7 +11,8 @@ namespace uart {

 class UARTButton : public button::Button, public UARTDevice, public Component {
  public:
-  void set_data(const std::vector<uint8_t> &data) { this->data_ = data; }
+  void set_data(std::vector<uint8_t> &&data) { this->data_ = std::move(data); }
+  void set_data(std::initializer_list<uint8_t> data) { this->data_ = std::vector<uint8_t>(data); }

  void dump_config() override;

@@ -44,16 +44,16 @@ async def to_code(config):
        if data_on := data.get(CONF_TURN_ON):
            if isinstance(data_on, bytes):
                data_on = [HexInt(x) for x in data_on]
-            cg.add(var.set_data_on(data_on))
+            cg.add(var.set_data_on(cg.ArrayInitializer(*data_on)))
        if data_off := data.get(CONF_TURN_OFF):
            if isinstance(data_off, bytes):
                data_off = [HexInt(x) for x in data_off]
-            cg.add(var.set_data_off(data_off))
+            cg.add(var.set_data_off(cg.ArrayInitializer(*data_off)))
    else:
        data = config[CONF_DATA]
        if isinstance(data, bytes):
            data = [HexInt(x) for x in data]
-        cg.add(var.set_data_on(data))
+        cg.add(var.set_data_on(cg.ArrayInitializer(*data)))
        cg.add(var.set_single_state(True))
    if CONF_SEND_EVERY in config:
        cg.add(var.set_send_every(config[CONF_SEND_EVERY]))
@@ -14,8 +14,10 @@ class UARTSwitch : public switch_::Switch, public UARTDevice, public Component {
 public:
  void loop() override;

-  void set_data_on(const std::vector<uint8_t> &data) { this->data_on_ = data; }
-  void set_data_off(const std::vector<uint8_t> &data) { this->data_off_ = data; }
+  void set_data_on(std::vector<uint8_t> &&data) { this->data_on_ = std::move(data); }
+  void set_data_on(std::initializer_list<uint8_t> data) { this->data_on_ = std::vector<uint8_t>(data); }
+  void set_data_off(std::vector<uint8_t> &&data) { this->data_off_ = std::move(data); }
+  void set_data_off(std::initializer_list<uint8_t> data) { this->data_off_ = std::vector<uint8_t>(data); }
  void set_send_every(uint32_t send_every) { this->send_every_ = send_every; }
  void set_single_state(bool single) { this->single_state_ = single; }
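The codegen above now passes the byte payloads through cg.ArrayInitializer, so the generated C++ hands a brace-initializer list to the new rvalue/initializer_list setters instead of copying a temporary std::vector. The YAML surface is unchanged; a sketch of the uart switch and button payloads involved, with illustrative pins and bytes:

uart:
  tx_pin: GPIO1
  rx_pin: GPIO3
  baud_rate: 9600

switch:
  - platform: uart
    name: UART Switch
    data:
      turn_on: [0xA0, 0x01, 0x01, 0xA2]
      turn_off: [0xA0, 0x01, 0x00, 0xA1]

button:
  - platform: uart
    name: UART Button
    data: [0xDE, 0xAD, 0xBE, 0xEF]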
@@ -46,40 +46,58 @@ uint16_t LibreTinyUARTComponent::get_config() {
 }

 void LibreTinyUARTComponent::setup() {
-  if (this->rx_pin_) {
-    this->rx_pin_->setup();
-  }
-  if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
-    this->tx_pin_->setup();
-  }
-
   int8_t tx_pin = tx_pin_ == nullptr ? -1 : tx_pin_->get_pin();
   int8_t rx_pin = rx_pin_ == nullptr ? -1 : rx_pin_->get_pin();
   bool tx_inverted = tx_pin_ != nullptr && tx_pin_->is_inverted();
   bool rx_inverted = rx_pin_ != nullptr && rx_pin_->is_inverted();

+  auto shouldFallbackToSoftwareSerial = [&]() -> bool {
+    auto hasFlags = [](InternalGPIOPin *pin, const gpio::Flags mask) -> bool {
+      return pin && pin->get_flags() & mask != gpio::Flags::FLAG_NONE;
+    };
+    if (hasFlags(this->tx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN) ||
+        hasFlags(this->rx_pin_, gpio::Flags::FLAG_OPEN_DRAIN | gpio::Flags::FLAG_PULLUP | gpio::Flags::FLAG_PULLDOWN)) {
+#if LT_ARD_HAS_SOFTSERIAL
+      ESP_LOGI(TAG, "Pins has flags set. Using Software Serial");
+      return true;
+#else
+      ESP_LOGW(TAG, "Pin flags are set but not supported for hardware serial. Ignoring");
+#endif
+    }
+    return false;
+  };
+
   if (false)
     return;
 #if LT_HW_UART0
-  else if ((tx_pin == -1 || tx_pin == PIN_SERIAL0_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL0_RX)) {
+  else if ((tx_pin == -1 || tx_pin == PIN_SERIAL0_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL0_RX) &&
+           !shouldFallbackToSoftwareSerial()) {
     this->serial_ = &Serial0;
     this->hardware_idx_ = 0;
   }
 #endif
 #if LT_HW_UART1
-  else if ((tx_pin == -1 || tx_pin == PIN_SERIAL1_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL1_RX)) {
+  else if ((tx_pin == -1 || tx_pin == PIN_SERIAL1_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL1_RX) &&
+           !shouldFallbackToSoftwareSerial()) {
     this->serial_ = &Serial1;
     this->hardware_idx_ = 1;
   }
 #endif
 #if LT_HW_UART2
-  else if ((tx_pin == -1 || tx_pin == PIN_SERIAL2_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL2_RX)) {
+  else if ((tx_pin == -1 || tx_pin == PIN_SERIAL2_TX) && (rx_pin == -1 || rx_pin == PIN_SERIAL2_RX) &&
+           !shouldFallbackToSoftwareSerial()) {
     this->serial_ = &Serial2;
     this->hardware_idx_ = 2;
   }
 #endif
   else {
 #if LT_ARD_HAS_SOFTSERIAL
+    if (this->rx_pin_) {
+      this->rx_pin_->setup();
+    }
+    if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
+      this->tx_pin_->setup();
+    }
     this->serial_ = new SoftwareSerial(rx_pin, tx_pin, rx_inverted || tx_inverted);
 #else
     this->serial_ = &Serial;
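The new shouldFallbackToSoftwareSerial() check above routes a UART to SoftwareSerial (when LT_ARD_HAS_SOFTSERIAL is available) whenever open-drain, pull-up or pull-down flags are requested on its pins, since the hardware UARTs ignore those flags. A hedged sketch of a LibreTiny uart whose pull-up request would now take that path; the pin numbers are illustrative:

uart:
  baud_rate: 9600
  tx_pin: GPIO15
  rx_pin:
    number: GPIO14
    mode:
      input: true
      pullup: true    # pin flag that now triggers the software-serial fallback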
@@ -187,6 +187,7 @@
 #define ESPHOME_ESP32_BLE_GATTS_EVENT_HANDLER_COUNT 1
 #define ESPHOME_ESP32_BLE_BLE_STATUS_EVENT_HANDLER_COUNT 2
 #define USE_ESP32_CAMERA_JPEG_ENCODER
+#define USE_HTTP_REQUEST_RESPONSE
 #define USE_I2C
 #define USE_IMPROV
 #define USE_ESP32_IMPROV_NEXT_URL
@@ -237,6 +238,7 @@
 #define USE_CAPTIVE_PORTAL
 #define USE_ESP8266_PREFERENCES_FLASH
 #define USE_HTTP_REQUEST_ESP8266_HTTPS
+#define USE_HTTP_REQUEST_RESPONSE
 #define USE_I2C
 #define USE_SOCKET_IMPL_LWIP_TCP

@@ -257,6 +259,7 @@

 #ifdef USE_RP2040
 #define USE_ARDUINO_VERSION_CODE VERSION_CODE(3, 3, 0)
+#define USE_HTTP_REQUEST_RESPONSE
 #define USE_I2C
 #define USE_LOGGER_USB_CDC
 #define USE_SOCKET_IMPL_LWIP_TCP
@@ -273,6 +276,7 @@
 #endif

 #ifdef USE_HOST
+#define USE_HTTP_REQUEST_RESPONSE
 #define USE_SOCKET_IMPL_BSD_SOCKETS
 #define USE_SOCKET_SELECT_SUPPORT
 #endif
@@ -12,7 +12,7 @@ platformio==6.1.18  # When updating platformio, also update /docker/Dockerfile
 esptool==5.1.0
 click==8.1.7
 esphome-dashboard==20251013.0
-aioesphomeapi==42.3.0
+aioesphomeapi==42.4.0
 zeroconf==0.148.0
 puremagic==1.30
 ruamel.yaml==0.18.16  # dashboard_import
@@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch

 import pytest

-from esphome.components.packages import do_packages_pass
+from esphome.components.packages import CONFIG_SCHEMA, do_packages_pass
 from esphome.config import resolve_extend_remove
 from esphome.config_helpers import Extend, Remove
 import esphome.config_validation as cv
@@ -94,6 +94,50 @@ def test_package_invalid_dict(basic_esphome, basic_wifi):
        packages_pass(config)


+@pytest.mark.parametrize(
+    "package",
+    [
+        {"package1": "github://esphome/non-existant-repo/file1.yml@main"},
+        {"package2": "github://esphome/non-existant-repo/file1.yml"},
+        {"package3": "github://esphome/non-existant-repo/other-folder/file1.yml"},
+        [
+            "github://esphome/non-existant-repo/file1.yml@main",
+            "github://esphome/non-existant-repo/file1.yml",
+            "github://esphome/non-existant-repo/other-folder/file1.yml",
+        ],
+    ],
+)
+def test_package_shorthand(package):
+    CONFIG_SCHEMA(package)
+
+
+@pytest.mark.parametrize(
+    "package",
+    [
+        # not github
+        {"package1": "someplace://esphome/non-existant-repo/file1.yml@main"},
+        # missing repo
+        {"package2": "github://esphome/file1.yml"},
+        # missing file
+        {"package3": "github://esphome/non-existant-repo/@main"},
+        {"a": "invalid string, not shorthand"},
+        "some string",
+        3,
+        False,
+        {"a": 8},
+        ["someplace://esphome/non-existant-repo/file1.yml@main"],
+        ["github://esphome/file1.yml"],
+        ["github://esphome/non-existant-repo/@main"],
+        ["some string"],
+        [True],
+        [3],
+    ],
+)
+def test_package_invalid(package):
+    with pytest.raises(cv.Invalid):
+        CONFIG_SCHEMA(package)
+
+
 def test_package_include(basic_wifi, basic_esphome):
    """
    Tests the simple case where an independent config present in a package is added to the top-level config as is.
@@ -4,51 +4,6 @@ wifi:
  ssid: MySSID
  password: password1

-esphome:
-  on_boot:
-    then:
-      - http_request.get:
-          url: https://esphome.io
-          request_headers:
-            Content-Type: application/json
-          collect_headers:
-            - age
-          on_error:
-            logger.log: "Request failed"
-          on_response:
-            then:
-              - logger.log:
-                  format: "Response status: %d, Duration: %lu ms, age: %s"
-                  args:
-                    - response->status_code
-                    - (long) response->duration_ms
-                    - response->get_response_header("age").c_str()
-      - http_request.post:
-          url: https://esphome.io
-          request_headers:
-            Content-Type: application/json
-          json:
-            key: value
-      - http_request.send:
-          method: PUT
-          url: https://esphome.io
-          request_headers:
-            Content-Type: application/json
-          body: "Some data"
-
-http_request:
-  useragent: esphome/tagreader
-  timeout: 10s
-  verify_ssl: ${verify_ssl}
-
-script:
-  - id: does_not_compile
-    parameters:
-      api_url: string
-    then:
-      - http_request.get:
-          url: "http://google.com"
-
 ota:
   - platform: http_request
     id: http_request_ota
@@ -31,6 +31,20 @@ esphome:
          request_headers:
            Content-Type: application/json
          body: "Some data"
+      - http_request.post:
+          url: https://esphome.io
+          request_headers:
+            Content-Type: application/json
+          json:
+            key: value
+          capture_response: true
+          on_response:
+            then:
+              - logger.log:
+                  format: "Captured response status: %d, Body: %s"
+                  args:
+                    - response->status_code
+                    - body.c_str()

 http_request:
   useragent: esphome/tagreader