Mirror of https://github.com/esphome/esphome.git, synced 2026-02-10 17:51:53 +00:00

Compare commits: api-dedup-... ... app-loop-e (2 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 8f367571f8 | |
| | 1213774168 | |
```diff
@@ -219,8 +219,35 @@ void APIConnection::loop() {
     this->process_batch_();
   }
 
-  if (this->active_iterator_ != ActiveIterator::NONE) {
-    this->process_active_iterator_();
-  }
+  switch (this->active_iterator_) {
+    case ActiveIterator::LIST_ENTITIES:
+      if (this->iterator_storage_.list_entities.completed()) {
+        this->destroy_active_iterator_();
+        if (this->flags_.state_subscription) {
+          this->begin_iterator_(ActiveIterator::INITIAL_STATE);
+        }
+      } else {
+        this->process_iterator_batch_(this->iterator_storage_.list_entities);
+      }
+      break;
+    case ActiveIterator::INITIAL_STATE:
+      if (this->iterator_storage_.initial_state.completed()) {
+        this->destroy_active_iterator_();
+        // Process any remaining batched messages immediately
+        if (!this->deferred_batch_.empty()) {
+          this->process_batch_();
+        }
+        // Now that everything is sent, enable immediate sending for future state changes
+        this->flags_.should_try_send_immediately = true;
+        // Release excess memory from buffers that grew during initial sync
+        this->deferred_batch_.release_buffer();
+        this->helper_->release_buffers();
+      } else {
+        this->process_iterator_batch_(this->iterator_storage_.initial_state);
+      }
+      break;
+    case ActiveIterator::NONE:
+      break;
+  }
 
   if (this->flags_.sent_ping) {
```
```diff
@@ -256,49 +283,6 @@ void APIConnection::loop() {
 #endif
 }
 
-void APIConnection::process_active_iterator_() {
-  // Caller ensures active_iterator_ != NONE
-  if (this->active_iterator_ == ActiveIterator::LIST_ENTITIES) {
-    if (this->iterator_storage_.list_entities.completed()) {
-      this->destroy_active_iterator_();
-      if (this->flags_.state_subscription) {
-        this->begin_iterator_(ActiveIterator::INITIAL_STATE);
-      }
-    } else {
-      this->process_iterator_batch_(this->iterator_storage_.list_entities);
-    }
-  } else { // INITIAL_STATE
-    if (this->iterator_storage_.initial_state.completed()) {
-      this->destroy_active_iterator_();
-      // Process any remaining batched messages immediately
-      if (!this->deferred_batch_.empty()) {
-        this->process_batch_();
-      }
-      // Now that everything is sent, enable immediate sending for future state changes
-      this->flags_.should_try_send_immediately = true;
-      // Release excess memory from buffers that grew during initial sync
-      this->deferred_batch_.release_buffer();
-      this->helper_->release_buffers();
-    } else {
-      this->process_iterator_batch_(this->iterator_storage_.initial_state);
-    }
-  }
-}
-
-void APIConnection::process_iterator_batch_(ComponentIterator &iterator) {
-  size_t initial_size = this->deferred_batch_.size();
-  size_t max_batch = this->get_max_batch_size_();
-  while (!iterator.completed() && (this->deferred_batch_.size() - initial_size) < max_batch) {
-    iterator.advance();
-  }
-
-  // If the batch is full, process it immediately
-  // Note: iterator.advance() already calls schedule_batch_() via schedule_message_()
-  if (this->deferred_batch_.size() >= max_batch) {
-    this->process_batch_();
-  }
-}
-
 bool APIConnection::send_disconnect_response_() {
   // remote initiated disconnect_client
   // don't close yet, we still need to send the disconnect response
```
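The batching contract visible in both variants is the same: advancing an iterator only schedules messages into the deferred batch, and the helper flushes explicitly once a full batch has been accumulated rather than waiting for the next loop pass. Below is a small self-contained sketch of that schedule-then-flush-when-full idea; the names (DeferredBatch, EntityIterator, process_iterator_batch) are hypothetical stand-ins, not the real APIConnection types.

```cpp
#include <cstdio>
#include <string>
#include <utility>
#include <vector>

// Hypothetical deferred batch: messages accumulate here until flushed.
class DeferredBatch {
 public:
  void schedule(std::string msg) { pending_.push_back(std::move(msg)); }
  size_t size() const { return pending_.size(); }
  bool empty() const { return pending_.empty(); }
  void flush() {
    for (const auto &m : pending_) std::printf("send: %s\n", m.c_str());
    pending_.clear();
  }

 private:
  std::vector<std::string> pending_;
};

// Hypothetical entity iterator: each advance() schedules exactly one message.
class EntityIterator {
 public:
  explicit EntityIterator(size_t total) : total_(total) {}
  bool completed() const { return next_ >= total_; }
  void advance(DeferredBatch &batch) { batch.schedule("entity #" + std::to_string(next_++)); }

 private:
  size_t total_;
  size_t next_ = 0;
};

// Mirrors the helper's shape: add up to max_batch new messages, then flush
// immediately if the cap was reached instead of leaving a full batch queued.
void process_iterator_batch(EntityIterator &it, DeferredBatch &batch, size_t max_batch) {
  size_t initial_size = batch.size();
  while (!it.completed() && (batch.size() - initial_size) < max_batch) {
    it.advance(batch);
  }
  if (batch.size() >= max_batch) {
    batch.flush();
  }
}

int main() {
  DeferredBatch batch;
  EntityIterator it(7);
  while (!it.completed()) {
    process_iterator_batch(it, batch, 3);  // drains 7 entities in caps of 3
  }
  if (!batch.empty()) batch.flush();  // final partial batch
}
```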
```diff
@@ -15,10 +15,6 @@
 #include <limits>
 #include <vector>
 
-namespace esphome {
-class ComponentIterator;
-}  // namespace esphome
-
 namespace esphome::api {
 
 // Keepalive timeout in milliseconds
```
```diff
@@ -368,13 +364,20 @@ class APIConnection final : public APIServerConnectionBase {
     return this->client_supports_api_version(1, 14) ? MAX_INITIAL_PER_BATCH : MAX_INITIAL_PER_BATCH_LEGACY;
   }
 
-  // Process active iterator (list_entities/initial_state) during connection setup.
-  // Extracted from loop() — only runs during initial handshake, NONE in steady state.
-  void __attribute__((noinline)) process_active_iterator_();
-
-  // Helper method to process multiple entities from an iterator in a batch.
-  // Takes ComponentIterator base class reference to avoid duplicate template instantiations.
-  void process_iterator_batch_(ComponentIterator &iterator);
+  // Helper method to process multiple entities from an iterator in a batch
+  template<typename Iterator> void process_iterator_batch_(Iterator &iterator) {
+    size_t initial_size = this->deferred_batch_.size();
+    size_t max_batch = this->get_max_batch_size_();
+    while (!iterator.completed() && (this->deferred_batch_.size() - initial_size) < max_batch) {
+      iterator.advance();
+    }
+
+    // If the batch is full, process it immediately
+    // Note: iterator.advance() already calls schedule_batch_() via schedule_message_()
+    if (this->deferred_batch_.size() >= max_batch) {
+      this->process_batch_();
+    }
+  }
 
 #ifdef USE_BINARY_SENSOR
   static uint16_t try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
```
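The two sides of this hunk embody the trade-off named in the removed comment: a templated process_iterator_batch_ is instantiated once per iterator type it is called with, while the overload that takes the ComponentIterator base reference compiles to a single function that dispatches through the vtable. A minimal standalone sketch of that trade-off, using hypothetical classes rather than the ESPHome ones:

```cpp
#include <cstdio>

// Hypothetical stand-ins for ComponentIterator and its derived iterators.
class IteratorBase {
 public:
  virtual ~IteratorBase() = default;
  virtual bool completed() const = 0;
  virtual void advance() = 0;
};

class ListIterator : public IteratorBase {
 public:
  bool completed() const override { return pos_ >= 3; }
  void advance() override { std::printf("list entity %d\n", pos_++); }

 private:
  int pos_ = 0;
};

class StateIterator : public IteratorBase {
 public:
  bool completed() const override { return pos_ >= 3; }
  void advance() override { std::printf("initial state %d\n", pos_++); }

 private:
  int pos_ = 0;
};

// Template version: the compiler emits one copy of this loop body per iterator
// type it is called with (ListIterator, StateIterator, ...).
template<typename Iterator> void drain_batch_template(Iterator &it, int max_batch) {
  int produced = 0;
  while (!it.completed() && produced < max_batch) {
    it.advance();
    ++produced;
  }
}

// Base-reference version: a single function in the binary; the per-type
// behaviour is reached through virtual calls instead of duplicated loop bodies.
void drain_batch_virtual(IteratorBase &it, int max_batch) {
  int produced = 0;
  while (!it.completed() && produced < max_batch) {
    it.advance();
    ++produced;
  }
}

int main() {
  ListIterator list;
  StateIterator state;
  drain_batch_template(list, 2);   // instantiation #1
  drain_batch_template(state, 2);  // instantiation #2
  drain_batch_virtual(list, 2);    // same machine code serves both calls
  drain_batch_virtual(state, 2);
}
```

On a flash-constrained target the single virtual version trades a small per-call indirection for not carrying one copy of the loop per iterator type.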
```diff
@@ -94,6 +94,7 @@ class ListEntitiesIterator : public ComponentIterator {
   bool on_update(update::UpdateEntity *entity) override;
 #endif
   bool on_end() override;
+  bool completed() { return this->state_ == IteratorState::NONE; }
 
  protected:
   APIConnection *client_;
```
```diff
@@ -88,6 +88,7 @@ class InitialStateIterator : public ComponentIterator {
 #ifdef USE_UPDATE
   bool on_update(update::UpdateEntity *entity) override;
 #endif
+  bool completed() { return this->state_ == IteratorState::NONE; }
 
  protected:
   APIConnection *client_;
```
```diff
@@ -204,36 +204,40 @@ void Application::loop() {
   this->last_loop_ = last_op_end_time;
 
   if (this->dump_config_at_ < this->components_.size()) {
-    if (this->dump_config_at_ == 0) {
-      char build_time_str[Application::BUILD_TIME_STR_SIZE];
-      this->get_build_time_string(build_time_str);
-      ESP_LOGI(TAG, "ESPHome version " ESPHOME_VERSION " compiled on %s", build_time_str);
+    this->process_dump_config_();
+  }
+}
+
+void Application::process_dump_config_() {
+  if (this->dump_config_at_ == 0) {
+    char build_time_str[Application::BUILD_TIME_STR_SIZE];
+    this->get_build_time_string(build_time_str);
+    ESP_LOGI(TAG, "ESPHome version " ESPHOME_VERSION " compiled on %s", build_time_str);
 #ifdef ESPHOME_PROJECT_NAME
-      ESP_LOGI(TAG, "Project " ESPHOME_PROJECT_NAME " version " ESPHOME_PROJECT_VERSION);
+    ESP_LOGI(TAG, "Project " ESPHOME_PROJECT_NAME " version " ESPHOME_PROJECT_VERSION);
 #endif
 #ifdef USE_ESP32
-      esp_chip_info_t chip_info;
-      esp_chip_info(&chip_info);
-      ESP_LOGI(TAG, "ESP32 Chip: %s rev%d.%d, %d core(s)", ESPHOME_VARIANT, chip_info.revision / 100,
-               chip_info.revision % 100, chip_info.cores);
+    esp_chip_info_t chip_info;
+    esp_chip_info(&chip_info);
+    ESP_LOGI(TAG, "ESP32 Chip: %s rev%d.%d, %d core(s)", ESPHOME_VARIANT, chip_info.revision / 100,
+             chip_info.revision % 100, chip_info.cores);
 #if defined(USE_ESP32_VARIANT_ESP32) && !defined(USE_ESP32_MIN_CHIP_REVISION_SET)
-      // Suggest optimization for chips that don't need the PSRAM cache workaround
-      if (chip_info.revision >= 300) {
+    // Suggest optimization for chips that don't need the PSRAM cache workaround
+    if (chip_info.revision >= 300) {
 #ifdef USE_PSRAM
-        ESP_LOGW(TAG, "Set minimum_chip_revision: \"%d.%d\" to save ~10KB IRAM", chip_info.revision / 100,
-                 chip_info.revision % 100);
+      ESP_LOGW(TAG, "Set minimum_chip_revision: \"%d.%d\" to save ~10KB IRAM", chip_info.revision / 100,
+               chip_info.revision % 100);
 #else
-        ESP_LOGW(TAG, "Set minimum_chip_revision: \"%d.%d\" to reduce binary size", chip_info.revision / 100,
-                 chip_info.revision % 100);
+      ESP_LOGW(TAG, "Set minimum_chip_revision: \"%d.%d\" to reduce binary size", chip_info.revision / 100,
+               chip_info.revision % 100);
 #endif
-      }
+    }
 #endif
 #endif
-    }
-
-    this->components_[this->dump_config_at_]->call_dump_config();
-    this->dump_config_at_++;
-  }
-}
+  }
+
+  this->components_[this->dump_config_at_]->call_dump_config();
+  this->dump_config_at_++;
+}
 
 void IRAM_ATTR HOT Application::feed_wdt(uint32_t time) {
```
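The dump_config_at_ counter acts as a resumable cursor: each pass through loop() dumps at most one component, so a long config dump is spread over many iterations instead of stalling a single one. A small standalone sketch of that incremental pattern, with hypothetical names rather than the real Application class:

```cpp
#include <cstdio>
#include <string>
#include <utility>
#include <vector>

// Hypothetical component list; in this sketch a "dump" is just a printf.
class Scheduler {
 public:
  explicit Scheduler(std::vector<std::string> components) : components_(std::move(components)) {}

  // Called once per main-loop iteration; does at most one unit of dump work.
  void loop() {
    if (dump_at_ < components_.size()) {
      if (dump_at_ == 0) {
        std::printf("-- begin config dump --\n");  // one-time header
      }
      std::printf("dump: %s\n", components_[dump_at_].c_str());
      ++dump_at_;  // resume from the next component on the next iteration
    }
    // ... the rest of the per-iteration work would go here ...
  }

 private:
  std::vector<std::string> components_;
  size_t dump_at_ = 0;
};

int main() {
  Scheduler app({"wifi", "api", "sensor.temperature"});
  for (int i = 0; i < 5; ++i) {
    app.loop();  // the dump finishes after three iterations; later calls skip it
  }
}
```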
```diff
@@ -519,6 +519,11 @@ class Application {
   void before_loop_tasks_(uint32_t loop_start_time);
   void after_loop_tasks_();
 
+  /// Process dump_config output one component per loop iteration.
+  /// Extracted from loop() to keep cold startup/reconnect logging out of the hot path.
+  /// Caller must ensure dump_config_at_ < components_.size().
+  void __attribute__((noinline)) process_dump_config_();
+
   void feed_wdt_arch_();
 
   /// Perform a delay while also monitoring socket file descriptors for readiness
```
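Both branches use the same trick for cold code: the rarely executed body is moved into its own member function and marked with the GCC/Clang __attribute__((noinline)), so the compiler cannot fold it back into the hot loop() and grow that function. A minimal sketch of the pattern, with hypothetical names:

```cpp
#include <cstdio>

class Worker {
 public:
  void loop() {
    ++iterations_;
    if (iterations_ == 1) {
      // Cold, one-time work stays out of line; loop() itself remains small.
      this->log_startup_banner_();
    }
    // ... hot per-iteration work ...
  }

 private:
  // noinline keeps the banner code and its setup in a separate function
  // instead of being merged into loop() by the optimizer.
  void __attribute__((noinline)) log_startup_banner_() {
    std::printf("starting up, this only runs once\n");
  }

  unsigned iterations_ = 0;
};

int main() {
  Worker w;
  for (int i = 0; i < 3; ++i) {
    w.loop();
  }
}
```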
```diff
@@ -26,7 +26,6 @@ class ComponentIterator {
  public:
   void begin(bool include_internal = false);
   void advance();
-  bool completed() const { return this->state_ == IteratorState::NONE; }
   virtual bool on_begin();
 #ifdef USE_BINARY_SENSOR
   virtual bool on_binary_sensor(binary_sensor::BinarySensor *binary_sensor) = 0;
```