mirror of https://github.com/esphome/esphome.git (synced 2025-10-28 13:43:54 +00:00)

Merge branch 'integration' into memory_api
.github/workflows/ci.yml (vendored): 62 lines changed

@@ -379,7 +379,16 @@ jobs:
           # Use intelligent splitter that groups components with same bus configs
           components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}'
-          directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}'
+          # Only isolate directly changed components when targeting dev branch
+          # For beta/release branches, group everything for faster CI
+          if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
+            directly_changed='[]'
+            echo "Target branch: ${{ github.base_ref }} - grouping all components"
+          else
+            directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}'
+            echo "Target branch: ${{ github.base_ref }} - isolating directly changed components"
+          fi
+
           echo "Splitting components intelligently..."
           output=$(python3 script/split_components_for_ci.py --components "$components" --directly-changed "$directly_changed" --batch-size 40 --output github)
@@ -396,7 +405,7 @@ jobs:
     if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
     strategy:
       fail-fast: false
-      max-parallel: ${{ (github.base_ref == 'beta' || github.base_ref == 'release') && 8 || 4 }}
+      max-parallel: ${{ (startsWith(github.base_ref, 'beta') || startsWith(github.base_ref, 'release')) && 8 || 4 }}
       matrix:
         components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
     steps:
@@ -424,18 +433,31 @@ jobs:
       - name: Validate and compile components with intelligent grouping
         run: |
           . venv/bin/activate
-          # Use /mnt for build files (70GB available vs ~29GB on /)
-          # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
-          sudo mkdir -p /mnt/platformio
-          sudo chown $USER:$USER /mnt/platformio
-          mkdir -p ~/.platformio
-          sudo mount --bind /mnt/platformio ~/.platformio
-
-          # Bind mount test build directory to /mnt
-          sudo mkdir -p /mnt/test_build_components_build
-          sudo chown $USER:$USER /mnt/test_build_components_build
-          mkdir -p tests/test_build_components/build
-          sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
+          # Check if /mnt has more free space than / before bind mounting
+          # Extract available space in KB for comparison
+          root_avail=$(df -k / | awk 'NR==2 {print $4}')
+          mnt_avail=$(df -k /mnt 2>/dev/null | awk 'NR==2 {print $4}')
+          echo "Available space: / has ${root_avail}KB, /mnt has ${mnt_avail}KB"
+
+          # Only use /mnt if it has more space than /
+          if [ -n "$mnt_avail" ] && [ "$mnt_avail" -gt "$root_avail" ]; then
+            echo "Using /mnt for build files (more space available)"
+            # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
+            sudo mkdir -p /mnt/platformio
+            sudo chown $USER:$USER /mnt/platformio
+            mkdir -p ~/.platformio
+            sudo mount --bind /mnt/platformio ~/.platformio
+
+            # Bind mount test build directory to /mnt
+            sudo mkdir -p /mnt/test_build_components_build
+            sudo chown $USER:$USER /mnt/test_build_components_build
+            mkdir -p tests/test_build_components/build
+            sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
+          else
+            echo "Using / for build files (more space available than /mnt or /mnt unavailable)"
+          fi

           # Convert space-separated components to comma-separated for Python script
           components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')
@@ -448,7 +470,7 @@ jobs:
           # - This catches pin conflicts and other issues in directly changed code
           # - Grouped tests use --testing-mode to allow config merging (disables some checks)
           # - Dependencies are safe to group since they weren't modified in this PR
-          if [ "${{ github.base_ref }}" = "beta" ] || [ "${{ github.base_ref }}" = "release" ]; then
+          if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
            directly_changed_csv=""
            echo "Testing components: $components_csv"
            echo "Target branch: ${{ github.base_ref }} - grouping all components"
@@ -459,6 +481,11 @@ jobs:
           fi
           echo ""
+
+          # Show disk space before validation (after bind mounts setup)
+          echo "Disk space before config validation:"
+          df -h
+          echo ""
           # Run config validation with grouping and isolation
           python3 script/test_build_components.py -e config -c "$components_csv" -f --isolate "$directly_changed_csv"

@@ -466,6 +493,11 @@ jobs:
          echo "Config validation passed! Starting compilation..."
          echo ""
+
+          # Show disk space before compilation
+          echo "Disk space before compilation:"
+          df -h
+          echo ""
          # Run compilation with grouping and isolation
          python3 script/test_build_components.py -e compile -c "$components_csv" -f --isolate "$directly_changed_csv"

@@ -474,7 +506,7 @@ jobs:
     runs-on: ubuntu-latest
     needs:
       - common
-    if: github.event_name == 'pull_request' && github.base_ref != 'beta' && github.base_ref != 'release'
+    if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
     steps:
       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

@@ -987,8 +987,8 @@ message ListEntitiesClimateResponse {
   string name = 3;
   reserved 4; // Deprecated: was string unique_id

-  bool supports_current_temperature = 5;
-  bool supports_two_point_target_temperature = 6;
+  bool supports_current_temperature = 5; // Deprecated: use feature_flags
+  bool supports_two_point_target_temperature = 6; // Deprecated: use feature_flags
   repeated ClimateMode supported_modes = 7 [(container_pointer) = "std::set<climate::ClimateMode>"];
   float visual_min_temperature = 8;
   float visual_max_temperature = 9;
@@ -997,7 +997,7 @@ message ListEntitiesClimateResponse {
   // is if CLIMATE_PRESET_AWAY exists is supported_presets
   // Deprecated in API version 1.5
   bool legacy_supports_away = 11 [deprecated=true];
-  bool supports_action = 12;
+  bool supports_action = 12; // Deprecated: use feature_flags
   repeated ClimateFanMode supported_fan_modes = 13 [(container_pointer) = "std::set<climate::ClimateFanMode>"];
   repeated ClimateSwingMode supported_swing_modes = 14 [(container_pointer) = "std::set<climate::ClimateSwingMode>"];
   repeated string supported_custom_fan_modes = 15 [(container_pointer) = "std::set"];
@@ -1007,11 +1007,12 @@ message ListEntitiesClimateResponse {
   string icon = 19 [(field_ifdef) = "USE_ENTITY_ICON"];
   EntityCategory entity_category = 20;
   float visual_current_temperature_step = 21;
-  bool supports_current_humidity = 22;
-  bool supports_target_humidity = 23;
+  bool supports_current_humidity = 22; // Deprecated: use feature_flags
+  bool supports_target_humidity = 23; // Deprecated: use feature_flags
   float visual_min_humidity = 24;
   float visual_max_humidity = 25;
   uint32 device_id = 26 [(field_ifdef) = "USE_DEVICES"];
+  uint32 feature_flags = 27;
 }
 message ClimateStateResponse {
   option (id) = 47;
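The net effect of the proto changes above: several per-capability booleans collapse into a single `feature_flags` bitmask (field 27), and the old fields are kept only for backward compatibility. A minimal client-side sketch of testing the bits, assuming the bit values from the `ClimateFeature` enum introduced later in this merge (the standalone `main()` driver is illustrative, not part of the diff):

```cpp
#include <cstdint>
#include <cstdio>

// Mirrors the ClimateFeature enum added in climate_mode.h below.
enum ClimateFeature : uint32_t {
  CLIMATE_SUPPORTS_CURRENT_TEMPERATURE = 1 << 0,
  CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE = 1 << 1,
  CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE = 1 << 2,
  CLIMATE_SUPPORTS_CURRENT_HUMIDITY = 1 << 3,
  CLIMATE_SUPPORTS_TARGET_HUMIDITY = 1 << 4,
  CLIMATE_SUPPORTS_ACTION = 1 << 5,
};

int main() {
  // feature_flags as it might arrive in ListEntitiesClimateResponse field 27.
  uint32_t feature_flags =
      CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | CLIMATE_SUPPORTS_ACTION;
  if (feature_flags & CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)
    std::printf("reports current temperature\n");  // printed: bit is set
  if (feature_flags & CLIMATE_SUPPORTS_TARGET_HUMIDITY)
    std::printf("supports target humidity\n");  // not printed: bit unset
  return 0;
}
```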
@@ -27,6 +27,9 @@
 #ifdef USE_BLUETOOTH_PROXY
 #include "esphome/components/bluetooth_proxy/bluetooth_proxy.h"
 #endif
+#ifdef USE_CLIMATE
+#include "esphome/components/climate/climate_mode.h"
+#endif
 #ifdef USE_VOICE_ASSISTANT
 #include "esphome/components/voice_assistant/voice_assistant.h"
 #endif
@@ -623,9 +626,10 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
   auto traits = climate->get_traits();
   resp.mode = static_cast<enums::ClimateMode>(climate->mode);
   resp.action = static_cast<enums::ClimateAction>(climate->action);
-  if (traits.get_supports_current_temperature())
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE))
     resp.current_temperature = climate->current_temperature;
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     resp.target_temperature_low = climate->target_temperature_low;
     resp.target_temperature_high = climate->target_temperature_high;
   } else {
@@ -644,9 +648,9 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
   }
   if (traits.get_supports_swing_modes())
     resp.swing_mode = static_cast<enums::ClimateSwingMode>(climate->swing_mode);
-  if (traits.get_supports_current_humidity())
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY))
     resp.current_humidity = climate->current_humidity;
-  if (traits.get_supports_target_humidity())
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY))
     resp.target_humidity = climate->target_humidity;
   return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size,
                                       is_single);
@@ -656,10 +660,14 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
   auto *climate = static_cast<climate::Climate *>(entity);
   ListEntitiesClimateResponse msg;
   auto traits = climate->get_traits();
+  // Flags set for backward compatibility, deprecated in 2025.11.0
   msg.supports_current_temperature = traits.get_supports_current_temperature();
   msg.supports_current_humidity = traits.get_supports_current_humidity();
   msg.supports_two_point_target_temperature = traits.get_supports_two_point_target_temperature();
   msg.supports_target_humidity = traits.get_supports_target_humidity();
+  msg.supports_action = traits.get_supports_action();
+  // Current feature flags and other supported parameters
+  msg.feature_flags = traits.get_feature_flags();
   msg.supported_modes = &traits.get_supported_modes_for_api_();
   msg.visual_min_temperature = traits.get_visual_min_temperature();
   msg.visual_max_temperature = traits.get_visual_max_temperature();
@@ -667,7 +675,6 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
   msg.visual_current_temperature_step = traits.get_visual_current_temperature_step();
   msg.visual_min_humidity = traits.get_visual_min_humidity();
   msg.visual_max_humidity = traits.get_visual_max_humidity();
-  msg.supports_action = traits.get_supports_action();
   msg.supported_fan_modes = &traits.get_supported_fan_modes_for_api_();
   msg.supported_custom_fan_modes = &traits.get_supported_custom_fan_modes_for_api_();
   msg.supported_presets = &traits.get_supported_presets_for_api_();
@@ -1406,7 +1413,7 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) {

   HelloResponse resp;
   resp.api_version_major = 1;
-  resp.api_version_minor = 12;
+  resp.api_version_minor = 13;
   // Send only the version string - the client only logs this for debugging and doesn't use it otherwise
   resp.set_server_info(ESPHOME_VERSION_REF);
   resp.set_name(StringRef(App.get_name()));
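Note the semantics these call sites rely on: `has_feature_flags()` is implemented (in climate_traits.h below) as `return this->feature_flags_ & feature_flags;`, so passing two flags OR'd together is an any-of test, not an all-of test. That is why the two-point checks accept either `CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE` or `CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE`. A self-contained sketch of that behavior (standalone names, not the real class):

```cpp
#include <cassert>
#include <cstdint>

constexpr uint32_t SUPPORTS_TWO_POINT = 1 << 1;
constexpr uint32_t REQUIRES_TWO_POINT = 1 << 2;

// Same shape as ClimateTraits::has_feature_flags(): any bit in mask matches.
bool has_feature_flags(uint32_t flags, uint32_t mask) { return (flags & mask) != 0; }

int main() {
  uint32_t traits = REQUIRES_TWO_POINT;  // only the "requires" bit set
  // Passing both bits OR'd together still matches (any-of, not all-of):
  assert(has_feature_flags(traits, SUPPORTS_TWO_POINT | REQUIRES_TWO_POINT));
  assert(!has_feature_flags(traits, SUPPORTS_TWO_POINT));
  return 0;
}
```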
@@ -1201,6 +1201,7 @@ void ListEntitiesClimateResponse::encode(ProtoWriteBuffer buffer) const {
 #ifdef USE_DEVICES
   buffer.encode_uint32(26, this->device_id);
 #endif
+  buffer.encode_uint32(27, this->feature_flags);
 }
 void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
   size.add_length(1, this->object_id_ref_.size());
@@ -1255,6 +1256,7 @@ void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
 #ifdef USE_DEVICES
   size.add_uint32(2, this->device_id);
 #endif
+  size.add_uint32(2, this->feature_flags);
 }
 void ClimateStateResponse::encode(ProtoWriteBuffer buffer) const {
   buffer.encode_fixed32(1, this->key);
@@ -1371,7 +1371,7 @@ class CameraImageRequest final : public ProtoDecodableMessage {
 class ListEntitiesClimateResponse final : public InfoResponseProtoMessage {
  public:
   static constexpr uint8_t MESSAGE_TYPE = 46;
-  static constexpr uint8_t ESTIMATED_SIZE = 145;
+  static constexpr uint8_t ESTIMATED_SIZE = 150;
 #ifdef HAS_PROTO_MESSAGE_DUMP
   const char *message_name() const override { return "list_entities_climate_response"; }
 #endif
@@ -1392,6 +1392,7 @@ class ListEntitiesClimateResponse final : public InfoResponseProtoMessage {
   bool supports_target_humidity{false};
   float visual_min_humidity{0.0f};
   float visual_max_humidity{0.0f};
+  uint32_t feature_flags{0};
   void encode(ProtoWriteBuffer buffer) const override;
   void calculate_size(ProtoSize &size) const override;
 #ifdef HAS_PROTO_MESSAGE_DUMP
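The `ESTIMATED_SIZE` bump from 145 to 150 plausibly covers the new field's wire cost: field number 27 with wire type 0 needs a two-byte tag varint (27 << 3 = 216, which is ≥ 128), plus one to five bytes for the `uint32` value. A quick check of that arithmetic under standard protobuf varint rules (the helper name is mine, not from the diff):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Bytes needed to encode v as a protobuf varint (7 payload bits per byte).
static size_t varint_size(uint32_t v) {
  size_t n = 1;
  while (v >= 0x80) {
    v >>= 7;
    n++;
  }
  return n;
}

int main() {
  uint32_t tag = (27u << 3) | 0u;  // field 27, wire type 0 (varint) -> 216
  std::printf("tag bytes: %zu\n", varint_size(tag));                            // 2
  std::printf("value bytes (all six flags set = 63): %zu\n", varint_size(63u)); // 1
  std::printf("max uint32 value bytes: %zu\n", varint_size(0xFFFFFFFFu));       // 5
  return 0;
}
```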
@@ -1292,6 +1292,7 @@ void ListEntitiesClimateResponse::dump_to(std::string &out) const {
 #ifdef USE_DEVICES
   dump_field(out, "device_id", this->device_id);
 #endif
+  dump_field(out, "feature_flags", this->feature_flags);
 }
 void ClimateStateResponse::dump_to(std::string &out) const {
   MessageDumpHelper helper(out, "ClimateStateResponse");
@@ -96,7 +96,8 @@ void ClimateCall::validate_() {
   }
   if (this->target_temperature_.has_value()) {
     auto target = *this->target_temperature_;
-    if (traits.get_supports_two_point_target_temperature()) {
+    if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                                 CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
       ESP_LOGW(TAG, " Cannot set target temperature for climate device "
                     "with two-point target temperature!");
       this->target_temperature_.reset();
@@ -106,7 +107,8 @@ void ClimateCall::validate_() {
     }
   }
   if (this->target_temperature_low_.has_value() || this->target_temperature_high_.has_value()) {
-    if (!traits.get_supports_two_point_target_temperature()) {
+    if (!traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                                  CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
       ESP_LOGW(TAG, " Cannot set low/high target temperature for this device!");
       this->target_temperature_low_.reset();
       this->target_temperature_high_.reset();
@@ -350,13 +352,14 @@ void Climate::save_state_() {

   state.mode = this->mode;
   auto traits = this->get_traits();
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     state.target_temperature_low = this->target_temperature_low;
     state.target_temperature_high = this->target_temperature_high;
   } else {
     state.target_temperature = this->target_temperature;
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     state.target_humidity = this->target_humidity;
   }
   if (traits.get_supports_fan_modes() && fan_mode.has_value()) {
@@ -400,7 +403,7 @@ void Climate::publish_state() {
   auto traits = this->get_traits();

   ESP_LOGD(TAG, " Mode: %s", LOG_STR_ARG(climate_mode_to_string(this->mode)));
-  if (traits.get_supports_action()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
     ESP_LOGD(TAG, " Action: %s", LOG_STR_ARG(climate_action_to_string(this->action)));
   }
   if (traits.get_supports_fan_modes() && this->fan_mode.has_value()) {
@@ -418,19 +421,20 @@ void Climate::publish_state() {
   if (traits.get_supports_swing_modes()) {
     ESP_LOGD(TAG, " Swing Mode: %s", LOG_STR_ARG(climate_swing_mode_to_string(this->swing_mode)));
   }
-  if (traits.get_supports_current_temperature()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
     ESP_LOGD(TAG, " Current Temperature: %.2f°C", this->current_temperature);
   }
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     ESP_LOGD(TAG, " Target Temperature: Low: %.2f°C High: %.2f°C", this->target_temperature_low,
              this->target_temperature_high);
   } else {
     ESP_LOGD(TAG, " Target Temperature: %.2f°C", this->target_temperature);
   }
-  if (traits.get_supports_current_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
     ESP_LOGD(TAG, " Current Humidity: %.0f%%", this->current_humidity);
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     ESP_LOGD(TAG, " Target Humidity: %.0f%%", this->target_humidity);
   }

@@ -485,13 +489,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) {
   auto call = climate->make_call();
   auto traits = climate->get_traits();
   call.set_mode(this->mode);
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     call.set_target_temperature_low(this->target_temperature_low);
     call.set_target_temperature_high(this->target_temperature_high);
   } else {
     call.set_target_temperature(this->target_temperature);
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     call.set_target_humidity(this->target_humidity);
   }
   if (traits.get_supports_fan_modes() || !traits.get_supported_custom_fan_modes().empty()) {
@@ -508,13 +513,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) {
 void ClimateDeviceRestoreState::apply(Climate *climate) {
   auto traits = climate->get_traits();
   climate->mode = this->mode;
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     climate->target_temperature_low = this->target_temperature_low;
     climate->target_temperature_high = this->target_temperature_high;
   } else {
     climate->target_temperature = this->target_temperature;
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     climate->target_humidity = this->target_humidity;
   }
   if (traits.get_supports_fan_modes() && !this->uses_custom_fan_mode) {
@@ -580,28 +586,30 @@ void Climate::dump_traits_(const char *tag) {
                 " Target: %.1f",
                 traits.get_visual_min_temperature(), traits.get_visual_max_temperature(),
                 traits.get_visual_target_temperature_step());
-  if (traits.get_supports_current_temperature()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
     ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step());
   }
-  if (traits.get_supports_target_humidity() || traits.get_supports_current_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY |
+                               climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
     ESP_LOGCONFIG(tag,
                   " - Min humidity: %.0f\n"
                   " - Max humidity: %.0f",
                   traits.get_visual_min_humidity(), traits.get_visual_max_humidity());
   }
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     ESP_LOGCONFIG(tag, " [x] Supports two-point target temperature");
   }
-  if (traits.get_supports_current_temperature()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
     ESP_LOGCONFIG(tag, " [x] Supports current temperature");
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     ESP_LOGCONFIG(tag, " [x] Supports target humidity");
   }
-  if (traits.get_supports_current_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
     ESP_LOGCONFIG(tag, " [x] Supports current humidity");
   }
-  if (traits.get_supports_action()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
     ESP_LOGCONFIG(tag, " [x] Supports action");
   }
   if (!traits.get_supported_modes().empty()) {
@@ -98,6 +98,21 @@ enum ClimatePreset : uint8_t {
   CLIMATE_PRESET_ACTIVITY = 7,
 };

+enum ClimateFeature : uint32_t {
+  // Reporting current temperature is supported
+  CLIMATE_SUPPORTS_CURRENT_TEMPERATURE = 1 << 0,
+  // Setting two target temperatures is supported (used in conjunction with CLIMATE_MODE_HEAT_COOL)
+  CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE = 1 << 1,
+  // Single-point mode is NOT supported (UI always displays two handles, setting 'target_temperature' is not supported)
+  CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE = 1 << 2,
+  // Reporting current humidity is supported
+  CLIMATE_SUPPORTS_CURRENT_HUMIDITY = 1 << 3,
+  // Setting a target humidity is supported
+  CLIMATE_SUPPORTS_TARGET_HUMIDITY = 1 << 4,
+  // Reporting current climate action is supported
+  CLIMATE_SUPPORTS_ACTION = 1 << 5,
+};
+
 /// Convert the given ClimateMode to a human-readable string.
 const LogString *climate_mode_to_string(ClimateMode mode);

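For component authors, populating the new bitmask replaces a chain of boolean setters. A sketch of a `traits()` override under the API this merge introduces, to be compiled inside an ESPHome build (`MyClimate` is a made-up component; the flag values and trait methods are the ones shown in this diff):

```cpp
#include "esphome/components/climate/climate.h"

// Hypothetical component illustrating the bitmask-based traits.
class MyClimate : public esphome::climate::Climate {
 public:
  esphome::climate::ClimateTraits traits() override {
    esphome::climate::ClimateTraits traits;
    // One call sets several capabilities at once:
    traits.add_feature_flags(esphome::climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE |
                             esphome::climate::CLIMATE_SUPPORTS_ACTION);
    traits.set_supported_modes({esphome::climate::CLIMATE_MODE_OFF,
                                esphome::climate::CLIMATE_MODE_HEAT});
    return traits;
  }
  void control(const esphome::climate::ClimateCall &call) override {
    // Apply the requested state here.
  }
};
```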
@@ -21,48 +21,92 @@ namespace climate {
 * - Target Temperature
 *
 * All other properties and modes are optional and the integration must mark
- * each of them as supported by setting the appropriate flag here.
+ * each of them as supported by setting the appropriate flag(s) here.
 *
- * - supports current temperature - if the climate device supports reporting a current temperature
- * - supports two point target temperature - if the climate device's target temperature should be
- *   split in target_temperature_low and target_temperature_high instead of just the single target_temperature
+ * - feature flags: see ClimateFeatures enum in climate_mode.h
 * - supports modes:
 *   - auto mode (automatic control)
 *   - cool mode (lowers current temperature)
 *   - heat mode (increases current temperature)
 *   - dry mode (removes humidity from air)
 *   - fan mode (only turns on fan)
- * - supports action - if the climate device supports reporting the active
- *   current action of the device with the action property.
 * - supports fan modes - optionally, if it has a fan which can be configured in different ways:
 *   - on, off, auto, high, medium, low, middle, focus, diffuse, quiet
 * - supports swing modes - optionally, if it has a swing which can be configured in different ways:
 *   - off, both, vertical, horizontal
 *
 * This class also contains static data for the climate device display:
- * - visual min/max temperature - tells the frontend what range of temperatures the climate device
- *   should display (gauge min/max values)
+ * - visual min/max temperature/humidity - tells the frontend what range of temperature/humidity the
+ *   climate device should display (gauge min/max values)
 * - temperature step - the step with which to increase/decrease target temperature.
 *   This also affects with how many decimal places the temperature is shown
 */
 class ClimateTraits {
  public:
-  bool get_supports_current_temperature() const { return this->supports_current_temperature_; }
+  /// Get/set feature flags (see ClimateFeatures enum in climate_mode.h)
+  uint32_t get_feature_flags() const { return this->feature_flags_; }
+  void add_feature_flags(uint32_t feature_flags) { this->feature_flags_ |= feature_flags; }
+  void clear_feature_flags(uint32_t feature_flags) { this->feature_flags_ &= ~feature_flags; }
+  bool has_feature_flags(uint32_t feature_flags) const { return this->feature_flags_ & feature_flags; }
+  void set_feature_flags(uint32_t feature_flags) { this->feature_flags_ = feature_flags; }
+
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_current_temperature() const {
+    return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+  }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_current_temperature(bool supports_current_temperature) {
-    this->supports_current_temperature_ = supports_current_temperature;
+    if (supports_current_temperature) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+    }
   }
-  bool get_supports_current_humidity() const { return this->supports_current_humidity_; }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_current_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY); }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_current_humidity(bool supports_current_humidity) {
-    this->supports_current_humidity_ = supports_current_humidity;
+    if (supports_current_humidity) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
+    }
   }
-  bool get_supports_two_point_target_temperature() const { return this->supports_two_point_target_temperature_; }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_two_point_target_temperature() const {
+    return this->has_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
+  }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_two_point_target_temperature(bool supports_two_point_target_temperature) {
-    this->supports_two_point_target_temperature_ = supports_two_point_target_temperature;
+    if (supports_two_point_target_temperature)
+    // Use CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE to mimic previous behavior
+    {
+      this->add_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
+    } else {
+      this->clear_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
+    }
   }
-  bool get_supports_target_humidity() const { return this->supports_target_humidity_; }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_target_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY); }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_target_humidity(bool supports_target_humidity) {
-    this->supports_target_humidity_ = supports_target_humidity;
+    if (supports_target_humidity) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
+    }
   }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_action() const { return this->has_feature_flags(CLIMATE_SUPPORTS_ACTION); }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
+  void set_supports_action(bool supports_action) {
+    if (supports_action) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_ACTION);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_ACTION);
+    }
+  }
+
   void set_supported_modes(std::set<ClimateMode> modes) { this->supported_modes_ = std::move(modes); }
   void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); }
   ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
@@ -82,9 +126,6 @@ class ClimateTraits {
   bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); }
   const std::set<ClimateMode> &get_supported_modes() const { return this->supported_modes_; }

-  void set_supports_action(bool supports_action) { this->supports_action_ = supports_action; }
-  bool get_supports_action() const { return this->supports_action_; }
-
   void set_supported_fan_modes(std::set<ClimateFanMode> modes) { this->supported_fan_modes_ = std::move(modes); }
   void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); }
   void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); }
@@ -219,24 +260,20 @@ class ClimateTraits {
     }
   }

-  bool supports_current_temperature_{false};
-  bool supports_current_humidity_{false};
-  bool supports_two_point_target_temperature_{false};
-  bool supports_target_humidity_{false};
-  std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
-  bool supports_action_{false};
-  std::set<climate::ClimateFanMode> supported_fan_modes_;
-  std::set<climate::ClimateSwingMode> supported_swing_modes_;
-  std::set<climate::ClimatePreset> supported_presets_;
-  std::set<std::string> supported_custom_fan_modes_;
-  std::set<std::string> supported_custom_presets_;
+  uint32_t feature_flags_{0};

   float visual_min_temperature_{10};
   float visual_max_temperature_{30};
   float visual_target_temperature_step_{0.1};
   float visual_current_temperature_step_{0.1};
   float visual_min_humidity_{30};
   float visual_max_humidity_{99};
+
+  std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
+  std::set<climate::ClimateFanMode> supported_fan_modes_;
+  std::set<climate::ClimateSwingMode> supported_swing_modes_;
+  std::set<climate::ClimatePreset> supported_presets_;
+  std::set<std::string> supported_custom_fan_modes_;
+  std::set<std::string> supported_custom_presets_;
 };

 }  // namespace climate
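One subtlety worth calling out: the deprecated setter maps `true` to `CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE` (to mimic the old always-two-handles behavior), and the deprecated getter checks only that flag. A traits object that sets just `CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE` will therefore read back `false` through the old getter. A self-contained sketch of that arithmetic (bit values copied from `climate_mode.h` above):

```cpp
#include <cassert>
#include <cstdint>

enum : uint32_t {
  SUPPORTS_TWO_POINT = 1 << 1,  // CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE
  REQUIRES_TWO_POINT = 1 << 2,  // CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE
};

int main() {
  uint32_t flags = 0;
  flags |= REQUIRES_TWO_POINT;             // set_supports_two_point_target_temperature(true)
  assert(flags & REQUIRES_TWO_POINT);      // old getter round-trips to true

  uint32_t flags2 = SUPPORTS_TWO_POINT;    // new-style "supports but does not require"
  assert(!(flags2 & REQUIRES_TWO_POINT));  // old getter would report false here
  return 0;
}
```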
@@ -30,14 +30,12 @@ class DateTimeBase : public EntityBase {
 #endif
 };

-#ifdef USE_TIME
 class DateTimeStateTrigger : public Trigger<ESPTime> {
  public:
   explicit DateTimeStateTrigger(DateTimeBase *parent) {
     parent->add_on_state_callback([this, parent]() { this->trigger(parent->state_as_esptime()); });
   }
 };
-#endif

 }  // namespace datetime
 }  // namespace esphome
@@ -190,7 +190,7 @@ async def to_code(config):
     cg.add_define("ESPHOME_VARIANT", "ESP8266")
     cg.add_define(ThreadModel.SINGLE)

-    cg.add_platformio_option("extra_scripts", ["post:post_build.py"])
+    cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"])

     conf = config[CONF_FRAMEWORK]
     cg.add_platformio_option("framework", "arduino")
@@ -230,6 +230,12 @@ async def to_code(config):
     # For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;`
     cg.add_build_flag("-DNEW_OOM_ABORT")

+    # In testing mode, fake a larger IRAM to allow linking grouped component tests
+    # Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB
+    # This is done via a pre-build script that generates a custom linker script
+    if CORE.testing_mode:
+        cg.add_build_flag("-DESPHOME_TESTING_MODE")
+
     cg.add_platformio_option("board_build.flash_mode", config[CONF_BOARD_FLASH_MODE])

     ver: cv.Version = CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION]
@@ -265,3 +271,8 @@ def copy_files():
         post_build_file,
         CORE.relative_build_path("post_build.py"),
     )
+    iram_fix_file = dir / "iram_fix.py.script"
+    copy_file_if_changed(
+        iram_fix_file,
+        CORE.relative_build_path("iram_fix.py"),
+    )
esphome/components/esp8266/iram_fix.py.script (new file, 44 lines)
@@ -0,0 +1,44 @@
+import os
+import re
+
+# pylint: disable=E0602
+Import("env")  # noqa
+
+
+def patch_linker_script_after_preprocess(source, target, env):
+    """Patch the local linker script after PlatformIO preprocesses it."""
+    # Check if we're in testing mode by looking for the define
+    build_flags = env.get("BUILD_FLAGS", [])
+    testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)
+
+    if not testing_mode:
+        return
+
+    # Get the local linker script path
+    build_dir = env.subst("$BUILD_DIR")
+    local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld")
+
+    if not os.path.exists(local_ld):
+        return
+
+    # Read the linker script
+    with open(local_ld, "r") as f:
+        content = f.read()
+
+    # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB)
+    # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000
+    updated = re.sub(
+        r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000",
+        r"\g<1>0x200000",
+        content,
+    )
+
+    if updated != content:
+        with open(local_ld, "w") as f:
+            f.write(updated)
+        print("ESPHome: Patched IRAM size to 2MB for testing mode")
+
+
+# Hook into the build process right before linking
+# This runs after PlatformIO has already preprocessed the linker scripts
+env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess)
@@ -249,6 +249,9 @@ MaxFilter = sensor_ns.class_("MaxFilter", Filter)
|
|||||||
SlidingWindowMovingAverageFilter = sensor_ns.class_(
|
SlidingWindowMovingAverageFilter = sensor_ns.class_(
|
||||||
"SlidingWindowMovingAverageFilter", Filter
|
"SlidingWindowMovingAverageFilter", Filter
|
||||||
)
|
)
|
||||||
|
StreamingMinFilter = sensor_ns.class_("StreamingMinFilter", Filter)
|
||||||
|
StreamingMaxFilter = sensor_ns.class_("StreamingMaxFilter", Filter)
|
||||||
|
StreamingMovingAverageFilter = sensor_ns.class_("StreamingMovingAverageFilter", Filter)
|
||||||
ExponentialMovingAverageFilter = sensor_ns.class_(
|
ExponentialMovingAverageFilter = sensor_ns.class_(
|
||||||
"ExponentialMovingAverageFilter", Filter
|
"ExponentialMovingAverageFilter", Filter
|
||||||
)
|
)
|
||||||
@@ -450,14 +453,21 @@ async def skip_initial_filter_to_code(config, filter_id):
|
|||||||
return cg.new_Pvariable(filter_id, config)
|
return cg.new_Pvariable(filter_id, config)
|
||||||
|
|
||||||
|
|
||||||
@FILTER_REGISTRY.register("min", MinFilter, MIN_SCHEMA)
|
@FILTER_REGISTRY.register("min", Filter, MIN_SCHEMA)
|
||||||
async def min_filter_to_code(config, filter_id):
|
async def min_filter_to_code(config, filter_id):
|
||||||
return cg.new_Pvariable(
|
window_size: int = config[CONF_WINDOW_SIZE]
|
||||||
filter_id,
|
send_every: int = config[CONF_SEND_EVERY]
|
||||||
config[CONF_WINDOW_SIZE],
|
send_first_at: int = config[CONF_SEND_FIRST_AT]
|
||||||
config[CONF_SEND_EVERY],
|
|
||||||
config[CONF_SEND_FIRST_AT],
|
# Optimization: Use streaming filter for batch windows (window_size == send_every)
|
||||||
)
|
# Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000)
|
||||||
|
if window_size == send_every:
|
||||||
|
# Use streaming filter - O(1) memory instead of O(n)
|
||||||
|
rhs = StreamingMinFilter.new(window_size, send_first_at)
|
||||||
|
return cg.Pvariable(filter_id, rhs, StreamingMinFilter)
|
||||||
|
# Use sliding window filter - maintains ring buffer
|
||||||
|
rhs = MinFilter.new(window_size, send_every, send_first_at)
|
||||||
|
return cg.Pvariable(filter_id, rhs, MinFilter)
|
||||||
|
|
||||||
|
|
||||||
MAX_SCHEMA = cv.All(
|
MAX_SCHEMA = cv.All(
|
||||||
@@ -472,14 +482,18 @@ MAX_SCHEMA = cv.All(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@FILTER_REGISTRY.register("max", MaxFilter, MAX_SCHEMA)
|
@FILTER_REGISTRY.register("max", Filter, MAX_SCHEMA)
|
||||||
async def max_filter_to_code(config, filter_id):
|
async def max_filter_to_code(config, filter_id):
|
||||||
return cg.new_Pvariable(
|
window_size: int = config[CONF_WINDOW_SIZE]
|
||||||
filter_id,
|
send_every: int = config[CONF_SEND_EVERY]
|
||||||
config[CONF_WINDOW_SIZE],
|
send_first_at: int = config[CONF_SEND_FIRST_AT]
|
||||||
config[CONF_SEND_EVERY],
|
|
||||||
config[CONF_SEND_FIRST_AT],
|
# Optimization: Use streaming filter for batch windows (window_size == send_every)
|
||||||
)
|
if window_size == send_every:
|
||||||
|
rhs = StreamingMaxFilter.new(window_size, send_first_at)
|
||||||
|
return cg.Pvariable(filter_id, rhs, StreamingMaxFilter)
|
||||||
|
rhs = MaxFilter.new(window_size, send_every, send_first_at)
|
||||||
|
return cg.Pvariable(filter_id, rhs, MaxFilter)
|
||||||
|
|
||||||
|
|
||||||
SLIDING_AVERAGE_SCHEMA = cv.All(
|
SLIDING_AVERAGE_SCHEMA = cv.All(
|
||||||
@@ -496,16 +510,20 @@ SLIDING_AVERAGE_SCHEMA = cv.All(
|
|||||||
|
|
||||||
@FILTER_REGISTRY.register(
|
@FILTER_REGISTRY.register(
|
||||||
"sliding_window_moving_average",
|
"sliding_window_moving_average",
|
||||||
SlidingWindowMovingAverageFilter,
|
Filter,
|
||||||
SLIDING_AVERAGE_SCHEMA,
|
SLIDING_AVERAGE_SCHEMA,
|
||||||
)
|
)
|
||||||
async def sliding_window_moving_average_filter_to_code(config, filter_id):
|
async def sliding_window_moving_average_filter_to_code(config, filter_id):
|
||||||
return cg.new_Pvariable(
|
window_size: int = config[CONF_WINDOW_SIZE]
|
||||||
filter_id,
|
send_every: int = config[CONF_SEND_EVERY]
|
||||||
config[CONF_WINDOW_SIZE],
|
send_first_at: int = config[CONF_SEND_FIRST_AT]
|
||||||
config[CONF_SEND_EVERY],
|
|
||||||
config[CONF_SEND_FIRST_AT],
|
# Optimization: Use streaming filter for batch windows (window_size == send_every)
|
||||||
)
|
if window_size == send_every:
|
||||||
|
rhs = StreamingMovingAverageFilter.new(window_size, send_first_at)
|
||||||
|
return cg.Pvariable(filter_id, rhs, StreamingMovingAverageFilter)
|
||||||
|
rhs = SlidingWindowMovingAverageFilter.new(window_size, send_every, send_first_at)
|
||||||
|
return cg.Pvariable(filter_id, rhs, SlidingWindowMovingAverageFilter)
|
||||||
|
|
||||||
|
|
||||||
EXPONENTIAL_AVERAGE_SCHEMA = cv.All(
|
EXPONENTIAL_AVERAGE_SCHEMA = cv.All(
|
||||||
|
|||||||
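The codegen above picks a `Streaming*` class exactly when `window_size == send_every`, i.e. when windows never overlap, so the filter can fold each sample into one running aggregate instead of buffering the whole window. A self-contained sketch of that batch-window idea (`StreamingMin` is a standalone stand-in, not the ESPHome class; NaN handling omitted):

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <limits>

class StreamingMin {
 public:
  explicit StreamingMin(size_t window_size) : window_size_(window_size) {}
  // Folds each sample into a running minimum; emits once per full window.
  bool new_value(float value, float &out) {
    running_min_ = std::min(running_min_, value);
    if (++count_ < window_size_)
      return false;
    out = running_min_;
    count_ = 0;
    running_min_ = std::numeric_limits<float>::infinity();  // reset for next batch
    return true;
  }

 private:
  size_t window_size_;
  size_t count_{0};
  float running_min_{std::numeric_limits<float>::infinity()};
};

int main() {
  StreamingMin f(3);  // window_size == send_every == 3
  float out;
  for (float v : {5.f, 2.f, 7.f, 1.f, 9.f, 4.f}) {
    if (f.new_value(v, out))
      std::printf("min=%.1f\n", out);  // prints 2.0, then 1.0
  }
  return 0;
}
```

Whatever the aggregate (min, max, sum for an average), state is a couple of scalars, which is where the "20KB → 4 bytes for window_size=5000" comment in the codegen comes from.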
@@ -32,50 +32,75 @@ void Filter::initialize(Sensor *parent, Filter *next) {
   this->next_ = next;
 }
 
-// MedianFilter
-MedianFilter::MedianFilter(size_t window_size, size_t send_every, size_t send_first_at)
-    : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {}
-void MedianFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; }
-void MedianFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; }
-optional<float> MedianFilter::new_value(float value) {
-  while (this->queue_.size() >= this->window_size_) {
-    this->queue_.pop_front();
-  }
-  this->queue_.push_back(value);
-  ESP_LOGVV(TAG, "MedianFilter(%p)::new_value(%f)", this, value);
+// SlidingWindowFilter
+SlidingWindowFilter::SlidingWindowFilter(size_t window_size, size_t send_every, size_t send_first_at)
+    : window_size_(window_size), send_every_(send_every), send_at_(send_every - send_first_at) {
+  // Allocate ring buffer once at initialization
+  this->window_.init(window_size);
+}
+
+void SlidingWindowFilter::set_window_size(size_t window_size) {
+  this->window_size_ = window_size;
+  // Reallocate buffer with new size
+  this->window_.init(window_size);
+  this->window_head_ = 0;
+  this->window_count_ = 0;
+}
+
+optional<float> SlidingWindowFilter::new_value(float value) {
+  // Add value to ring buffer
+  if (this->window_count_ < this->window_size_) {
+    // Buffer not yet full - just append
+    this->window_.push_back(value);
+    this->window_count_++;
+  } else {
+    // Buffer full - overwrite oldest value (ring buffer)
+    this->window_[this->window_head_] = value;
+    this->window_head_++;
+    if (this->window_head_ >= this->window_size_) {
+      this->window_head_ = 0;
+    }
+  }
 
+  // Check if we should send a result
   if (++this->send_at_ >= this->send_every_) {
     this->send_at_ = 0;
-
-    float median = NAN;
-    if (!this->queue_.empty()) {
-      // Copy queue without NaN values
-      std::vector<float> median_queue;
-      median_queue.reserve(this->queue_.size());
-      for (auto v : this->queue_) {
-        if (!std::isnan(v)) {
-          median_queue.push_back(v);
-        }
-      }
-
-      sort(median_queue.begin(), median_queue.end());
-
-      size_t queue_size = median_queue.size();
-      if (queue_size) {
-        if (queue_size % 2) {
-          median = median_queue[queue_size / 2];
-        } else {
-          median = (median_queue[queue_size / 2] + median_queue[(queue_size / 2) - 1]) / 2.0f;
-        }
-      }
-    }
-
-    ESP_LOGVV(TAG, "MedianFilter(%p)::new_value(%f) SENDING %f", this, value, median);
-    return median;
+    float result = this->compute_result();
+    ESP_LOGVV(TAG, "SlidingWindowFilter(%p)::new_value(%f) SENDING %f", this, value, result);
+    return result;
   }
   return {};
 }
 
+// SortedWindowFilter
+FixedVector<float> SortedWindowFilter::get_sorted_values_() {
+  // Copy window without NaN values using FixedVector (no heap allocation)
+  FixedVector<float> sorted_values;
+  sorted_values.init(this->window_count_);
+  for (size_t i = 0; i < this->window_count_; i++) {
+    float v = this->window_[i];
+    if (!std::isnan(v)) {
+      sorted_values.push_back(v);
+    }
+  }
+  std::sort(sorted_values.begin(), sorted_values.end());
+  return sorted_values;
+}
+
+// MedianFilter
+float MedianFilter::compute_result() {
+  FixedVector<float> sorted_values = this->get_sorted_values_();
+  if (sorted_values.empty())
+    return NAN;
+
+  size_t size = sorted_values.size();
+  if (size % 2) {
+    return sorted_values[size / 2];
+  } else {
+    return (sorted_values[size / 2] + sorted_values[(size / 2) - 1]) / 2.0f;
+  }
+}
+
 // SkipInitialFilter
 SkipInitialFilter::SkipInitialFilter(size_t num_to_ignore) : num_to_ignore_(num_to_ignore) {}
 optional<float> SkipInitialFilter::new_value(float value) {
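Note: the ring buffer above trades std::deque push/pop churn for a fixed array plus a wrapping head index. A minimal Python sketch of the same append-or-overwrite logic (class and member names are illustrative, not from the source):

    class RingWindow:
        """Fixed-size sliding window backed by a preallocated list."""

        def __init__(self, size: int) -> None:
            self.buf = [0.0] * size  # allocated once, like window_.init(size)
            self.head = 0            # index where the next value is written
            self.count = 0           # valid values so far, capped at size

        def push(self, value: float) -> None:
            if self.count < len(self.buf):
                self.buf[self.count] = value  # not yet full: append
                self.count += 1
            else:
                self.buf[self.head] = value   # full: overwrite oldest value
                self.head = (self.head + 1) % len(self.buf)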
@@ -91,136 +116,36 @@ optional<float> SkipInitialFilter::new_value(float value) {
 
 // QuantileFilter
 QuantileFilter::QuantileFilter(size_t window_size, size_t send_every, size_t send_first_at, float quantile)
-    : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size), quantile_(quantile) {}
-void QuantileFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; }
-void QuantileFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; }
-void QuantileFilter::set_quantile(float quantile) { this->quantile_ = quantile; }
-optional<float> QuantileFilter::new_value(float value) {
-  while (this->queue_.size() >= this->window_size_) {
-    this->queue_.pop_front();
-  }
-  this->queue_.push_back(value);
-  ESP_LOGVV(TAG, "QuantileFilter(%p)::new_value(%f), quantile:%f", this, value, this->quantile_);
-
-  if (++this->send_at_ >= this->send_every_) {
-    this->send_at_ = 0;
-
-    float result = NAN;
-    if (!this->queue_.empty()) {
-      // Copy queue without NaN values
-      std::vector<float> quantile_queue;
-      for (auto v : this->queue_) {
-        if (!std::isnan(v)) {
-          quantile_queue.push_back(v);
-        }
-      }
-
-      sort(quantile_queue.begin(), quantile_queue.end());
-
-      size_t queue_size = quantile_queue.size();
-      if (queue_size) {
-        size_t position = ceilf(queue_size * this->quantile_) - 1;
-        ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, queue_size);
-        result = quantile_queue[position];
-      }
-    }
-
-    ESP_LOGVV(TAG, "QuantileFilter(%p)::new_value(%f) SENDING %f", this, value, result);
-    return result;
-  }
-  return {};
+    : SortedWindowFilter(window_size, send_every, send_first_at), quantile_(quantile) {}
+
+float QuantileFilter::compute_result() {
+  FixedVector<float> sorted_values = this->get_sorted_values_();
+  if (sorted_values.empty())
+    return NAN;
+
+  size_t position = ceilf(sorted_values.size() * this->quantile_) - 1;
+  ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, sorted_values.size());
+  return sorted_values[position];
 }
 
 // MinFilter
-MinFilter::MinFilter(size_t window_size, size_t send_every, size_t send_first_at)
-    : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {}
-void MinFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; }
-void MinFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; }
-optional<float> MinFilter::new_value(float value) {
-  while (this->queue_.size() >= this->window_size_) {
-    this->queue_.pop_front();
-  }
-  this->queue_.push_back(value);
-  ESP_LOGVV(TAG, "MinFilter(%p)::new_value(%f)", this, value);
-
-  if (++this->send_at_ >= this->send_every_) {
-    this->send_at_ = 0;
-
-    float min = NAN;
-    for (auto v : this->queue_) {
-      if (!std::isnan(v)) {
-        min = std::isnan(min) ? v : std::min(min, v);
-      }
-    }
-
-    ESP_LOGVV(TAG, "MinFilter(%p)::new_value(%f) SENDING %f", this, value, min);
-    return min;
-  }
-  return {};
-}
+float MinFilter::compute_result() { return this->find_extremum_<std::less<float>>(); }
 
 // MaxFilter
-MaxFilter::MaxFilter(size_t window_size, size_t send_every, size_t send_first_at)
-    : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {}
-void MaxFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; }
-void MaxFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; }
-optional<float> MaxFilter::new_value(float value) {
-  while (this->queue_.size() >= this->window_size_) {
-    this->queue_.pop_front();
-  }
-  this->queue_.push_back(value);
-  ESP_LOGVV(TAG, "MaxFilter(%p)::new_value(%f)", this, value);
-
-  if (++this->send_at_ >= this->send_every_) {
-    this->send_at_ = 0;
-
-    float max = NAN;
-    for (auto v : this->queue_) {
-      if (!std::isnan(v)) {
-        max = std::isnan(max) ? v : std::max(max, v);
-      }
-    }
-
-    ESP_LOGVV(TAG, "MaxFilter(%p)::new_value(%f) SENDING %f", this, value, max);
-    return max;
-  }
-  return {};
-}
+float MaxFilter::compute_result() { return this->find_extremum_<std::greater<float>>(); }
 
 // SlidingWindowMovingAverageFilter
-SlidingWindowMovingAverageFilter::SlidingWindowMovingAverageFilter(size_t window_size, size_t send_every,
-                                                                   size_t send_first_at)
-    : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {}
-void SlidingWindowMovingAverageFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; }
-void SlidingWindowMovingAverageFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; }
-optional<float> SlidingWindowMovingAverageFilter::new_value(float value) {
-  while (this->queue_.size() >= this->window_size_) {
-    this->queue_.pop_front();
-  }
-  this->queue_.push_back(value);
-  ESP_LOGVV(TAG, "SlidingWindowMovingAverageFilter(%p)::new_value(%f)", this, value);
-
-  if (++this->send_at_ >= this->send_every_) {
-    this->send_at_ = 0;
-
-    float sum = 0;
-    size_t valid_count = 0;
-    for (auto v : this->queue_) {
-      if (!std::isnan(v)) {
-        sum += v;
-        valid_count++;
-      }
-    }
-
-    float average = NAN;
-    if (valid_count) {
-      average = sum / valid_count;
-    }
-
-    ESP_LOGVV(TAG, "SlidingWindowMovingAverageFilter(%p)::new_value(%f) SENDING %f", this, value, average);
-    return average;
-  }
-  return {};
+float SlidingWindowMovingAverageFilter::compute_result() {
+  float sum = 0;
+  size_t valid_count = 0;
+  for (size_t i = 0; i < this->window_count_; i++) {
+    float v = this->window_[i];
+    if (!std::isnan(v)) {
+      sum += v;
+      valid_count++;
+    }
+  }
+  return valid_count ? sum / valid_count : NAN;
 }
 
 // ExponentialMovingAverageFilter
@@ -543,5 +468,78 @@ optional<float> ToNTCTemperatureFilter::new_value(float value) {
   return temp;
 }
 
+// StreamingFilter (base class)
+StreamingFilter::StreamingFilter(size_t window_size, size_t send_first_at)
+    : window_size_(window_size), send_first_at_(send_first_at) {}
+
+optional<float> StreamingFilter::new_value(float value) {
+  // Process the value (child class tracks min/max/sum/etc)
+  this->process_value(value);
+
+  this->count_++;
+
+  // Check if we should send (handle send_first_at for first value)
+  bool should_send = false;
+  if (this->first_send_ && this->count_ >= this->send_first_at_) {
+    should_send = true;
+    this->first_send_ = false;
+  } else if (!this->first_send_ && this->count_ >= this->window_size_) {
+    should_send = true;
+  }
+
+  if (should_send) {
+    float result = this->compute_batch_result();
+    // Reset for next batch
+    this->count_ = 0;
+    this->reset_batch();
+    ESP_LOGVV(TAG, "StreamingFilter(%p)::new_value(%f) SENDING %f", this, value, result);
+    return result;
+  }
+
+  return {};
+}
+
+// StreamingMinFilter
+void StreamingMinFilter::process_value(float value) {
+  // Update running minimum (ignore NaN values)
+  if (!std::isnan(value)) {
+    this->current_min_ = std::isnan(this->current_min_) ? value : std::min(this->current_min_, value);
+  }
+}
+
+float StreamingMinFilter::compute_batch_result() { return this->current_min_; }
+
+void StreamingMinFilter::reset_batch() { this->current_min_ = NAN; }
+
+// StreamingMaxFilter
+void StreamingMaxFilter::process_value(float value) {
+  // Update running maximum (ignore NaN values)
+  if (!std::isnan(value)) {
+    this->current_max_ = std::isnan(this->current_max_) ? value : std::max(this->current_max_, value);
+  }
+}
+
+float StreamingMaxFilter::compute_batch_result() { return this->current_max_; }
+
+void StreamingMaxFilter::reset_batch() { this->current_max_ = NAN; }
+
+// StreamingMovingAverageFilter
+void StreamingMovingAverageFilter::process_value(float value) {
+  // Accumulate sum (ignore NaN values)
+  if (!std::isnan(value)) {
+    this->sum_ += value;
+    this->valid_count_++;
+  }
+}
+
+float StreamingMovingAverageFilter::compute_batch_result() {
+  return this->valid_count_ > 0 ? this->sum_ / this->valid_count_ : NAN;
+}
+
+void StreamingMovingAverageFilter::reset_batch() {
+  this->sum_ = 0.0f;
+  this->valid_count_ = 0;
+}
+
 }  // namespace sensor
 }  // namespace esphome
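Note: the batching cadence above emits the first result after send_first_at samples and every later result after a full window_size batch. A minimal Python sketch of that schedule, assuming send_first_at <= window_size (function name is illustrative):

    def batch_boundaries(window_size: int, send_first_at: int, n_samples: int):
        count, first = 0, True
        for i in range(1, n_samples + 1):
            count += 1
            if (first and count >= send_first_at) or (not first and count >= window_size):
                yield i              # a result is emitted at sample i
                count, first = 0, False

    list(batch_boundaries(5, 2, 12))  # -> [2, 7, 12]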
@@ -44,11 +44,78 @@ class Filter {
   Sensor *parent_{nullptr};
 };
 
+/** Base class for filters that use a sliding window of values.
+ *
+ * Uses a ring buffer to efficiently maintain a fixed-size sliding window without
+ * reallocations or pop_front() overhead. Eliminates deque fragmentation issues.
+ */
+class SlidingWindowFilter : public Filter {
+ public:
+  SlidingWindowFilter(size_t window_size, size_t send_every, size_t send_first_at);
+
+  void set_send_every(size_t send_every) { this->send_every_ = send_every; }
+  void set_window_size(size_t window_size);
+
+  optional<float> new_value(float value) final;
+
+ protected:
+  /// Called by new_value() to compute the filtered result from the current window
+  virtual float compute_result() = 0;
+
+  /// Access the sliding window values (ring buffer implementation)
+  /// Use: for (size_t i = 0; i < window_count_; i++) { float val = window_[i]; }
+  FixedVector<float> window_;
+  size_t window_head_{0};   ///< Index where next value will be written
+  size_t window_count_{0};  ///< Number of valid values in window (0 to window_size_)
+  size_t window_size_;      ///< Maximum window size
+  size_t send_every_;       ///< Send result every N values
+  size_t send_at_;          ///< Counter for send_every
+};
+
+/** Base class for Min/Max filters.
+ *
+ * Provides a templated helper to find extremum values efficiently.
+ */
+class MinMaxFilter : public SlidingWindowFilter {
+ public:
+  using SlidingWindowFilter::SlidingWindowFilter;
+
+ protected:
+  /// Helper to find min or max value in window, skipping NaN values
+  /// Usage: find_extremum_<std::less<float>>() for min, find_extremum_<std::greater<float>>() for max
+  template<typename Compare> float find_extremum_() {
+    float result = NAN;
+    Compare comp;
+    for (size_t i = 0; i < this->window_count_; i++) {
+      float v = this->window_[i];
+      if (!std::isnan(v)) {
+        result = std::isnan(result) ? v : (comp(v, result) ? v : result);
+      }
+    }
+    return result;
+  }
+};
+
+/** Base class for filters that need a sorted window (Median, Quantile).
+ *
+ * Extends SlidingWindowFilter to provide a helper that creates a sorted copy
+ * of non-NaN values from the window.
+ */
+class SortedWindowFilter : public SlidingWindowFilter {
+ public:
+  using SlidingWindowFilter::SlidingWindowFilter;
+
+ protected:
+  /// Helper to get sorted non-NaN values from the window
+  /// Returns empty FixedVector if all values are NaN
+  FixedVector<float> get_sorted_values_();
+};
+
 /** Simple quantile filter.
  *
- * Takes the quantile of the last <send_every> values and pushes it out every <send_every>.
+ * Takes the quantile of the last <window_size> values and pushes it out every <send_every>.
  */
-class QuantileFilter : public Filter {
+class QuantileFilter : public SortedWindowFilter {
  public:
   /** Construct a QuantileFilter.
    *
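Note: find_extremum_ folds min and max into one helper by injecting the comparison as a template parameter. The same idea in Python, skipping NaN values (function name is illustrative, not from the source):

    import math

    def find_extremum(window, wins):  # wins(a, b) -> True if a beats b
        result = math.nan
        for v in window:
            if not math.isnan(v) and (math.isnan(result) or wins(v, result)):
                result = v
        return result

    find_extremum([3.0, math.nan, 1.0], lambda a, b: a < b)  # 1.0, like std::less
    find_extremum([3.0, math.nan, 1.0], lambda a, b: a > b)  # 3.0, like std::greater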
@@ -61,25 +128,18 @@ class QuantileFilter : public Filter {
    */
   explicit QuantileFilter(size_t window_size, size_t send_every, size_t send_first_at, float quantile);
 
-  optional<float> new_value(float value) override;
-
-  void set_send_every(size_t send_every);
-  void set_window_size(size_t window_size);
-  void set_quantile(float quantile);
+  void set_quantile(float quantile) { this->quantile_ = quantile; }
 
  protected:
-  std::deque<float> queue_;
-  size_t send_every_;
-  size_t send_at_;
-  size_t window_size_;
+  float compute_result() override;
   float quantile_;
 };
 
 /** Simple median filter.
  *
- * Takes the median of the last <send_every> values and pushes it out every <send_every>.
+ * Takes the median of the last <window_size> values and pushes it out every <send_every>.
  */
-class MedianFilter : public Filter {
+class MedianFilter : public SortedWindowFilter {
  public:
   /** Construct a MedianFilter.
    *
@@ -89,18 +149,10 @@ class MedianFilter : public Filter {
    * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to
    * send_every.
    */
-  explicit MedianFilter(size_t window_size, size_t send_every, size_t send_first_at);
-
-  optional<float> new_value(float value) override;
-
-  void set_send_every(size_t send_every);
-  void set_window_size(size_t window_size);
+  using SortedWindowFilter::SortedWindowFilter;
 
  protected:
-  std::deque<float> queue_;
-  size_t send_every_;
-  size_t send_at_;
-  size_t window_size_;
+  float compute_result() override;
 };
 
 /** Simple skip filter.
@@ -123,9 +175,9 @@ class SkipInitialFilter : public Filter {
 
 /** Simple min filter.
  *
- * Takes the min of the last <send_every> values and pushes it out every <send_every>.
+ * Takes the min of the last <window_size> values and pushes it out every <send_every>.
  */
-class MinFilter : public Filter {
+class MinFilter : public MinMaxFilter {
  public:
   /** Construct a MinFilter.
    *
@@ -135,25 +187,17 @@ class MinFilter : public Filter {
    * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to
    * send_every.
    */
-  explicit MinFilter(size_t window_size, size_t send_every, size_t send_first_at);
-
-  optional<float> new_value(float value) override;
-
-  void set_send_every(size_t send_every);
-  void set_window_size(size_t window_size);
+  using MinMaxFilter::MinMaxFilter;
 
  protected:
-  std::deque<float> queue_;
-  size_t send_every_;
-  size_t send_at_;
-  size_t window_size_;
+  float compute_result() override;
 };
 
 /** Simple max filter.
  *
- * Takes the max of the last <send_every> values and pushes it out every <send_every>.
+ * Takes the max of the last <window_size> values and pushes it out every <send_every>.
  */
-class MaxFilter : public Filter {
+class MaxFilter : public MinMaxFilter {
  public:
   /** Construct a MaxFilter.
    *
@@ -163,18 +207,10 @@ class MaxFilter : public Filter {
    * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to
    * send_every.
    */
-  explicit MaxFilter(size_t window_size, size_t send_every, size_t send_first_at);
-
-  optional<float> new_value(float value) override;
-
-  void set_send_every(size_t send_every);
-  void set_window_size(size_t window_size);
+  using MinMaxFilter::MinMaxFilter;
 
  protected:
-  std::deque<float> queue_;
-  size_t send_every_;
-  size_t send_at_;
-  size_t window_size_;
+  float compute_result() override;
 };
 
 /** Simple sliding window moving average filter.
@@ -182,7 +218,7 @@ class MaxFilter : public Filter {
  * Essentially just takes takes the average of the last window_size values and pushes them out
  * every send_every.
  */
-class SlidingWindowMovingAverageFilter : public Filter {
+class SlidingWindowMovingAverageFilter : public SlidingWindowFilter {
  public:
   /** Construct a SlidingWindowMovingAverageFilter.
    *
@@ -192,18 +228,10 @@ class SlidingWindowMovingAverageFilter : public Filter {
    * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to
    * send_every.
    */
-  explicit SlidingWindowMovingAverageFilter(size_t window_size, size_t send_every, size_t send_first_at);
-
-  optional<float> new_value(float value) override;
-
-  void set_send_every(size_t send_every);
-  void set_window_size(size_t window_size);
+  using SlidingWindowFilter::SlidingWindowFilter;
 
  protected:
-  std::deque<float> queue_;
-  size_t send_every_;
-  size_t send_at_;
-  size_t window_size_;
+  float compute_result() override;
 };
 
 /** Simple exponential moving average filter.
@@ -476,5 +504,81 @@ class ToNTCTemperatureFilter : public Filter {
   double c_;
 };
 
+/** Base class for streaming filters (batch windows where window_size == send_every).
+ *
+ * When window_size equals send_every, we don't need a sliding window.
+ * This base class handles the common batching logic.
+ */
+class StreamingFilter : public Filter {
+ public:
+  StreamingFilter(size_t window_size, size_t send_first_at);
+
+  optional<float> new_value(float value) final;
+
+ protected:
+  /// Called by new_value() to process each value in the batch
+  virtual void process_value(float value) = 0;
+
+  /// Called by new_value() to compute the result after collecting window_size values
+  virtual float compute_batch_result() = 0;
+
+  /// Called by new_value() to reset internal state after sending a result
+  virtual void reset_batch() = 0;
+
+  size_t window_size_;
+  size_t count_{0};
+  size_t send_first_at_;
+  bool first_send_{true};
+};
+
+/** Streaming min filter for batch windows (window_size == send_every).
+ *
+ * Uses O(1) memory instead of O(n) by tracking only the minimum value.
+ */
+class StreamingMinFilter : public StreamingFilter {
+ public:
+  using StreamingFilter::StreamingFilter;
+
+ protected:
+  void process_value(float value) override;
+  float compute_batch_result() override;
+  void reset_batch() override;
+
+  float current_min_{NAN};
+};
+
+/** Streaming max filter for batch windows (window_size == send_every).
+ *
+ * Uses O(1) memory instead of O(n) by tracking only the maximum value.
+ */
+class StreamingMaxFilter : public StreamingFilter {
+ public:
+  using StreamingFilter::StreamingFilter;
+
+ protected:
+  void process_value(float value) override;
+  float compute_batch_result() override;
+  void reset_batch() override;
+
+  float current_max_{NAN};
+};
+
+/** Streaming moving average filter for batch windows (window_size == send_every).
+ *
+ * Uses O(1) memory instead of O(n) by tracking only sum and count.
+ */
+class StreamingMovingAverageFilter : public StreamingFilter {
+ public:
+  using StreamingFilter::StreamingFilter;
+
+ protected:
+  void process_value(float value) override;
+  float compute_batch_result() override;
+  void reset_batch() override;
+
+  float sum_{0.0f};
+  size_t valid_count_{0};
+};
+
 }  // namespace sensor
 }  // namespace esphome
@@ -1,7 +1,7 @@
 import logging
 
 from esphome import core
-from esphome.config_helpers import Extend, Remove, merge_config
+from esphome.config_helpers import Extend, Remove, merge_config, merge_dicts_ordered
 import esphome.config_validation as cv
 from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS
 from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base
@@ -170,10 +170,10 @@ def do_substitution_pass(config, command_line_substitutions, ignore_missing=Fals
         return
 
     # Merge substitutions in config, overriding with substitutions coming from command line:
-    substitutions = {
-        **config.get(CONF_SUBSTITUTIONS, {}),
-        **(command_line_substitutions or {}),
-    }
+    # Use merge_dicts_ordered to preserve OrderedDict type for move_to_end()
+    substitutions = merge_dicts_ordered(
+        config.get(CONF_SUBSTITUTIONS, {}), command_line_substitutions or {}
+    )
     with cv.prepend_path("substitutions"):
         if not isinstance(substitutions, dict):
             raise cv.Invalid(
@@ -241,9 +241,14 @@ void ThermostatClimate::control(const climate::ClimateCall &call) {
 
 climate::ClimateTraits ThermostatClimate::traits() {
   auto traits = climate::ClimateTraits();
-  traits.set_supports_current_temperature(true);
+
+  traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION | climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+
+  if (this->supports_two_points_)
+    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE);
+
   if (this->humidity_sensor_ != nullptr)
-    traits.set_supports_current_humidity(true);
+    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
 
   if (this->supports_auto_)
     traits.add_supported_mode(climate::CLIMATE_MODE_AUTO);
@@ -294,9 +299,6 @@ climate::ClimateTraits ThermostatClimate::traits() {
   for (auto &it : this->custom_preset_config_) {
     traits.add_supported_custom_preset(it.first);
   }
-
-  traits.set_supports_two_point_target_temperature(this->supports_two_points_);
-  traits.set_supports_action(true);
   return traits;
 }
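Note: the traits refactor above replaces one boolean setter per capability with a single bitmask built up through add_feature_flags. A sketch of the pattern in Python (flag names and values are illustrative, not ESPHome's actual constants):

    from enum import IntFlag

    class ClimateFeature(IntFlag):
        ACTION = 1 << 0
        CURRENT_TEMPERATURE = 1 << 1
        TWO_POINT_TARGET_TEMPERATURE = 1 << 2
        CURRENT_HUMIDITY = 1 << 3

    supports_two_points = True  # stand-in for this->supports_two_points_
    flags = ClimateFeature.ACTION | ClimateFeature.CURRENT_TEMPERATURE
    if supports_two_points:
        flags |= ClimateFeature.TWO_POINT_TARGET_TEMPERATURE
    assert ClimateFeature.CURRENT_HUMIDITY not in flags  # membership is a bit test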
@@ -12,7 +12,7 @@ from typing import Any
 import voluptuous as vol
 
 from esphome import core, loader, pins, yaml_util
-from esphome.config_helpers import Extend, Remove
+from esphome.config_helpers import Extend, Remove, merge_dicts_ordered
 import esphome.config_validation as cv
 from esphome.const import (
     CONF_ESPHOME,
@@ -922,10 +922,9 @@ def validate_config(
     if CONF_SUBSTITUTIONS in config or command_line_substitutions:
         from esphome.components import substitutions
 
-        result[CONF_SUBSTITUTIONS] = {
-            **(config.get(CONF_SUBSTITUTIONS) or {}),
-            **command_line_substitutions,
-        }
+        result[CONF_SUBSTITUTIONS] = merge_dicts_ordered(
+            config.get(CONF_SUBSTITUTIONS) or {}, command_line_substitutions
+        )
         result.add_output_path([CONF_SUBSTITUTIONS], CONF_SUBSTITUTIONS)
         try:
             substitutions.do_substitution_pass(config, command_line_substitutions)
@@ -10,6 +10,7 @@ from esphome.const import (
     PlatformFramework,
 )
 from esphome.core import CORE
+from esphome.util import OrderedDict
 
 # Pre-build lookup map from (platform, framework) tuples to PlatformFramework enum
 _PLATFORM_FRAMEWORK_LOOKUP = {
@@ -17,6 +18,25 @@ _PLATFORM_FRAMEWORK_LOOKUP = {
 }
 
 
+def merge_dicts_ordered(*dicts: dict) -> OrderedDict:
+    """Merge multiple dicts into an OrderedDict, preserving key order.
+
+    This is a helper to ensure that dictionary merging preserves OrderedDict type,
+    which is important for operations like move_to_end().
+
+    Args:
+        *dicts: Variable number of dictionaries to merge (later dicts override earlier ones)
+
+    Returns:
+        OrderedDict with merged contents
+    """
+    result = OrderedDict()
+    for d in dicts:
+        if d:
+            result.update(d)
+    return result
+
+
 class Extend:
     def __init__(self, value):
         self.value = value
@@ -60,7 +80,11 @@ def merge_config(full_old, full_new):
     if isinstance(new, dict):
         if not isinstance(old, dict):
             return new
-        res = old.copy()
+        # Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict
+        if isinstance(old, OrderedDict) or isinstance(new, OrderedDict):
+            res = OrderedDict(old)
+        else:
+            res = old.copy()
         for k, v in new.items():
             if isinstance(v, Remove) and k in old:
                 del res[k]
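Note: the reason plain dict unpacking is not enough here: {**a, **b} always builds a built-in dict, so an OrderedDict input silently loses methods like move_to_end(). For example:

    from collections import OrderedDict

    a = OrderedDict(x=1, y=2)
    merged = {**a, "y": 3}      # plain dict: merged.move_to_end("x") raises AttributeError
    ordered = OrderedDict()
    for d in (a, {"y": 3}):     # what merge_dicts_ordered does
        ordered.update(d)
    ordered.move_to_end("x")    # works: OrderedDict([('y', 3), ('x', 1)])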
105 esphome/git.py
@@ -18,14 +18,60 @@ _LOGGER = logging.getLogger(__name__)
 NEVER_REFRESH = TimePeriodSeconds(seconds=-1)
 
 
-def run_git_command(cmd, cwd=None) -> str:
-    _LOGGER.debug("Running git command: %s", " ".join(cmd))
+class GitException(cv.Invalid):
+    """Base exception for git-related errors."""
+
+
+class GitNotInstalledError(GitException):
+    """Exception raised when git is not installed on the system."""
+
+
+class GitCommandError(GitException):
+    """Exception raised when a git command fails."""
+
+
+class GitRepositoryError(GitException):
+    """Exception raised when a git repository is in an invalid state."""
+
+
+def run_git_command(cmd: list[str], git_dir: Path | None = None) -> str:
+    if git_dir is not None:
+        _LOGGER.debug(
+            "Running git command with repository isolation: %s (git_dir=%s)",
+            " ".join(cmd),
+            git_dir,
+        )
+    else:
+        _LOGGER.debug("Running git command: %s", " ".join(cmd))
+
+    # Set up environment for repository isolation if git_dir is provided
+    # Force git to only operate on this specific repository by setting
+    # GIT_DIR and GIT_WORK_TREE. This prevents git from walking up the
+    # directory tree to find parent repositories when the target repo's
+    # .git directory is corrupt. Without this, commands like 'git stash'
+    # could accidentally operate on parent repositories (e.g., the main
+    # ESPHome repo) instead of failing, causing data loss.
+    env: dict[str, str] | None = None
+    cwd: str | None = None
+    if git_dir is not None:
+        env = {
+            **subprocess.os.environ,
+            "GIT_DIR": str(Path(git_dir) / ".git"),
+            "GIT_WORK_TREE": str(git_dir),
+        }
+        cwd = str(git_dir)
+
     try:
         ret = subprocess.run(
-            cmd, cwd=cwd, capture_output=True, check=False, close_fds=False
+            cmd,
+            cwd=cwd,
+            capture_output=True,
+            check=False,
+            close_fds=False,
+            env=env,
         )
     except FileNotFoundError as err:
-        raise cv.Invalid(
+        raise GitNotInstalledError(
             "git is not installed but required for external_components.\n"
             "Please see https://git-scm.com/book/en/v2/Getting-Started-Installing-Git for installing git"
         ) from err
@@ -34,8 +80,8 @@ def run_git_command(cmd, cwd=None) -> str:
         err_str = ret.stderr.decode("utf-8")
         lines = [x.strip() for x in err_str.splitlines()]
         if lines[-1].startswith("fatal:"):
-            raise cv.Invalid(lines[-1][len("fatal: ") :])
-        raise cv.Invalid(err_str)
+            raise GitCommandError(lines[-1][len("fatal: ") :])
+        raise GitCommandError(err_str)
 
     return ret.stdout.decode("utf-8").strip()
 
@@ -77,15 +123,15 @@ def clone_or_update(
             # We need to fetch the PR branch first, otherwise git will complain
             # about missing objects
             _LOGGER.info("Fetching %s", ref)
-            run_git_command(["git", "fetch", "--", "origin", ref], str(repo_dir))
-            run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
+            run_git_command(["git", "fetch", "--", "origin", ref], git_dir=repo_dir)
+            run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], git_dir=repo_dir)
 
         if submodules is not None:
             _LOGGER.info(
                 "Initializing submodules (%s) for %s", ", ".join(submodules), key
             )
             run_git_command(
-                ["git", "submodule", "update", "--init"] + submodules, str(repo_dir)
+                ["git", "submodule", "update", "--init"] + submodules, git_dir=repo_dir
             )
 
     else:
@@ -104,35 +150,55 @@ def clone_or_update(
         # Try to update the repository, recovering from broken state if needed
         old_sha: str | None = None
         try:
-            old_sha = run_git_command(["git", "rev-parse", "HEAD"], str(repo_dir))
+            # First verify the repository is valid by checking HEAD
+            # Use git_dir parameter to prevent git from walking up to parent repos
+            old_sha = run_git_command(
+                ["git", "rev-parse", "HEAD"], git_dir=repo_dir
+            )
+
             _LOGGER.info("Updating %s", key)
             _LOGGER.debug("Location: %s", repo_dir)
 
             # Stash local changes (if any)
+            # Use git_dir to ensure this only affects the specific repo
             run_git_command(
-                ["git", "stash", "push", "--include-untracked"], str(repo_dir)
+                ["git", "stash", "push", "--include-untracked"],
+                git_dir=repo_dir,
             )
+
             # Fetch remote ref
             cmd = ["git", "fetch", "--", "origin"]
             if ref is not None:
                 cmd.append(ref)
-            run_git_command(cmd, str(repo_dir))
+            run_git_command(cmd, git_dir=repo_dir)
+
             # Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch)
-            run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
-        except cv.Invalid as err:
+            run_git_command(
+                ["git", "reset", "--hard", "FETCH_HEAD"],
+                git_dir=repo_dir,
+            )
+        except GitException as err:
             # Repository is in a broken state or update failed
             # Only attempt recovery once to prevent infinite recursion
             if not _recover_broken:
+                _LOGGER.error(
+                    "Repository %s recovery failed, cannot retry (already attempted once)",
+                    key,
+                )
                 raise
 
             _LOGGER.warning(
-                "Repository %s has issues (%s), removing and re-cloning",
+                "Repository %s has issues (%s), attempting recovery",
                 key,
                 err,
             )
+            _LOGGER.info("Removing broken repository at %s", repo_dir)
             shutil.rmtree(repo_dir)
+            _LOGGER.info("Successfully removed broken repository, re-cloning...")
 
             # Recursively call clone_or_update to re-clone
            # Set _recover_broken=False to prevent infinite recursion
-            return clone_or_update(
+            result = clone_or_update(
                 url=url,
                 ref=ref,
                 refresh=refresh,
@@ -142,18 +208,21 @@ def clone_or_update(
                 submodules=submodules,
                 _recover_broken=False,
             )
+            _LOGGER.info("Repository %s successfully recovered", key)
+            return result
 
         if submodules is not None:
             _LOGGER.info(
                 "Updating submodules (%s) for %s", ", ".join(submodules), key
             )
             run_git_command(
-                ["git", "submodule", "update", "--init"] + submodules, str(repo_dir)
+                ["git", "submodule", "update", "--init"] + submodules,
+                git_dir=repo_dir,
             )
 
     def revert():
         _LOGGER.info("Reverting changes to %s -> %s", key, old_sha)
-        run_git_command(["git", "reset", "--hard", old_sha], str(repo_dir))
+        run_git_command(["git", "reset", "--hard", old_sha], git_dir=repo_dir)
 
     return repo_dir, revert
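Note: the GIT_DIR/GIT_WORK_TREE isolation above can be reproduced standalone. A minimal sketch, assuming a plain git checkout at the given path (helper name and paths are illustrative):

    import os
    import subprocess
    from pathlib import Path

    def run_isolated_git(args: list[str], repo: Path) -> str:
        # Pinning GIT_DIR and GIT_WORK_TREE stops git from walking up to a
        # parent repository when repo/.git is corrupt - the command fails
        # instead of silently operating on the parent checkout.
        env = {**os.environ, "GIT_DIR": str(repo / ".git"), "GIT_WORK_TREE": str(repo)}
        ret = subprocess.run(["git", *args], cwd=repo, env=env,
                             capture_output=True, check=True)
        return ret.stdout.decode("utf-8").strip()

    # e.g. run_isolated_git(["rev-parse", "HEAD"], Path("/tmp/external_component"))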
@@ -56,6 +56,10 @@ DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus")
 # These components can be merged with any other group
 NO_BUSES_SIGNATURE = "no_buses"
 
+# Prefix for isolated component signatures
+# Isolated components have unique signatures and cannot be merged with others
+ISOLATED_SIGNATURE_PREFIX = "isolated_"
+
 # Base bus components - these ARE the bus implementations and should not
 # be flagged as needing migration since they are the platform/base components
 BASE_BUS_COMPONENTS = {
@@ -75,6 +79,7 @@ ISOLATED_COMPONENTS = {
     "ethernet": "Defines ethernet: which conflicts with wifi: used by most components",
     "ethernet_info": "Related to ethernet component which conflicts with wifi",
     "lvgl": "Defines multiple SDL displays on host platform that conflict when merged with other display configs",
+    "mapping": "Uses dict format for image/display sections incompatible with standard list format - ESPHome merge_config cannot handle",
     "openthread": "Conflicts with wifi: used by most components",
     "openthread_info": "Conflicts with wifi: used by most components",
     "matrix_keypad": "Needs isolation due to keypad",
@@ -368,6 +373,143 @@ def analyze_all_components(
     return components, non_groupable, direct_bus_components
 
 
+@lru_cache(maxsize=256)
+def _get_bus_configs(buses: tuple[str, ...]) -> frozenset[tuple[str, str]]:
+    """Map bus type to set of configs for that type.
+
+    Args:
+        buses: Tuple of bus package names (e.g., ("uart_9600", "i2c"))
+
+    Returns:
+        Frozenset of (base_type, full_config) tuples
+        Example: frozenset({("uart", "uart_9600"), ("i2c", "i2c")})
+    """
+    # Split on underscore to get base type: "uart_9600" -> "uart", "i2c" -> "i2c"
+    return frozenset((bus.split("_", 1)[0], bus) for bus in buses)
+
+
+@lru_cache(maxsize=1024)
+def are_buses_compatible(buses1: tuple[str, ...], buses2: tuple[str, ...]) -> bool:
+    """Check if two bus tuples are compatible for merging.
+
+    Two bus lists are compatible if they don't have conflicting configurations
+    for the same bus type. For example:
+    - ("ble", "uart") and ("i2c",) are compatible (different buses)
+    - ("uart_9600",) and ("uart_19200",) are NOT compatible (same bus, different configs)
+    - ("uart_9600",) and ("uart_9600",) are compatible (same bus, same config)
+
+    Args:
+        buses1: First tuple of bus package names
+        buses2: Second tuple of bus package names
+
+    Returns:
+        True if buses can be merged without conflicts
+    """
+    configs1 = _get_bus_configs(buses1)
+    configs2 = _get_bus_configs(buses2)
+
+    # Group configs by base type
+    bus_types1: dict[str, set[str]] = {}
+    for base_type, full_config in configs1:
+        if base_type not in bus_types1:
+            bus_types1[base_type] = set()
+        bus_types1[base_type].add(full_config)
+
+    bus_types2: dict[str, set[str]] = {}
+    for base_type, full_config in configs2:
+        if base_type not in bus_types2:
+            bus_types2[base_type] = set()
+        bus_types2[base_type].add(full_config)
+
+    # Check for conflicts: same bus type with different configs
+    for bus_type, configs in bus_types1.items():
+        if bus_type not in bus_types2:
+            continue  # No conflict - different bus types
+        # Same bus type - check if configs match
+        if configs != bus_types2[bus_type]:
+            return False  # Conflict - same bus type, different configs
+
+    return True  # No conflicts found
+
+
+def merge_compatible_bus_groups(
+    grouped_components: dict[tuple[str, str], list[str]],
+) -> dict[tuple[str, str], list[str]]:
+    """Merge groups with compatible (non-conflicting) buses.
+
+    This function takes groups keyed by (platform, bus_signature) and merges
+    groups that share the same platform and have compatible bus configurations.
+    Two groups can be merged if their buses don't conflict - meaning they don't
+    have different configurations for the same bus type.
+
+    For example:
+    - ["ble"] + ["uart"] = compatible (different buses)
+    - ["uart_9600"] + ["uart_19200"] = incompatible (same bus, different configs)
+    - ["uart_9600"] + ["uart_9600"] = compatible (same bus, same config)
+
+    Args:
+        grouped_components: Dictionary mapping (platform, signature) to list of component names
+
+    Returns:
+        Dictionary with same structure but with compatible groups merged
+    """
+    merged_groups: dict[tuple[str, str], list[str]] = {}
+    processed_keys: set[tuple[str, str]] = set()
+
+    for (platform1, sig1), comps1 in sorted(grouped_components.items()):
+        if (platform1, sig1) in processed_keys:
+            continue
+
+        # Skip NO_BUSES_SIGNATURE - kept separate for flexible batch distribution
+        # These components have no bus requirements and can be added to any batch
+        # as "fillers" for load balancing across CI runners
+        if sig1 == NO_BUSES_SIGNATURE:
+            merged_groups[(platform1, sig1)] = comps1
+            processed_keys.add((platform1, sig1))
+            continue
+
+        # Skip isolated components - they can't be merged with others
+        if sig1.startswith(ISOLATED_SIGNATURE_PREFIX):
+            merged_groups[(platform1, sig1)] = comps1
+            processed_keys.add((platform1, sig1))
+            continue
+
+        # Start with this group's components
+        merged_comps: list[str] = list(comps1)
+        merged_sig: str = sig1
+        processed_keys.add((platform1, sig1))
+
+        # Get buses for this group as tuple for caching
+        buses1: tuple[str, ...] = tuple(sorted(sig1.split("+")))
+
+        # Try to merge with other groups on same platform
+        for (platform2, sig2), comps2 in sorted(grouped_components.items()):
+            if (platform2, sig2) in processed_keys:
+                continue
+            if platform2 != platform1:
+                continue  # Different platforms can't be merged
+            if sig2 == NO_BUSES_SIGNATURE:
+                continue  # Keep separate for flexible batch distribution
+            if sig2.startswith(ISOLATED_SIGNATURE_PREFIX):
+                continue  # Isolated components can't be merged
+
+            # Check if buses are compatible
+            buses2: tuple[str, ...] = tuple(sorted(sig2.split("+")))
+            if are_buses_compatible(buses1, buses2):
+                # Compatible! Merge this group
+                merged_comps.extend(comps2)
+                processed_keys.add((platform2, sig2))
+                # Update merged signature to include all unique buses
+                all_buses: set[str] = set(buses1) | set(buses2)
+                merged_sig = "+".join(sorted(all_buses))
+                buses1 = tuple(sorted(all_buses))  # Update for next iteration
+
+        # Store merged group
+        merged_groups[(platform1, merged_sig)] = merged_comps
+
+    return merged_groups
+
+
 def create_grouping_signature(
     platform_buses: dict[str, list[str]], platform: str
 ) -> str:
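Note: the compatibility rule in are_buses_compatible above reduces to "no bus type may appear with two different configurations". The expected results, taken from its docstring:

    are_buses_compatible(("ble", "uart"), ("i2c",))        # True  - disjoint bus types
    are_buses_compatible(("uart_9600",), ("uart_19200",))  # False - same type, different configs
    are_buses_compatible(("uart_9600",), ("uart_9600",))   # True  - identical configs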
@@ -185,17 +185,20 @@ def main():
         "-c",
         "--changed",
         action="store_true",
-        help="List all components required for testing based on changes (includes dependencies)",
+        help="List all components with dependencies (used by clang-tidy). "
+        "When base test infrastructure changes, returns ALL components.",
     )
     parser.add_argument(
         "--changed-direct",
         action="store_true",
-        help="List only directly changed components (without dependencies)",
+        help="List only directly changed components, ignoring infrastructure changes "
+        "(used by CI for isolation decisions)",
     )
     parser.add_argument(
         "--changed-with-deps",
         action="store_true",
-        help="Output JSON with both directly changed and all changed components",
+        help="Output JSON with both directly changed and all changed components "
+        "(with dependencies), ignoring infrastructure changes (used by CI for test determination)",
     )
     parser.add_argument(
         "-b", "--branch", help="Branch to compare changed files against"
@@ -213,12 +216,34 @@ def main():
         # When --changed* is passed, only get the changed files
         changed = changed_files(args.branch)
 
-        # If any base test file(s) changed, there's no need to filter out components
-        if any("tests/test_build_components" in file for file in changed):
-            # Need to get all component files
+        # If any base test file(s) changed, we need to check all components
+        # BUT only for --changed (used by clang-tidy for comprehensive checking)
+        # NOT for --changed-direct or --changed-with-deps (used by CI for targeted testing)
+        #
+        # Flag usage:
+        # - --changed: Used by clang-tidy (script/helpers.py get_changed_components)
+        #   Returns: All components with dependencies when base test files change
+        #   Reason: Test infrastructure changes may affect any component
+        #
+        # - --changed-direct: Used by CI isolation (script/determine-jobs.py)
+        #   Returns: Only components with actual code changes (not infrastructure)
+        #   Reason: Only directly changed components need isolated testing
+        #
+        # - --changed-with-deps: Used by CI test determination (script/determine-jobs.py)
+        #   Returns: Components with code changes + their dependencies (not infrastructure)
+        #   Reason: CI needs to test changed components and their dependents
+        base_test_changed = any(
+            "tests/test_build_components" in file for file in changed
+        )
+
+        if base_test_changed and not args.changed_direct and not args.changed_with_deps:
+            # Base test infrastructure changed - load all component files
+            # This is for --changed (clang-tidy) which needs comprehensive checking
             files = get_all_component_files()
         else:
-            # Only look at changed component files
+            # Only look at changed component files (ignore infrastructure changes)
+            # For --changed-direct: only actual component code changes matter (for isolation)
+            # For --changed-with-deps: only actual component code changes matter (for testing)
             files = [f for f in changed if filter_component_files(f)]
     else:
         # Get all component files
@@ -16,6 +16,7 @@ The merger handles:
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
from functools import lru_cache
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
@@ -28,6 +29,10 @@ from esphome import yaml_util
|
|||||||
from esphome.config_helpers import merge_config
|
from esphome.config_helpers import merge_config
|
||||||
from script.analyze_component_buses import PACKAGE_DEPENDENCIES, get_common_bus_packages
|
from script.analyze_component_buses import PACKAGE_DEPENDENCIES, get_common_bus_packages
|
||||||
|
|
||||||
|
# Prefix for dependency markers in package tracking
|
||||||
|
# Used to mark packages that are included transitively (e.g., uart via modbus)
|
||||||
|
DEPENDENCY_MARKER_PREFIX = "_dep_"
|
||||||
|
|
||||||
|
|
||||||
def load_yaml_file(yaml_file: Path) -> dict:
|
def load_yaml_file(yaml_file: Path) -> dict:
|
||||||
"""Load YAML file using ESPHome's YAML loader.
|
"""Load YAML file using ESPHome's YAML loader.
|
||||||
@@ -44,6 +49,34 @@ def load_yaml_file(yaml_file: Path) -> dict:
     return yaml_util.load_yaml(yaml_file)


+@lru_cache(maxsize=256)
+def get_component_packages(
+    component_name: str, platform: str, tests_dir_str: str
+) -> dict:
+    """Get packages dict from a component's test file with caching.
+
+    This function is cached to avoid re-loading and re-parsing the same file
+    multiple times when extracting packages during cross-bus merging.
+
+    Args:
+        component_name: Name of the component
+        platform: Platform name (e.g., "esp32-idf")
+        tests_dir_str: String path to tests/components directory (must be string for cache hashability)
+
+    Returns:
+        Dictionary with 'packages' key containing the raw packages dict from the YAML,
+        or empty dict if no packages section exists
+    """
+    tests_dir = Path(tests_dir_str)
+    test_file = tests_dir / component_name / f"test.{platform}.yaml"
+    comp_data = load_yaml_file(test_file)
+
+    if "packages" not in comp_data or not isinstance(comp_data["packages"], dict):
+        return {}
+
+    return comp_data["packages"]
+
+
 def extract_packages_from_yaml(data: dict) -> dict[str, str]:
     """Extract COMMON BUS package includes from parsed YAML.

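Annotation: get_component_packages leans on functools.lru_cache keying on the exact argument tuple; a toy model of the same pattern (hypothetical loader, not the module's code) showing that repeated lookups parse once.

# Toy illustration of the @lru_cache pattern used by get_component_packages.
# load_count shows that repeated lookups with the same key load the file once.
from functools import lru_cache

load_count = 0

@lru_cache(maxsize=256)
def cached_load(component_name: str, platform: str, tests_dir_str: str) -> dict:
    global load_count
    load_count += 1
    return {"i2c": f"{tests_dir_str}/{component_name}/test.{platform}.yaml"}

cached_load("bme280_i2c", "esp32-idf", "tests/components")
cached_load("bme280_i2c", "esp32-idf", "tests/components")  # served from cache
assert load_count == 1
print(cached_load.cache_info())  # hits=1, misses=1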
@@ -82,7 +115,7 @@ def extract_packages_from_yaml(data: dict) -> dict[str, str]:
             if dep not in common_bus_packages:
                 continue
             # Mark as included via dependency
-            packages[f"_dep_{dep}"] = f"(included via {name})"
+            packages[f"{DEPENDENCY_MARKER_PREFIX}{dep}"] = f"(included via {name})"

     return packages

@@ -195,6 +228,9 @@ def merge_component_configs(
     # Start with empty config
     merged_config_data = {}

+    # Convert tests_dir to string for caching
+    tests_dir_str = str(tests_dir)
+
     # Process each component
     for comp_name in component_names:
         comp_dir = tests_dir / comp_name
@@ -206,26 +242,29 @@
         # Load the component's test file
         comp_data = load_yaml_file(test_file)

-        # Validate packages are compatible
-        # Components with no packages (no_buses) can merge with any group
+        # Merge packages from all components (cross-bus merging)
+        # Components can have different packages (e.g., one with ble, another with uart)
+        # as long as they don't conflict (checked by are_buses_compatible before calling this)
         comp_packages = extract_packages_from_yaml(comp_data)

         if all_packages is None:
-            # First component - set the baseline
-            all_packages = comp_packages
-        elif not comp_packages:
-            # This component has no packages (no_buses) - it can merge with any group
-            pass
-        elif not all_packages:
-            # Previous components had no packages, but this one does - adopt these packages
-            all_packages = comp_packages
-        elif comp_packages != all_packages:
-            # Both have packages but they differ - this is an error
-            raise ValueError(
-                f"Component {comp_name} has different packages than previous components. "
-                f"Expected: {all_packages}, Got: {comp_packages}. "
-                f"All components must use the same common bus configs to be merged."
-            )
+            # First component - initialize package dict
+            all_packages = comp_packages if comp_packages else {}
+        elif comp_packages:
+            # Merge packages - combine all unique package types
+            # If both have the same package type, verify they're identical
+            for pkg_name, pkg_config in comp_packages.items():
+                if pkg_name in all_packages:
+                    # Same package type - verify config matches
+                    if all_packages[pkg_name] != pkg_config:
+                        raise ValueError(
+                            f"Component {comp_name} has conflicting config for package '{pkg_name}'. "
+                            f"Expected: {all_packages[pkg_name]}, Got: {pkg_config}. "
+                            f"Components with conflicting bus configs cannot be merged."
+                        )
+                else:
+                    # New package type - add it
+                    all_packages[pkg_name] = pkg_config

         # Handle $component_dir by replacing with absolute path
         # This allows components that use local file references to be grouped
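Annotation: a self-contained sketch of the merge rule introduced above — the same package name must carry an identical config, unseen names are unioned. Simplified standalone function with hypothetical package values, not the module's actual code.

# Standalone sketch of the package-merge rule: union new package types,
# reject two components that configure the same package differently.
def merge_packages(all_packages: dict, comp_packages: dict, comp_name: str) -> dict:
    for pkg_name, pkg_config in comp_packages.items():
        if pkg_name in all_packages:
            if all_packages[pkg_name] != pkg_config:
                raise ValueError(
                    f"Component {comp_name} has conflicting config for package '{pkg_name}'"
                )
        else:
            all_packages[pkg_name] = pkg_config
    return all_packages

merged = merge_packages({}, {"uart": "uart/esp32-idf.yaml"}, "modbus")
merged = merge_packages(merged, {"ble": "ble/esp32-idf.yaml"}, "ble_client")  # cross-bus: ok
try:
    merge_packages(merged, {"uart": "uart/other.yaml"}, "conflicting")
except ValueError as e:
    print(e)  # conflicting uart config is rejected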
@@ -287,26 +326,51 @@
     # merge_config handles list merging with ID-based deduplication automatically
     merged_config_data = merge_config(merged_config_data, comp_data)

-    # Add packages back (only once, since they're identical)
-    # IMPORTANT: Only re-add common bus packages (spi, i2c, uart, etc.)
+    # Add merged packages back (union of all component packages)
+    # IMPORTANT: Only include common bus packages (spi, i2c, uart, etc.)
     # Do NOT re-add component-specific packages as they contain unprefixed $component_dir refs
     if all_packages:
-        first_comp_data = load_yaml_file(
-            tests_dir / component_names[0] / f"test.{platform}.yaml"
-        )
-        if "packages" in first_comp_data and isinstance(
-            first_comp_data["packages"], dict
-        ):
-            # Filter to only include common bus packages
-            # Only dict format can contain common bus packages
-            common_bus_packages = get_common_bus_packages()
-            filtered_packages = {
-                name: value
-                for name, value in first_comp_data["packages"].items()
-                if name in common_bus_packages
-            }
-            if filtered_packages:
-                merged_config_data["packages"] = filtered_packages
+        # Build packages dict from merged all_packages
+        # all_packages is a dict mapping package_name -> str(package_value)
+        # We need to reconstruct the actual package values by loading them from any component
+        # Since packages with the same name must have identical configs (verified above),
+        # we can load the package value from the first component that has each package
+        common_bus_packages = get_common_bus_packages()
+        merged_packages: dict[str, Any] = {}
+
+        # Collect packages that are included as dependencies
+        # If modbus is present, uart is included via modbus.packages.uart
+        packages_to_skip: set[str] = set()
+        for pkg_name in all_packages:
+            if pkg_name.startswith(DEPENDENCY_MARKER_PREFIX):
+                # Extract the actual package name (remove _dep_ prefix)
+                dep_name = pkg_name[len(DEPENDENCY_MARKER_PREFIX) :]
+                packages_to_skip.add(dep_name)
+
+        for pkg_name in all_packages:
+            # Skip dependency markers
+            if pkg_name.startswith(DEPENDENCY_MARKER_PREFIX):
+                continue
+            # Skip non-common-bus packages
+            if pkg_name not in common_bus_packages:
+                continue
+            # Skip packages that are included as dependencies of other packages
+            # This prevents duplicate definitions (e.g., uart via modbus + uart separately)
+            if pkg_name in packages_to_skip:
+                continue
+
+            # Find a component that has this package and extract its value
+            # Uses cached lookup to avoid re-loading the same files
+            for comp_name in component_names:
+                comp_packages = get_component_packages(
+                    comp_name, platform, tests_dir_str
+                )
+                if pkg_name in comp_packages:
+                    merged_packages[pkg_name] = comp_packages[pkg_name]
+                    break
+
+        if merged_packages:
+            merged_config_data["packages"] = merged_packages

     # Deduplicate items with same ID (keeps first occurrence)
     merged_config_data = deduplicate_by_id(merged_config_data)
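Annotation: a sketch of the _dep_ marker bookkeeping above, with made-up package values — a bus pulled in transitively (uart via modbus) is skipped so it is not emitted twice.

# Sketch of the dependency-marker pass: "_dep_uart" records that uart already
# arrives via another package (e.g. modbus), so a separate uart entry is skipped.
DEPENDENCY_MARKER_PREFIX = "_dep_"
COMMON_BUS_PACKAGES = {"i2c", "spi", "uart", "modbus"}

all_packages = {
    "modbus": "modbus/esp32-idf.yaml",
    "_dep_uart": "(included via modbus)",
    "uart": "uart/esp32-idf.yaml",
    "i2c": "i2c/esp32-idf.yaml",
}

packages_to_skip = {
    name[len(DEPENDENCY_MARKER_PREFIX):]
    for name in all_packages
    if name.startswith(DEPENDENCY_MARKER_PREFIX)
}

merged = {
    name: value
    for name, value in all_packages.items()
    if not name.startswith(DEPENDENCY_MARKER_PREFIX)
    and name in COMMON_BUS_PACKAGES
    and name not in packages_to_skip
}
print(merged)  # {'modbus': 'modbus/esp32-idf.yaml', 'i2c': 'i2c/esp32-idf.yaml'}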
@@ -22,9 +22,11 @@ sys.path.insert(0, str(Path(__file__).parent.parent))

 from script.analyze_component_buses import (
     ISOLATED_COMPONENTS,
+    ISOLATED_SIGNATURE_PREFIX,
     NO_BUSES_SIGNATURE,
     analyze_all_components,
     create_grouping_signature,
+    merge_compatible_bus_groups,
 )

 # Weighting for batch creation
@@ -33,6 +35,10 @@ from script.analyze_component_buses import (
 ISOLATED_WEIGHT = 10
 GROUPABLE_WEIGHT = 1

+# Platform used for batching (platform-agnostic batching)
+# Batches are split across CI runners and each runner tests all platforms
+ALL_PLATFORMS = "all"
+

 def has_test_files(component_name: str, tests_dir: Path) -> bool:
     """Check if a component has test files.
@@ -57,7 +63,7 @@ def create_intelligent_batches(
     tests_dir: Path,
     batch_size: int = 40,
     directly_changed: set[str] | None = None,
-) -> list[list[str]]:
+) -> tuple[list[list[str]], dict[tuple[str, str], list[str]]]:
     """Create batches optimized for component grouping.

     Args:
@@ -67,7 +73,9 @@
         directly_changed: Set of directly changed components (for logging only)

     Returns:
-        List of component batches (lists of component names)
+        Tuple of (batches, signature_groups) where:
+        - batches: List of component batches (lists of component names)
+        - signature_groups: Dict mapping (platform, signature) to component lists
     """
     # Filter out components without test files
     # Platform components like 'climate' and 'climate_ir' don't have test files
@@ -91,8 +99,9 @@

     # Group components by their bus signature ONLY (ignore platform)
     # All platforms will be tested by test_build_components.py for each batch
-    # Key: signature, Value: list of components
-    signature_groups: dict[str, list[str]] = defaultdict(list)
+    # Key: (platform, signature), Value: list of components
+    # We use ALL_PLATFORMS since batching is platform-agnostic
+    signature_groups: dict[tuple[str, str], list[str]] = defaultdict(list)

     for component in components_with_tests:
         # Components that can't be grouped get unique signatures
@@ -107,7 +116,9 @@
             or (directly_changed and component in directly_changed)
         )
         if is_isolated:
-            signature_groups[f"isolated_{component}"].append(component)
+            signature_groups[
+                (ALL_PLATFORMS, f"{ISOLATED_SIGNATURE_PREFIX}{component}")
+            ].append(component)
             continue

         # Get signature from any platform (they should all have the same buses)
@@ -117,11 +128,17 @@
             if buses:
                 signature = create_grouping_signature({platform: buses}, platform)
                 # Group by signature only - platform doesn't matter for batching
-                signature_groups[signature].append(component)
+                # Use ALL_PLATFORMS since we're batching across all platforms
+                signature_groups[(ALL_PLATFORMS, signature)].append(component)
                 break  # Only use first platform for grouping
         else:
             # No buses found for any platform - can be grouped together
-            signature_groups[NO_BUSES_SIGNATURE].append(component)
+            signature_groups[(ALL_PLATFORMS, NO_BUSES_SIGNATURE)].append(component)
+
+    # Merge compatible bus groups (cross-bus optimization)
+    # This allows components with different buses (ble + uart) to be batched together
+    # improving the efficiency of test_build_components.py grouping
+    signature_groups = merge_compatible_bus_groups(signature_groups)

     # Create batches by keeping signature groups together
     # Components with the same signature stay in the same batches
@@ -132,8 +149,8 @@
     # 2. Sort groupable signatures by size (largest first)
     # 3. "no_buses" components CAN be grouped together
     def sort_key(item):
-        signature, components = item
-        is_isolated = signature.startswith("isolated_")
+        (_platform, signature), components = item
+        is_isolated = signature.startswith(ISOLATED_SIGNATURE_PREFIX)
         # Put "isolated_*" last (1), groupable first (0)
         # Within each category, sort by size (largest first)
         return (is_isolated, -len(components))
@@ -149,8 +166,8 @@
     current_batch = []
     current_weight = 0

-    for signature, group_components in sorted_groups:
-        is_isolated = signature.startswith("isolated_")
+    for (_platform, signature), group_components in sorted_groups:
+        is_isolated = signature.startswith(ISOLATED_SIGNATURE_PREFIX)
         weight_per_component = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT

         for component in group_components:
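Annotation: with ISOLATED_WEIGHT = 10 and GROUPABLE_WEIGHT = 1, one isolated component consumes as much batch budget as ten groupable ones. A simplified sketch of the packing loop (hypothetical component names, not the script's full logic):

# Simplified sketch of weight-based batch packing: isolated components are
# ten times heavier, so they exhaust a batch's budget much faster.
ISOLATED_WEIGHT = 10
GROUPABLE_WEIGHT = 1

def pack(groups: list[tuple[bool, list[str]]], batch_size: int = 40) -> list[list[str]]:
    batches, current, weight = [], [], 0
    for is_isolated, components in groups:
        per_component = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT
        for component in components:
            if weight + per_component > batch_size and current:
                batches.append(current)
                current, weight = [], 0
            current.append(component)
            weight += per_component
    if current:
        batches.append(current)
    return batches

# 50 groupable components fill one batch of 40; the rest share a batch with
# two heavy isolated components (10 + 2*10 = 30 <= 40).
print(pack([(False, [f"c{i}" for i in range(50)]), (True, ["esp32_ble", "wifi"])]))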
@@ -169,7 +186,7 @@
     if current_batch:
         batches.append(current_batch)

-    return batches
+    return batches, signature_groups


 def main() -> int:
@@ -231,7 +248,7 @@ def main() -> int:
         return 1

     # Create intelligent batches
-    batches = create_intelligent_batches(
+    batches, signature_groups = create_intelligent_batches(
         components=components,
         tests_dir=args.tests_dir,
         batch_size=args.batch_size,
@@ -256,6 +273,58 @@
     # Re-analyze to get isolated component counts for summary
     _, non_groupable, _ = analyze_all_components(args.tests_dir)

+    # Show grouping details
+    print("\n=== Component Grouping Details ===", file=sys.stderr)
+    # Sort groups by signature for readability
+    groupable_groups = []
+    isolated_groups = []
+    for (platform, signature), group_comps in sorted(signature_groups.items()):
+        if signature.startswith(ISOLATED_SIGNATURE_PREFIX):
+            isolated_groups.append((signature, group_comps))
+        else:
+            groupable_groups.append((signature, group_comps))
+
+    if groupable_groups:
+        print(
+            f"\nGroupable signatures ({len(groupable_groups)} merged groups after cross-bus optimization):",
+            file=sys.stderr,
+        )
+        for signature, group_comps in sorted(
+            groupable_groups, key=lambda x: (-len(x[1]), x[0])
+        ):
+            # Check if this is a merged signature (contains +)
+            is_merged = "+" in signature and signature != NO_BUSES_SIGNATURE
+            # Special handling for no_buses components
+            if signature == NO_BUSES_SIGNATURE:
+                print(
+                    f"  [{signature}]: {len(group_comps)} components (used as fillers across batches)",
+                    file=sys.stderr,
+                )
+            else:
+                merge_indicator = " [MERGED]" if is_merged else ""
+                print(
+                    f"  [{signature}]{merge_indicator}: {len(group_comps)} components",
+                    file=sys.stderr,
+                )
+                # Show first few components as examples
+                examples = ", ".join(sorted(group_comps)[:8])
+                if len(group_comps) > 8:
+                    examples += f", ... (+{len(group_comps) - 8} more)"
+                print(f"    → {examples}", file=sys.stderr)
+
+    if isolated_groups:
+        print(
+            f"\nIsolated components ({len(isolated_groups)} components - tested individually):",
+            file=sys.stderr,
+        )
+        isolated_names = sorted(
+            [comp for _, comps in isolated_groups for comp in comps]
+        )
+        # Group isolated components for compact display
+        for i in range(0, len(isolated_names), 10):
+            chunk = isolated_names[i : i + 10]
+            print(f"  {', '.join(chunk)}", file=sys.stderr)
+
     # Count isolated vs groupable components
     all_batched_components = [comp for batch in batches for comp in batch]
     isolated_count = sum(
@@ -17,11 +17,13 @@ from __future__ import annotations

 import argparse
 from collections import defaultdict
+from dataclasses import dataclass
 import hashlib
 import os
 from pathlib import Path
 import subprocess
 import sys
+import time

 # Add esphome to path
 sys.path.insert(0, str(Path(__file__).parent.parent))
@@ -34,32 +36,49 @@ from script.analyze_component_buses import (
     analyze_all_components,
     create_grouping_signature,
     is_platform_component,
+    merge_compatible_bus_groups,
     uses_local_file_references,
 )
 from script.merge_component_configs import merge_component_configs

-# Platform-specific maximum group sizes
-# ESP8266 has limited IRAM and can't handle large component groups
-PLATFORM_MAX_GROUP_SIZE = {
-    "esp8266-ard": 10,  # ESP8266 Arduino has limited IRAM
-    "esp8266-idf": 10,  # ESP8266 IDF also has limited IRAM
-    # BK72xx now uses BK7252 board (1.62MB flash vs 1.03MB) - no limit needed
-    # Other platforms can handle larger groups
-}
+
+@dataclass
+class TestResult:
+    """Store information about a single test run."""
+
+    test_id: str
+    components: list[str]
+    platform: str
+    success: bool
+    duration: float
+    command: str = ""
+    test_type: str = "compile"  # "config" or "compile"


 def show_disk_space_if_ci(esphome_command: str) -> None:
     """Show disk space usage if running in CI during compile.

+    Only shows output during compilation (not config validation) since
+    disk space is only relevant when actually building firmware.
+
     Args:
         esphome_command: The esphome command being run (config/compile/clean)
     """
-    if os.environ.get("GITHUB_ACTIONS") and esphome_command == "compile":
-        print("\n" + "=" * 80)
-        print("Disk Space After Build:")
-        print("=" * 80)
-        subprocess.run(["df", "-h"], check=False)
-        print("=" * 80 + "\n")
+    # Only show disk space during compilation in CI
+    # Config validation doesn't build anything so disk space isn't relevant
+    if not os.environ.get("GITHUB_ACTIONS"):
+        return
+    if esphome_command != "compile":
+        return
+
+    print("\n" + "=" * 80)
+    print("Disk Space After Build:")
+    print("=" * 80)
+    # Use sys.stdout.flush() to ensure output appears immediately
+    sys.stdout.flush()
+    subprocess.run(["df", "-h"], check=False, stdout=sys.stdout, stderr=sys.stderr)
+    print("=" * 80 + "\n")
+    sys.stdout.flush()


 def find_component_tests(
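Annotation: with the TestResult dataclass above, pass/fail tallies and total runtime fall out of a simple partition. A sketch with hypothetical values:

# Hypothetical TestResult values showing how the summary partitions results.
from dataclasses import dataclass

@dataclass
class TestResult:
    test_id: str
    components: list[str]
    platform: str
    success: bool
    duration: float
    command: str = ""
    test_type: str = "compile"

results = [
    TestResult("bme280_i2c.test.esp32-idf", ["bme280_i2c"], "esp32-idf", True, 41.2),
    TestResult("GROUPED[as3935_i2c,ens160_i2c].esp32-idf",
               ["as3935_i2c", "ens160_i2c"], "esp32-idf", False, 12.7),
]
passed = [r for r in results if r.success]
failed = [r for r in results if not r.success]
print(f"{len(passed)} passed, {len(failed)} failed, {sum(r.duration for r in results):.1f}s total")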
@@ -128,6 +147,140 @@ def get_platform_base_files(base_dir: Path) -> dict[str, list[Path]]:
     return dict(platform_files)


+def group_components_by_platform(
+    failed_results: list[TestResult],
+) -> dict[tuple[str, str], list[str]]:
+    """Group failed components by platform and test type for simplified reproduction commands.
+
+    Args:
+        failed_results: List of failed test results
+
+    Returns:
+        Dictionary mapping (platform, test_type) to list of component names
+    """
+    platform_components: dict[tuple[str, str], list[str]] = {}
+    for result in failed_results:
+        key = (result.platform, result.test_type)
+        if key not in platform_components:
+            platform_components[key] = []
+        platform_components[key].extend(result.components)
+
+    # Remove duplicates and sort for each platform
+    return {
+        key: sorted(set(components)) for key, components in platform_components.items()
+    }
+
+
+def format_github_summary(test_results: list[TestResult]) -> str:
+    """Format test results as GitHub Actions job summary markdown.
+
+    Args:
+        test_results: List of all test results
+
+    Returns:
+        Markdown formatted summary string
+    """
+    # Separate results into passed and failed
+    passed_results = [r for r in test_results if r.success]
+    failed_results = [r for r in test_results if not r.success]
+
+    lines = []
+
+    # Header with emoji based on success/failure
+    if failed_results:
+        lines.append("## :x: Component Tests Failed\n")
+    else:
+        lines.append("## :white_check_mark: Component Tests Passed\n")
+
+    # Summary statistics
+    total_time = sum(r.duration for r in test_results)
+    # Determine test type from results (all should be the same)
+    test_type = test_results[0].test_type if test_results else "unknown"
+    lines.append(
+        f"**Results:** {len(passed_results)} passed, {len(failed_results)} failed\n"
+    )
+    lines.append(f"**Total time:** {total_time:.1f}s\n")
+    lines.append(f"**Test type:** `{test_type}`\n")
+
+    # Show failed tests if any
+    if failed_results:
+        lines.append("### Failed Tests\n")
+        lines.append("| Test | Components | Platform | Duration |\n")
+        lines.append("|------|-----------|----------|----------|\n")
+        for result in failed_results:
+            components_str = ", ".join(result.components)
+            lines.append(
+                f"| `{result.test_id}` | {components_str} | {result.platform} | {result.duration:.1f}s |\n"
+            )
+        lines.append("\n")
+
+        # Show simplified commands to reproduce failures
+        # Group all failed components by platform for a single command per platform
+        lines.append("<details>\n")
+        lines.append("<summary>Commands to reproduce failures</summary>\n\n")
+        lines.append("```bash\n")
+
+        # Generate one command per platform and test type
+        platform_components = group_components_by_platform(failed_results)
+        for platform, test_type in sorted(platform_components.keys()):
+            components_csv = ",".join(platform_components[(platform, test_type)])
+            lines.append(
+                f"script/test_build_components.py -c {components_csv} -t {platform} -e {test_type}\n"
+            )
+
+        lines.append("```\n")
+        lines.append("</details>\n")
+
+    # Show passed tests
+    if passed_results:
+        lines.append("### Passed Tests\n\n")
+        lines.append(f"{len(passed_results)} tests passed successfully\n")
+
+        # Separate grouped and individual tests
+        grouped_results = [r for r in passed_results if len(r.components) > 1]
+        individual_results = [r for r in passed_results if len(r.components) == 1]
+
+        if grouped_results:
+            lines.append("#### Grouped Tests\n")
+            lines.append("| Components | Platform | Count | Duration |\n")
+            lines.append("|-----------|----------|-------|----------|\n")
+            for result in grouped_results:
+                components_str = ", ".join(result.components)
+                lines.append(
+                    f"| {components_str} | {result.platform} | {len(result.components)} | {result.duration:.1f}s |\n"
+                )
+            lines.append("\n")
+
+        if individual_results:
+            lines.append("#### Individual Tests\n")
+            # Show first 10 individual tests with timing
+            if len(individual_results) <= 10:
+                lines.extend(
+                    f"- `{result.test_id}` - {result.duration:.1f}s\n"
+                    for result in individual_results
+                )
+            else:
+                lines.extend(
+                    f"- `{result.test_id}` - {result.duration:.1f}s\n"
+                    for result in individual_results[:10]
+                )
+                lines.append(f"\n...and {len(individual_results) - 10} more\n")
+            lines.append("\n")
+
+    return "".join(lines)
+
+
+def write_github_summary(test_results: list[TestResult]) -> None:
+    """Write GitHub Actions job summary with test results and timing.
+
+    Args:
+        test_results: List of all test results
+    """
+    summary_content = format_github_summary(test_results)
+    with open(os.environ["GITHUB_STEP_SUMMARY"], "a", encoding="utf-8") as f:
+        f.write(summary_content)
+
+
 def extract_platform_with_version(base_file: Path) -> str:
     """Extract platform with version from base filename.

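Annotation: group_components_by_platform dedupes and sorts per (platform, test_type) key. A sketch of its behavior using a minimal stand-in for TestResult and hypothetical failures:

# Sketch of group_components_by_platform's dedupe-and-sort behavior using a
# minimal stand-in for TestResult (hypothetical failures).
from collections import namedtuple

Failed = namedtuple("Failed", "platform test_type components")

def group_components_by_platform(failed_results):
    grouped: dict[tuple[str, str], list[str]] = {}
    for result in failed_results:
        grouped.setdefault((result.platform, result.test_type), []).extend(result.components)
    return {key: sorted(set(comps)) for key, comps in grouped.items()}

failures = [
    Failed("esp32-idf", "compile", ["ens160_i2c", "as3935_i2c"]),
    Failed("esp32-idf", "compile", ["as3935_i2c"]),           # duplicate component
    Failed("esp8266-ard", "config", ["graphical_display_menu"]),
]
print(group_components_by_platform(failures))
# {('esp32-idf', 'compile'): ['as3935_i2c', 'ens160_i2c'],
#  ('esp8266-ard', 'config'): ['graphical_display_menu']}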
@@ -151,7 +304,7 @@
     esphome_command: str,
     continue_on_fail: bool,
     use_testing_mode: bool = False,
-) -> tuple[bool, str]:
+) -> TestResult:
     """Run esphome test for a single component.

     Args:
@@ -166,7 +319,7 @@
         use_testing_mode: Whether to use --testing-mode flag

     Returns:
-        Tuple of (success status, command string)
+        TestResult object with test details and timing
     """
     test_name = test_file.stem.split(".")[0]

@@ -221,9 +374,13 @@
     if use_testing_mode:
         print("  (using --testing-mode)")

+    start_time = time.time()
+    test_id = f"{component}.{test_name}.{platform_with_version}"
+
     try:
         result = subprocess.run(cmd, check=False)
         success = result.returncode == 0
+        duration = time.time() - start_time

         # Show disk space after build in CI during compile
         show_disk_space_if_ci(esphome_command)
@@ -236,12 +393,30 @@
             print(cmd_str)
             print()
             raise subprocess.CalledProcessError(result.returncode, cmd)
-        return success, cmd_str
+
+        return TestResult(
+            test_id=test_id,
+            components=[component],
+            platform=platform_with_version,
+            success=success,
+            duration=duration,
+            command=cmd_str,
+            test_type=esphome_command,
+        )
     except subprocess.CalledProcessError:
+        duration = time.time() - start_time
         # Re-raise if we're not continuing on fail
         if not continue_on_fail:
             raise
-        return False, cmd_str
+        return TestResult(
+            test_id=test_id,
+            components=[component],
+            platform=platform_with_version,
+            success=False,
+            duration=duration,
+            command=cmd_str,
+            test_type=esphome_command,
+        )


 def run_grouped_test(
@@ -253,7 +428,7 @@ def run_grouped_test(
     tests_dir: Path,
     esphome_command: str,
     continue_on_fail: bool,
-) -> tuple[bool, str]:
+) -> TestResult:
     """Run esphome test for a group of components with shared bus configs.

     Args:
@@ -267,7 +442,7 @@
         continue_on_fail: Whether to continue on failure

     Returns:
-        Tuple of (success status, command string)
+        TestResult object with test details and timing
     """
     # Create merged config
     group_name = "_".join(components[:3])  # Use first 3 components for name
@@ -294,8 +469,17 @@
         print(f"Error merging configs for {components}: {e}")
         if not continue_on_fail:
             raise
-        # Return empty command string since we failed before building the command
-        return False, f"# Failed during config merge: {e}"
+        # Return TestResult for merge failure
+        test_id = f"GROUPED[{','.join(components)}].{platform_with_version}"
+        return TestResult(
+            test_id=test_id,
+            components=components,
+            platform=platform_with_version,
+            success=False,
+            duration=0.0,
+            command=f"# Failed during config merge: {e}",
+            test_type=esphome_command,
+        )

     # Create test file that includes merged config
     output_file = build_dir / f"test_{group_name}.{platform_with_version}.yaml"
@@ -334,9 +518,13 @@
     print(f"> [GROUPED: {components_str}] [{platform_with_version}]")
     print("  (using --testing-mode)")

+    start_time = time.time()
+    test_id = f"GROUPED[{','.join(components)}].{platform_with_version}"
+
     try:
         result = subprocess.run(cmd, check=False)
         success = result.returncode == 0
+        duration = time.time() - start_time

         # Show disk space after build in CI during compile
         show_disk_space_if_ci(esphome_command)
@@ -349,12 +537,30 @@
             print(cmd_str)
             print()
             raise subprocess.CalledProcessError(result.returncode, cmd)
-        return success, cmd_str
+
+        return TestResult(
+            test_id=test_id,
+            components=components,
+            platform=platform_with_version,
+            success=success,
+            duration=duration,
+            command=cmd_str,
+            test_type=esphome_command,
+        )
     except subprocess.CalledProcessError:
+        duration = time.time() - start_time
         # Re-raise if we're not continuing on fail
         if not continue_on_fail:
             raise
-        return False, cmd_str
+        return TestResult(
+            test_id=test_id,
+            components=components,
+            platform=platform_with_version,
+            success=False,
+            duration=duration,
+            command=cmd_str,
+            test_type=esphome_command,
+        )


 def run_grouped_component_tests(
@@ -366,7 +572,7 @@ def run_grouped_component_tests(
     esphome_command: str,
     continue_on_fail: bool,
     additional_isolated: set[str] | None = None,
-) -> tuple[set[tuple[str, str]], list[str], list[str], dict[str, str]]:
+) -> tuple[set[tuple[str, str]], list[TestResult]]:
     """Run grouped component tests.

     Args:
@@ -380,12 +586,10 @@
         additional_isolated: Additional components to treat as isolated (not grouped)

     Returns:
-        Tuple of (tested_components, passed_tests, failed_tests, failed_commands)
+        Tuple of (tested_components, test_results)
     """
     tested_components = set()
-    passed_tests = []
-    failed_tests = []
-    failed_commands = {}  # Map test_id to command string
+    test_results = []

     # Group components by platform and bus signature
     grouped_components: dict[tuple[str, str], list[str]] = defaultdict(list)
@@ -462,6 +666,11 @@
             if signature:
                 grouped_components[(platform, signature)].append(component)

+    # Merge groups with compatible buses (cross-bus grouping optimization)
+    # This allows mixing components with different buses (e.g., ble + uart)
+    # as long as they don't have conflicting configurations for the same bus type
+    grouped_components = merge_compatible_bus_groups(grouped_components)
+
     # Print detailed grouping plan
     print("\nGrouping Plan:")
     print("-" * 80)
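Annotation: merge_compatible_bus_groups is imported from script/analyze_component_buses.py and its body is not part of this diff. The sketch below is only an assumed, simplified model — groups on the same platform merge when their bus sets do not overlap, producing "+"-joined signatures like the [MERGED] ones reported by the batch splitter — and may differ from the actual implementation.

# Assumed, simplified model of cross-bus group merging (not the real
# merge_compatible_bus_groups): union groups whose bus sets are disjoint.
def merge_compatible_groups(groups: dict[tuple[str, str], list[str]]) -> dict[tuple[str, str], list[str]]:
    merged: dict[tuple[str, str], list[str]] = {}
    for (platform, signature), comps in sorted(groups.items()):
        target = None
        for m_platform, m_signature in merged:
            if m_platform == platform and set(signature.split("+")).isdisjoint(
                m_signature.split("+")
            ):
                target = (m_platform, m_signature)
                break
        if target is None:
            merged[(platform, signature)] = list(comps)
        else:
            new_sig = "+".join(sorted(set(target[1].split("+")) | set(signature.split("+"))))
            merged[(platform, new_sig)] = merged.pop(target) + list(comps)
    return merged

print(merge_compatible_groups({("all", "ble"): ["a"], ("all", "uart"): ["b"]}))
# {('all', 'ble+uart'): ['a', 'b']}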
@@ -560,28 +769,6 @@
             # No other groups for this platform - keep no_buses components together
             grouped_components[(platform, NO_BUSES_SIGNATURE)] = no_buses_comps

-    # Split groups that exceed platform-specific maximum sizes
-    # ESP8266 has limited IRAM and can't handle large component groups
-    split_groups = {}
-    for (platform, signature), components in list(grouped_components.items()):
-        max_size = PLATFORM_MAX_GROUP_SIZE.get(platform)
-        if max_size and len(components) > max_size:
-            # Split this group into smaller groups
-            print(
-                f"\n  ℹ️ Splitting {platform} group (signature: {signature}) "
-                f"from {len(components)} to max {max_size} components per group"
-            )
-            # Remove original group
-            del grouped_components[(platform, signature)]
-            # Create split groups
-            for i in range(0, len(components), max_size):
-                split_components = components[i : i + max_size]
-                # Create unique signature for each split group
-                split_signature = f"{signature}_split{i // max_size + 1}"
-                split_groups[(platform, split_signature)] = split_components
-    # Add split groups back
-    grouped_components.update(split_groups)
-
     groups_to_test = []
     individual_tests = set()  # Use set to avoid duplicates

@@ -672,7 +859,7 @@
                 continue

             # Run grouped test
-            success, cmd_str = run_grouped_test(
+            test_result = run_grouped_test(
                 components=components_to_group,
                 platform=platform,
                 platform_with_version=platform_with_version,
@@ -687,17 +874,10 @@
             for comp in components_to_group:
                 tested_components.add((comp, platform_with_version))

-            # Record result for each component - show all components in grouped tests
-            test_id = (
-                f"GROUPED[{','.join(components_to_group)}].{platform_with_version}"
-            )
-            if success:
-                passed_tests.append(test_id)
-            else:
-                failed_tests.append(test_id)
-                failed_commands[test_id] = cmd_str
+            # Store test result
+            test_results.append(test_result)

-    return tested_components, passed_tests, failed_tests, failed_commands
+    return tested_components, test_results


 def run_individual_component_test(
@@ -710,9 +890,7 @@ def run_individual_component_test(
     esphome_command: str,
     continue_on_fail: bool,
     tested_components: set[tuple[str, str]],
-    passed_tests: list[str],
-    failed_tests: list[str],
-    failed_commands: dict[str, str],
+    test_results: list[TestResult],
 ) -> None:
     """Run an individual component test if not already tested in a group.

@@ -726,16 +904,13 @@
         esphome_command: ESPHome command
         continue_on_fail: Whether to continue on failure
         tested_components: Set of already tested components
-        passed_tests: List to append passed test IDs
-        failed_tests: List to append failed test IDs
-        failed_commands: Dict to store failed test commands
+        test_results: List to append test results
     """
     # Skip if already tested in a group
     if (component, platform_with_version) in tested_components:
         return

-    test_name = test_file.stem.split(".")[0]
-    success, cmd_str = run_esphome_test(
+    test_result = run_esphome_test(
         component=component,
         test_file=test_file,
         platform=platform,
@@ -745,12 +920,7 @@
         esphome_command=esphome_command,
         continue_on_fail=continue_on_fail,
     )
-    test_id = f"{component}.{test_name}.{platform_with_version}"
-    if success:
-        passed_tests.append(test_id)
-    else:
-        failed_tests.append(test_id)
-        failed_commands[test_id] = cmd_str
+    test_results.append(test_result)


 def test_components(
@@ -799,19 +969,12 @@ def test_components(
     print(f"Found {len(all_tests)} components to test")

     # Run tests
-    failed_tests = []
-    passed_tests = []
+    test_results = []
     tested_components = set()  # Track which components were tested in groups
-    failed_commands = {}  # Track commands for failed tests

     # First, run grouped tests if grouping is enabled
     if enable_grouping:
-        (
-            tested_components,
-            passed_tests,
-            failed_tests,
-            failed_commands,
-        ) = run_grouped_component_tests(
+        tested_components, grouped_results = run_grouped_component_tests(
             all_tests=all_tests,
             platform_filter=platform_filter,
             platform_bases=platform_bases,
@@ -821,6 +984,7 @@
             continue_on_fail=continue_on_fail,
             additional_isolated=isolated_components,
         )
+        test_results.extend(grouped_results)

     # Then run individual tests for components not in groups
     for component, test_files in sorted(all_tests.items()):
@@ -846,9 +1010,7 @@
                     esphome_command=esphome_command,
                     continue_on_fail=continue_on_fail,
                     tested_components=tested_components,
-                    passed_tests=passed_tests,
-                    failed_tests=failed_tests,
-                    failed_commands=failed_commands,
+                    test_results=test_results,
                 )
             else:
                 # Platform-specific test
@@ -880,31 +1042,40 @@
                     esphome_command=esphome_command,
                     continue_on_fail=continue_on_fail,
                     tested_components=tested_components,
-                    passed_tests=passed_tests,
-                    failed_tests=failed_tests,
-                    failed_commands=failed_commands,
+                    test_results=test_results,
                 )

+    # Separate results into passed and failed
+    passed_results = [r for r in test_results if r.success]
+    failed_results = [r for r in test_results if not r.success]
+
     # Print summary
     print("\n" + "=" * 80)
-    print(f"Test Summary: {len(passed_tests)} passed, {len(failed_tests)} failed")
+    print(f"Test Summary: {len(passed_results)} passed, {len(failed_results)} failed")
     print("=" * 80)

-    if failed_tests:
+    if failed_results:
         print("\nFailed tests:")
-        for test in failed_tests:
-            print(f"  - {test}")
+        for result in failed_results:
+            print(f"  - {result.test_id}")

-        # Print failed commands at the end for easy copy-paste from CI logs
+        # Print simplified commands grouped by platform and test type for easy copy-paste
         print("\n" + "=" * 80)
-        print("Failed test commands (copy-paste to reproduce locally):")
+        print("Commands to reproduce failures (copy-paste to reproduce locally):")
         print("=" * 80)
-        for test in failed_tests:
-            if test in failed_commands:
-                print(f"\n# {test}")
-                print(failed_commands[test])
+        platform_components = group_components_by_platform(failed_results)
+        for platform, test_type in sorted(platform_components.keys()):
+            components_csv = ",".join(platform_components[(platform, test_type)])
+            print(
+                f"script/test_build_components.py -c {components_csv} -t {platform} -e {test_type}"
+            )
         print()

+    # Write GitHub Actions job summary if in CI
+    if os.environ.get("GITHUB_STEP_SUMMARY"):
+        write_github_summary(test_results)
+
+    if failed_results:
         return 1

     return 0
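Annotation: the reproduce block above prints exactly one command per (platform, test_type) pair. A sketch with hypothetical component names:

# Sketch of how one copy-paste command per (platform, test_type) is built
# from grouped failures (component names are hypothetical).
platform_components = {
    ("esp32-idf", "compile"): ["as3935_i2c", "ens160_i2c"],
    ("esp8266-ard", "config"): ["graphical_display_menu"],
}
for platform, test_type in sorted(platform_components.keys()):
    components_csv = ",".join(platform_components[(platform, test_type)])
    print(f"script/test_build_components.py -c {components_csv} -t {platform} -e {test_type}")
# script/test_build_components.py -c as3935_i2c,ens160_i2c -t esp32-idf -e compile
# script/test_build_components.py -c graphical_display_menu -t esp8266-ard -e config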
@@ -1,5 +1,5 @@
 substitutions:
-  irq0_pin: GPIO13
+  irq0_pin: GPIO0
   irq1_pin: GPIO15
   reset_pin: GPIO16

@@ -4,10 +4,13 @@ sensor:
     irq_pin: ${irq_pin}
     voltage:
       name: ADE7953 Voltage
+      id: ade7953_i2c_voltage
     current_a:
       name: ADE7953 Current A
+      id: ade7953_i2c_current_a
     current_b:
       name: ADE7953 Current B
+      id: ade7953_i2c_current_b
     power_factor_a:
       name: ADE7953 Power Factor A
     power_factor_b:
@@ -4,13 +4,13 @@ sensor:
     irq_pin: ${irq_pin}
     voltage:
       name: ADE7953 Voltage
-      id: ade7953_voltage
+      id: ade7953_spi_voltage
     current_a:
       name: ADE7953 Current A
-      id: ade7953_current_a
+      id: ade7953_spi_current_a
     current_b:
       name: ADE7953 Current B
-      id: ade7953_current_b
+      id: ade7953_spi_current_b
     power_factor_a:
       name: ADE7953 Power Factor A
     power_factor_b:
@@ -1,13 +1,16 @@
 as3935_i2c:
+  id: as3935_i2c_id
   i2c_id: i2c_bus
   irq_pin: ${irq_pin}

 binary_sensor:
   - platform: as3935
+    as3935_id: as3935_i2c_id
     name: Storm Alert

 sensor:
   - platform: as3935
+    as3935_id: as3935_i2c_id
     lightning_energy:
       name: Lightning Energy
     distance:
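Annotation: the bus-specific id additions and renames in these test files exist because merged group configs are deduplicated by id with the first occurrence kept (see the deduplicate_by_id call in merge_component_configs above). A sketch of the collision they avoid, using a simplified stand-in for the scripts' deduplicate_by_id:

# Why bus-specific ids matter: merged configs are deduplicated by "id",
# keeping the first occurrence, so two sensors sharing an id would collapse.
def deduplicate_by_id(items: list[dict]) -> list[dict]:
    seen, out = set(), []
    for item in items:
        item_id = item.get("id")
        if item_id in seen:
            continue  # duplicate id: first occurrence wins
        seen.add(item_id)
        out.append(item)
    return out

colliding = [{"id": "ade7953_voltage", "bus": "i2c"}, {"id": "ade7953_voltage", "bus": "spi"}]
distinct = [{"id": "ade7953_i2c_voltage"}, {"id": "ade7953_spi_voltage"}]
print(len(deduplicate_by_id(colliding)), len(deduplicate_by_id(distinct)))  # 1 2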
@@ -1,13 +1,16 @@
 as3935_spi:
+  id: as3935_spi_id
   cs_pin: ${cs_pin}
   irq_pin: ${irq_pin}

 binary_sensor:
   - platform: as3935
+    as3935_id: as3935_spi_id
     name: Storm Alert

 sensor:
   - platform: as3935
+    as3935_id: as3935_spi_id
     lightning_energy:
       name: Lightning Energy
     distance:
@@ -1,7 +1,7 @@
 display:
   - platform: ssd1306_i2c
     i2c_id: i2c_bus
-    id: ssd1306_display
+    id: ssd1306_i2c_display
     model: SSD1306_128X64
     reset_pin: 19
     pages:
@@ -13,6 +13,6 @@ touchscreen:
   - platform: axs15231
     i2c_id: i2c_bus
     id: axs15231_touchscreen
-    display: ssd1306_display
+    display: ssd1306_i2c_display
     interrupt_pin: 20
     reset_pin: 18
@@ -3,12 +3,12 @@ sensor:
     i2c_id: i2c_bus
     address: 0x76
     temperature:
-      id: bme280_temperature
+      id: bme280_i2c_temperature
       name: BME280 Temperature
     humidity:
-      id: bme280_humidity
+      id: bme280_i2c_humidity
       name: BME280 Humidity
     pressure:
-      id: bme280_pressure
+      id: bme280_i2c_pressure
       name: BME280 Pressure
     update_interval: 15s
@@ -2,12 +2,12 @@ sensor:
   - platform: bme280_spi
     cs_pin: ${cs_pin}
     temperature:
-      id: bme280_temperature
+      id: bme280_spi_temperature
       name: BME280 Temperature
     humidity:
-      id: bme280_humidity
+      id: bme280_spi_humidity
       name: BME280 Humidity
     pressure:
-      id: bme280_pressure
+      id: bme280_spi_pressure
       name: BME280 Pressure
     update_interval: 15s
@@ -3,10 +3,10 @@ sensor:
     i2c_id: i2c_bus
     address: 0x77
     temperature:
-      id: bmp280_temperature
+      id: bmp280_i2c_temperature
       name: Outside Temperature
     pressure:
       name: Outside Pressure
-      id: bmp280_pressure
+      id: bmp280_i2c_pressure
     iir_filter: 16x
     update_interval: 15s
@@ -2,10 +2,10 @@ sensor:
   - platform: bmp280_spi
     cs_pin: ${cs_pin}
     temperature:
-      id: bmp280_temperature
+      id: bmp280_spi_temperature
       name: Outside Temperature
     pressure:
       name: Outside Pressure
-      id: bmp280_pressure
+      id: bmp280_spi_pressure
     iir_filter: 16x
     update_interval: 15s
@@ -3,8 +3,10 @@ sensor:
     i2c_id: i2c_bus
     address: 0x77
     temperature:
+      id: bmp3xx_i2c_temperature
       name: BMP Temperature
       oversampling: 16x
     pressure:
+      id: bmp3xx_i2c_pressure
       name: BMP Pressure
     iir_filter: 2X
@@ -2,8 +2,10 @@ sensor:
   - platform: bmp3xx_spi
     cs_pin: ${cs_pin}
     temperature:
+      id: bmp3xx_spi_temperature
       name: BMP Temperature
       oversampling: 16x
     pressure:
+      id: bmp3xx_spi_pressure
       name: BMP Pressure
     iir_filter: 2X
@@ -1,4 +1,4 @@
 packages:
-  camera: !include ../../test_build_components/common/camera/esp32-idf.yaml
+  i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml

 <<: !include common.yaml
@@ -1,4 +1,4 @@
 packages:
-  camera: !include ../../test_build_components/common/camera/esp32-idf.yaml
+  i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml

 <<: !include common.yaml
@@ -4,6 +4,7 @@ packages:

 display:
   - platform: ili9xxx
+    spi_id: spi_bus
     id: ili9xxx_display
     model: GC9A01A
     invert_colors: True
@@ -16,5 +17,6 @@ display:

 touchscreen:
   - platform: chsc6x
+    i2c_id: i2c_bus
     display: ili9xxx_display
     interrupt_pin: 20
@@ -1,7 +1,7 @@
 display:
   - platform: ssd1306_i2c
     i2c_id: i2c_bus
-    id: ssd1306_display
+    id: ssd1306_i2c_display
     model: SSD1306_128X64
     reset_pin: ${display_reset_pin}
     pages:
@@ -15,7 +15,7 @@ touchscreen:
     id: ektf2232_touchscreen
     interrupt_pin: ${interrupt_pin}
     reset_pin: ${touch_reset_pin}
-    display: ssd1306_display
+    display: ssd1306_i2c_display
     on_touch:
       - logger.log:
           format: Touch at (%d, %d)
@@ -3,8 +3,11 @@ sensor:
|
|||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
address: 0x53
|
address: 0x53
|
||||||
eco2:
|
eco2:
|
||||||
|
id: ens160_i2c_eco2
|
||||||
name: "ENS160 eCO2"
|
name: "ENS160 eCO2"
|
||||||
tvoc:
|
tvoc:
|
||||||
|
id: ens160_i2c_tvoc
|
||||||
name: "ENS160 Total Volatile Organic Compounds"
|
name: "ENS160 Total Volatile Organic Compounds"
|
||||||
aqi:
|
aqi:
|
||||||
|
id: ens160_i2c_aqi
|
||||||
name: "ENS160 Air Quality Index"
|
name: "ENS160 Air Quality Index"
|
||||||
|
|||||||
@@ -2,8 +2,11 @@ sensor:
|
|||||||
- platform: ens160_spi
|
- platform: ens160_spi
|
||||||
cs_pin: ${cs_pin}
|
cs_pin: ${cs_pin}
|
||||||
eco2:
|
eco2:
|
||||||
|
id: ens160_spi_eco2
|
||||||
name: "ENS160 eCO2"
|
name: "ENS160 eCO2"
|
||||||
tvoc:
|
tvoc:
|
||||||
|
id: ens160_spi_tvoc
|
||||||
name: "ENS160 Total Volatile Organic Compounds"
|
name: "ENS160 Total Volatile Organic Compounds"
|
||||||
aqi:
|
aqi:
|
||||||
|
id: ens160_spi_aqi
|
||||||
name: "ENS160 Air Quality Index"
|
name: "ENS160 Air Quality Index"
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
packages:
|
packages:
|
||||||
camera: !include ../../test_build_components/common/camera/esp32-idf.yaml
|
i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
packages:
|
packages:
|
||||||
camera: !include ../../test_build_components/common/camera/esp32-idf.yaml
|
i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -49,6 +49,7 @@ font:
|
|||||||
|
|
||||||
display:
|
display:
|
||||||
- platform: ssd1306_i2c
|
- platform: ssd1306_i2c
|
||||||
|
i2c_id: i2c_bus
|
||||||
id: ssd1306_display
|
id: ssd1306_display
|
||||||
model: SSD1306_128X64
|
model: SSD1306_128X64
|
||||||
reset_pin: ${display_reset_pin}
|
reset_pin: ${display_reset_pin}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
substitutions:
|
substitutions:
|
||||||
interrupt_pin: GPIO12
|
interrupt_pin: GPIO0
|
||||||
reset_pin: GPIO16
|
reset_pin: GPIO16
|
||||||
|
|
||||||
packages:
|
packages:
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ graph:
|
|||||||
|
|
||||||
display:
|
display:
|
||||||
- platform: ssd1306_i2c
|
- platform: ssd1306_i2c
|
||||||
|
i2c_id: i2c_bus
|
||||||
id: ssd1306_display
|
id: ssd1306_display
|
||||||
model: SSD1306_128X64
|
model: SSD1306_128X64
|
||||||
reset_pin: ${reset_pin}
|
reset_pin: ${reset_pin}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
display:
|
display:
|
||||||
- platform: ssd1306_i2c
|
- platform: ssd1306_i2c
|
||||||
id: ssd1306_display
|
id: ssd1306_i2c_display
|
||||||
model: SSD1306_128X64
|
model: SSD1306_128X64
|
||||||
reset_pin: ${reset_pin}
|
reset_pin: ${reset_pin}
|
||||||
pages:
|
pages:
|
||||||
@@ -36,7 +36,7 @@ switch:
|
|||||||
|
|
||||||
graphical_display_menu:
|
graphical_display_menu:
|
||||||
id: test_graphical_display_menu
|
id: test_graphical_display_menu
|
||||||
display: ssd1306_display
|
display: ssd1306_i2c_display
|
||||||
font: roboto
|
font: roboto
|
||||||
active: false
|
active: false
|
||||||
mode: rotary
|
mode: rotary
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
display:
|
display:
|
||||||
- platform: ssd1306_i2c
|
- platform: ssd1306_i2c
|
||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
id: ssd1306_display
|
id: ssd1306_i2c_display
|
||||||
model: SSD1306_128X64
|
model: SSD1306_128X64
|
||||||
reset_pin: ${display_reset_pin}
|
reset_pin: ${display_reset_pin}
|
||||||
pages:
|
pages:
|
||||||
@@ -13,7 +13,7 @@ touchscreen:
|
|||||||
- platform: gt911
|
- platform: gt911
|
||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
id: gt911_touchscreen
|
id: gt911_touchscreen
|
||||||
display: ssd1306_display
|
display: ssd1306_i2c_display
|
||||||
interrupt_pin: ${interrupt_pin}
|
interrupt_pin: ${interrupt_pin}
|
||||||
reset_pin: ${reset_pin}
|
reset_pin: ${reset_pin}
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
substitutions:
|
substitutions:
|
||||||
clk_pin: GPIO4
|
clk_pin: GPIO0
|
||||||
dout_pin: GPIO5
|
dout_pin: GPIO2
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -7,9 +7,21 @@ sensor:
|
|||||||
max_current: 40 A
|
max_current: 40 A
|
||||||
adc_range: 1
|
adc_range: 1
|
||||||
temperature_coefficient: 50
|
temperature_coefficient: 50
|
||||||
shunt_voltage: "INA2xx Shunt Voltage"
|
shunt_voltage:
|
||||||
bus_voltage: "INA2xx Bus Voltage"
|
id: ina2xx_i2c_shunt_voltage
|
||||||
current: "INA2xx Current"
|
name: "INA2xx Shunt Voltage"
|
||||||
power: "INA2xx Power"
|
bus_voltage:
|
||||||
energy: "INA2xx Energy"
|
id: ina2xx_i2c_bus_voltage
|
||||||
charge: "INA2xx Charge"
|
name: "INA2xx Bus Voltage"
|
||||||
|
current:
|
||||||
|
id: ina2xx_i2c_current
|
||||||
|
name: "INA2xx Current"
|
||||||
|
power:
|
||||||
|
id: ina2xx_i2c_power
|
||||||
|
name: "INA2xx Power"
|
||||||
|
energy:
|
||||||
|
id: ina2xx_i2c_energy
|
||||||
|
name: "INA2xx Energy"
|
||||||
|
charge:
|
||||||
|
id: ina2xx_i2c_charge
|
||||||
|
name: "INA2xx Charge"
|
||||||
|
|||||||
@@ -6,9 +6,21 @@ sensor:
|
|||||||
max_current: 40 A
|
max_current: 40 A
|
||||||
adc_range: 1
|
adc_range: 1
|
||||||
temperature_coefficient: 50
|
temperature_coefficient: 50
|
||||||
shunt_voltage: "INA2xx Shunt Voltage"
|
shunt_voltage:
|
||||||
bus_voltage: "INA2xx Bus Voltage"
|
id: ina2xx_spi_shunt_voltage
|
||||||
current: "INA2xx Current"
|
name: "INA2xx Shunt Voltage"
|
||||||
power: "INA2xx Power"
|
bus_voltage:
|
||||||
energy: "INA2xx Energy"
|
id: ina2xx_spi_bus_voltage
|
||||||
charge: "INA2xx Charge"
|
name: "INA2xx Bus Voltage"
|
||||||
|
current:
|
||||||
|
id: ina2xx_spi_current
|
||||||
|
name: "INA2xx Current"
|
||||||
|
power:
|
||||||
|
id: ina2xx_spi_power
|
||||||
|
name: "INA2xx Power"
|
||||||
|
energy:
|
||||||
|
id: ina2xx_spi_energy
|
||||||
|
name: "INA2xx Energy"
|
||||||
|
charge:
|
||||||
|
id: ina2xx_spi_charge
|
||||||
|
name: "INA2xx Charge"
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
display:
|
display:
|
||||||
- platform: ssd1306_i2c
|
- platform: ssd1306_i2c
|
||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
id: ssd1306_display
|
id: ssd1306_i2c_display
|
||||||
model: SSD1306_128X64
|
model: SSD1306_128X64
|
||||||
reset_pin: ${reset_pin}
|
reset_pin: ${reset_pin}
|
||||||
pages:
|
pages:
|
||||||
@@ -14,7 +14,7 @@ touchscreen:
|
|||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
id: lilygo_touchscreen
|
id: lilygo_touchscreen
|
||||||
interrupt_pin: ${interrupt_pin}
|
interrupt_pin: ${interrupt_pin}
|
||||||
display: ssd1306_display
|
display: ssd1306_i2c_display
|
||||||
on_touch:
|
on_touch:
|
||||||
- logger.log:
|
- logger.log:
|
||||||
format: Touch at (%d, %d)
|
format: Touch at (%d, %d)
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
pn532_i2c:
|
pn532_i2c:
|
||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
id: pn532_nfcc
|
id: pn532_nfcc_i2c
|
||||||
|
|
||||||
binary_sensor:
|
binary_sensor:
|
||||||
- platform: pn532
|
- platform: pn532
|
||||||
pn532_id: pn532_nfcc
|
pn532_id: pn532_nfcc_i2c
|
||||||
name: PN532 NFC Tag
|
name: PN532 NFC Tag
|
||||||
uid: 74-10-37-94
|
uid: 74-10-37-94
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
pn532_spi:
|
pn532_spi:
|
||||||
id: pn532_nfcc
|
id: pn532_nfcc_spi
|
||||||
cs_pin: ${cs_pin}
|
cs_pin: ${cs_pin}
|
||||||
|
|
||||||
binary_sensor:
|
binary_sensor:
|
||||||
- platform: pn532
|
- platform: pn532
|
||||||
pn532_id: pn532_nfcc
|
pn532_id: pn532_nfcc_spi
|
||||||
name: PN532 NFC Tag
|
name: PN532 NFC Tag
|
||||||
uid: 74-10-37-94
|
uid: 74-10-37-94
|
||||||
|
|||||||
@@ -1,23 +1,23 @@
|
|||||||
esphome:
|
esphome:
|
||||||
on_boot:
|
on_boot:
|
||||||
then:
|
then:
|
||||||
- tag.set_clean_mode: nfcc_pn7160
|
- tag.set_clean_mode: nfcc_pn7160_i2c
|
||||||
- tag.set_format_mode: nfcc_pn7160
|
- tag.set_format_mode: nfcc_pn7160_i2c
|
||||||
- tag.set_read_mode: nfcc_pn7160
|
- tag.set_read_mode: nfcc_pn7160_i2c
|
||||||
- tag.set_write_message:
|
- tag.set_write_message:
|
||||||
message: https://www.home-assistant.io/tag/pulse
|
message: https://www.home-assistant.io/tag/pulse
|
||||||
include_android_app_record: false
|
include_android_app_record: false
|
||||||
- tag.set_write_mode: nfcc_pn7160
|
- tag.set_write_mode: nfcc_pn7160_i2c
|
||||||
- tag.set_emulation_message:
|
- tag.set_emulation_message:
|
||||||
message: https://www.home-assistant.io/tag/pulse
|
message: https://www.home-assistant.io/tag/pulse
|
||||||
include_android_app_record: false
|
include_android_app_record: false
|
||||||
- tag.emulation_off: nfcc_pn7160
|
- tag.emulation_off: nfcc_pn7160_i2c
|
||||||
- tag.emulation_on: nfcc_pn7160
|
- tag.emulation_on: nfcc_pn7160_i2c
|
||||||
- tag.polling_off: nfcc_pn7160
|
- tag.polling_off: nfcc_pn7160_i2c
|
||||||
- tag.polling_on: nfcc_pn7160
|
- tag.polling_on: nfcc_pn7160_i2c
|
||||||
|
|
||||||
pn7150_i2c:
|
pn7150_i2c:
|
||||||
id: nfcc_pn7160
|
id: nfcc_pn7160_i2c
|
||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
irq_pin: ${irq_pin}
|
irq_pin: ${irq_pin}
|
||||||
ven_pin: ${ven_pin}
|
ven_pin: ${ven_pin}
|
||||||
|
|||||||
@@ -1,23 +1,23 @@
|
|||||||
esphome:
|
esphome:
|
||||||
on_boot:
|
on_boot:
|
||||||
then:
|
then:
|
||||||
- tag.set_clean_mode: nfcc_pn7160
|
- tag.set_clean_mode: nfcc_pn7160_spi
|
||||||
- tag.set_format_mode: nfcc_pn7160
|
- tag.set_format_mode: nfcc_pn7160_spi
|
||||||
- tag.set_read_mode: nfcc_pn7160
|
- tag.set_read_mode: nfcc_pn7160_spi
|
||||||
- tag.set_write_message:
|
- tag.set_write_message:
|
||||||
message: https://www.home-assistant.io/tag/pulse
|
message: https://www.home-assistant.io/tag/pulse
|
||||||
include_android_app_record: false
|
include_android_app_record: false
|
||||||
- tag.set_write_mode: nfcc_pn7160
|
- tag.set_write_mode: nfcc_pn7160_spi
|
||||||
- tag.set_emulation_message:
|
- tag.set_emulation_message:
|
||||||
message: https://www.home-assistant.io/tag/pulse
|
message: https://www.home-assistant.io/tag/pulse
|
||||||
include_android_app_record: false
|
include_android_app_record: false
|
||||||
- tag.emulation_off: nfcc_pn7160
|
- tag.emulation_off: nfcc_pn7160_spi
|
||||||
- tag.emulation_on: nfcc_pn7160
|
- tag.emulation_on: nfcc_pn7160_spi
|
||||||
- tag.polling_off: nfcc_pn7160
|
- tag.polling_off: nfcc_pn7160_spi
|
||||||
- tag.polling_on: nfcc_pn7160
|
- tag.polling_on: nfcc_pn7160_spi
|
||||||
|
|
||||||
pn7160_spi:
|
pn7160_spi:
|
||||||
id: nfcc_pn7160
|
id: nfcc_pn7160_spi
|
||||||
cs_pin: ${cs_pin}
|
cs_pin: ${cs_pin}
|
||||||
irq_pin: ${irq_pin}
|
irq_pin: ${irq_pin}
|
||||||
ven_pin: ${ven_pin}
|
ven_pin: ${ven_pin}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
rc522_i2c:
|
rc522_i2c:
|
||||||
- id: rc522_nfcc
|
- id: rc522_nfcc_i2c
|
||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
update_interval: 1s
|
update_interval: 1s
|
||||||
on_tag:
|
on_tag:
|
||||||
@@ -8,6 +8,6 @@ rc522_i2c:
|
|||||||
|
|
||||||
binary_sensor:
|
binary_sensor:
|
||||||
- platform: rc522
|
- platform: rc522
|
||||||
rc522_id: rc522_nfcc
|
rc522_id: rc522_nfcc_i2c
|
||||||
name: RC522 NFC Tag
|
name: RC522 NFC Tag
|
||||||
uid: 74-10-37-94
|
uid: 74-10-37-94
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
rc522_spi:
|
rc522_spi:
|
||||||
id: rc522_nfcc
|
id: rc522_nfcc_spi
|
||||||
cs_pin: ${cs_pin}
|
cs_pin: ${cs_pin}
|
||||||
|
|
||||||
binary_sensor:
|
binary_sensor:
|
||||||
- platform: rc522
|
- platform: rc522
|
||||||
rc522_id: rc522_nfcc
|
rc522_id: rc522_nfcc_spi
|
||||||
name: PN532 NFC Tag
|
name: RC522 NFC Tag
|
||||||
uid: 74-10-37-94
|
uid: 74-10-37-94
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
substitutions:
|
substitutions:
|
||||||
tx_pin: GPIO0
|
tx_pin: GPIO0
|
||||||
rx_pin: GPIO2
|
rx_pin: GPIO2
|
||||||
flow_control_pin: GPIO4
|
flow_control_pin: GPIO15
|
||||||
|
|
||||||
packages:
|
packages:
|
||||||
modbus: !include ../../test_build_components/common/modbus/esp8266-ard.yaml
|
modbus: !include ../../test_build_components/common/modbus/esp8266-ard.yaml
|
||||||
|
|||||||
@@ -2,8 +2,8 @@ packages:
|
|||||||
spi: !include ../../test_build_components/common/spi/esp8266-ard.yaml
|
spi: !include ../../test_build_components/common/spi/esp8266-ard.yaml
|
||||||
|
|
||||||
substitutions:
|
substitutions:
|
||||||
clock_pin: GPIO5
|
clock_pin: GPIO15
|
||||||
data_pin: GPIO4
|
data_pin: GPIO16
|
||||||
latch_pin1: GPIO2
|
latch_pin1: GPIO2
|
||||||
oe_pin1: GPIO0
|
oe_pin1: GPIO0
|
||||||
latch_pin2: GPIO3
|
latch_pin2: GPIO3
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ display:
|
|||||||
model: SSD1306_128X64
|
model: SSD1306_128X64
|
||||||
reset_pin: ${reset_pin}
|
reset_pin: ${reset_pin}
|
||||||
address: 0x3C
|
address: 0x3C
|
||||||
id: display1
|
id: ssd1306_i2c_display
|
||||||
contrast: 60%
|
contrast: 60%
|
||||||
pages:
|
pages:
|
||||||
- id: ssd1306_i2c_page1
|
- id: ssd1306_i2c_page1
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
display:
|
display:
|
||||||
- platform: ssd1306_spi
|
- platform: ssd1306_spi
|
||||||
|
id: ssd1306_spi_display
|
||||||
model: SSD1306 128x64
|
model: SSD1306 128x64
|
||||||
cs_pin: ${cs_pin}
|
cs_pin: ${cs_pin}
|
||||||
dc_pin: ${dc_pin}
|
dc_pin: ${dc_pin}
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ display:
|
|||||||
model: SSD1327_128x128
|
model: SSD1327_128x128
|
||||||
reset_pin: ${reset_pin}
|
reset_pin: ${reset_pin}
|
||||||
address: 0x3C
|
address: 0x3C
|
||||||
id: display1
|
id: ssd1327_i2c_display
|
||||||
pages:
|
pages:
|
||||||
- id: ssd1327_i2c_page1
|
- id: ssd1327_i2c_page1
|
||||||
lambda: |-
|
lambda: |-
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
display:
|
display:
|
||||||
- platform: ssd1327_spi
|
- platform: ssd1327_spi
|
||||||
|
id: ssd1327_spi_display
|
||||||
model: SSD1327 128x128
|
model: SSD1327 128x128
|
||||||
cs_pin: ${cs_pin}
|
cs_pin: ${cs_pin}
|
||||||
dc_pin: ${dc_pin}
|
dc_pin: ${dc_pin}
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ display:
|
|||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
reset_pin: ${reset_pin}
|
reset_pin: ${reset_pin}
|
||||||
address: 0x3C
|
address: 0x3C
|
||||||
id: display1
|
id: st7567_i2c_display
|
||||||
pages:
|
pages:
|
||||||
- id: st7567_i2c_page1
|
- id: st7567_i2c_page1
|
||||||
lambda: |-
|
lambda: |-
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
display:
|
display:
|
||||||
- platform: st7567_spi
|
- platform: st7567_spi
|
||||||
|
id: st7567_spi_display
|
||||||
cs_pin: ${cs_pin}
|
cs_pin: ${cs_pin}
|
||||||
dc_pin: ${dc_pin}
|
dc_pin: ${dc_pin}
|
||||||
reset_pin: ${reset_pin}
|
reset_pin: ${reset_pin}
|
||||||
|
|||||||
@@ -6,7 +6,8 @@ udp:
|
|||||||
addresses: ["239.0.60.53"]
|
addresses: ["239.0.60.53"]
|
||||||
|
|
||||||
time:
|
time:
|
||||||
platform: host
|
- platform: host
|
||||||
|
id: host_time
|
||||||
|
|
||||||
syslog:
|
syslog:
|
||||||
port: 514
|
port: 514
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
display:
|
display:
|
||||||
- platform: ssd1306_i2c
|
- platform: ssd1306_i2c
|
||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
id: ssd1306_display
|
id: ssd1306_i2c_display
|
||||||
model: SSD1306_128X64
|
model: SSD1306_128X64
|
||||||
reset_pin: ${disp_reset_pin}
|
reset_pin: ${disp_reset_pin}
|
||||||
pages:
|
pages:
|
||||||
@@ -13,7 +13,7 @@ touchscreen:
|
|||||||
- platform: tt21100
|
- platform: tt21100
|
||||||
i2c_id: i2c_bus
|
i2c_id: i2c_bus
|
||||||
id: tt21100_touchscreen
|
id: tt21100_touchscreen
|
||||||
display: ssd1306_display
|
display: ssd1306_i2c_display
|
||||||
interrupt_pin: ${interrupt_pin}
|
interrupt_pin: ${interrupt_pin}
|
||||||
reset_pin: ${reset_pin}
|
reset_pin: ${reset_pin}
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
packages:
|
packages:
|
||||||
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
|
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
|
||||||
|
uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -4,5 +4,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
|
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
|
||||||
|
uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -1,20 +1,20 @@
|
|||||||
wk2132_spi:
|
wk2132_spi:
|
||||||
- id: wk2132_spi_id
|
- id: wk2132_spi_bridge
|
||||||
cs_pin: ${cs_pin}
|
cs_pin: ${cs_pin}
|
||||||
crystal: 11059200
|
crystal: 11059200
|
||||||
data_rate: 1MHz
|
data_rate: 1MHz
|
||||||
uart:
|
uart:
|
||||||
- id: wk2132_spi_id0
|
- id: wk2132_spi_uart0
|
||||||
channel: 0
|
channel: 0
|
||||||
baud_rate: 115200
|
baud_rate: 115200
|
||||||
stop_bits: 1
|
stop_bits: 1
|
||||||
parity: none
|
parity: none
|
||||||
- id: wk2132_spi_id1
|
- id: wk2132_spi_uart1
|
||||||
channel: 1
|
channel: 1
|
||||||
baud_rate: 9600
|
baud_rate: 9600
|
||||||
|
|
||||||
# Ensures a sensor doesn't break validation
|
# Ensures a sensor doesn't break validation
|
||||||
sensor:
|
sensor:
|
||||||
- platform: a02yyuw
|
- platform: a02yyuw
|
||||||
uart_id: wk2132_spi_id1
|
uart_id: wk2132_spi_uart1
|
||||||
id: distance_sensor
|
id: distance_sensor
|
||||||
|
|||||||
@@ -3,5 +3,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
|
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
|
||||||
|
uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -6,5 +6,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
|
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
|
||||||
|
uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
packages:
|
packages:
|
||||||
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
|
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -4,5 +4,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
|
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -3,5 +3,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
|
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -6,5 +6,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
|
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
packages:
|
packages:
|
||||||
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
|
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -4,5 +4,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
|
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -1,28 +1,28 @@
|
|||||||
wk2204_spi:
|
wk2204_spi:
|
||||||
- id: wk2204_spi_id
|
- id: wk2204_spi_bridge
|
||||||
cs_pin: ${cs_pin}
|
cs_pin: ${cs_pin}
|
||||||
crystal: 11059200
|
crystal: 11059200
|
||||||
data_rate: 1MHz
|
data_rate: 1MHz
|
||||||
uart:
|
uart:
|
||||||
- id: wk2204_spi_id0
|
- id: wk2204_spi_uart0
|
||||||
channel: 0
|
channel: 0
|
||||||
baud_rate: 115200
|
baud_rate: 115200
|
||||||
stop_bits: 1
|
stop_bits: 1
|
||||||
parity: none
|
parity: none
|
||||||
- id: wk2204_spi_id1
|
- id: wk2204_spi_uart1
|
||||||
channel: 1
|
channel: 1
|
||||||
baud_rate: 921600
|
baud_rate: 921600
|
||||||
- id: wk2204_spi_id2
|
- id: wk2204_spi_uart2
|
||||||
channel: 2
|
channel: 2
|
||||||
baud_rate: 115200
|
baud_rate: 115200
|
||||||
stop_bits: 1
|
stop_bits: 1
|
||||||
parity: none
|
parity: none
|
||||||
- id: wk2204_spi_id3
|
- id: wk2204_spi_uart3
|
||||||
channel: 3
|
channel: 3
|
||||||
baud_rate: 9600
|
baud_rate: 9600
|
||||||
|
|
||||||
# Ensures a sensor doesn't break validation
|
# Ensures a sensor doesn't break validation
|
||||||
sensor:
|
sensor:
|
||||||
- platform: a02yyuw
|
- platform: a02yyuw
|
||||||
uart_id: wk2204_spi_id3
|
uart_id: wk2204_spi_uart3
|
||||||
id: distance_sensor
|
id: distance_sensor
|
||||||
|
|||||||
@@ -3,5 +3,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
|
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -6,5 +6,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
|
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
packages:
|
packages:
|
||||||
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
|
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -4,5 +4,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
|
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -3,5 +3,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
|
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
@@ -6,5 +6,6 @@ substitutions:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
|
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
|
||||||
|
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
|
||||||
|
|
||||||
<<: !include common.yaml
|
<<: !include common.yaml
|
||||||
|
|||||||
58  tests/integration/fixtures/sensor_filters_batch_window.yaml  Normal file
@@ -0,0 +1,58 @@
+esphome:
+  name: test-batch-window-filters
+
+host:
+api:
+  batch_delay: 0ms  # Disable batching to receive all state updates
+logger:
+  level: DEBUG
+
+# Template sensor that we'll use to publish values
+sensor:
+  - platform: template
+    name: "Source Sensor"
+    id: source_sensor
+    accuracy_decimals: 2
+
+  # Batch window filters (window_size == send_every) - use streaming filters
+  - platform: copy
+    source_id: source_sensor
+    name: "Min Sensor"
+    id: min_sensor
+    filters:
+      - min:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+
+  - platform: copy
+    source_id: source_sensor
+    name: "Max Sensor"
+    id: max_sensor
+    filters:
+      - max:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+
+  - platform: copy
+    source_id: source_sensor
+    name: "Moving Avg Sensor"
+    id: moving_avg_sensor
+    filters:
+      - sliding_window_moving_average:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+
+# Button to trigger publishing test values
+button:
+  - platform: template
+    name: "Publish Values Button"
+    id: publish_button
+    on_press:
+      - lambda: |-
+          // Publish 10 values: 1.0, 2.0, ..., 10.0
+          for (int i = 1; i <= 10; i++) {
+            id(source_sensor).publish_state(float(i));
+          }
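The fixture above exercises the batch-window case (window_size == send_every), where every output covers exactly the values published since the previous output, so the filter can keep a running aggregate instead of storing the whole window. The following is a minimal Python sketch of that behaviour, assuming ESPHome-style send_first_at/send_every counting; the class and its names are illustrative, not ESPHome source.

from __future__ import annotations

import math


class StreamingMin:
    """Illustrative streaming min filter for the batch-window case (not ESPHome source)."""

    def __init__(self, send_every: int, send_first_at: int = 1) -> None:
        self.send_every = send_every
        self.countdown = send_first_at  # values until the first output
        self.current = math.inf

    def new_value(self, value: float) -> float | None:
        if not math.isnan(value):
            self.current = min(self.current, value)  # NaN samples occupy no state
        self.countdown -= 1
        if self.countdown == 0:
            self.countdown = self.send_every
            result, self.current = self.current, math.inf  # reset for the next batch
            return result
        return None


f = StreamingMin(send_every=5, send_first_at=1)
outputs = [out for v in range(1, 11) if (out := f.new_value(float(v))) is not None]
print(outputs)  # [1.0, 2.0] -> windows [1] and [2, 3, 4, 5, 6]

Run against the fixture's 1.0..10.0 sequence, this reproduces the two outputs the integration tests expect from min_sensor: 1.0 for window [1] and 2.0 for window [2, 3, 4, 5, 6].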
84  tests/integration/fixtures/sensor_filters_nan_handling.yaml  Normal file
@@ -0,0 +1,84 @@
+esphome:
+  name: test-nan-handling
+
+host:
+api:
+  batch_delay: 0ms  # Disable batching to receive all state updates
+logger:
+  level: DEBUG
+
+sensor:
+  - platform: template
+    name: "Source NaN Sensor"
+    id: source_nan_sensor
+    accuracy_decimals: 2
+
+  - platform: copy
+    source_id: source_nan_sensor
+    name: "Min NaN Sensor"
+    id: min_nan_sensor
+    filters:
+      - min:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+
+  - platform: copy
+    source_id: source_nan_sensor
+    name: "Max NaN Sensor"
+    id: max_nan_sensor
+    filters:
+      - max:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+
+script:
+  - id: publish_nan_values_script
+    then:
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: 10.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: !lambda 'return NAN;'
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: 5.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: !lambda 'return NAN;'
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: 15.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: 8.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: !lambda 'return NAN;'
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: 12.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: 3.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_nan_sensor
+          state: !lambda 'return NAN;'
+
+button:
+  - platform: template
+    name: "Publish NaN Values Button"
+    id: publish_nan_button
+    on_press:
+      - script.execute: publish_nan_values_script
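This fixture interleaves NAN samples with real values; the expectation tested further below is that NaN samples still advance the output schedule but are excluded from the aggregate. A hedged sketch of that rule in plain Python (not ESPHome source):

import math


def window_min_ignoring_nan(window: list[float]) -> float:
    """Min over a window where NaN samples occupy slots but are never compared."""
    finite = [v for v in window if not math.isnan(v)]
    return min(finite) if finite else math.nan


nan = math.nan
values = [10.0, nan, 5.0, nan, 15.0, 8.0, nan, 12.0, 3.0, nan]
# send_first_at: 1, send_every: 5 -> outputs after samples 1 and 6
print(window_min_ignoring_nan(values[:1]))   # 10.0
print(window_min_ignoring_nan(values[1:6]))  # 5.0, window [NaN, 5, NaN, 15, 8]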
115  tests/integration/fixtures/sensor_filters_ring_buffer.yaml  Normal file
@@ -0,0 +1,115 @@
+esphome:
+  name: test-sliding-window-filters
+
+host:
+api:
+  batch_delay: 0ms  # Disable batching to receive all state updates
+logger:
+  level: DEBUG
+
+# Template sensor that we'll use to publish values
+sensor:
+  - platform: template
+    name: "Source Sensor"
+    id: source_sensor
+    accuracy_decimals: 2
+
+  # ACTUAL sliding window filters (window_size != send_every) - use ring buffers
+  # Window of 5, send every 2 values
+  - platform: copy
+    source_id: source_sensor
+    name: "Sliding Min Sensor"
+    id: sliding_min_sensor
+    filters:
+      - min:
+          window_size: 5
+          send_every: 2
+          send_first_at: 1
+
+  - platform: copy
+    source_id: source_sensor
+    name: "Sliding Max Sensor"
+    id: sliding_max_sensor
+    filters:
+      - max:
+          window_size: 5
+          send_every: 2
+          send_first_at: 1
+
+  - platform: copy
+    source_id: source_sensor
+    name: "Sliding Median Sensor"
+    id: sliding_median_sensor
+    filters:
+      - median:
+          window_size: 5
+          send_every: 2
+          send_first_at: 1
+
+  - platform: copy
+    source_id: source_sensor
+    name: "Sliding Moving Avg Sensor"
+    id: sliding_moving_avg_sensor
+    filters:
+      - sliding_window_moving_average:
+          window_size: 5
+          send_every: 2
+          send_first_at: 1
+
+# Button to trigger publishing test values
+script:
+  - id: publish_values_script
+    then:
+      # Publish 10 values: 1.0, 2.0, ..., 10.0
+      # With window_size=5, send_every=2, send_first_at=1:
+      # - Output at position 1: window=[1], min=1, max=1, median=1, avg=1
+      # - Output at position 3: window=[1,2,3], min=1, max=3, median=2, avg=2
+      # - Output at position 5: window=[1,2,3,4,5], min=1, max=5, median=3, avg=3
+      # - Output at position 7: window=[3,4,5,6,7], min=3, max=7, median=5, avg=5
+      # - Output at position 9: window=[5,6,7,8,9], min=5, max=9, median=7, avg=7
+      - sensor.template.publish:
+          id: source_sensor
+          state: 1.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 2.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 3.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 4.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 5.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 6.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 7.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 8.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 9.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 10.0
+
+button:
+  - platform: template
+    name: "Publish Values Button"
+    id: publish_button
+    on_press:
+      - script.execute: publish_values_script
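The position/window comments in this fixture can be reproduced with a true ring buffer, which Python's collections.deque models directly. A minimal sketch, assuming the filter keeps the last window_size values and emits every send_every values starting after send_first_at:

from collections import deque
from statistics import median

window = deque(maxlen=5)  # ring buffer: appending a 6th value evicts the 1st
countdown = 1  # send_first_at
for i in range(1, 11):  # publish 1.0 .. 10.0
    window.append(float(i))
    countdown -= 1
    if countdown == 0:
        countdown = 2  # send_every
        avg = sum(window) / len(window)
        print(i, min(window), max(window), median(window), avg)
# 1 -> window [1]:     min 1, max 1, median 1, avg 1
# 3 -> window [1,2,3]: min 1, max 3, median 2, avg 2
# 5 -> window [1..5]:  min 1, max 5, median 3, avg 3
# 7 -> window [3..7]:  min 3, max 7, median 5, avg 5  (buffer wrapped)
# 9 -> window [5..9]:  min 5, max 9, median 7, avg 7  (buffer wrapped)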
@@ -0,0 +1,72 @@
+esphome:
+  name: test-ring-buffer-wraparound
+
+host:
+api:
+  batch_delay: 0ms  # Disable batching to receive all state updates
+logger:
+  level: DEBUG
+
+sensor:
+  - platform: template
+    name: "Source Wraparound Sensor"
+    id: source_wraparound
+    accuracy_decimals: 2
+
+  - platform: copy
+    source_id: source_wraparound
+    name: "Wraparound Min Sensor"
+    id: wraparound_min_sensor
+    filters:
+      - min:
+          window_size: 3
+          send_every: 3
+          send_first_at: 1
+
+script:
+  - id: publish_wraparound_script
+    then:
+      # Publish 9 values to test ring buffer wraparound
+      # Values: 10, 20, 30, 5, 25, 15, 40, 35, 20
+      - sensor.template.publish:
+          id: source_wraparound
+          state: 10.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_wraparound
+          state: 20.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_wraparound
+          state: 30.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_wraparound
+          state: 5.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_wraparound
+          state: 25.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_wraparound
+          state: 15.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_wraparound
+          state: 40.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_wraparound
+          state: 35.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_wraparound
+          state: 20.0
+
+button:
+  - platform: template
+    name: "Publish Wraparound Button"
+    id: publish_wraparound_button
+    on_press:
+      - script.execute: publish_wraparound_script
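For this wraparound fixture (window_size: 3, send_every: 3, send_first_at: 1), the interesting property is that by the second emission the oldest slot has already been overwritten. A sketch of the three emission points under those assumptions; the printed values are computed by the sketch, not quoted from the tests:

from collections import deque

window = deque(maxlen=3)
countdown = 1  # send_first_at
for n, v in enumerate([10, 20, 30, 5, 25, 15, 40, 35, 20], start=1):
    window.append(float(v))
    countdown -= 1
    if countdown == 0:
        countdown = 3  # send_every
        print(n, min(window))
# 1 -> window [10]:         min 10.0
# 4 -> window [20, 30, 5]:  min 5.0  (10 already evicted)
# 7 -> window [25, 15, 40]: min 15.0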
123  tests/integration/fixtures/sensor_filters_sliding_window.yaml  Normal file
@@ -0,0 +1,123 @@
+esphome:
+  name: test-sliding-window-filters
+
+host:
+api:
+  batch_delay: 0ms  # Disable batching to receive all state updates
+logger:
+  level: DEBUG
+
+# Template sensor that we'll use to publish values
+sensor:
+  - platform: template
+    name: "Source Sensor"
+    id: source_sensor
+    accuracy_decimals: 2
+
+  # Min filter sensor
+  - platform: copy
+    source_id: source_sensor
+    name: "Min Sensor"
+    id: min_sensor
+    filters:
+      - min:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+
+  # Max filter sensor
+  - platform: copy
+    source_id: source_sensor
+    name: "Max Sensor"
+    id: max_sensor
+    filters:
+      - max:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+
+  # Median filter sensor
+  - platform: copy
+    source_id: source_sensor
+    name: "Median Sensor"
+    id: median_sensor
+    filters:
+      - median:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+
+  # Quantile filter sensor (90th percentile)
+  - platform: copy
+    source_id: source_sensor
+    name: "Quantile Sensor"
+    id: quantile_sensor
+    filters:
+      - quantile:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+          quantile: 0.9
+
+  # Moving average filter sensor
+  - platform: copy
+    source_id: source_sensor
+    name: "Moving Avg Sensor"
+    id: moving_avg_sensor
+    filters:
+      - sliding_window_moving_average:
+          window_size: 5
+          send_every: 5
+          send_first_at: 1
+
+# Script to publish values with delays
+script:
+  - id: publish_values_script
+    then:
+      - sensor.template.publish:
+          id: source_sensor
+          state: 1.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 2.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 3.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 4.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 5.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 6.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 7.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 8.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 9.0
+      - delay: 20ms
+      - sensor.template.publish:
+          id: source_sensor
+          state: 10.0
+
+# Button to trigger publishing test values
+button:
+  - platform: template
+    name: "Publish Values Button"
+    id: publish_button
+    on_press:
+      - script.execute: publish_values_script
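The quantile expectation asserted later in test_sensor_filters_sliding_window.py (90th percentile of [2, 3, 4, 5, 6] = 6) follows from a nearest-rank style index rule; the exact rounding below is an assumption, chosen only because it reproduces the asserted value:

import math

window = sorted([2.0, 3.0, 4.0, 5.0, 6.0])
q = 0.9
idx = math.ceil(q * len(window)) - 1  # nearest-rank assumption: ceil(0.9 * 5) - 1 = 4
print(window[idx])  # 6.0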
163  tests/integration/test_sensor_filters_ring_buffer.py  Normal file
@@ -0,0 +1,163 @@
+"""Test sensor ring buffer filter functionality (window_size != send_every)."""
+
+from __future__ import annotations
+
+import asyncio
+
+from aioesphomeapi import EntityInfo, EntityState, SensorState
+import pytest
+
+from .types import APIClientConnectedFactory, RunCompiledFunction
+
+
+def build_key_to_sensor_mapping(
+    entities: list[EntityInfo], sensor_names: list[str]
+) -> dict[int, str]:
+    """Build a mapping from entity keys to sensor names.
+
+    Args:
+        entities: List of entity info objects from the API
+        sensor_names: List of sensor names to search for in object_ids
+
+    Returns:
+        Dictionary mapping entity keys to sensor names
+
+    """
+    key_to_sensor: dict[int, str] = {}
+    for entity in entities:
+        obj_id = entity.object_id.lower()
+        for sensor_name in sensor_names:
+            if sensor_name in obj_id:
+                key_to_sensor[entity.key] = sensor_name
+                break
+    return key_to_sensor
+
+
+@pytest.mark.asyncio
+async def test_sensor_filters_ring_buffer(
+    yaml_config: str,
+    run_compiled: RunCompiledFunction,
+    api_client_connected: APIClientConnectedFactory,
+) -> None:
+    """Test that ring buffer filters (window_size != send_every) work correctly."""
+    loop = asyncio.get_running_loop()
+
+    # Track state changes for each sensor
+    sensor_states: dict[str, list[float]] = {
+        "sliding_min": [],
+        "sliding_max": [],
+        "sliding_median": [],
+        "sliding_moving_avg": [],
+    }
+
+    # Futures to track when we receive expected values
+    all_updates_received = loop.create_future()
+
+    def on_state(state: EntityState) -> None:
+        """Track sensor state updates."""
+        if not isinstance(state, SensorState):
+            return
+
+        # Skip NaN values (initial states)
+        if state.missing_state:
+            return
+
+        # Get the sensor name from the key mapping
+        sensor_name = key_to_sensor.get(state.key)
+        if not sensor_name or sensor_name not in sensor_states:
+            return
+
+        sensor_states[sensor_name].append(state.state)
+
+        # Check if we've received enough updates from all sensors
+        # With send_every=2, send_first_at=1, we expect 5 outputs per sensor
+        if (
+            len(sensor_states["sliding_min"]) >= 5
+            and len(sensor_states["sliding_max"]) >= 5
+            and len(sensor_states["sliding_median"]) >= 5
+            and len(sensor_states["sliding_moving_avg"]) >= 5
+            and not all_updates_received.done()
+        ):
+            all_updates_received.set_result(True)
+
+    async with (
+        run_compiled(yaml_config),
+        api_client_connected() as client,
+    ):
+        # Get entities first to build key mapping
+        entities, services = await client.list_entities_services()
+
+        # Build key-to-sensor mapping
+        key_to_sensor = build_key_to_sensor_mapping(
+            entities,
+            [
+                "sliding_min",
+                "sliding_max",
+                "sliding_median",
+                "sliding_moving_avg",
+            ],
+        )
+
+        # Subscribe to state changes AFTER building mapping
+        client.subscribe_states(on_state)
+
+        # Find the publish button
+        publish_button = next(
+            (e for e in entities if "publish_values_button" in e.object_id.lower()),
+            None,
+        )
+        assert publish_button is not None, "Publish Values Button not found"
+
+        # Press the button to publish test values
+        client.button_command(publish_button.key)
+
+        # Wait for all sensors to receive their values
+        try:
+            await asyncio.wait_for(all_updates_received, timeout=10.0)
+        except TimeoutError:
+            # Provide detailed failure info
+            pytest.fail(
+                f"Timeout waiting for updates. Received states:\n"
+                f"  min: {sensor_states['sliding_min']}\n"
+                f"  max: {sensor_states['sliding_max']}\n"
+                f"  median: {sensor_states['sliding_median']}\n"
+                f"  moving_avg: {sensor_states['sliding_moving_avg']}"
+            )
+
+        # Verify we got 5 outputs per sensor (positions 1, 3, 5, 7, 9)
+        assert len(sensor_states["sliding_min"]) == 5, (
+            f"Min sensor should have 5 values, got {len(sensor_states['sliding_min'])}: {sensor_states['sliding_min']}"
+        )
+        assert len(sensor_states["sliding_max"]) == 5
+        assert len(sensor_states["sliding_median"]) == 5
+        assert len(sensor_states["sliding_moving_avg"]) == 5
+
+        # Verify the values at each output position
+        # Position 1: window=[1]
+        assert abs(sensor_states["sliding_min"][0] - 1.0) < 0.01
+        assert abs(sensor_states["sliding_max"][0] - 1.0) < 0.01
+        assert abs(sensor_states["sliding_median"][0] - 1.0) < 0.01
+        assert abs(sensor_states["sliding_moving_avg"][0] - 1.0) < 0.01
+
+        # Position 3: window=[1,2,3]
+        assert abs(sensor_states["sliding_min"][1] - 1.0) < 0.01
+        assert abs(sensor_states["sliding_max"][1] - 3.0) < 0.01
+        assert abs(sensor_states["sliding_median"][1] - 2.0) < 0.01
+        assert abs(sensor_states["sliding_moving_avg"][1] - 2.0) < 0.01
+
+        # Position 5: window=[1,2,3,4,5]
+        assert abs(sensor_states["sliding_min"][2] - 1.0) < 0.01
+        assert abs(sensor_states["sliding_max"][2] - 5.0) < 0.01
+        assert abs(sensor_states["sliding_median"][2] - 3.0) < 0.01
+        assert abs(sensor_states["sliding_moving_avg"][2] - 3.0) < 0.01
+
+        # Position 7: window=[3,4,5,6,7] (ring buffer wrapped)
+        assert abs(sensor_states["sliding_min"][3] - 3.0) < 0.01
+        assert abs(sensor_states["sliding_max"][3] - 7.0) < 0.01
+        assert abs(sensor_states["sliding_median"][3] - 5.0) < 0.01
+        assert abs(sensor_states["sliding_moving_avg"][3] - 5.0) < 0.01
+
+        # Position 9: window=[5,6,7,8,9] (ring buffer wrapped)
+        assert abs(sensor_states["sliding_min"][4] - 5.0) < 0.01
+        assert abs(sensor_states["sliding_max"][4] - 9.0) < 0.01
+        assert abs(sensor_states["sliding_median"][4] - 7.0) < 0.01
+        assert abs(sensor_states["sliding_moving_avg"][4] - 7.0) < 0.01
387  tests/integration/test_sensor_filters_sliding_window.py  Normal file
@@ -0,0 +1,387 @@
|
"""Test sensor sliding window filter functionality."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
from aioesphomeapi import EntityInfo, EntityState, SensorState
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from .types import APIClientConnectedFactory, RunCompiledFunction
|
||||||
|
|
||||||
|
|
||||||
|
def build_key_to_sensor_mapping(
|
||||||
|
entities: list[EntityInfo], sensor_names: list[str]
|
||||||
|
) -> dict[int, str]:
|
||||||
|
"""Build a mapping from entity keys to sensor names.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
entities: List of entity info objects from the API
|
||||||
|
sensor_names: List of sensor names to search for in object_ids
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary mapping entity keys to sensor names
|
||||||
|
"""
|
||||||
|
key_to_sensor: dict[int, str] = {}
|
||||||
|
for entity in entities:
|
||||||
|
obj_id = entity.object_id.lower()
|
||||||
|
for sensor_name in sensor_names:
|
||||||
|
if sensor_name in obj_id:
|
||||||
|
key_to_sensor[entity.key] = sensor_name
|
||||||
|
break
|
||||||
|
return key_to_sensor
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_sensor_filters_sliding_window(
|
||||||
|
yaml_config: str,
|
||||||
|
run_compiled: RunCompiledFunction,
|
||||||
|
api_client_connected: APIClientConnectedFactory,
|
||||||
|
) -> None:
|
||||||
|
"""Test that sliding window filters (min, max, median, quantile, moving_average) work correctly."""
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
|
||||||
|
# Track state changes for each sensor
|
||||||
|
sensor_states: dict[str, list[float]] = {
|
||||||
|
"min_sensor": [],
|
||||||
|
"max_sensor": [],
|
||||||
|
"median_sensor": [],
|
||||||
|
"quantile_sensor": [],
|
||||||
|
"moving_avg_sensor": [],
|
||||||
|
}
|
||||||
|
|
||||||
|
# Futures to track when we receive expected values
|
||||||
|
min_received = loop.create_future()
|
||||||
|
max_received = loop.create_future()
|
||||||
|
median_received = loop.create_future()
|
||||||
|
quantile_received = loop.create_future()
|
||||||
|
moving_avg_received = loop.create_future()
|
||||||
|
|
||||||
|
def on_state(state: EntityState) -> None:
|
||||||
|
"""Track sensor state updates."""
|
||||||
|
if not isinstance(state, SensorState):
|
||||||
|
return
|
||||||
|
|
||||||
|
# Skip NaN values (initial states)
|
||||||
|
if state.missing_state:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Get the sensor name from the key mapping
|
||||||
|
sensor_name = key_to_sensor.get(state.key)
|
||||||
|
if not sensor_name or sensor_name not in sensor_states:
|
||||||
|
return
|
||||||
|
|
||||||
|
sensor_states[sensor_name].append(state.state)
|
||||||
|
|
||||||
|
# Check if we received the expected final value
|
||||||
|
# After publishing 10 values [1.0, 2.0, ..., 10.0], the window has the last 5: [2, 3, 4, 5, 6]
|
||||||
|
# Filters send at position 1 and position 6 (send_every=5 means every 5th value after first)
|
||||||
|
if (
|
||||||
|
sensor_name == "min_sensor"
|
||||||
|
and abs(state.state - 2.0) < 0.01
|
||||||
|
and not min_received.done()
|
||||||
|
):
|
||||||
|
min_received.set_result(True)
|
||||||
|
elif (
|
||||||
|
sensor_name == "max_sensor"
|
||||||
|
and abs(state.state - 6.0) < 0.01
|
||||||
|
and not max_received.done()
|
||||||
|
):
|
||||||
|
max_received.set_result(True)
|
||||||
|
elif (
|
||||||
|
sensor_name == "median_sensor"
|
||||||
|
and abs(state.state - 4.0) < 0.01
|
||||||
|
and not median_received.done()
|
||||||
|
):
|
||||||
|
# Median of [2, 3, 4, 5, 6] = 4
|
||||||
|
median_received.set_result(True)
|
||||||
|
elif (
|
||||||
|
sensor_name == "quantile_sensor"
|
||||||
|
and abs(state.state - 6.0) < 0.01
|
||||||
|
and not quantile_received.done()
|
||||||
|
):
|
||||||
|
# 90th percentile of [2, 3, 4, 5, 6] = 6
|
||||||
|
quantile_received.set_result(True)
|
||||||
|
elif (
|
||||||
|
sensor_name == "moving_avg_sensor"
|
||||||
|
and abs(state.state - 4.0) < 0.01
|
||||||
|
and not moving_avg_received.done()
|
||||||
|
):
|
||||||
|
# Average of [2, 3, 4, 5, 6] = 4
|
||||||
|
moving_avg_received.set_result(True)
|
||||||
|
|
||||||
|
async with (
|
||||||
|
run_compiled(yaml_config),
|
||||||
|
api_client_connected() as client,
|
||||||
|
):
|
||||||
|
# Get entities first to build key mapping
|
||||||
|
entities, services = await client.list_entities_services()
|
||||||
|
|
||||||
|
# Build key-to-sensor mapping
|
||||||
|
key_to_sensor = build_key_to_sensor_mapping(
|
||||||
|
entities,
|
||||||
|
[
|
||||||
|
"min_sensor",
|
||||||
|
"max_sensor",
|
||||||
|
"median_sensor",
|
||||||
|
"quantile_sensor",
|
||||||
|
"moving_avg_sensor",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
# Subscribe to state changes AFTER building mapping
|
||||||
|
client.subscribe_states(on_state)
|
||||||
|
|
||||||
|
# Find the publish button
|
||||||
|
publish_button = next(
|
||||||
|
(e for e in entities if "publish_values_button" in e.object_id.lower()),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
assert publish_button is not None, "Publish Values Button not found"
|
||||||
|
|
||||||
|
# Press the button to publish test values
|
||||||
|
client.button_command(publish_button.key)
|
||||||
|
|
||||||
|
# Wait for all sensors to receive their final values
|
||||||
|
try:
|
||||||
|
await asyncio.wait_for(
|
||||||
|
asyncio.gather(
|
||||||
|
min_received,
|
||||||
|
max_received,
|
||||||
|
median_received,
|
||||||
|
quantile_received,
|
||||||
|
moving_avg_received,
|
||||||
|
),
|
||||||
|
timeout=10.0,
|
||||||
|
)
|
||||||
|
except TimeoutError:
|
||||||
|
# Provide detailed failure info
|
||||||
|
pytest.fail(
|
||||||
|
f"Timeout waiting for expected values. Received states:\n"
|
||||||
|
f" min: {sensor_states['min_sensor']}\n"
|
||||||
|
f" max: {sensor_states['max_sensor']}\n"
|
||||||
|
f" median: {sensor_states['median_sensor']}\n"
|
||||||
|
f" quantile: {sensor_states['quantile_sensor']}\n"
|
||||||
|
f" moving_avg: {sensor_states['moving_avg_sensor']}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify we got the expected values
|
||||||
|
# With batch_delay: 0ms, we should receive all outputs
|
||||||
|
# Filters output at positions 1 and 6 (send_every: 5)
|
||||||
|
assert len(sensor_states["min_sensor"]) == 2, (
|
||||||
|
f"Min sensor should have 2 values, got {len(sensor_states['min_sensor'])}: {sensor_states['min_sensor']}"
|
||||||
|
)
|
||||||
|
assert len(sensor_states["max_sensor"]) == 2, (
|
||||||
|
f"Max sensor should have 2 values, got {len(sensor_states['max_sensor'])}: {sensor_states['max_sensor']}"
|
||||||
|
)
|
||||||
|
assert len(sensor_states["median_sensor"]) == 2
|
||||||
|
assert len(sensor_states["quantile_sensor"]) == 2
|
||||||
|
assert len(sensor_states["moving_avg_sensor"]) == 2
|
||||||
|
|
||||||
|
# Verify the first output (after 1 value: [1])
|
||||||
|
assert abs(sensor_states["min_sensor"][0] - 1.0) < 0.01, (
|
||||||
|
f"First min should be 1.0, got {sensor_states['min_sensor'][0]}"
|
||||||
|
)
|
||||||
|
assert abs(sensor_states["max_sensor"][0] - 1.0) < 0.01, (
|
||||||
|
f"First max should be 1.0, got {sensor_states['max_sensor'][0]}"
|
||||||
|
)
|
||||||
|
assert abs(sensor_states["median_sensor"][0] - 1.0) < 0.01, (
|
||||||
|
f"First median should be 1.0, got {sensor_states['median_sensor'][0]}"
|
||||||
|
)
|
||||||
|
assert abs(sensor_states["moving_avg_sensor"][0] - 1.0) < 0.01, (
|
||||||
|
f"First moving avg should be 1.0, got {sensor_states['moving_avg_sensor'][0]}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify the second output (after 6 values, window has [2, 3, 4, 5, 6])
|
||||||
|
assert abs(sensor_states["min_sensor"][1] - 2.0) < 0.01, (
|
||||||
|
f"Second min should be 2.0, got {sensor_states['min_sensor'][1]}"
|
||||||
|
)
|
||||||
|
assert abs(sensor_states["max_sensor"][1] - 6.0) < 0.01, (
|
||||||
|
f"Second max should be 6.0, got {sensor_states['max_sensor'][1]}"
|
||||||
|
)
|
||||||
|
assert abs(sensor_states["median_sensor"][1] - 4.0) < 0.01, (
|
||||||
|
f"Second median should be 4.0, got {sensor_states['median_sensor'][1]}"
|
||||||
|
)
|
||||||
|
assert abs(sensor_states["moving_avg_sensor"][1] - 4.0) < 0.01, (
|
||||||
|
f"Second moving avg should be 4.0, got {sensor_states['moving_avg_sensor'][1]}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_sensor_filters_nan_handling(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test that sliding window filters handle NaN values correctly."""
    loop = asyncio.get_running_loop()

    # Track states
    min_states: list[float] = []
    max_states: list[float] = []

    # Future to track completion
    filters_completed = loop.create_future()

    def on_state(state: EntityState) -> None:
        """Track sensor state updates."""
        if not isinstance(state, SensorState):
            return

        # Skip NaN values (initial states)
        if state.missing_state:
            return

        sensor_name = key_to_sensor.get(state.key)
        if sensor_name == "min_nan":
            min_states.append(state.state)
        elif sensor_name == "max_nan":
            max_states.append(state.state)

        # Check if both have received their final values
        # With batch_delay: 0ms, we should receive 2 outputs each
        if (
            len(min_states) >= 2
            and len(max_states) >= 2
            and not filters_completed.done()
        ):
            filters_completed.set_result(True)

    async with (
        run_compiled(yaml_config),
        api_client_connected() as client,
    ):
        # Get entities first to build key mapping
        entities, services = await client.list_entities_services()

        # Build key-to-sensor mapping
        key_to_sensor = build_key_to_sensor_mapping(entities, ["min_nan", "max_nan"])

        # Subscribe to state changes AFTER building mapping
        client.subscribe_states(on_state)

        # Find the publish button
        publish_button = next(
            (e for e in entities if "publish_nan_values_button" in e.object_id.lower()),
            None,
        )
        assert publish_button is not None, "Publish NaN Values Button not found"

        # Press the button
        client.button_command(publish_button.key)

        # Wait for filters to process
        try:
            await asyncio.wait_for(filters_completed, timeout=10.0)
        except TimeoutError:
            pytest.fail(
                f"Timeout waiting for NaN handling. Received:\n"
                f"  min_states: {min_states}\n"
                f"  max_states: {max_states}"
            )

        # Verify NaN values were ignored
        # With batch_delay: 0ms, we should receive both outputs (at positions 1 and 6)
        # Position 1: window=[10], min=10, max=10
        # Position 6: window=[NaN, 5, NaN, 15, 8], ignoring NaN -> [5, 15, 8], min=5, max=15
        assert len(min_states) == 2, (
            f"Should have 2 min states, got {len(min_states)}: {min_states}"
        )
        assert len(max_states) == 2, (
            f"Should have 2 max states, got {len(max_states)}: {max_states}"
        )

        # First output
        assert abs(min_states[0] - 10.0) < 0.01, (
            f"First min should be 10.0, got {min_states[0]}"
        )
        assert abs(max_states[0] - 10.0) < 0.01, (
            f"First max should be 10.0, got {max_states[0]}"
        )

        # Second output - verify NaN values were ignored
        assert abs(min_states[1] - 5.0) < 0.01, (
            f"Second min should ignore NaN and return 5.0, got {min_states[1]}"
        )
        assert abs(max_states[1] - 15.0) < 0.01, (
            f"Second max should ignore NaN and return 15.0, got {max_states[1]}"
        )
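
# Reference model of the NaN-ignoring reduction exercised above (an
# illustrative sketch, not the component's actual C++ implementation):
# the window keeps NaN slots but skips them when computing min/max.
import math


def _min_max_ignoring_nan(window: list[float]) -> tuple[float, float]:
    """Return (min, max) over a window, skipping NaN entries."""
    valid = [v for v in window if not math.isnan(v)]
    return min(valid), max(valid)


# e.g. _min_max_ignoring_nan([float("nan"), 5, float("nan"), 15, 8]) == (5, 15)
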
@pytest.mark.asyncio
async def test_sensor_filters_ring_buffer_wraparound(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test that ring buffer correctly wraps around when window fills up."""
    loop = asyncio.get_running_loop()

    min_states: list[float] = []

    test_completed = loop.create_future()

    def on_state(state: EntityState) -> None:
        """Track min sensor states."""
        if not isinstance(state, SensorState):
            return

        # Skip NaN values (initial states)
        if state.missing_state:
            return

        sensor_name = key_to_sensor.get(state.key)
        if sensor_name == "wraparound_min":
            min_states.append(state.state)
            # With batch_delay: 0ms, we should receive all 3 outputs
            if len(min_states) >= 3 and not test_completed.done():
                test_completed.set_result(True)

    async with (
        run_compiled(yaml_config),
        api_client_connected() as client,
    ):
        # Get entities first to build key mapping
        entities, services = await client.list_entities_services()

        # Build key-to-sensor mapping
        key_to_sensor = build_key_to_sensor_mapping(entities, ["wraparound_min"])

        # Subscribe to state changes AFTER building mapping
        client.subscribe_states(on_state)

        # Find the publish button
        publish_button = next(
            (e for e in entities if "publish_wraparound_button" in e.object_id.lower()),
            None,
        )
        assert publish_button is not None, "Publish Wraparound Button not found"

        # Press the button
        # Will publish: 10, 20, 30, 5, 25, 15, 40, 35, 20
        client.button_command(publish_button.key)

        # Wait for completion
        try:
            await asyncio.wait_for(test_completed, timeout=10.0)
        except TimeoutError:
            pytest.fail(f"Timeout waiting for wraparound test. Received: {min_states}")

        # Verify outputs
        # With window_size=3, send_every=3, we get outputs at positions 1, 4, 7
        # Position 1: window=[10], min=10
        # Position 4: window=[20, 30, 5], min=5
        # Position 7: window=[25, 15, 40], min=15 (the buffer has wrapped past 10, 20, 30, 5)
        # With batch_delay: 0ms, we should receive all 3 outputs
        assert len(min_states) == 3, (
            f"Should have 3 states, got {len(min_states)}: {min_states}"
        )
        assert abs(min_states[0] - 10.0) < 0.01, (
            f"First min should be 10.0, got {min_states[0]}"
        )
        assert abs(min_states[1] - 5.0) < 0.01, (
            f"Second min should be 5.0, got {min_states[1]}"
        )
        assert abs(min_states[2] - 15.0) < 0.01, (
            f"Third min should be 15.0, got {min_states[2]}"
        )
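
# Pure-Python sketch of the ring-buffer send schedule exercised above
# (illustrative, not the actual C++ filter): with window_size=3,
# send_every=3 and send_first_at=1, outputs fire at positions 1, 4 and 7.
def _sliding_min_outputs(
    values: list[float],
    window_size: int = 3,
    send_every: int = 3,
    send_first_at: int = 1,
) -> list[float]:
    """Return the window minimum at each scheduled send position."""
    outputs: list[float] = []
    for pos in range(1, len(values) + 1):
        if pos >= send_first_at and (pos - send_first_at) % send_every == 0:
            # The ring buffer holds at most the last window_size values
            window = values[max(0, pos - window_size):pos]
            outputs.append(min(window))
    return outputs


# _sliding_min_outputs([10, 20, 30, 5, 25, 15, 40, 35, 20]) == [10, 5, 15]
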
@@ -3,9 +3,13 @@ esphome:
   friendly_name: $component_name
 
 esp32:
-  board: nodemcu-32s
+  # Use board with 8MB flash for testing large component groups
+  board: esp32-pico-devkitm-2
   framework:
     type: esp-idf
+  # Use custom partition table with larger app partitions (3MB each)
+  # Default IDF partitions only allow 1.75MB which is too small for grouped tests
+  partitions: ../partitions_testing.csv
 
 logger:
   level: VERY_VERBOSE
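Two 3MB OTA app slots fit comfortably on the 8MB flash part selected above. A sketch of what partitions_testing.csv might contain, in standard ESP-IDF partition-table CSV (illustrative; the actual file is not shown in this diff):

# Name,    Type, SubType, Offset,   Size
nvs,       data, nvs,     0x9000,   0x5000
otadata,   data, ota,     0xe000,   0x2000
app0,      app,  ota_0,   0x10000,  0x300000
app1,      app,  ota_1,   0x310000, 0x300000
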
@@ -1,3 +1,10 @@
+# I2C bus for camera sensor
+i2c:
+  - id: i2c_camera_bus
+    sda: 25
+    scl: 23
+    frequency: 400kHz
+
 esp32_camera:
   name: ESP32 Camera
   data_pins:
@@ -15,9 +22,7 @@ esp32_camera:
   external_clock:
     pin: 27
     frequency: 20MHz
-  i2c_pins:
-    sda: 25
-    scl: 23
+  i2c_id: i2c_camera_bus
   reset_pin: 15
   power_down_pin: 1
   resolution: 640x480
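Switching from dedicated i2c_pins to a shared bus ID lets other grouped components reuse the same pins. Hypothetically, a second device in the same grouped test could join the bus like this (illustrative, not part of this diff):

sensor:
  - platform: bme280_i2c
    i2c_id: i2c_camera_bus
    address: 0x76
    temperature:
      name: Grouped Test Temperature
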
@@ -0,0 +1,11 @@
+# Common configuration for 2-channel UART bridge/expander chips
+# Used by components like wk2132 that create 2 UART channels
+# Defines standardized UART IDs: uart_id_0, uart_id_1
+
+substitutions:
+  # These will be overridden by component-specific values
+  uart_bridge_address: "0x70"
+
+# Note: The actual UART instances are created by the bridge component
+# This package just ensures all bridge components use the same ID naming convention
+# so they can be grouped together without conflicts
@@ -0,0 +1,11 @@
+# Common configuration for 4-channel UART bridge/expander chips
+# Used by components like wk2168, wk2204, wk2212 that create 4 UART channels
+# Defines standardized UART IDs: uart_id_0, uart_id_1, uart_id_2, uart_id_3
+
+substitutions:
+  # These will be overridden by component-specific values
+  uart_bridge_address: "0x70"
+
+# Note: The actual UART instances are created by the bridge component
+# This package just ensures all bridge components use the same ID naming convention
+# so they can be grouped together without conflicts
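A component's test config can then pull in one of these packages and override the substitution; a minimal sketch (the include path and address value are illustrative):

substitutions:
  # Overrides the package default of "0x70"
  uart_bridge_address: "0x71"

packages:
  uart_bridge: !include ../common/uart_bridge_2ch.yaml
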
Some files were not shown because too many files have changed in this diff.