mirror of https://github.com/esphome/esphome.git
synced 2025-11-14 05:45:48 +00:00

Compare commits: cache_gith...ci_impact_
128 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 3f34de5d6e |  |
|  | 629bded07c |  |
|  | 8b26ed1eda |  |
|  | fd32f6930e |  |
|  | b4ae85cf0f |  |
|  | e70cb098ae |  |
|  | 7f2d8a2c11 |  |
|  | 4f4da1de22 |  |
|  | f9807db08a |  |
|  | 541fb8b27c |  |
|  | 85e0a4fbf9 |  |
|  | 7e54803ede |  |
|  | a078486a87 |  |
|  | ba18bb6a4f |  |
|  | 07ad32968e |  |
|  | 0b077bdfc6 |  |
|  | 1f00617738 |  |
|  | 9cf1fd24fd |  |
|  | bbd636a8cc |  |
|  | 322dc530a9 |  |
|  | 0b09e50685 |  |
|  | a96cc5e6f2 |  |
|  | 9a4288d81a |  |
|  | b95999aca7 |  |
|  | c70937ed01 |  |
|  | 3151606d50 |  |
|  | 5080698c3a |  |
|  | 931e3f80f0 |  |
|  | b5190e5e87 |  |
|  | 66103b71ba |  |
|  | 050d9575f2 |  |
|  | cd93f7f55a |  |
|  | d98b00f56d |  |
|  | 8fd43f1d96 |  |
|  | 0475ec5533 |  |
|  | 5ccdc48ce2 |  |
|  | db20f90aa0 |  |
|  | 6fe5a0c736 |  |
|  | 1ec9383abe |  |
|  | 5e1ee92754 |  |
|  | 558d4eb9dd |  |
|  | c6ecfd0c55 |  |
|  | 3b8b2c0754 |  |
|  | f5d69a2539 |  |
|  | 29b9073d62 |  |
|  | a45e94cd06 |  |
|  | 71f2fb8353 |  |
|  | 0fcae15c25 |  |
|  | a1d6bac21a |  |
|  | db69ce24ae |  |
|  | 293400ee14 |  |
|  | 57bf3f968f |  |
|  | 922c2bcd5a |  |
|  | 5e9b972831 |  |
|  | 3bc0041b94 |  |
|  | daa03e5b3c |  |
|  | 62ce39e430 |  |
|  | a9e5e4d6d2 |  |
|  | 95a0c9594f |  |
|  | 8762d7cf0e |  |
|  | 84316d62f9 |  |
|  | e1e047c53f |  |
|  | b0ada914bc |  |
|  | e2101f5a20 |  |
|  | f87c969b43 |  |
|  | f011c44130 |  |
|  | 843f590db4 |  |
|  | 2c86ebaf7f |  |
|  | 25fe4a1476 |  |
|  | 86c12079b4 |  |
|  | 79aafe2cd5 |  |
|  | a5d6e39b2f |  |
|  | a78a7dfa4e |  |
|  | 7879df4dd1 |  |
|  | 43c62297e8 |  |
|  | 5049c7227d |  |
|  | 256d3b119b |  |
|  | 6d2c700c43 |  |
|  | 9d081795e8 |  |
|  | 59848a2c8a |  |
|  | c7c408e667 |  |
|  | acfa325f23 |  |
|  | cb97271704 |  |
|  | 1118ef32c3 |  |
|  | 0cff6acdf4 |  |
|  | 7be04916ac |  |
|  | b5c4dc13e0 |  |
|  | 0200d7c358 |  |
|  | 44ad787cb3 |  |
|  | 699da1adc1 |  |
|  | 4d7e8ffd0a |  |
|  | 3ba2212cfc |  |
|  | bb2be9869d |  |
|  | b4ba2aff30 |  |
|  | febe075bb2 |  |
|  | baf117b411 |  |
|  | 55e03036e2 |  |
|  | af45dc206f |  |
|  | 7027ae9833 |  |
|  | 784183ca8d |  |
|  | 855df423ee |  |
|  | f75f11b550 |  |
|  | a999349fa5 |  |
|  | 92d54ffb09 |  |
|  | 589c25e65a |  |
|  | 5a8558e1c5 |  |
|  | a72c494b75 |  |
|  | e3089ff0f6 |  |
|  | a4b14902db |  |
|  | 4ff39ee82c |  |
|  | 447ee3da39 |  |
|  | 9b6707c1c0 |  |
|  | 0c18dd872b |  |
|  | b074ca8a1e |  |
|  | 4c24545b82 |  |
|  | cd252a33f9 |  |
|  | 36f8511309 |  |
|  | 12874187dd |  |
|  | d7832c44bc |  |
|  | 8e6ee2bed1 |  |
|  | 354f46f7c0 |  |
|  | 7b6acd3c00 |  |
|  | 11f5f7683c |  |
|  | 5da589abd0 |  |
|  | daa39a489d |  |
|  | 3bb95a190d |  |
|  | 25a6202bb9 |  |
|  | c4eeed7f7e |  |
108 .github/workflows/ci-memory-impact-comment.yml vendored
@@ -1,108 +0,0 @@
---
name: Memory Impact Comment (Forks)

on:
  workflow_run:
    workflows: ["CI"]
    types: [completed]

permissions:
  contents: read
  pull-requests: write
  actions: read

jobs:
  memory-impact-comment:
    name: Post memory impact comment (fork PRs only)
    runs-on: ubuntu-24.04
    # Only run for PRs from forks that had successful CI runs
    if: >
      github.event.workflow_run.event == 'pull_request' &&
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_repository.full_name != github.repository
    env:
      GH_TOKEN: ${{ github.token }}
    steps:
      - name: Get PR details
        id: pr
        run: |
          # Get PR details by searching for PR with matching head SHA
          # The workflow_run.pull_requests field is often empty for forks
          head_sha="${{ github.event.workflow_run.head_sha }}"
          pr_data=$(gh api "/repos/${{ github.repository }}/commits/$head_sha/pulls" \
            --jq '.[0] | {number: .number, base_ref: .base.ref}')
          if [ -z "$pr_data" ] || [ "$pr_data" == "null" ]; then
            echo "No PR found for SHA $head_sha, skipping"
            echo "skip=true" >> $GITHUB_OUTPUT
            exit 0
          fi

          pr_number=$(echo "$pr_data" | jq -r '.number')
          base_ref=$(echo "$pr_data" | jq -r '.base_ref')

          echo "pr_number=$pr_number" >> $GITHUB_OUTPUT
          echo "base_ref=$base_ref" >> $GITHUB_OUTPUT
          echo "Found PR #$pr_number targeting base branch: $base_ref"

      - name: Check out code from base repository
        if: steps.pr.outputs.skip != 'true'
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          # Always check out from the base repository (esphome/esphome), never from forks
          # Use the PR's target branch to ensure we run trusted code from the main repo
          repository: ${{ github.repository }}
          ref: ${{ steps.pr.outputs.base_ref }}

      - name: Restore Python
        if: steps.pr.outputs.skip != 'true'
        uses: ./.github/actions/restore-python
        with:
          python-version: "3.11"
          cache-key: ${{ hashFiles('.cache-key') }}

      - name: Download memory analysis artifacts
        if: steps.pr.outputs.skip != 'true'
        run: |
          run_id="${{ github.event.workflow_run.id }}"
          echo "Downloading artifacts from workflow run $run_id"

          mkdir -p memory-analysis

          # Download target analysis artifact
          if gh run download --name "memory-analysis-target" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then
            echo "Downloaded memory-analysis-target artifact."
          else
            echo "No memory-analysis-target artifact found."
          fi

          # Download PR analysis artifact
          if gh run download --name "memory-analysis-pr" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then
            echo "Downloaded memory-analysis-pr artifact."
          else
            echo "No memory-analysis-pr artifact found."
          fi

      - name: Check if artifacts exist
        id: check
        if: steps.pr.outputs.skip != 'true'
        run: |
          if [ -f ./memory-analysis/memory-analysis-target.json ] && [ -f ./memory-analysis/memory-analysis-pr.json ]; then
            echo "found=true" >> $GITHUB_OUTPUT
          else
            echo "found=false" >> $GITHUB_OUTPUT
            echo "Memory analysis artifacts not found, skipping comment"
          fi

      - name: Post or update PR comment
        if: steps.pr.outputs.skip != 'true' && steps.check.outputs.found == 'true'
        env:
          PR_NUMBER: ${{ steps.pr.outputs.pr_number }}
        run: |
          . venv/bin/activate
          # Pass PR number and JSON file paths directly to Python script
          # Let Python parse the JSON to avoid shell injection risks
          # The script will validate and sanitize all inputs
          python script/ci_memory_impact_comment.py \
            --pr-number "$PR_NUMBER" \
            --target-json ./memory-analysis/memory-analysis-target.json \
            --pr-json ./memory-analysis/memory-analysis-pr.json
80 .github/workflows/ci.yml vendored
@@ -432,21 +432,6 @@ jobs:
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}

      - name: Cache platformio
        if: github.ref == 'refs/heads/dev'
        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          path: ~/.platformio
          key: platformio-test-${{ hashFiles('platformio.ini') }}

      - name: Cache platformio
        if: github.ref != 'refs/heads/dev'
        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          path: ~/.platformio
          key: platformio-test-${{ hashFiles('platformio.ini') }}

      - name: Validate and compile components with intelligent grouping
        run: |
          . venv/bin/activate
@@ -656,12 +641,6 @@ jobs:
            --output-env \
            --output-json memory-analysis-target.json

          # Add metadata to JSON before caching
          python script/ci_add_metadata_to_json.py \
            --json-file memory-analysis-target.json \
            --components "$components" \
            --platform "$platform"

      - name: Save memory analysis to cache
        if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
        uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
@@ -741,13 +720,6 @@ jobs:
          python script/ci_memory_impact_extract.py \
            --output-env \
            --output-json memory-analysis-pr.json

          # Add metadata to JSON (components and platform are in shell variables above)
          python script/ci_add_metadata_to_json.py \
            --json-file memory-analysis-pr.json \
            --components "$components" \
            --platform "$platform"

      - name: Upload memory analysis JSON
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -764,12 +736,10 @@ jobs:
      - determine-jobs
      - memory-impact-target-branch
      - memory-impact-pr-branch
    if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
    if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
    permissions:
      contents: read
      pull-requests: write
    env:
      GH_TOKEN: ${{ github.token }}
    steps:
      - name: Check out code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -792,16 +762,52 @@ jobs:
        continue-on-error: true
      - name: Post or update PR comment
        env:
          PR_NUMBER: ${{ github.event.pull_request.number }}
          GH_TOKEN: ${{ github.token }}
          COMPONENTS: ${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}
          PLATFORM: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}
          TARGET_RAM: ${{ needs.memory-impact-target-branch.outputs.ram_usage }}
          TARGET_FLASH: ${{ needs.memory-impact-target-branch.outputs.flash_usage }}
          PR_RAM: ${{ needs.memory-impact-pr-branch.outputs.ram_usage }}
          PR_FLASH: ${{ needs.memory-impact-pr-branch.outputs.flash_usage }}
          TARGET_CACHE_HIT: ${{ needs.memory-impact-target-branch.outputs.cache_hit }}
        run: |
          . venv/bin/activate

          # Pass JSON file paths directly to Python script
          # All data is extracted from JSON files for security
          # Check if analysis JSON files exist
          target_json_arg=""
          pr_json_arg=""

          if [ -f ./memory-analysis/memory-analysis-target.json ]; then
            echo "Found target analysis JSON"
            target_json_arg="--target-json ./memory-analysis/memory-analysis-target.json"
          else
            echo "No target analysis JSON found"
          fi

          if [ -f ./memory-analysis/memory-analysis-pr.json ]; then
            echo "Found PR analysis JSON"
            pr_json_arg="--pr-json ./memory-analysis/memory-analysis-pr.json"
          else
            echo "No PR analysis JSON found"
          fi

          # Add cache flag if target was cached
          cache_flag=""
          if [ "$TARGET_CACHE_HIT" == "true" ]; then
            cache_flag="--target-cache-hit"
          fi

          python script/ci_memory_impact_comment.py \
            --pr-number "$PR_NUMBER" \
            --target-json ./memory-analysis/memory-analysis-target.json \
            --pr-json ./memory-analysis/memory-analysis-pr.json
            --pr-number "${{ github.event.pull_request.number }}" \
            --components "$COMPONENTS" \
            --platform "$PLATFORM" \
            --target-ram "$TARGET_RAM" \
            --target-flash "$TARGET_FLASH" \
            --pr-ram "$PR_RAM" \
            --pr-flash "$PR_FLASH" \
            $target_json_arg \
            $pr_json_arg \
            $cache_flag

  ci-status:
    name: CI Status
@@ -70,7 +70,6 @@ esphome/components/bl0939/* @ziceva
esphome/components/bl0940/* @dan-s-github @tobias-
esphome/components/bl0942/* @dbuezas @dwmw2
esphome/components/ble_client/* @buxtronix @clydebarrow
esphome/components/ble_nus/* @tomaszduda23
esphome/components/bluetooth_proxy/* @bdraco @jesserockz
esphome/components/bme280_base/* @esphome/core
esphome/components/bme280_spi/* @apbodrov
@@ -185,9 +185,7 @@ def choose_upload_log_host(
        else:
            resolved.append(device)
    if not resolved:
        raise EsphomeError(
            f"All specified devices {defaults} could not be resolved. Is the device connected to the network?"
        )
        _LOGGER.error("All specified devices: %s could not be resolved.", defaults)
    return resolved

    # No devices specified, show interactive chooser
@@ -506,7 +506,7 @@ message ListEntitiesLightResponse {
  string name = 3;
  reserved 4;  // Deprecated: was string unique_id

  repeated ColorMode supported_color_modes = 12 [(container_pointer_no_template) = "light::ColorModeMask"];
  repeated ColorMode supported_color_modes = 12 [(container_pointer) = "std::set<light::ColorMode>"];
  // next four supports_* are for legacy clients, newer clients should use color modes
  // Deprecated in API version 1.6
  bool legacy_supports_brightness = 5 [deprecated=true];
@@ -453,6 +453,7 @@ uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *
                                             bool is_single) {
  auto *light = static_cast<light::LightState *>(entity);
  LightStateResponse resp;
  auto traits = light->get_traits();
  auto values = light->remote_values;
  auto color_mode = values.get_color_mode();
  resp.state = values.is_on();
@@ -476,8 +477,7 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c
  auto *light = static_cast<light::LightState *>(entity);
  ListEntitiesLightResponse msg;
  auto traits = light->get_traits();
  // Pass pointer to ColorModeMask so the iterator can encode actual ColorMode enum values
  msg.supported_color_modes = &traits.get_supported_color_modes();
  msg.supported_color_modes = &traits.get_supported_color_modes_for_api_();
  if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) ||
      traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) {
    msg.min_mireds = traits.get_min_mireds();
@@ -661,12 +661,11 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
  ListEntitiesClimateResponse msg;
  auto traits = climate->get_traits();
  // Flags set for backward compatibility, deprecated in 2025.11.0
  msg.supports_current_temperature = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
  msg.supports_current_humidity = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
  msg.supports_two_point_target_temperature = traits.has_feature_flags(
      climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
  msg.supports_target_humidity = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);
  msg.supports_action = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION);
  msg.supports_current_temperature = traits.get_supports_current_temperature();
  msg.supports_current_humidity = traits.get_supports_current_humidity();
  msg.supports_two_point_target_temperature = traits.get_supports_two_point_target_temperature();
  msg.supports_target_humidity = traits.get_supports_target_humidity();
  msg.supports_action = traits.get_supports_action();
  // Current feature flags and other supported parameters
  msg.feature_flags = traits.get_feature_flags();
  msg.supported_modes = &traits.get_supported_modes_for_api_();
@@ -1082,8 +1081,13 @@ void APIConnection::on_get_time_response(const GetTimeResponse &value) {
  homeassistant::global_homeassistant_time->set_epoch_time(value.epoch_seconds);
#ifdef USE_TIME_TIMEZONE
  if (value.timezone_len > 0) {
    homeassistant::global_homeassistant_time->set_timezone(reinterpret_cast<const char *>(value.timezone),
                                                           value.timezone_len);
    const std::string &current_tz = homeassistant::global_homeassistant_time->get_timezone();
    // Compare without allocating a string
    if (current_tz.length() != value.timezone_len ||
        memcmp(current_tz.c_str(), value.timezone, value.timezone_len) != 0) {
      homeassistant::global_homeassistant_time->set_timezone(
          std::string(reinterpret_cast<const char *>(value.timezone), value.timezone_len));
    }
  }
#endif
}
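The timezone change above compares the incoming (pointer, length) bytes against the stored std::string before assigning, so the common "unchanged" case allocates nothing. A minimal standalone sketch of the same length-then-memcmp pattern (function names here are illustrative, not from the ESPHome API):

#include <cstring>
#include <string>

// Returns true when `current` already holds exactly the bytes in tz[0..len).
// Checking the length first makes the unchanged case one integer compare
// plus one memcmp, with no heap allocation.
static bool timezone_unchanged(const std::string &current, const char *tz, size_t len) {
  return current.length() == len && memcmp(current.c_str(), tz, len) == 0;
}

void maybe_update(std::string &current, const char *tz, size_t len) {
  if (!timezone_unchanged(current, tz, len)) {
    current.assign(tz, len);  // allocate only when the value actually changed
  }
}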
@@ -70,14 +70,4 @@ extend google.protobuf.FieldOptions {
  // init(size) before adding elements. This eliminates std::vector template overhead
  // and is ideal when the exact size is known before populating the array.
  optional bool fixed_vector = 50013 [default=false];

  // container_pointer_no_template: Use a non-template container type for repeated fields
  // Similar to container_pointer, but for containers that don't take template parameters.
  // The container type is used as-is without appending element type.
  // The container must have:
  //   - begin() and end() methods returning iterators
  //   - empty() method
  // Example: [(container_pointer_no_template) = "light::ColorModeMask"]
  //   generates: const light::ColorModeMask *supported_color_modes{};
  optional string container_pointer_no_template = 50014;
}
@@ -790,7 +790,7 @@ class ListEntitiesLightResponse final : public InfoResponseProtoMessage {
#ifdef HAS_PROTO_MESSAGE_DUMP
  const char *message_name() const override { return "list_entities_light_response"; }
#endif
  const light::ColorModeMask *supported_color_modes{};
  const std::set<light::ColorMode> *supported_color_modes{};
  float min_mireds{0.0f};
  float max_mireds{0.0f};
  std::vector<std::string> effects{};
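For context on the container_pointer field above: the generated message stores only a pointer into the entity's own container, so nothing is copied at encode time. A small sketch of the idea under assumed, simplified types (Mode and ListModesMessage are invented for illustration):

#include <cstdint>
#include <cstdio>
#include <set>

enum class Mode : uint8_t { ON_OFF = 1, BRIGHTNESS = 2, RGB = 6 };

// The message holds a pointer; the traits object owns the set.
struct ListModesMessage {
  const std::set<Mode> *supported_modes{};
};

int main() {
  std::set<Mode> traits_modes = {Mode::ON_OFF, Mode::RGB};
  ListModesMessage msg;
  msg.supported_modes = &traits_modes;  // no copy when encoding
  for (Mode m : *msg.supported_modes) {
    std::printf("mode %u\n", static_cast<unsigned>(m));
  }
}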
@@ -1,29 +0,0 @@
import esphome.codegen as cg
from esphome.components.zephyr import zephyr_add_prj_conf
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_LOGS, CONF_TYPE

AUTO_LOAD = ["zephyr_ble_server"]
CODEOWNERS = ["@tomaszduda23"]

ble_nus_ns = cg.esphome_ns.namespace("ble_nus")
BLENUS = ble_nus_ns.class_("BLENUS", cg.Component)

CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(BLENUS),
            cv.Optional(CONF_TYPE, default=CONF_LOGS): cv.one_of(
                *[CONF_LOGS], lower=True
            ),
        }
    ).extend(cv.COMPONENT_SCHEMA),
    cv.only_with_framework("zephyr"),
)


async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    zephyr_add_prj_conf("BT_NUS", True)
    cg.add(var.set_expose_log(config[CONF_TYPE] == CONF_LOGS))
    await cg.register_component(var, config)
@@ -1,157 +0,0 @@
#ifdef USE_ZEPHYR
#include "ble_nus.h"
#include <zephyr/kernel.h>
#include <bluetooth/services/nus.h>
#include "esphome/core/log.h"
#ifdef USE_LOGGER
#include "esphome/components/logger/logger.h"
#include "esphome/core/application.h"
#endif
#include <zephyr/sys/ring_buffer.h>

namespace esphome::ble_nus {

constexpr size_t BLE_TX_BUF_SIZE = 2048;

// NOLINTBEGIN(cppcoreguidelines-avoid-non-const-global-variables)
BLENUS *global_ble_nus;
RING_BUF_DECLARE(global_ble_tx_ring_buf, BLE_TX_BUF_SIZE);
// NOLINTEND(cppcoreguidelines-avoid-non-const-global-variables)

static const char *const TAG = "ble_nus";

size_t BLENUS::write_array(const uint8_t *data, size_t len) {
  if (atomic_get(&this->tx_status_) == TX_DISABLED) {
    return 0;
  }
  return ring_buf_put(&global_ble_tx_ring_buf, data, len);
}

void BLENUS::connected(bt_conn *conn, uint8_t err) {
  if (err == 0) {
    global_ble_nus->conn_.store(bt_conn_ref(conn));
  }
}

void BLENUS::disconnected(bt_conn *conn, uint8_t reason) {
  if (global_ble_nus->conn_) {
    bt_conn_unref(global_ble_nus->conn_.load());
    // Connection array is global static.
    // Reference can be kept even if disconnected.
  }
}

void BLENUS::tx_callback(bt_conn *conn) {
  atomic_cas(&global_ble_nus->tx_status_, TX_BUSY, TX_ENABLED);
  ESP_LOGVV(TAG, "Sent operation completed");
}

void BLENUS::send_enabled_callback(bt_nus_send_status status) {
  switch (status) {
    case BT_NUS_SEND_STATUS_ENABLED:
      atomic_set(&global_ble_nus->tx_status_, TX_ENABLED);
#ifdef USE_LOGGER
      if (global_ble_nus->expose_log_) {
        App.schedule_dump_config();
      }
#endif
      ESP_LOGD(TAG, "NUS notification has been enabled");
      break;
    case BT_NUS_SEND_STATUS_DISABLED:
      atomic_set(&global_ble_nus->tx_status_, TX_DISABLED);
      ESP_LOGD(TAG, "NUS notification has been disabled");
      break;
  }
}

void BLENUS::rx_callback(bt_conn *conn, const uint8_t *const data, uint16_t len) {
  ESP_LOGD(TAG, "Received %d bytes.", len);
}

void BLENUS::setup() {
  bt_nus_cb callbacks = {
      .received = rx_callback,
      .sent = tx_callback,
      .send_enabled = send_enabled_callback,
  };

  bt_nus_init(&callbacks);

  static bt_conn_cb conn_callbacks = {
      .connected = BLENUS::connected,
      .disconnected = BLENUS::disconnected,
  };

  bt_conn_cb_register(&conn_callbacks);

  global_ble_nus = this;
#ifdef USE_LOGGER
  if (logger::global_logger != nullptr && this->expose_log_) {
    logger::global_logger->add_on_log_callback(
        [this](int level, const char *tag, const char *message, size_t message_len) {
          this->write_array(reinterpret_cast<const uint8_t *>(message), message_len);
          const char c = '\n';
          this->write_array(reinterpret_cast<const uint8_t *>(&c), 1);
        });
  }

#endif
}

void BLENUS::dump_config() {
  ESP_LOGCONFIG(TAG, "ble nus:");
  ESP_LOGCONFIG(TAG, "  log: %s", YESNO(this->expose_log_));
  uint32_t mtu = 0;
  bt_conn *conn = this->conn_.load();
  if (conn) {
    mtu = bt_nus_get_mtu(conn);
  }
  ESP_LOGCONFIG(TAG, "  MTU: %u", mtu);
}

void BLENUS::loop() {
  if (ring_buf_is_empty(&global_ble_tx_ring_buf)) {
    return;
  }

  if (!atomic_cas(&this->tx_status_, TX_ENABLED, TX_BUSY)) {
    if (atomic_get(&this->tx_status_) == TX_DISABLED) {
      ring_buf_reset(&global_ble_tx_ring_buf);
    }
    return;
  }

  bt_conn *conn = this->conn_.load();
  if (conn) {
    conn = bt_conn_ref(conn);
  }

  if (nullptr == conn) {
    atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED);
    return;
  }

  uint32_t req_len = bt_nus_get_mtu(conn);

  uint8_t *buf;
  uint32_t size = ring_buf_get_claim(&global_ble_tx_ring_buf, &buf, req_len);

  int err, err2;

  err = bt_nus_send(conn, buf, size);
  err2 = ring_buf_get_finish(&global_ble_tx_ring_buf, size);
  if (err2) {
    // It should not happen.
    ESP_LOGE(TAG, "Size %u exceeds valid bytes in the ring buffer (%d error)", size, err2);
  }
  if (err == 0) {
    ESP_LOGVV(TAG, "Sent %d bytes", size);
  } else {
    ESP_LOGE(TAG, "Failed to send %d bytes (%d error)", size, err);
    atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED);
  }
  bt_conn_unref(conn);
}

}  // namespace esphome::ble_nus
#endif
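The loop above serializes transmissions through a three-state flag: DISABLED, ENABLED, BUSY. Only one caller can win the ENABLED-to-BUSY transition, and a completion callback releases it. A reduced model of that compare-and-swap state machine using std::atomic (this assumes Zephyr's atomic_cas behaves like a strong compare-exchange; it is a sketch, not the component itself):

#include <atomic>

enum TxStatus { TX_DISABLED, TX_ENABLED, TX_BUSY };

std::atomic<int> tx_status{TX_DISABLED};

// Try to claim the transmitter; only one caller can move ENABLED -> BUSY.
bool try_begin_send() {
  int expected = TX_ENABLED;
  return tx_status.compare_exchange_strong(expected, TX_BUSY);
}

// Completion path: BUSY -> ENABLED, but never resurrect a DISABLED link.
void on_send_complete() {
  int expected = TX_BUSY;
  tx_status.compare_exchange_strong(expected, TX_ENABLED);
}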
@@ -1,37 +0,0 @@
#pragma once
#ifdef USE_ZEPHYR
#include "esphome/core/defines.h"
#include "esphome/core/component.h"
#include <shell/shell_bt_nus.h>
#include <atomic>

namespace esphome::ble_nus {

class BLENUS : public Component {
  enum TxStatus {
    TX_DISABLED,
    TX_ENABLED,
    TX_BUSY,
  };

 public:
  void setup() override;
  void dump_config() override;
  void loop() override;
  size_t write_array(const uint8_t *data, size_t len);
  void set_expose_log(bool expose_log) { this->expose_log_ = expose_log; }

 protected:
  static void send_enabled_callback(bt_nus_send_status status);
  static void tx_callback(bt_conn *conn);
  static void rx_callback(bt_conn *conn, const uint8_t *data, uint16_t len);
  static void connected(bt_conn *conn, uint8_t err);
  static void disconnected(bt_conn *conn, uint8_t reason);

  std::atomic<bt_conn *> conn_ = nullptr;
  bool expose_log_ = false;
  atomic_t tx_status_ = ATOMIC_INIT(TX_DISABLED);
};

}  // namespace esphome::ble_nus
#endif
@@ -155,12 +155,16 @@ esp32_ble_tracker::AdvertisementParserType BluetoothProxy::get_advertisement_par
BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool reserve) {
  for (uint8_t i = 0; i < this->connection_count_; i++) {
    auto *connection = this->connections_[i];
    uint64_t conn_addr = connection->get_address();

    if (conn_addr == address)
    if (connection->get_address() == address)
      return connection;
  }

  if (reserve && conn_addr == 0) {
  if (!reserve)
    return nullptr;

  for (uint8_t i = 0; i < this->connection_count_; i++) {
    auto *connection = this->connections_[i];
    if (connection->get_address() == 0) {
      connection->send_service_ = INIT_SENDING_SERVICES;
      connection->set_address(address);
      // All connections must start at INIT
@@ -171,6 +175,7 @@ BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool rese
      return connection;
    }
  }

  return nullptr;
}
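A compact model of the lookup-then-reserve pattern in get_connection_ above: scan once for an existing match, and only if the caller asked to reserve, scan again for a free slot. The slot type and sentinel here are placeholders, not the proxy's real types:

#include <array>
#include <cstdint>

struct Slot {
  uint64_t address = 0;  // 0 means "free"
};

Slot *get_slot(std::array<Slot, 3> &slots, uint64_t address, bool reserve) {
  for (auto &slot : slots) {  // pass 1: an existing connection wins
    if (slot.address == address)
      return &slot;
  }
  if (!reserve)
    return nullptr;
  for (auto &slot : slots) {  // pass 2: claim the first free slot
    if (slot.address == 0) {
      slot.address = address;
      return &slot;
    }
  }
  return nullptr;  // all slots in use
}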
@@ -41,7 +41,7 @@ CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(BME680BSECComponent),
            cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
            cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
            cv.Optional(CONF_IAQ_MODE, default="STATIC"): cv.enum(
                IAQ_MODE_OPTIONS, upper=True
            ),
@@ -139,7 +139,7 @@ CONFIG_SCHEMA_BASE = (
        cv.Optional(CONF_SUPPLY_VOLTAGE, default="3.3V"): cv.enum(
            VOLTAGE_OPTIONS, upper=True
        ),
        cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
        cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
        cv.Optional(
            CONF_STATE_SAVE_INTERVAL, default="6hours"
        ): cv.positive_time_period_minutes,
@@ -1,8 +1,8 @@
#pragma once

#include <set>
#include "climate_mode.h"
#include "esphome/core/helpers.h"
#include "climate_mode.h"
#include <set>

namespace esphome {
@@ -109,12 +109,44 @@ class ClimateTraits {

  void set_supported_modes(std::set<ClimateMode> modes) { this->supported_modes_ = std::move(modes); }
  void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_auto_mode(bool supports_auto_mode) { set_mode_support_(CLIMATE_MODE_AUTO, supports_auto_mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_cool_mode(bool supports_cool_mode) { set_mode_support_(CLIMATE_MODE_COOL, supports_cool_mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_heat_mode(bool supports_heat_mode) { set_mode_support_(CLIMATE_MODE_HEAT, supports_heat_mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_heat_cool_mode(bool supported) { set_mode_support_(CLIMATE_MODE_HEAT_COOL, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_fan_only_mode(bool supports_fan_only_mode) {
    set_mode_support_(CLIMATE_MODE_FAN_ONLY, supports_fan_only_mode);
  }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_dry_mode(bool supports_dry_mode) { set_mode_support_(CLIMATE_MODE_DRY, supports_dry_mode); }
  bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); }
  const std::set<ClimateMode> &get_supported_modes() const { return this->supported_modes_; }

  void set_supported_fan_modes(std::set<ClimateFanMode> modes) { this->supported_fan_modes_ = std::move(modes); }
  void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); }
  void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_on(bool supported) { set_fan_mode_support_(CLIMATE_FAN_ON, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_off(bool supported) { set_fan_mode_support_(CLIMATE_FAN_OFF, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_auto(bool supported) { set_fan_mode_support_(CLIMATE_FAN_AUTO, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_low(bool supported) { set_fan_mode_support_(CLIMATE_FAN_LOW, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_medium(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MEDIUM, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_high(bool supported) { set_fan_mode_support_(CLIMATE_FAN_HIGH, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_middle(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MIDDLE, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_focus(bool supported) { set_fan_mode_support_(CLIMATE_FAN_FOCUS, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_diffuse(bool supported) { set_fan_mode_support_(CLIMATE_FAN_DIFFUSE, supported); }
  bool supports_fan_mode(ClimateFanMode fan_mode) const { return this->supported_fan_modes_.count(fan_mode); }
  bool get_supports_fan_modes() const {
    return !this->supported_fan_modes_.empty() || !this->supported_custom_fan_modes_.empty();
@@ -146,6 +178,16 @@ class ClimateTraits {

  void set_supported_swing_modes(std::set<ClimateSwingMode> modes) { this->supported_swing_modes_ = std::move(modes); }
  void add_supported_swing_mode(ClimateSwingMode mode) { this->supported_swing_modes_.insert(mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
  void set_supports_swing_mode_off(bool supported) { set_swing_mode_support_(CLIMATE_SWING_OFF, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
  void set_supports_swing_mode_both(bool supported) { set_swing_mode_support_(CLIMATE_SWING_BOTH, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
  void set_supports_swing_mode_vertical(bool supported) { set_swing_mode_support_(CLIMATE_SWING_VERTICAL, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
  void set_supports_swing_mode_horizontal(bool supported) {
    set_swing_mode_support_(CLIMATE_SWING_HORIZONTAL, supported);
  }
  bool supports_swing_mode(ClimateSwingMode swing_mode) const { return this->supported_swing_modes_.count(swing_mode); }
  bool get_supports_swing_modes() const { return !this->supported_swing_modes_.empty(); }
  const std::set<ClimateSwingMode> &get_supported_swing_modes() const { return this->supported_swing_modes_; }
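Related background: the climate hunk in api_connection.cpp earlier replaces these per-feature getters with a single feature-flag word. The general bit-flag pattern looks like the sketch below; the flag names are invented, and whether ESPHome's has_feature_flags tests any-of or all-of is not shown in this diff, so the sketch uses any-of as an assumption:

#include <cstdint>

enum ClimateFeature : uint32_t {
  SUPPORTS_CURRENT_TEMPERATURE = 1u << 0,
  SUPPORTS_TWO_POINT_TARGET = 1u << 1,
  REQUIRES_TWO_POINT_TARGET = 1u << 2,
};

// Any-of test: true when at least one of the requested bits is set.
constexpr bool has_any_feature(uint32_t flags, uint32_t wanted) {
  return (flags & wanted) != 0;
}

static_assert(has_any_feature(SUPPORTS_CURRENT_TEMPERATURE | REQUIRES_TWO_POINT_TARGET,
                              SUPPORTS_TWO_POINT_TARGET | REQUIRES_TWO_POINT_TARGET),
              "OR-ing two flags matches when either is present");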
@@ -6,7 +6,6 @@
#include <freertos/FreeRTOS.h>
#include <freertos/task.h>
#include <esp_idf_version.h>
#include <esp_ota_ops.h>
#include <esp_task_wdt.h>
#include <esp_timer.h>
#include <soc/rtc.h>
@@ -53,16 +52,6 @@ void arch_init() {
  disableCore1WDT();
#endif
#endif

  // If the bootloader was compiled with CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE the current
  // partition will get rolled back unless it is marked as valid.
  esp_ota_img_states_t state;
  const esp_partition_t *running = esp_ota_get_running_partition();
  if (esp_ota_get_state_partition(running, &state) == ESP_OK) {
    if (state == ESP_OTA_IMG_PENDING_VERIFY) {
      esp_ota_mark_app_valid_cancel_rollback();
    }
  }
}
void IRAM_ATTR HOT arch_feed_wdt() { esp_task_wdt_reset(); }
@@ -61,7 +61,12 @@ class BLEClientBase : public espbt::ESPBTClient, public Component {
      this->address_str_ = "";
    } else {
      char buf[18];
      format_mac_addr_upper(this->remote_bda_, buf);
      uint8_t mac[6] = {
          (uint8_t) ((this->address_ >> 40) & 0xff), (uint8_t) ((this->address_ >> 32) & 0xff),
          (uint8_t) ((this->address_ >> 24) & 0xff), (uint8_t) ((this->address_ >> 16) & 0xff),
          (uint8_t) ((this->address_ >> 8) & 0xff),  (uint8_t) ((this->address_ >> 0) & 0xff),
      };
      format_mac_addr_upper(mac, buf);
      this->address_str_ = buf;
    }
  }
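The snippet above derives the printable address from the 48-bit integer by shifting out one byte at a time, most significant first. The same decomposition in a self-contained sketch (the formatting helper here is standard snprintf, not ESPHome's format_mac_addr_upper):

#include <cstdint>
#include <cstdio>

// Split a 48-bit BLE address (display order, most significant byte first)
// into 6 bytes and format it as AA:BB:CC:DD:EE:FF into an 18-byte buffer.
void format_address(uint64_t address, char out[18]) {
  uint8_t mac[6];
  for (int i = 0; i < 6; i++) {
    mac[i] = static_cast<uint8_t>((address >> (8 * (5 - i))) & 0xff);
  }
  std::snprintf(out, 18, "%02X:%02X:%02X:%02X:%02X:%02X",
                mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
}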
@@ -14,7 +14,7 @@ void Kuntze::on_modbus_data(const std::vector<uint8_t> &data) {
  auto get_16bit = [&](int i) -> uint16_t { return (uint16_t(data[i * 2]) << 8) | uint16_t(data[i * 2 + 1]); };

  this->waiting_ = false;
  ESP_LOGV(TAG, "Data: %s", format_hex_pretty(data).c_str());
  ESP_LOGV(TAG, "Data: %s", hexencode(data).c_str());

  float value = (float) get_16bit(0);
  for (int i = 0; i < data[3]; i++)
@@ -1,11 +1,11 @@
#pragma once

#include "esphome/core/component.h"
#include "esphome/core/defines.h"
#include "esphome/core/color.h"
#include "esp_color_correction.h"
#include "esp_color_view.h"
#include "esp_range_view.h"
#include "esphome/core/color.h"
#include "esphome/core/component.h"
#include "esphome/core/defines.h"
#include "light_output.h"
#include "light_state.h"
#include "transformers.h"
@@ -17,6 +17,8 @@
namespace esphome {
namespace light {

using ESPColor ESPDEPRECATED("esphome::light::ESPColor is deprecated, use esphome::Color instead.", "v1.21") = Color;

/// Convert the color information from a `LightColorValues` object to a `Color` object (does not apply brightness).
Color color_from_light_color_values(LightColorValues val);
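The ESPColor line above is the deprecated-alias idiom: the old name keeps compiling while every use warns. In standard C++ the same effect comes from the [[deprecated]] attribute on a type alias, as in this minimal sketch (Color here is a stand-in struct):

struct Color {
  unsigned char r{0}, g{0}, b{0};
};

// Old name kept compiling, but every use emits a compiler warning.
using ESPColor [[deprecated("use Color instead")]] = Color;

int main() {
  Color c{255, 0, 0};  // fine
  // ESPColor legacy{};  // would compile, with a deprecation warning
  (void) c;
}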
@@ -104,200 +104,5 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) {
  return static_cast<ColorMode>(static_cast<uint8_t>(lhs) | static_cast<uint8_t>(rhs));
}

// Type alias for raw color mode bitmask values
using color_mode_bitmask_t = uint16_t;

// Constants for ColorMode count and bit range
static constexpr int COLOR_MODE_COUNT = 10;  // UNKNOWN through RGB_COLD_WARM_WHITE
static constexpr int MAX_BIT_INDEX = sizeof(color_mode_bitmask_t) * 8;  // Number of bits in bitmask type

// Compile-time array of all ColorMode values in declaration order
// Bit positions (0-9) map directly to enum declaration order
static constexpr ColorMode COLOR_MODES[COLOR_MODE_COUNT] = {
    ColorMode::UNKNOWN,                // bit 0
    ColorMode::ON_OFF,                 // bit 1
    ColorMode::BRIGHTNESS,             // bit 2
    ColorMode::WHITE,                  // bit 3
    ColorMode::COLOR_TEMPERATURE,      // bit 4
    ColorMode::COLD_WARM_WHITE,        // bit 5
    ColorMode::RGB,                    // bit 6
    ColorMode::RGB_WHITE,              // bit 7
    ColorMode::RGB_COLOR_TEMPERATURE,  // bit 8
    ColorMode::RGB_COLD_WARM_WHITE,    // bit 9
};

/// Map ColorMode enum values to bit positions (0-9)
/// Bit positions follow the enum declaration order
static constexpr int mode_to_bit(ColorMode mode) {
  // Linear search through COLOR_MODES array
  // Compiler optimizes this to efficient code since array is constexpr
  for (int i = 0; i < COLOR_MODE_COUNT; ++i) {
    if (COLOR_MODES[i] == mode)
      return i;
  }
  return 0;
}

/// Map bit positions (0-9) to ColorMode enum values
/// Bit positions follow the enum declaration order
static constexpr ColorMode bit_to_mode(int bit) {
  // Direct lookup in COLOR_MODES array
  return (bit >= 0 && bit < COLOR_MODE_COUNT) ? COLOR_MODES[bit] : ColorMode::UNKNOWN;
}

/// Helper to compute capability bitmask at compile time
static constexpr color_mode_bitmask_t compute_capability_bitmask(ColorCapability capability) {
  color_mode_bitmask_t mask = 0;
  uint8_t cap_bit = static_cast<uint8_t>(capability);

  // Check each ColorMode to see if it has this capability
  for (int bit = 0; bit < COLOR_MODE_COUNT; ++bit) {
    uint8_t mode_val = static_cast<uint8_t>(bit_to_mode(bit));
    if ((mode_val & cap_bit) != 0) {
      mask |= (1 << bit);
    }
  }
  return mask;
}

// Number of ColorCapability enum values
static constexpr int COLOR_CAPABILITY_COUNT = 6;

/// Compile-time lookup table mapping ColorCapability to bitmask
/// This array is computed at compile time using constexpr
static constexpr color_mode_bitmask_t CAPABILITY_BITMASKS[] = {
    compute_capability_bitmask(ColorCapability::ON_OFF),             // 1 << 0
    compute_capability_bitmask(ColorCapability::BRIGHTNESS),         // 1 << 1
    compute_capability_bitmask(ColorCapability::WHITE),              // 1 << 2
    compute_capability_bitmask(ColorCapability::COLOR_TEMPERATURE),  // 1 << 3
    compute_capability_bitmask(ColorCapability::COLD_WARM_WHITE),    // 1 << 4
    compute_capability_bitmask(ColorCapability::RGB),                // 1 << 5
};

/// Bitmask for storing a set of ColorMode values efficiently.
/// Replaces std::set<ColorMode> to eliminate red-black tree overhead (~586 bytes).
class ColorModeMask {
 public:
  constexpr ColorModeMask() = default;

  /// Support initializer list syntax: {ColorMode::RGB, ColorMode::WHITE}
  constexpr ColorModeMask(std::initializer_list<ColorMode> modes) {
    for (auto mode : modes) {
      this->add(mode);
    }
  }

  constexpr void add(ColorMode mode) { this->mask_ |= (1 << mode_to_bit(mode)); }

  /// Add multiple modes at once using initializer list
  constexpr void add(std::initializer_list<ColorMode> modes) {
    for (auto mode : modes) {
      this->add(mode);
    }
  }

  constexpr bool contains(ColorMode mode) const { return (this->mask_ & (1 << mode_to_bit(mode))) != 0; }

  constexpr size_t size() const {
    // Count set bits using Brian Kernighan's algorithm
    // More efficient for sparse bitmasks (typical case: 2-4 modes out of 10)
    uint16_t n = this->mask_;
    size_t count = 0;
    while (n) {
      n &= n - 1;  // Clear the least significant set bit
      count++;
    }
    return count;
  }

  constexpr bool empty() const { return this->mask_ == 0; }

  /// Iterator support for API encoding
  class Iterator {
   public:
    using iterator_category = std::forward_iterator_tag;
    using value_type = ColorMode;
    using difference_type = std::ptrdiff_t;
    using pointer = const ColorMode *;
    using reference = ColorMode;

    constexpr Iterator(color_mode_bitmask_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit_(); }

    constexpr ColorMode operator*() const { return bit_to_mode(bit_); }

    constexpr Iterator &operator++() {
      ++bit_;
      advance_to_next_set_bit_();
      return *this;
    }

    constexpr bool operator==(const Iterator &other) const { return bit_ == other.bit_; }

    constexpr bool operator!=(const Iterator &other) const { return !(*this == other); }

   private:
    constexpr void advance_to_next_set_bit_() { bit_ = ColorModeMask::find_next_set_bit(mask_, bit_); }

    color_mode_bitmask_t mask_;
    int bit_;
  };

  constexpr Iterator begin() const { return Iterator(mask_, 0); }
  constexpr Iterator end() const { return Iterator(mask_, MAX_BIT_INDEX); }

  /// Get the raw bitmask value for API encoding
  constexpr color_mode_bitmask_t get_mask() const { return this->mask_; }

  /// Find the next set bit in a bitmask starting from a given position
  /// Returns the bit position, or MAX_BIT_INDEX if no more bits are set
  static constexpr int find_next_set_bit(color_mode_bitmask_t mask, int start_bit) {
    int bit = start_bit;
    while (bit < MAX_BIT_INDEX && !(mask & (1 << bit))) {
      ++bit;
    }
    return bit;
  }

  /// Find the first set bit in a bitmask and return the corresponding ColorMode
  /// Used for optimizing compute_color_mode_() intersection logic
  static constexpr ColorMode first_mode_from_mask(color_mode_bitmask_t mask) {
    return bit_to_mode(find_next_set_bit(mask, 0));
  }

  /// Check if a ColorMode is present in a raw bitmask value
  /// Useful for checking intersection results without creating a temporary ColorModeMask
  static constexpr bool mask_contains(color_mode_bitmask_t mask, ColorMode mode) {
    return (mask & (1 << mode_to_bit(mode))) != 0;
  }

  /// Check if any mode in the bitmask has a specific capability
  /// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB)
  bool has_capability(ColorCapability capability) const {
    // Lookup the pre-computed bitmask for this capability and check intersection with our mask
    // ColorCapability values: 1, 2, 4, 8, 16, 32 -> array indices: 0, 1, 2, 3, 4, 5
    // We need to convert the power-of-2 value to an index
    uint8_t cap_val = static_cast<uint8_t>(capability);
#if defined(__GNUC__) || defined(__clang__)
    // Use compiler intrinsic for efficient bit position lookup (O(1) vs O(log n))
    int index = __builtin_ctz(cap_val);
#else
    // Fallback for compilers without __builtin_ctz
    int index = 0;
    while (cap_val > 1) {
      cap_val >>= 1;
      ++index;
    }
#endif
    return (this->mask_ & CAPABILITY_BITMASKS[index]) != 0;
  }

 private:
  // Using uint16_t instead of uint32_t for more efficient iteration (fewer bits to scan).
  // Currently only 10 ColorMode values exist, so 16 bits is sufficient.
  // Can be changed to uint32_t if more than 16 color modes are needed in the future.
  // Note: Due to struct padding, uint16_t and uint32_t result in same LightTraits size (12 bytes).
  color_mode_bitmask_t mask_{0};
};

}  // namespace light
}  // namespace esphome
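As a standalone illustration of what ColorModeMask buys over std::set, here is a minimal enum-set-in-a-word with the same three primitives: add, contains, and a Kernighan popcount. This is a simplified sketch with an invented Mode enum, not the class above:

#include <cstddef>
#include <cstdint>

enum class Mode : uint8_t { A, B, C, D };  // bit index == enumerator order

struct ModeMask {
  uint16_t bits = 0;

  constexpr void add(Mode m) { bits |= uint16_t(1u << static_cast<unsigned>(m)); }
  constexpr bool contains(Mode m) const { return (bits >> static_cast<unsigned>(m)) & 1u; }

  // Brian Kernighan: each iteration clears the lowest set bit, so the loop
  // runs once per member rather than once per bit position.
  constexpr size_t size() const {
    uint16_t n = bits;
    size_t count = 0;
    while (n) {
      n &= n - 1;
      ++count;
    }
    return count;
  }
};

static_assert([] {
  ModeMask m;
  m.add(Mode::A);
  m.add(Mode::C);
  return m.contains(Mode::C) && !m.contains(Mode::B) && m.size() == 2;
}(), "two members stored in one 16-bit word");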
@@ -406,7 +406,7 @@ void LightCall::transform_parameters_() {
  }
}
ColorMode LightCall::compute_color_mode_() {
  const auto &supported_modes = this->parent_->get_traits().get_supported_color_modes();
  auto supported_modes = this->parent_->get_traits().get_supported_color_modes();
  int supported_count = supported_modes.size();

  // Some lights don't support any color modes (e.g. monochromatic light), leave it at unknown.
@@ -425,19 +425,20 @@ ColorMode LightCall::compute_color_mode_() {
  // If no color mode is specified, we try to guess the color mode. This is needed for backward compatibility to
  // pre-colormode clients and automations, but also for the MQTT API, where HA doesn't let us know which color mode
  // was used for some reason.
  // Compute intersection of suitable and supported modes using bitwise AND
  color_mode_bitmask_t intersection = this->get_suitable_color_modes_mask_() & supported_modes.get_mask();
  std::set<ColorMode> suitable_modes = this->get_suitable_color_modes_();

  // Don't change if the current mode is in the intersection (suitable AND supported)
  if (ColorModeMask::mask_contains(intersection, current_mode)) {
  // Don't change if the current mode is suitable.
  if (suitable_modes.count(current_mode) > 0) {
    ESP_LOGI(TAG, "'%s': color mode not specified; retaining %s", this->parent_->get_name().c_str(),
             LOG_STR_ARG(color_mode_to_human(current_mode)));
    return current_mode;
  }

  // Use the preferred suitable mode.
  if (intersection != 0) {
    ColorMode mode = ColorModeMask::first_mode_from_mask(intersection);
  for (auto mode : suitable_modes) {
    if (supported_modes.count(mode) == 0)
      continue;

    ESP_LOGI(TAG, "'%s': color mode not specified; using %s", this->parent_->get_name().c_str(),
             LOG_STR_ARG(color_mode_to_human(mode)));
    return mode;
@@ -450,7 +451,7 @@ ColorMode LightCall::compute_color_mode_() {
           LOG_STR_ARG(color_mode_to_human(color_mode)));
  return color_mode;
}
color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() {
std::set<ColorMode> LightCall::get_suitable_color_modes_() {
  bool has_white = this->has_white() && this->white_ > 0.0f;
  bool has_ct = this->has_color_temperature();
  bool has_cwww =
@@ -458,44 +459,36 @@ color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() {
  bool has_rgb = (this->has_color_brightness() && this->color_brightness_ > 0.0f) ||
                 (this->has_red() || this->has_green() || this->has_blue());

// Build key from flags: [rgb][cwww][ct][white]
#define KEY(white, ct, cwww, rgb) ((white) << 0 | (ct) << 1 | (cwww) << 2 | (rgb) << 3)

  uint8_t key = KEY(has_white, has_ct, has_cwww, has_rgb);

  switch (key) {
    case KEY(true, false, false, false):  // white only
      return ColorModeMask({ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE,
                            ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE})
          .get_mask();
      return {ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE,
              ColorMode::RGB_COLD_WARM_WHITE};
    case KEY(false, true, false, false):  // ct only
      return ColorModeMask({ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE,
                            ColorMode::RGB_COLD_WARM_WHITE})
          .get_mask();
      return {ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE,
              ColorMode::RGB_COLD_WARM_WHITE};
    case KEY(true, true, false, false):  // white + ct
      return ColorModeMask(
                 {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE})
          .get_mask();
      return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE};
    case KEY(false, false, true, false):  // cwww only
      return ColorModeMask({ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask();
      return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE};
    case KEY(false, false, false, false):  // none
      return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE,
                            ColorMode::RGB, ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE})
          .get_mask();
      return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, ColorMode::RGB,
              ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE};
    case KEY(true, false, false, true):  // rgb + white
      return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE})
          .get_mask();
      return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE};
    case KEY(false, true, false, true):  // rgb + ct
    case KEY(true, true, false, true):   // rgb + white + ct
      return ColorModeMask({ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask();
      return {ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE};
    case KEY(false, false, true, true):  // rgb + cwww
      return ColorModeMask({ColorMode::RGB_COLD_WARM_WHITE}).get_mask();
      return {ColorMode::RGB_COLD_WARM_WHITE};
    case KEY(false, false, false, true):  // rgb only
      return ColorModeMask({ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE,
                            ColorMode::RGB_COLD_WARM_WHITE})
          .get_mask();
      return {ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE};
    default:
      return 0;  // conflicting flags
      return {};  // conflicting flags
  }

#undef KEY
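The KEY macro above packs four booleans into a 4-bit key so one switch replaces sixteen nested if/else combinations. The same technique in isolation, with generic flag names (a sketch, not the light code):

#include <cstdint>
#include <cstdio>

// Pack four independent flags into bits 0..3.
constexpr uint8_t key(bool w, bool ct, bool cwww, bool rgb) {
  return uint8_t(w) << 0 | uint8_t(ct) << 1 | uint8_t(cwww) << 2 | uint8_t(rgb) << 3;
}

const char *describe(bool w, bool ct, bool cwww, bool rgb) {
  switch (key(w, ct, cwww, rgb)) {  // constexpr keys are valid case labels
    case key(true, false, false, false):
      return "white only";
    case key(false, true, false, false):
      return "color temperature only";
    case key(false, false, false, true):
      return "rgb only";
    default:
      return "other / conflicting combination";
  }
}

int main() { std::printf("%s\n", describe(true, false, false, false)); }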
@@ -1,6 +1,7 @@
#pragma once

#include "light_color_values.h"
#include <set>

namespace esphome {
@@ -185,8 +186,8 @@ class LightCall {

  /// Compute the color mode that should be used for this call.
  ColorMode compute_color_mode_();
  /// Get potential color modes bitmask for this light call.
  color_mode_bitmask_t get_suitable_color_modes_mask_();
  /// Get potential color modes for this light call.
  std::set<ColorMode> get_suitable_color_modes_();
  /// Some color modes also can be set using non-native parameters, transform those calls.
  void transform_parameters_();
@@ -43,6 +43,7 @@ void LightJSONSchema::dump_json(LightState &state, JsonObject root) {
  }

  auto values = state.remote_values;
  auto traits = state.get_output()->get_traits();

  const auto color_mode = values.get_color_mode();
  const char *mode_str = get_color_mode_json_str(color_mode);
@@ -191,9 +191,11 @@ void LightState::current_values_as_brightness(float *brightness) {
  this->current_values.as_brightness(brightness, this->gamma_correct_);
}
void LightState::current_values_as_rgb(float *red, float *green, float *blue, bool color_interlock) {
  auto traits = this->get_traits();
  this->current_values.as_rgb(red, green, blue, this->gamma_correct_, false);
}
void LightState::current_values_as_rgbw(float *red, float *green, float *blue, float *white, bool color_interlock) {
  auto traits = this->get_traits();
  this->current_values.as_rgbw(red, green, blue, white, this->gamma_correct_, false);
}
void LightState::current_values_as_rgbww(float *red, float *green, float *blue, float *cold_white, float *warm_white,
@@ -207,6 +209,7 @@ void LightState::current_values_as_rgbct(float *red, float *green, float *blue,
                                         white_brightness, this->gamma_correct_);
}
void LightState::current_values_as_cwww(float *cold_white, float *warm_white, bool constant_brightness) {
  auto traits = this->get_traits();
  this->current_values.as_cwww(cold_white, warm_white, this->gamma_correct_, constant_brightness);
}
void LightState::current_values_as_ct(float *color_temperature, float *white_brightness) {
@@ -1,7 +1,8 @@
#pragma once

#include "color_mode.h"
#include "esphome/core/helpers.h"
#include "color_mode.h"
#include <set>

namespace esphome {
@@ -18,17 +19,38 @@ class LightTraits {
 public:
  LightTraits() = default;

  const ColorModeMask &get_supported_color_modes() const { return this->supported_color_modes_; }
  void set_supported_color_modes(ColorModeMask supported_color_modes) {
    this->supported_color_modes_ = supported_color_modes;
  }
  void set_supported_color_modes(std::initializer_list<ColorMode> modes) {
    this->supported_color_modes_ = ColorModeMask(modes);
  const std::set<ColorMode> &get_supported_color_modes() const { return this->supported_color_modes_; }
  void set_supported_color_modes(std::set<ColorMode> supported_color_modes) {
    this->supported_color_modes_ = std::move(supported_color_modes);
  }

  bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.contains(color_mode); }
  bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.count(color_mode); }
  bool supports_color_capability(ColorCapability color_capability) const {
    return this->supported_color_modes_.has_capability(color_capability);
    for (auto mode : this->supported_color_modes_) {
      if (mode & color_capability)
        return true;
    }
    return false;
  }

  ESPDEPRECATED("get_supports_brightness() is deprecated, use color modes instead.", "v1.21")
  bool get_supports_brightness() const { return this->supports_color_capability(ColorCapability::BRIGHTNESS); }
  ESPDEPRECATED("get_supports_rgb() is deprecated, use color modes instead.", "v1.21")
  bool get_supports_rgb() const { return this->supports_color_capability(ColorCapability::RGB); }
  ESPDEPRECATED("get_supports_rgb_white_value() is deprecated, use color modes instead.", "v1.21")
  bool get_supports_rgb_white_value() const {
    return this->supports_color_mode(ColorMode::RGB_WHITE) ||
           this->supports_color_mode(ColorMode::RGB_COLOR_TEMPERATURE);
  }
  ESPDEPRECATED("get_supports_color_temperature() is deprecated, use color modes instead.", "v1.21")
  bool get_supports_color_temperature() const {
    return this->supports_color_capability(ColorCapability::COLOR_TEMPERATURE);
  }
  ESPDEPRECATED("get_supports_color_interlock() is deprecated, use color modes instead.", "v1.21")
  bool get_supports_color_interlock() const {
    return this->supports_color_mode(ColorMode::RGB) &&
           (this->supports_color_mode(ColorMode::WHITE) || this->supports_color_mode(ColorMode::COLD_WARM_WHITE) ||
            this->supports_color_mode(ColorMode::COLOR_TEMPERATURE));
  }

  float get_min_mireds() const { return this->min_mireds_; }
@@ -37,9 +59,19 @@ class LightTraits {
|
||||
void set_max_mireds(float max_mireds) { this->max_mireds_ = max_mireds; }
|
||||
|
||||
protected:
|
||||
#ifdef USE_API
|
||||
// The API connection is a friend class to access internal methods
|
||||
friend class api::APIConnection;
|
||||
// This method returns a reference to the internal color modes set.
|
||||
// It is used by the API to avoid copying data when encoding messages.
|
||||
// Warning: Do not use this method outside of the API connection code.
|
||||
// It returns a reference to internal data that can be invalidated.
|
||||
const std::set<ColorMode> &get_supported_color_modes_for_api_() const { return this->supported_color_modes_; }
|
||||
#endif
|
||||
|
||||
std::set<ColorMode> supported_color_modes_{};
|
||||
float min_mireds_{0};
|
||||
float max_mireds_{0};
|
||||
ColorModeMask supported_color_modes_{};
|
||||
};
|
||||
|
||||
} // namespace light
|
||||
|
||||
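The two `supports_color_capability()` bodies in this hunk differ only in representation: the mask variant tests capability bits directly, while the set variant scans every stored mode and relies on each ColorMode value carrying its capability bits, which is why `mode & color_capability` is meaningful. A minimal sketch of that pattern follows; the enum values here are illustrative stand-ins, not ESPHome's real encodings.

#include <cstdint>
#include <cstdio>
#include <set>

enum class Capability : uint8_t { BRIGHTNESS = 1 << 0, RGB = 1 << 1, WHITE = 1 << 2 };

// Each mode's value is the OR of its capability bits (illustrative encoding).
enum class Mode : uint8_t {
  ON_OFF = 0,
  BRIGHTNESS = 1,
  RGB = 1 | 2,
  RGB_WHITE = 1 | 2 | 4,
};

// Set-based check: iterate and test the capability bits embedded in each mode.
static bool supports(const std::set<Mode> &modes, Capability cap) {
  for (Mode m : modes) {
    if (static_cast<uint8_t>(m) & static_cast<uint8_t>(cap))
      return true;
  }
  return false;
}

int main() {
  std::set<Mode> modes{Mode::BRIGHTNESS, Mode::RGB};
  std::printf("WHITE supported: %d\n", supports(modes, Capability::WHITE));  // 0
  std::printf("RGB supported: %d\n", supports(modes, Capability::RGB));      // 1
}

A dedicated bitmask type (like the ColorModeMask above) collapses that loop into a single AND, which is the point of the mask-based variant.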
@@ -68,9 +68,6 @@ static constexpr char LOG_LEVEL_LETTER_CHARS[] = {
// Maximum header size: 35 bytes fixed + 32 bytes tag + 16 bytes thread name = 83 bytes (45 byte safety margin)
static constexpr uint16_t MAX_HEADER_SIZE = 128;

// "0x" + 2 hex digits per byte + '\0'
static constexpr size_t MAX_POINTER_REPRESENTATION = 2 + sizeof(void *) * 2 + 1;

#if defined(USE_ESP32) || defined(USE_ESP8266) || defined(USE_RP2040) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
/** Enum for logging UART selection
 *
@@ -180,11 +177,8 @@ class Logger : public Component {
  inline void HOT format_log_to_buffer_with_terminator_(uint8_t level, const char *tag, int line, const char *format,
                                                        va_list args, char *buffer, uint16_t *buffer_at,
                                                        uint16_t buffer_size) {
#if defined(USE_ESP32) || defined(USE_LIBRETINY)
#if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
    this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(), buffer, buffer_at, buffer_size);
#elif defined(USE_ZEPHYR)
    char buff[MAX_POINTER_REPRESENTATION];
    this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(buff), buffer, buffer_at, buffer_size);
#else
    this->write_header_to_buffer_(level, tag, line, nullptr, buffer, buffer_at, buffer_size);
#endif
@@ -283,11 +277,7 @@ class Logger : public Component {
#endif

#if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
  const char *HOT get_thread_name_(
#ifdef USE_ZEPHYR
      char *buff
#endif
  ) {
  const char *HOT get_thread_name_() {
#ifdef USE_ZEPHYR
    k_tid_t current_task = k_current_get();
#else
@@ -301,13 +291,7 @@ class Logger : public Component {
#elif defined(USE_LIBRETINY)
    return pcTaskGetTaskName(current_task);
#elif defined(USE_ZEPHYR)
    const char *name = k_thread_name_get(current_task);
    if (name) {
      // zephyr print task names only if debug component is present
      return name;
    }
    std::snprintf(buff, MAX_POINTER_REPRESENTATION, "%p", current_task);
    return buff;
    return k_thread_name_get(current_task);
#endif
  }
}

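The `MAX_POINTER_REPRESENTATION` arithmetic above is worth unpacking: on a 32-bit target it is 2 + 4*2 + 1 = 11 bytes, enough for a string like "0x3fff1234" plus the terminator, and it grows to 19 on a 64-bit host. A small self-contained check of the sizing (note that how `%p` renders is implementation-defined, so the constant is an upper bound by construction rather than an exact fit):

#include <cstddef>
#include <cstdio>

// "0x" + two hex digits per byte + '\0' terminator.
static constexpr std::size_t MAX_POINTER_REPRESENTATION = 2 + sizeof(void *) * 2 + 1;

int main() {
  char buff[MAX_POINTER_REPRESENTATION];
  int marker = 0;
  std::snprintf(buff, sizeof(buff), "%p", static_cast<void *>(&marker));
  std::printf("%zu-byte buffer holds \"%s\"\n", sizeof(buff), buff);
}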
@@ -31,17 +31,18 @@ void MDNSComponent::setup() {
  mdns_instance_name_set(this->hostname_.c_str());

  for (const auto &service : services) {
    auto txt_records = std::make_unique<mdns_txt_item_t[]>(service.txt_records.size());
    for (size_t i = 0; i < service.txt_records.size(); i++) {
      const auto &record = service.txt_records[i];
    std::vector<mdns_txt_item_t> txt_records;
    for (const auto &record : service.txt_records) {
      mdns_txt_item_t it{};
      // key and value are either compile-time string literals in flash or pointers to dynamic_txt_values_
      // Both remain valid for the lifetime of this function, and ESP-IDF makes internal copies
      txt_records[i].key = MDNS_STR_ARG(record.key);
      txt_records[i].value = MDNS_STR_ARG(record.value);
      it.key = MDNS_STR_ARG(record.key);
      it.value = MDNS_STR_ARG(record.value);
      txt_records.push_back(it);
    }
    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
    err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port,
                           txt_records.get(), service.txt_records.size());
                           txt_records.data(), txt_records.size());

    if (err != ESP_OK) {
      ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));

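The change above swaps a heap array sized up front from `service.txt_records.size()` for a `std::vector` built incrementally; either way the C API only needs a contiguous `mdns_txt_item_t` array plus a count. A reduced sketch of that vector-to-C-API hand-off (the `txt_item` and `register_service` names are stand-ins for illustration, not ESP-IDF symbols):

#include <cstddef>
#include <utility>
#include <vector>

// Stand-in for a C API like mdns_service_add(): contiguous array + count.
struct txt_item {
  const char *key;
  const char *value;
};
static int register_service(const txt_item *items, std::size_t count) {
  (void) items;
  (void) count;
  return 0;  // stub standing in for the real registration call
}

int register_all(const std::vector<std::pair<const char *, const char *>> &records) {
  std::vector<txt_item> items;
  items.reserve(records.size());
  for (const auto &r : records)
    items.push_back(txt_item{r.first, r.second});
  // vector storage is contiguous, so .data()/.size() map directly onto the C signature.
  return register_service(items.data(), items.size());
}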
@@ -140,8 +140,11 @@ void MQTTClientComponent::send_device_info_() {
#endif

#ifdef USE_API_NOISE
        root[api::global_api_server->get_noise_ctx()->has_psk() ? "api_encryption" : "api_encryption_supported"] =
            "Noise_NNpsk0_25519_ChaChaPoly_SHA256";
        if (api::global_api_server->get_noise_ctx()->has_psk()) {
          root["api_encryption"] = "Noise_NNpsk0_25519_ChaChaPoly_SHA256";
        } else {
          root["api_encryption_supported"] = "Noise_NNpsk0_25519_ChaChaPoly_SHA256";
        }
#endif
      },
      2, this->discovery_info_.retain);

@@ -17,11 +17,11 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  auto traits = this->device_->get_traits();
  // current_temperature_topic
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
  if (traits.get_supports_current_temperature()) {
    root[MQTT_CURRENT_TEMPERATURE_TOPIC] = this->get_current_temperature_state_topic();
  }
  // current_humidity_topic
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
  if (traits.get_supports_current_humidity()) {
    root[MQTT_CURRENT_HUMIDITY_TOPIC] = this->get_current_humidity_state_topic();
  }
  // mode_command_topic
@@ -45,8 +45,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
  if (traits.supports_mode(CLIMATE_MODE_HEAT_COOL))
    modes.add("heat_cool");

  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
  if (traits.get_supports_two_point_target_temperature()) {
    // temperature_low_command_topic
    root[MQTT_TEMPERATURE_LOW_COMMAND_TOPIC] = this->get_target_temperature_low_command_topic();
    // temperature_low_state_topic
@@ -62,7 +61,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
    root[MQTT_TEMPERATURE_STATE_TOPIC] = this->get_target_temperature_state_topic();
  }

  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
  if (traits.get_supports_target_humidity()) {
    // target_humidity_command_topic
    root[MQTT_TARGET_HUMIDITY_COMMAND_TOPIC] = this->get_target_humidity_command_topic();
    // target_humidity_state_topic
@@ -110,7 +109,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
      presets.add(preset);
  }

  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
  if (traits.get_supports_action()) {
    // action_topic
    root[MQTT_ACTION_TOPIC] = this->get_action_state_topic();
  }
@@ -175,8 +174,7 @@ void MQTTClimateComponent::setup() {
    call.perform();
  });

  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
  if (traits.get_supports_two_point_target_temperature()) {
    this->subscribe(this->get_target_temperature_low_command_topic(),
                    [this](const std::string &topic, const std::string &payload) {
                      auto val = parse_number<float>(payload);
@@ -213,7 +211,7 @@ void MQTTClimateComponent::setup() {
                    });
  }

  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
  if (traits.get_supports_target_humidity()) {
    this->subscribe(this->get_target_humidity_command_topic(),
                    [this](const std::string &topic, const std::string &payload) {
                      auto val = parse_number<float>(payload);
@@ -292,14 +290,12 @@ bool MQTTClimateComponent::publish_state_() {
    success = false;
  int8_t target_accuracy = traits.get_target_temperature_accuracy_decimals();
  int8_t current_accuracy = traits.get_current_temperature_accuracy_decimals();
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE) &&
      !std::isnan(this->device_->current_temperature)) {
  if (traits.get_supports_current_temperature() && !std::isnan(this->device_->current_temperature)) {
    std::string payload = value_accuracy_to_string(this->device_->current_temperature, current_accuracy);
    if (!this->publish(this->get_current_temperature_state_topic(), payload))
      success = false;
  }
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
  if (traits.get_supports_two_point_target_temperature()) {
    std::string payload = value_accuracy_to_string(this->device_->target_temperature_low, target_accuracy);
    if (!this->publish(this->get_target_temperature_low_state_topic(), payload))
      success = false;
@@ -312,14 +308,12 @@ bool MQTTClimateComponent::publish_state_() {
      success = false;
  }

  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY) &&
      !std::isnan(this->device_->current_humidity)) {
  if (traits.get_supports_current_humidity() && !std::isnan(this->device_->current_humidity)) {
    std::string payload = value_accuracy_to_string(this->device_->current_humidity, 0);
    if (!this->publish(this->get_current_humidity_state_topic(), payload))
      success = false;
  }
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY) &&
      !std::isnan(this->device_->target_humidity)) {
  if (traits.get_supports_target_humidity() && !std::isnan(this->device_->target_humidity)) {
    std::string payload = value_accuracy_to_string(this->device_->target_humidity, 0);
    if (!this->publish(this->get_target_humidity_state_topic(), payload))
      success = false;
@@ -363,7 +357,7 @@ bool MQTTClimateComponent::publish_state_() {
      success = false;
  }

  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
  if (traits.get_supports_action()) {
    const char *payload;
    switch (this->device_->action) {
      case CLIMATE_ACTION_OFF:

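Every hunk in this file follows the same shape: a combined-bitmask query traded against a dedicated getter. Assuming `has_feature_flags()` answers "is any of these bits set?" (an inference from the two-point check passing `SUPPORTS | REQUIRES` as one argument, not confirmed against the climate traits source), the core of such a flags API is just a masked AND. A minimal sketch with invented flag names:

#include <cstdint>

// Minimal any-bit flags helper (assumed semantics, illustrative names).
class Traits {
 public:
  void add_flags(uint32_t flags) { this->flags_ |= flags; }
  bool has_any(uint32_t mask) const { return (this->flags_ & mask) != 0; }

 private:
  uint32_t flags_{0};
};

constexpr uint32_t SUPPORTS_TWO_POINT = 1 << 0;
constexpr uint32_t REQUIRES_TWO_POINT = 1 << 1;

bool supports_two_point(const Traits &t) {
  // One call covers both bits, mirroring the combined test above.
  return t.has_any(SUPPORTS_TWO_POINT | REQUIRES_TWO_POINT);
}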
@@ -85,20 +85,24 @@ bool MQTTComponent::send_discovery_() {
        }

        // Fields from EntityBase
        root[MQTT_NAME] = this->get_entity()->has_own_name() ? this->friendly_name() : "";

        if (this->get_entity()->has_own_name()) {
          root[MQTT_NAME] = this->friendly_name();
        } else {
          root[MQTT_NAME] = "";
        }
        if (this->is_disabled_by_default())
          root[MQTT_ENABLED_BY_DEFAULT] = false;
        if (!this->get_icon().empty())
          root[MQTT_ICON] = this->get_icon();

        const auto entity_category = this->get_entity()->get_entity_category();
        switch (entity_category) {
        switch (this->get_entity()->get_entity_category()) {
          case ENTITY_CATEGORY_NONE:
            break;
          case ENTITY_CATEGORY_CONFIG:
            root[MQTT_ENTITY_CATEGORY] = "config";
            break;
          case ENTITY_CATEGORY_DIAGNOSTIC:
            root[MQTT_ENTITY_CATEGORY] = entity_category == ENTITY_CATEGORY_CONFIG ? "config" : "diagnostic";
            root[MQTT_ENTITY_CATEGORY] = "diagnostic";
            break;
        }

@@ -109,14 +113,20 @@ bool MQTTComponent::send_discovery_() {
        if (this->command_retain_)
          root[MQTT_COMMAND_RETAIN] = true;

        const Availability &avail =
            this->availability_ == nullptr ? global_mqtt_client->get_availability() : *this->availability_;
        if (!avail.topic.empty()) {
          root[MQTT_AVAILABILITY_TOPIC] = avail.topic;
          if (avail.payload_available != "online")
            root[MQTT_PAYLOAD_AVAILABLE] = avail.payload_available;
          if (avail.payload_not_available != "offline")
            root[MQTT_PAYLOAD_NOT_AVAILABLE] = avail.payload_not_available;
        if (this->availability_ == nullptr) {
          if (!global_mqtt_client->get_availability().topic.empty()) {
            root[MQTT_AVAILABILITY_TOPIC] = global_mqtt_client->get_availability().topic;
            if (global_mqtt_client->get_availability().payload_available != "online")
              root[MQTT_PAYLOAD_AVAILABLE] = global_mqtt_client->get_availability().payload_available;
            if (global_mqtt_client->get_availability().payload_not_available != "offline")
              root[MQTT_PAYLOAD_NOT_AVAILABLE] = global_mqtt_client->get_availability().payload_not_available;
          }
        } else if (!this->availability_->topic.empty()) {
          root[MQTT_AVAILABILITY_TOPIC] = this->availability_->topic;
          if (this->availability_->payload_available != "online")
            root[MQTT_PAYLOAD_AVAILABLE] = this->availability_->payload_available;
          if (this->availability_->payload_not_available != "offline")
            root[MQTT_PAYLOAD_NOT_AVAILABLE] = this->availability_->payload_not_available;
        }

        const MQTTDiscoveryInfo &discovery_info = global_mqtt_client->get_discovery_info();
@@ -135,8 +145,10 @@ bool MQTTComponent::send_discovery_() {
        if (discovery_info.object_id_generator == MQTT_DEVICE_NAME_OBJECT_ID_GENERATOR)
          root[MQTT_OBJECT_ID] = node_name + "_" + this->get_default_object_id_();

        const std::string &friendly_name_ref = App.get_friendly_name();
        const std::string &node_friendly_name = friendly_name_ref.empty() ? node_name : friendly_name_ref;
        std::string node_friendly_name = App.get_friendly_name();
        if (node_friendly_name.empty()) {
          node_friendly_name = node_name;
        }
        std::string node_area = App.get_area();

        JsonObject device_info = root[MQTT_DEVICE].to<JsonObject>();
@@ -146,9 +158,13 @@ bool MQTTComponent::send_discovery_() {
#ifdef ESPHOME_PROJECT_NAME
        device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_PROJECT_VERSION " (ESPHome " ESPHOME_VERSION ")";
        const char *model = std::strchr(ESPHOME_PROJECT_NAME, '.');
        device_info[MQTT_DEVICE_MODEL] = model == nullptr ? ESPHOME_BOARD : model + 1;
        device_info[MQTT_DEVICE_MANUFACTURER] =
            model == nullptr ? ESPHOME_PROJECT_NAME : std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME);
        if (model == nullptr) {  // must never happen but check anyway
          device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD;
          device_info[MQTT_DEVICE_MANUFACTURER] = ESPHOME_PROJECT_NAME;
        } else {
          device_info[MQTT_DEVICE_MODEL] = model + 1;
          device_info[MQTT_DEVICE_MANUFACTURER] = std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME);
        }
#else
        device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_VERSION " (" + App.get_compilation_time() + ")";
        device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD;

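The `strchr` refactor above condenses the manufacturer/model split of `ESPHOME_PROJECT_NAME` into two ternaries; the behavior is easiest to see with a concrete value. A standalone sketch, with project and board names invented for illustration:

#include <cstring>
#include <iostream>
#include <string>

int main() {
  const char *project = "acme.air_sensor";  // stands in for ESPHOME_PROJECT_NAME
  const char *fallback_model = "esp32dev";  // stands in for ESPHOME_BOARD
  const char *dot = std::strchr(project, '.');
  // Manufacturer = text before the first '.', model = text after it.
  std::string manufacturer = dot == nullptr ? project : std::string(project, dot - project);
  std::string model = dot == nullptr ? fallback_model : dot + 1;
  std::cout << manufacturer << " / " << model << "\n";  // prints: acme / air_sensor
}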
@@ -1291,6 +1291,9 @@ void Nextion::check_pending_waveform_() {

void Nextion::set_writer(const nextion_writer_t &writer) { this->writer_ = writer; }

ESPDEPRECATED("set_wait_for_ack(bool) deprecated, no effect", "v1.20")
void Nextion::set_wait_for_ack(bool wait_for_ack) { ESP_LOGE(TAG, "Deprecated"); }

bool Nextion::is_updating() { return this->connection_state_.is_updating_; }

}  // namespace nextion

@@ -916,7 +916,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
  auto min_temp_value = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
  climate_value_row_(stream, obj, area, node, friendly_name, min_temp, min_temp_value);
  // now check optional traits
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
  if (traits.get_supports_current_temperature()) {
    std::string current_temp = "current_temperature";
    if (std::isnan(obj->current_temperature)) {
      climate_failed_row_(stream, obj, area, node, friendly_name, current_temp, true);
@@ -927,7 +927,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
      climate_failed_row_(stream, obj, area, node, friendly_name, current_temp, false);
    }
  }
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
  if (traits.get_supports_current_humidity()) {
    std::string current_humidity = "current_humidity";
    if (std::isnan(obj->current_humidity)) {
      climate_failed_row_(stream, obj, area, node, friendly_name, current_humidity, true);
@@ -938,7 +938,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
      climate_failed_row_(stream, obj, area, node, friendly_name, current_humidity, false);
    }
  }
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
  if (traits.get_supports_target_humidity()) {
    std::string target_humidity = "target_humidity";
    if (std::isnan(obj->target_humidity)) {
      climate_failed_row_(stream, obj, area, node, friendly_name, target_humidity, true);
@@ -949,8 +949,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
      climate_failed_row_(stream, obj, area, node, friendly_name, target_humidity, false);
    }
  }
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
  if (traits.get_supports_two_point_target_temperature()) {
    std::string target_temp_low = "target_temperature_low";
    auto target_temp_low_value = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
    climate_value_row_(stream, obj, area, node, friendly_name, target_temp_low, target_temp_low_value);
@@ -962,7 +961,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
    auto target_temp_value = value_accuracy_to_string(obj->target_temperature, target_accuracy);
    climate_value_row_(stream, obj, area, node, friendly_name, target_temp, target_temp_value);
  }
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
  if (traits.get_supports_action()) {
    std::string climate_trait_category = "action";
    const auto *climate_trait_value = climate::climate_action_to_string(obj->action);
    climate_setting_row_(stream, obj, area, node, friendly_name, climate_trait_category, climate_trait_value);

@@ -81,7 +81,7 @@ CONFIG_SCHEMA = (
                cv.int_range(min=0, max=0xFFFF, max_included=False),
            ),
            cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION): cv.pressure,
            cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature_delta,
            cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature,
            cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION_SOURCE): cv.use_id(
                sensor.Sensor
            ),

@@ -45,26 +45,13 @@ def get_script(script_id):


def check_max_runs(value):
    # Set default for queued mode to prevent unbounded queue growth
    if CONF_MAX_RUNS not in value and value[CONF_MODE] == CONF_QUEUED:
        value[CONF_MAX_RUNS] = 5

    if CONF_MAX_RUNS not in value:
        return value

    if value[CONF_MODE] not in [CONF_QUEUED, CONF_PARALLEL]:
        raise cv.Invalid(
            "The option 'max_runs' is only valid in 'queued' and 'parallel' mode.",
            "The option 'max_runs' is only valid in 'queue' and 'parallel' mode.",
            path=[CONF_MAX_RUNS],
        )

    # Queued mode must have bounded queue (min 1), parallel mode can be unlimited (0)
    if value[CONF_MODE] == CONF_QUEUED and value[CONF_MAX_RUNS] < 1:
        raise cv.Invalid(
            "The option 'max_runs' must be at least 1 for queued mode.",
            path=[CONF_MAX_RUNS],
        )

    return value


@@ -119,7 +106,7 @@ CONFIG_SCHEMA = automation.validate_automation(
        cv.Optional(CONF_MODE, default=CONF_SINGLE): cv.one_of(
            *SCRIPT_MODES, lower=True
        ),
        cv.Optional(CONF_MAX_RUNS): cv.int_range(min=0, max=100),
        cv.Optional(CONF_MAX_RUNS): cv.positive_int,
        cv.Optional(CONF_PARAMETERS, default={}): cv.Schema(
            {
                validate_parameter_name: validate_parameter_type,

@@ -1,11 +1,10 @@
#pragma once

#include <memory>
#include <tuple>
#include "esphome/core/automation.h"
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

#include <queue>
namespace esphome {
namespace script {

@@ -97,41 +96,23 @@ template<typename... Ts> class RestartScript : public Script<Ts...> {
/** A script type that queues new instances that are created.
 *
 * Only one instance of the script can be active at a time.
 *
 * Ring buffer implementation:
 * - num_queued_ tracks the number of queued (waiting) instances, NOT including the currently running one
 * - queue_front_ points to the next item to execute (read position)
 * - Buffer size is max_runs_ - 1 (max total instances minus the running one)
 * - Write position is calculated as: (queue_front_ + num_queued_) % (max_runs_ - 1)
 * - When an item finishes, queue_front_ advances: (queue_front_ + 1) % (max_runs_ - 1)
 * - First execute() runs immediately without queuing (num_queued_ stays 0)
 * - Subsequent executes while running are queued starting at position 0
 * - Maximum total instances = max_runs_ (includes 1 running + (max_runs_ - 1) queued)
 */
template<typename... Ts> class QueueingScript : public Script<Ts...>, public Component {
 public:
  void execute(Ts... x) override {
    if (this->is_action_running() || this->num_queued_ > 0) {
      // num_queued_ is the number of *queued* instances (waiting, not including currently running)
      // max_runs_ is the maximum *total* instances (running + queued)
      // So we reject when num_queued_ + 1 >= max_runs_ (queued + running >= max)
      if (this->num_queued_ + 1 >= this->max_runs_) {
        this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' max instances (running + queued) reached!"),
    if (this->is_action_running() || this->num_runs_ > 0) {
      // num_runs_ is the number of *queued* instances, so total number of instances is
      // num_runs_ + 1
      if (this->max_runs_ != 0 && this->num_runs_ + 1 >= this->max_runs_) {
        this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' maximum number of queued runs exceeded!"),
                        LOG_STR_ARG(this->name_));
        return;
      }

      // Initialize queue on first queued item (after capacity check)
      this->lazy_init_queue_();

      this->esp_logd_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' queueing new instance (mode: queued)"),
                      LOG_STR_ARG(this->name_));
      // Ring buffer: write to (queue_front_ + num_queued_) % queue_capacity
      const size_t queue_capacity = static_cast<size_t>(this->max_runs_ - 1);
      size_t write_pos = (this->queue_front_ + this->num_queued_) % queue_capacity;
      // Use std::make_unique to replace the unique_ptr
      this->var_queue_[write_pos] = std::make_unique<std::tuple<Ts...>>(x...);
      this->num_queued_++;
      this->num_runs_++;
      this->var_queue_.push(std::make_tuple(x...));
      return;
    }

@@ -141,46 +122,29 @@ template<typename... Ts> class QueueingScript : public Script<Ts...>, public Com
  }

  void stop() override {
    // Clear all queued items to free memory immediately
    // Resetting the array automatically destroys all unique_ptrs and their contents
    this->var_queue_.reset();
    this->num_queued_ = 0;
    this->queue_front_ = 0;
    this->num_runs_ = 0;
    Script<Ts...>::stop();
  }

  void loop() override {
    if (this->num_queued_ != 0 && !this->is_action_running()) {
      // Dequeue: decrement count, move tuple out (frees slot), advance read position
      this->num_queued_--;
      const size_t queue_capacity = static_cast<size_t>(this->max_runs_ - 1);
      auto tuple_ptr = std::move(this->var_queue_[this->queue_front_]);
      this->queue_front_ = (this->queue_front_ + 1) % queue_capacity;
      this->trigger_tuple_(*tuple_ptr, typename gens<sizeof...(Ts)>::type());
    if (this->num_runs_ != 0 && !this->is_action_running()) {
      this->num_runs_--;
      auto &vars = this->var_queue_.front();
      this->var_queue_.pop();
      this->trigger_tuple_(vars, typename gens<sizeof...(Ts)>::type());
    }
  }

  void set_max_runs(int max_runs) { max_runs_ = max_runs; }

 protected:
  // Lazy init queue on first use - avoids setup() ordering issues and saves memory
  // if script is never executed during this boot cycle
  inline void lazy_init_queue_() {
    if (!this->var_queue_) {
      // Allocate array of max_runs_ - 1 slots for queued items (running item is separate)
      // unique_ptr array is zero-initialized, so all slots start as nullptr
      this->var_queue_ = std::make_unique<std::unique_ptr<std::tuple<Ts...>>[]>(this->max_runs_ - 1);
    }
  }

  template<int... S> void trigger_tuple_(const std::tuple<Ts...> &tuple, seq<S...> /*unused*/) {
    this->trigger(std::get<S>(tuple)...);
  }

  int num_queued_ = 0;      // Number of queued instances (not including currently running)
  int max_runs_ = 0;        // Maximum total instances (running + queued)
  size_t queue_front_ = 0;  // Ring buffer read position (next item to execute)
  std::unique_ptr<std::unique_ptr<std::tuple<Ts...>>[]> var_queue_;  // Ring buffer of queued parameters
  int num_runs_ = 0;
  int max_runs_ = 0;
  std::queue<std::tuple<Ts...>> var_queue_;
};

/** A script type that executes new instances in parallel.

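The ring-buffer comment block in this hunk fully specifies the data structure that the `std::queue` version trades away: a read index that wraps, a count that excludes the running instance, and a write position derived from the two rather than stored. A freestanding sketch of the same bookkeeping, independent of the script machinery:

#include <cstddef>
#include <memory>

// Fixed-capacity ring buffer: `front_` is the read position and the write
// position is derived as (front_ + count_) % cap_, exactly as documented above.
template<typename T> class Ring {
 public:
  explicit Ring(std::size_t capacity) : buf_(new T[capacity]), cap_(capacity) {}

  bool push(const T &v) {
    if (count_ == cap_)
      return false;  // full: the caller rejects the new instance
    buf_[(front_ + count_) % cap_] = v;
    ++count_;
    return true;
  }

  bool pop(T &out) {
    if (count_ == 0)
      return false;
    out = buf_[front_];
    front_ = (front_ + 1) % cap_;  // advance the read position, wrapping
    --count_;
    return true;
  }

 private:
  std::unique_ptr<T[]> buf_;
  std::size_t cap_;
  std::size_t front_{0};
  std::size_t count_{0};
};

With max_runs_ total instances allowed, the script variant sizes this buffer at max_runs_ - 1 because the currently running instance never occupies a slot.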
@@ -28,6 +28,21 @@
namespace esphome {
namespace statsd {

using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR };

using sensors_t = struct {
  const char *name;
  sensor_type_t type;
  union {
#ifdef USE_SENSOR
    esphome::sensor::Sensor *sensor;
#endif
#ifdef USE_BINARY_SENSOR
    esphome::binary_sensor::BinarySensor *binary_sensor;
#endif
  };
};

class StatsdComponent : public PollingComponent {
 public:
  ~StatsdComponent();
@@ -56,20 +71,6 @@ class StatsdComponent : public PollingComponent {
  const char *prefix_;
  uint16_t port_;

  using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR };
  using sensors_t = struct {
    const char *name;
    sensor_type_t type;
    union {
#ifdef USE_SENSOR
      esphome::sensor::Sensor *sensor;
#endif
#ifdef USE_BINARY_SENSOR
      esphome::binary_sensor::BinarySensor *binary_sensor;
#endif
    };
  };

  std::vector<sensors_t> sensors_;

#ifdef USE_ESP8266

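The relocated `sensors_t` is a classic tagged union: the `type` member records which pointer in the anonymous union is live. Stripped of the ESPHome types, the pattern looks like the following sketch (the pointee types are simplified stand-ins):

#include <cstdio>

enum class Kind { SENSOR, BINARY_SENSOR };

// The tag must be checked before touching the union; reading the
// non-active member is undefined behavior.
struct Entry {
  const char *name;
  Kind kind;
  union {
    const float *analog;  // stand-in for sensor::Sensor *
    const bool *binary;   // stand-in for binary_sensor::BinarySensor *
  };
};

void report(const Entry &e) {
  if (e.kind == Kind::SENSOR) {
    std::printf("%s:%g\n", e.name, *e.analog);
  } else {
    std::printf("%s:%d\n", e.name, *e.binary ? 1 : 0);
  }
}

int main() {
  float temp = 21.5f;
  Entry e{"temp", Kind::SENSOR, {}};
  e.analog = &temp;
  report(e);  // prints: temp:21.5
}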
@@ -71,14 +71,9 @@ from esphome.const import (
    CONF_VISUAL,
)

CONF_DEFAULT_PRESET = "default_preset"
CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION = "humidity_control_dehumidify_action"
CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION = "humidity_control_humidify_action"
CONF_HUMIDITY_CONTROL_OFF_ACTION = "humidity_control_off_action"
CONF_HUMIDITY_HYSTERESIS = "humidity_hysteresis"
CONF_ON_BOOT_RESTORE_FROM = "on_boot_restore_from"
CONF_PRESET_CHANGE = "preset_change"
CONF_TARGET_HUMIDITY_CHANGE_ACTION = "target_humidity_change_action"
CONF_DEFAULT_PRESET = "default_preset"
CONF_ON_BOOT_RESTORE_FROM = "on_boot_restore_from"

CODEOWNERS = ["@kbx81"]

@@ -246,14 +241,6 @@ def validate_thermostat(config):
            CONF_MAX_HEATING_RUN_TIME,
            CONF_SUPPLEMENTAL_HEATING_ACTION,
        ],
        CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION: [
            CONF_HUMIDITY_CONTROL_OFF_ACTION,
            CONF_HUMIDITY_SENSOR,
        ],
        CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION: [
            CONF_HUMIDITY_CONTROL_OFF_ACTION,
            CONF_HUMIDITY_SENSOR,
        ],
    }
    for config_trigger, req_triggers in requirements.items():
        for req_trigger in req_triggers:
@@ -351,7 +338,7 @@ def validate_thermostat(config):
    # Warn about using the removed CONF_DEFAULT_MODE and advise users
    if CONF_DEFAULT_MODE in config and config[CONF_DEFAULT_MODE] is not None:
        raise cv.Invalid(
            f"{CONF_DEFAULT_MODE} is no longer valid. Please switch to using presets and specify a {CONF_DEFAULT_PRESET}"
            f"{CONF_DEFAULT_MODE} is no longer valid. Please switch to using presets and specify a {CONF_DEFAULT_PRESET}."
        )

    default_mode = config[CONF_DEFAULT_MODE]
@@ -601,24 +588,9 @@ CONFIG_SCHEMA = cv.All(
            cv.Optional(CONF_SWING_VERTICAL_ACTION): automation.validate_automation(
                single=True
            ),
            cv.Optional(
                CONF_TARGET_HUMIDITY_CHANGE_ACTION
            ): automation.validate_automation(single=True),
            cv.Optional(
                CONF_TARGET_TEMPERATURE_CHANGE_ACTION
            ): automation.validate_automation(single=True),
            cv.Exclusive(
                CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION,
                group_of_exclusion="humidity_control",
            ): automation.validate_automation(single=True),
            cv.Exclusive(
                CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION,
                group_of_exclusion="humidity_control",
            ): automation.validate_automation(single=True),
            cv.Optional(
                CONF_HUMIDITY_CONTROL_OFF_ACTION
            ): automation.validate_automation(single=True),
            cv.Optional(CONF_HUMIDITY_HYSTERESIS, default=1.0): cv.percentage,
            cv.Optional(CONF_DEFAULT_MODE, default=None): cv.valid,
            cv.Optional(CONF_DEFAULT_PRESET): cv.templatable(cv.string),
            cv.Optional(CONF_DEFAULT_TARGET_TEMPERATURE_HIGH): cv.temperature,
@@ -910,39 +882,12 @@ async def to_code(config):
            config[CONF_SWING_VERTICAL_ACTION],
        )
        cg.add(var.set_supports_swing_mode_vertical(True))
    if CONF_TARGET_HUMIDITY_CHANGE_ACTION in config:
        await automation.build_automation(
            var.get_humidity_change_trigger(),
            [],
            config[CONF_TARGET_HUMIDITY_CHANGE_ACTION],
        )
    if CONF_TARGET_TEMPERATURE_CHANGE_ACTION in config:
        await automation.build_automation(
            var.get_temperature_change_trigger(),
            [],
            config[CONF_TARGET_TEMPERATURE_CHANGE_ACTION],
        )
    if CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION in config:
        cg.add(var.set_supports_dehumidification(True))
        await automation.build_automation(
            var.get_humidity_control_dehumidify_action_trigger(),
            [],
            config[CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION],
        )
    if CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION in config:
        cg.add(var.set_supports_humidification(True))
        await automation.build_automation(
            var.get_humidity_control_humidify_action_trigger(),
            [],
            config[CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION],
        )
    if CONF_HUMIDITY_CONTROL_OFF_ACTION in config:
        await automation.build_automation(
            var.get_humidity_control_off_action_trigger(),
            [],
            config[CONF_HUMIDITY_CONTROL_OFF_ACTION],
        )
    cg.add(var.set_humidity_hysteresis(config[CONF_HUMIDITY_HYSTERESIS]))

    if CONF_PRESET in config:
        for preset_config in config[CONF_PRESET]:

@@ -32,7 +32,6 @@ void ThermostatClimate::setup() {
  if (this->humidity_sensor_ != nullptr) {
    this->humidity_sensor_->add_on_state_callback([this](float state) {
      this->current_humidity = state;
      this->switch_to_humidity_control_action_(this->compute_humidity_control_action_());
      this->publish_state();
    });
    this->current_humidity = this->humidity_sensor_->state;
@@ -85,8 +84,6 @@ void ThermostatClimate::refresh() {
  this->switch_to_supplemental_action_(this->compute_supplemental_action_());
  this->switch_to_fan_mode_(this->fan_mode.value(), false);
  this->switch_to_swing_mode_(this->swing_mode, false);
  this->switch_to_humidity_control_action_(this->compute_humidity_control_action_());
  this->check_humidity_change_trigger_();
  this->check_temperature_change_trigger_();
  this->publish_state();
}
@@ -132,11 +129,6 @@ bool ThermostatClimate::hysteresis_valid() {
  return true;
}

bool ThermostatClimate::humidity_hysteresis_valid() {
  return !std::isnan(this->humidity_hysteresis_) && this->humidity_hysteresis_ >= 0.0f &&
         this->humidity_hysteresis_ < 100.0f;
}

bool ThermostatClimate::limit_setpoints_for_heat_cool() {
  return this->mode == climate::CLIMATE_MODE_HEAT_COOL ||
         (this->mode == climate::CLIMATE_MODE_AUTO && this->supports_heat_cool_);
@@ -197,16 +189,6 @@ void ThermostatClimate::validate_target_temperature_high() {
  }
}

void ThermostatClimate::validate_target_humidity() {
  if (std::isnan(this->target_humidity)) {
    this->target_humidity =
        (this->get_traits().get_visual_max_humidity() - this->get_traits().get_visual_min_humidity()) / 2.0f;
  } else {
    this->target_humidity = clamp<float>(this->target_humidity, this->get_traits().get_visual_min_humidity(),
                                         this->get_traits().get_visual_max_humidity());
  }
}

void ThermostatClimate::control(const climate::ClimateCall &call) {
  bool target_temperature_high_changed = false;

@@ -253,10 +235,6 @@ void ThermostatClimate::control(const climate::ClimateCall &call) {
      this->validate_target_temperature();
    }
  }
  if (call.get_target_humidity().has_value()) {
    this->target_humidity = call.get_target_humidity().value();
    this->validate_target_humidity();
  }
  // make any changes happen
  this->refresh();
}
@@ -272,9 +250,6 @@ climate::ClimateTraits ThermostatClimate::traits() {
  if (this->humidity_sensor_ != nullptr)
    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);

  if (this->supports_humidification_ || this->supports_dehumidification_)
    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);

  if (this->supports_auto_)
    traits.add_supported_mode(climate::CLIMATE_MODE_AUTO);
  if (this->supports_heat_cool_)
@@ -448,28 +423,6 @@ climate::ClimateAction ThermostatClimate::compute_supplemental_action_() {
  return target_action;
}

HumidificationAction ThermostatClimate::compute_humidity_control_action_() {
  auto target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
  // if hysteresis value or current_humidity is not valid, we go to OFF
  if (std::isnan(this->current_humidity) || !this->humidity_hysteresis_valid()) {
    return THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
  }

  // ensure set point is valid before computing the action
  this->validate_target_humidity();
  // everything has been validated so we can now safely compute the action
  if (this->dehumidification_required_() && this->humidification_required_()) {
    // this is bad and should never happen, so just stop.
    // target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
  } else if (this->supports_dehumidification_ && this->dehumidification_required_()) {
    target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY;
  } else if (this->supports_humidification_ && this->humidification_required_()) {
    target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY;
  }

  return target_action;
}

void ThermostatClimate::switch_to_action_(climate::ClimateAction action, bool publish_state) {
  // setup_complete_ helps us ensure an action is called immediately after boot
  if ((action == this->action) && this->setup_complete_) {
@@ -643,44 +596,6 @@ void ThermostatClimate::trigger_supplemental_action_() {
  }
}

void ThermostatClimate::switch_to_humidity_control_action_(HumidificationAction action) {
  // setup_complete_ helps us ensure an action is called immediately after boot
  if ((action == this->humidification_action_) && this->setup_complete_) {
    // already in target mode
    return;
  }

  Trigger<> *trig = this->humidity_control_off_action_trigger_;
  switch (action) {
    case THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF:
      // trig = this->humidity_control_off_action_trigger_;
      ESP_LOGVV(TAG, "Switching to HUMIDIFICATION_OFF action");
      break;
    case THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY:
      trig = this->humidity_control_dehumidify_action_trigger_;
      ESP_LOGVV(TAG, "Switching to DEHUMIDIFY action");
      break;
    case THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY:
      trig = this->humidity_control_humidify_action_trigger_;
      ESP_LOGVV(TAG, "Switching to HUMIDIFY action");
      break;
    case THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE:
    default:
      action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
      // trig = this->humidity_control_off_action_trigger_;
  }

  if (this->prev_humidity_control_trigger_ != nullptr) {
    this->prev_humidity_control_trigger_->stop_action();
    this->prev_humidity_control_trigger_ = nullptr;
  }
  this->humidification_action_ = action;
  this->prev_humidity_control_trigger_ = trig;
  if (trig != nullptr) {
    trig->trigger();
  }
}

void ThermostatClimate::switch_to_fan_mode_(climate::ClimateFanMode fan_mode, bool publish_state) {
  // setup_complete_ helps us ensure an action is called immediately after boot
  if ((fan_mode == this->prev_fan_mode_) && this->setup_complete_) {
@@ -972,20 +887,6 @@ void ThermostatClimate::idle_on_timer_callback_() {
  this->switch_to_supplemental_action_(this->compute_supplemental_action_());
}

void ThermostatClimate::check_humidity_change_trigger_() {
  if ((this->prev_target_humidity_ == this->target_humidity) && this->setup_complete_) {
    return;  // nothing changed, no reason to trigger
  } else {
    // save the new temperature so we can check it again later; the trigger will fire below
    this->prev_target_humidity_ = this->target_humidity;
  }
  // trigger the action
  Trigger<> *trig = this->humidity_change_trigger_;
  if (trig != nullptr) {
    trig->trigger();
  }
}

void ThermostatClimate::check_temperature_change_trigger_() {
  if (this->supports_two_points_) {
    // setup_complete_ helps us ensure an action is called immediately after boot
@@ -1095,32 +996,6 @@ bool ThermostatClimate::supplemental_heating_required_() {
          (this->supplemental_action_ == climate::CLIMATE_ACTION_HEATING));
}

bool ThermostatClimate::dehumidification_required_() {
  if (this->current_humidity > this->target_humidity + this->humidity_hysteresis_) {
    // if the current humidity exceeds the target + hysteresis, dehumidification is required
    return true;
  } else if (this->current_humidity < this->target_humidity - this->humidity_hysteresis_) {
    // if the current humidity is less than the target - hysteresis, dehumidification should stop
    return false;
  }
  // if we get here, the current humidity is between target + hysteresis and target - hysteresis,
  // so the action should not change
  return this->humidification_action_ == THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY;
}

bool ThermostatClimate::humidification_required_() {
  if (this->current_humidity < this->target_humidity - this->humidity_hysteresis_) {
    // if the current humidity is below the target - hysteresis, humidification is required
    return true;
  } else if (this->current_humidity > this->target_humidity + this->humidity_hysteresis_) {
    // if the current humidity is above the target + hysteresis, humidification should stop
    return false;
  }
  // if we get here, the current humidity is between target - hysteresis and target + hysteresis,
  // so the action should not change
  return this->humidification_action_ == THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY;
}

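The paired `dehumidification_required_()` / `humidification_required_()` functions above implement a symmetric hysteresis band: outside the band the answer is forced, inside it the current action is held so the output cannot rapidly toggle. With a target of 50 %RH and a hysteresis of 5, a reading of 58 demands dehumidification, 43 ends it, and 52 keeps whatever was already happening. The decision rule in isolation:

// Hysteresis band around a setpoint: force a state outside the band,
// hold the previous state inside it (mirrors the logic above).
bool dehumidify_required(float current, float target, float hysteresis, bool currently_dehumidifying) {
  if (current > target + hysteresis)
    return true;  // clearly too humid
  if (current < target - hysteresis)
    return false;  // clearly dry enough
  return currently_dehumidifying;  // inside the band: no change
}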
void ThermostatClimate::dump_preset_config_(const char *preset_name, const ThermostatClimateTargetTempConfig &config) {
  if (this->supports_heat_) {
    ESP_LOGCONFIG(TAG, "      Default Target Temperature Low: %.1f°C",
@@ -1277,12 +1152,8 @@ ThermostatClimate::ThermostatClimate()
      swing_mode_off_trigger_(new Trigger<>()),
      swing_mode_horizontal_trigger_(new Trigger<>()),
      swing_mode_vertical_trigger_(new Trigger<>()),
      humidity_change_trigger_(new Trigger<>()),
      temperature_change_trigger_(new Trigger<>()),
      preset_change_trigger_(new Trigger<>()),
      humidity_control_dehumidify_action_trigger_(new Trigger<>()),
      humidity_control_humidify_action_trigger_(new Trigger<>()),
      humidity_control_off_action_trigger_(new Trigger<>()) {}
      preset_change_trigger_(new Trigger<>()) {}

void ThermostatClimate::set_default_preset(const std::string &custom_preset) {
  this->default_custom_preset_ = custom_preset;
@@ -1346,9 +1217,6 @@ void ThermostatClimate::set_sensor(sensor::Sensor *sensor) { this->sensor_ = sen
void ThermostatClimate::set_humidity_sensor(sensor::Sensor *humidity_sensor) {
  this->humidity_sensor_ = humidity_sensor;
}
void ThermostatClimate::set_humidity_hysteresis(float humidity_hysteresis) {
  this->humidity_hysteresis_ = std::clamp<float>(humidity_hysteresis, 0.0f, 100.0f);
}
void ThermostatClimate::set_use_startup_delay(bool use_startup_delay) { this->use_startup_delay_ = use_startup_delay; }
void ThermostatClimate::set_supports_heat_cool(bool supports_heat_cool) {
  this->supports_heat_cool_ = supports_heat_cool;
@@ -1416,18 +1284,6 @@ void ThermostatClimate::set_supports_swing_mode_vertical(bool supports_swing_mod
void ThermostatClimate::set_supports_two_points(bool supports_two_points) {
  this->supports_two_points_ = supports_two_points;
}
void ThermostatClimate::set_supports_dehumidification(bool supports_dehumidification) {
  this->supports_dehumidification_ = supports_dehumidification;
  if (supports_dehumidification) {
    this->supports_humidification_ = false;
  }
}
void ThermostatClimate::set_supports_humidification(bool supports_humidification) {
  this->supports_humidification_ = supports_humidification;
  if (supports_humidification) {
    this->supports_dehumidification_ = false;
  }
}

Trigger<> *ThermostatClimate::get_cool_action_trigger() const { return this->cool_action_trigger_; }
Trigger<> *ThermostatClimate::get_supplemental_cool_action_trigger() const {
@@ -1461,18 +1317,8 @@ Trigger<> *ThermostatClimate::get_swing_mode_both_trigger() const { return this-
Trigger<> *ThermostatClimate::get_swing_mode_off_trigger() const { return this->swing_mode_off_trigger_; }
Trigger<> *ThermostatClimate::get_swing_mode_horizontal_trigger() const { return this->swing_mode_horizontal_trigger_; }
Trigger<> *ThermostatClimate::get_swing_mode_vertical_trigger() const { return this->swing_mode_vertical_trigger_; }
Trigger<> *ThermostatClimate::get_humidity_change_trigger() const { return this->humidity_change_trigger_; }
Trigger<> *ThermostatClimate::get_temperature_change_trigger() const { return this->temperature_change_trigger_; }
Trigger<> *ThermostatClimate::get_preset_change_trigger() const { return this->preset_change_trigger_; }
Trigger<> *ThermostatClimate::get_humidity_control_dehumidify_action_trigger() const {
  return this->humidity_control_dehumidify_action_trigger_;
}
Trigger<> *ThermostatClimate::get_humidity_control_humidify_action_trigger() const {
  return this->humidity_control_humidify_action_trigger_;
}
Trigger<> *ThermostatClimate::get_humidity_control_off_action_trigger() const {
  return this->humidity_control_off_action_trigger_;
}

void ThermostatClimate::dump_config() {
  LOG_CLIMATE("", "Thermostat", this);
@@ -1576,12 +1422,7 @@ void ThermostatClimate::dump_config() {
                "    OFF: %s\n"
                "    HORIZONTAL: %s\n"
                "    VERTICAL: %s\n"
                "  Supports TWO SET POINTS: %s\n"
                "  Supported Humidity Parameters:\n"
                "    CURRENT: %s\n"
                "    TARGET: %s\n"
                "    DEHUMIDIFICATION: %s\n"
                "    HUMIDIFICATION: %s",
                "  Supports TWO SET POINTS: %s",
                YESNO(this->supports_fan_mode_on_), YESNO(this->supports_fan_mode_off_),
                YESNO(this->supports_fan_mode_auto_), YESNO(this->supports_fan_mode_low_),
                YESNO(this->supports_fan_mode_medium_), YESNO(this->supports_fan_mode_high_),
@@ -1589,10 +1430,7 @@ void ThermostatClimate::dump_config() {
                YESNO(this->supports_fan_mode_diffuse_), YESNO(this->supports_fan_mode_quiet_),
                YESNO(this->supports_swing_mode_both_), YESNO(this->supports_swing_mode_off_),
                YESNO(this->supports_swing_mode_horizontal_), YESNO(this->supports_swing_mode_vertical_),
                YESNO(this->supports_two_points_),
                YESNO(this->get_traits().has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)),
                YESNO(this->supports_dehumidification_ || this->supports_humidification_),
                YESNO(this->supports_dehumidification_), YESNO(this->supports_humidification_));
                YESNO(this->supports_two_points_));

  if (!this->preset_config_.empty()) {
    ESP_LOGCONFIG(TAG, "  Supported PRESETS:");

@@ -13,13 +13,6 @@
namespace esphome {
namespace thermostat {

enum HumidificationAction : uint8_t {
  THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF = 0,
  THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY = 1,
  THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY = 2,
  THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE,
};

enum ThermostatClimateTimerIndex : uint8_t {
  THERMOSTAT_TIMER_COOLING_MAX_RUN_TIME = 0,
  THERMOSTAT_TIMER_COOLING_OFF = 1,
@@ -97,7 +90,6 @@ class ThermostatClimate : public climate::Climate, public Component {
  void set_idle_minimum_time_in_sec(uint32_t time);
  void set_sensor(sensor::Sensor *sensor);
  void set_humidity_sensor(sensor::Sensor *humidity_sensor);
  void set_humidity_hysteresis(float humidity_hysteresis);
  void set_use_startup_delay(bool use_startup_delay);
  void set_supports_auto(bool supports_auto);
  void set_supports_heat_cool(bool supports_heat_cool);
@@ -123,8 +115,6 @@ class ThermostatClimate : public climate::Climate, public Component {
  void set_supports_swing_mode_horizontal(bool supports_swing_mode_horizontal);
  void set_supports_swing_mode_off(bool supports_swing_mode_off);
  void set_supports_swing_mode_vertical(bool supports_swing_mode_vertical);
  void set_supports_dehumidification(bool supports_dehumidification);
  void set_supports_humidification(bool supports_humidification);
  void set_supports_two_points(bool supports_two_points);

  void set_preset_config(climate::ClimatePreset preset, const ThermostatClimateTargetTempConfig &config);
@@ -158,12 +148,8 @@ class ThermostatClimate : public climate::Climate, public Component {
  Trigger<> *get_swing_mode_horizontal_trigger() const;
  Trigger<> *get_swing_mode_off_trigger() const;
  Trigger<> *get_swing_mode_vertical_trigger() const;
  Trigger<> *get_humidity_change_trigger() const;
  Trigger<> *get_temperature_change_trigger() const;
  Trigger<> *get_preset_change_trigger() const;
  Trigger<> *get_humidity_control_dehumidify_action_trigger() const;
  Trigger<> *get_humidity_control_humidify_action_trigger() const;
  Trigger<> *get_humidity_control_off_action_trigger() const;
  /// Get current hysteresis values
  float cool_deadband();
  float cool_overrun();
@@ -180,13 +166,11 @@ class ThermostatClimate : public climate::Climate, public Component {
  climate::ClimateFanMode locked_fan_mode();
  /// Set point and hysteresis validation
  bool hysteresis_valid();           // returns true if valid
  bool humidity_hysteresis_valid();  // returns true if valid
  bool limit_setpoints_for_heat_cool();  // returns true if set points should be further limited within visual range
  void validate_target_temperature();
  void validate_target_temperatures(bool pin_target_temperature_high);
  void validate_target_temperature_low();
  void validate_target_temperature_high();
  void validate_target_humidity();

 protected:
  /// Override control to change settings of the climate device.
@@ -208,13 +192,11 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// Re-compute the required action of this climate controller.
  climate::ClimateAction compute_action_(bool ignore_timers = false);
  climate::ClimateAction compute_supplemental_action_();
  HumidificationAction compute_humidity_control_action_();

  /// Switch the climate device to the given climate action.
  void switch_to_action_(climate::ClimateAction action, bool publish_state = true);
  void switch_to_supplemental_action_(climate::ClimateAction action);
  void trigger_supplemental_action_();
  void switch_to_humidity_control_action_(HumidificationAction action);

  /// Switch the climate device to the given climate fan mode.
  void switch_to_fan_mode_(climate::ClimateFanMode fan_mode, bool publish_state = true);
@@ -225,9 +207,6 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// Switch the climate device to the given climate swing mode.
  void switch_to_swing_mode_(climate::ClimateSwingMode swing_mode, bool publish_state = true);

  /// Check if the humidity change trigger should be called.
  void check_humidity_change_trigger_();

  /// Check if the temperature change trigger should be called.
  void check_temperature_change_trigger_();

@@ -264,8 +243,6 @@ class ThermostatClimate : public climate::Climate, public Component {
  bool heating_required_();
  bool supplemental_cooling_required_();
  bool supplemental_heating_required_();
  bool dehumidification_required_();
  bool humidification_required_();

  void dump_preset_config_(const char *preset_name, const ThermostatClimateTargetTempConfig &config);

@@ -282,9 +259,6 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// The current supplemental action
  climate::ClimateAction supplemental_action_{climate::CLIMATE_ACTION_OFF};

  /// The current humidification action
  HumidificationAction humidification_action_{THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE};

  /// Default standard preset to use on start up
  climate::ClimatePreset default_preset_{};

@@ -347,12 +321,6 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// A false value means that the controller has no such support.
  bool supports_two_points_{false};

  /// Whether the controller supports dehumidification and/or humidification
  ///
  /// A false value means that the controller has no such support.
  bool supports_dehumidification_{false};
  bool supports_humidification_{false};

  /// Flags indicating if maximum allowable run time was exceeded
  bool cooling_max_runtime_exceeded_{false};
  bool heating_max_runtime_exceeded_{false};
@@ -363,10 +331,9 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// setup_complete_ blocks modifying/resetting the temps immediately after boot
  bool setup_complete_{false};

  /// Store previously-known humidity and temperatures
  /// Store previously-known temperatures
  ///
  /// These are used to determine when a temperature/humidity has changed
  float prev_target_humidity_{NAN};
  /// These are used to determine when the temperature change trigger/action needs to be called
  float prev_target_temperature_{NAN};
  float prev_target_temperature_low_{NAN};
  float prev_target_temperature_high_{NAN};
@@ -380,9 +347,6 @@ class ThermostatClimate : public climate::Climate, public Component {
  float heating_deadband_{0};
  float heating_overrun_{0};

  /// Hysteresis values used for computing humidification action
  float humidity_hysteresis_{0};

  /// Maximum allowable temperature deltas before engaging supplemental cooling/heating actions
  float supplemental_cool_delta_{0};
  float supplemental_heat_delta_{0};
@@ -484,24 +448,12 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// The trigger to call when the controller should switch the swing mode to "vertical".
  Trigger<> *swing_mode_vertical_trigger_{nullptr};

  /// The trigger to call when the target humidity changes.
  Trigger<> *humidity_change_trigger_{nullptr};

  /// The trigger to call when the target temperature(s) change(es).
  Trigger<> *temperature_change_trigger_{nullptr};

  /// The trigger to call when the preset mode changes
  Trigger<> *preset_change_trigger_{nullptr};

  /// The trigger to call when dehumidification is required
  Trigger<> *humidity_control_dehumidify_action_trigger_{nullptr};

  /// The trigger to call when humidification is required
  Trigger<> *humidity_control_humidify_action_trigger_{nullptr};

  /// The trigger to call when (de)humidification should stop
  Trigger<> *humidity_control_off_action_trigger_{nullptr};

  /// A reference to the trigger that was previously active.
  ///
  /// This is so that the previous trigger can be stopped before enabling a new one
@@ -510,7 +462,6 @@ class ThermostatClimate : public climate::Climate, public Component {
  Trigger<> *prev_fan_mode_trigger_{nullptr};
  Trigger<> *prev_mode_trigger_{nullptr};
  Trigger<> *prev_swing_mode_trigger_{nullptr};
  Trigger<> *prev_humidity_control_trigger_{nullptr};

  /// Default custom preset to use on start up
  std::string default_custom_preset_{};

@@ -27,14 +27,6 @@ class RealTimeClock : public PollingComponent {
    this->apply_timezone_();
  }

  /// Set the time zone from raw buffer, only if it differs from the current one.
  void set_timezone(const char *tz, size_t len) {
    if (this->timezone_.length() != len || memcmp(this->timezone_.c_str(), tz, len) != 0) {
      this->timezone_.assign(tz, len);
      this->apply_timezone_();
    }
  }

  /// Get the time zone currently in use.
  std::string get_timezone() { return this->timezone_; }
#endif

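The raw-buffer `set_timezone(const char *, size_t)` overload above avoids constructing a temporary `std::string` just to discover nothing changed: a length check plus `memcmp` decides whether the assign-and-reapply path runs at all. The comparison idiom on its own:

#include <cstddef>
#include <cstring>
#include <string>

// Returns true (and updates `stored`) only when the new bytes differ --
// the same compare-before-assign gate used by set_timezone() above.
bool assign_if_changed(std::string &stored, const char *buf, std::size_t len) {
  if (stored.length() == len && std::memcmp(stored.c_str(), buf, len) == 0)
    return false;  // identical: skip the assignment and any follow-up work
  stored.assign(buf, len);
  return true;
}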
@@ -1325,7 +1325,7 @@ std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_conf
    root["max_temp"] = value_accuracy_to_string(traits.get_visual_max_temperature(), target_accuracy);
    root["min_temp"] = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
    root["step"] = traits.get_visual_target_temperature_step();
    if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
    if (traits.get_supports_action()) {
      root["action"] = PSTR_LOCAL(climate_action_to_string(obj->action));
      root["state"] = root["action"];
      has_state = true;
@@ -1345,15 +1345,14 @@ std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_conf
    if (traits.get_supports_swing_modes()) {
      root["swing_mode"] = PSTR_LOCAL(climate_swing_mode_to_string(obj->swing_mode));
    }
    if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
    if (traits.get_supports_current_temperature()) {
      if (!std::isnan(obj->current_temperature)) {
        root["current_temperature"] = value_accuracy_to_string(obj->current_temperature, current_accuracy);
      } else {
        root["current_temperature"] = "NA";
      }
    }
    if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                                 climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
    if (traits.get_supports_two_point_target_temperature()) {
      root["target_temperature_low"] = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
      root["target_temperature_high"] = value_accuracy_to_string(obj->target_temperature_high, target_accuracy);
      if (!has_state) {

@@ -407,8 +407,7 @@ async def to_code(config):

cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
cg.add(var.set_power_save_mode(config[CONF_POWER_SAVE_MODE]))
if config[CONF_FAST_CONNECT]:
cg.add_define("USE_WIFI_FAST_CONNECT")
cg.add(var.set_fast_connect(config[CONF_FAST_CONNECT]))
cg.add(var.set_passive_scan(config[CONF_PASSIVE_SCAN]))
if CONF_OUTPUT_POWER in config:
cg.add(var.set_output_power(config[CONF_OUTPUT_POWER]))

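Note on the hunk above: `fast_connect` moves from an unconditional runtime setter to a compile-time feature. Codegen emits the `USE_WIFI_FAST_CONNECT` define only when the option is enabled, so the `#ifdef USE_WIFI_FAST_CONNECT` blocks in the C++ hunks below drop out of builds that do not use it. A minimal sketch of the pattern, assuming an ESPHome-style codegen module `cg` as imported above (the `config` dict and key name are illustrative):

```python
# Sketch: config-driven compile-time gating.
import esphome.codegen as cg

def gate_fast_connect(config: dict) -> None:
    # Emit a C preprocessor define only when the user enabled the option,
    # so the guarded C++ code is compiled out entirely otherwise.
    if config.get("fast_connect", False):  # illustrative key name
        cg.add_define("USE_WIFI_FAST_CONNECT")
```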
@@ -84,9 +84,9 @@ void WiFiComponent::start() {
uint32_t hash = this->has_sta() ? fnv1_hash(App.get_compilation_time()) : 88491487UL;

this->pref_ = global_preferences->make_preference<wifi::SavedWifiSettings>(hash, true);
#ifdef USE_WIFI_FAST_CONNECT
this->fast_connect_pref_ = global_preferences->make_preference<wifi::SavedWifiFastConnectSettings>(hash + 1, false);
#endif
if (this->fast_connect_) {
this->fast_connect_pref_ = global_preferences->make_preference<wifi::SavedWifiFastConnectSettings>(hash + 1, false);
}

SavedWifiSettings save{};
if (this->pref_.load(&save)) {
@@ -108,16 +108,16 @@ void WiFiComponent::start() {
ESP_LOGV(TAG, "Setting Power Save Option failed");
}

#ifdef USE_WIFI_FAST_CONNECT
this->trying_loaded_ap_ = this->load_fast_connect_settings_();
if (!this->trying_loaded_ap_) {
this->ap_index_ = 0;
this->selected_ap_ = this->sta_[this->ap_index_];
if (this->fast_connect_) {
this->trying_loaded_ap_ = this->load_fast_connect_settings_();
if (!this->trying_loaded_ap_) {
this->ap_index_ = 0;
this->selected_ap_ = this->sta_[this->ap_index_];
}
this->start_connecting(this->selected_ap_, false);
} else {
this->start_scanning();
}
this->start_connecting(this->selected_ap_, false);
#else
this->start_scanning();
#endif
#ifdef USE_WIFI_AP
} else if (this->has_ap()) {
this->setup_ap_config_();
@@ -168,20 +168,13 @@ void WiFiComponent::loop() {
case WIFI_COMPONENT_STATE_COOLDOWN: {
this->status_set_warning(LOG_STR("waiting to reconnect"));
if (millis() - this->action_started_ > 5000) {
#ifdef USE_WIFI_FAST_CONNECT
// NOTE: This check may not make sense here as it could interfere with AP cycling
if (!this->selected_ap_.get_bssid().has_value())
this->selected_ap_ = this->sta_[0];
this->start_connecting(this->selected_ap_, false);
#else
if (this->retry_hidden_) {
if (this->fast_connect_ || this->retry_hidden_) {
if (!this->selected_ap_.get_bssid().has_value())
this->selected_ap_ = this->sta_[0];
this->start_connecting(this->selected_ap_, false);
} else {
this->start_scanning();
}
#endif
}
break;
}
@@ -251,6 +244,7 @@ WiFiComponent::WiFiComponent() { global_wifi_component = this; }

bool WiFiComponent::has_ap() const { return this->has_ap_; }
bool WiFiComponent::has_sta() const { return !this->sta_.empty(); }
void WiFiComponent::set_fast_connect(bool fast_connect) { this->fast_connect_ = fast_connect; }
#ifdef USE_WIFI_11KV_SUPPORT
void WiFiComponent::set_btm(bool btm) { this->btm_ = btm; }
void WiFiComponent::set_rrm(bool rrm) { this->rrm_ = rrm; }
@@ -613,12 +607,10 @@ void WiFiComponent::check_scanning_finished() {
for (auto &ap : this->sta_) {
if (res.matches(ap)) {
res.set_matches(true);
// Cache priority lookup - do single search instead of 2 separate searches
const bssid_t &bssid = res.get_bssid();
if (!this->has_sta_priority(bssid)) {
this->set_sta_priority(bssid, ap.get_priority());
if (!this->has_sta_priority(res.get_bssid())) {
this->set_sta_priority(res.get_bssid(), ap.get_priority());
}
res.set_priority(this->get_sta_priority(bssid));
res.set_priority(this->get_sta_priority(res.get_bssid()));
break;
}
}
@@ -637,9 +629,8 @@ void WiFiComponent::check_scanning_finished() {
return;
}

// Build connection params directly into selected_ap_ to avoid extra copy
const WiFiScanResult &scan_res = this->scan_result_[0];
WiFiAP &selected = this->selected_ap_;
WiFiAP connect_params;
WiFiScanResult scan_res = this->scan_result_[0];
for (auto &config : this->sta_) {
// search for matching STA config, at least one will match (from checks before)
if (!scan_res.matches(config)) {
@@ -648,38 +639,37 @@ void WiFiComponent::check_scanning_finished() {

if (config.get_hidden()) {
// selected network is hidden, we use the data from the config
selected.set_hidden(true);
selected.set_ssid(config.get_ssid());
// Clear channel and BSSID for hidden networks - there might be multiple hidden networks
connect_params.set_hidden(true);
connect_params.set_ssid(config.get_ssid());
// don't set BSSID and channel, there might be multiple hidden networks
// but we can't know which one is the correct one. Rely on probe-req with just SSID.
selected.set_channel(0);
selected.set_bssid(optional<bssid_t>{});
} else {
// selected network is visible, we use the data from the scan
// limit the connect params to only connect to exactly this network
// (network selection is done during scan phase).
selected.set_hidden(false);
selected.set_ssid(scan_res.get_ssid());
selected.set_channel(scan_res.get_channel());
selected.set_bssid(scan_res.get_bssid());
connect_params.set_hidden(false);
connect_params.set_ssid(scan_res.get_ssid());
connect_params.set_channel(scan_res.get_channel());
connect_params.set_bssid(scan_res.get_bssid());
}
// copy manual IP (if set)
selected.set_manual_ip(config.get_manual_ip());
connect_params.set_manual_ip(config.get_manual_ip());

#ifdef USE_WIFI_WPA2_EAP
// copy EAP parameters (if set)
selected.set_eap(config.get_eap());
connect_params.set_eap(config.get_eap());
#endif

// copy password (if set)
selected.set_password(config.get_password());
connect_params.set_password(config.get_password());

break;
}

yield();

this->start_connecting(this->selected_ap_, false);
this->selected_ap_ = connect_params;
this->start_connecting(connect_params, false);
}

void WiFiComponent::dump_config() {
@@ -729,9 +719,9 @@ void WiFiComponent::check_connecting_finished() {
this->scan_result_.shrink_to_fit();
}

#ifdef USE_WIFI_FAST_CONNECT
this->save_fast_connect_settings_();
#endif
if (this->fast_connect_) {
this->save_fast_connect_settings_();
}

return;
}
@@ -779,31 +769,31 @@ void WiFiComponent::retry_connect() {
delay(10);
if (!this->is_captive_portal_active_() && !this->is_esp32_improv_active_() &&
(this->num_retried_ > 3 || this->error_from_callback_)) {
#ifdef USE_WIFI_FAST_CONNECT
if (this->trying_loaded_ap_) {
this->trying_loaded_ap_ = false;
this->ap_index_ = 0;  // Retry from the first configured AP
} else if (this->ap_index_ >= this->sta_.size() - 1) {
ESP_LOGW(TAG, "No more APs to try");
this->ap_index_ = 0;
this->restart_adapter();
if (this->fast_connect_) {
if (this->trying_loaded_ap_) {
this->trying_loaded_ap_ = false;
this->ap_index_ = 0;  // Retry from the first configured AP
} else if (this->ap_index_ >= this->sta_.size() - 1) {
ESP_LOGW(TAG, "No more APs to try");
this->ap_index_ = 0;
this->restart_adapter();
} else {
// Try next AP
this->ap_index_++;
}
this->num_retried_ = 0;
this->selected_ap_ = this->sta_[this->ap_index_];
} else {
// Try next AP
this->ap_index_++;
if (this->num_retried_ > 5) {
// If retry failed for more than 5 times, let's restart STA
this->restart_adapter();
} else {
// Try hidden networks after 3 failed retries
ESP_LOGD(TAG, "Retrying with hidden networks");
this->retry_hidden_ = true;
this->num_retried_++;
}
}
this->num_retried_ = 0;
this->selected_ap_ = this->sta_[this->ap_index_];
#else
if (this->num_retried_ > 5) {
// If retry failed for more than 5 times, let's restart STA
this->restart_adapter();
} else {
// Try hidden networks after 3 failed retries
ESP_LOGD(TAG, "Retrying with hidden networks");
this->retry_hidden_ = true;
this->num_retried_++;
}
#endif
} else {
this->num_retried_++;
}
@@ -849,7 +839,6 @@ bool WiFiComponent::is_esp32_improv_active_() {
#endif
}

#ifdef USE_WIFI_FAST_CONNECT
bool WiFiComponent::load_fast_connect_settings_() {
SavedWifiFastConnectSettings fast_connect_save{};

@@ -884,7 +873,6 @@ void WiFiComponent::save_fast_connect_settings_() {
ESP_LOGD(TAG, "Saved fast_connect settings");
}
}
#endif

void WiFiAP::set_ssid(const std::string &ssid) { this->ssid_ = ssid; }
void WiFiAP::set_bssid(bssid_t bssid) { this->bssid_ = bssid; }
@@ -914,7 +902,7 @@ WiFiScanResult::WiFiScanResult(const bssid_t &bssid, std::string ssid, uint8_t c
rssi_(rssi),
with_auth_(with_auth),
is_hidden_(is_hidden) {}
bool WiFiScanResult::matches(const WiFiAP &config) const {
bool WiFiScanResult::matches(const WiFiAP &config) {
if (config.get_hidden()) {
// User configured a hidden network, only match actually hidden networks
// don't match SSID

@@ -170,7 +170,7 @@ class WiFiScanResult {
public:
WiFiScanResult(const bssid_t &bssid, std::string ssid, uint8_t channel, int8_t rssi, bool with_auth, bool is_hidden);

bool matches(const WiFiAP &config) const;
bool matches(const WiFiAP &config);

bool get_matches() const;
void set_matches(bool matches);
@@ -240,6 +240,7 @@ class WiFiComponent : public Component {
void start_scanning();
void check_scanning_finished();
void start_connecting(const WiFiAP &ap, bool two);
void set_fast_connect(bool fast_connect);
void set_ap_timeout(uint32_t ap_timeout) { ap_timeout_ = ap_timeout; }

void check_connecting_finished();
@@ -363,10 +364,8 @@ class WiFiComponent : public Component {
bool is_captive_portal_active_();
bool is_esp32_improv_active_();

#ifdef USE_WIFI_FAST_CONNECT
bool load_fast_connect_settings_();
void save_fast_connect_settings_();
#endif

#ifdef USE_ESP8266
static void wifi_event_callback(System_Event_t *event);
@@ -400,9 +399,7 @@ class WiFiComponent : public Component {
WiFiAP ap_;
optional<float> output_power_;
ESPPreferenceObject pref_;
#ifdef USE_WIFI_FAST_CONNECT
ESPPreferenceObject fast_connect_pref_;
#endif

// Group all 32-bit integers together
uint32_t action_started_;
@@ -414,17 +411,14 @@ class WiFiComponent : public Component {
WiFiComponentState state_{WIFI_COMPONENT_STATE_OFF};
WiFiPowerSaveMode power_save_{WIFI_POWER_SAVE_NONE};
uint8_t num_retried_{0};
#ifdef USE_WIFI_FAST_CONNECT
uint8_t ap_index_{0};
#endif
#if USE_NETWORK_IPV6
uint8_t num_ipv6_addresses_{0};
#endif /* USE_NETWORK_IPV6 */

// Group all boolean values together
#ifdef USE_WIFI_FAST_CONNECT
bool fast_connect_{false};
bool trying_loaded_ap_{false};
#endif
bool retry_hidden_{false};
bool has_ap_{false};
bool handled_connected_state_{false};

@@ -706,10 +706,10 @@ void WiFiComponent::wifi_scan_done_callback_(void *arg, STATUS status) {

this->scan_result_.init(count);
for (bss_info *it = head; it != nullptr; it = STAILQ_NEXT(it, next)) {
this->scan_result_.emplace_back(
bssid_t{it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
std::string(reinterpret_cast<char *>(it->ssid), it->ssid_len), it->channel, it->rssi, it->authmode != AUTH_OPEN,
it->is_hidden != 0);
WiFiScanResult res({it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
std::string(reinterpret_cast<char *>(it->ssid), it->ssid_len), it->channel, it->rssi,
it->authmode != AUTH_OPEN, it->is_hidden != 0);
this->scan_result_.push_back(res);
}
this->scan_done_ = true;
}

@@ -776,12 +776,13 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
}

uint16_t number = it.number;
auto records = std::make_unique<wifi_ap_record_t[]>(number);
err = esp_wifi_scan_get_ap_records(&number, records.get());
std::vector<wifi_ap_record_t> records(number);
err = esp_wifi_scan_get_ap_records(&number, records.data());
if (err != ESP_OK) {
ESP_LOGW(TAG, "esp_wifi_scan_get_ap_records failed: %s", esp_err_to_name(err));
return;
}
records.resize(number);

scan_result_.init(number);
for (int i = 0; i < number; i++) {
@@ -789,8 +790,8 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
bssid_t bssid;
std::copy(record.bssid, record.bssid + 6, bssid.begin());
std::string ssid(reinterpret_cast<const char *>(record.ssid));
scan_result_.emplace_back(bssid, ssid, record.primary, record.rssi, record.authmode != WIFI_AUTH_OPEN,
ssid.empty());
WiFiScanResult result(bssid, ssid, record.primary, record.rssi, record.authmode != WIFI_AUTH_OPEN, ssid.empty());
scan_result_.push_back(result);
}

} else if (data->event_base == WIFI_EVENT && data->event_id == WIFI_EVENT_AP_START) {

@@ -419,9 +419,9 @@ void WiFiComponent::wifi_scan_done_callback_() {
uint8_t *bssid = WiFi.BSSID(i);
int32_t channel = WiFi.channel(i);

this->scan_result_.emplace_back(bssid_t{bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]},
std::string(ssid.c_str()), channel, rssi, authmode != WIFI_AUTH_OPEN,
ssid.length() == 0);
WiFiScanResult scan({bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]}, std::string(ssid.c_str()),
channel, rssi, authmode != WIFI_AUTH_OPEN, ssid.length() == 0);
this->scan_result_.push_back(scan);
}
WiFi.scanDelete();
this->scan_done_ = true;

@@ -1,34 +0,0 @@
import esphome.codegen as cg
from esphome.components.zephyr import zephyr_add_prj_conf
import esphome.config_validation as cv
from esphome.const import CONF_ESPHOME, CONF_ID, CONF_NAME, Framework
import esphome.final_validate as fv

zephyr_ble_server_ns = cg.esphome_ns.namespace("zephyr_ble_server")
BLEServer = zephyr_ble_server_ns.class_("BLEServer", cg.Component)

CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(BLEServer),
}
).extend(cv.COMPONENT_SCHEMA),
cv.only_with_framework(Framework.ZEPHYR),
)


def _final_validate(_):
full_config = fv.full_config.get()
zephyr_add_prj_conf("BT_DEVICE_NAME", full_config[CONF_ESPHOME][CONF_NAME])


FINAL_VALIDATE_SCHEMA = _final_validate


async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
zephyr_add_prj_conf("BT", True)
zephyr_add_prj_conf("BT_PERIPHERAL", True)
zephyr_add_prj_conf("BT_RX_STACK_SIZE", 1536)
# zephyr_add_prj_conf("BT_LL_SW_SPLIT", True)
await cg.register_component(var, config)
@@ -1,100 +0,0 @@
#ifdef USE_ZEPHYR
#include "ble_server.h"
#include "esphome/core/defines.h"
#include "esphome/core/log.h"
#include <zephyr/bluetooth/bluetooth.h>
#include <zephyr/bluetooth/conn.h>

namespace esphome::zephyr_ble_server {

static const char *const TAG = "zephyr_ble_server";

static struct k_work advertise_work;  // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)

#define DEVICE_NAME CONFIG_BT_DEVICE_NAME
#define DEVICE_NAME_LEN (sizeof(DEVICE_NAME) - 1)

static const struct bt_data AD[] = {
BT_DATA_BYTES(BT_DATA_FLAGS, (BT_LE_AD_GENERAL | BT_LE_AD_NO_BREDR)),
BT_DATA(BT_DATA_NAME_COMPLETE, DEVICE_NAME, DEVICE_NAME_LEN),
};

static const struct bt_data SD[] = {
#ifdef USE_OTA
BT_DATA_BYTES(BT_DATA_UUID128_ALL, 0x84, 0xaa, 0x60, 0x74, 0x52, 0x8a, 0x8b, 0x86, 0xd3, 0x4c, 0xb7, 0x1d, 0x1d,
0xdc, 0x53, 0x8d),
#endif
};

const struct bt_le_adv_param *const ADV_PARAM = BT_LE_ADV_CONN;

static void advertise(struct k_work *work) {
int rc = bt_le_adv_stop();
if (rc) {
ESP_LOGE(TAG, "Advertising failed to stop (rc %d)", rc);
}

rc = bt_le_adv_start(ADV_PARAM, AD, ARRAY_SIZE(AD), SD, ARRAY_SIZE(SD));
if (rc) {
ESP_LOGE(TAG, "Advertising failed to start (rc %d)", rc);
return;
}
ESP_LOGI(TAG, "Advertising successfully started");
}

static void connected(struct bt_conn *conn, uint8_t err) {
if (err) {
ESP_LOGE(TAG, "Connection failed (err 0x%02x)", err);
} else {
ESP_LOGI(TAG, "Connected");
}
}

static void disconnected(struct bt_conn *conn, uint8_t reason) {
ESP_LOGI(TAG, "Disconnected (reason 0x%02x)", reason);
k_work_submit(&advertise_work);
}

static void bt_ready(int err) {
if (err != 0) {
ESP_LOGE(TAG, "Bluetooth failed to initialise: %d", err);
} else {
k_work_submit(&advertise_work);
}
}

BT_CONN_CB_DEFINE(conn_callbacks) = {
.connected = connected,
.disconnected = disconnected,
};

void BLEServer::setup() {
k_work_init(&advertise_work, advertise);
resume_();
}

void BLEServer::loop() {
if (this->suspended_) {
resume_();
this->suspended_ = false;
}
}

void BLEServer::resume_() {
int rc = bt_enable(bt_ready);
if (rc != 0) {
ESP_LOGE(TAG, "Bluetooth enable failed: %d", rc);
return;
}
}

void BLEServer::on_shutdown() {
struct k_work_sync sync;
k_work_cancel_sync(&advertise_work, &sync);
bt_disable();
this->suspended_ = true;
}

}  // namespace esphome::zephyr_ble_server

#endif
@@ -1,19 +0,0 @@
#pragma once
#ifdef USE_ZEPHYR
#include "esphome/core/component.h"

namespace esphome::zephyr_ble_server {

class BLEServer : public Component {
public:
void setup() override;
void loop() override;
void on_shutdown() override;

protected:
void resume_();
bool suspended_ = false;
};

}  // namespace esphome::zephyr_ble_server
#endif
@@ -12,7 +12,7 @@ from typing import Any
import voluptuous as vol

from esphome import core, loader, pins, yaml_util
from esphome.config_helpers import Extend, Remove, merge_config, merge_dicts_ordered
from esphome.config_helpers import Extend, Remove, merge_dicts_ordered
import esphome.config_validation as cv
from esphome.const import (
CONF_ESPHOME,
@@ -324,7 +324,13 @@ def iter_ids(config, path=None):
yield from iter_ids(value, path + [key])


def check_replaceme(value):
def recursive_check_replaceme(value):
if isinstance(value, list):
return cv.Schema([recursive_check_replaceme])(value)
if isinstance(value, dict):
return cv.Schema({cv.valid: recursive_check_replaceme})(value)
if isinstance(value, ESPLiteralValue):
pass
if isinstance(value, str) and value == "REPLACEME":
raise cv.Invalid(
"Found 'REPLACEME' in configuration, this is most likely an error. "
@@ -333,86 +339,7 @@ def check_replaceme(value):
"If you want to use the literal REPLACEME string, "
'please use "!literal REPLACEME"'
)


def _build_list_index(lst):
index = OrderedDict()
extensions, removals = [], set()
for item in lst:
if item is None:
removals.add(None)
continue
item_id = None
if isinstance(item, dict) and (item_id := item.get(CONF_ID)):
if isinstance(item_id, Extend):
extensions.append(item)
continue
if isinstance(item_id, Remove):
removals.add(item_id.value)
continue
if not item_id or item_id in index:
# no id or duplicate -> pass through with identity-based key
item_id = id(item)
index[item_id] = item
return index, extensions, removals


def resolve_extend_remove(value, is_key=None):
if isinstance(value, ESPLiteralValue):
return  # do not check inside literal blocks
if isinstance(value, list):
index, extensions, removals = _build_list_index(value)
if extensions or removals:
# Rebuild the original list after
# processing all extensions and removals
for item in extensions:
item_id = item[CONF_ID].value
if item_id in removals:
continue
old = index.get(item_id)
if old is None:
# Failed to find source for extension
# Find index of item to show error at correct position
i = next(
(
i
for i, d in enumerate(value)
if d.get(CONF_ID) == item[CONF_ID]
)
)
with cv.prepend_path(i):
raise cv.Invalid(
f"Source for extension of ID '{item_id}' was not found."
)
item[CONF_ID] = item_id
index[item_id] = merge_config(old, item)
for item_id in removals:
index.pop(item_id, None)

value[:] = index.values()

for i, item in enumerate(value):
with cv.prepend_path(i):
resolve_extend_remove(item, False)
return
if isinstance(value, dict):
removals = []
for k, v in value.items():
with cv.prepend_path(k):
if isinstance(v, Remove):
removals.append(k)
continue
resolve_extend_remove(k, True)
resolve_extend_remove(v, False)
for k in removals:
value.pop(k, None)
return
if is_key:
return  # do not check keys (yet)

check_replaceme(value)

return
return value


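For orientation, a self-contained sketch of the id-keyed list resolution that `_build_list_index()`/`resolve_extend_remove()` implement above: an `Extend` id merges the new item into the item carrying the matching id, while a `Remove` id drops it. The stub classes and the `resolve` helper below are illustrative stand-ins, not the ESPHome API:

```python
# Illustrative stand-ins for esphome.config_helpers.Extend/Remove.
class Extend:
    def __init__(self, value):
        self.value = value

class Remove:
    def __init__(self, value):
        self.value = value

def resolve(items):
    """Sketch of id-keyed resolution: merge Extend items, drop Remove items."""
    index, result = {}, []
    for item in items:
        item_id = item.get("id")
        if isinstance(item_id, Extend):
            # Merge the extension into the previously declared item.
            index[item_id.value].update(
                {k: v for k, v in item.items() if k != "id"}
            )
        elif isinstance(item_id, Remove):
            # Drop the previously declared item.
            result.remove(index.pop(item_id.value))
        else:
            index[item_id] = item
            result.append(item)
    return result

print(resolve([
    {"id": "temp1", "update_interval": "60s"},
    {"id": Extend("temp1"), "accuracy_decimals": 2},
]))
# -> [{'id': 'temp1', 'update_interval': '60s', 'accuracy_decimals': 2}]
```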
class ConfigValidationStep(abc.ABC):
@@ -510,6 +437,19 @@ class LoadValidationStep(ConfigValidationStep):
continue
p_name = p_config.get("platform")
if p_name is None:
p_id = p_config.get(CONF_ID)
if isinstance(p_id, Extend):
result.add_str_error(
f"Source for extension of ID '{p_id.value}' was not found.",
path + [CONF_ID],
)
continue
if isinstance(p_id, Remove):
result.add_str_error(
f"Source for removal of ID '{p_id.value}' was not found.",
path + [CONF_ID],
)
continue
result.add_str_error(
f"'{self.domain}' requires a 'platform' key but it was not specified.",
path,
@@ -994,10 +934,9 @@ def validate_config(

CORE.raw_config = config

# 1.1. Resolve !extend and !remove and check for REPLACEME
# After this step, there will not be any Extend or Remove values in the config anymore
# 1.1. Check for REPLACEME special value
try:
resolve_extend_remove(config)
recursive_check_replaceme(config)
except vol.Invalid as err:
result.add_error(err)


@@ -1,6 +1,7 @@
from collections.abc import Callable

from esphome.const import (
CONF_ID,
CONF_LEVEL,
CONF_LOGGER,
KEY_CORE,
@@ -74,28 +75,73 @@ class Remove:
return isinstance(b, Remove) and self.value == b.value


def merge_config(old, new):
if isinstance(new, Remove):
return new
if isinstance(new, dict):
if not isinstance(old, dict):
return new
# Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict
if isinstance(old, OrderedDict) or isinstance(new, OrderedDict):
res = OrderedDict(old)
else:
def merge_config(full_old, full_new):
def merge(old, new):
if isinstance(new, dict):
if not isinstance(old, dict):
return new
# Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict
if isinstance(old, OrderedDict) or isinstance(new, OrderedDict):
res = OrderedDict(old)
else:
res = old.copy()
for k, v in new.items():
if isinstance(v, Remove) and k in old:
del res[k]
else:
res[k] = merge(old[k], v) if k in old else v
return res
if isinstance(new, list):
if not isinstance(old, list):
return new
res = old.copy()
for k, v in new.items():
res[k] = merge_config(old.get(k), v)
return res
if isinstance(new, list):
if not isinstance(old, list):
return new
return old + new
if new is None:
return old
ids = {
v_id: i
for i, v in enumerate(res)
if isinstance(v, dict)
and (v_id := v.get(CONF_ID))
and isinstance(v_id, str)
}
extend_ids = {
v_id.value: i
for i, v in enumerate(res)
if isinstance(v, dict)
and (v_id := v.get(CONF_ID))
and isinstance(v_id, Extend)
}

return new
ids_to_delete = []
for v in new:
if isinstance(v, dict) and (new_id := v.get(CONF_ID)):
if isinstance(new_id, Extend):
new_id = new_id.value
if new_id in ids:
v[CONF_ID] = new_id
res[ids[new_id]] = merge(res[ids[new_id]], v)
continue
elif isinstance(new_id, Remove):
new_id = new_id.value
if new_id in ids:
ids_to_delete.append(ids[new_id])
continue
elif (
new_id in extend_ids
):  # When a package is extending a non-packaged item
extend_res = res[extend_ids[new_id]]
extend_res[CONF_ID] = new_id
new_v = merge(v, extend_res)
res[extend_ids[new_id]] = new_v
continue
else:
ids[new_id] = len(res)
res.append(v)
return [v for i, v in enumerate(res) if i not in ids_to_delete]
if new is None:
return old

return new

return merge(full_old, full_new)


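A hedged usage sketch of the list-merge semantics implemented by `merge_config` above: list items are matched on their `id`, an `Extend` id merges the new item into the matching old one, and a `Remove` id deletes it (the config fragments are illustrative):

```python
from esphome.config_helpers import Extend, Remove, merge_config

old = {"sensor": [{"id": "t1", "update_interval": "60s"}]}

# Extend: the new keys are merged into the item whose id matches.
extended = merge_config(
    old, {"sensor": [{"id": Extend("t1"), "accuracy_decimals": 2}]}
)
# extended["sensor"] == [{"id": "t1", "update_interval": "60s", "accuracy_decimals": 2}]

# Remove: the matching item is dropped from the merged list.
removed = merge_config(old, {"sensor": [{"id": Remove("t1")}]})
# removed["sensor"] == []
```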
def filter_source_files_from_platform(

@@ -24,6 +24,7 @@ import voluptuous as vol

from esphome import core
import esphome.codegen as cg
from esphome.config_helpers import Extend, Remove
from esphome.const import (
ALLOWED_NAME_CHARS,
CONF_AVAILABILITY,
@@ -623,6 +624,12 @@ def declare_id(type):
if value is None:
return core.ID(None, is_declaration=True, type=type)

if isinstance(value, Extend):
raise Invalid(f"Source for extension of ID '{value.value}' was not found.")

if isinstance(value, Remove):
raise Invalid(f"Source for Removal of ID '{value.value}' was not found.")

return core.ID(validate_id_name(value), is_declaration=True, type=type)

return validator

@@ -199,7 +199,6 @@
#define USE_WEBSERVER_PORT 80  // NOLINT
#define USE_WEBSERVER_SORTING
#define USE_WIFI_11KV_SUPPORT
#define USE_WIFI_FAST_CONNECT
#define USB_HOST_MAX_REQUESTS 16

#ifdef USE_ARDUINO

@@ -281,13 +281,13 @@ template<typename T> class FixedVector {
}
}

/// Emplace element without bounds checking - constructs in-place with arguments
/// Emplace element without bounds checking - constructs in-place
/// Caller must ensure sufficient capacity was allocated via init()
/// Returns reference to the newly constructed element
/// NOTE: Caller MUST ensure size_ < capacity_ before calling
template<typename... Args> T &emplace_back(Args &&...args) {
// Use placement new to construct the object in pre-allocated memory
new (&data_[size_]) T(std::forward<Args>(args)...);
T &emplace_back() {
// Use placement new to default-construct the object in pre-allocated memory
new (&data_[size_]) T();
size_++;
return data_[size_ - 1];
}
@@ -1158,4 +1158,18 @@ template<typename T, enable_if_t<std::is_pointer<T *>::value, int> = 0> T &id(T

///@}

/// @name Deprecated functions
///@{

ESPDEPRECATED("hexencode() is deprecated, use format_hex_pretty() instead.", "2022.1")
inline std::string hexencode(const uint8_t *data, uint32_t len) { return format_hex_pretty(data, len); }

template<typename T>
ESPDEPRECATED("hexencode() is deprecated, use format_hex_pretty() instead.", "2022.1")
std::string hexencode(const T &data) {
return hexencode(data.data(), data.size());
}

///@}

}  // namespace esphome

@@ -328,30 +328,17 @@ void HOT Scheduler::call(uint32_t now) {
// Single-core platforms don't use this queue and fall back to the heap-based approach.
//
// Note: Items cancelled via cancel_item_locked_() are marked with remove=true but still
// processed here. They are skipped during execution by should_skip_item_().
// This is intentional - no memory leak occurs.
//
// We use an index (defer_queue_front_) to track the read position instead of calling
// erase() on every pop, which would be O(n). The queue is processed once per loop -
// any items added during processing are left for the next loop iteration.

// Snapshot the queue end point - only process items that existed at loop start
// Items added during processing (by callbacks or other threads) run next loop
// No lock needed: single consumer (main loop), stale read just means we process less this iteration
size_t defer_queue_end = this->defer_queue_.size();

while (this->defer_queue_front_ < defer_queue_end) {
// processed here. They are removed from the queue normally via pop_front() but skipped
// during execution by should_skip_item_(). This is intentional - no memory leak occurs.
while (!this->defer_queue_.empty()) {
// The outer check is done without a lock for performance. If the queue
// appears non-empty, we lock and process an item. We don't need to check
// empty() again inside the lock because only this thread can remove items.
std::unique_ptr<SchedulerItem> item;
{
LockGuard lock(this->lock_);
// SAFETY: Moving out the unique_ptr leaves a nullptr in the vector at defer_queue_front_.
// This is intentional and safe because:
// 1. The vector is only cleaned up by cleanup_defer_queue_locked_() at the end of this function
// 2. Any code iterating defer_queue_ MUST check for nullptr items (see mark_matching_items_removed_
//    and has_cancelled_timeout_in_container_ in scheduler.h)
// 3. The lock protects concurrent access, but the nullptr remains until cleanup
item = std::move(this->defer_queue_[this->defer_queue_front_]);
this->defer_queue_front_++;
item = std::move(this->defer_queue_.front());
this->defer_queue_.pop_front();
}

// Execute callback without holding lock to prevent deadlocks
@@ -362,13 +349,6 @@ void HOT Scheduler::call(uint32_t now) {
// Recycle the defer item after execution
this->recycle_item_(std::move(item));
}

// If we've consumed all items up to the snapshot point, clean up the dead space
// Single consumer (main loop), so no lock needed for this check
if (this->defer_queue_front_ >= defer_queue_end) {
LockGuard lock(this->lock_);
this->cleanup_defer_queue_locked_();
}
#endif /* not ESPHOME_THREAD_SINGLE */

// Convert the fresh timestamp from main loop to 64-bit for scheduler operations

@@ -264,36 +264,6 @@ class Scheduler {
// Helper to recycle a SchedulerItem
void recycle_item_(std::unique_ptr<SchedulerItem> item);

#ifndef ESPHOME_THREAD_SINGLE
// Helper to cleanup defer_queue_ after processing
// IMPORTANT: Caller must hold the scheduler lock before calling this function.
inline void cleanup_defer_queue_locked_() {
// Check if new items were added by producers during processing
if (this->defer_queue_front_ >= this->defer_queue_.size()) {
// Common case: no new items - clear everything
this->defer_queue_.clear();
} else {
// Rare case: new items were added during processing - compact the vector
// This only happens when:
// 1. A deferred callback calls defer() again, or
// 2. Another thread calls defer() while we're processing
//
// Move unprocessed items (added during this loop) to the front for next iteration
//
// SAFETY: Compacted items may include cancelled items (marked for removal via
// cancel_item_locked_() during execution). This is safe because should_skip_item_()
// checks is_item_removed_() before executing, so cancelled items will be skipped
// and recycled on the next loop iteration.
size_t remaining = this->defer_queue_.size() - this->defer_queue_front_;
for (size_t i = 0; i < remaining; i++) {
this->defer_queue_[i] = std::move(this->defer_queue_[this->defer_queue_front_ + i]);
}
this->defer_queue_.resize(remaining);
}
this->defer_queue_front_ = 0;
}
#endif /* not ESPHOME_THREAD_SINGLE */

// Helper to check if item is marked for removal (platform-specific)
// Returns true if item should be skipped, handles platform-specific synchronization
// For ESPHOME_THREAD_MULTI_NO_ATOMICS platforms, the caller must hold the scheduler lock before calling this
@@ -312,18 +282,13 @@ class Scheduler {

// Helper to mark matching items in a container as removed
// Returns the number of items marked for removal
// IMPORTANT: Caller must hold the scheduler lock before calling this function.
// For ESPHOME_THREAD_MULTI_NO_ATOMICS platforms, the caller must hold the scheduler lock before calling this
// function.
template<typename Container>
size_t mark_matching_items_removed_(Container &container, Component *component, const char *name_cstr,
SchedulerItem::Type type, bool match_retry) {
size_t count = 0;
for (auto &item : container) {
// Skip nullptr items (can happen in defer_queue_ when items are being processed)
// The defer_queue_ uses index-based processing: items are std::moved out but left in the
// vector as nullptr until cleanup. Even though this function is called with lock held,
// the vector can still contain nullptr items from the processing loop. This check prevents crashes.
if (!item)
continue;
if (this->matches_item_(item, component, name_cstr, type, match_retry)) {
// Mark item for removal (platform-specific)
#ifdef ESPHOME_THREAD_MULTI_ATOMICS
@@ -346,12 +311,6 @@ class Scheduler {
bool has_cancelled_timeout_in_container_(const Container &container, Component *component, const char *name_cstr,
bool match_retry) const {
for (const auto &item : container) {
// Skip nullptr items (can happen in defer_queue_ when items are being processed)
// The defer_queue_ uses index-based processing: items are std::moved out but left in the
// vector as nullptr until cleanup. If this function is called during defer queue processing,
// it will iterate over these nullptr items. This check prevents crashes.
if (!item)
continue;
if (is_item_removed_(item.get()) &&
this->matches_item_(item, component, name_cstr, SchedulerItem::TIMEOUT, match_retry,
/* skip_removed= */ false)) {
@@ -365,12 +324,9 @@ class Scheduler {
std::vector<std::unique_ptr<SchedulerItem>> items_;
std::vector<std::unique_ptr<SchedulerItem>> to_add_;
#ifndef ESPHOME_THREAD_SINGLE
// Single-core platforms don't need the defer queue and save ~32 bytes of RAM
// Using std::vector instead of std::deque avoids 512-byte chunked allocations
// Index tracking avoids O(n) erase() calls when draining the queue each loop
std::vector<std::unique_ptr<SchedulerItem>> defer_queue_;  // FIFO queue for defer() calls
size_t defer_queue_front_{0};  // Index of first valid item in defer_queue_ (tracks consumed items)
#endif /* ESPHOME_THREAD_SINGLE */
// Single-core platforms don't need the defer queue and save 40 bytes of RAM
std::deque<std::unique_ptr<SchedulerItem>> defer_queue_;  // FIFO queue for defer() calls
#endif /* ESPHOME_THREAD_SINGLE */
uint32_t to_remove_{0};

// Memory pool for recycling SchedulerItem objects to reduce heap churn.

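Both sides of the scheduler change drain the same FIFO; the difference is the container and bookkeeping described in the comments above (deque with `pop_front()` versus vector with a `defer_queue_front_` index and one compaction pass). A language-neutral sketch of the index-based technique, written in Python for brevity (the real implementation is the locked C++ above):

```python
# Sketch: index-based FIFO draining with deferred compaction, mirroring
# defer_queue_ / defer_queue_front_ above. Items appended while draining
# are left for the next iteration; the length snapshot bounds the loop.
queue = []   # stands in for defer_queue_
front = 0    # stands in for defer_queue_front_

def drain():
    global front
    end = len(queue)  # snapshot taken at loop start
    while front < end:
        item, queue[front] = queue[front], None  # "move out" (C++ leaves a nullptr)
        front += 1
        if item is not None:  # skip already-consumed/cancelled slots
            item()            # run the deferred callback
    del queue[:front]         # one O(n) compaction instead of per-pop erases
    front = 0
```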
@@ -1058,8 +1058,7 @@ class DownloadBinaryRequestHandler(BaseHandler):
"download",
f"{storage_json.name}-{file_name}",
)

path = storage_json.firmware_bin_path.parent.joinpath(file_name)
path = storage_json.firmware_bin_path.with_name(file_name)

if not path.is_file():
args = ["esphome", "idedata", settings.rel_path(configuration)]

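The dashboard hunk swaps `parent.joinpath(file_name)` for the equivalent but more direct `Path.with_name`, which replaces the final path component:

```python
from pathlib import Path

p = Path("/data/build/device/firmware.bin")  # illustrative path
assert p.parent.joinpath("firmware.factory.bin") == p.with_name("firmware.factory.bin")
```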
@@ -1,362 +0,0 @@
"""GitHub download cache for ESPHome.

This module provides caching functionality for GitHub release downloads
to avoid redundant network I/O when switching between platforms.
"""

from __future__ import annotations

import hashlib
import json
import logging
from pathlib import Path
import shutil
import time
import urllib.error
import urllib.request

_LOGGER = logging.getLogger(__name__)


class GitHubCache:
"""Manages caching of GitHub release downloads."""

# Cache expiration time in seconds (30 days)
CACHE_EXPIRATION_SECONDS = 30 * 24 * 60 * 60

def __init__(self, cache_dir: Path | None = None):
"""Initialize the cache manager.

Args:
cache_dir: Directory to store cached files.
Defaults to ~/.esphome_cache/github
"""
if cache_dir is None:
cache_dir = Path.home() / ".esphome_cache" / "github"
self.cache_dir = cache_dir
self.cache_dir.mkdir(parents=True, exist_ok=True)
self.metadata_file = self.cache_dir / "cache_metadata.json"
# Prune old files on initialization
try:
self._prune_old_files()
except Exception as e:
_LOGGER.debug("Failed to prune old cache files: %s", e)

def _load_metadata(self) -> dict:
"""Load cache metadata from disk."""
if self.metadata_file.exists():
try:
with open(self.metadata_file) as f:
return json.load(f)
except (OSError, ValueError, json.JSONDecodeError):
return {}
return {}

def _save_metadata(self, metadata: dict) -> None:
"""Save cache metadata to disk."""
try:
with open(self.metadata_file, "w") as f:
json.dump(metadata, f, indent=2)
except OSError as e:
_LOGGER.debug("Failed to save cache metadata: %s", e)

@staticmethod
def is_github_url(url: str) -> bool:
"""Check if URL is a GitHub release download."""
return "github.com" in url.lower() and url.endswith(".zip")

def _get_cache_key(self, url: str) -> str:
"""Get cache key (hash) for a URL."""
return hashlib.sha256(url.encode()).hexdigest()

def _get_cache_path(self, url: str) -> Path:
"""Get cache file path for a URL."""
cache_key = self._get_cache_key(url)
ext = Path(url.split("?")[0]).suffix
return self.cache_dir / f"{cache_key}{ext}"

def _check_if_modified(
self,
url: str,
last_modified: str | None = None,
etag: str | None = None,
) -> bool:
"""Check if a URL has been modified using HTTP 304.

Args:
url: URL to check
last_modified: Last-Modified header from previous response
etag: ETag header from previous response

Returns:
True if modified, False if not modified (or offline/unreachable)
"""
if not last_modified and not etag:
# No cache headers available, assume modified
return True

try:
request = urllib.request.Request(url)
request.get_method = lambda: "HEAD"

if last_modified:
request.add_header("If-Modified-Since", last_modified)
if etag:
request.add_header("If-None-Match", etag)

try:
urllib.request.urlopen(request, timeout=10)
# 200 OK = file was modified
return True
except urllib.error.HTTPError as e:
if e.code == 304:
# Not modified
_LOGGER.debug("File not modified (HTTP 304): %s", url)
return False
# Other errors, assume modified to be safe
return True
except (OSError, urllib.error.URLError):
# If check fails (offline/network error), assume not modified (use cache)
_LOGGER.info("Cannot reach server (offline?), using cached file: %s", url)
return False

def get_cached_path(self, url: str, check_updates: bool = True) -> Path | None:
"""Get path to cached file if available and valid.

Args:
url: URL to check
check_updates: Whether to check for updates using HTTP 304

Returns:
Path to cached file if valid, None if needs download
"""
if not self.is_github_url(url):
return None

cache_path = self._get_cache_path(url)
if not cache_path.exists():
return None

# Load metadata
metadata = self._load_metadata()
cache_key = self._get_cache_key(url)

# Check if file should be re-downloaded
should_redownload = False
if check_updates and cache_key in metadata:
last_modified = metadata[cache_key].get("last_modified")
etag = metadata[cache_key].get("etag")
if self._check_if_modified(url, last_modified, etag):
# File was modified, need to re-download
_LOGGER.debug("Cached file is outdated: %s", url)
should_redownload = True

if should_redownload:
return None

# File is valid, update cached_at timestamp to keep it fresh
if cache_key in metadata:
metadata[cache_key]["cached_at"] = time.time()
self._save_metadata(metadata)

# Log appropriate message
if not check_updates:
_LOGGER.debug("Using cached file (no update check): %s", url)
elif cache_key not in metadata:
_LOGGER.debug("Using cached file (no metadata): %s", url)
else:
_LOGGER.debug("Using cached file: %s", url)

return cache_path

def save_to_cache(self, url: str, source_path: Path) -> None:
"""Save a downloaded file to cache.

Args:
url: URL the file was downloaded from
source_path: Path to the downloaded file
"""
if not self.is_github_url(url):
return

try:
cache_path = self._get_cache_path(url)
# Only copy if source and destination are different
if source_path.resolve() != cache_path.resolve():
shutil.copy2(source_path, cache_path)

# Try to get HTTP headers for caching
last_modified = None
etag = None
try:
request = urllib.request.Request(url)
request.get_method = lambda: "HEAD"
response = urllib.request.urlopen(request, timeout=10)
last_modified = response.headers.get("Last-Modified")
etag = response.headers.get("ETag")
except (OSError, urllib.error.URLError):
pass

# Update metadata
metadata = self._load_metadata()
cache_key = self._get_cache_key(url)

metadata[cache_key] = {
"url": url,
"size": cache_path.stat().st_size,
"cached_at": time.time(),
"last_modified": last_modified,
"etag": etag,
}
self._save_metadata(metadata)

_LOGGER.debug("Saved to cache: %s", url)

except OSError as e:
_LOGGER.debug("Failed to save to cache: %s", e)

def copy_from_cache(self, url: str, destination: Path) -> bool:
"""Copy a cached file to destination.

Args:
url: URL of the cached file
destination: Where to copy the file

Returns:
True if successful, False otherwise
"""
cached_path = self.get_cached_path(url, check_updates=True)
if not cached_path:
return False

try:
shutil.copy2(cached_path, destination)
_LOGGER.info("Using cached download for %s", url)
return True
except OSError as e:
_LOGGER.warning("Failed to use cache: %s", e)
return False

def cache_size(self) -> int:
"""Get total size of cached files in bytes."""
total = 0
try:
for file_path in self.cache_dir.glob("*"):
if file_path.is_file() and file_path != self.metadata_file:
total += file_path.stat().st_size
except OSError:
pass
return total

def list_cached(self) -> list[dict]:
"""List all cached files with metadata."""
cached_files = []
metadata = self._load_metadata()

for cache_key, meta in metadata.items():
cache_path = (
self.cache_dir / f"{cache_key}{Path(meta['url'].split('?')[0]).suffix}"
)
if cache_path.exists():
cached_files.append(
{
"url": meta["url"],
"path": cache_path,
"size": meta["size"],
"cached_at": meta.get("cached_at"),
"last_modified": meta.get("last_modified"),
"etag": meta.get("etag"),
}
)

return cached_files

def clear_cache(self) -> None:
"""Clear all cached files."""
try:
for file_path in self.cache_dir.glob("*"):
if file_path.is_file():
file_path.unlink()
_LOGGER.info("Cache cleared: %s", self.cache_dir)
except OSError as e:
_LOGGER.warning("Failed to clear cache: %s", e)

def _prune_old_files(self) -> None:
"""Remove cache files older than CACHE_EXPIRATION_SECONDS."""
current_time = time.time()
metadata = self._load_metadata()
removed_count = 0
removed_size = 0

# Check each file in metadata
for cache_key, meta in list(metadata.items()):
cached_at = meta.get("cached_at", 0)
age_seconds = current_time - cached_at

if age_seconds > self.CACHE_EXPIRATION_SECONDS:
# File is too old, remove it
cache_path = (
self.cache_dir
/ f"{cache_key}{Path(meta['url'].split('?')[0]).suffix}"
)
if cache_path.exists():
file_size = cache_path.stat().st_size
cache_path.unlink()
removed_size += file_size
removed_count += 1
_LOGGER.debug(
"Pruned old cache file (age: %.1f days): %s",
age_seconds / (24 * 60 * 60),
meta["url"],
)

# Remove from metadata
del metadata[cache_key]

# Also check for orphaned files (files without metadata)
for file_path in self.cache_dir.glob("*.zip"):
if file_path == self.metadata_file:
continue

# Check if file is in metadata
found_in_metadata = False
for cache_key in metadata:
if file_path.name.startswith(cache_key):
found_in_metadata = True
break

if not found_in_metadata:
# Orphaned file - check age by modification time
file_age = current_time - file_path.stat().st_mtime
if file_age > self.CACHE_EXPIRATION_SECONDS:
file_size = file_path.stat().st_size
file_path.unlink()
removed_size += file_size
removed_count += 1
_LOGGER.debug(
"Pruned orphaned cache file (age: %.1f days): %s",
file_age / (24 * 60 * 60),
file_path.name,
)

# Save updated metadata if anything was removed
if removed_count > 0:
self._save_metadata(metadata)
removed_mb = removed_size / (1024 * 1024)
_LOGGER.info(
"Pruned %d old cache file(s), freed %.2f MB",
removed_count,
removed_mb,
)


# Global cache instance
_cache: GitHubCache | None = None


def get_cache() -> GitHubCache:
"""Get the global GitHub cache instance."""
global _cache  # noqa: PLW0603
if _cache is None:
_cache = GitHubCache()
return _cache
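For context, the module above (deleted in this compare) was meant to be driven in a check-cache-first loop. A hedged usage sketch of its public API as defined above; the URL, destination, and `download()` helper are illustrative:

```python
from pathlib import Path
from esphome.github_cache import get_cache  # module removed in this compare

url = "https://github.com/example/project/releases/download/v1.0/toolchain.zip"
dest = Path("downloads/toolchain.zip")

cache = get_cache()
if not cache.copy_from_cache(url, dest):  # miss, or upstream file changed (HTTP 304 check)
    download(url, dest)                   # hypothetical downloader
    cache.save_to_cache(url, dest)        # store and record Last-Modified/ETag headers
```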
@@ -5,6 +5,7 @@ import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
from typing import Any
|
||||
|
||||
from esphome.const import CONF_COMPILE_PROCESS_LIMIT, CONF_ESPHOME, KEY_CORE
|
||||
from esphome.core import CORE, EsphomeError
|
||||
@@ -43,168 +44,32 @@ def patch_structhash():
|
||||
|
||||
|
||||
def patch_file_downloader():
|
||||
"""Patch PlatformIO's FileDownloader to add caching and retry on PackageException errors.
|
||||
"""Patch PlatformIO's FileDownloader to retry on PackageException errors."""
|
||||
from platformio.package.download import FileDownloader
|
||||
from platformio.package.exception import PackageException
|
||||
|
||||
This function attempts to patch PlatformIO's internal download mechanism.
|
||||
If patching fails (due to API changes), it gracefully falls back to no caching.
|
||||
"""
|
||||
try:
|
||||
from platformio.package.download import FileDownloader
|
||||
from platformio.package.exception import PackageException
|
||||
except ImportError as e:
|
||||
_LOGGER.debug("Could not import PlatformIO modules for patching: %s", e)
|
||||
return
|
||||
original_init = FileDownloader.__init__
|
||||
|
||||
# Import our cache module
|
||||
from esphome.github_cache import GitHubCache
|
||||
def patched_init(self, *args: Any, **kwargs: Any) -> None:
|
||||
max_retries = 3
|
||||
|
||||
_LOGGER.debug("Applying GitHub download cache patch...")
|
||||
|
||||
# Verify the classes have the expected methods before patching
|
||||
if not hasattr(FileDownloader, "__init__") or not hasattr(FileDownloader, "start"):
|
||||
_LOGGER.warning(
|
||||
"PlatformIO FileDownloader API has changed, skipping cache patch"
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
original_init = FileDownloader.__init__
|
||||
original_start = FileDownloader.start
|
||||
|
||||
# Initialize cache in .platformio directory so it benefits from GitHub Actions cache
|
||||
platformio_dir = Path.home() / ".platformio"
|
||||
cache_dir = platformio_dir / "esphome_download_cache"
|
||||
cache_dir_existed = cache_dir.exists()
|
||||
cache = GitHubCache(cache_dir=cache_dir)
|
||||
if not cache_dir_existed:
|
||||
_LOGGER.info("Created GitHub download cache at: %s", cache.cache_dir)
|
||||
except Exception as e:
|
||||
_LOGGER.warning("Failed to initialize GitHub download cache: %s", e)
|
||||
return
|
||||
|
||||
    def patched_init(self, *args, **kwargs):
        """Patched init that checks cache before making HTTP connection."""
        try:
            # Extract URL from args (first positional argument)
            url = args[0] if args else kwargs.get("url")
            dest_dir = args[1] if len(args) > 1 else kwargs.get("dest_dir")

            # Debug: Log all downloads
            _LOGGER.debug("[GitHub Cache] Download request for: %s", url)

            # Store URL for later use (original FileDownloader doesn't store it)
            self._esphome_cache_url = url if cache.is_github_url(url) else None

            # Check cache for GitHub URLs BEFORE making HTTP request
            if self._esphome_cache_url:
                _LOGGER.debug("[GitHub Cache] This is a GitHub URL, checking cache...")
                self._esphome_use_cache = cache.get_cached_path(url, check_updates=True)
                if self._esphome_use_cache:
                    _LOGGER.info(
                        "Found %s in cache, will restore instead of downloading",
                        Path(url.split("?")[0]).name,
                    )
                    _LOGGER.debug(
                        "[GitHub Cache] Found in cache: %s", self._esphome_use_cache
                    )
                else:
                    _LOGGER.debug(
                        "[GitHub Cache] Not in cache, will download and cache"
                    )
            else:
                self._esphome_use_cache = None
                if url and str(url).startswith("http"):
                    _LOGGER.debug("[GitHub Cache] Not a GitHub URL, skipping cache")

            # Only make HTTP connection if we don't have cached file
            if self._esphome_use_cache:
                # Skip HTTP connection, we'll handle this in start()
                # Set minimal attributes to satisfy FileDownloader
                # Create a mock session that can be safely closed in __del__
                class MockSession:
                    def close(self):
                        pass

                self._http_session = MockSession()
                self._http_response = None
                self._fname = Path(url.split("?")[0]).name
                self._destination = self._fname
                if dest_dir:
                    from os.path import join

                    self._destination = join(dest_dir, self._fname)
                # Note: Actual restoration logged in patched_start
                return None  # Don't call original_init

            # Normal initialization with retry logic
            max_retries = 3
            for attempt in range(max_retries):
                try:
                    return original_init(self, *args, **kwargs)
                except PackageException as e:
                    if attempt < max_retries - 1:
                        _LOGGER.warning(
                            "Package download failed: %s. Retrying... (attempt %d/%d)",
                            str(e),
                            attempt + 1,
                            max_retries,
                        )
                    else:
                        # Final attempt - re-raise
                        raise
            return None
        except Exception as e:
            # If anything goes wrong in our cache logic, fall back to normal download
            _LOGGER.debug("Cache check failed, falling back to normal download: %s", e)
            self._esphome_cache_url = None
            self._esphome_use_cache = None
            return original_init(self, *args, **kwargs)

||||
    def patched_start(self, *args, **kwargs):
        """Patched start that uses cache when available."""
        try:
            import shutil

            # Get the cache URL and path that were set in __init__
            cache_url = getattr(self, "_esphome_cache_url", None)
            cached_file = getattr(self, "_esphome_use_cache", None)

            # If we're using cache, copy file instead of downloading
            if cached_file:
                try:
                    shutil.copy2(cached_file, self._destination)
                    _LOGGER.info(
                        "Restored %s from cache (avoided download)",
                        Path(cached_file).name,
                    )
                    return True
                except OSError as e:
                    _LOGGER.warning("Failed to copy from cache: %s", e)
                    # Fall through to re-download

            # Perform normal download
            result = original_start(self, *args, **kwargs)

            # Save to cache if it was a GitHub URL
            if cache_url:
                try:
                    cache.save_to_cache(cache_url, Path(self._destination))
                except OSError as e:
                    _LOGGER.debug("Failed to save to cache: %s", e)

            return result
        except Exception as e:
            # If anything goes wrong, fall back to normal download
            _LOGGER.debug("Cache restoration failed, using normal download: %s", e)
            return original_start(self, *args, **kwargs)

    # Apply the patches
    try:
        FileDownloader.__init__ = patched_init
        FileDownloader.start = patched_start
        _LOGGER.debug("GitHub download cache patch applied successfully")
    except Exception as e:
        _LOGGER.warning("Failed to apply GitHub download cache patch: %s", e)

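The patch above relies only on a narrow slice of esphome.github_cache.GitHubCache. A minimal sketch of that assumed interface follows (method names are taken from the calls above; the bodies and file-naming scheme are illustrative, not the real implementation):

import shutil
from pathlib import Path


class GitHubCacheSketch:
    """The surface the FileDownloader patch calls into (assumed, simplified)."""

    def __init__(self, cache_dir: Path) -> None:
        self.cache_dir = cache_dir
        self.cache_dir.mkdir(parents=True, exist_ok=True)

    def is_github_url(self, url: str) -> bool:
        # The patch only caches GitHub downloads.
        return "github.com" in str(url).lower()

    def get_cached_path(self, url: str, check_updates: bool = True) -> Path | None:
        # Return the local copy on a hit (optionally revalidated), else None.
        candidate = self.cache_dir / Path(url.split("?")[0]).name
        return candidate if candidate.exists() else None

    def save_to_cache(self, url: str, downloaded_file: Path) -> None:
        # Copy a freshly downloaded file into the cache for next time.
        shutil.copy2(downloaded_file, self.cache_dir / Path(url.split("?")[0]).name)
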
IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
@@ -222,8 +87,6 @@ FILTER_PLATFORMIO_LINES = [
    r"Memory Usage -> https://bit.ly/pio-memory-usage",
    r"Found: https://platformio.org/lib/show/.*",
    r"Using cache: .*",
    # Don't filter our cache messages - let users see when cache is being used
    # r"Using cached download for .*",
    r"Installing dependencies",
    r"Library Manager: Already installed, built-in library",
    r"Building in .* mode",

@@ -12,7 +12,7 @@ platformio==6.1.18 # When updating platformio, also update /docker/Dockerfile
esptool==5.1.0
click==8.1.7
esphome-dashboard==20251013.0
aioesphomeapi==42.2.0
aioesphomeapi==42.0.0
zeroconf==0.148.0
puremagic==1.30
ruamel.yaml==0.18.15 # dashboard_import

@@ -1415,13 +1415,7 @@ class RepeatedTypeInfo(TypeInfo):
        super().__init__(field)
        # Check if this is a pointer field by looking for container_pointer option
        self._container_type = get_field_opt(field, pb.container_pointer, "")
        # Check for non-template container pointer
        self._container_no_template = get_field_opt(
            field, pb.container_pointer_no_template, ""
        )
        self._use_pointer = bool(self._container_type) or bool(
            self._container_no_template
        )
        self._use_pointer = bool(self._container_type)
        # Check if this should use FixedVector instead of std::vector
        self._use_fixed_vector = get_field_opt(field, pb.fixed_vector, False)

@@ -1440,18 +1434,12 @@ class RepeatedTypeInfo(TypeInfo):

    @property
    def cpp_type(self) -> str:
        if self._container_no_template:
            # Non-template container: use type as-is without appending template parameters
            return f"const {self._container_no_template}*"
        if self._use_pointer and self._container_type:
            # For pointer fields, use the specified container type
            # Two cases:
            # 1. "std::set<climate::ClimateMode>" - Full type with template params, use as-is
            # 2. "std::set" - No <>, append the element type
            # If the container type already includes the element type (e.g., std::set<climate::ClimateMode>)
            # use it as-is, otherwise append the element type
            if "<" in self._container_type and ">" in self._container_type:
                # Has template parameters specified, use as-is
                return f"const {self._container_type}*"
            # No <> at all, append element type
            return f"const {self._container_type}<{self._ti.cpp_type}>*"
        if self._use_fixed_vector:
            return f"FixedVector<{self._ti.cpp_type}>"

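To make the two container-pointer cases concrete, the same branching can be exercised standalone (a sketch; element_type stands in for self._ti.cpp_type):

def pointer_cpp_type(container_type: str, element_type: str) -> str:
    if "<" in container_type and ">" in container_type:
        # Case 1: full type with template params - use as-is
        return f"const {container_type}*"
    # Case 2: bare container - append the element type
    return f"const {container_type}<{element_type}>*"

assert pointer_cpp_type("std::set<climate::ClimateMode>", "-") == "const std::set<climate::ClimateMode>*"
assert pointer_cpp_type("std::set", "uint32_t") == "const std::set<uint32_t>*"
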
@@ -1,164 +0,0 @@
#!/usr/bin/env python3
"""
Pre-cache PlatformIO GitHub Downloads

This script extracts GitHub URLs from platformio.ini and pre-caches them
to avoid redundant downloads when switching between ESP8266 and ESP32 builds.

Usage:
    python3 script/cache_platformio_downloads.py [platformio.ini]
"""

import argparse
import configparser
from pathlib import Path
import re
import sys

# Import the cache manager
sys.path.insert(0, str(Path(__file__).parent.parent))
from esphome.github_cache import GitHubCache


def extract_github_urls(platformio_ini: Path) -> list[str]:
    """Extract all GitHub URLs from platformio.ini.

    Args:
        platformio_ini: Path to platformio.ini file

    Returns:
        List of GitHub URLs found
    """
    config = configparser.ConfigParser(inline_comment_prefixes=(";",))
    config.read(platformio_ini)

    urls = []
    github_pattern = re.compile(r"https://github\.com/[^\s;]+\.zip")

    for section in config.sections():
        conf = config[section]

        # Check platform
        if "platform" in conf:
            platform_value = conf["platform"]
            matches = github_pattern.findall(platform_value)
            urls.extend(matches)

        # Check platform_packages
        if "platform_packages" in conf:
            for line in conf["platform_packages"].splitlines():
                line = line.strip()
                if not line or line.startswith("#"):
                    continue
                matches = github_pattern.findall(line)
                urls.extend(matches)

    # Remove duplicates while preserving order using dict
    return list(dict.fromkeys(urls))

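To illustrate what the extractor matches, here is a self-contained run against a made-up platformio.ini (the URLs are placeholders, not real releases):

import tempfile

INI = """\
[env:esp32]
platform = https://github.com/example/platform-espressif32/releases/download/1.0.0/platform.zip
platform_packages =
    framework @ https://github.com/example/arduino-esp32/releases/download/2.0.0/framework.zip
"""

with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as tmp:
    tmp.write(INI)

for url in extract_github_urls(Path(tmp.name)):
    print(url)
# Both URLs print once each; duplicates would be dropped while preserving order.
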
def main():
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Pre-cache PlatformIO GitHub downloads",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
This script scans platformio.ini for GitHub URLs and pre-caches them.
This avoids redundant downloads when switching between platforms (e.g., ESP8266 and ESP32).

Examples:
  # Cache downloads from default platformio.ini
  %(prog)s

  # Cache downloads from specific file
  %(prog)s custom_platformio.ini

  # Show what would be cached without downloading
  %(prog)s --dry-run
""",
    )

    parser.add_argument(
        "platformio_ini",
        nargs="?",
        default="platformio.ini",
        help="Path to platformio.ini (default: platformio.ini)",
    )

    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Show what would be cached without downloading",
    )

    parser.add_argument(
        "--cache-dir",
        type=Path,
        help="Cache directory (default: ~/.platformio/esphome_download_cache)",
    )

    parser.add_argument(
        "--force",
        action="store_true",
        help="Force re-download even if cached",
    )

    args = parser.parse_args()

    platformio_ini = Path(args.platformio_ini)

    if not platformio_ini.exists():
        print(f"Error: {platformio_ini} not found", file=sys.stderr)
        return 1

    # Extract URLs
    print(f"Scanning {platformio_ini} for GitHub URLs...")
    urls = extract_github_urls(platformio_ini)

    if not urls:
        print("No GitHub URLs found in platformio.ini")
        return 0

    print(f"Found {len(urls)} unique GitHub URL(s):")
    for url in urls:
        print(f"  - {url}")
    print()

    if args.dry_run:
        print("Dry run - not downloading")
        return 0

    # Initialize cache (use PlatformIO directory by default)
    cache_dir = args.cache_dir
    if cache_dir is None:
        cache_dir = Path.home() / ".platformio" / "esphome_download_cache"
    cache = GitHubCache(cache_dir)

    # Cache each URL
    success_count = 0
    for i, url in enumerate(urls, 1):
        print(f"[{i}/{len(urls)}] Checking {url}")
        try:
            # Use the download_with_progress from github_download_cache CLI
            from script.github_download_cache import download_with_progress

            download_with_progress(cache, url, force=args.force, check_updates=True)
            success_count += 1
            print()
        except Exception as e:
            print(f"Error caching {url}: {e}", file=sys.stderr)
            print()

    # Show cache stats
    total_size = cache.cache_size()
    size_mb = total_size / (1024 * 1024)
    print("\nCache summary:")
    print(f"  Successfully cached: {success_count}/{len(urls)}")
    print(f"  Total cache size: {size_mb:.2f} MB")
    print(f"  Cache location: {cache.cache_dir}")

    return 0 if success_count == len(urls) else 1


if __name__ == "__main__":
    sys.exit(main())
@@ -1,88 +0,0 @@
#!/usr/bin/env python3
"""Add metadata to memory analysis JSON file.

This script adds components and platform metadata to an existing
memory analysis JSON file. Used by CI to ensure all required fields are present
for the comment script.
"""

from __future__ import annotations

import argparse
import json
from pathlib import Path
import sys


def main() -> int:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Add metadata to memory analysis JSON file"
    )
    parser.add_argument(
        "--json-file",
        required=True,
        help="Path to JSON file to update",
    )
    parser.add_argument(
        "--components",
        required=True,
        help='JSON array of component names (e.g., \'["api", "wifi"]\')',
    )
    parser.add_argument(
        "--platform",
        required=True,
        help="Platform name",
    )

    args = parser.parse_args()

    # Load existing JSON
    json_path = Path(args.json_file)
    if not json_path.exists():
        print(f"Error: JSON file not found: {args.json_file}", file=sys.stderr)
        return 1

    try:
        with open(json_path, encoding="utf-8") as f:
            data = json.load(f)
    except (json.JSONDecodeError, OSError) as e:
        print(f"Error loading JSON: {e}", file=sys.stderr)
        return 1

    # Parse components
    try:
        components = json.loads(args.components)
        if not isinstance(components, list):
            print("Error: --components must be a JSON array", file=sys.stderr)
            return 1
        # Element-level validation: ensure each component is a non-empty string
        for idx, comp in enumerate(components):
            if not isinstance(comp, str) or not comp.strip():
                print(
                    f"Error: component at index {idx} is not a non-empty string: {comp!r}",
                    file=sys.stderr,
                )
                return 1
    except json.JSONDecodeError as e:
        print(f"Error parsing components: {e}", file=sys.stderr)
        return 1

    # Add metadata
    data["components"] = components
    data["platform"] = args.platform

    # Write back
    try:
        with open(json_path, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2)
        print(f"Added metadata to {args.json_file}", file=sys.stderr)
    except OSError as e:
        print(f"Error writing JSON: {e}", file=sys.stderr)
        return 1

    return 0


if __name__ == "__main__":
    sys.exit(main())
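The net effect on the JSON file is small; the transformation boils down to this (values invented for the illustration):

import json

data = {"ram_bytes": 40816, "flash_bytes": 863101}  # as produced by the analyzer
data["components"] = json.loads('["api", "wifi"]')  # from --components
data["platform"] = "esp32-idf"                      # from --platform
print(json.dumps(data, indent=2))
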
@@ -24,37 +24,6 @@ sys.path.insert(0, str(Path(__file__).parent.parent))
# Comment marker to identify our memory impact comments
COMMENT_MARKER = "<!-- esphome-memory-impact-analysis -->"


def run_gh_command(args: list[str], operation: str) -> subprocess.CompletedProcess:
    """Run a gh CLI command with error handling.

    Args:
        args: Command arguments (including 'gh')
        operation: Description of the operation for error messages

    Returns:
        CompletedProcess result

    Raises:
        subprocess.CalledProcessError: If command fails (with detailed error output)
    """
    try:
        return subprocess.run(
            args,
            check=True,
            capture_output=True,
            text=True,
        )
    except subprocess.CalledProcessError as e:
        print(
            f"ERROR: {operation} failed with exit code {e.returncode}", file=sys.stderr
        )
        print(f"ERROR: Command: {' '.join(args)}", file=sys.stderr)
        print(f"ERROR: stdout: {e.stdout}", file=sys.stderr)
        print(f"ERROR: stderr: {e.stderr}", file=sys.stderr)
        raise

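A typical call site looks like this (PR number invented; on failure the helper prints the command, stdout, and stderr before re-raising):

result = run_gh_command(
    ["gh", "pr", "view", "12345", "--json", "title"],
    operation="Fetch PR title",
)
print(result.stdout)
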
# Thresholds for emoji significance indicators (percentage)
OVERALL_CHANGE_THRESHOLD = 1.0  # Overall RAM/Flash changes
COMPONENT_CHANGE_THRESHOLD = 3.0  # Component breakdown changes
@@ -269,6 +238,7 @@ def create_comment_body(
    pr_analysis: dict | None = None,
    target_symbols: dict | None = None,
    pr_symbols: dict | None = None,
    target_cache_hit: bool = False,
) -> str:
    """Create the comment body with memory impact analysis using Jinja2 templates.

@@ -283,6 +253,7 @@
        pr_analysis: Optional component breakdown for PR branch
        target_symbols: Optional symbol map for target branch
        pr_symbols: Optional symbol map for PR branch
        target_cache_hit: Whether target branch analysis was loaded from cache

    Returns:
        Formatted comment body
@@ -312,6 +283,7 @@
        "flash_change": format_change(
            target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD
        ),
        "target_cache_hit": target_cache_hit,
        "component_change_threshold": COMPONENT_CHANGE_THRESHOLD,
    }

@@ -384,7 +356,7 @@ def find_existing_comment(pr_number: str) -> str | None:
    print(f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr)

    # Use gh api to get comments directly - this returns the numeric id field
    result = run_gh_command(
    result = subprocess.run(
        [
            "gh",
            "api",
@@ -392,7 +364,9 @@
            "--jq",
            ".[] | {id, body}",
        ],
        operation="Get PR comments",
        capture_output=True,
        text=True,
        check=True,
    )

    print(
@@ -446,8 +420,7 @@ def update_existing_comment(comment_id: str, comment_body: str) -> None:
        subprocess.CalledProcessError: If gh command fails
    """
    print(f"DEBUG: Updating existing comment {comment_id}", file=sys.stderr)
    print(f"DEBUG: Comment body length: {len(comment_body)} bytes", file=sys.stderr)
    result = run_gh_command(
    result = subprocess.run(
        [
            "gh",
            "api",
@@ -457,7 +430,9 @@
            "-f",
            f"body={comment_body}",
        ],
        operation="Update PR comment",
        check=True,
        capture_output=True,
        text=True,
    )
    print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr)

@@ -473,10 +448,11 @@ def create_new_comment(pr_number: str, comment_body: str) -> None:
        subprocess.CalledProcessError: If gh command fails
    """
    print(f"DEBUG: Posting new comment on PR #{pr_number}", file=sys.stderr)
    print(f"DEBUG: Comment body length: {len(comment_body)} bytes", file=sys.stderr)
    result = run_gh_command(
    result = subprocess.run(
        ["gh", "pr", "comment", pr_number, "--body", comment_body],
        operation="Create PR comment",
        check=True,
        capture_output=True,
        text=True,
    )
    print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr)

@@ -509,128 +485,79 @@ def main() -> int:
    )
    parser.add_argument("--pr-number", required=True, help="PR number")
    parser.add_argument(
        "--target-json",
        "--components",
        required=True,
        help="Path to target branch analysis JSON file",
        help='JSON array of component names (e.g., \'["api", "wifi"]\')',
    )
    parser.add_argument("--platform", required=True, help="Platform name")
    parser.add_argument(
        "--target-ram", type=int, required=True, help="Target branch RAM usage"
    )
    parser.add_argument(
        "--target-flash", type=int, required=True, help="Target branch flash usage"
    )
    parser.add_argument("--pr-ram", type=int, required=True, help="PR branch RAM usage")
    parser.add_argument(
        "--pr-flash", type=int, required=True, help="PR branch flash usage"
    )
    parser.add_argument(
        "--target-json",
        help="Optional path to target branch analysis JSON (for detailed analysis)",
    )
    parser.add_argument(
        "--pr-json",
        required=True,
        help="Path to PR branch analysis JSON file",
        help="Optional path to PR branch analysis JSON (for detailed analysis)",
    )
    parser.add_argument(
        "--target-cache-hit",
        action="store_true",
        help="Indicates that target branch analysis was loaded from cache",
    )

    args = parser.parse_args()

    # Load analysis JSON files (all data comes from JSON for security)
    target_data: dict | None = load_analysis_json(args.target_json)
    if not target_data:
        print("Error: Failed to load target analysis JSON", file=sys.stderr)
    # Parse components from JSON
    try:
        components = json.loads(args.components)
        if not isinstance(components, list):
            print("Error: --components must be a JSON array", file=sys.stderr)
            sys.exit(1)
    except json.JSONDecodeError as e:
        print(f"Error parsing --components JSON: {e}", file=sys.stderr)
        sys.exit(1)

    pr_data: dict | None = load_analysis_json(args.pr_json)
    if not pr_data:
        print("Error: Failed to load PR analysis JSON", file=sys.stderr)
        sys.exit(1)
    # Load analysis JSON files
    target_analysis = None
    pr_analysis = None
    target_symbols = None
    pr_symbols = None

    # Extract detailed analysis if available
    target_analysis: dict | None = None
    pr_analysis: dict | None = None
    target_symbols: dict | None = None
    pr_symbols: dict | None = None
    if args.target_json:
        target_data = load_analysis_json(args.target_json)
        if target_data and target_data.get("detailed_analysis"):
            target_analysis = target_data["detailed_analysis"].get("components")
            target_symbols = target_data["detailed_analysis"].get("symbols")

    if target_data.get("detailed_analysis"):
        target_analysis = target_data["detailed_analysis"].get("components")
        target_symbols = target_data["detailed_analysis"].get("symbols")

    if pr_data.get("detailed_analysis"):
        pr_analysis = pr_data["detailed_analysis"].get("components")
        pr_symbols = pr_data["detailed_analysis"].get("symbols")

    # Extract all values from JSON files (prevents shell injection from PR code)
    components = target_data.get("components")
    platform = target_data.get("platform")
    target_ram = target_data.get("ram_bytes")
    target_flash = target_data.get("flash_bytes")
    pr_ram = pr_data.get("ram_bytes")
    pr_flash = pr_data.get("flash_bytes")

    # Validate required fields and types
    missing_fields: list[str] = []
    type_errors: list[str] = []

    if components is None:
        missing_fields.append("components")
    elif not isinstance(components, list):
        type_errors.append(
            f"components must be a list, got {type(components).__name__}"
        )
    else:
        for idx, comp in enumerate(components):
            if not isinstance(comp, str):
                type_errors.append(
                    f"components[{idx}] must be a string, got {type(comp).__name__}"
                )
    if platform is None:
        missing_fields.append("platform")
    elif not isinstance(platform, str):
        type_errors.append(f"platform must be a string, got {type(platform).__name__}")

    if target_ram is None:
        missing_fields.append("target.ram_bytes")
    elif not isinstance(target_ram, int):
        type_errors.append(
            f"target.ram_bytes must be an integer, got {type(target_ram).__name__}"
        )

    if target_flash is None:
        missing_fields.append("target.flash_bytes")
    elif not isinstance(target_flash, int):
        type_errors.append(
            f"target.flash_bytes must be an integer, got {type(target_flash).__name__}"
        )

    if pr_ram is None:
        missing_fields.append("pr.ram_bytes")
    elif not isinstance(pr_ram, int):
        type_errors.append(
            f"pr.ram_bytes must be an integer, got {type(pr_ram).__name__}"
        )

    if pr_flash is None:
        missing_fields.append("pr.flash_bytes")
    elif not isinstance(pr_flash, int):
        type_errors.append(
            f"pr.flash_bytes must be an integer, got {type(pr_flash).__name__}"
        )

    if missing_fields or type_errors:
        if missing_fields:
            print(
                f"Error: JSON files missing required fields: {', '.join(missing_fields)}",
                file=sys.stderr,
            )
        if type_errors:
            print(
                f"Error: Type validation failed: {'; '.join(type_errors)}",
                file=sys.stderr,
            )
        print(f"Target JSON keys: {list(target_data.keys())}", file=sys.stderr)
        print(f"PR JSON keys: {list(pr_data.keys())}", file=sys.stderr)
        sys.exit(1)
    if args.pr_json:
        pr_data = load_analysis_json(args.pr_json)
        if pr_data and pr_data.get("detailed_analysis"):
            pr_analysis = pr_data["detailed_analysis"].get("components")
            pr_symbols = pr_data["detailed_analysis"].get("symbols")

    # Create comment body
    # Note: Memory totals (RAM/Flash) are summed across all builds if multiple were run.
    comment_body = create_comment_body(
        components=components,
        platform=platform,
        target_ram=target_ram,
        target_flash=target_flash,
        pr_ram=pr_ram,
        pr_flash=pr_flash,
        platform=args.platform,
        target_ram=args.target_ram,
        target_flash=args.target_flash,
        pr_ram=args.pr_ram,
        pr_flash=args.pr_flash,
        target_analysis=target_analysis,
        pr_analysis=pr_analysis,
        target_symbols=target_symbols,
        pr_symbols=pr_symbols,
        target_cache_hit=args.target_cache_hit,
    )

    # Post or update comment

@@ -1,195 +0,0 @@
#!/usr/bin/env python3
"""
GitHub Download Cache CLI

This script provides a command-line interface to the GitHub download cache.
The actual caching logic is in esphome/github_cache.py.

Usage:
    python3 script/github_download_cache.py download URL
    python3 script/github_download_cache.py list
    python3 script/github_download_cache.py stats
    python3 script/github_download_cache.py clear
"""

import argparse
from pathlib import Path
import sys
import urllib.request

# Add parent directory to path to import esphome modules
sys.path.insert(0, str(Path(__file__).parent.parent))

from esphome.github_cache import GitHubCache


def download_with_progress(
    cache: GitHubCache, url: str, force: bool = False, check_updates: bool = True
) -> Path:
    """Download a URL with progress indicator and caching.

    Args:
        cache: GitHubCache instance
        url: URL to download
        force: Force re-download even if cached
        check_updates: Check for updates using HTTP 304

    Returns:
        Path to cached file
    """
    # If force, skip cache check
    if not force:
        cached_path = cache.get_cached_path(url, check_updates=check_updates)
        if cached_path:
            print(f"Using cached file for {url}")
            print(f"  Cache: {cached_path}")
            return cached_path

    # Need to download
    print(f"Downloading {url}")
    cache_path = cache._get_cache_path(url)
    print(f"  Cache: {cache_path}")

    # Download with progress
    temp_path = cache_path.with_suffix(cache_path.suffix + ".tmp")

    try:
        with urllib.request.urlopen(url) as response:
            total_size = int(response.headers.get("Content-Length", 0))
            downloaded = 0

            with open(temp_path, "wb") as f:
                while True:
                    chunk = response.read(8192)
                    if not chunk:
                        break
                    f.write(chunk)
                    downloaded += len(chunk)

                    if total_size > 0:
                        percent = (downloaded / total_size) * 100
                        print(f"\r  Progress: {percent:.1f}%", end="", flush=True)

            print()  # New line after progress

        # Move to final location
        temp_path.replace(cache_path)

        # Let cache handle metadata
        cache.save_to_cache(url, cache_path)

        return cache_path

    except (OSError, urllib.error.URLError) as e:
        if temp_path.exists():
            temp_path.unlink()
        raise RuntimeError(f"Failed to download {url}: {e}") from e

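For reference, other scripts use the function roughly like this (the release URL is the one from this file's own epilog examples):

cache = GitHubCache(Path.home() / ".platformio" / "esphome_download_cache")
local_zip = download_with_progress(
    cache,
    "https://github.com/pioarduino/registry/releases/download/0.0.1/esptoolpy-v5.1.0.zip",
)
print(f"Archive available at {local_zip}")
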
def main():
    """CLI entry point."""
    parser = argparse.ArgumentParser(
        description="GitHub Download Cache Manager",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Download and cache a URL
  %(prog)s download https://github.com/pioarduino/registry/releases/download/0.0.1/esptoolpy-v5.1.0.zip

  # List cached files
  %(prog)s list

  # Show cache statistics
  %(prog)s stats

  # Clear cache
  %(prog)s clear
""",
    )

    parser.add_argument(
        "--cache-dir",
        type=Path,
        help="Cache directory (default: ~/.platformio/esphome_download_cache)",
    )

    subparsers = parser.add_subparsers(dest="command", help="Command to execute")

    # Download command
    download_parser = subparsers.add_parser("download", help="Download and cache a URL")
    download_parser.add_argument("url", help="URL to download")
    download_parser.add_argument(
        "--force", action="store_true", help="Force re-download even if cached"
    )
    download_parser.add_argument(
        "--no-check-updates",
        action="store_true",
        help="Skip checking for updates (don't use HTTP 304)",
    )

    # List command
    subparsers.add_parser("list", help="List cached files")

    # Stats command
    subparsers.add_parser("stats", help="Show cache statistics")

    # Clear command
    subparsers.add_parser("clear", help="Clear all cached files")

    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        return 1

    # Use PlatformIO cache directory by default
    if args.cache_dir is None:
        args.cache_dir = Path.home() / ".platformio" / "esphome_download_cache"

    cache = GitHubCache(args.cache_dir)

    if args.command == "download":
        try:
            check_updates = not args.no_check_updates
            cache_path = download_with_progress(
                cache, args.url, force=args.force, check_updates=check_updates
            )
            print(f"\nCached at: {cache_path}")
            return 0
        except Exception as e:
            print(f"Error: {e}", file=sys.stderr)
            return 1

    elif args.command == "list":
        cached = cache.list_cached()
        if not cached:
            print("No cached files")
            return 0

        print(f"Cached files ({len(cached)}):")
        for item in cached:
            size_mb = item["size"] / (1024 * 1024)
            print(f"  {item['url']}")
            print(f"    Size: {size_mb:.2f} MB")
            print(f"    Path: {item['path']}")
        return 0

    elif args.command == "stats":
        total_size = cache.cache_size()
        cached_count = len(cache.list_cached())
        size_mb = total_size / (1024 * 1024)

        print(f"Cache directory: {cache.cache_dir}")
        print(f"Cached files: {cached_count}")
        print(f"Total size: {size_mb:.2f} MB")
        return 0

    elif args.command == "clear":
        cache.clear_cache()
        return 0

    return 1


if __name__ == "__main__":
    sys.exit(main())
@@ -25,7 +25,6 @@ int main() { return 0;}
    Path(zephyr_dir / "prj.conf").write_text(
        """
CONFIG_NEWLIB_LIBC=y
CONFIG_BT=y
CONFIG_ADC=y
""",
        encoding="utf-8",

@@ -1,138 +0,0 @@
#!/usr/bin/env python3
"""
PlatformIO Download Wrapper with Caching

This script can be used as a wrapper around PlatformIO downloads to add caching.
It intercepts download operations and uses the GitHub download cache.

This is designed to be called from PlatformIO's extra_scripts if needed.
"""

from pathlib import Path
import sys

# Import the cache manager
sys.path.insert(0, str(Path(__file__).parent))
from github_download_cache import GitHubDownloadCache


def is_github_url(url: str) -> bool:
    """Check if a URL is a GitHub URL."""
    return "github.com" in url.lower()


def cached_download_handler(source, target, env):
    """PlatformIO download handler that uses caching for GitHub URLs.

    This function can be registered as a custom download handler in PlatformIO.

    Args:
        source: Source URL
        target: Target file path
        env: SCons environment
    """
    import shutil
    import urllib.request

    url = str(source[0])
    target_path = Path(str(target[0]))

    # Only cache GitHub URLs
    if not is_github_url(url):
        # Fall back to default download
        print(f"Downloading (no cache): {url}")
        with (
            urllib.request.urlopen(url) as response,
            open(target_path, "wb") as out_file,
        ):
            shutil.copyfileobj(response, out_file)
        return

    # Use cache for GitHub URLs
    cache = GitHubDownloadCache()
    print(f"Downloading with cache: {url}")

    try:
        cached_path = cache.download_with_cache(url, check_updates=True)

        # Copy from cache to target
        shutil.copy2(cached_path, target_path)
        print(f"  Copied to: {target_path}")

    except Exception as e:
        print(f"Cache download failed, using direct download: {e}")
        # Fall back to direct download
        with (
            urllib.request.urlopen(url) as response,
            open(target_path, "wb") as out_file,
        ):
            shutil.copyfileobj(response, out_file)


def setup_platformio_caching():
    """Setup PlatformIO to use cached downloads.

    This should be called from an extra_scripts file in platformio.ini.

    Example extra_scripts file (e.g., platformio_hooks.py):
        Import("env")
        from script.platformio_download_wrapper import setup_platformio_caching
        setup_platformio_caching()
    """
    try:
        from SCons.Script import DefaultEnvironment

        DefaultEnvironment()

        # Register custom download handler
        # Note: This may not work with all PlatformIO versions
        # as the download mechanism is internal
        print("Note: Direct download interception is not fully supported.")
        print("Please use the cache_platformio_downloads.py script instead.")

    except ImportError:
        print("Warning: SCons not available, cannot setup download caching")


if __name__ == "__main__":
    # CLI mode - can be used to manually download a URL with caching
    import argparse

    parser = argparse.ArgumentParser(description="Download a URL with caching")
    parser.add_argument("url", help="URL to download")
    parser.add_argument("target", help="Target file path")
    parser.add_argument("--cache-dir", type=Path, help="Cache directory")

    args = parser.parse_args()

    cache = GitHubDownloadCache(args.cache_dir)
    target_path = Path(args.target)

    try:
        if is_github_url(args.url):
            print(f"Downloading with cache: {args.url}")
            cached_path = cache.download_with_cache(args.url)

            # Copy to target
            import shutil

            target_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(cached_path, target_path)
            print(f"Copied to: {target_path}")
        else:
            print(f"Downloading directly (not a GitHub URL): {args.url}")
            import shutil
            import urllib.request

            target_path.parent.mkdir(parents=True, exist_ok=True)
            with (
                urllib.request.urlopen(args.url) as response,
                open(target_path, "wb") as out_file,
            ):
                shutil.copyfileobj(response, out_file)

        sys.exit(0)

    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
@@ -6,7 +6,6 @@ from unittest.mock import MagicMock, patch
import pytest

from esphome.components.packages import do_packages_pass
from esphome.config import resolve_extend_remove
from esphome.config_helpers import Extend, Remove
import esphome.config_validation as cv
from esphome.const import (
@@ -65,20 +64,13 @@ def fixture_basic_esphome():
    return {CONF_NAME: TEST_DEVICE_NAME, CONF_PLATFORM: TEST_PLATFORM}


def packages_pass(config):
    """Wrapper around do_packages_pass that also resolves Extend and Remove."""
    config = do_packages_pass(config)
    resolve_extend_remove(config)
    return config


def test_package_unused(basic_esphome, basic_wifi):
    """
    Ensures do_packages_pass does not change a config if packages aren't used.
    """
    config = {CONF_ESPHOME: basic_esphome, CONF_WIFI: basic_wifi}

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == config


@@ -91,7 +83,7 @@ def test_package_invalid_dict(basic_esphome, basic_wifi):
    config = {CONF_ESPHOME: basic_esphome, CONF_PACKAGES: basic_wifi | {CONF_URL: ""}}

    with pytest.raises(cv.Invalid):
        packages_pass(config)
        do_packages_pass(config)


def test_package_include(basic_wifi, basic_esphome):
@@ -107,7 +99,7 @@ def test_package_include(basic_wifi, basic_esphome):

    expected = {CONF_ESPHOME: basic_esphome, CONF_WIFI: basic_wifi}

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -132,7 +124,7 @@ def test_package_append(basic_wifi, basic_esphome):
        },
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -156,7 +148,7 @@ def test_package_override(basic_wifi, basic_esphome):
        },
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -185,7 +177,7 @@ def test_multiple_package_order():
        },
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -241,7 +233,7 @@ def test_package_list_merge():
        ]
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -319,7 +311,7 @@ def test_package_list_merge_by_id():
        ]
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -358,13 +350,13 @@ def test_package_merge_by_id_with_list():
        ]
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


def test_package_merge_by_missing_id():
    """
    Ensures that a validation error is thrown when trying to extend a missing ID.
    Ensures that components with missing IDs are not merged.
    """

    config = {
@@ -387,15 +379,25 @@
        ],
    }

    error_raised = False
    try:
        packages_pass(config)
        assert False, "Expected validation error for missing ID"
    except cv.Invalid as err:
        error_raised = True
        assert err.path == [CONF_SENSOR, 2]
    expected = {
        CONF_SENSOR: [
            {
                CONF_ID: TEST_SENSOR_ID_1,
                CONF_FILTERS: [{CONF_MULTIPLY: 42.0}],
            },
            {
                CONF_ID: TEST_SENSOR_ID_1,
                CONF_FILTERS: [{CONF_MULTIPLY: 10.0}],
            },
            {
                CONF_ID: Extend(TEST_SENSOR_ID_2),
                CONF_FILTERS: [{CONF_OFFSET: 146.0}],
            },
        ]
    }

    assert error_raised
    actual = do_packages_pass(config)
    assert actual == expected


def test_package_list_remove_by_id():
@@ -445,7 +447,7 @@ def test_package_list_remove_by_id():
        ]
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -491,7 +493,7 @@ def test_multiple_package_list_remove_by_id():
        ]
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -512,7 +514,7 @@ def test_package_dict_remove_by_id(basic_wifi, basic_esphome):
        CONF_ESPHOME: basic_esphome,
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -543,6 +545,7 @@ def test_package_remove_by_missing_id():
    }

    expected = {
        "missing_key": Remove(),
        CONF_SENSOR: [
            {
                CONF_ID: TEST_SENSOR_ID_1,
@@ -552,10 +555,14 @@ def test_package_remove_by_missing_id():
                CONF_ID: TEST_SENSOR_ID_1,
                CONF_FILTERS: [{CONF_MULTIPLY: 10.0}],
            },
            {
                CONF_ID: Remove(TEST_SENSOR_ID_2),
                CONF_FILTERS: [{CONF_OFFSET: 146.0}],
            },
        ],
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -627,7 +634,7 @@ def test_remote_packages_with_files_list(
        ]
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected


@@ -723,5 +730,5 @@ def test_remote_packages_with_files_and_vars(
        ]
    }

    actual = packages_pass(config)
    actual = do_packages_pass(config)
    assert actual == expected

@@ -8,12 +8,14 @@ sensor:
    lambda: |-
      if (millis() > 10000) {
        return 0.6;
      } else {
        return 0.0;
      }
      return 0.0;
  - platform: template
    id: template_temperature
    lambda: |-
      if (millis() > 10000) {
        return 42.0;
      } else {
        return 0.0;
      }
      return 0.0;

@@ -5,8 +5,9 @@ sensor:
    lambda: |-
      if (millis() > 10000) {
        return 42.0;
      } else {
        return 0.0;
      }
      return 0.0;
    update_interval: 15s

binary_sensor:

@@ -23,8 +23,9 @@ binary_sensor:
      - lambda: |-
          if (id(some_binary_sensor).state) {
            return x;
          } else {
            return {};
          }
          return {};
      - settle: 100ms
      - timeout: 10s


@@ -4,22 +4,25 @@ binary_sensor:
    lambda: |-
      if (millis() > 10000) {
        return true;
      } else {
        return false;
      }
      return false;
  - platform: template
    id: bin2
    lambda: |-
      if (millis() > 20000) {
        return true;
      } else {
        return false;
      }
      return false;
  - platform: template
    id: bin3
    lambda: |-
      if (millis() > 30000) {
        return true;
      } else {
        return false;
      }
      return false;

sensor:
  - platform: binary_sensor_map

@@ -1,2 +0,0 @@
ble_nus:
  type: logs
@@ -1,2 +0,0 @@
ble_nus:
  type: logs
@@ -4,15 +4,17 @@ sensor:
    lambda: |-
      if (millis() > 10000) {
        return 0.6;
      } else {
        return 0.0;
      }
      return 0.0;
  - platform: template
    id: template_temperature2
    lambda: |-
      if (millis() > 20000) {
        return 0.8;
      } else {
        return 0.0;
      }
      return 0.0;
  - platform: combination
    type: kalman
    name: Kalman-filtered temperature

@@ -4,8 +4,9 @@ binary_sensor:
    lambda: |-
      if (millis() > 10000) {
        return true;
      } else {
        return false;
      }
      return false;

sensor:
  - platform: duty_time

@@ -4,8 +4,9 @@ binary_sensor:
    lambda: |-
      if (millis() > 10000) {
        return true;
      } else {
        return false;
      }
      return false;

switch:
  - platform: template

@@ -17,8 +17,9 @@ lock:
    lambda: |-
      if (millis() > 10000) {
        return LOCK_STATE_LOCKED;
      } else {
        return LOCK_STATE_UNLOCKED;
      }
      return LOCK_STATE_UNLOCKED;
    optimistic: true
    assumed_state: false
    on_unlock:

@@ -72,9 +72,10 @@ binary_sensor:
      if (id(template_sens).state > 30) {
        // Garage Door is open.
        return true;
      } else {
        // Garage Door is closed.
        return false;
      }
      // Garage Door is closed.
      return false;
    on_state:
      - mqtt.publish:
          topic: some/topic/binary_sensor
@@ -216,8 +217,9 @@ cover:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return COVER_OPEN;
      } else {
        return COVER_CLOSED;
      }
      return COVER_CLOSED;
    open_action:
      - logger.log: open_action
    close_action:
@@ -319,8 +321,9 @@ lock:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return LOCK_STATE_LOCKED;
      } else {
        return LOCK_STATE_UNLOCKED;
      }
      return LOCK_STATE_UNLOCKED;
    lock_action:
      - logger.log: lock_action
    unlock_action:
@@ -357,8 +360,9 @@ sensor:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return 42.0;
      } else {
        return 0.0;
      }
      return 0.0;
    update_interval: 60s
    on_value:
      - mqtt.publish:
@@ -386,8 +390,9 @@ switch:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return true;
      } else {
        return false;
      }
      return false;
    turn_on_action:
      - logger.log: turn_on_action
    turn_off_action:
@@ -431,8 +436,9 @@ valve:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return VALVE_OPEN;
      } else {
        return VALVE_CLOSED;
      }
      return VALVE_CLOSED;

alarm_control_panel:
  - platform: template

@@ -27,8 +27,9 @@ sensor:
    lambda: |-
      if (millis() > 10000) {
        return 42.0;
      } else {
        return 0.0;
      }
      return 0.0;
    update_interval: 60s

climate:

@@ -35,8 +35,9 @@ sensor:
    lambda: |-
      if (millis() > 10000) {
        return 42.0;
      } else {
        return 0.0;
      }
      return 0.0;
    update_interval: 60s

text_sensor:
@@ -48,8 +49,9 @@ text_sensor:
    lambda: |-
      if (millis() > 10000) {
        return {"Hello World"};
      } else {
        return {"Goodbye (cruel) World"};
      }
      return {"Goodbye (cruel) World"};
    update_interval: 60s

binary_sensor:
@@ -58,8 +60,9 @@ binary_sensor:
    lambda: |-
      if (millis() > 10000) {
        return true;
      } else {
        return false;
      }
      return false;

switch:
  - platform: template
@@ -67,8 +70,9 @@ switch:
    lambda: |-
      if (millis() > 10000) {
        return true;
      } else {
        return false;
      }
      return false;
    optimistic: true

fan:
@@ -81,8 +85,9 @@ cover:
    lambda: |-
      if (millis() > 10000) {
        return COVER_OPEN;
      } else {
        return COVER_CLOSED;
      }
      return COVER_CLOSED;

lock:
  - platform: template
@@ -90,8 +95,9 @@ lock:
    lambda: |-
      if (millis() > 10000) {
        return LOCK_STATE_LOCKED;
      } else {
        return LOCK_STATE_UNLOCKED;
      }
      return LOCK_STATE_UNLOCKED;
    optimistic: true

select:

@@ -59,8 +59,9 @@ binary_sensor:
      - lambda: |-
          if (id(other_binary_sensor).state) {
            return x;
          } else {
            return {};
          }
          return {};
      - settle: 500ms
      - timeout: 5s

@@ -71,8 +72,9 @@ sensor:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return 42.0;
      } else {
        return 0.0;
      }
      return 0.0;
    update_interval: 60s
    filters:
      - calibrate_linear:
@@ -181,8 +183,9 @@ switch:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return true;
      } else {
        return false;
      }
      return false;
    turn_on_action:
      - logger.log: "turn_on_action"
    turn_off_action:
@@ -200,8 +203,9 @@ cover:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return COVER_OPEN;
      } else {
        return COVER_CLOSED;
      }
      return COVER_CLOSED;
    open_action:
      - logger.log: open_action
    close_action:
@@ -234,8 +238,9 @@ lock:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return LOCK_STATE_LOCKED;
      } else {
        return LOCK_STATE_UNLOCKED;
      }
      return LOCK_STATE_UNLOCKED;
    lock_action:
      - logger.log: lock_action
    unlock_action:
@@ -250,8 +255,9 @@ valve:
    lambda: |-
      if (id(some_binary_sensor).state) {
        return VALVE_OPEN;
      } else {
        return VALVE_CLOSED;
      }
      return VALVE_CLOSED;
    open_action:
      - logger.log: open_action
    close_action:

@@ -69,11 +69,6 @@ climate:
      - logger.log: swing_vertical_action
    swing_both_action:
      - logger.log: swing_both_action
    humidity_control_humidify_action:
      - logger.log: humidity_control_humidify_action
    humidity_control_off_action:
      - logger.log: humidity_control_off_action
    humidity_hysteresis: 1.0
    startup_delay: true
    supplemental_cooling_delta: 2.0
    cool_deadband: 0.5

@@ -17,10 +17,10 @@ sensor:
      name: HLW8012 Voltage
    power:
      name: HLW8012 Power
      id: total_daily_energy_hlw8012_power
      id: hlw8012_power
    energy:
      name: HLW8012 Energy
      id: total_daily_energy_hlw8012_energy
      id: hlw8012_energy
    update_interval: 15s
    current_resistor: 0.001 ohm
    voltage_divider: 2351
@@ -29,4 +29,4 @@ sensor:
    model: hlw8012
  - platform: total_daily_energy
    name: HLW8012 Total Daily Energy
    power_id: total_daily_energy_hlw8012_power
    power_id: hlw8012_power

@@ -1,5 +1,4 @@
wifi:
  fast_connect: true
  networks:
    - ssid: MySSID
      eap:

@@ -35,26 +35,6 @@ from esphome.zeroconf import DiscoveredImport
from .common import get_fixture_path


def get_build_path(base_path: Path, device_name: str) -> Path:
    """Get the build directory path for a device.

    This is a test helper that constructs the standard ESPHome build directory
    structure. Note: This helper does NOT perform path traversal sanitization
    because it's only used in tests where we control the inputs. The actual
    web_server.py code handles sanitization in DownloadBinaryRequestHandler.get()
    via file_name.replace("..", "").lstrip("/").

    Args:
        base_path: The base temporary path (typically tmp_path from pytest)
        device_name: The name of the device (should not contain path separators
            in production use, but tests may use it for specific scenarios)

    Returns:
        Path to the build directory (.esphome/build/device_name)
    """
    return base_path / ".esphome" / "build" / device_name
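The sanitization expression mentioned in the docstring is easy to check in isolation (a minimal sketch; the production code path is DownloadBinaryRequestHandler.get()):

def sanitize(file_name: str) -> str:
    return file_name.replace("..", "").lstrip("/")

assert sanitize("../../../secrets.yaml") == "secrets.yaml"
assert sanitize("/etc/passwd") == "etc/passwd"
assert sanitize("zephyr/zephyr.uf2") == "zephyr/zephyr.uf2"  # legitimate paths pass through
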


class DashboardTestHelper:
    def __init__(self, io_loop: IOLoop, client: AsyncHTTPClient, port: int) -> None:
        self.io_loop = io_loop
@@ -437,180 +417,6 @@ async def test_download_binary_handler_idedata_fallback(
    assert response.body == b"bootloader content"


@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
async def test_download_binary_handler_subdirectory_file(
    dashboard: DashboardTestHelper,
    tmp_path: Path,
    mock_storage_json: MagicMock,
) -> None:
    """Test the DownloadBinaryRequestHandler.get with file in subdirectory (nRF52 case).

    This is a regression test for issue #11343 where the Path migration broke
    downloads for nRF52 firmware files in subdirectories like 'zephyr/zephyr.uf2'.

    The issue was that with_name() doesn't accept path separators:
    - Before: path = storage_json.firmware_bin_path.with_name(file_name)
      ValueError: Invalid name 'zephyr/zephyr.uf2'
    - After: path = storage_json.firmware_bin_path.parent.joinpath(file_name)
      Works correctly with subdirectory paths
    """
    # Create a fake nRF52 build structure with firmware in subdirectory
    build_dir = get_build_path(tmp_path, "nrf52-device")
    zephyr_dir = build_dir / "zephyr"
    zephyr_dir.mkdir(parents=True)

    # Create the main firmware binary (would be in build root)
    firmware_file = build_dir / "firmware.bin"
    firmware_file.write_bytes(b"main firmware")

    # Create the UF2 file in zephyr subdirectory (nRF52 specific)
    uf2_file = zephyr_dir / "zephyr.uf2"
    uf2_file.write_bytes(b"nRF52 UF2 firmware content")

    # Mock storage JSON
    mock_storage = Mock()
    mock_storage.name = "nrf52-device"
    mock_storage.firmware_bin_path = firmware_file
    mock_storage_json.load.return_value = mock_storage

    # Request the UF2 file with subdirectory path
    response = await dashboard.fetch(
        "/download.bin?configuration=nrf52-device.yaml&file=zephyr/zephyr.uf2",
        method="GET",
    )
    assert response.code == 200
    assert response.body == b"nRF52 UF2 firmware content"
    assert response.headers["Content-Type"] == "application/octet-stream"
    assert "attachment" in response.headers["Content-Disposition"]
    # Download name should be device-name + full file path
    assert "nrf52-device-zephyr/zephyr.uf2" in response.headers["Content-Disposition"]
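The pathlib behavior this regression test documents can be reproduced directly (paths invented for the demo; the exact ValueError message varies slightly by Python version):

from pathlib import Path

firmware_bin_path = Path("/builds/nrf52-device/firmware.bin")
try:
    firmware_bin_path.with_name("zephyr/zephyr.uf2")
except ValueError as e:
    print(e)  # e.g. Invalid name 'zephyr/zephyr.uf2'

print(firmware_bin_path.parent.joinpath("zephyr/zephyr.uf2"))
# /builds/nrf52-device/zephyr/zephyr.uf2
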
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.usefixtures("mock_ext_storage_path")
|
||||
async def test_download_binary_handler_subdirectory_file_url_encoded(
|
||||
dashboard: DashboardTestHelper,
|
||||
tmp_path: Path,
|
||||
mock_storage_json: MagicMock,
|
||||
) -> None:
|
||||
"""Test the DownloadBinaryRequestHandler.get with URL-encoded subdirectory path.
|
||||
|
||||
Verifies that URL-encoded paths (e.g., zephyr%2Fzephyr.uf2) are correctly
|
||||
decoded and handled, and that custom download names work with subdirectories.
|
||||
"""
|
||||
# Create a fake build structure with firmware in subdirectory
|
||||
build_dir = get_build_path(tmp_path, "test")
|
||||
zephyr_dir = build_dir / "zephyr"
|
||||
zephyr_dir.mkdir(parents=True)
|
||||
|
||||
firmware_file = build_dir / "firmware.bin"
|
||||
firmware_file.write_bytes(b"content")
|
||||
|
||||
uf2_file = zephyr_dir / "zephyr.uf2"
|
||||
uf2_file.write_bytes(b"content")
|
||||
|
||||
# Mock storage JSON
|
||||
mock_storage = Mock()
|
||||
mock_storage.name = "test_device"
|
||||
mock_storage.firmware_bin_path = firmware_file
|
||||
mock_storage_json.load.return_value = mock_storage
|
||||
|
||||
# Request with URL-encoded path and custom download name
|
||||
response = await dashboard.fetch(
|
||||
"/download.bin?configuration=test.yaml&file=zephyr%2Fzephyr.uf2&download=custom_name.bin",
|
||||
method="GET",
|
||||
)
|
||||
assert response.code == 200
|
||||
assert "custom_name.bin" in response.headers["Content-Disposition"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
@pytest.mark.parametrize(
    "attack_path",
    [
        pytest.param("../../../secrets.yaml", id="basic_traversal"),
        pytest.param("..%2F..%2F..%2Fsecrets.yaml", id="url_encoded"),
        pytest.param("zephyr/../../../secrets.yaml", id="traversal_with_prefix"),
        pytest.param("/etc/passwd", id="absolute_path"),
        pytest.param("//etc/passwd", id="double_slash_absolute"),
        pytest.param("....//secrets.yaml", id="multiple_dots"),
    ],
)
async def test_download_binary_handler_path_traversal_protection(
    dashboard: DashboardTestHelper,
    tmp_path: Path,
    mock_storage_json: MagicMock,
    attack_path: str,
) -> None:
    """Test that DownloadBinaryRequestHandler prevents path traversal attacks.

    Verifies that attempts to use '..' in file paths are sanitized to prevent
    accessing files outside the build directory. Tests multiple attack vectors.
    """
    # Create build structure
    build_dir = get_build_path(tmp_path, "test")
    build_dir.mkdir(parents=True)
    firmware_file = build_dir / "firmware.bin"
    firmware_file.write_bytes(b"firmware content")

    # Create a sensitive file outside the build directory that should NOT be accessible
    sensitive_file = tmp_path / "secrets.yaml"
    sensitive_file.write_bytes(b"secret: my_secret_password")

    # Mock storage JSON
    mock_storage = Mock()
    mock_storage.name = "test_device"
    mock_storage.firmware_bin_path = firmware_file
    mock_storage_json.load.return_value = mock_storage

    # Attempt path traversal attack - should be blocked
    with pytest.raises(HTTPClientError) as exc_info:
        await dashboard.fetch(
            f"/download.bin?configuration=test.yaml&file={attack_path}",
            method="GET",
        )
    # Should get 404 (file not found after sanitization) or 500 (idedata fails)
    assert exc_info.value.code in (404, 500)


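The sanitization these parametrized attacks probe lives inside the dashboard handler and is not shown in this diff. As a minimal sketch of the invariant being asserted (a requested file must resolve to a location inside the build directory), with the helper name is_safe_subpath being hypothetical:

from pathlib import Path


def is_safe_subpath(build_dir: Path, requested: str) -> bool:
    # Hypothetical helper, not the dashboard's actual implementation:
    # resolve the candidate and require it to stay under the build dir.
    base = build_dir.resolve()
    candidate = (base / requested).resolve()
    return candidate.is_relative_to(base)  # Python 3.9+


assert not is_safe_subpath(Path("/builds/test"), "../../../secrets.yaml")
assert not is_safe_subpath(Path("/builds/test"), "/etc/passwd")  # absolute path replaces base
assert is_safe_subpath(Path("/builds/test"), "zephyr/zephyr.uf2")
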
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
async def test_download_binary_handler_multiple_subdirectory_levels(
    dashboard: DashboardTestHelper,
    tmp_path: Path,
    mock_storage_json: MagicMock,
) -> None:
    """Test downloading files from multiple subdirectory levels.

    Verifies that joinpath correctly handles multi-level paths like 'build/output/firmware.bin'.
    """
    # Create nested directory structure
    build_dir = get_build_path(tmp_path, "test")
    nested_dir = build_dir / "build" / "output"
    nested_dir.mkdir(parents=True)

    firmware_file = build_dir / "firmware.bin"
    firmware_file.write_bytes(b"main")

    nested_file = nested_dir / "firmware.bin"
    nested_file.write_bytes(b"nested firmware content")

    # Mock storage JSON
    mock_storage = Mock()
    mock_storage.name = "test_device"
    mock_storage.firmware_bin_path = firmware_file
    mock_storage_json.load.return_value = mock_storage

    response = await dashboard.fetch(
        "/download.bin?configuration=test.yaml&file=build/output/firmware.bin",
        method="GET",
    )
    assert response.code == 200
    assert response.body == b"nested firmware content"


@pytest.mark.asyncio
async def test_edit_request_handler_post_invalid_file(
    dashboard: DashboardTestHelper,
@@ -34,9 +34,10 @@ binary_sensor:
            ESP_LOGD("test", "Button ON at %u", now);
          }
          return true;
        } else {
          // Only log state change
          if (id(ir_remote_button).state) {
            ESP_LOGD("test", "Button OFF at %u", now);
          }
          return false;
        }
        // Only log state change
        if (id(ir_remote_button).state) {
          ESP_LOGD("test", "Button OFF at %u", now);
        }
        return false;

@@ -1,170 +0,0 @@
esphome:
  name: test-script-queued

host:
api:
  actions:
    # Test 1: Queue depth with default max_runs=5
    - action: test_queue_depth
      then:
        - logger.log: "=== TEST 1: Queue depth (max_runs=5 means 5 total, reject 6-7) ==="
        - script.execute:
            id: queue_depth_script
            value: 1
        - script.execute:
            id: queue_depth_script
            value: 2
        - script.execute:
            id: queue_depth_script
            value: 3
        - script.execute:
            id: queue_depth_script
            value: 4
        - script.execute:
            id: queue_depth_script
            value: 5
        - script.execute:
            id: queue_depth_script
            value: 6
        - script.execute:
            id: queue_depth_script
            value: 7

    # Test 2: Ring buffer wrap test
    - action: test_ring_buffer
      then:
        - logger.log: "=== TEST 2: Ring buffer wrap (should process A, B, C in order) ==="
        - script.execute:
            id: wrap_script
            msg: "A"
        - script.execute:
            id: wrap_script
            msg: "B"
        - script.execute:
            id: wrap_script
            msg: "C"

    # Test 3: Stop clears queue
    - action: test_stop_clears
      then:
        - logger.log: "=== TEST 3: Stop clears queue (should only see 1, then 'STOPPED') ==="
        - script.execute:
            id: stop_script
            num: 1
        - script.execute:
            id: stop_script
            num: 2
        - script.execute:
            id: stop_script
            num: 3
        - delay: 50ms
        - logger.log: "STOPPING script now"
        - script.stop: stop_script

    # Test 4: Verify rejection (max_runs=3)
    - action: test_rejection
      then:
        - logger.log: "=== TEST 4: Verify rejection (max_runs=3 means 3 total, reject 4-8) ==="
        - script.execute:
            id: rejection_script
            val: 1
        - script.execute:
            id: rejection_script
            val: 2
        - script.execute:
            id: rejection_script
            val: 3
        - script.execute:
            id: rejection_script
            val: 4
        - script.execute:
            id: rejection_script
            val: 5
        - script.execute:
            id: rejection_script
            val: 6
        - script.execute:
            id: rejection_script
            val: 7
        - script.execute:
            id: rejection_script
            val: 8

    # Test 5: No parameters test
    - action: test_no_params
      then:
        - logger.log: "=== TEST 5: No params (should process 3 times) ==="
        - script.execute: no_params_script
        - script.execute: no_params_script
        - script.execute: no_params_script

logger:
  level: DEBUG

script:
  # Test script 1: Queue depth test (default max_runs=5)
  - id: queue_depth_script
    mode: queued
    parameters:
      value: int
    then:
      - logger.log:
          format: "Queue test: START item %d"
          args: ['value']
      - delay: 100ms
      - logger.log:
          format: "Queue test: END item %d"
          args: ['value']

  # Test script 2: Ring buffer wrap test (max_runs=3)
  - id: wrap_script
    mode: queued
    max_runs: 3
    parameters:
      msg: string
    then:
      - logger.log:
          format: "Ring buffer: START '%s'"
          args: ['msg.c_str()']
      - delay: 50ms
      - logger.log:
          format: "Ring buffer: END '%s'"
          args: ['msg.c_str()']

  # Test script 3: Stop test
  - id: stop_script
    mode: queued
    max_runs: 5
    parameters:
      num: int
    then:
      - logger.log:
          format: "Stop test: START %d"
          args: ['num']
      - delay: 100ms
      - logger.log:
          format: "Stop test: END %d"
          args: ['num']

  # Test script 4: Rejection test (max_runs=3)
  - id: rejection_script
    mode: queued
    max_runs: 3
    parameters:
      val: int
    then:
      - logger.log:
          format: "Rejection test: START %d"
          args: ['val']
      - delay: 200ms
      - logger.log:
          format: "Rejection test: END %d"
          args: ['val']

  # Test script 5: No parameters
  - id: no_params_script
    mode: queued
    then:
      - logger.log: "No params: START"
      - delay: 50ms
      - logger.log: "No params: END"
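The deleted config above encodes the semantics under test: with mode: queued, max_runs caps the running instance plus everything still queued, excess executes are rejected with a "max instances" warning, queued items run in FIFO order, and script.stop discards the queue. A toy Python model of those rules (an illustration of the test's assumptions, not ESPHome's C++ implementation):

from collections import deque


class QueuedScript:
    """Toy model of `mode: queued` as exercised above; illustrative only."""

    def __init__(self, max_runs: int = 5) -> None:
        self.max_runs = max_runs
        self.running = False
        self.queue: deque = deque()

    def execute(self, **params) -> bool:
        # Assumption from the comments above: max_runs counts the running
        # instance plus queued ones, so max_runs=5 means 5 accepted total.
        total = (1 if self.running else 0) + len(self.queue)
        if total >= self.max_runs:
            return False  # logged as a "max instances" rejection
        if self.running:
            self.queue.append(params)  # FIFO: preserves A, B, C order
        else:
            self.running = True  # first execute starts immediately
        return True

    def stop(self) -> None:
        self.queue.clear()  # stop discards everything still queued
        self.running = False


s = QueuedScript(max_runs=3)
assert [s.execute(val=i) for i in range(1, 5)] == [True, True, True, False]
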
@@ -8,7 +8,6 @@ import asyncio
from typing import Any

from aioesphomeapi import LightState
from aioesphomeapi.model import ColorMode
import pytest

from .types import APIClientConnectedFactory, RunCompiledFunction
@@ -36,51 +35,10 @@ async def test_light_calls(
    # Get the light entities
    entities = await client.list_entities_services()
    lights = [e for e in entities[0] if e.object_id.startswith("test_")]
    assert len(lights) >= 3  # Should have RGBCW, RGB, and Binary lights
    assert len(lights) >= 2  # Should have RGBCW and RGB lights

    rgbcw_light = next(light for light in lights if "RGBCW" in light.name)
    rgb_light = next(light for light in lights if "RGB Light" in light.name)
    binary_light = next(light for light in lights if "Binary" in light.name)

    # Test color mode encoding: Verify supported_color_modes contains actual ColorMode enum values
    # not bit positions. This is critical - the iterator must convert bit positions to actual
    # ColorMode enum values for API encoding.

    # RGBCW light (rgbww platform) should support RGB_COLD_WARM_WHITE mode
    assert ColorMode.RGB_COLD_WARM_WHITE in rgbcw_light.supported_color_modes, (
        f"RGBCW light missing RGB_COLD_WARM_WHITE mode. Got: {rgbcw_light.supported_color_modes}"
    )
    # Verify it's the actual enum value, not bit position
    assert ColorMode.RGB_COLD_WARM_WHITE.value in [
        mode.value for mode in rgbcw_light.supported_color_modes
    ], (
        f"RGBCW light has wrong color mode values. Expected {ColorMode.RGB_COLD_WARM_WHITE.value} "
        f"(RGB_COLD_WARM_WHITE), got: {[mode.value for mode in rgbcw_light.supported_color_modes]}"
    )

    # RGB light should support RGB mode
    assert ColorMode.RGB in rgb_light.supported_color_modes, (
        f"RGB light missing RGB color mode. Got: {rgb_light.supported_color_modes}"
    )
    # Verify it's the actual enum value, not bit position
    assert ColorMode.RGB.value in [
        mode.value for mode in rgb_light.supported_color_modes
    ], (
        f"RGB light has wrong color mode values. Expected {ColorMode.RGB.value} (RGB), got: "
        f"{[mode.value for mode in rgb_light.supported_color_modes]}"
    )

    # Binary light (on/off only) should support ON_OFF mode
    assert ColorMode.ON_OFF in binary_light.supported_color_modes, (
        f"Binary light missing ON_OFF color mode. Got: {binary_light.supported_color_modes}"
    )
    # Verify it's the actual enum value, not bit position
    assert ColorMode.ON_OFF.value in [
        mode.value for mode in binary_light.supported_color_modes
    ], (
        f"Binary light has wrong color mode values. Expected {ColorMode.ON_OFF.value} (ON_OFF), got: "
        f"{[mode.value for mode in binary_light.supported_color_modes]}"
    )


async def wait_for_state_change(key: int, timeout: float = 1.0) -> Any:
    """Wait for a state change for the given entity key."""
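The removed assertions guarded a subtle encoding bug: a color-mode bitmask iterator that yields bit positions instead of the ColorMode enum values those bits stand for. A minimal sketch of the correct decoding (the Mode values below are invented for illustration, not aioesphomeapi's real ones):

from enum import IntEnum


class Mode(IntEnum):
    # Invented values for illustration; not aioesphomeapi's ColorMode.
    ON_OFF = 1
    RGB = 35
    RGB_COLD_WARM_WHITE = 47


def decode_modes(bitmask: int, ordered_modes: list) -> list:
    # The bug class the assertions defended against: returning the bit
    # index i instead of ordered_modes[i].
    return [mode for i, mode in enumerate(ordered_modes) if bitmask & (1 << i)]


modes = decode_modes(0b101, [Mode.ON_OFF, Mode.RGB, Mode.RGB_COLD_WARM_WHITE])
assert modes == [Mode.ON_OFF, Mode.RGB_COLD_WARM_WHITE]  # enum values, not 0 and 2
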
@@ -1,203 +0,0 @@
"""Test ESPHome queued script functionality."""

from __future__ import annotations

import asyncio
import re

import pytest

from .types import APIClientConnectedFactory, RunCompiledFunction


@pytest.mark.asyncio
async def test_script_queued(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test comprehensive queued script functionality."""
    loop = asyncio.get_running_loop()

    # Track all test results
    test_results = {
        "queue_depth": {"processed": [], "rejections": 0},
        "ring_buffer": {"start_order": [], "end_order": []},
        "stop": {"processed": [], "stop_logged": False},
        "rejection": {"processed": [], "rejections": 0},
        "no_params": {"executions": 0},
    }

    # Patterns for Test 1: Queue depth
    queue_start = re.compile(r"Queue test: START item (\d+)")
    queue_end = re.compile(r"Queue test: END item (\d+)")
    queue_reject = re.compile(r"Script 'queue_depth_script' max instances")

    # Patterns for Test 2: Ring buffer
    ring_start = re.compile(r"Ring buffer: START '([A-Z])'")
    ring_end = re.compile(r"Ring buffer: END '([A-Z])'")

    # Patterns for Test 3: Stop
    stop_start = re.compile(r"Stop test: START (\d+)")
    stop_log = re.compile(r"STOPPING script now")

    # Patterns for Test 4: Rejection
    reject_start = re.compile(r"Rejection test: START (\d+)")
    reject_end = re.compile(r"Rejection test: END (\d+)")
    reject_reject = re.compile(r"Script 'rejection_script' max instances")

    # Patterns for Test 5: No params
    no_params_end = re.compile(r"No params: END")

    # Test completion futures
    test1_complete = loop.create_future()
    test2_complete = loop.create_future()
    test3_complete = loop.create_future()
    test4_complete = loop.create_future()
    test5_complete = loop.create_future()

    def check_output(line: str) -> None:
        """Check log output for all test messages."""
        # Test 1: Queue depth
        if match := queue_start.search(line):
            item = int(match.group(1))
            if item not in test_results["queue_depth"]["processed"]:
                test_results["queue_depth"]["processed"].append(item)

        if match := queue_end.search(line):
            item = int(match.group(1))
            if item == 5 and not test1_complete.done():
                test1_complete.set_result(True)

        if queue_reject.search(line):
            test_results["queue_depth"]["rejections"] += 1

        # Test 2: Ring buffer
        if match := ring_start.search(line):
            msg = match.group(1)
            test_results["ring_buffer"]["start_order"].append(msg)

        if match := ring_end.search(line):
            msg = match.group(1)
            test_results["ring_buffer"]["end_order"].append(msg)
            if (
                len(test_results["ring_buffer"]["end_order"]) == 3
                and not test2_complete.done()
            ):
                test2_complete.set_result(True)

        # Test 3: Stop
        if match := stop_start.search(line):
            item = int(match.group(1))
            if item not in test_results["stop"]["processed"]:
                test_results["stop"]["processed"].append(item)

        if stop_log.search(line):
            test_results["stop"]["stop_logged"] = True
            # Give time for any queued items to be cleared
            if not test3_complete.done():
                loop.call_later(
                    0.3,
                    lambda: test3_complete.set_result(True)
                    if not test3_complete.done()
                    else None,
                )

        # Test 4: Rejection
        if match := reject_start.search(line):
            item = int(match.group(1))
            if item not in test_results["rejection"]["processed"]:
                test_results["rejection"]["processed"].append(item)

        if match := reject_end.search(line):
            item = int(match.group(1))
            if item == 3 and not test4_complete.done():
                test4_complete.set_result(True)

        if reject_reject.search(line):
            test_results["rejection"]["rejections"] += 1

        # Test 5: No params
        if no_params_end.search(line):
            test_results["no_params"]["executions"] += 1
            if (
                test_results["no_params"]["executions"] == 3
                and not test5_complete.done()
            ):
                test5_complete.set_result(True)

    async with (
        run_compiled(yaml_config, line_callback=check_output),
        api_client_connected() as client,
    ):
        # Get services
        _, services = await client.list_entities_services()

        # Test 1: Queue depth limit
        test_service = next((s for s in services if s.name == "test_queue_depth"), None)
        assert test_service is not None, "test_queue_depth service not found"
        client.execute_service(test_service, {})
        await asyncio.wait_for(test1_complete, timeout=2.0)
        await asyncio.sleep(0.1)  # Give time for rejections

        # Verify Test 1
        assert sorted(test_results["queue_depth"]["processed"]) == [1, 2, 3, 4, 5], (
            f"Test 1: Expected to process items 1-5 (max_runs=5 means 5 total), got {sorted(test_results['queue_depth']['processed'])}"
        )
        assert test_results["queue_depth"]["rejections"] >= 2, (
            "Test 1: Expected at least 2 rejection warnings (items 6-7 should be rejected)"
        )

        # Test 2: Ring buffer order
        test_service = next((s for s in services if s.name == "test_ring_buffer"), None)
        assert test_service is not None, "test_ring_buffer service not found"
        client.execute_service(test_service, {})
        await asyncio.wait_for(test2_complete, timeout=2.0)

        # Verify Test 2
        assert test_results["ring_buffer"]["start_order"] == ["A", "B", "C"], (
            f"Test 2: Expected start order [A, B, C], got {test_results['ring_buffer']['start_order']}"
        )
        assert test_results["ring_buffer"]["end_order"] == ["A", "B", "C"], (
            f"Test 2: Expected end order [A, B, C], got {test_results['ring_buffer']['end_order']}"
        )

        # Test 3: Stop clears queue
        test_service = next((s for s in services if s.name == "test_stop_clears"), None)
        assert test_service is not None, "test_stop_clears service not found"
        client.execute_service(test_service, {})
        await asyncio.wait_for(test3_complete, timeout=2.0)

        # Verify Test 3
        assert test_results["stop"]["stop_logged"], (
            "Test 3: Stop command was not logged"
        )
        assert test_results["stop"]["processed"] == [1], (
            f"Test 3: Expected only item 1 to process, got {test_results['stop']['processed']}"
        )

        # Test 4: Rejection enforcement (max_runs=3)
        test_service = next((s for s in services if s.name == "test_rejection"), None)
        assert test_service is not None, "test_rejection service not found"
        client.execute_service(test_service, {})
        await asyncio.wait_for(test4_complete, timeout=2.0)
        await asyncio.sleep(0.1)  # Give time for rejections

        # Verify Test 4
        assert sorted(test_results["rejection"]["processed"]) == [1, 2, 3], (
            f"Test 4: Expected to process items 1-3 (max_runs=3 means 3 total), got {sorted(test_results['rejection']['processed'])}"
        )
        assert test_results["rejection"]["rejections"] == 5, (
            f"Test 4: Expected 5 rejections (items 4-8), got {test_results['rejection']['rejections']}"
        )

        # Test 5: No parameters
        test_service = next((s for s in services if s.name == "test_no_params"), None)
        assert test_service is not None, "test_no_params service not found"
        client.execute_service(test_service, {})
        await asyncio.wait_for(test5_complete, timeout=2.0)

        # Verify Test 5
        assert test_results["no_params"]["executions"] == 3, (
            f"Test 5: Expected 3 executions, got {test_results['no_params']['executions']}"
        )
@@ -1,9 +0,0 @@
substitutions:
  A: component1
  B: component2
  C: component3
some_component:
  - id: component1
    value: 2
  - id: component2
    value: 5
@@ -1,22 +0,0 @@
substitutions:
  A: component1
  B: component2
  C: component3

packages:
  - some_component:
      - id: component1
        value: 1
      - id: !extend ${B}
        value: 4
      - id: !extend ${B}
        value: 5
      - id: component3
        value: 6

some_component:
  - id: !extend ${A}
    value: 2
  - id: component2
    value: 3
  - id: !remove ${C}
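For readers skimming the fixture pair above: the first file is the expected output of resolving the second, where `!extend` overlays values onto the item with the matching id and `!remove` deletes it. A simplified model that reproduces this fixture (the resolution order of plain definitions first, then extends, then removes is an assumption here; resolve_extend_remove in esphome.config is the authoritative implementation):

def resolve(items: list) -> dict:
    # items: (op, id, value) tuples with op in {"define", "extend", "remove"}.
    merged = {i: v for op, i, v in items if op == "define"}
    for op, i, v in items:
        if op == "extend" and i in merged:
            merged[i] = v  # overlay onto the existing item; last extend wins
        elif op == "remove":
            merged.pop(i, None)  # drop the item entirely
    return merged


# Mirrors the fixture pair above (package entries first, then local ones).
items = [
    ("define", "component1", 1), ("extend", "component2", 4),
    ("extend", "component2", 5), ("define", "component3", 6),
    ("extend", "component1", 2), ("define", "component2", 3),
    ("remove", "component3", None),
]
assert resolve(items) == {"component1": 2, "component2": 5}
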
@@ -321,14 +321,12 @@ def test_choose_upload_log_host_with_serial_device_no_ports(
) -> None:
    """Test SERIAL device when no serial ports are found."""
    setup_core()
    with pytest.raises(
        EsphomeError, match="All specified devices .* could not be resolved"
    ):
        choose_upload_log_host(
            default="SERIAL",
            check_default=None,
            purpose=Purpose.UPLOADING,
        )
    result = choose_upload_log_host(
        default="SERIAL",
        check_default=None,
        purpose=Purpose.UPLOADING,
    )
    assert result == []
    assert "No serial ports found, skipping SERIAL device" in caplog.text


@@ -369,14 +367,12 @@ def test_choose_upload_log_host_with_ota_device_with_api_config() -> None:
    """Test OTA device when API is configured (no upload without OTA in config)."""
    setup_core(config={CONF_API: {}}, address="192.168.1.100")

    with pytest.raises(
        EsphomeError, match="All specified devices .* could not be resolved"
    ):
        choose_upload_log_host(
            default="OTA",
            check_default=None,
            purpose=Purpose.UPLOADING,
        )
    result = choose_upload_log_host(
        default="OTA",
        check_default=None,
        purpose=Purpose.UPLOADING,
    )
    assert result == []


def test_choose_upload_log_host_with_ota_device_with_api_config_logging() -> None:
@@ -409,14 +405,12 @@ def test_choose_upload_log_host_with_ota_device_no_fallback() -> None:
    """Test OTA device with no valid fallback options."""
    setup_core()

    with pytest.raises(
        EsphomeError, match="All specified devices .* could not be resolved"
    ):
        choose_upload_log_host(
            default="OTA",
            check_default=None,
            purpose=Purpose.UPLOADING,
        )
    result = choose_upload_log_host(
        default="OTA",
        check_default=None,
        purpose=Purpose.UPLOADING,
    )
    assert result == []


@pytest.mark.usefixtures("mock_choose_prompt")
@@ -621,19 +615,21 @@ def test_choose_upload_log_host_empty_defaults_list() -> None:


@pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging")
def test_choose_upload_log_host_all_devices_unresolved() -> None:
def test_choose_upload_log_host_all_devices_unresolved(
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test when all specified devices cannot be resolved."""
    setup_core()

    with pytest.raises(
        EsphomeError,
        match=r"All specified devices \['SERIAL', 'OTA'\] could not be resolved",
    ):
        choose_upload_log_host(
            default=["SERIAL", "OTA"],
            check_default=None,
            purpose=Purpose.UPLOADING,
        )
    result = choose_upload_log_host(
        default=["SERIAL", "OTA"],
        check_default=None,
        purpose=Purpose.UPLOADING,
    )
    assert result == []
    assert (
        "All specified devices: ['SERIAL', 'OTA'] could not be resolved." in caplog.text
    )


@pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging")
@@ -766,14 +762,12 @@ def test_choose_upload_log_host_no_address_with_ota_config() -> None:
    """Test OTA device when OTA is configured but no address is set."""
    setup_core(config={CONF_OTA: {}})

    with pytest.raises(
        EsphomeError, match="All specified devices .* could not be resolved"
    ):
        choose_upload_log_host(
            default="OTA",
            check_default=None,
            purpose=Purpose.UPLOADING,
        )
    result = choose_upload_log_host(
        default="OTA",
        check_default=None,
        purpose=Purpose.UPLOADING,
    )
    assert result == []


@dataclass
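The same edit repeats across all five hunks above: choose_upload_log_host no longer raises EsphomeError when nothing resolves; it logs and returns an empty list, and the tests now assert on the return value and caplog. A sketch of that contract in isolation (names and structure simplified; not the real esphome implementation):

import logging

_LOGGER = logging.getLogger(__name__)


def resolve_devices(defaults: list, resolved: dict) -> list:
    # Simplified model of the new behavior: skip devices that cannot be
    # resolved and signal total failure with a log line plus an empty list,
    # instead of raising EsphomeError as the old tests expected.
    hosts = [resolved[d] for d in defaults if d in resolved]
    if not hosts:
        _LOGGER.warning("All specified devices: %s could not be resolved.", defaults)
    return hosts


assert resolve_devices(["SERIAL", "OTA"], {}) == []
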
@@ -4,7 +4,6 @@ from pathlib import Path

from esphome import config as config_module, yaml_util
from esphome.components import substitutions
from esphome.config import resolve_extend_remove
from esphome.config_helpers import merge_config
from esphome.const import CONF_PACKAGES, CONF_SUBSTITUTIONS
from esphome.core import CORE
@@ -82,8 +81,6 @@ def test_substitutions_fixtures(fixture_path):

    substitutions.do_substitution_pass(config, None)

    resolve_extend_remove(config)

    # Also load expected using ESPHome's loader, or use {} if missing and DEV_MODE
    if expected_path.is_file():
        expected = yaml_util.load_yaml(expected_path)