mirror of https://github.com/esphome/esphome.git synced 2025-11-04 00:51:49 +00:00

Compare commits


58 Commits

Author SHA1 Message Date
J. Nick Koston
c064bb7299 Merge branch 'dev' into platformio_cache_tests 2025-10-19 16:14:16 -10:00
tomaszduda23
c15f1a9be8 [nrf52] add missing defines for tests (#11384)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-19 16:11:44 -10:00
J. Nick Koston
ebde648305 cleanup 2025-10-19 15:59:33 -10:00
J. Nick Koston
11b53096a6 [ci] Fix fork PR workflow failing to find PRs from forks (#11396) 2025-10-19 15:58:05 -10:00
J. Nick Koston
6a18367949 [cli] Add analyze-memory command (#11395)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-20 14:26:37 +13:00
Javier Peletier
a59b1494d8 [substitutions] Recursive substitutions and better jinja error handling and debug help (#10806) 2025-10-20 14:17:16 +13:00
J. Nick Koston
1106350114 merge 2025-10-19 15:05:42 -10:00
Jesse Hills
e6ce5c58d1 Merge branch 'release' into dev 2025-10-20 13:43:31 +13:00
Jesse Hills
ebc0f5f7c9 Merge pull request #11387 from esphome/bump-2025.10.2
2025.10.2
2025-10-20 13:42:48 +13:00
Juan Antonio Aldea
0f87e7508b remove hexencode due 2022.1 deprecation (#11383) 2025-10-19 13:09:28 -10:00
J. Nick Koston
862bbb7fe1 [ci] Fix memory impact analysis failing on fork PRs (#11380)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
2025-10-19 13:09:09 -10:00
Jesse Hills
020cea80b2 [nextion] Clean up deprecated code from 1.20 (#11393) 2025-10-19 22:16:50 +00:00
Jesse Hills
9c146a7070 [climate] Clean up deprecated functions from 1.20 (#11388) 2025-10-19 22:11:35 +00:00
Jesse Hills
afbd3f77af [light] Clean up deprecated functions from 1.21 (#11389) 2025-10-19 22:08:30 +00:00
Javier Peletier
1e1fefbd0a [substitutions] !extend and !remove now support substitutions and jinja (#11203) 2025-10-20 10:31:25 +13:00
Juan Antonio Aldea
1a2057df30 Migrate from hexencode() to format_hex_pretty() in Kuntze component (#11372) 2025-10-20 10:15:17 +13:00
J. Nick Koston
87ca8784ef [openthread] Backport address resolution support to prevent OTA crash (#11312)
Co-authored-by: Daniel Stiner <danstiner@gmail.com>
2025-10-20 10:12:56 +13:00
Jesse Hills
a186c1062f Bump version to 2025.10.2 2025-10-20 10:06:43 +13:00
Jonathan Swoboda
ea38237f29 [esp32] Fix OTA rollback (#11300)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-20 10:06:43 +13:00
J. Nick Koston
6aff1394ad [core] Fix IndexError when OTA devices cannot be resolved (#11311) 2025-10-20 10:06:43 +13:00
Spectre5
0e34d1b64d Change all temperature offsets to temperature_delta (#11347) 2025-10-20 10:06:43 +13:00
tomaszduda23
1483cee0fb [dashboard] fix migration to Path (#11342)
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
2025-10-20 10:06:43 +13:00
J. Nick Koston
8c1bd2fd85 [dashboard] Fix binary download with packages using secrets after Path migration (#11313) 2025-10-20 10:06:43 +13:00
Daniel Stiner
ea609dc0f6 [const] Add CONF_OPENTHREAD (#11318) 2025-10-20 10:06:42 +13:00
Jonathan Swoboda
913095f6be [esp32] Reduce tx power on Arduino (#11304) 2025-10-20 10:06:42 +13:00
Jonathan Swoboda
bb24ad4a30 [htu21d] Revert register address change (#11291) 2025-10-20 10:06:42 +13:00
Jonathan Swoboda
0d612fecfc [core] Add ESP32 ROM functions to reserved ids (#11293) 2025-10-20 10:06:42 +13:00
J. Nick Koston
9c235b4140 [datetime] Fix DateTimeStateTrigger compilation when time component is not used (#11287) 2025-10-20 10:06:42 +13:00
J. Nick Koston
70cb1793f3 [wifi] Optimize WiFi scan results with in-place construction (#11330) 2025-10-19 19:53:05 +00:00
J. Nick Koston
3bdd351d49 [wifi] Convert fast_connect to compile-time define, save 156-1024 bytes flash (#11328) 2025-10-19 19:52:33 +00:00
Jonathan Swoboda
b0ea3f57de [esp32] Fix OTA rollback (#11300)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-19 09:49:05 -10:00
J. Nick Koston
c9312d5c27 [script] Fix unbounded queue growth, optimize queued mode (default max_runs=5) (#11308)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
2025-10-19 09:42:17 -10:00
J. Nick Koston
33fea90c19 [wifi] Optimize WiFi scanning to reduce copies and heap allocations (#11323) 2025-10-19 19:26:18 +00:00
J. Nick Koston
25f3b6a959 [mqtt] Reduce flash usage by optimizing ArduinoJson assignments (#11340) 2025-10-19 19:17:33 +00:00
J. Nick Koston
e993312640 [core] Fix IndexError when OTA devices cannot be resolved (#11311) 2025-10-20 08:15:47 +13:00
J. Nick Koston
85babe85e4 [sensor] Optimize sliding window filters to eliminate heap fragmentation (#11282) 2025-10-20 07:59:47 +13:00
J. Nick Koston
0266c897c9 [mdns] Use std::unique_ptr for TXT records to reduce ESP32 flash usage (#11362) 2025-10-20 07:53:00 +13:00
J. Nick Koston
bda7676e3a [bluetooth_proxy] Merge duplicate loops in get_connection_() (#11359) 2025-10-20 07:51:41 +13:00
J. Nick Koston
57e98ec3fc [wifi] Replace std::vector with std::unique_ptr for WiFi scan buffer (#11364) 2025-10-20 07:49:58 +13:00
J. Nick Koston
09b2ad071b [esp32_ble_client] Remove duplicate MAC address extraction in set_address() (#11358) 2025-10-20 07:49:13 +13:00
J. Nick Koston
fdecda3d65 [light] Use bitmask instead of std::set for color modes (#11348) 2025-10-20 07:48:14 +13:00
J. Nick Koston
a0922bc8b0 [ci] Add automated memory impact analysis for pull requests (#11242)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
2025-10-20 07:43:38 +13:00
J. Nick Koston
f25af18655 [scheduler] Replace defer queue deque with vector to avoid 512-byte upfront allocation (#11305) 2025-10-20 07:34:34 +13:00
J. Nick Koston
5db07c2d70 [api][time] Refactor timezone update logic for cleaner code (#11327) 2025-10-20 07:31:40 +13:00
Juan Antonio Aldea
40823df7bc make types sensors_t and sensor_type_t internal to StatsdComponent. (#11345) 2025-10-19 07:47:31 -10:00
tomaszduda23
5e1019a6fa [nrf52, ble_nus] add logging over BLE (#9846) 2025-10-19 07:41:19 -10:00
tomaszduda23
f3cdbd0a05 [nrf52] fix task names in logs (#11367) 2025-10-19 07:39:48 -10:00
Keith Burzinski
ddf1b67e49 [prometheus] Update to use new climate API (#11361) 2025-10-18 22:11:44 -10:00
Keith Burzinski
b4d9fddd07 [mqtt] Update to use new climate API (#11360) 2025-10-18 22:11:10 -10:00
Keith Burzinski
25f03074ab [web_server] Update to use new climate API (#11363) 2025-10-18 22:10:07 -10:00
Keith Burzinski
590f6ff70b [api] Update to use new climate API (#11357) 2025-10-19 06:20:11 +00:00
Keith Burzinski
a33ed5e47b [thermostat] Add humidity support (#11286) 2025-10-18 17:25:53 -10:00
Spectre5
c11a9bb97f Change all temperature offsets to temperature_delta (#11347) 2025-10-18 21:13:57 -04:00
dependabot[bot]
acef2085d9 Bump aioesphomeapi from 42.1.0 to 42.2.0 (#11352)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-18 23:11:36 +00:00
dependabot[bot]
865663ce5f Bump aioesphomeapi from 42.0.0 to 42.1.0 (#11350)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-18 11:48:25 -10:00
tomaszduda23
ae010fd6f1 [dashboard] fix migration to Path (#11342)
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
2025-10-18 17:32:12 +00:00
J. Nick Koston
91a10d0e36 [total_daily_energy] Fix ID conflicts in component test configuration (#11337) 2025-10-18 08:36:30 -04:00
Juan Antonio Aldea
d5c36eaf2a [tests] Remove superfluous else-blocks from lambdas (#11322)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-17 21:40:54 -10:00
97 changed files with 3011 additions and 774 deletions

View File

@@ -0,0 +1,111 @@
---
name: Memory Impact Comment (Forks)
on:
workflow_run:
workflows: ["CI"]
types: [completed]
permissions:
contents: read
pull-requests: write
actions: read
jobs:
memory-impact-comment:
name: Post memory impact comment (fork PRs only)
runs-on: ubuntu-24.04
# Only run for PRs from forks that had successful CI runs
if: >
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_repository.full_name != github.repository
env:
GH_TOKEN: ${{ github.token }}
steps:
- name: Get PR details
id: pr
run: |
# Get PR details by searching for PR with matching head SHA
# The workflow_run.pull_requests field is often empty for forks
# Use paginate to handle repos with many open PRs
head_sha="${{ github.event.workflow_run.head_sha }}"
pr_data=$(gh api --paginate "/repos/${{ github.repository }}/pulls" \
--jq ".[] | select(.head.sha == \"$head_sha\") | {number: .number, base_ref: .base.ref}" \
| head -n 1)
if [ -z "$pr_data" ]; then
echo "No PR found for SHA $head_sha, skipping"
echo "skip=true" >> "$GITHUB_OUTPUT"
exit 0
fi
pr_number=$(echo "$pr_data" | jq -r '.number')
base_ref=$(echo "$pr_data" | jq -r '.base_ref')
echo "pr_number=$pr_number" >> "$GITHUB_OUTPUT"
echo "base_ref=$base_ref" >> "$GITHUB_OUTPUT"
echo "Found PR #$pr_number targeting base branch: $base_ref"
- name: Check out code from base repository
if: steps.pr.outputs.skip != 'true'
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Always check out from the base repository (esphome/esphome), never from forks
# Use the PR's target branch to ensure we run trusted code from the main repo
repository: ${{ github.repository }}
ref: ${{ steps.pr.outputs.base_ref }}
- name: Restore Python
if: steps.pr.outputs.skip != 'true'
uses: ./.github/actions/restore-python
with:
python-version: "3.11"
cache-key: ${{ hashFiles('.cache-key') }}
- name: Download memory analysis artifacts
if: steps.pr.outputs.skip != 'true'
run: |
run_id="${{ github.event.workflow_run.id }}"
echo "Downloading artifacts from workflow run $run_id"
mkdir -p memory-analysis
# Download target analysis artifact
if gh run download --name "memory-analysis-target" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then
echo "Downloaded memory-analysis-target artifact."
else
echo "No memory-analysis-target artifact found."
fi
# Download PR analysis artifact
if gh run download --name "memory-analysis-pr" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then
echo "Downloaded memory-analysis-pr artifact."
else
echo "No memory-analysis-pr artifact found."
fi
- name: Check if artifacts exist
id: check
if: steps.pr.outputs.skip != 'true'
run: |
if [ -f ./memory-analysis/memory-analysis-target.json ] && [ -f ./memory-analysis/memory-analysis-pr.json ]; then
echo "found=true" >> "$GITHUB_OUTPUT"
else
echo "found=false" >> "$GITHUB_OUTPUT"
echo "Memory analysis artifacts not found, skipping comment"
fi
- name: Post or update PR comment
if: steps.pr.outputs.skip != 'true' && steps.check.outputs.found == 'true'
env:
PR_NUMBER: ${{ steps.pr.outputs.pr_number }}
run: |
. venv/bin/activate
# Pass PR number and JSON file paths directly to Python script
# Let Python parse the JSON to avoid shell injection risks
# The script will validate and sanitize all inputs
python script/ci_memory_impact_comment.py \
--pr-number "$PR_NUMBER" \
--target-json ./memory-analysis/memory-analysis-target.json \
--pr-json ./memory-analysis/memory-analysis-pr.json

View File

@@ -432,6 +432,17 @@ jobs:
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
# Cache PlatformIO packages to speed up test builds
# Note: Caches are repository-scoped, PRs from forks cannot restore from the main repo cache
- name: Cache PlatformIO
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-test-${{ runner.os }}-${{ env.DEFAULT_PYTHON }}-${{ hashFiles('platformio.ini') }}
restore-keys: |
platformio-test-${{ runner.os }}-${{ env.DEFAULT_PYTHON }}-
- name: Validate and compile components with intelligent grouping
run: |
. venv/bin/activate
@@ -641,6 +652,12 @@ jobs:
--output-env \
--output-json memory-analysis-target.json
# Add metadata to JSON before caching
python script/ci_add_metadata_to_json.py \
--json-file memory-analysis-target.json \
--components "$components" \
--platform "$platform"
- name: Save memory analysis to cache
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
@@ -720,6 +737,13 @@ jobs:
python script/ci_memory_impact_extract.py \
--output-env \
--output-json memory-analysis-pr.json
# Add metadata to JSON (components and platform are in shell variables above)
python script/ci_add_metadata_to_json.py \
--json-file memory-analysis-pr.json \
--components "$components" \
--platform "$platform"
- name: Upload memory analysis JSON
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
@@ -736,10 +760,12 @@ jobs:
- determine-jobs
- memory-impact-target-branch
- memory-impact-pr-branch
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
permissions:
contents: read
pull-requests: write
env:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -762,52 +788,16 @@ jobs:
continue-on-error: true
- name: Post or update PR comment
env:
GH_TOKEN: ${{ github.token }}
COMPONENTS: ${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}
PLATFORM: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}
TARGET_RAM: ${{ needs.memory-impact-target-branch.outputs.ram_usage }}
TARGET_FLASH: ${{ needs.memory-impact-target-branch.outputs.flash_usage }}
PR_RAM: ${{ needs.memory-impact-pr-branch.outputs.ram_usage }}
PR_FLASH: ${{ needs.memory-impact-pr-branch.outputs.flash_usage }}
TARGET_CACHE_HIT: ${{ needs.memory-impact-target-branch.outputs.cache_hit }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
. venv/bin/activate
# Check if analysis JSON files exist
target_json_arg=""
pr_json_arg=""
if [ -f ./memory-analysis/memory-analysis-target.json ]; then
echo "Found target analysis JSON"
target_json_arg="--target-json ./memory-analysis/memory-analysis-target.json"
else
echo "No target analysis JSON found"
fi
if [ -f ./memory-analysis/memory-analysis-pr.json ]; then
echo "Found PR analysis JSON"
pr_json_arg="--pr-json ./memory-analysis/memory-analysis-pr.json"
else
echo "No PR analysis JSON found"
fi
# Add cache flag if target was cached
cache_flag=""
if [ "$TARGET_CACHE_HIT" == "true" ]; then
cache_flag="--target-cache-hit"
fi
# Pass JSON file paths directly to Python script
# All data is extracted from JSON files for security
python script/ci_memory_impact_comment.py \
--pr-number "${{ github.event.pull_request.number }}" \
--components "$COMPONENTS" \
--platform "$PLATFORM" \
--target-ram "$TARGET_RAM" \
--target-flash "$TARGET_FLASH" \
--pr-ram "$PR_RAM" \
--pr-flash "$PR_FLASH" \
$target_json_arg \
$pr_json_arg \
$cache_flag
--pr-number "$PR_NUMBER" \
--target-json ./memory-analysis/memory-analysis-target.json \
--pr-json ./memory-analysis/memory-analysis-pr.json
ci-status:
name: CI Status

View File

@@ -70,6 +70,7 @@ esphome/components/bl0939/* @ziceva
esphome/components/bl0940/* @dan-s-github @tobias-
esphome/components/bl0942/* @dbuezas @dwmw2
esphome/components/ble_client/* @buxtronix @clydebarrow
esphome/components/ble_nus/* @tomaszduda23
esphome/components/bluetooth_proxy/* @bdraco @jesserockz
esphome/components/bme280_base/* @esphome/core
esphome/components/bme280_spi/* @apbodrov

View File

@@ -62,6 +62,40 @@ from esphome.util import (
_LOGGER = logging.getLogger(__name__)
# Special non-component keys that appear in configs
_NON_COMPONENT_KEYS = frozenset(
{
CONF_ESPHOME,
"substitutions",
"packages",
"globals",
"external_components",
"<<",
}
)
def detect_external_components(config: ConfigType) -> set[str]:
"""Detect external/custom components in the configuration.
External components are those that appear in the config but are not
part of ESPHome's built-in components and are not special config keys.
Args:
config: The ESPHome configuration dictionary
Returns:
A set of external component names
"""
from esphome.analyze_memory.helpers import get_esphome_components
builtin_components = get_esphome_components()
return {
key
for key in config
if key not in builtin_components and key not in _NON_COMPONENT_KEYS
}
class ArgsProtocol(Protocol):
device: list[str] | None
@@ -185,7 +219,9 @@ def choose_upload_log_host(
else:
resolved.append(device)
if not resolved:
_LOGGER.error("All specified devices: %s could not be resolved.", defaults)
raise EsphomeError(
f"All specified devices {defaults} could not be resolved. Is the device connected to the network?"
)
return resolved
# No devices specified, show interactive chooser
@@ -890,6 +926,54 @@ def command_idedata(args: ArgsProtocol, config: ConfigType) -> int:
return 0
def command_analyze_memory(args: ArgsProtocol, config: ConfigType) -> int:
"""Analyze memory usage by component.
This command compiles the configuration and performs memory analysis.
Compilation is fast if sources haven't changed (just relinking).
"""
from esphome import platformio_api
from esphome.analyze_memory.cli import MemoryAnalyzerCLI
# Always compile to ensure fresh data (fast if no changes - just relinks)
exit_code = write_cpp(config)
if exit_code != 0:
return exit_code
exit_code = compile_program(args, config)
if exit_code != 0:
return exit_code
_LOGGER.info("Successfully compiled program.")
# Get idedata for analysis
idedata = platformio_api.get_idedata(config)
if idedata is None:
_LOGGER.error("Failed to get IDE data for memory analysis")
return 1
firmware_elf = Path(idedata.firmware_elf_path)
# Extract external components from config
external_components = detect_external_components(config)
_LOGGER.debug("Detected external components: %s", external_components)
# Perform memory analysis
_LOGGER.info("Analyzing memory usage...")
analyzer = MemoryAnalyzerCLI(
str(firmware_elf),
idedata.objdump_path,
idedata.readelf_path,
external_components,
)
analyzer.analyze()
# Generate and display report
report = analyzer.generate_report()
print()
print(report)
return 0
def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
new_name = args.name
for c in new_name:
@@ -1005,6 +1089,7 @@ POST_CONFIG_ACTIONS = {
"idedata": command_idedata,
"rename": command_rename,
"discover": command_discover,
"analyze-memory": command_analyze_memory,
}
SIMPLE_CONFIG_ACTIONS = [
@@ -1290,6 +1375,14 @@ def parse_args(argv):
)
parser_rename.add_argument("name", help="The new name for the device.", type=str)
parser_analyze_memory = subparsers.add_parser(
"analyze-memory",
help="Analyze memory usage by component.",
)
parser_analyze_memory.add_argument(
"configuration", help="Your YAML configuration file(s).", nargs="+"
)
# Keep backward compatibility with the old command line format of
# esphome <config> <command>.
#

View File

@@ -506,7 +506,7 @@ message ListEntitiesLightResponse {
string name = 3;
reserved 4; // Deprecated: was string unique_id
repeated ColorMode supported_color_modes = 12 [(container_pointer) = "std::set<light::ColorMode>"];
repeated ColorMode supported_color_modes = 12 [(container_pointer_no_template) = "light::ColorModeMask"];
// next four supports_* are for legacy clients, newer clients should use color modes
// Deprecated in API version 1.6
bool legacy_supports_brightness = 5 [deprecated=true];

View File

@@ -453,7 +453,6 @@ uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *
bool is_single) {
auto *light = static_cast<light::LightState *>(entity);
LightStateResponse resp;
auto traits = light->get_traits();
auto values = light->remote_values;
auto color_mode = values.get_color_mode();
resp.state = values.is_on();
@@ -477,7 +476,8 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c
auto *light = static_cast<light::LightState *>(entity);
ListEntitiesLightResponse msg;
auto traits = light->get_traits();
msg.supported_color_modes = &traits.get_supported_color_modes_for_api_();
// Pass pointer to ColorModeMask so the iterator can encode actual ColorMode enum values
msg.supported_color_modes = &traits.get_supported_color_modes();
if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) ||
traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) {
msg.min_mireds = traits.get_min_mireds();
@@ -661,11 +661,12 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
ListEntitiesClimateResponse msg;
auto traits = climate->get_traits();
// Flags set for backward compatibility, deprecated in 2025.11.0
msg.supports_current_temperature = traits.get_supports_current_temperature();
msg.supports_current_humidity = traits.get_supports_current_humidity();
msg.supports_two_point_target_temperature = traits.get_supports_two_point_target_temperature();
msg.supports_target_humidity = traits.get_supports_target_humidity();
msg.supports_action = traits.get_supports_action();
msg.supports_current_temperature = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
msg.supports_current_humidity = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
msg.supports_two_point_target_temperature = traits.has_feature_flags(
climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
msg.supports_target_humidity = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);
msg.supports_action = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION);
// Current feature flags and other supported parameters
msg.feature_flags = traits.get_feature_flags();
msg.supported_modes = &traits.get_supported_modes_for_api_();
@@ -1081,13 +1082,8 @@ void APIConnection::on_get_time_response(const GetTimeResponse &value) {
homeassistant::global_homeassistant_time->set_epoch_time(value.epoch_seconds);
#ifdef USE_TIME_TIMEZONE
if (value.timezone_len > 0) {
const std::string &current_tz = homeassistant::global_homeassistant_time->get_timezone();
// Compare without allocating a string
if (current_tz.length() != value.timezone_len ||
memcmp(current_tz.c_str(), value.timezone, value.timezone_len) != 0) {
homeassistant::global_homeassistant_time->set_timezone(
std::string(reinterpret_cast<const char *>(value.timezone), value.timezone_len));
}
homeassistant::global_homeassistant_time->set_timezone(reinterpret_cast<const char *>(value.timezone),
value.timezone_len);
}
#endif
}

View File

@@ -70,4 +70,14 @@ extend google.protobuf.FieldOptions {
// init(size) before adding elements. This eliminates std::vector template overhead
// and is ideal when the exact size is known before populating the array.
optional bool fixed_vector = 50013 [default=false];
// container_pointer_no_template: Use a non-template container type for repeated fields
// Similar to container_pointer, but for containers that don't take template parameters.
// The container type is used as-is without appending element type.
// The container must have:
// - begin() and end() methods returning iterators
// - empty() method
// Example: [(container_pointer_no_template) = "light::ColorModeMask"]
// generates: const light::ColorModeMask *supported_color_modes{};
optional string container_pointer_no_template = 50014;
}
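The option's requirements are deliberately minimal. As a hedged illustration (TinyModeSet and its members are hypothetical, not generated code), any container exposing begin(), end(), and empty() would satisfy them:

#include <cstdint>

// Hypothetical minimal container usable with (container_pointer_no_template).
// Only the three members below are required by the generated encoder.
class TinyModeSet {
 public:
  const uint8_t *begin() const { return this->values_; }              // iterator over the elements
  const uint8_t *end() const { return this->values_ + this->size_; }  // one-past-the-end iterator
  bool empty() const { return this->size_ == 0; }                     // lets the encoder skip the field entirely

 private:
  uint8_t values_[4]{};
  uint8_t size_{0};
};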

View File

@@ -790,7 +790,7 @@ class ListEntitiesLightResponse final : public InfoResponseProtoMessage {
#ifdef HAS_PROTO_MESSAGE_DUMP
const char *message_name() const override { return "list_entities_light_response"; }
#endif
const std::set<light::ColorMode> *supported_color_modes{};
const light::ColorModeMask *supported_color_modes{};
float min_mireds{0.0f};
float max_mireds{0.0f};
std::vector<std::string> effects{};

View File

@@ -0,0 +1,29 @@
import esphome.codegen as cg
from esphome.components.zephyr import zephyr_add_prj_conf
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_LOGS, CONF_TYPE
AUTO_LOAD = ["zephyr_ble_server"]
CODEOWNERS = ["@tomaszduda23"]
ble_nus_ns = cg.esphome_ns.namespace("ble_nus")
BLENUS = ble_nus_ns.class_("BLENUS", cg.Component)
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(BLENUS),
cv.Optional(CONF_TYPE, default=CONF_LOGS): cv.one_of(
*[CONF_LOGS], lower=True
),
}
).extend(cv.COMPONENT_SCHEMA),
cv.only_with_framework("zephyr"),
)
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
zephyr_add_prj_conf("BT_NUS", True)
cg.add(var.set_expose_log(config[CONF_TYPE] == CONF_LOGS))
await cg.register_component(var, config)

View File

@@ -0,0 +1,157 @@
#ifdef USE_ZEPHYR
#include "ble_nus.h"
#include <zephyr/kernel.h>
#include <bluetooth/services/nus.h>
#include "esphome/core/log.h"
#ifdef USE_LOGGER
#include "esphome/components/logger/logger.h"
#include "esphome/core/application.h"
#endif
#include <zephyr/sys/ring_buffer.h>
namespace esphome::ble_nus {
constexpr size_t BLE_TX_BUF_SIZE = 2048;
// NOLINTBEGIN(cppcoreguidelines-avoid-non-const-global-variables)
BLENUS *global_ble_nus;
RING_BUF_DECLARE(global_ble_tx_ring_buf, BLE_TX_BUF_SIZE);
// NOLINTEND(cppcoreguidelines-avoid-non-const-global-variables)
static const char *const TAG = "ble_nus";
size_t BLENUS::write_array(const uint8_t *data, size_t len) {
if (atomic_get(&this->tx_status_) == TX_DISABLED) {
return 0;
}
return ring_buf_put(&global_ble_tx_ring_buf, data, len);
}
void BLENUS::connected(bt_conn *conn, uint8_t err) {
if (err == 0) {
global_ble_nus->conn_.store(bt_conn_ref(conn));
}
}
void BLENUS::disconnected(bt_conn *conn, uint8_t reason) {
if (global_ble_nus->conn_) {
bt_conn_unref(global_ble_nus->conn_.load());
// The connection array is a global static,
// so the reference can be kept even after disconnect.
}
}
void BLENUS::tx_callback(bt_conn *conn) {
atomic_cas(&global_ble_nus->tx_status_, TX_BUSY, TX_ENABLED);
ESP_LOGVV(TAG, "Sent operation completed");
}
void BLENUS::send_enabled_callback(bt_nus_send_status status) {
switch (status) {
case BT_NUS_SEND_STATUS_ENABLED:
atomic_set(&global_ble_nus->tx_status_, TX_ENABLED);
#ifdef USE_LOGGER
if (global_ble_nus->expose_log_) {
App.schedule_dump_config();
}
#endif
ESP_LOGD(TAG, "NUS notification has been enabled");
break;
case BT_NUS_SEND_STATUS_DISABLED:
atomic_set(&global_ble_nus->tx_status_, TX_DISABLED);
ESP_LOGD(TAG, "NUS notification has been disabled");
break;
}
}
void BLENUS::rx_callback(bt_conn *conn, const uint8_t *const data, uint16_t len) {
ESP_LOGD(TAG, "Received %d bytes.", len);
}
void BLENUS::setup() {
bt_nus_cb callbacks = {
.received = rx_callback,
.sent = tx_callback,
.send_enabled = send_enabled_callback,
};
bt_nus_init(&callbacks);
static bt_conn_cb conn_callbacks = {
.connected = BLENUS::connected,
.disconnected = BLENUS::disconnected,
};
bt_conn_cb_register(&conn_callbacks);
global_ble_nus = this;
#ifdef USE_LOGGER
if (logger::global_logger != nullptr && this->expose_log_) {
logger::global_logger->add_on_log_callback(
[this](int level, const char *tag, const char *message, size_t message_len) {
this->write_array(reinterpret_cast<const uint8_t *>(message), message_len);
const char c = '\n';
this->write_array(reinterpret_cast<const uint8_t *>(&c), 1);
});
}
#endif
}
void BLENUS::dump_config() {
ESP_LOGCONFIG(TAG, "ble nus:");
ESP_LOGCONFIG(TAG, " log: %s", YESNO(this->expose_log_));
uint32_t mtu = 0;
bt_conn *conn = this->conn_.load();
if (conn) {
mtu = bt_nus_get_mtu(conn);
}
ESP_LOGCONFIG(TAG, " MTU: %u", mtu);
}
void BLENUS::loop() {
if (ring_buf_is_empty(&global_ble_tx_ring_buf)) {
return;
}
if (!atomic_cas(&this->tx_status_, TX_ENABLED, TX_BUSY)) {
if (atomic_get(&this->tx_status_) == TX_DISABLED) {
ring_buf_reset(&global_ble_tx_ring_buf);
}
return;
}
bt_conn *conn = this->conn_.load();
if (conn) {
conn = bt_conn_ref(conn);
}
if (nullptr == conn) {
atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED);
return;
}
uint32_t req_len = bt_nus_get_mtu(conn);
uint8_t *buf;
uint32_t size = ring_buf_get_claim(&global_ble_tx_ring_buf, &buf, req_len);
int err, err2;
err = bt_nus_send(conn, buf, size);
err2 = ring_buf_get_finish(&global_ble_tx_ring_buf, size);
if (err2) {
// It should not happen.
ESP_LOGE(TAG, "Size %u exceeds valid bytes in the ring buffer (%d error)", size, err2);
}
if (err == 0) {
ESP_LOGVV(TAG, "Sent %d bytes", size);
} else {
ESP_LOGE(TAG, "Failed to send %d bytes (%d error)", size, err);
atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED);
}
bt_conn_unref(conn);
}
} // namespace esphome::ble_nus
#endif

View File

@@ -0,0 +1,37 @@
#pragma once
#ifdef USE_ZEPHYR
#include "esphome/core/defines.h"
#include "esphome/core/component.h"
#include <shell/shell_bt_nus.h>
#include <atomic>
namespace esphome::ble_nus {
class BLENUS : public Component {
enum TxStatus {
TX_DISABLED,
TX_ENABLED,
TX_BUSY,
};
public:
void setup() override;
void dump_config() override;
void loop() override;
size_t write_array(const uint8_t *data, size_t len);
void set_expose_log(bool expose_log) { this->expose_log_ = expose_log; }
protected:
static void send_enabled_callback(bt_nus_send_status status);
static void tx_callback(bt_conn *conn);
static void rx_callback(bt_conn *conn, const uint8_t *data, uint16_t len);
static void connected(bt_conn *conn, uint8_t err);
static void disconnected(bt_conn *conn, uint8_t reason);
std::atomic<bt_conn *> conn_ = nullptr;
bool expose_log_ = false;
atomic_t tx_status_ = ATOMIC_INIT(TX_DISABLED);
};
} // namespace esphome::ble_nus
#endif

View File

@@ -155,16 +155,12 @@ esp32_ble_tracker::AdvertisementParserType BluetoothProxy::get_advertisement_par
BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool reserve) {
for (uint8_t i = 0; i < this->connection_count_; i++) {
auto *connection = this->connections_[i];
if (connection->get_address() == address)
uint64_t conn_addr = connection->get_address();
if (conn_addr == address)
return connection;
}
if (!reserve)
return nullptr;
for (uint8_t i = 0; i < this->connection_count_; i++) {
auto *connection = this->connections_[i];
if (connection->get_address() == 0) {
if (reserve && conn_addr == 0) {
connection->send_service_ = INIT_SENDING_SERVICES;
connection->set_address(address);
// All connections must start at INIT
@@ -175,7 +171,6 @@ BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool rese
return connection;
}
}
return nullptr;
}
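Because the diff above interleaves removed and added lines, here is a hedged sketch of the single-pass find-or-reserve pattern the merged loop implements; Slot and find_or_reserve are illustrative stand-ins, not the actual BluetoothProxy types.

#include <cstddef>
#include <cstdint>

struct Slot {
  uint64_t address{0};  // 0 means the slot is free
};

// Return the slot already tracking `address`, or (when `reserve` is set)
// claim the first free slot encountered in the same pass.
Slot *find_or_reserve(Slot *slots, size_t count, uint64_t address, bool reserve) {
  for (size_t i = 0; i < count; i++) {
    Slot &slot = slots[i];
    uint64_t slot_addr = slot.address;
    if (slot_addr == address)
      return &slot;  // already tracked
    if (reserve && slot_addr == 0) {
      slot.address = address;  // claim the free slot without a second loop
      return &slot;
    }
  }
  return nullptr;  // no match and nothing reserved
}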

View File

@@ -41,7 +41,7 @@ CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(BME680BSECComponent),
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
cv.Optional(CONF_IAQ_MODE, default="STATIC"): cv.enum(
IAQ_MODE_OPTIONS, upper=True
),

View File

@@ -139,7 +139,7 @@ CONFIG_SCHEMA_BASE = (
cv.Optional(CONF_SUPPLY_VOLTAGE, default="3.3V"): cv.enum(
VOLTAGE_OPTIONS, upper=True
),
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
cv.Optional(
CONF_STATE_SAVE_INTERVAL, default="6hours"
): cv.positive_time_period_minutes,

View File

@@ -1,8 +1,8 @@
#pragma once
#include "esphome/core/helpers.h"
#include "climate_mode.h"
#include <set>
#include "climate_mode.h"
#include "esphome/core/helpers.h"
namespace esphome {
@@ -109,44 +109,12 @@ class ClimateTraits {
void set_supported_modes(std::set<ClimateMode> modes) { this->supported_modes_ = std::move(modes); }
void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
void set_supports_auto_mode(bool supports_auto_mode) { set_mode_support_(CLIMATE_MODE_AUTO, supports_auto_mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
void set_supports_cool_mode(bool supports_cool_mode) { set_mode_support_(CLIMATE_MODE_COOL, supports_cool_mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
void set_supports_heat_mode(bool supports_heat_mode) { set_mode_support_(CLIMATE_MODE_HEAT, supports_heat_mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
void set_supports_heat_cool_mode(bool supported) { set_mode_support_(CLIMATE_MODE_HEAT_COOL, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
void set_supports_fan_only_mode(bool supports_fan_only_mode) {
set_mode_support_(CLIMATE_MODE_FAN_ONLY, supports_fan_only_mode);
}
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
void set_supports_dry_mode(bool supports_dry_mode) { set_mode_support_(CLIMATE_MODE_DRY, supports_dry_mode); }
bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); }
const std::set<ClimateMode> &get_supported_modes() const { return this->supported_modes_; }
void set_supported_fan_modes(std::set<ClimateFanMode> modes) { this->supported_fan_modes_ = std::move(modes); }
void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); }
void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_on(bool supported) { set_fan_mode_support_(CLIMATE_FAN_ON, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_off(bool supported) { set_fan_mode_support_(CLIMATE_FAN_OFF, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_auto(bool supported) { set_fan_mode_support_(CLIMATE_FAN_AUTO, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_low(bool supported) { set_fan_mode_support_(CLIMATE_FAN_LOW, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_medium(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MEDIUM, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_high(bool supported) { set_fan_mode_support_(CLIMATE_FAN_HIGH, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_middle(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MIDDLE, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_focus(bool supported) { set_fan_mode_support_(CLIMATE_FAN_FOCUS, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_diffuse(bool supported) { set_fan_mode_support_(CLIMATE_FAN_DIFFUSE, supported); }
bool supports_fan_mode(ClimateFanMode fan_mode) const { return this->supported_fan_modes_.count(fan_mode); }
bool get_supports_fan_modes() const {
return !this->supported_fan_modes_.empty() || !this->supported_custom_fan_modes_.empty();
@@ -178,16 +146,6 @@ class ClimateTraits {
void set_supported_swing_modes(std::set<ClimateSwingMode> modes) { this->supported_swing_modes_ = std::move(modes); }
void add_supported_swing_mode(ClimateSwingMode mode) { this->supported_swing_modes_.insert(mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
void set_supports_swing_mode_off(bool supported) { set_swing_mode_support_(CLIMATE_SWING_OFF, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
void set_supports_swing_mode_both(bool supported) { set_swing_mode_support_(CLIMATE_SWING_BOTH, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
void set_supports_swing_mode_vertical(bool supported) { set_swing_mode_support_(CLIMATE_SWING_VERTICAL, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
void set_supports_swing_mode_horizontal(bool supported) {
set_swing_mode_support_(CLIMATE_SWING_HORIZONTAL, supported);
}
bool supports_swing_mode(ClimateSwingMode swing_mode) const { return this->supported_swing_modes_.count(swing_mode); }
bool get_supports_swing_modes() const { return !this->supported_swing_modes_.empty(); }
const std::set<ClimateSwingMode> &get_supported_swing_modes() const { return this->supported_swing_modes_; }

View File

@@ -805,6 +805,7 @@ async def to_code(config):
add_idf_sdkconfig_option("CONFIG_AUTOSTART_ARDUINO", True)
add_idf_sdkconfig_option("CONFIG_MBEDTLS_PSK_MODES", True)
add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True)
add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True)
cg.add_build_flag("-Wno-nonnull-compare")

View File

@@ -6,6 +6,7 @@
#include <freertos/FreeRTOS.h>
#include <freertos/task.h>
#include <esp_idf_version.h>
#include <esp_ota_ops.h>
#include <esp_task_wdt.h>
#include <esp_timer.h>
#include <soc/rtc.h>
@@ -52,6 +53,16 @@ void arch_init() {
disableCore1WDT();
#endif
#endif
// If the bootloader was compiled with CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE the current
// partition will get rolled back unless it is marked as valid.
esp_ota_img_states_t state;
const esp_partition_t *running = esp_ota_get_running_partition();
if (esp_ota_get_state_partition(running, &state) == ESP_OK) {
if (state == ESP_OTA_IMG_PENDING_VERIFY) {
esp_ota_mark_app_valid_cancel_rollback();
}
}
}
void IRAM_ATTR HOT arch_feed_wdt() { esp_task_wdt_reset(); }

View File

@@ -61,12 +61,7 @@ class BLEClientBase : public espbt::ESPBTClient, public Component {
this->address_str_ = "";
} else {
char buf[18];
uint8_t mac[6] = {
(uint8_t) ((this->address_ >> 40) & 0xff), (uint8_t) ((this->address_ >> 32) & 0xff),
(uint8_t) ((this->address_ >> 24) & 0xff), (uint8_t) ((this->address_ >> 16) & 0xff),
(uint8_t) ((this->address_ >> 8) & 0xff), (uint8_t) ((this->address_ >> 0) & 0xff),
};
format_mac_addr_upper(mac, buf);
format_mac_addr_upper(this->remote_bda_, buf);
this->address_str_ = buf;
}
}

View File

@@ -14,7 +14,7 @@ void Kuntze::on_modbus_data(const std::vector<uint8_t> &data) {
auto get_16bit = [&](int i) -> uint16_t { return (uint16_t(data[i * 2]) << 8) | uint16_t(data[i * 2 + 1]); };
this->waiting_ = false;
ESP_LOGV(TAG, "Data: %s", hexencode(data).c_str());
ESP_LOGV(TAG, "Data: %s", format_hex_pretty(data).c_str());
float value = (float) get_16bit(0);
for (int i = 0; i < data[3]; i++)

View File

@@ -1,11 +1,11 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/core/defines.h"
#include "esphome/core/color.h"
#include "esp_color_correction.h"
#include "esp_color_view.h"
#include "esp_range_view.h"
#include "esphome/core/color.h"
#include "esphome/core/component.h"
#include "esphome/core/defines.h"
#include "light_output.h"
#include "light_state.h"
#include "transformers.h"
@@ -17,8 +17,6 @@
namespace esphome {
namespace light {
using ESPColor ESPDEPRECATED("esphome::light::ESPColor is deprecated, use esphome::Color instead.", "v1.21") = Color;
/// Convert the color information from a `LightColorValues` object to a `Color` object (does not apply brightness).
Color color_from_light_color_values(LightColorValues val);

View File

@@ -104,5 +104,200 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) {
return static_cast<ColorMode>(static_cast<uint8_t>(lhs) | static_cast<uint8_t>(rhs));
}
// Type alias for raw color mode bitmask values
using color_mode_bitmask_t = uint16_t;
// Constants for ColorMode count and bit range
static constexpr int COLOR_MODE_COUNT = 10; // UNKNOWN through RGB_COLD_WARM_WHITE
static constexpr int MAX_BIT_INDEX = sizeof(color_mode_bitmask_t) * 8; // Number of bits in bitmask type
// Compile-time array of all ColorMode values in declaration order
// Bit positions (0-9) map directly to enum declaration order
static constexpr ColorMode COLOR_MODES[COLOR_MODE_COUNT] = {
ColorMode::UNKNOWN, // bit 0
ColorMode::ON_OFF, // bit 1
ColorMode::BRIGHTNESS, // bit 2
ColorMode::WHITE, // bit 3
ColorMode::COLOR_TEMPERATURE, // bit 4
ColorMode::COLD_WARM_WHITE, // bit 5
ColorMode::RGB, // bit 6
ColorMode::RGB_WHITE, // bit 7
ColorMode::RGB_COLOR_TEMPERATURE, // bit 8
ColorMode::RGB_COLD_WARM_WHITE, // bit 9
};
/// Map ColorMode enum values to bit positions (0-9)
/// Bit positions follow the enum declaration order
static constexpr int mode_to_bit(ColorMode mode) {
// Linear search through COLOR_MODES array
// Compiler optimizes this to efficient code since array is constexpr
for (int i = 0; i < COLOR_MODE_COUNT; ++i) {
if (COLOR_MODES[i] == mode)
return i;
}
return 0;
}
/// Map bit positions (0-9) to ColorMode enum values
/// Bit positions follow the enum declaration order
static constexpr ColorMode bit_to_mode(int bit) {
// Direct lookup in COLOR_MODES array
return (bit >= 0 && bit < COLOR_MODE_COUNT) ? COLOR_MODES[bit] : ColorMode::UNKNOWN;
}
/// Helper to compute capability bitmask at compile time
static constexpr color_mode_bitmask_t compute_capability_bitmask(ColorCapability capability) {
color_mode_bitmask_t mask = 0;
uint8_t cap_bit = static_cast<uint8_t>(capability);
// Check each ColorMode to see if it has this capability
for (int bit = 0; bit < COLOR_MODE_COUNT; ++bit) {
uint8_t mode_val = static_cast<uint8_t>(bit_to_mode(bit));
if ((mode_val & cap_bit) != 0) {
mask |= (1 << bit);
}
}
return mask;
}
// Number of ColorCapability enum values
static constexpr int COLOR_CAPABILITY_COUNT = 6;
/// Compile-time lookup table mapping ColorCapability to bitmask
/// This array is computed at compile time using constexpr
static constexpr color_mode_bitmask_t CAPABILITY_BITMASKS[] = {
compute_capability_bitmask(ColorCapability::ON_OFF), // 1 << 0
compute_capability_bitmask(ColorCapability::BRIGHTNESS), // 1 << 1
compute_capability_bitmask(ColorCapability::WHITE), // 1 << 2
compute_capability_bitmask(ColorCapability::COLOR_TEMPERATURE), // 1 << 3
compute_capability_bitmask(ColorCapability::COLD_WARM_WHITE), // 1 << 4
compute_capability_bitmask(ColorCapability::RGB), // 1 << 5
};
/// Bitmask for storing a set of ColorMode values efficiently.
/// Replaces std::set<ColorMode> to eliminate red-black tree overhead (~586 bytes).
class ColorModeMask {
public:
constexpr ColorModeMask() = default;
/// Support initializer list syntax: {ColorMode::RGB, ColorMode::WHITE}
constexpr ColorModeMask(std::initializer_list<ColorMode> modes) {
for (auto mode : modes) {
this->add(mode);
}
}
constexpr void add(ColorMode mode) { this->mask_ |= (1 << mode_to_bit(mode)); }
/// Add multiple modes at once using initializer list
constexpr void add(std::initializer_list<ColorMode> modes) {
for (auto mode : modes) {
this->add(mode);
}
}
constexpr bool contains(ColorMode mode) const { return (this->mask_ & (1 << mode_to_bit(mode))) != 0; }
constexpr size_t size() const {
// Count set bits using Brian Kernighan's algorithm
// More efficient for sparse bitmasks (typical case: 2-4 modes out of 10)
uint16_t n = this->mask_;
size_t count = 0;
while (n) {
n &= n - 1; // Clear the least significant set bit
count++;
}
return count;
}
constexpr bool empty() const { return this->mask_ == 0; }
/// Iterator support for API encoding
class Iterator {
public:
using iterator_category = std::forward_iterator_tag;
using value_type = ColorMode;
using difference_type = std::ptrdiff_t;
using pointer = const ColorMode *;
using reference = ColorMode;
constexpr Iterator(color_mode_bitmask_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit_(); }
constexpr ColorMode operator*() const { return bit_to_mode(bit_); }
constexpr Iterator &operator++() {
++bit_;
advance_to_next_set_bit_();
return *this;
}
constexpr bool operator==(const Iterator &other) const { return bit_ == other.bit_; }
constexpr bool operator!=(const Iterator &other) const { return !(*this == other); }
private:
constexpr void advance_to_next_set_bit_() { bit_ = ColorModeMask::find_next_set_bit(mask_, bit_); }
color_mode_bitmask_t mask_;
int bit_;
};
constexpr Iterator begin() const { return Iterator(mask_, 0); }
constexpr Iterator end() const { return Iterator(mask_, MAX_BIT_INDEX); }
/// Get the raw bitmask value for API encoding
constexpr color_mode_bitmask_t get_mask() const { return this->mask_; }
/// Find the next set bit in a bitmask starting from a given position
/// Returns the bit position, or MAX_BIT_INDEX if no more bits are set
static constexpr int find_next_set_bit(color_mode_bitmask_t mask, int start_bit) {
int bit = start_bit;
while (bit < MAX_BIT_INDEX && !(mask & (1 << bit))) {
++bit;
}
return bit;
}
/// Find the first set bit in a bitmask and return the corresponding ColorMode
/// Used for optimizing compute_color_mode_() intersection logic
static constexpr ColorMode first_mode_from_mask(color_mode_bitmask_t mask) {
return bit_to_mode(find_next_set_bit(mask, 0));
}
/// Check if a ColorMode is present in a raw bitmask value
/// Useful for checking intersection results without creating a temporary ColorModeMask
static constexpr bool mask_contains(color_mode_bitmask_t mask, ColorMode mode) {
return (mask & (1 << mode_to_bit(mode))) != 0;
}
/// Check if any mode in the bitmask has a specific capability
/// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB)
bool has_capability(ColorCapability capability) const {
// Lookup the pre-computed bitmask for this capability and check intersection with our mask
// ColorCapability values: 1, 2, 4, 8, 16, 32 -> array indices: 0, 1, 2, 3, 4, 5
// We need to convert the power-of-2 value to an index
uint8_t cap_val = static_cast<uint8_t>(capability);
#if defined(__GNUC__) || defined(__clang__)
// Use compiler intrinsic for efficient bit position lookup (O(1) vs O(log n))
int index = __builtin_ctz(cap_val);
#else
// Fallback for compilers without __builtin_ctz
int index = 0;
while (cap_val > 1) {
cap_val >>= 1;
++index;
}
#endif
return (this->mask_ & CAPABILITY_BITMASKS[index]) != 0;
}
private:
// Using uint16_t instead of uint32_t for more efficient iteration (fewer bits to scan).
// Currently only 10 ColorMode values exist, so 16 bits is sufficient.
// Can be changed to uint32_t if more than 16 color modes are needed in the future.
// Note: Due to struct padding, uint16_t and uint32_t result in same LightTraits size (12 bytes).
color_mode_bitmask_t mask_{0};
};
} // namespace light
} // namespace esphome
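A minimal usage sketch of the ColorModeMask API defined above; the include path and the example() wrapper are assumptions for illustration, and the chosen modes are arbitrary.

#include "esphome/components/light/color_mode.h"  // assumed header location

using esphome::light::ColorCapability;
using esphome::light::ColorMode;
using esphome::light::ColorModeMask;

void example() {
  // Initializer-list construction, like the std::set it replaces.
  ColorModeMask modes{ColorMode::RGB, ColorMode::COLD_WARM_WHITE};
  bool has_rgb = modes.contains(ColorMode::RGB);                      // true
  size_t count = modes.size();                                        // 2 (popcount of the mask)
  bool dimmable = modes.has_capability(ColorCapability::BRIGHTNESS);  // true: both modes include brightness
  // Iteration yields ColorMode values in enum declaration order.
  for (ColorMode mode : modes) {
    (void) mode;
  }
  (void) has_rgb;
  (void) count;
  (void) dimmable;
}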

View File

@@ -406,7 +406,7 @@ void LightCall::transform_parameters_() {
}
}
ColorMode LightCall::compute_color_mode_() {
auto supported_modes = this->parent_->get_traits().get_supported_color_modes();
const auto &supported_modes = this->parent_->get_traits().get_supported_color_modes();
int supported_count = supported_modes.size();
// Some lights don't support any color modes (e.g. monochromatic light), leave it at unknown.
@@ -425,20 +425,19 @@ ColorMode LightCall::compute_color_mode_() {
// If no color mode is specified, we try to guess the color mode. This is needed for backward compatibility to
// pre-colormode clients and automations, but also for the MQTT API, where HA doesn't let us know which color mode
// was used for some reason.
std::set<ColorMode> suitable_modes = this->get_suitable_color_modes_();
// Compute intersection of suitable and supported modes using bitwise AND
color_mode_bitmask_t intersection = this->get_suitable_color_modes_mask_() & supported_modes.get_mask();
// Don't change if the current mode is suitable.
if (suitable_modes.count(current_mode) > 0) {
// Don't change if the current mode is in the intersection (suitable AND supported)
if (ColorModeMask::mask_contains(intersection, current_mode)) {
ESP_LOGI(TAG, "'%s': color mode not specified; retaining %s", this->parent_->get_name().c_str(),
LOG_STR_ARG(color_mode_to_human(current_mode)));
return current_mode;
}
// Use the preferred suitable mode.
for (auto mode : suitable_modes) {
if (supported_modes.count(mode) == 0)
continue;
if (intersection != 0) {
ColorMode mode = ColorModeMask::first_mode_from_mask(intersection);
ESP_LOGI(TAG, "'%s': color mode not specified; using %s", this->parent_->get_name().c_str(),
LOG_STR_ARG(color_mode_to_human(mode)));
return mode;
@@ -451,7 +450,7 @@ ColorMode LightCall::compute_color_mode_() {
LOG_STR_ARG(color_mode_to_human(color_mode)));
return color_mode;
}
std::set<ColorMode> LightCall::get_suitable_color_modes_() {
color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() {
bool has_white = this->has_white() && this->white_ > 0.0f;
bool has_ct = this->has_color_temperature();
bool has_cwww =
@@ -459,36 +458,44 @@ std::set<ColorMode> LightCall::get_suitable_color_modes_() {
bool has_rgb = (this->has_color_brightness() && this->color_brightness_ > 0.0f) ||
(this->has_red() || this->has_green() || this->has_blue());
// Build key from flags: [rgb][cwww][ct][white]
// Build key from flags: [rgb][cwww][ct][white]
#define KEY(white, ct, cwww, rgb) ((white) << 0 | (ct) << 1 | (cwww) << 2 | (rgb) << 3)
uint8_t key = KEY(has_white, has_ct, has_cwww, has_rgb);
switch (key) {
case KEY(true, false, false, false): // white only
return {ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE,
ColorMode::RGB_COLD_WARM_WHITE};
return ColorModeMask({ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE,
ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE})
.get_mask();
case KEY(false, true, false, false): // ct only
return {ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE,
ColorMode::RGB_COLD_WARM_WHITE};
return ColorModeMask({ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE,
ColorMode::RGB_COLD_WARM_WHITE})
.get_mask();
case KEY(true, true, false, false): // white + ct
return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE};
return ColorModeMask(
{ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE})
.get_mask();
case KEY(false, false, true, false): // cwww only
return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE};
return ColorModeMask({ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask();
case KEY(false, false, false, false): // none
return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, ColorMode::RGB,
ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE};
return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE,
ColorMode::RGB, ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE})
.get_mask();
case KEY(true, false, false, true): // rgb + white
return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE};
return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE})
.get_mask();
case KEY(false, true, false, true): // rgb + ct
case KEY(true, true, false, true): // rgb + white + ct
return {ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE};
return ColorModeMask({ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask();
case KEY(false, false, true, true): // rgb + cwww
return {ColorMode::RGB_COLD_WARM_WHITE};
return ColorModeMask({ColorMode::RGB_COLD_WARM_WHITE}).get_mask();
case KEY(false, false, false, true): // rgb only
return {ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE};
return ColorModeMask({ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE,
ColorMode::RGB_COLD_WARM_WHITE})
.get_mask();
default:
return {}; // conflicting flags
return 0; // conflicting flags
}
#undef KEY
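As a brief hedged sketch of the intersection logic above (pick_mode is an illustrative free function, not part of LightCall), the suitable and supported sets reduce to a single bitwise AND:

#include "esphome/components/light/color_mode.h"  // assumed header location

using esphome::light::ColorMode;
using esphome::light::ColorModeMask;
using esphome::light::color_mode_bitmask_t;

ColorMode pick_mode(color_mode_bitmask_t suitable, const ColorModeMask &supported, ColorMode current) {
  color_mode_bitmask_t intersection = suitable & supported.get_mask();
  if (ColorModeMask::mask_contains(intersection, current))
    return current;  // the current mode is both suitable and supported, keep it
  if (intersection != 0)
    return ColorModeMask::first_mode_from_mask(intersection);  // lowest set bit wins
  return ColorMode::UNKNOWN;  // conflicting flags or nothing supported
}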

View File

@@ -1,7 +1,6 @@
#pragma once
#include "light_color_values.h"
#include <set>
namespace esphome {
@@ -186,8 +185,8 @@ class LightCall {
//// Compute the color mode that should be used for this call.
ColorMode compute_color_mode_();
/// Get potential color modes for this light call.
std::set<ColorMode> get_suitable_color_modes_();
/// Get potential color modes bitmask for this light call.
color_mode_bitmask_t get_suitable_color_modes_mask_();
/// Some color modes also can be set using non-native parameters, transform those calls.
void transform_parameters_();

View File

@@ -43,7 +43,6 @@ void LightJSONSchema::dump_json(LightState &state, JsonObject root) {
}
auto values = state.remote_values;
auto traits = state.get_output()->get_traits();
const auto color_mode = values.get_color_mode();
const char *mode_str = get_color_mode_json_str(color_mode);

View File

@@ -191,11 +191,9 @@ void LightState::current_values_as_brightness(float *brightness) {
this->current_values.as_brightness(brightness, this->gamma_correct_);
}
void LightState::current_values_as_rgb(float *red, float *green, float *blue, bool color_interlock) {
auto traits = this->get_traits();
this->current_values.as_rgb(red, green, blue, this->gamma_correct_, false);
}
void LightState::current_values_as_rgbw(float *red, float *green, float *blue, float *white, bool color_interlock) {
auto traits = this->get_traits();
this->current_values.as_rgbw(red, green, blue, white, this->gamma_correct_, false);
}
void LightState::current_values_as_rgbww(float *red, float *green, float *blue, float *cold_white, float *warm_white,
@@ -209,7 +207,6 @@ void LightState::current_values_as_rgbct(float *red, float *green, float *blue,
white_brightness, this->gamma_correct_);
}
void LightState::current_values_as_cwww(float *cold_white, float *warm_white, bool constant_brightness) {
auto traits = this->get_traits();
this->current_values.as_cwww(cold_white, warm_white, this->gamma_correct_, constant_brightness);
}
void LightState::current_values_as_ct(float *color_temperature, float *white_brightness) {

View File

@@ -1,8 +1,7 @@
#pragma once
#include "esphome/core/helpers.h"
#include "color_mode.h"
#include <set>
#include "esphome/core/helpers.h"
namespace esphome {
@@ -19,38 +18,17 @@ class LightTraits {
public:
LightTraits() = default;
const std::set<ColorMode> &get_supported_color_modes() const { return this->supported_color_modes_; }
void set_supported_color_modes(std::set<ColorMode> supported_color_modes) {
this->supported_color_modes_ = std::move(supported_color_modes);
const ColorModeMask &get_supported_color_modes() const { return this->supported_color_modes_; }
void set_supported_color_modes(ColorModeMask supported_color_modes) {
this->supported_color_modes_ = supported_color_modes;
}
void set_supported_color_modes(std::initializer_list<ColorMode> modes) {
this->supported_color_modes_ = ColorModeMask(modes);
}
bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.count(color_mode); }
bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.contains(color_mode); }
bool supports_color_capability(ColorCapability color_capability) const {
for (auto mode : this->supported_color_modes_) {
if (mode & color_capability)
return true;
}
return false;
}
ESPDEPRECATED("get_supports_brightness() is deprecated, use color modes instead.", "v1.21")
bool get_supports_brightness() const { return this->supports_color_capability(ColorCapability::BRIGHTNESS); }
ESPDEPRECATED("get_supports_rgb() is deprecated, use color modes instead.", "v1.21")
bool get_supports_rgb() const { return this->supports_color_capability(ColorCapability::RGB); }
ESPDEPRECATED("get_supports_rgb_white_value() is deprecated, use color modes instead.", "v1.21")
bool get_supports_rgb_white_value() const {
return this->supports_color_mode(ColorMode::RGB_WHITE) ||
this->supports_color_mode(ColorMode::RGB_COLOR_TEMPERATURE);
}
ESPDEPRECATED("get_supports_color_temperature() is deprecated, use color modes instead.", "v1.21")
bool get_supports_color_temperature() const {
return this->supports_color_capability(ColorCapability::COLOR_TEMPERATURE);
}
ESPDEPRECATED("get_supports_color_interlock() is deprecated, use color modes instead.", "v1.21")
bool get_supports_color_interlock() const {
return this->supports_color_mode(ColorMode::RGB) &&
(this->supports_color_mode(ColorMode::WHITE) || this->supports_color_mode(ColorMode::COLD_WARM_WHITE) ||
this->supports_color_mode(ColorMode::COLOR_TEMPERATURE));
return this->supported_color_modes_.has_capability(color_capability);
}
float get_min_mireds() const { return this->min_mireds_; }
@@ -59,19 +37,9 @@ class LightTraits {
void set_max_mireds(float max_mireds) { this->max_mireds_ = max_mireds; }
protected:
#ifdef USE_API
// The API connection is a friend class to access internal methods
friend class api::APIConnection;
// This method returns a reference to the internal color modes set.
// It is used by the API to avoid copying data when encoding messages.
// Warning: Do not use this method outside of the API connection code.
// It returns a reference to internal data that can be invalidated.
const std::set<ColorMode> &get_supported_color_modes_for_api_() const { return this->supported_color_modes_; }
#endif
std::set<ColorMode> supported_color_modes_{};
float min_mireds_{0};
float max_mireds_{0};
ColorModeMask supported_color_modes_{};
};
} // namespace light
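The hunk above swaps the std::set<ColorMode> storage for a ColorModeMask and folds the old capability loop into has_capability(). The mask type itself is defined elsewhere in the repo and is not shown in this diff; purely as an illustration of how such a mask can answer the same two queries as the removed set-based code, here is a self-contained sketch with simplified, made-up enum values (the real ColorMode/ColorCapability constants carry more modes and bits):

#include <cstdint>
#include <cstdio>
#include <initializer_list>

// Simplified stand-ins for the example only; not the real esphome enums.
enum class ColorCapability : uint8_t { ON_OFF = 1, BRIGHTNESS = 2, RGB = 4 };
enum class ColorMode : uint8_t {
  ON_OFF = 1,          // ON_OFF capability only
  BRIGHTNESS = 1 | 2,  // ON_OFF + BRIGHTNESS
  RGB = 1 | 2 | 4,     // ON_OFF + BRIGHTNESS + RGB
};

class ColorModeMaskSketch {
 public:
  ColorModeMaskSketch() = default;
  ColorModeMaskSketch(std::initializer_list<ColorMode> modes) {
    for (auto m : modes)
      this->mask_ |= 1ULL << static_cast<uint8_t>(m);
  }
  // Same role as std::set::count() in the removed code.
  bool contains(ColorMode mode) const { return (this->mask_ >> static_cast<uint8_t>(mode)) & 1ULL; }
  // Same role as the removed loop: any supported mode whose value carries the capability bit.
  bool has_capability(ColorCapability cap) const {
    for (uint8_t value = 0; value < 64; value++) {
      if (((this->mask_ >> value) & 1ULL) && (value & static_cast<uint8_t>(cap)))
        return true;
    }
    return false;
  }

 private:
  uint64_t mask_{0};  // one bit per ColorMode enum value
};

int main() {
  ColorModeMaskSketch mask{ColorMode::ON_OFF, ColorMode::RGB};
  std::printf("contains(RGB) = %d\n", mask.contains(ColorMode::RGB));                                  // 1
  std::printf("has_capability(BRIGHTNESS) = %d\n", mask.has_capability(ColorCapability::BRIGHTNESS));  // 1, via RGB
  return 0;
}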

View File

@@ -68,6 +68,9 @@ static constexpr char LOG_LEVEL_LETTER_CHARS[] = {
// Maximum header size: 35 bytes fixed + 32 bytes tag + 16 bytes thread name = 83 bytes (45 byte safety margin)
static constexpr uint16_t MAX_HEADER_SIZE = 128;
// "0x" + 2 hex digits per byte + '\0'
static constexpr size_t MAX_POINTER_REPRESENTATION = 2 + sizeof(void *) * 2 + 1;
#if defined(USE_ESP32) || defined(USE_ESP8266) || defined(USE_RP2040) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
/** Enum for logging UART selection
*
@@ -177,8 +180,11 @@ class Logger : public Component {
inline void HOT format_log_to_buffer_with_terminator_(uint8_t level, const char *tag, int line, const char *format,
va_list args, char *buffer, uint16_t *buffer_at,
uint16_t buffer_size) {
#if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
#if defined(USE_ESP32) || defined(USE_LIBRETINY)
this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(), buffer, buffer_at, buffer_size);
#elif defined(USE_ZEPHYR)
char buff[MAX_POINTER_REPRESENTATION];
this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(buff), buffer, buffer_at, buffer_size);
#else
this->write_header_to_buffer_(level, tag, line, nullptr, buffer, buffer_at, buffer_size);
#endif
@@ -277,7 +283,11 @@ class Logger : public Component {
#endif
#if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
const char *HOT get_thread_name_() {
const char *HOT get_thread_name_(
#ifdef USE_ZEPHYR
char *buff
#endif
) {
#ifdef USE_ZEPHYR
k_tid_t current_task = k_current_get();
#else
@@ -291,7 +301,13 @@ class Logger : public Component {
#elif defined(USE_LIBRETINY)
return pcTaskGetTaskName(current_task);
#elif defined(USE_ZEPHYR)
return k_thread_name_get(current_task);
const char *name = k_thread_name_get(current_task);
if (name) {
// Zephyr only provides thread names when the debug component is present
return name;
}
std::snprintf(buff, MAX_POINTER_REPRESENTATION, "%p", current_task);
return buff;
#endif
}
}
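On Zephyr, the fallback above prints the thread handle when k_thread_name_get() returns null. As a standalone sketch of the buffer-size arithmetic (the main() and dummy pointer are made up for the example; the exact "%p" rendering is implementation-defined, which is why the constant reserves room for a "0x" prefix):

#include <cstddef>
#include <cstdio>

// "0x" + 2 hex digits per byte + '\0', matching the constant introduced above.
static constexpr size_t MAX_POINTER_REPRESENTATION = 2 + sizeof(void *) * 2 + 1;

int main() {
  int dummy = 0;
  char buff[MAX_POINTER_REPRESENTATION];
  std::snprintf(buff, sizeof(buff), "%p", static_cast<void *>(&dummy));
  std::printf("thread name fallback: %s\n", buff);  // e.g. 0x7ffee2c1a9cc
  return 0;
}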

View File

@@ -31,18 +31,17 @@ void MDNSComponent::setup() {
mdns_instance_name_set(this->hostname_.c_str());
for (const auto &service : services) {
std::vector<mdns_txt_item_t> txt_records;
for (const auto &record : service.txt_records) {
mdns_txt_item_t it{};
auto txt_records = std::make_unique<mdns_txt_item_t[]>(service.txt_records.size());
for (size_t i = 0; i < service.txt_records.size(); i++) {
const auto &record = service.txt_records[i];
// key and value are either compile-time string literals in flash or pointers to dynamic_txt_values_
// Both remain valid for the lifetime of this function, and ESP-IDF makes internal copies
it.key = MDNS_STR_ARG(record.key);
it.value = MDNS_STR_ARG(record.value);
txt_records.push_back(it);
txt_records[i].key = MDNS_STR_ARG(record.key);
txt_records[i].value = MDNS_STR_ARG(record.value);
}
uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port,
txt_records.data(), txt_records.size());
txt_records.get(), service.txt_records.size());
if (err != ESP_OK) {
ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));

View File

@@ -140,11 +140,8 @@ void MQTTClientComponent::send_device_info_() {
#endif
#ifdef USE_API_NOISE
if (api::global_api_server->get_noise_ctx()->has_psk()) {
root["api_encryption"] = "Noise_NNpsk0_25519_ChaChaPoly_SHA256";
} else {
root["api_encryption_supported"] = "Noise_NNpsk0_25519_ChaChaPoly_SHA256";
}
root[api::global_api_server->get_noise_ctx()->has_psk() ? "api_encryption" : "api_encryption_supported"] =
"Noise_NNpsk0_25519_ChaChaPoly_SHA256";
#endif
},
2, this->discovery_info_.retain);

View File

@@ -17,11 +17,11 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
auto traits = this->device_->get_traits();
// current_temperature_topic
if (traits.get_supports_current_temperature()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
root[MQTT_CURRENT_TEMPERATURE_TOPIC] = this->get_current_temperature_state_topic();
}
// current_humidity_topic
if (traits.get_supports_current_humidity()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
root[MQTT_CURRENT_HUMIDITY_TOPIC] = this->get_current_humidity_state_topic();
}
// mode_command_topic
@@ -45,7 +45,8 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
if (traits.supports_mode(CLIMATE_MODE_HEAT_COOL))
modes.add("heat_cool");
if (traits.get_supports_two_point_target_temperature()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
// temperature_low_command_topic
root[MQTT_TEMPERATURE_LOW_COMMAND_TOPIC] = this->get_target_temperature_low_command_topic();
// temperature_low_state_topic
@@ -61,7 +62,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
root[MQTT_TEMPERATURE_STATE_TOPIC] = this->get_target_temperature_state_topic();
}
if (traits.get_supports_target_humidity()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
// target_humidity_command_topic
root[MQTT_TARGET_HUMIDITY_COMMAND_TOPIC] = this->get_target_humidity_command_topic();
// target_humidity_state_topic
@@ -109,7 +110,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
presets.add(preset);
}
if (traits.get_supports_action()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
// action_topic
root[MQTT_ACTION_TOPIC] = this->get_action_state_topic();
}
@@ -174,7 +175,8 @@ void MQTTClimateComponent::setup() {
call.perform();
});
if (traits.get_supports_two_point_target_temperature()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
this->subscribe(this->get_target_temperature_low_command_topic(),
[this](const std::string &topic, const std::string &payload) {
auto val = parse_number<float>(payload);
@@ -211,7 +213,7 @@ void MQTTClimateComponent::setup() {
});
}
if (traits.get_supports_target_humidity()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
this->subscribe(this->get_target_humidity_command_topic(),
[this](const std::string &topic, const std::string &payload) {
auto val = parse_number<float>(payload);
@@ -290,12 +292,14 @@ bool MQTTClimateComponent::publish_state_() {
success = false;
int8_t target_accuracy = traits.get_target_temperature_accuracy_decimals();
int8_t current_accuracy = traits.get_current_temperature_accuracy_decimals();
if (traits.get_supports_current_temperature() && !std::isnan(this->device_->current_temperature)) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE) &&
!std::isnan(this->device_->current_temperature)) {
std::string payload = value_accuracy_to_string(this->device_->current_temperature, current_accuracy);
if (!this->publish(this->get_current_temperature_state_topic(), payload))
success = false;
}
if (traits.get_supports_two_point_target_temperature()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
std::string payload = value_accuracy_to_string(this->device_->target_temperature_low, target_accuracy);
if (!this->publish(this->get_target_temperature_low_state_topic(), payload))
success = false;
@@ -308,12 +312,14 @@ bool MQTTClimateComponent::publish_state_() {
success = false;
}
if (traits.get_supports_current_humidity() && !std::isnan(this->device_->current_humidity)) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY) &&
!std::isnan(this->device_->current_humidity)) {
std::string payload = value_accuracy_to_string(this->device_->current_humidity, 0);
if (!this->publish(this->get_current_humidity_state_topic(), payload))
success = false;
}
if (traits.get_supports_target_humidity() && !std::isnan(this->device_->target_humidity)) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY) &&
!std::isnan(this->device_->target_humidity)) {
std::string payload = value_accuracy_to_string(this->device_->target_humidity, 0);
if (!this->publish(this->get_target_humidity_state_topic(), payload))
success = false;
@@ -357,7 +363,7 @@ bool MQTTClimateComponent::publish_state_() {
success = false;
}
if (traits.get_supports_action()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
const char *payload;
switch (this->device_->action) {
case CLIMATE_ACTION_OFF:

View File

@@ -85,24 +85,20 @@ bool MQTTComponent::send_discovery_() {
}
// Fields from EntityBase
if (this->get_entity()->has_own_name()) {
root[MQTT_NAME] = this->friendly_name();
} else {
root[MQTT_NAME] = "";
}
root[MQTT_NAME] = this->get_entity()->has_own_name() ? this->friendly_name() : "";
if (this->is_disabled_by_default())
root[MQTT_ENABLED_BY_DEFAULT] = false;
if (!this->get_icon().empty())
root[MQTT_ICON] = this->get_icon();
switch (this->get_entity()->get_entity_category()) {
const auto entity_category = this->get_entity()->get_entity_category();
switch (entity_category) {
case ENTITY_CATEGORY_NONE:
break;
case ENTITY_CATEGORY_CONFIG:
root[MQTT_ENTITY_CATEGORY] = "config";
break;
case ENTITY_CATEGORY_DIAGNOSTIC:
root[MQTT_ENTITY_CATEGORY] = "diagnostic";
root[MQTT_ENTITY_CATEGORY] = entity_category == ENTITY_CATEGORY_CONFIG ? "config" : "diagnostic";
break;
}
@@ -113,20 +109,14 @@ bool MQTTComponent::send_discovery_() {
if (this->command_retain_)
root[MQTT_COMMAND_RETAIN] = true;
if (this->availability_ == nullptr) {
if (!global_mqtt_client->get_availability().topic.empty()) {
root[MQTT_AVAILABILITY_TOPIC] = global_mqtt_client->get_availability().topic;
if (global_mqtt_client->get_availability().payload_available != "online")
root[MQTT_PAYLOAD_AVAILABLE] = global_mqtt_client->get_availability().payload_available;
if (global_mqtt_client->get_availability().payload_not_available != "offline")
root[MQTT_PAYLOAD_NOT_AVAILABLE] = global_mqtt_client->get_availability().payload_not_available;
}
} else if (!this->availability_->topic.empty()) {
root[MQTT_AVAILABILITY_TOPIC] = this->availability_->topic;
if (this->availability_->payload_available != "online")
root[MQTT_PAYLOAD_AVAILABLE] = this->availability_->payload_available;
if (this->availability_->payload_not_available != "offline")
root[MQTT_PAYLOAD_NOT_AVAILABLE] = this->availability_->payload_not_available;
const Availability &avail =
this->availability_ == nullptr ? global_mqtt_client->get_availability() : *this->availability_;
if (!avail.topic.empty()) {
root[MQTT_AVAILABILITY_TOPIC] = avail.topic;
if (avail.payload_available != "online")
root[MQTT_PAYLOAD_AVAILABLE] = avail.payload_available;
if (avail.payload_not_available != "offline")
root[MQTT_PAYLOAD_NOT_AVAILABLE] = avail.payload_not_available;
}
const MQTTDiscoveryInfo &discovery_info = global_mqtt_client->get_discovery_info();
@@ -145,10 +135,8 @@ bool MQTTComponent::send_discovery_() {
if (discovery_info.object_id_generator == MQTT_DEVICE_NAME_OBJECT_ID_GENERATOR)
root[MQTT_OBJECT_ID] = node_name + "_" + this->get_default_object_id_();
std::string node_friendly_name = App.get_friendly_name();
if (node_friendly_name.empty()) {
node_friendly_name = node_name;
}
const std::string &friendly_name_ref = App.get_friendly_name();
const std::string &node_friendly_name = friendly_name_ref.empty() ? node_name : friendly_name_ref;
std::string node_area = App.get_area();
JsonObject device_info = root[MQTT_DEVICE].to<JsonObject>();
@@ -158,13 +146,9 @@ bool MQTTComponent::send_discovery_() {
#ifdef ESPHOME_PROJECT_NAME
device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_PROJECT_VERSION " (ESPHome " ESPHOME_VERSION ")";
const char *model = std::strchr(ESPHOME_PROJECT_NAME, '.');
if (model == nullptr) { // must never happen but check anyway
device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD;
device_info[MQTT_DEVICE_MANUFACTURER] = ESPHOME_PROJECT_NAME;
} else {
device_info[MQTT_DEVICE_MODEL] = model + 1;
device_info[MQTT_DEVICE_MANUFACTURER] = std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME);
}
device_info[MQTT_DEVICE_MODEL] = model == nullptr ? ESPHOME_BOARD : model + 1;
device_info[MQTT_DEVICE_MANUFACTURER] =
model == nullptr ? ESPHOME_PROJECT_NAME : std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME);
#else
device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_VERSION " (" + App.get_compilation_time() + ")";
device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD;

View File

@@ -1291,9 +1291,6 @@ void Nextion::check_pending_waveform_() {
void Nextion::set_writer(const nextion_writer_t &writer) { this->writer_ = writer; }
ESPDEPRECATED("set_wait_for_ack(bool) deprecated, no effect", "v1.20")
void Nextion::set_wait_for_ack(bool wait_for_ack) { ESP_LOGE(TAG, "Deprecated"); }
bool Nextion::is_updating() { return this->connection_state_.is_updating_; }
} // namespace nextion

View File

@@ -916,7 +916,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
auto min_temp_value = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
climate_value_row_(stream, obj, area, node, friendly_name, min_temp, min_temp_value);
// now check optional traits
if (traits.get_supports_current_temperature()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
std::string current_temp = "current_temperature";
if (std::isnan(obj->current_temperature)) {
climate_failed_row_(stream, obj, area, node, friendly_name, current_temp, true);
@@ -927,7 +927,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
climate_failed_row_(stream, obj, area, node, friendly_name, current_temp, false);
}
}
if (traits.get_supports_current_humidity()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
std::string current_humidity = "current_humidity";
if (std::isnan(obj->current_humidity)) {
climate_failed_row_(stream, obj, area, node, friendly_name, current_humidity, true);
@@ -938,7 +938,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
climate_failed_row_(stream, obj, area, node, friendly_name, current_humidity, false);
}
}
if (traits.get_supports_target_humidity()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
std::string target_humidity = "target_humidity";
if (std::isnan(obj->target_humidity)) {
climate_failed_row_(stream, obj, area, node, friendly_name, target_humidity, true);
@@ -949,7 +949,8 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
climate_failed_row_(stream, obj, area, node, friendly_name, target_humidity, false);
}
}
if (traits.get_supports_two_point_target_temperature()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
std::string target_temp_low = "target_temperature_low";
auto target_temp_low_value = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
climate_value_row_(stream, obj, area, node, friendly_name, target_temp_low, target_temp_low_value);
@@ -961,7 +962,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
auto target_temp_value = value_accuracy_to_string(obj->target_temperature, target_accuracy);
climate_value_row_(stream, obj, area, node, friendly_name, target_temp, target_temp_value);
}
if (traits.get_supports_action()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
std::string climate_trait_category = "action";
const auto *climate_trait_value = climate::climate_action_to_string(obj->action);
climate_setting_row_(stream, obj, area, node, friendly_name, climate_trait_category, climate_trait_value);

View File

@@ -81,7 +81,7 @@ CONFIG_SCHEMA = (
cv.int_range(min=0, max=0xFFFF, max_included=False),
),
cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION): cv.pressure,
cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature,
cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature_delta,
cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION_SOURCE): cv.use_id(
sensor.Sensor
),

View File

@@ -45,13 +45,26 @@ def get_script(script_id):
def check_max_runs(value):
# Set default for queued mode to prevent unbounded queue growth
if CONF_MAX_RUNS not in value and value[CONF_MODE] == CONF_QUEUED:
value[CONF_MAX_RUNS] = 5
if CONF_MAX_RUNS not in value:
return value
if value[CONF_MODE] not in [CONF_QUEUED, CONF_PARALLEL]:
raise cv.Invalid(
"The option 'max_runs' is only valid in 'queue' and 'parallel' mode.",
"The option 'max_runs' is only valid in 'queued' and 'parallel' mode.",
path=[CONF_MAX_RUNS],
)
# Queued mode must have a bounded queue (min 1); parallel mode can be unlimited (0)
if value[CONF_MODE] == CONF_QUEUED and value[CONF_MAX_RUNS] < 1:
raise cv.Invalid(
"The option 'max_runs' must be at least 1 for queued mode.",
path=[CONF_MAX_RUNS],
)
return value
@@ -106,7 +119,7 @@ CONFIG_SCHEMA = automation.validate_automation(
cv.Optional(CONF_MODE, default=CONF_SINGLE): cv.one_of(
*SCRIPT_MODES, lower=True
),
cv.Optional(CONF_MAX_RUNS): cv.positive_int,
cv.Optional(CONF_MAX_RUNS): cv.int_range(min=0, max=100),
cv.Optional(CONF_PARAMETERS, default={}): cv.Schema(
{
validate_parameter_name: validate_parameter_type,

View File

@@ -1,10 +1,11 @@
#pragma once
#include <memory>
#include <tuple>
#include "esphome/core/automation.h"
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#include <queue>
namespace esphome {
namespace script {
@@ -96,23 +97,41 @@ template<typename... Ts> class RestartScript : public Script<Ts...> {
/** A script type that queues new instances that are created.
*
* Only one instance of the script can be active at a time.
*
* Ring buffer implementation:
* - num_queued_ tracks the number of queued (waiting) instances, NOT including the currently running one
* - queue_front_ points to the next item to execute (read position)
* - Buffer size is max_runs_ - 1 (max total instances minus the running one)
* - Write position is calculated as: (queue_front_ + num_queued_) % (max_runs_ - 1)
* - When an item finishes, queue_front_ advances: (queue_front_ + 1) % (max_runs_ - 1)
* - First execute() runs immediately without queuing (num_queued_ stays 0)
* - Subsequent executes while running are queued starting at position 0
* - Maximum total instances = max_runs_ (includes 1 running + (max_runs_ - 1) queued)
*/
template<typename... Ts> class QueueingScript : public Script<Ts...>, public Component {
public:
void execute(Ts... x) override {
if (this->is_action_running() || this->num_runs_ > 0) {
// num_runs_ is the number of *queued* instances, so total number of instances is
// num_runs_ + 1
if (this->max_runs_ != 0 && this->num_runs_ + 1 >= this->max_runs_) {
this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' maximum number of queued runs exceeded!"),
if (this->is_action_running() || this->num_queued_ > 0) {
// num_queued_ is the number of *queued* instances (waiting, not including currently running)
// max_runs_ is the maximum *total* instances (running + queued)
// So we reject when num_queued_ + 1 >= max_runs_ (queued + running >= max)
if (this->num_queued_ + 1 >= this->max_runs_) {
this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' max instances (running + queued) reached!"),
LOG_STR_ARG(this->name_));
return;
}
// Initialize queue on first queued item (after capacity check)
this->lazy_init_queue_();
this->esp_logd_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' queueing new instance (mode: queued)"),
LOG_STR_ARG(this->name_));
this->num_runs_++;
this->var_queue_.push(std::make_tuple(x...));
// Ring buffer: write to (queue_front_ + num_queued_) % queue_capacity
const size_t queue_capacity = static_cast<size_t>(this->max_runs_ - 1);
size_t write_pos = (this->queue_front_ + this->num_queued_) % queue_capacity;
// Allocate the tuple on the heap; assignment replaces any previous unique_ptr in this slot
this->var_queue_[write_pos] = std::make_unique<std::tuple<Ts...>>(x...);
this->num_queued_++;
return;
}
@@ -122,29 +141,46 @@ template<typename... Ts> class QueueingScript : public Script<Ts...>, public Com
}
void stop() override {
this->num_runs_ = 0;
// Clear all queued items to free memory immediately
// Resetting the array automatically destroys all unique_ptrs and their contents
this->var_queue_.reset();
this->num_queued_ = 0;
this->queue_front_ = 0;
Script<Ts...>::stop();
}
void loop() override {
if (this->num_runs_ != 0 && !this->is_action_running()) {
this->num_runs_--;
auto &vars = this->var_queue_.front();
this->var_queue_.pop();
this->trigger_tuple_(vars, typename gens<sizeof...(Ts)>::type());
if (this->num_queued_ != 0 && !this->is_action_running()) {
// Dequeue: decrement count, move tuple out (frees slot), advance read position
this->num_queued_--;
const size_t queue_capacity = static_cast<size_t>(this->max_runs_ - 1);
auto tuple_ptr = std::move(this->var_queue_[this->queue_front_]);
this->queue_front_ = (this->queue_front_ + 1) % queue_capacity;
this->trigger_tuple_(*tuple_ptr, typename gens<sizeof...(Ts)>::type());
}
}
void set_max_runs(int max_runs) { max_runs_ = max_runs; }
protected:
// Lazy init queue on first use - avoids setup() ordering issues and saves memory
// if script is never executed during this boot cycle
inline void lazy_init_queue_() {
if (!this->var_queue_) {
// Allocate array of max_runs_ - 1 slots for queued items (running item is separate)
// unique_ptr array is zero-initialized, so all slots start as nullptr
this->var_queue_ = std::make_unique<std::unique_ptr<std::tuple<Ts...>>[]>(this->max_runs_ - 1);
}
}
template<int... S> void trigger_tuple_(const std::tuple<Ts...> &tuple, seq<S...> /*unused*/) {
this->trigger(std::get<S>(tuple)...);
}
int num_runs_ = 0;
int max_runs_ = 0;
std::queue<std::tuple<Ts...>> var_queue_;
int num_queued_ = 0; // Number of queued instances (not including currently running)
int max_runs_ = 0; // Maximum total instances (running + queued)
size_t queue_front_ = 0; // Ring buffer read position (next item to execute)
std::unique_ptr<std::unique_ptr<std::tuple<Ts...>>[]> var_queue_; // Ring buffer of queued parameters
};
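The comment block above describes the indexing invariants, which are easier to see in a self-contained form. Below is a hypothetical RingQueue that mirrors only that arithmetic (capacity max_runs - 1, write position (front + queued) % capacity, read position advancing on dequeue); it is not the real QueueingScript, which additionally tracks the currently running instance separately:

#include <cstddef>
#include <cstdio>
#include <memory>

struct RingQueue {
  explicit RingQueue(int max_runs) : max_runs_(max_runs) {}

  bool enqueue(int value) {
    // Mirrors the capacity check above: reject once queued items plus the one "running" slot reach max_runs_.
    if (this->num_queued_ + 1 >= this->max_runs_)
      return false;
    if (!this->slots_)  // lazy allocation, as in lazy_init_queue_()
      this->slots_ = std::make_unique<std::unique_ptr<int>[]>(this->max_runs_ - 1);
    const size_t capacity = static_cast<size_t>(this->max_runs_ - 1);
    const size_t write_pos = (this->front_ + this->num_queued_) % capacity;
    this->slots_[write_pos] = std::make_unique<int>(value);
    this->num_queued_++;
    return true;
  }

  std::unique_ptr<int> dequeue() {
    if (this->num_queued_ == 0)
      return nullptr;
    this->num_queued_--;
    const size_t capacity = static_cast<size_t>(this->max_runs_ - 1);
    auto item = std::move(this->slots_[this->front_]);
    this->front_ = (this->front_ + 1) % capacity;
    return item;
  }

  int max_runs_;         // maximum total instances (running + queued)
  int num_queued_{0};    // queued items, excluding the running one
  size_t front_{0};      // next slot to dequeue
  std::unique_ptr<std::unique_ptr<int>[]> slots_;  // capacity = max_runs_ - 1
};

int main() {
  RingQueue q(3);                     // 1 running + at most 2 queued
  std::printf("%d\n", q.enqueue(1));  // 1: stored at slot 0
  std::printf("%d\n", q.enqueue(2));  // 1: stored at slot 1
  std::printf("%d\n", q.enqueue(3));  // 0: rejected, max instances reached
  if (auto item = q.dequeue())
    std::printf("dequeued %d\n", *item);  // dequeued 1
  return 0;
}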
/** A script type that executes new instances in parallel.

View File

@@ -28,21 +28,6 @@
namespace esphome {
namespace statsd {
using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR };
using sensors_t = struct {
const char *name;
sensor_type_t type;
union {
#ifdef USE_SENSOR
esphome::sensor::Sensor *sensor;
#endif
#ifdef USE_BINARY_SENSOR
esphome::binary_sensor::BinarySensor *binary_sensor;
#endif
};
};
class StatsdComponent : public PollingComponent {
public:
~StatsdComponent();
@@ -71,6 +56,20 @@ class StatsdComponent : public PollingComponent {
const char *prefix_;
uint16_t port_;
using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR };
using sensors_t = struct {
const char *name;
sensor_type_t type;
union {
#ifdef USE_SENSOR
esphome::sensor::Sensor *sensor;
#endif
#ifdef USE_BINARY_SENSOR
esphome::binary_sensor::BinarySensor *binary_sensor;
#endif
};
};
std::vector<sensors_t> sensors_;
#ifdef USE_ESP8266

View File

@@ -6,7 +6,7 @@ import esphome.config_validation as cv
from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS
from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base
from .jinja import Jinja, JinjaStr, TemplateError, TemplateRuntimeError, has_jinja
from .jinja import Jinja, JinjaError, JinjaStr, has_jinja
CODEOWNERS = ["@esphome/core"]
_LOGGER = logging.getLogger(__name__)
@@ -57,17 +57,12 @@ def _expand_jinja(value, orig_value, path, jinja, ignore_missing):
"->".join(str(x) for x in path),
err.message,
)
except (
TemplateError,
TemplateRuntimeError,
RuntimeError,
ArithmeticError,
AttributeError,
TypeError,
) as err:
except JinjaError as err:
raise cv.Invalid(
f"{type(err).__name__} Error evaluating jinja expression '{value}': {str(err)}."
f" See {'->'.join(str(x) for x in path)}",
f"{err.error_name()} Error evaluating jinja expression '{value}': {str(err.parent())}."
f"\nEvaluation stack: (most recent evaluation last)\n{err.stack_trace_str()}"
f"\nRelevant context:\n{err.context_trace_str()}"
f"\nSee {'->'.join(str(x) for x in path)}",
path,
)
return value

View File

@@ -6,6 +6,8 @@ import re
import jinja2 as jinja
from jinja2.sandbox import SandboxedEnvironment
from esphome.yaml_util import ESPLiteralValue
TemplateError = jinja.TemplateError
TemplateSyntaxError = jinja.TemplateSyntaxError
TemplateRuntimeError = jinja.TemplateRuntimeError
@@ -26,18 +28,20 @@ def has_jinja(st):
return detect_jinja_re.search(st) is not None
# SAFE_GLOBAL_FUNCTIONS defines a allowlist of built-in functions that are considered safe to expose
# SAFE_GLOBALS defines an allowlist of built-in functions or modules that are considered safe to expose
# in Jinja templates or other sandboxed evaluation contexts. Only functions that do not allow
# arbitrary code execution, file access, or other security risks are included.
#
# The following functions are considered safe:
# - math: The entire math module is injected, allowing access to mathematical functions like sin, cos, sqrt, etc.
# - ord: Converts a character to its Unicode code point integer.
# - chr: Converts an integer to its corresponding Unicode character.
# - len: Returns the length of a sequence or collection.
#
# These functions were chosen because they are pure, have no side effects, and do not provide access
# to the file system, environment, or other potentially sensitive resources.
SAFE_GLOBAL_FUNCTIONS = {
SAFE_GLOBALS = {
"math": math, # Inject entire math module
"ord": ord,
"chr": chr,
"len": len,
@@ -56,22 +60,62 @@ class JinjaStr(str):
later in the main substitutions pass.
"""
Undefined = object()
def __new__(cls, value: str, upvalues=None):
obj = super().__new__(cls, value)
obj.upvalues = upvalues or {}
if isinstance(value, JinjaStr):
base = str(value)
merged = {**value.upvalues, **(upvalues or {})}
else:
base = value
merged = dict(upvalues or {})
obj = super().__new__(cls, base)
obj.upvalues = merged
obj.result = JinjaStr.Undefined
return obj
def __init__(self, value: str, upvalues=None):
self.upvalues = upvalues or {}
class JinjaError(Exception):
def __init__(self, context_trace: dict, expr: str):
self.context_trace = context_trace
self.eval_stack = [expr]
def parent(self):
return self.__context__
def error_name(self):
return type(self.parent()).__name__
def context_trace_str(self):
return "\n".join(
f" {k} = {repr(v)} ({type(v).__name__})"
for k, v in self.context_trace.items()
)
def stack_trace_str(self):
return "\n".join(
f" {len(self.eval_stack) - i}: {expr}{i == 0 and ' <-- ' + self.error_name() or ''}"
for i, expr in enumerate(self.eval_stack)
)
class Jinja:
class TrackerContext(jinja.runtime.Context):
def resolve_or_missing(self, key):
val = super().resolve_or_missing(key)
if isinstance(val, JinjaStr):
self.environment.context_trace[key] = val
val, _ = self.environment.expand(val)
self.environment.context_trace[key] = val
return val
class Jinja(SandboxedEnvironment):
"""
Wraps a Jinja environment
"""
def __init__(self, context_vars):
self.env = SandboxedEnvironment(
super().__init__(
trim_blocks=True,
lstrip_blocks=True,
block_start_string="<%",
@@ -82,13 +126,20 @@ class Jinja:
variable_end_string="}",
undefined=jinja.StrictUndefined,
)
self.env.add_extension("jinja2.ext.do")
self.env.globals["math"] = math # Inject entire math module
self.context_class = TrackerContext
self.add_extension("jinja2.ext.do")
self.context_trace = {}
self.context_vars = {**context_vars}
self.env.globals = {
**self.env.globals,
for k, v in self.context_vars.items():
if isinstance(v, ESPLiteralValue):
continue
if isinstance(v, str) and not isinstance(v, JinjaStr) and has_jinja(v):
self.context_vars[k] = JinjaStr(v, self.context_vars)
self.globals = {
**self.globals,
**self.context_vars,
**SAFE_GLOBAL_FUNCTIONS,
**SAFE_GLOBALS,
}
def safe_eval(self, expr):
@@ -110,23 +161,43 @@ class Jinja:
result = None
override_vars = {}
if isinstance(content_str, JinjaStr):
if content_str.result is not JinjaStr.Undefined:
return content_str.result, None
# If `value` is already a JinjaStr, it means we are trying to evaluate it again
# in a parent pass.
# Hopefully, all required variables are visible now.
override_vars = content_str.upvalues
old_trace = self.context_trace
self.context_trace = {}
try:
template = self.env.from_string(content_str)
template = self.from_string(content_str)
result = self.safe_eval(template.render(override_vars))
if isinstance(result, Undefined):
# This happens when the expression is simply an undefined variable. Jinja does not
# raise an exception; instead we get "Undefined".
# Trigger an UndefinedError so we fall through to the except handler below.
print("" + result)
print("" + result) # force a UndefinedError exception
except (TemplateSyntaxError, UndefinedError) as err:
# `content_str` contains a Jinja expression that refers to a variable that is undefined
# in this scope. Perhaps it refers to a root substitution that is not visible yet.
# Therefore, return the original `content_str` as a JinjaStr, which contains the variables
# Therefore, return `content_str` as a JinjaStr, which contains the variables
# that are actually visible to it at this point to postpone evaluation.
return JinjaStr(content_str, {**self.context_vars, **override_vars}), err
except JinjaError as err:
err.context_trace = {**self.context_trace, **err.context_trace}
err.eval_stack.append(content_str)
raise err
except (
TemplateError,
TemplateRuntimeError,
RuntimeError,
ArithmeticError,
AttributeError,
TypeError,
) as err:
raise JinjaError(self.context_trace, content_str) from err
finally:
self.context_trace = old_trace
if isinstance(content_str, JinjaStr):
content_str.result = result
return result, None

View File

@@ -71,9 +71,14 @@ from esphome.const import (
CONF_VISUAL,
)
CONF_PRESET_CHANGE = "preset_change"
CONF_DEFAULT_PRESET = "default_preset"
CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION = "humidity_control_dehumidify_action"
CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION = "humidity_control_humidify_action"
CONF_HUMIDITY_CONTROL_OFF_ACTION = "humidity_control_off_action"
CONF_HUMIDITY_HYSTERESIS = "humidity_hysteresis"
CONF_ON_BOOT_RESTORE_FROM = "on_boot_restore_from"
CONF_PRESET_CHANGE = "preset_change"
CONF_TARGET_HUMIDITY_CHANGE_ACTION = "target_humidity_change_action"
CODEOWNERS = ["@kbx81"]
@@ -241,6 +246,14 @@ def validate_thermostat(config):
CONF_MAX_HEATING_RUN_TIME,
CONF_SUPPLEMENTAL_HEATING_ACTION,
],
CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION: [
CONF_HUMIDITY_CONTROL_OFF_ACTION,
CONF_HUMIDITY_SENSOR,
],
CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION: [
CONF_HUMIDITY_CONTROL_OFF_ACTION,
CONF_HUMIDITY_SENSOR,
],
}
for config_trigger, req_triggers in requirements.items():
for req_trigger in req_triggers:
@@ -338,7 +351,7 @@ def validate_thermostat(config):
# Warn about using the removed CONF_DEFAULT_MODE and advise users
if CONF_DEFAULT_MODE in config and config[CONF_DEFAULT_MODE] is not None:
raise cv.Invalid(
f"{CONF_DEFAULT_MODE} is no longer valid. Please switch to using presets and specify a {CONF_DEFAULT_PRESET}."
f"{CONF_DEFAULT_MODE} is no longer valid. Please switch to using presets and specify a {CONF_DEFAULT_PRESET}"
)
default_mode = config[CONF_DEFAULT_MODE]
@@ -588,9 +601,24 @@ CONFIG_SCHEMA = cv.All(
cv.Optional(CONF_SWING_VERTICAL_ACTION): automation.validate_automation(
single=True
),
cv.Optional(
CONF_TARGET_HUMIDITY_CHANGE_ACTION
): automation.validate_automation(single=True),
cv.Optional(
CONF_TARGET_TEMPERATURE_CHANGE_ACTION
): automation.validate_automation(single=True),
cv.Exclusive(
CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION,
group_of_exclusion="humidity_control",
): automation.validate_automation(single=True),
cv.Exclusive(
CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION,
group_of_exclusion="humidity_control",
): automation.validate_automation(single=True),
cv.Optional(
CONF_HUMIDITY_CONTROL_OFF_ACTION
): automation.validate_automation(single=True),
cv.Optional(CONF_HUMIDITY_HYSTERESIS, default=1.0): cv.percentage,
cv.Optional(CONF_DEFAULT_MODE, default=None): cv.valid,
cv.Optional(CONF_DEFAULT_PRESET): cv.templatable(cv.string),
cv.Optional(CONF_DEFAULT_TARGET_TEMPERATURE_HIGH): cv.temperature,
@@ -882,12 +910,39 @@ async def to_code(config):
config[CONF_SWING_VERTICAL_ACTION],
)
cg.add(var.set_supports_swing_mode_vertical(True))
if CONF_TARGET_HUMIDITY_CHANGE_ACTION in config:
await automation.build_automation(
var.get_humidity_change_trigger(),
[],
config[CONF_TARGET_HUMIDITY_CHANGE_ACTION],
)
if CONF_TARGET_TEMPERATURE_CHANGE_ACTION in config:
await automation.build_automation(
var.get_temperature_change_trigger(),
[],
config[CONF_TARGET_TEMPERATURE_CHANGE_ACTION],
)
if CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION in config:
cg.add(var.set_supports_dehumidification(True))
await automation.build_automation(
var.get_humidity_control_dehumidify_action_trigger(),
[],
config[CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION],
)
if CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION in config:
cg.add(var.set_supports_humidification(True))
await automation.build_automation(
var.get_humidity_control_humidify_action_trigger(),
[],
config[CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION],
)
if CONF_HUMIDITY_CONTROL_OFF_ACTION in config:
await automation.build_automation(
var.get_humidity_control_off_action_trigger(),
[],
config[CONF_HUMIDITY_CONTROL_OFF_ACTION],
)
cg.add(var.set_humidity_hysteresis(config[CONF_HUMIDITY_HYSTERESIS]))
if CONF_PRESET in config:
for preset_config in config[CONF_PRESET]:

View File

@@ -32,6 +32,7 @@ void ThermostatClimate::setup() {
if (this->humidity_sensor_ != nullptr) {
this->humidity_sensor_->add_on_state_callback([this](float state) {
this->current_humidity = state;
this->switch_to_humidity_control_action_(this->compute_humidity_control_action_());
this->publish_state();
});
this->current_humidity = this->humidity_sensor_->state;
@@ -84,6 +85,8 @@ void ThermostatClimate::refresh() {
this->switch_to_supplemental_action_(this->compute_supplemental_action_());
this->switch_to_fan_mode_(this->fan_mode.value(), false);
this->switch_to_swing_mode_(this->swing_mode, false);
this->switch_to_humidity_control_action_(this->compute_humidity_control_action_());
this->check_humidity_change_trigger_();
this->check_temperature_change_trigger_();
this->publish_state();
}
@@ -129,6 +132,11 @@ bool ThermostatClimate::hysteresis_valid() {
return true;
}
bool ThermostatClimate::humidity_hysteresis_valid() {
return !std::isnan(this->humidity_hysteresis_) && this->humidity_hysteresis_ >= 0.0f &&
this->humidity_hysteresis_ < 100.0f;
}
bool ThermostatClimate::limit_setpoints_for_heat_cool() {
return this->mode == climate::CLIMATE_MODE_HEAT_COOL ||
(this->mode == climate::CLIMATE_MODE_AUTO && this->supports_heat_cool_);
@@ -189,6 +197,16 @@ void ThermostatClimate::validate_target_temperature_high() {
}
}
void ThermostatClimate::validate_target_humidity() {
if (std::isnan(this->target_humidity)) {
this->target_humidity =
(this->get_traits().get_visual_max_humidity() - this->get_traits().get_visual_min_humidity()) / 2.0f;
} else {
this->target_humidity = clamp<float>(this->target_humidity, this->get_traits().get_visual_min_humidity(),
this->get_traits().get_visual_max_humidity());
}
}
void ThermostatClimate::control(const climate::ClimateCall &call) {
bool target_temperature_high_changed = false;
@@ -235,6 +253,10 @@ void ThermostatClimate::control(const climate::ClimateCall &call) {
this->validate_target_temperature();
}
}
if (call.get_target_humidity().has_value()) {
this->target_humidity = call.get_target_humidity().value();
this->validate_target_humidity();
}
// make any changes happen
this->refresh();
}
@@ -250,6 +272,9 @@ climate::ClimateTraits ThermostatClimate::traits() {
if (this->humidity_sensor_ != nullptr)
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
if (this->supports_humidification_ || this->supports_dehumidification_)
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);
if (this->supports_auto_)
traits.add_supported_mode(climate::CLIMATE_MODE_AUTO);
if (this->supports_heat_cool_)
@@ -423,6 +448,28 @@ climate::ClimateAction ThermostatClimate::compute_supplemental_action_() {
return target_action;
}
HumidificationAction ThermostatClimate::compute_humidity_control_action_() {
auto target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
// if hysteresis value or current_humidity is not valid, we go to OFF
if (std::isnan(this->current_humidity) || !this->humidity_hysteresis_valid()) {
return THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
}
// ensure set point is valid before computing the action
this->validate_target_humidity();
// everything has been validated so we can now safely compute the action
if (this->dehumidification_required_() && this->humidification_required_()) {
// this is bad and should never happen, so just stop.
// target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
} else if (this->supports_dehumidification_ && this->dehumidification_required_()) {
target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY;
} else if (this->supports_humidification_ && this->humidification_required_()) {
target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY;
}
return target_action;
}
void ThermostatClimate::switch_to_action_(climate::ClimateAction action, bool publish_state) {
// setup_complete_ helps us ensure an action is called immediately after boot
if ((action == this->action) && this->setup_complete_) {
@@ -596,6 +643,44 @@ void ThermostatClimate::trigger_supplemental_action_() {
}
}
void ThermostatClimate::switch_to_humidity_control_action_(HumidificationAction action) {
// setup_complete_ helps us ensure an action is called immediately after boot
if ((action == this->humidification_action_) && this->setup_complete_) {
// already in target mode
return;
}
Trigger<> *trig = this->humidity_control_off_action_trigger_;
switch (action) {
case THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF:
// trig = this->humidity_control_off_action_trigger_;
ESP_LOGVV(TAG, "Switching to HUMIDIFICATION_OFF action");
break;
case THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY:
trig = this->humidity_control_dehumidify_action_trigger_;
ESP_LOGVV(TAG, "Switching to DEHUMIDIFY action");
break;
case THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY:
trig = this->humidity_control_humidify_action_trigger_;
ESP_LOGVV(TAG, "Switching to HUMIDIFY action");
break;
case THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE:
default:
action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
// trig = this->humidity_control_off_action_trigger_;
}
if (this->prev_humidity_control_trigger_ != nullptr) {
this->prev_humidity_control_trigger_->stop_action();
this->prev_humidity_control_trigger_ = nullptr;
}
this->humidification_action_ = action;
this->prev_humidity_control_trigger_ = trig;
if (trig != nullptr) {
trig->trigger();
}
}
void ThermostatClimate::switch_to_fan_mode_(climate::ClimateFanMode fan_mode, bool publish_state) {
// setup_complete_ helps us ensure an action is called immediately after boot
if ((fan_mode == this->prev_fan_mode_) && this->setup_complete_) {
@@ -887,6 +972,20 @@ void ThermostatClimate::idle_on_timer_callback_() {
this->switch_to_supplemental_action_(this->compute_supplemental_action_());
}
void ThermostatClimate::check_humidity_change_trigger_() {
if ((this->prev_target_humidity_ == this->target_humidity) && this->setup_complete_) {
return; // nothing changed, no reason to trigger
} else {
// save the new humidity so we can check it again later; the trigger will fire below
this->prev_target_humidity_ = this->target_humidity;
}
// trigger the action
Trigger<> *trig = this->humidity_change_trigger_;
if (trig != nullptr) {
trig->trigger();
}
}
void ThermostatClimate::check_temperature_change_trigger_() {
if (this->supports_two_points_) {
// setup_complete_ helps us ensure an action is called immediately after boot
@@ -996,6 +1095,32 @@ bool ThermostatClimate::supplemental_heating_required_() {
(this->supplemental_action_ == climate::CLIMATE_ACTION_HEATING));
}
bool ThermostatClimate::dehumidification_required_() {
if (this->current_humidity > this->target_humidity + this->humidity_hysteresis_) {
// if the current humidity exceeds the target + hysteresis, dehumidification is required
return true;
} else if (this->current_humidity < this->target_humidity - this->humidity_hysteresis_) {
// if the current humidity is less than the target - hysteresis, dehumidification should stop
return false;
}
// if we get here, the current humidity is between target + hysteresis and target - hysteresis,
// so the action should not change
return this->humidification_action_ == THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY;
}
bool ThermostatClimate::humidification_required_() {
if (this->current_humidity < this->target_humidity - this->humidity_hysteresis_) {
// if the current humidity is below the target - hysteresis, humidification is required
return true;
} else if (this->current_humidity > this->target_humidity + this->humidity_hysteresis_) {
// if the current humidity is above the target + hysteresis, humidification should stop
return false;
}
// if we get here, the current humidity is between target - hysteresis and target + hysteresis,
// so the action should not change
return this->humidification_action_ == THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY;
}
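The two predicates above implement a symmetric hysteresis band around the target humidity; inside the band the current action is simply kept. A standalone illustration with made-up numbers (50 % target, 5 % hysteresis), not the thermostat code itself:

#include <cstdio>

enum Action { ACTION_OFF, ACTION_DEHUMIDIFY, ACTION_HUMIDIFY };

// Collapses the two *_required_() checks above into one function for illustration.
Action next_action(float current, float target, float hysteresis, Action previous) {
  if (current > target + hysteresis)
    return ACTION_DEHUMIDIFY;  // above the band
  if (current < target - hysteresis)
    return ACTION_HUMIDIFY;    // below the band
  return previous;             // inside the band: keep whatever is already running
}

int main() {
  const float target = 50.0f, hysteresis = 5.0f;
  std::printf("%d\n", next_action(57.0f, target, hysteresis, ACTION_OFF));         // 1: start dehumidifying
  std::printf("%d\n", next_action(52.0f, target, hysteresis, ACTION_DEHUMIDIFY));  // 1: keep running until below 45 %
  std::printf("%d\n", next_action(52.0f, target, hysteresis, ACTION_OFF));         // 0: stay off inside the band
  std::printf("%d\n", next_action(44.0f, target, hysteresis, ACTION_OFF));         // 2: start humidifying
  return 0;
}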
void ThermostatClimate::dump_preset_config_(const char *preset_name, const ThermostatClimateTargetTempConfig &config) {
if (this->supports_heat_) {
ESP_LOGCONFIG(TAG, " Default Target Temperature Low: %.1f°C",
@@ -1152,8 +1277,12 @@ ThermostatClimate::ThermostatClimate()
swing_mode_off_trigger_(new Trigger<>()),
swing_mode_horizontal_trigger_(new Trigger<>()),
swing_mode_vertical_trigger_(new Trigger<>()),
humidity_change_trigger_(new Trigger<>()),
temperature_change_trigger_(new Trigger<>()),
preset_change_trigger_(new Trigger<>()) {}
preset_change_trigger_(new Trigger<>()),
humidity_control_dehumidify_action_trigger_(new Trigger<>()),
humidity_control_humidify_action_trigger_(new Trigger<>()),
humidity_control_off_action_trigger_(new Trigger<>()) {}
void ThermostatClimate::set_default_preset(const std::string &custom_preset) {
this->default_custom_preset_ = custom_preset;
@@ -1217,6 +1346,9 @@ void ThermostatClimate::set_sensor(sensor::Sensor *sensor) { this->sensor_ = sen
void ThermostatClimate::set_humidity_sensor(sensor::Sensor *humidity_sensor) {
this->humidity_sensor_ = humidity_sensor;
}
void ThermostatClimate::set_humidity_hysteresis(float humidity_hysteresis) {
this->humidity_hysteresis_ = std::clamp<float>(humidity_hysteresis, 0.0f, 100.0f);
}
void ThermostatClimate::set_use_startup_delay(bool use_startup_delay) { this->use_startup_delay_ = use_startup_delay; }
void ThermostatClimate::set_supports_heat_cool(bool supports_heat_cool) {
this->supports_heat_cool_ = supports_heat_cool;
@@ -1284,6 +1416,18 @@ void ThermostatClimate::set_supports_swing_mode_vertical(bool supports_swing_mod
void ThermostatClimate::set_supports_two_points(bool supports_two_points) {
this->supports_two_points_ = supports_two_points;
}
void ThermostatClimate::set_supports_dehumidification(bool supports_dehumidification) {
this->supports_dehumidification_ = supports_dehumidification;
if (supports_dehumidification) {
this->supports_humidification_ = false;
}
}
void ThermostatClimate::set_supports_humidification(bool supports_humidification) {
this->supports_humidification_ = supports_humidification;
if (supports_humidification) {
this->supports_dehumidification_ = false;
}
}
Trigger<> *ThermostatClimate::get_cool_action_trigger() const { return this->cool_action_trigger_; }
Trigger<> *ThermostatClimate::get_supplemental_cool_action_trigger() const {
@@ -1317,8 +1461,18 @@ Trigger<> *ThermostatClimate::get_swing_mode_both_trigger() const { return this-
Trigger<> *ThermostatClimate::get_swing_mode_off_trigger() const { return this->swing_mode_off_trigger_; }
Trigger<> *ThermostatClimate::get_swing_mode_horizontal_trigger() const { return this->swing_mode_horizontal_trigger_; }
Trigger<> *ThermostatClimate::get_swing_mode_vertical_trigger() const { return this->swing_mode_vertical_trigger_; }
Trigger<> *ThermostatClimate::get_humidity_change_trigger() const { return this->humidity_change_trigger_; }
Trigger<> *ThermostatClimate::get_temperature_change_trigger() const { return this->temperature_change_trigger_; }
Trigger<> *ThermostatClimate::get_preset_change_trigger() const { return this->preset_change_trigger_; }
Trigger<> *ThermostatClimate::get_humidity_control_dehumidify_action_trigger() const {
return this->humidity_control_dehumidify_action_trigger_;
}
Trigger<> *ThermostatClimate::get_humidity_control_humidify_action_trigger() const {
return this->humidity_control_humidify_action_trigger_;
}
Trigger<> *ThermostatClimate::get_humidity_control_off_action_trigger() const {
return this->humidity_control_off_action_trigger_;
}
void ThermostatClimate::dump_config() {
LOG_CLIMATE("", "Thermostat", this);
@@ -1422,7 +1576,12 @@ void ThermostatClimate::dump_config() {
" OFF: %s\n"
" HORIZONTAL: %s\n"
" VERTICAL: %s\n"
" Supports TWO SET POINTS: %s",
" Supports TWO SET POINTS: %s\n"
" Supported Humidity Parameters:\n"
" CURRENT: %s\n"
" TARGET: %s\n"
" DEHUMIDIFICATION: %s\n"
" HUMIDIFICATION: %s",
YESNO(this->supports_fan_mode_on_), YESNO(this->supports_fan_mode_off_),
YESNO(this->supports_fan_mode_auto_), YESNO(this->supports_fan_mode_low_),
YESNO(this->supports_fan_mode_medium_), YESNO(this->supports_fan_mode_high_),
@@ -1430,7 +1589,10 @@ void ThermostatClimate::dump_config() {
YESNO(this->supports_fan_mode_diffuse_), YESNO(this->supports_fan_mode_quiet_),
YESNO(this->supports_swing_mode_both_), YESNO(this->supports_swing_mode_off_),
YESNO(this->supports_swing_mode_horizontal_), YESNO(this->supports_swing_mode_vertical_),
YESNO(this->supports_two_points_));
YESNO(this->supports_two_points_),
YESNO(this->get_traits().has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)),
YESNO(this->supports_dehumidification_ || this->supports_humidification_),
YESNO(this->supports_dehumidification_), YESNO(this->supports_humidification_));
if (!this->preset_config_.empty()) {
ESP_LOGCONFIG(TAG, " Supported PRESETS:");

View File

@@ -13,6 +13,13 @@
namespace esphome {
namespace thermostat {
enum HumidificationAction : uint8_t {
THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF = 0,
THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY = 1,
THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY = 2,
THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE,
};
enum ThermostatClimateTimerIndex : uint8_t {
THERMOSTAT_TIMER_COOLING_MAX_RUN_TIME = 0,
THERMOSTAT_TIMER_COOLING_OFF = 1,
@@ -90,6 +97,7 @@ class ThermostatClimate : public climate::Climate, public Component {
void set_idle_minimum_time_in_sec(uint32_t time);
void set_sensor(sensor::Sensor *sensor);
void set_humidity_sensor(sensor::Sensor *humidity_sensor);
void set_humidity_hysteresis(float humidity_hysteresis);
void set_use_startup_delay(bool use_startup_delay);
void set_supports_auto(bool supports_auto);
void set_supports_heat_cool(bool supports_heat_cool);
@@ -115,6 +123,8 @@ class ThermostatClimate : public climate::Climate, public Component {
void set_supports_swing_mode_horizontal(bool supports_swing_mode_horizontal);
void set_supports_swing_mode_off(bool supports_swing_mode_off);
void set_supports_swing_mode_vertical(bool supports_swing_mode_vertical);
void set_supports_dehumidification(bool supports_dehumidification);
void set_supports_humidification(bool supports_humidification);
void set_supports_two_points(bool supports_two_points);
void set_preset_config(climate::ClimatePreset preset, const ThermostatClimateTargetTempConfig &config);
@@ -148,8 +158,12 @@ class ThermostatClimate : public climate::Climate, public Component {
Trigger<> *get_swing_mode_horizontal_trigger() const;
Trigger<> *get_swing_mode_off_trigger() const;
Trigger<> *get_swing_mode_vertical_trigger() const;
Trigger<> *get_humidity_change_trigger() const;
Trigger<> *get_temperature_change_trigger() const;
Trigger<> *get_preset_change_trigger() const;
Trigger<> *get_humidity_control_dehumidify_action_trigger() const;
Trigger<> *get_humidity_control_humidify_action_trigger() const;
Trigger<> *get_humidity_control_off_action_trigger() const;
/// Get current hysteresis values
float cool_deadband();
float cool_overrun();
@@ -166,11 +180,13 @@ class ThermostatClimate : public climate::Climate, public Component {
climate::ClimateFanMode locked_fan_mode();
/// Set point and hysteresis validation
bool hysteresis_valid(); // returns true if valid
bool humidity_hysteresis_valid(); // returns true if valid
bool limit_setpoints_for_heat_cool(); // returns true if set points should be further limited within visual range
void validate_target_temperature();
void validate_target_temperatures(bool pin_target_temperature_high);
void validate_target_temperature_low();
void validate_target_temperature_high();
void validate_target_humidity();
protected:
/// Override control to change settings of the climate device.
@@ -192,11 +208,13 @@ class ThermostatClimate : public climate::Climate, public Component {
/// Re-compute the required action of this climate controller.
climate::ClimateAction compute_action_(bool ignore_timers = false);
climate::ClimateAction compute_supplemental_action_();
HumidificationAction compute_humidity_control_action_();
/// Switch the climate device to the given climate action.
void switch_to_action_(climate::ClimateAction action, bool publish_state = true);
void switch_to_supplemental_action_(climate::ClimateAction action);
void trigger_supplemental_action_();
void switch_to_humidity_control_action_(HumidificationAction action);
/// Switch the climate device to the given climate fan mode.
void switch_to_fan_mode_(climate::ClimateFanMode fan_mode, bool publish_state = true);
@@ -207,6 +225,9 @@ class ThermostatClimate : public climate::Climate, public Component {
/// Switch the climate device to the given climate swing mode.
void switch_to_swing_mode_(climate::ClimateSwingMode swing_mode, bool publish_state = true);
/// Check if the humidity change trigger should be called.
void check_humidity_change_trigger_();
/// Check if the temperature change trigger should be called.
void check_temperature_change_trigger_();
@@ -243,6 +264,8 @@ class ThermostatClimate : public climate::Climate, public Component {
bool heating_required_();
bool supplemental_cooling_required_();
bool supplemental_heating_required_();
bool dehumidification_required_();
bool humidification_required_();
void dump_preset_config_(const char *preset_name, const ThermostatClimateTargetTempConfig &config);
@@ -259,6 +282,9 @@ class ThermostatClimate : public climate::Climate, public Component {
/// The current supplemental action
climate::ClimateAction supplemental_action_{climate::CLIMATE_ACTION_OFF};
/// The current humidification action
HumidificationAction humidification_action_{THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE};
/// Default standard preset to use on start up
climate::ClimatePreset default_preset_{};
@@ -321,6 +347,12 @@ class ThermostatClimate : public climate::Climate, public Component {
/// A false value means that the controller has no such support.
bool supports_two_points_{false};
/// Whether the controller supports dehumidification and/or humidification
///
/// A false value means that the controller has no such support.
bool supports_dehumidification_{false};
bool supports_humidification_{false};
/// Flags indicating if maximum allowable run time was exceeded
bool cooling_max_runtime_exceeded_{false};
bool heating_max_runtime_exceeded_{false};
@@ -331,9 +363,10 @@ class ThermostatClimate : public climate::Climate, public Component {
/// setup_complete_ blocks modifying/resetting the temps immediately after boot
bool setup_complete_{false};
/// Store previously-known temperatures
/// Store previously-known humidity and temperatures
///
/// These are used to determine when the temperature change trigger/action needs to be called
/// These are used to determine when a temperature/humidity has changed
float prev_target_humidity_{NAN};
float prev_target_temperature_{NAN};
float prev_target_temperature_low_{NAN};
float prev_target_temperature_high_{NAN};
@@ -347,6 +380,9 @@ class ThermostatClimate : public climate::Climate, public Component {
float heating_deadband_{0};
float heating_overrun_{0};
/// Hysteresis values used for computing humidification action
float humidity_hysteresis_{0};
/// Maximum allowable temperature deltas before engaging supplemental cooling/heating actions
float supplemental_cool_delta_{0};
float supplemental_heat_delta_{0};
@@ -448,12 +484,24 @@ class ThermostatClimate : public climate::Climate, public Component {
/// The trigger to call when the controller should switch the swing mode to "vertical".
Trigger<> *swing_mode_vertical_trigger_{nullptr};
/// The trigger to call when the target humidity changes.
Trigger<> *humidity_change_trigger_{nullptr};
/// The trigger to call when the target temperature(s) change(es).
Trigger<> *temperature_change_trigger_{nullptr};
/// The trigger to call when the preset mode changes
Trigger<> *preset_change_trigger_{nullptr};
/// The trigger to call when dehumidification is required
Trigger<> *humidity_control_dehumidify_action_trigger_{nullptr};
/// The trigger to call when humidification is required
Trigger<> *humidity_control_humidify_action_trigger_{nullptr};
/// The trigger to call when (de)humidification should stop
Trigger<> *humidity_control_off_action_trigger_{nullptr};
/// A reference to the trigger that was previously active.
///
/// This is so that the previous trigger can be stopped before enabling a new one
@@ -462,6 +510,7 @@ class ThermostatClimate : public climate::Climate, public Component {
Trigger<> *prev_fan_mode_trigger_{nullptr};
Trigger<> *prev_mode_trigger_{nullptr};
Trigger<> *prev_swing_mode_trigger_{nullptr};
Trigger<> *prev_humidity_control_trigger_{nullptr};
/// Default custom preset to use on start up
std::string default_custom_preset_{};

View File

@@ -27,6 +27,14 @@ class RealTimeClock : public PollingComponent {
this->apply_timezone_();
}
/// Set the time zone from a raw buffer, only if it differs from the current one.
void set_timezone(const char *tz, size_t len) {
if (this->timezone_.length() != len || memcmp(this->timezone_.c_str(), tz, len) != 0) {
this->timezone_.assign(tz, len);
this->apply_timezone_();
}
}
/// Get the time zone currently in use.
std::string get_timezone() { return this->timezone_; }
#endif
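For completeness, a usage-style sketch of the change-detection pattern used by the new overload (MiniClock is a stand-in for the example, not the real RealTimeClock): the length-plus-memcmp guard means apply_timezone_() only runs when the incoming buffer actually differs, and callers holding a pointer/length pair never build a temporary std::string just for the comparison.

#include <cstdio>
#include <cstring>
#include <string>

class MiniClock {
 public:
  // Same shape as the overload added above.
  void set_timezone(const char *tz, size_t len) {
    if (this->timezone_.length() != len || memcmp(this->timezone_.c_str(), tz, len) != 0) {
      this->timezone_.assign(tz, len);
      this->apply_timezone_();  // only reached when the value changed
    }
  }

 private:
  void apply_timezone_() { std::printf("applying TZ: %s\n", this->timezone_.c_str()); }
  std::string timezone_;
};

int main() {
  MiniClock clock;
  const char buf[] = "CET-1CEST,M3.5.0,M10.5.0/3";
  clock.set_timezone(buf, sizeof(buf) - 1);  // applies once
  clock.set_timezone(buf, sizeof(buf) - 1);  // identical buffer: no re-apply
  return 0;
}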

View File

@@ -1325,7 +1325,7 @@ std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_conf
root["max_temp"] = value_accuracy_to_string(traits.get_visual_max_temperature(), target_accuracy);
root["min_temp"] = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
root["step"] = traits.get_visual_target_temperature_step();
if (traits.get_supports_action()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
root["action"] = PSTR_LOCAL(climate_action_to_string(obj->action));
root["state"] = root["action"];
has_state = true;
@@ -1345,14 +1345,15 @@ std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_conf
if (traits.get_supports_swing_modes()) {
root["swing_mode"] = PSTR_LOCAL(climate_swing_mode_to_string(obj->swing_mode));
}
if (traits.get_supports_current_temperature()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
if (!std::isnan(obj->current_temperature)) {
root["current_temperature"] = value_accuracy_to_string(obj->current_temperature, current_accuracy);
} else {
root["current_temperature"] = "NA";
}
}
if (traits.get_supports_two_point_target_temperature()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
root["target_temperature_low"] = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
root["target_temperature_high"] = value_accuracy_to_string(obj->target_temperature_high, target_accuracy);
if (!has_state) {

View File

@@ -407,7 +407,8 @@ async def to_code(config):
cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
cg.add(var.set_power_save_mode(config[CONF_POWER_SAVE_MODE]))
cg.add(var.set_fast_connect(config[CONF_FAST_CONNECT]))
if config[CONF_FAST_CONNECT]:
cg.add_define("USE_WIFI_FAST_CONNECT")
cg.add(var.set_passive_scan(config[CONF_PASSIVE_SCAN]))
if CONF_OUTPUT_POWER in config:
cg.add(var.set_output_power(config[CONF_OUTPUT_POWER]))
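As a sketch, the YAML that now triggers the USE_WIFI_FAST_CONNECT compile-time define (credentials are placeholders):
wifi:
  ssid: MySSID
  password: password1
  fast_connect: true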

View File

@@ -84,9 +84,9 @@ void WiFiComponent::start() {
uint32_t hash = this->has_sta() ? fnv1_hash(App.get_compilation_time()) : 88491487UL;
this->pref_ = global_preferences->make_preference<wifi::SavedWifiSettings>(hash, true);
if (this->fast_connect_) {
this->fast_connect_pref_ = global_preferences->make_preference<wifi::SavedWifiFastConnectSettings>(hash + 1, false);
}
#ifdef USE_WIFI_FAST_CONNECT
this->fast_connect_pref_ = global_preferences->make_preference<wifi::SavedWifiFastConnectSettings>(hash + 1, false);
#endif
SavedWifiSettings save{};
if (this->pref_.load(&save)) {
@@ -108,16 +108,16 @@ void WiFiComponent::start() {
ESP_LOGV(TAG, "Setting Power Save Option failed");
}
if (this->fast_connect_) {
this->trying_loaded_ap_ = this->load_fast_connect_settings_();
if (!this->trying_loaded_ap_) {
this->ap_index_ = 0;
this->selected_ap_ = this->sta_[this->ap_index_];
}
this->start_connecting(this->selected_ap_, false);
} else {
this->start_scanning();
#ifdef USE_WIFI_FAST_CONNECT
this->trying_loaded_ap_ = this->load_fast_connect_settings_();
if (!this->trying_loaded_ap_) {
this->ap_index_ = 0;
this->selected_ap_ = this->sta_[this->ap_index_];
}
this->start_connecting(this->selected_ap_, false);
#else
this->start_scanning();
#endif
#ifdef USE_WIFI_AP
} else if (this->has_ap()) {
this->setup_ap_config_();
@@ -168,13 +168,20 @@ void WiFiComponent::loop() {
case WIFI_COMPONENT_STATE_COOLDOWN: {
this->status_set_warning(LOG_STR("waiting to reconnect"));
if (millis() - this->action_started_ > 5000) {
if (this->fast_connect_ || this->retry_hidden_) {
#ifdef USE_WIFI_FAST_CONNECT
// NOTE: This check may not make sense here as it could interfere with AP cycling
if (!this->selected_ap_.get_bssid().has_value())
this->selected_ap_ = this->sta_[0];
this->start_connecting(this->selected_ap_, false);
#else
if (this->retry_hidden_) {
if (!this->selected_ap_.get_bssid().has_value())
this->selected_ap_ = this->sta_[0];
this->start_connecting(this->selected_ap_, false);
} else {
this->start_scanning();
}
#endif
}
break;
}
@@ -244,7 +251,6 @@ WiFiComponent::WiFiComponent() { global_wifi_component = this; }
bool WiFiComponent::has_ap() const { return this->has_ap_; }
bool WiFiComponent::has_sta() const { return !this->sta_.empty(); }
void WiFiComponent::set_fast_connect(bool fast_connect) { this->fast_connect_ = fast_connect; }
#ifdef USE_WIFI_11KV_SUPPORT
void WiFiComponent::set_btm(bool btm) { this->btm_ = btm; }
void WiFiComponent::set_rrm(bool rrm) { this->rrm_ = rrm; }
@@ -607,10 +613,12 @@ void WiFiComponent::check_scanning_finished() {
for (auto &ap : this->sta_) {
if (res.matches(ap)) {
res.set_matches(true);
if (!this->has_sta_priority(res.get_bssid())) {
this->set_sta_priority(res.get_bssid(), ap.get_priority());
// Cache the priority lookup - do a single search instead of two separate searches
const bssid_t &bssid = res.get_bssid();
if (!this->has_sta_priority(bssid)) {
this->set_sta_priority(bssid, ap.get_priority());
}
res.set_priority(this->get_sta_priority(res.get_bssid()));
res.set_priority(this->get_sta_priority(bssid));
break;
}
}
@@ -629,8 +637,9 @@ void WiFiComponent::check_scanning_finished() {
return;
}
WiFiAP connect_params;
WiFiScanResult scan_res = this->scan_result_[0];
// Build connection params directly into selected_ap_ to avoid an extra copy
const WiFiScanResult &scan_res = this->scan_result_[0];
WiFiAP &selected = this->selected_ap_;
for (auto &config : this->sta_) {
// search for matching STA config, at least one will match (from checks before)
if (!scan_res.matches(config)) {
@@ -639,37 +648,38 @@ void WiFiComponent::check_scanning_finished() {
if (config.get_hidden()) {
// selected network is hidden, we use the data from the config
connect_params.set_hidden(true);
connect_params.set_ssid(config.get_ssid());
// don't set BSSID and channel, there might be multiple hidden networks
selected.set_hidden(true);
selected.set_ssid(config.get_ssid());
// Clear channel and BSSID for hidden networks - there might be multiple hidden networks
// but we can't know which one is the correct one. Rely on probe-req with just SSID.
selected.set_channel(0);
selected.set_bssid(optional<bssid_t>{});
} else {
// selected network is visible, we use the data from the scan
// limit the connect params to only connect to exactly this network
// (network selection is done during scan phase).
connect_params.set_hidden(false);
connect_params.set_ssid(scan_res.get_ssid());
connect_params.set_channel(scan_res.get_channel());
connect_params.set_bssid(scan_res.get_bssid());
selected.set_hidden(false);
selected.set_ssid(scan_res.get_ssid());
selected.set_channel(scan_res.get_channel());
selected.set_bssid(scan_res.get_bssid());
}
// copy manual IP (if set)
connect_params.set_manual_ip(config.get_manual_ip());
selected.set_manual_ip(config.get_manual_ip());
#ifdef USE_WIFI_WPA2_EAP
// copy EAP parameters (if set)
connect_params.set_eap(config.get_eap());
selected.set_eap(config.get_eap());
#endif
// copy password (if set)
connect_params.set_password(config.get_password());
selected.set_password(config.get_password());
break;
}
yield();
this->selected_ap_ = connect_params;
this->start_connecting(connect_params, false);
this->start_connecting(this->selected_ap_, false);
}
void WiFiComponent::dump_config() {
@@ -719,9 +729,9 @@ void WiFiComponent::check_connecting_finished() {
this->scan_result_.shrink_to_fit();
}
if (this->fast_connect_) {
this->save_fast_connect_settings_();
}
#ifdef USE_WIFI_FAST_CONNECT
this->save_fast_connect_settings_();
#endif
return;
}
@@ -769,31 +779,31 @@ void WiFiComponent::retry_connect() {
delay(10);
if (!this->is_captive_portal_active_() && !this->is_esp32_improv_active_() &&
(this->num_retried_ > 3 || this->error_from_callback_)) {
if (this->fast_connect_) {
if (this->trying_loaded_ap_) {
this->trying_loaded_ap_ = false;
this->ap_index_ = 0; // Retry from the first configured AP
} else if (this->ap_index_ >= this->sta_.size() - 1) {
ESP_LOGW(TAG, "No more APs to try");
this->ap_index_ = 0;
this->restart_adapter();
} else {
// Try next AP
this->ap_index_++;
}
this->num_retried_ = 0;
this->selected_ap_ = this->sta_[this->ap_index_];
#ifdef USE_WIFI_FAST_CONNECT
if (this->trying_loaded_ap_) {
this->trying_loaded_ap_ = false;
this->ap_index_ = 0; // Retry from the first configured AP
} else if (this->ap_index_ >= this->sta_.size() - 1) {
ESP_LOGW(TAG, "No more APs to try");
this->ap_index_ = 0;
this->restart_adapter();
} else {
if (this->num_retried_ > 5) {
// If the retry has failed more than 5 times, restart the STA
this->restart_adapter();
} else {
// Try hidden networks after 3 failed retries
ESP_LOGD(TAG, "Retrying with hidden networks");
this->retry_hidden_ = true;
this->num_retried_++;
}
// Try next AP
this->ap_index_++;
}
this->num_retried_ = 0;
this->selected_ap_ = this->sta_[this->ap_index_];
#else
if (this->num_retried_ > 5) {
// If the retry has failed more than 5 times, restart the STA
this->restart_adapter();
} else {
// Try hidden networks after 3 failed retries
ESP_LOGD(TAG, "Retrying with hidden networks");
this->retry_hidden_ = true;
this->num_retried_++;
}
#endif
} else {
this->num_retried_++;
}
@@ -839,6 +849,7 @@ bool WiFiComponent::is_esp32_improv_active_() {
#endif
}
#ifdef USE_WIFI_FAST_CONNECT
bool WiFiComponent::load_fast_connect_settings_() {
SavedWifiFastConnectSettings fast_connect_save{};
@@ -873,6 +884,7 @@ void WiFiComponent::save_fast_connect_settings_() {
ESP_LOGD(TAG, "Saved fast_connect settings");
}
}
#endif
void WiFiAP::set_ssid(const std::string &ssid) { this->ssid_ = ssid; }
void WiFiAP::set_bssid(bssid_t bssid) { this->bssid_ = bssid; }
@@ -902,7 +914,7 @@ WiFiScanResult::WiFiScanResult(const bssid_t &bssid, std::string ssid, uint8_t c
rssi_(rssi),
with_auth_(with_auth),
is_hidden_(is_hidden) {}
bool WiFiScanResult::matches(const WiFiAP &config) {
bool WiFiScanResult::matches(const WiFiAP &config) const {
if (config.get_hidden()) {
// User configured a hidden network, only match actually hidden networks
// don't match SSID

View File

@@ -170,7 +170,7 @@ class WiFiScanResult {
public:
WiFiScanResult(const bssid_t &bssid, std::string ssid, uint8_t channel, int8_t rssi, bool with_auth, bool is_hidden);
bool matches(const WiFiAP &config);
bool matches(const WiFiAP &config) const;
bool get_matches() const;
void set_matches(bool matches);
@@ -240,7 +240,6 @@ class WiFiComponent : public Component {
void start_scanning();
void check_scanning_finished();
void start_connecting(const WiFiAP &ap, bool two);
void set_fast_connect(bool fast_connect);
void set_ap_timeout(uint32_t ap_timeout) { ap_timeout_ = ap_timeout; }
void check_connecting_finished();
@@ -364,8 +363,10 @@ class WiFiComponent : public Component {
bool is_captive_portal_active_();
bool is_esp32_improv_active_();
#ifdef USE_WIFI_FAST_CONNECT
bool load_fast_connect_settings_();
void save_fast_connect_settings_();
#endif
#ifdef USE_ESP8266
static void wifi_event_callback(System_Event_t *event);
@@ -399,7 +400,9 @@ class WiFiComponent : public Component {
WiFiAP ap_;
optional<float> output_power_;
ESPPreferenceObject pref_;
#ifdef USE_WIFI_FAST_CONNECT
ESPPreferenceObject fast_connect_pref_;
#endif
// Group all 32-bit integers together
uint32_t action_started_;
@@ -411,14 +414,17 @@ class WiFiComponent : public Component {
WiFiComponentState state_{WIFI_COMPONENT_STATE_OFF};
WiFiPowerSaveMode power_save_{WIFI_POWER_SAVE_NONE};
uint8_t num_retried_{0};
#ifdef USE_WIFI_FAST_CONNECT
uint8_t ap_index_{0};
#endif
#if USE_NETWORK_IPV6
uint8_t num_ipv6_addresses_{0};
#endif /* USE_NETWORK_IPV6 */
// Group all boolean values together
bool fast_connect_{false};
#ifdef USE_WIFI_FAST_CONNECT
bool trying_loaded_ap_{false};
#endif
bool retry_hidden_{false};
bool has_ap_{false};
bool handled_connected_state_{false};

View File

@@ -706,10 +706,10 @@ void WiFiComponent::wifi_scan_done_callback_(void *arg, STATUS status) {
this->scan_result_.init(count);
for (bss_info *it = head; it != nullptr; it = STAILQ_NEXT(it, next)) {
WiFiScanResult res({it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
std::string(reinterpret_cast<char *>(it->ssid), it->ssid_len), it->channel, it->rssi,
it->authmode != AUTH_OPEN, it->is_hidden != 0);
this->scan_result_.push_back(res);
this->scan_result_.emplace_back(
bssid_t{it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
std::string(reinterpret_cast<char *>(it->ssid), it->ssid_len), it->channel, it->rssi, it->authmode != AUTH_OPEN,
it->is_hidden != 0);
}
this->scan_done_ = true;
}

View File

@@ -776,13 +776,12 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
}
uint16_t number = it.number;
std::vector<wifi_ap_record_t> records(number);
err = esp_wifi_scan_get_ap_records(&number, records.data());
auto records = std::make_unique<wifi_ap_record_t[]>(number);
err = esp_wifi_scan_get_ap_records(&number, records.get());
if (err != ESP_OK) {
ESP_LOGW(TAG, "esp_wifi_scan_get_ap_records failed: %s", esp_err_to_name(err));
return;
}
records.resize(number);
scan_result_.init(number);
for (int i = 0; i < number; i++) {
@@ -790,8 +789,8 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
bssid_t bssid;
std::copy(record.bssid, record.bssid + 6, bssid.begin());
std::string ssid(reinterpret_cast<const char *>(record.ssid));
WiFiScanResult result(bssid, ssid, record.primary, record.rssi, record.authmode != WIFI_AUTH_OPEN, ssid.empty());
scan_result_.push_back(result);
scan_result_.emplace_back(bssid, ssid, record.primary, record.rssi, record.authmode != WIFI_AUTH_OPEN,
ssid.empty());
}
} else if (data->event_base == WIFI_EVENT && data->event_id == WIFI_EVENT_AP_START) {

View File

@@ -419,9 +419,9 @@ void WiFiComponent::wifi_scan_done_callback_() {
uint8_t *bssid = WiFi.BSSID(i);
int32_t channel = WiFi.channel(i);
WiFiScanResult scan({bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]}, std::string(ssid.c_str()),
channel, rssi, authmode != WIFI_AUTH_OPEN, ssid.length() == 0);
this->scan_result_.push_back(scan);
this->scan_result_.emplace_back(bssid_t{bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]},
std::string(ssid.c_str()), channel, rssi, authmode != WIFI_AUTH_OPEN,
ssid.length() == 0);
}
WiFi.scanDelete();
this->scan_done_ = true;

View File

@@ -0,0 +1,34 @@
import esphome.codegen as cg
from esphome.components.zephyr import zephyr_add_prj_conf
import esphome.config_validation as cv
from esphome.const import CONF_ESPHOME, CONF_ID, CONF_NAME, Framework
import esphome.final_validate as fv
zephyr_ble_server_ns = cg.esphome_ns.namespace("zephyr_ble_server")
BLEServer = zephyr_ble_server_ns.class_("BLEServer", cg.Component)
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(BLEServer),
}
).extend(cv.COMPONENT_SCHEMA),
cv.only_with_framework(Framework.ZEPHYR),
)
def _final_validate(_):
full_config = fv.full_config.get()
zephyr_add_prj_conf("BT_DEVICE_NAME", full_config[CONF_ESPHOME][CONF_NAME])
FINAL_VALIDATE_SCHEMA = _final_validate
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
zephyr_add_prj_conf("BT", True)
zephyr_add_prj_conf("BT_PERIPHERAL", True)
zephyr_add_prj_conf("BT_RX_STACK_SIZE", 1536)
# zephyr_add_prj_conf("BT_LL_SW_SPLIT", True)
await cg.register_component(var, config)
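A hypothetical YAML usage sketch; the configuration key is assumed from the namespace above, and the component takes no options beyond its auto-generated ID:
zephyr_ble_server: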

View File

@@ -0,0 +1,100 @@
#ifdef USE_ZEPHYR
#include "ble_server.h"
#include "esphome/core/defines.h"
#include "esphome/core/log.h"
#include <zephyr/bluetooth/bluetooth.h>
#include <zephyr/bluetooth/conn.h>
namespace esphome::zephyr_ble_server {
static const char *const TAG = "zephyr_ble_server";
static struct k_work advertise_work; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
#define DEVICE_NAME CONFIG_BT_DEVICE_NAME
#define DEVICE_NAME_LEN (sizeof(DEVICE_NAME) - 1)
static const struct bt_data AD[] = {
BT_DATA_BYTES(BT_DATA_FLAGS, (BT_LE_AD_GENERAL | BT_LE_AD_NO_BREDR)),
BT_DATA(BT_DATA_NAME_COMPLETE, DEVICE_NAME, DEVICE_NAME_LEN),
};
static const struct bt_data SD[] = {
#ifdef USE_OTA
BT_DATA_BYTES(BT_DATA_UUID128_ALL, 0x84, 0xaa, 0x60, 0x74, 0x52, 0x8a, 0x8b, 0x86, 0xd3, 0x4c, 0xb7, 0x1d, 0x1d,
0xdc, 0x53, 0x8d),
#endif
};
const struct bt_le_adv_param *const ADV_PARAM = BT_LE_ADV_CONN;
static void advertise(struct k_work *work) {
int rc = bt_le_adv_stop();
if (rc) {
ESP_LOGE(TAG, "Advertising failed to stop (rc %d)", rc);
}
rc = bt_le_adv_start(ADV_PARAM, AD, ARRAY_SIZE(AD), SD, ARRAY_SIZE(SD));
if (rc) {
ESP_LOGE(TAG, "Advertising failed to start (rc %d)", rc);
return;
}
ESP_LOGI(TAG, "Advertising successfully started");
}
static void connected(struct bt_conn *conn, uint8_t err) {
if (err) {
ESP_LOGE(TAG, "Connection failed (err 0x%02x)", err);
} else {
ESP_LOGI(TAG, "Connected");
}
}
static void disconnected(struct bt_conn *conn, uint8_t reason) {
ESP_LOGI(TAG, "Disconnected (reason 0x%02x)", reason);
k_work_submit(&advertise_work);
}
static void bt_ready(int err) {
if (err != 0) {
ESP_LOGE(TAG, "Bluetooth failed to initialise: %d", err);
} else {
k_work_submit(&advertise_work);
}
}
BT_CONN_CB_DEFINE(conn_callbacks) = {
.connected = connected,
.disconnected = disconnected,
};
void BLEServer::setup() {
k_work_init(&advertise_work, advertise);
resume_();
}
void BLEServer::loop() {
if (this->suspended_) {
resume_();
this->suspended_ = false;
}
}
void BLEServer::resume_() {
int rc = bt_enable(bt_ready);
if (rc != 0) {
ESP_LOGE(TAG, "Bluetooth enable failed: %d", rc);
return;
}
}
void BLEServer::on_shutdown() {
struct k_work_sync sync;
k_work_cancel_sync(&advertise_work, &sync);
bt_disable();
this->suspended_ = true;
}
} // namespace esphome::zephyr_ble_server
#endif

View File

@@ -0,0 +1,19 @@
#pragma once
#ifdef USE_ZEPHYR
#include "esphome/core/component.h"
namespace esphome::zephyr_ble_server {
class BLEServer : public Component {
public:
void setup() override;
void loop() override;
void on_shutdown() override;
protected:
void resume_();
bool suspended_ = false;
};
} // namespace esphome::zephyr_ble_server
#endif

View File

@@ -12,7 +12,7 @@ from typing import Any
import voluptuous as vol
from esphome import core, loader, pins, yaml_util
from esphome.config_helpers import Extend, Remove, merge_dicts_ordered
from esphome.config_helpers import Extend, Remove, merge_config, merge_dicts_ordered
import esphome.config_validation as cv
from esphome.const import (
CONF_ESPHOME,
@@ -324,13 +324,7 @@ def iter_ids(config, path=None):
yield from iter_ids(value, path + [key])
def recursive_check_replaceme(value):
if isinstance(value, list):
return cv.Schema([recursive_check_replaceme])(value)
if isinstance(value, dict):
return cv.Schema({cv.valid: recursive_check_replaceme})(value)
if isinstance(value, ESPLiteralValue):
pass
def check_replaceme(value):
if isinstance(value, str) and value == "REPLACEME":
raise cv.Invalid(
"Found 'REPLACEME' in configuration, this is most likely an error. "
@@ -339,7 +333,86 @@ def recursive_check_replaceme(value):
"If you want to use the literal REPLACEME string, "
'please use "!literal REPLACEME"'
)
return value
def _build_list_index(lst):
index = OrderedDict()
extensions, removals = [], set()
for item in lst:
if item is None:
removals.add(None)
continue
item_id = None
if isinstance(item, dict) and (item_id := item.get(CONF_ID)):
if isinstance(item_id, Extend):
extensions.append(item)
continue
if isinstance(item_id, Remove):
removals.add(item_id.value)
continue
if not item_id or item_id in index:
# no id or duplicate -> pass through with identity-based key
item_id = id(item)
index[item_id] = item
return index, extensions, removals
def resolve_extend_remove(value, is_key=None):
if isinstance(value, ESPLiteralValue):
return # do not check inside literal blocks
if isinstance(value, list):
index, extensions, removals = _build_list_index(value)
if extensions or removals:
# Rebuild the original list after
# processing all extensions and removals
for item in extensions:
item_id = item[CONF_ID].value
if item_id in removals:
continue
old = index.get(item_id)
if old is None:
# Failed to find the source for this extension
# Find the index of the item so the error is reported at the correct position
i = next(
(
i
for i, d in enumerate(value)
if d.get(CONF_ID) == item[CONF_ID]
)
)
with cv.prepend_path(i):
raise cv.Invalid(
f"Source for extension of ID '{item_id}' was not found."
)
item[CONF_ID] = item_id
index[item_id] = merge_config(old, item)
for item_id in removals:
index.pop(item_id, None)
value[:] = index.values()
for i, item in enumerate(value):
with cv.prepend_path(i):
resolve_extend_remove(item, False)
return
if isinstance(value, dict):
removals = []
for k, v in value.items():
with cv.prepend_path(k):
if isinstance(v, Remove):
removals.append(k)
continue
resolve_extend_remove(k, True)
resolve_extend_remove(v, False)
for k in removals:
value.pop(k, None)
return
if is_key:
return # do not check keys (yet)
check_replaceme(value)
return
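A minimal illustration of the list resolution performed above, using simplified keys rather than a full ESPHome config (relies on the Extend/Remove imports already at the top of this module):
# Sketch only: real configs come from YAML, but the resolution logic is the same.
config = {
    "sensor": [
        {"id": "s1", "filters": [{"multiply": 2.0}]},
        {"id": Extend("s1"), "filters": [{"offset": 1.0}]},
        {"id": Remove("s2")},
    ]
}
resolve_extend_remove(config)
# config["sensor"] == [{"id": "s1", "filters": [{"multiply": 2.0}, {"offset": 1.0}]}]
# The extension is merged into its source entry; the Remove targeting the unknown id "s2" is dropped.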
class ConfigValidationStep(abc.ABC):
@@ -437,19 +510,6 @@ class LoadValidationStep(ConfigValidationStep):
continue
p_name = p_config.get("platform")
if p_name is None:
p_id = p_config.get(CONF_ID)
if isinstance(p_id, Extend):
result.add_str_error(
f"Source for extension of ID '{p_id.value}' was not found.",
path + [CONF_ID],
)
continue
if isinstance(p_id, Remove):
result.add_str_error(
f"Source for removal of ID '{p_id.value}' was not found.",
path + [CONF_ID],
)
continue
result.add_str_error(
f"'{self.domain}' requires a 'platform' key but it was not specified.",
path,
@@ -934,9 +994,10 @@ def validate_config(
CORE.raw_config = config
# 1.1. Check for REPLACEME special value
# 1.1. Resolve !extend and !remove and check for REPLACEME
# After this step, there will not be any Extend or Remove values in the config anymore
try:
recursive_check_replaceme(config)
resolve_extend_remove(config)
except vol.Invalid as err:
result.add_error(err)

View File

@@ -1,7 +1,6 @@
from collections.abc import Callable
from esphome.const import (
CONF_ID,
CONF_LEVEL,
CONF_LOGGER,
KEY_CORE,
@@ -75,73 +74,28 @@ class Remove:
return isinstance(b, Remove) and self.value == b.value
def merge_config(full_old, full_new):
def merge(old, new):
if isinstance(new, dict):
if not isinstance(old, dict):
return new
# Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict
if isinstance(old, OrderedDict) or isinstance(new, OrderedDict):
res = OrderedDict(old)
else:
res = old.copy()
for k, v in new.items():
if isinstance(v, Remove) and k in old:
del res[k]
else:
res[k] = merge(old[k], v) if k in old else v
return res
if isinstance(new, list):
if not isinstance(old, list):
return new
res = old.copy()
ids = {
v_id: i
for i, v in enumerate(res)
if isinstance(v, dict)
and (v_id := v.get(CONF_ID))
and isinstance(v_id, str)
}
extend_ids = {
v_id.value: i
for i, v in enumerate(res)
if isinstance(v, dict)
and (v_id := v.get(CONF_ID))
and isinstance(v_id, Extend)
}
ids_to_delete = []
for v in new:
if isinstance(v, dict) and (new_id := v.get(CONF_ID)):
if isinstance(new_id, Extend):
new_id = new_id.value
if new_id in ids:
v[CONF_ID] = new_id
res[ids[new_id]] = merge(res[ids[new_id]], v)
continue
elif isinstance(new_id, Remove):
new_id = new_id.value
if new_id in ids:
ids_to_delete.append(ids[new_id])
continue
elif (
new_id in extend_ids
): # When a package is extending a non-packaged item
extend_res = res[extend_ids[new_id]]
extend_res[CONF_ID] = new_id
new_v = merge(v, extend_res)
res[extend_ids[new_id]] = new_v
continue
else:
ids[new_id] = len(res)
res.append(v)
return [v for i, v in enumerate(res) if i not in ids_to_delete]
if new is None:
return old
def merge_config(old, new):
if isinstance(new, Remove):
return new
if isinstance(new, dict):
if not isinstance(old, dict):
return new
# Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict
if isinstance(old, OrderedDict) or isinstance(new, OrderedDict):
res = OrderedDict(old)
else:
res = old.copy()
for k, v in new.items():
res[k] = merge_config(old.get(k), v)
return res
if isinstance(new, list):
if not isinstance(old, list):
return new
return old + new
if new is None:
return old
return merge(full_old, full_new)
return new
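For intuition, the simplified merge now behaves like this (plain Python literals; Extend/Remove handling happens later in resolve_extend_remove):
old = {"a": 1, "b": [1, 2], "d": 4}
new = {"b": [3], "c": {"y": 2}, "d": None}
merge_config(old, new)
# -> {"a": 1, "b": [1, 2, 3], "d": 4, "c": {"y": 2}}
# dicts merge per key, lists concatenate, and None in `new` keeps the old value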
def filter_source_files_from_platform(

View File

@@ -24,7 +24,6 @@ import voluptuous as vol
from esphome import core
import esphome.codegen as cg
from esphome.config_helpers import Extend, Remove
from esphome.const import (
ALLOWED_NAME_CHARS,
CONF_AVAILABILITY,
@@ -624,12 +623,6 @@ def declare_id(type):
if value is None:
return core.ID(None, is_declaration=True, type=type)
if isinstance(value, Extend):
raise Invalid(f"Source for extension of ID '{value.value}' was not found.")
if isinstance(value, Remove):
raise Invalid(f"Source for Removal of ID '{value.value}' was not found.")
return core.ID(validate_id_name(value), is_declaration=True, type=type)
return validator

View File

@@ -11,6 +11,7 @@ from esphome.const import (
CONF_COMMENT,
CONF_ESPHOME,
CONF_ETHERNET,
CONF_OPENTHREAD,
CONF_PORT,
CONF_USE_ADDRESS,
CONF_WEB_SERVER,
@@ -641,6 +642,9 @@ class EsphomeCore:
if CONF_ETHERNET in self.config:
return self.config[CONF_ETHERNET][CONF_USE_ADDRESS]
if CONF_OPENTHREAD in self.config:
return f"{self.name}.local"
return None
@property

View File

@@ -199,6 +199,7 @@
#define USE_WEBSERVER_PORT 80 // NOLINT
#define USE_WEBSERVER_SORTING
#define USE_WIFI_11KV_SUPPORT
#define USE_WIFI_FAST_CONNECT
#define USB_HOST_MAX_REQUESTS 16
#ifdef USE_ARDUINO
@@ -272,6 +273,8 @@
#ifdef USE_NRF52
#define USE_NRF52_DFU
#define USE_SOFTDEVICE_ID 7
#define USE_SOFTDEVICE_VERSION 1
#endif
// Disabled feature flags

View File

@@ -281,13 +281,13 @@ template<typename T> class FixedVector {
}
}
/// Emplace element without bounds checking - constructs in-place
/// Emplace element without bounds checking - constructs in-place with arguments
/// Caller must ensure sufficient capacity was allocated via init()
/// Returns reference to the newly constructed element
/// NOTE: Caller MUST ensure size_ < capacity_ before calling
T &emplace_back() {
// Use placement new to default-construct the object in pre-allocated memory
new (&data_[size_]) T();
template<typename... Args> T &emplace_back(Args &&...args) {
// Use placement new to construct the object in pre-allocated memory
new (&data_[size_]) T(std::forward<Args>(args)...);
size_++;
return data_[size_ - 1];
}
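A usage sketch for the variadic overload (the scan-result values below are placeholders; capacity must still be reserved with init() first):
// Constructs the element in place, forwarding the arguments to the WiFiScanResult constructor.
FixedVector<WiFiScanResult> results;
results.init(count);
results.emplace_back(bssid, ssid, channel, rssi, /*with_auth=*/true, /*is_hidden=*/false);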
@@ -1158,18 +1158,4 @@ template<typename T, enable_if_t<std::is_pointer<T *>::value, int> = 0> T &id(T
///@}
/// @name Deprecated functions
///@{
ESPDEPRECATED("hexencode() is deprecated, use format_hex_pretty() instead.", "2022.1")
inline std::string hexencode(const uint8_t *data, uint32_t len) { return format_hex_pretty(data, len); }
template<typename T>
ESPDEPRECATED("hexencode() is deprecated, use format_hex_pretty() instead.", "2022.1")
std::string hexencode(const T &data) {
return hexencode(data.data(), data.size());
}
///@}
} // namespace esphome

View File

@@ -328,17 +328,30 @@ void HOT Scheduler::call(uint32_t now) {
// Single-core platforms don't use this queue and fall back to the heap-based approach.
//
// Note: Items cancelled via cancel_item_locked_() are marked with remove=true but still
// processed here. They are removed from the queue normally via pop_front() but skipped
// during execution by should_skip_item_(). This is intentional - no memory leak occurs.
while (!this->defer_queue_.empty()) {
// The outer check is done without a lock for performance. If the queue
// appears non-empty, we lock and process an item. We don't need to check
// empty() again inside the lock because only this thread can remove items.
// processed here. They are skipped during execution by should_skip_item_().
// This is intentional - no memory leak occurs.
//
// We use an index (defer_queue_front_) to track the read position instead of calling
// erase() on every pop, which would be O(n). The queue is processed once per loop -
// any items added during processing are left for the next loop iteration.
// Snapshot the queue end point - only process items that existed at loop start
// Items added during processing (by callbacks or other threads) run next loop
// No lock needed: single consumer (main loop); a stale read just means we process fewer items this iteration
size_t defer_queue_end = this->defer_queue_.size();
while (this->defer_queue_front_ < defer_queue_end) {
std::unique_ptr<SchedulerItem> item;
{
LockGuard lock(this->lock_);
item = std::move(this->defer_queue_.front());
this->defer_queue_.pop_front();
// SAFETY: Moving out the unique_ptr leaves a nullptr in the vector at defer_queue_front_.
// This is intentional and safe because:
// 1. The vector is only cleaned up by cleanup_defer_queue_locked_() at the end of this function
// 2. Any code iterating defer_queue_ MUST check for nullptr items (see mark_matching_items_removed_
// and has_cancelled_timeout_in_container_ in scheduler.h)
// 3. The lock protects concurrent access, but the nullptr remains until cleanup
item = std::move(this->defer_queue_[this->defer_queue_front_]);
this->defer_queue_front_++;
}
// Execute callback without holding lock to prevent deadlocks
@@ -349,6 +362,13 @@ void HOT Scheduler::call(uint32_t now) {
// Recycle the defer item after execution
this->recycle_item_(std::move(item));
}
// If we've consumed all items up to the snapshot point, clean up the dead space
// Single consumer (main loop), so no lock needed for this check
if (this->defer_queue_front_ >= defer_queue_end) {
LockGuard lock(this->lock_);
this->cleanup_defer_queue_locked_();
}
#endif /* not ESPHOME_THREAD_SINGLE */
// Convert the fresh timestamp from main loop to 64-bit for scheduler operations

View File

@@ -264,6 +264,36 @@ class Scheduler {
// Helper to recycle a SchedulerItem
void recycle_item_(std::unique_ptr<SchedulerItem> item);
#ifndef ESPHOME_THREAD_SINGLE
// Helper to cleanup defer_queue_ after processing
// IMPORTANT: Caller must hold the scheduler lock before calling this function.
inline void cleanup_defer_queue_locked_() {
// Check if new items were added by producers during processing
if (this->defer_queue_front_ >= this->defer_queue_.size()) {
// Common case: no new items - clear everything
this->defer_queue_.clear();
} else {
// Rare case: new items were added during processing - compact the vector
// This only happens when:
// 1. A deferred callback calls defer() again, or
// 2. Another thread calls defer() while we're processing
//
// Move unprocessed items (added during this loop) to the front for next iteration
//
// SAFETY: Compacted items may include cancelled items (marked for removal via
// cancel_item_locked_() during execution). This is safe because should_skip_item_()
// checks is_item_removed_() before executing, so cancelled items will be skipped
// and recycled on the next loop iteration.
size_t remaining = this->defer_queue_.size() - this->defer_queue_front_;
for (size_t i = 0; i < remaining; i++) {
this->defer_queue_[i] = std::move(this->defer_queue_[this->defer_queue_front_ + i]);
}
this->defer_queue_.resize(remaining);
}
this->defer_queue_front_ = 0;
}
#endif /* not ESPHOME_THREAD_SINGLE */
// Helper to check if item is marked for removal (platform-specific)
// Returns true if item should be skipped, handles platform-specific synchronization
// For ESPHOME_THREAD_MULTI_NO_ATOMICS platforms, the caller must hold the scheduler lock before calling this
@@ -282,13 +312,18 @@ class Scheduler {
// Helper to mark matching items in a container as removed
// Returns the number of items marked for removal
// For ESPHOME_THREAD_MULTI_NO_ATOMICS platforms, the caller must hold the scheduler lock before calling this
// function.
// IMPORTANT: Caller must hold the scheduler lock before calling this function.
template<typename Container>
size_t mark_matching_items_removed_(Container &container, Component *component, const char *name_cstr,
SchedulerItem::Type type, bool match_retry) {
size_t count = 0;
for (auto &item : container) {
// Skip nullptr items (can happen in defer_queue_ when items are being processed)
// The defer_queue_ uses index-based processing: items are std::moved out but left in the
// vector as nullptr until cleanup. Even though this function is called with lock held,
// the vector can still contain nullptr items from the processing loop. This check prevents crashes.
if (!item)
continue;
if (this->matches_item_(item, component, name_cstr, type, match_retry)) {
// Mark item for removal (platform-specific)
#ifdef ESPHOME_THREAD_MULTI_ATOMICS
@@ -311,6 +346,12 @@ class Scheduler {
bool has_cancelled_timeout_in_container_(const Container &container, Component *component, const char *name_cstr,
bool match_retry) const {
for (const auto &item : container) {
// Skip nullptr items (can happen in defer_queue_ when items are being processed)
// The defer_queue_ uses index-based processing: items are std::moved out but left in the
// vector as nullptr until cleanup. If this function is called during defer queue processing,
// it will iterate over these nullptr items. This check prevents crashes.
if (!item)
continue;
if (is_item_removed_(item.get()) &&
this->matches_item_(item, component, name_cstr, SchedulerItem::TIMEOUT, match_retry,
/* skip_removed= */ false)) {
@@ -324,9 +365,12 @@ class Scheduler {
std::vector<std::unique_ptr<SchedulerItem>> items_;
std::vector<std::unique_ptr<SchedulerItem>> to_add_;
#ifndef ESPHOME_THREAD_SINGLE
// Single-core platforms don't need the defer queue and save 40 bytes of RAM
std::deque<std::unique_ptr<SchedulerItem>> defer_queue_; // FIFO queue for defer() calls
#endif /* ESPHOME_THREAD_SINGLE */
// Single-core platforms don't need the defer queue and save ~32 bytes of RAM
// Using std::vector instead of std::deque avoids 512-byte chunked allocations
// Index tracking avoids O(n) erase() calls when draining the queue each loop
std::vector<std::unique_ptr<SchedulerItem>> defer_queue_; // FIFO queue for defer() calls
size_t defer_queue_front_{0}; // Index of first valid item in defer_queue_ (tracks consumed items)
#endif /* ESPHOME_THREAD_SINGLE */
uint32_t to_remove_{0};
// Memory pool for recycling SchedulerItem objects to reduce heap churn.

View File

@@ -1058,7 +1058,8 @@ class DownloadBinaryRequestHandler(BaseHandler):
"download",
f"{storage_json.name}-{file_name}",
)
path = storage_json.firmware_bin_path.with_name(file_name)
path = storage_json.firmware_bin_path.parent.joinpath(file_name)
if not path.is_file():
args = ["esphome", "idedata", settings.rel_path(configuration)]

View File

@@ -12,7 +12,7 @@ platformio==6.1.18 # When updating platformio, also update /docker/Dockerfile
esptool==5.1.0
click==8.1.7
esphome-dashboard==20251013.0
aioesphomeapi==42.0.0
aioesphomeapi==42.2.0
zeroconf==0.148.0
puremagic==1.30
ruamel.yaml==0.18.15 # dashboard_import

View File

@@ -1415,7 +1415,13 @@ class RepeatedTypeInfo(TypeInfo):
super().__init__(field)
# Check if this is a pointer field by looking for container_pointer option
self._container_type = get_field_opt(field, pb.container_pointer, "")
self._use_pointer = bool(self._container_type)
# Check for non-template container pointer
self._container_no_template = get_field_opt(
field, pb.container_pointer_no_template, ""
)
self._use_pointer = bool(self._container_type) or bool(
self._container_no_template
)
# Check if this should use FixedVector instead of std::vector
self._use_fixed_vector = get_field_opt(field, pb.fixed_vector, False)
@@ -1434,12 +1440,18 @@ class RepeatedTypeInfo(TypeInfo):
@property
def cpp_type(self) -> str:
if self._container_no_template:
# Non-template container: use type as-is without appending template parameters
return f"const {self._container_no_template}*"
if self._use_pointer and self._container_type:
# For pointer fields, use the specified container type
# If the container type already includes the element type (e.g., std::set<climate::ClimateMode>)
# use it as-is, otherwise append the element type
# Two cases:
# 1. "std::set<climate::ClimateMode>" - Full type with template params, use as-is
# 2. "std::set" - No <>, append the element type
if "<" in self._container_type and ">" in self._container_type:
# Has template parameters specified, use as-is
return f"const {self._container_type}*"
# No <> at all, append element type
return f"const {self._container_type}<{self._ti.cpp_type}>*"
if self._use_fixed_vector:
return f"FixedVector<{self._ti.cpp_type}>"

View File

@@ -0,0 +1,88 @@
#!/usr/bin/env python3
"""Add metadata to memory analysis JSON file.
This script adds components and platform metadata to an existing
memory analysis JSON file. Used by CI to ensure all required fields are present
for the comment script.
"""
from __future__ import annotations
import argparse
import json
from pathlib import Path
import sys
def main() -> int:
"""Main entry point."""
parser = argparse.ArgumentParser(
description="Add metadata to memory analysis JSON file"
)
parser.add_argument(
"--json-file",
required=True,
help="Path to JSON file to update",
)
parser.add_argument(
"--components",
required=True,
help='JSON array of component names (e.g., \'["api", "wifi"]\')',
)
parser.add_argument(
"--platform",
required=True,
help="Platform name",
)
args = parser.parse_args()
# Load existing JSON
json_path = Path(args.json_file)
if not json_path.exists():
print(f"Error: JSON file not found: {args.json_file}", file=sys.stderr)
return 1
try:
with open(json_path, encoding="utf-8") as f:
data = json.load(f)
except (json.JSONDecodeError, OSError) as e:
print(f"Error loading JSON: {e}", file=sys.stderr)
return 1
# Parse components
try:
components = json.loads(args.components)
if not isinstance(components, list):
print("Error: --components must be a JSON array", file=sys.stderr)
return 1
# Element-level validation: ensure each component is a non-empty string
for idx, comp in enumerate(components):
if not isinstance(comp, str) or not comp.strip():
print(
f"Error: component at index {idx} is not a non-empty string: {comp!r}",
file=sys.stderr,
)
return 1
except json.JSONDecodeError as e:
print(f"Error parsing components: {e}", file=sys.stderr)
return 1
# Add metadata
data["components"] = components
data["platform"] = args.platform
# Write back
try:
with open(json_path, "w", encoding="utf-8") as f:
json.dump(data, f, indent=2)
print(f"Added metadata to {args.json_file}", file=sys.stderr)
except OSError as e:
print(f"Error writing JSON: {e}", file=sys.stderr)
return 1
return 0
if __name__ == "__main__":
sys.exit(main())
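A hypothetical CI invocation (the script path is illustrative; the flags match the argparse definition above):
python script/ci_add_memory_metadata.py --json-file memory-analysis.json --components '["api", "wifi"]' --platform esp32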

View File

@@ -24,6 +24,37 @@ sys.path.insert(0, str(Path(__file__).parent.parent))
# Comment marker to identify our memory impact comments
COMMENT_MARKER = "<!-- esphome-memory-impact-analysis -->"
def run_gh_command(args: list[str], operation: str) -> subprocess.CompletedProcess:
"""Run a gh CLI command with error handling.
Args:
args: Command arguments (including 'gh')
operation: Description of the operation for error messages
Returns:
CompletedProcess result
Raises:
subprocess.CalledProcessError: If command fails (with detailed error output)
"""
try:
return subprocess.run(
args,
check=True,
capture_output=True,
text=True,
)
except subprocess.CalledProcessError as e:
print(
f"ERROR: {operation} failed with exit code {e.returncode}", file=sys.stderr
)
print(f"ERROR: Command: {' '.join(args)}", file=sys.stderr)
print(f"ERROR: stdout: {e.stdout}", file=sys.stderr)
print(f"ERROR: stderr: {e.stderr}", file=sys.stderr)
raise
# Thresholds for emoji significance indicators (percentage)
OVERALL_CHANGE_THRESHOLD = 1.0 # Overall RAM/Flash changes
COMPONENT_CHANGE_THRESHOLD = 3.0 # Component breakdown changes
@@ -238,7 +269,6 @@ def create_comment_body(
pr_analysis: dict | None = None,
target_symbols: dict | None = None,
pr_symbols: dict | None = None,
target_cache_hit: bool = False,
) -> str:
"""Create the comment body with memory impact analysis using Jinja2 templates.
@@ -253,7 +283,6 @@ def create_comment_body(
pr_analysis: Optional component breakdown for PR branch
target_symbols: Optional symbol map for target branch
pr_symbols: Optional symbol map for PR branch
target_cache_hit: Whether target branch analysis was loaded from cache
Returns:
Formatted comment body
@@ -283,7 +312,6 @@ def create_comment_body(
"flash_change": format_change(
target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD
),
"target_cache_hit": target_cache_hit,
"component_change_threshold": COMPONENT_CHANGE_THRESHOLD,
}
@@ -356,7 +384,7 @@ def find_existing_comment(pr_number: str) -> str | None:
print(f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr)
# Use gh api to get comments directly - this returns the numeric id field
result = subprocess.run(
result = run_gh_command(
[
"gh",
"api",
@@ -364,9 +392,7 @@ def find_existing_comment(pr_number: str) -> str | None:
"--jq",
".[] | {id, body}",
],
capture_output=True,
text=True,
check=True,
operation="Get PR comments",
)
print(
@@ -420,7 +446,8 @@ def update_existing_comment(comment_id: str, comment_body: str) -> None:
subprocess.CalledProcessError: If gh command fails
"""
print(f"DEBUG: Updating existing comment {comment_id}", file=sys.stderr)
result = subprocess.run(
print(f"DEBUG: Comment body length: {len(comment_body)} bytes", file=sys.stderr)
result = run_gh_command(
[
"gh",
"api",
@@ -430,9 +457,7 @@ def update_existing_comment(comment_id: str, comment_body: str) -> None:
"-f",
f"body={comment_body}",
],
check=True,
capture_output=True,
text=True,
operation="Update PR comment",
)
print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr)
@@ -448,11 +473,10 @@ def create_new_comment(pr_number: str, comment_body: str) -> None:
subprocess.CalledProcessError: If gh command fails
"""
print(f"DEBUG: Posting new comment on PR #{pr_number}", file=sys.stderr)
result = subprocess.run(
print(f"DEBUG: Comment body length: {len(comment_body)} bytes", file=sys.stderr)
result = run_gh_command(
["gh", "pr", "comment", pr_number, "--body", comment_body],
check=True,
capture_output=True,
text=True,
operation="Create PR comment",
)
print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr)
@@ -484,80 +508,129 @@ def main() -> int:
description="Post or update PR comment with memory impact analysis"
)
parser.add_argument("--pr-number", required=True, help="PR number")
parser.add_argument(
"--components",
required=True,
help='JSON array of component names (e.g., \'["api", "wifi"]\')',
)
parser.add_argument("--platform", required=True, help="Platform name")
parser.add_argument(
"--target-ram", type=int, required=True, help="Target branch RAM usage"
)
parser.add_argument(
"--target-flash", type=int, required=True, help="Target branch flash usage"
)
parser.add_argument("--pr-ram", type=int, required=True, help="PR branch RAM usage")
parser.add_argument(
"--pr-flash", type=int, required=True, help="PR branch flash usage"
)
parser.add_argument(
"--target-json",
help="Optional path to target branch analysis JSON (for detailed analysis)",
required=True,
help="Path to target branch analysis JSON file",
)
parser.add_argument(
"--pr-json",
help="Optional path to PR branch analysis JSON (for detailed analysis)",
)
parser.add_argument(
"--target-cache-hit",
action="store_true",
help="Indicates that target branch analysis was loaded from cache",
required=True,
help="Path to PR branch analysis JSON file",
)
args = parser.parse_args()
# Parse components from JSON
try:
components = json.loads(args.components)
if not isinstance(components, list):
print("Error: --components must be a JSON array", file=sys.stderr)
sys.exit(1)
except json.JSONDecodeError as e:
print(f"Error parsing --components JSON: {e}", file=sys.stderr)
# Load analysis JSON files (all data comes from JSON for security)
target_data: dict | None = load_analysis_json(args.target_json)
if not target_data:
print("Error: Failed to load target analysis JSON", file=sys.stderr)
sys.exit(1)
# Load analysis JSON files
target_analysis = None
pr_analysis = None
target_symbols = None
pr_symbols = None
pr_data: dict | None = load_analysis_json(args.pr_json)
if not pr_data:
print("Error: Failed to load PR analysis JSON", file=sys.stderr)
sys.exit(1)
if args.target_json:
target_data = load_analysis_json(args.target_json)
if target_data and target_data.get("detailed_analysis"):
target_analysis = target_data["detailed_analysis"].get("components")
target_symbols = target_data["detailed_analysis"].get("symbols")
# Extract detailed analysis if available
target_analysis: dict | None = None
pr_analysis: dict | None = None
target_symbols: dict | None = None
pr_symbols: dict | None = None
if args.pr_json:
pr_data = load_analysis_json(args.pr_json)
if pr_data and pr_data.get("detailed_analysis"):
pr_analysis = pr_data["detailed_analysis"].get("components")
pr_symbols = pr_data["detailed_analysis"].get("symbols")
if target_data.get("detailed_analysis"):
target_analysis = target_data["detailed_analysis"].get("components")
target_symbols = target_data["detailed_analysis"].get("symbols")
if pr_data.get("detailed_analysis"):
pr_analysis = pr_data["detailed_analysis"].get("components")
pr_symbols = pr_data["detailed_analysis"].get("symbols")
# Extract all values from JSON files (prevents shell injection from PR code)
components = target_data.get("components")
platform = target_data.get("platform")
target_ram = target_data.get("ram_bytes")
target_flash = target_data.get("flash_bytes")
pr_ram = pr_data.get("ram_bytes")
pr_flash = pr_data.get("flash_bytes")
# Validate required fields and types
missing_fields: list[str] = []
type_errors: list[str] = []
if components is None:
missing_fields.append("components")
elif not isinstance(components, list):
type_errors.append(
f"components must be a list, got {type(components).__name__}"
)
else:
for idx, comp in enumerate(components):
if not isinstance(comp, str):
type_errors.append(
f"components[{idx}] must be a string, got {type(comp).__name__}"
)
if platform is None:
missing_fields.append("platform")
elif not isinstance(platform, str):
type_errors.append(f"platform must be a string, got {type(platform).__name__}")
if target_ram is None:
missing_fields.append("target.ram_bytes")
elif not isinstance(target_ram, int):
type_errors.append(
f"target.ram_bytes must be an integer, got {type(target_ram).__name__}"
)
if target_flash is None:
missing_fields.append("target.flash_bytes")
elif not isinstance(target_flash, int):
type_errors.append(
f"target.flash_bytes must be an integer, got {type(target_flash).__name__}"
)
if pr_ram is None:
missing_fields.append("pr.ram_bytes")
elif not isinstance(pr_ram, int):
type_errors.append(
f"pr.ram_bytes must be an integer, got {type(pr_ram).__name__}"
)
if pr_flash is None:
missing_fields.append("pr.flash_bytes")
elif not isinstance(pr_flash, int):
type_errors.append(
f"pr.flash_bytes must be an integer, got {type(pr_flash).__name__}"
)
if missing_fields or type_errors:
if missing_fields:
print(
f"Error: JSON files missing required fields: {', '.join(missing_fields)}",
file=sys.stderr,
)
if type_errors:
print(
f"Error: Type validation failed: {'; '.join(type_errors)}",
file=sys.stderr,
)
print(f"Target JSON keys: {list(target_data.keys())}", file=sys.stderr)
print(f"PR JSON keys: {list(pr_data.keys())}", file=sys.stderr)
sys.exit(1)
# Create comment body
# Note: Memory totals (RAM/Flash) are summed across all builds if multiple were run.
comment_body = create_comment_body(
components=components,
platform=args.platform,
target_ram=args.target_ram,
target_flash=args.target_flash,
pr_ram=args.pr_ram,
pr_flash=args.pr_flash,
platform=platform,
target_ram=target_ram,
target_flash=target_flash,
pr_ram=pr_ram,
pr_flash=pr_flash,
target_analysis=target_analysis,
pr_analysis=pr_analysis,
target_symbols=target_symbols,
pr_symbols=pr_symbols,
target_cache_hit=args.target_cache_hit,
)
# Post or update comment

View File

@@ -25,6 +25,7 @@ int main() { return 0;}
Path(zephyr_dir / "prj.conf").write_text(
"""
CONFIG_NEWLIB_LIBC=y
CONFIG_BT=y
CONFIG_ADC=y
""",
encoding="utf-8",

View File

@@ -6,6 +6,7 @@ from unittest.mock import MagicMock, patch
import pytest
from esphome.components.packages import do_packages_pass
from esphome.config import resolve_extend_remove
from esphome.config_helpers import Extend, Remove
import esphome.config_validation as cv
from esphome.const import (
@@ -64,13 +65,20 @@ def fixture_basic_esphome():
return {CONF_NAME: TEST_DEVICE_NAME, CONF_PLATFORM: TEST_PLATFORM}
def packages_pass(config):
"""Wrapper around packages_pass that also resolves Extend and Remove."""
config = do_packages_pass(config)
resolve_extend_remove(config)
return config
def test_package_unused(basic_esphome, basic_wifi):
"""
Ensures do_package_pass does not change a config if packages aren't used.
"""
config = {CONF_ESPHOME: basic_esphome, CONF_WIFI: basic_wifi}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == config
@@ -83,7 +91,7 @@ def test_package_invalid_dict(basic_esphome, basic_wifi):
config = {CONF_ESPHOME: basic_esphome, CONF_PACKAGES: basic_wifi | {CONF_URL: ""}}
with pytest.raises(cv.Invalid):
do_packages_pass(config)
packages_pass(config)
def test_package_include(basic_wifi, basic_esphome):
@@ -99,7 +107,7 @@ def test_package_include(basic_wifi, basic_esphome):
expected = {CONF_ESPHOME: basic_esphome, CONF_WIFI: basic_wifi}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -124,7 +132,7 @@ def test_package_append(basic_wifi, basic_esphome):
},
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -148,7 +156,7 @@ def test_package_override(basic_wifi, basic_esphome):
},
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -177,7 +185,7 @@ def test_multiple_package_order():
},
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -233,7 +241,7 @@ def test_package_list_merge():
]
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -311,7 +319,7 @@ def test_package_list_merge_by_id():
]
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -350,13 +358,13 @@ def test_package_merge_by_id_with_list():
]
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
def test_package_merge_by_missing_id():
"""
Ensures that components with missing IDs are not merged.
Ensures that a validation error is thrown when trying to extend a missing ID.
"""
config = {
@@ -379,25 +387,15 @@ def test_package_merge_by_missing_id():
],
}
expected = {
CONF_SENSOR: [
{
CONF_ID: TEST_SENSOR_ID_1,
CONF_FILTERS: [{CONF_MULTIPLY: 42.0}],
},
{
CONF_ID: TEST_SENSOR_ID_1,
CONF_FILTERS: [{CONF_MULTIPLY: 10.0}],
},
{
CONF_ID: Extend(TEST_SENSOR_ID_2),
CONF_FILTERS: [{CONF_OFFSET: 146.0}],
},
]
}
error_raised = False
try:
packages_pass(config)
assert False, "Expected validation error for missing ID"
except cv.Invalid as err:
error_raised = True
assert err.path == [CONF_SENSOR, 2]
actual = do_packages_pass(config)
assert actual == expected
assert error_raised
def test_package_list_remove_by_id():
@@ -447,7 +445,7 @@ def test_package_list_remove_by_id():
]
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -493,7 +491,7 @@ def test_multiple_package_list_remove_by_id():
]
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -514,7 +512,7 @@ def test_package_dict_remove_by_id(basic_wifi, basic_esphome):
CONF_ESPHOME: basic_esphome,
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -545,7 +543,6 @@ def test_package_remove_by_missing_id():
}
expected = {
"missing_key": Remove(),
CONF_SENSOR: [
{
CONF_ID: TEST_SENSOR_ID_1,
@@ -555,14 +552,10 @@ def test_package_remove_by_missing_id():
CONF_ID: TEST_SENSOR_ID_1,
CONF_FILTERS: [{CONF_MULTIPLY: 10.0}],
},
{
CONF_ID: Remove(TEST_SENSOR_ID_2),
CONF_FILTERS: [{CONF_OFFSET: 146.0}],
},
],
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -634,7 +627,7 @@ def test_remote_packages_with_files_list(
]
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected
@@ -730,5 +723,5 @@ def test_remote_packages_with_files_and_vars(
]
}
actual = do_packages_pass(config)
actual = packages_pass(config)
assert actual == expected

View File

@@ -8,14 +8,12 @@ sensor:
lambda: |-
if (millis() > 10000) {
return 0.6;
} else {
return 0.0;
}
return 0.0;
- platform: template
id: template_temperature
lambda: |-
if (millis() > 10000) {
return 42.0;
} else {
return 0.0;
}
return 0.0;

View File

@@ -5,9 +5,8 @@ sensor:
lambda: |-
if (millis() > 10000) {
return 42.0;
} else {
return 0.0;
}
return 0.0;
update_interval: 15s
binary_sensor:

View File

@@ -23,9 +23,8 @@ binary_sensor:
- lambda: |-
if (id(some_binary_sensor).state) {
return x;
} else {
return {};
}
return {};
- settle: 100ms
- timeout: 10s

View File

@@ -4,25 +4,22 @@ binary_sensor:
lambda: |-
if (millis() > 10000) {
return true;
} else {
return false;
}
return false;
- platform: template
id: bin2
lambda: |-
if (millis() > 20000) {
return true;
} else {
return false;
}
return false;
- platform: template
id: bin3
lambda: |-
if (millis() > 30000) {
return true;
} else {
return false;
}
return false;
sensor:
- platform: binary_sensor_map

View File

@@ -0,0 +1,2 @@
ble_nus:
type: logs

View File

@@ -0,0 +1,2 @@
ble_nus:
type: logs

View File

@@ -4,17 +4,15 @@ sensor:
lambda: |-
if (millis() > 10000) {
return 0.6;
} else {
return 0.0;
}
return 0.0;
- platform: template
id: template_temperature2
lambda: |-
if (millis() > 20000) {
return 0.8;
} else {
return 0.0;
}
return 0.0;
- platform: combination
type: kalman
name: Kalman-filtered temperature

View File

@@ -4,9 +4,8 @@ binary_sensor:
lambda: |-
if (millis() > 10000) {
return true;
} else {
return false;
}
return false;
sensor:
- platform: duty_time

View File

@@ -4,9 +4,8 @@ binary_sensor:
lambda: |-
if (millis() > 10000) {
return true;
} else {
return false;
}
return false;
switch:
- platform: template

View File

@@ -17,9 +17,8 @@ lock:
lambda: |-
if (millis() > 10000) {
return LOCK_STATE_LOCKED;
} else {
return LOCK_STATE_UNLOCKED;
}
return LOCK_STATE_UNLOCKED;
optimistic: true
assumed_state: false
on_unlock:

View File

@@ -72,10 +72,9 @@ binary_sensor:
if (id(template_sens).state > 30) {
// Garage Door is open.
return true;
} else {
// Garage Door is closed.
return false;
}
// Garage Door is closed.
return false;
on_state:
- mqtt.publish:
topic: some/topic/binary_sensor
@@ -217,9 +216,8 @@ cover:
lambda: |-
if (id(some_binary_sensor).state) {
return COVER_OPEN;
} else {
return COVER_CLOSED;
}
return COVER_CLOSED;
open_action:
- logger.log: open_action
close_action:
@@ -321,9 +319,8 @@ lock:
lambda: |-
if (id(some_binary_sensor).state) {
return LOCK_STATE_LOCKED;
} else {
return LOCK_STATE_UNLOCKED;
}
return LOCK_STATE_UNLOCKED;
lock_action:
- logger.log: lock_action
unlock_action:
@@ -360,9 +357,8 @@ sensor:
lambda: |-
if (id(some_binary_sensor).state) {
return 42.0;
} else {
return 0.0;
}
return 0.0;
update_interval: 60s
on_value:
- mqtt.publish:
@@ -390,9 +386,8 @@ switch:
lambda: |-
if (id(some_binary_sensor).state) {
return true;
} else {
return false;
}
return false;
turn_on_action:
- logger.log: turn_on_action
turn_off_action:
@@ -436,9 +431,8 @@ valve:
lambda: |-
if (id(some_binary_sensor).state) {
return VALVE_OPEN;
} else {
return VALVE_CLOSED;
}
return VALVE_CLOSED;
alarm_control_panel:
- platform: template

View File

@@ -27,9 +27,8 @@ sensor:
lambda: |-
if (millis() > 10000) {
return 42.0;
} else {
return 0.0;
}
return 0.0;
update_interval: 60s
climate:

View File

@@ -35,9 +35,8 @@ sensor:
lambda: |-
if (millis() > 10000) {
return 42.0;
} else {
return 0.0;
}
return 0.0;
update_interval: 60s
text_sensor:
@@ -49,9 +48,8 @@ text_sensor:
lambda: |-
if (millis() > 10000) {
return {"Hello World"};
} else {
return {"Goodbye (cruel) World"};
}
return {"Goodbye (cruel) World"};
update_interval: 60s
binary_sensor:
@@ -60,9 +58,8 @@ binary_sensor:
lambda: |-
if (millis() > 10000) {
return true;
} else {
return false;
}
return false;
switch:
- platform: template
@@ -70,9 +67,8 @@ switch:
lambda: |-
if (millis() > 10000) {
return true;
} else {
return false;
}
return false;
optimistic: true
fan:
@@ -85,9 +81,8 @@ cover:
lambda: |-
if (millis() > 10000) {
return COVER_OPEN;
} else {
return COVER_CLOSED;
}
return COVER_CLOSED;
lock:
- platform: template
@@ -95,9 +90,8 @@ lock:
lambda: |-
if (millis() > 10000) {
return LOCK_STATE_LOCKED;
} else {
return LOCK_STATE_UNLOCKED;
}
return LOCK_STATE_UNLOCKED;
optimistic: true
select:

View File

@@ -59,9 +59,8 @@ binary_sensor:
- lambda: |-
if (id(other_binary_sensor).state) {
return x;
} else {
return {};
}
return {};
- settle: 500ms
- timeout: 5s
@@ -72,9 +71,8 @@ sensor:
lambda: |-
if (id(some_binary_sensor).state) {
return 42.0;
} else {
return 0.0;
}
return 0.0;
update_interval: 60s
filters:
- calibrate_linear:
@@ -183,9 +181,8 @@ switch:
lambda: |-
if (id(some_binary_sensor).state) {
return true;
} else {
return false;
}
return false;
turn_on_action:
- logger.log: "turn_on_action"
turn_off_action:
@@ -203,9 +200,8 @@ cover:
lambda: |-
if (id(some_binary_sensor).state) {
return COVER_OPEN;
} else {
return COVER_CLOSED;
}
return COVER_CLOSED;
open_action:
- logger.log: open_action
close_action:
@@ -238,9 +234,8 @@ lock:
lambda: |-
if (id(some_binary_sensor).state) {
return LOCK_STATE_LOCKED;
} else {
return LOCK_STATE_UNLOCKED;
}
return LOCK_STATE_UNLOCKED;
lock_action:
- logger.log: lock_action
unlock_action:
@@ -255,9 +250,8 @@ valve:
lambda: |-
if (id(some_binary_sensor).state) {
return VALVE_OPEN;
} else {
return VALVE_CLOSED;
}
return VALVE_CLOSED;
open_action:
- logger.log: open_action
close_action:

View File

@@ -69,6 +69,11 @@ climate:
- logger.log: swing_vertical_action
swing_both_action:
- logger.log: swing_both_action
humidity_control_humidify_action:
- logger.log: humidity_control_humidify_action
humidity_control_off_action:
- logger.log: humidity_control_off_action
humidity_hysteresis: 1.0
startup_delay: true
supplemental_cooling_delta: 2.0
cool_deadband: 0.5

View File

@@ -17,10 +17,10 @@ sensor:
name: HLW8012 Voltage
power:
name: HLW8012 Power
id: hlw8012_power
id: total_daily_energy_hlw8012_power
energy:
name: HLW8012 Energy
id: hlw8012_energy
id: total_daily_energy_hlw8012_energy
update_interval: 15s
current_resistor: 0.001 ohm
voltage_divider: 2351
@@ -29,4 +29,4 @@ sensor:
model: hlw8012
- platform: total_daily_energy
name: HLW8012 Total Daily Energy
power_id: hlw8012_power
power_id: total_daily_energy_hlw8012_power

View File

@@ -1,4 +1,5 @@
wifi:
fast_connect: true
networks:
- ssid: MySSID
eap:

View File

@@ -35,6 +35,26 @@ from esphome.zeroconf import DiscoveredImport
from .common import get_fixture_path
def get_build_path(base_path: Path, device_name: str) -> Path:
"""Get the build directory path for a device.
This is a test helper that constructs the standard ESPHome build directory
structure. Note: This helper does NOT perform path traversal sanitization
because it's only used in tests where we control the inputs. The actual
web_server.py code handles sanitization in DownloadBinaryRequestHandler.get()
via file_name.replace("..", "").lstrip("/").
Args:
base_path: The base temporary path (typically tmp_path from pytest)
device_name: The name of the device (should not contain path separators
in production use, but tests may use it for specific scenarios)
Returns:
Path to the build directory (.esphome/build/device_name)
"""
return base_path / ".esphome" / "build" / device_name
class DashboardTestHelper:
def __init__(self, io_loop: IOLoop, client: AsyncHTTPClient, port: int) -> None:
self.io_loop = io_loop
@@ -417,6 +437,180 @@ async def test_download_binary_handler_idedata_fallback(
assert response.body == b"bootloader content"
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
async def test_download_binary_handler_subdirectory_file(
dashboard: DashboardTestHelper,
tmp_path: Path,
mock_storage_json: MagicMock,
) -> None:
"""Test the DownloadBinaryRequestHandler.get with file in subdirectory (nRF52 case).
This is a regression test for issue #11343 where the Path migration broke
downloads for nRF52 firmware files in subdirectories like 'zephyr/zephyr.uf2'.
The issue was that with_name() doesn't accept path separators:
- Before: path = storage_json.firmware_bin_path.with_name(file_name)
ValueError: Invalid name 'zephyr/zephyr.uf2'
- After: path = storage_json.firmware_bin_path.parent.joinpath(file_name)
Works correctly with subdirectory paths
"""
# Create a fake nRF52 build structure with firmware in subdirectory
build_dir = get_build_path(tmp_path, "nrf52-device")
zephyr_dir = build_dir / "zephyr"
zephyr_dir.mkdir(parents=True)
# Create the main firmware binary (would be in build root)
firmware_file = build_dir / "firmware.bin"
firmware_file.write_bytes(b"main firmware")
# Create the UF2 file in zephyr subdirectory (nRF52 specific)
uf2_file = zephyr_dir / "zephyr.uf2"
uf2_file.write_bytes(b"nRF52 UF2 firmware content")
# Mock storage JSON
mock_storage = Mock()
mock_storage.name = "nrf52-device"
mock_storage.firmware_bin_path = firmware_file
mock_storage_json.load.return_value = mock_storage
# Request the UF2 file with subdirectory path
response = await dashboard.fetch(
"/download.bin?configuration=nrf52-device.yaml&file=zephyr/zephyr.uf2",
method="GET",
)
assert response.code == 200
assert response.body == b"nRF52 UF2 firmware content"
assert response.headers["Content-Type"] == "application/octet-stream"
assert "attachment" in response.headers["Content-Disposition"]
# Download name should be device-name + full file path
assert "nrf52-device-zephyr/zephyr.uf2" in response.headers["Content-Disposition"]
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
async def test_download_binary_handler_subdirectory_file_url_encoded(
dashboard: DashboardTestHelper,
tmp_path: Path,
mock_storage_json: MagicMock,
) -> None:
"""Test the DownloadBinaryRequestHandler.get with URL-encoded subdirectory path.
Verifies that URL-encoded paths (e.g., zephyr%2Fzephyr.uf2) are correctly
decoded and handled, and that custom download names work with subdirectories.
"""
# Create a fake build structure with firmware in subdirectory
build_dir = get_build_path(tmp_path, "test")
zephyr_dir = build_dir / "zephyr"
zephyr_dir.mkdir(parents=True)
firmware_file = build_dir / "firmware.bin"
firmware_file.write_bytes(b"content")
uf2_file = zephyr_dir / "zephyr.uf2"
uf2_file.write_bytes(b"content")
# Mock storage JSON
mock_storage = Mock()
mock_storage.name = "test_device"
mock_storage.firmware_bin_path = firmware_file
mock_storage_json.load.return_value = mock_storage
# Request with URL-encoded path and custom download name
response = await dashboard.fetch(
"/download.bin?configuration=test.yaml&file=zephyr%2Fzephyr.uf2&download=custom_name.bin",
method="GET",
)
assert response.code == 200
assert "custom_name.bin" in response.headers["Content-Disposition"]
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
@pytest.mark.parametrize(
"attack_path",
[
pytest.param("../../../secrets.yaml", id="basic_traversal"),
pytest.param("..%2F..%2F..%2Fsecrets.yaml", id="url_encoded"),
pytest.param("zephyr/../../../secrets.yaml", id="traversal_with_prefix"),
pytest.param("/etc/passwd", id="absolute_path"),
pytest.param("//etc/passwd", id="double_slash_absolute"),
pytest.param("....//secrets.yaml", id="multiple_dots"),
],
)
async def test_download_binary_handler_path_traversal_protection(
dashboard: DashboardTestHelper,
tmp_path: Path,
mock_storage_json: MagicMock,
attack_path: str,
) -> None:
"""Test that DownloadBinaryRequestHandler prevents path traversal attacks.
Verifies that attempts to use '..' in file paths are sanitized to prevent
accessing files outside the build directory. Tests multiple attack vectors.
"""
# Create build structure
build_dir = get_build_path(tmp_path, "test")
build_dir.mkdir(parents=True)
firmware_file = build_dir / "firmware.bin"
firmware_file.write_bytes(b"firmware content")
# Create a sensitive file outside the build directory that should NOT be accessible
sensitive_file = tmp_path / "secrets.yaml"
sensitive_file.write_bytes(b"secret: my_secret_password")
# Mock storage JSON
mock_storage = Mock()
mock_storage.name = "test_device"
mock_storage.firmware_bin_path = firmware_file
mock_storage_json.load.return_value = mock_storage
# Attempt path traversal attack - should be blocked
with pytest.raises(HTTPClientError) as exc_info:
await dashboard.fetch(
f"/download.bin?configuration=test.yaml&file={attack_path}",
method="GET",
)
# Should get 404 (file not found after sanitization) or 500 (idedata fails)
assert exc_info.value.code in (404, 500)
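For reference, applying the sanitization quoted in the get_build_path docstring, file_name.replace("..", "").lstrip("/"), to these attack strings shows why each one collapses to a harmless path under the build directory and then 404s; a small sketch (the url_encoded case is decoded by Tornado before the handler applies this):

attacks = [
    "../../../secrets.yaml",
    "zephyr/../../../secrets.yaml",
    "/etc/passwd",
    "//etc/passwd",
    "....//secrets.yaml",
]
for raw in attacks:
    sanitized = raw.replace("..", "").lstrip("/")
    print(f"{raw!r:35} -> {sanitized!r}")
# '../../../secrets.yaml'             -> 'secrets.yaml'
# 'zephyr/../../../secrets.yaml'      -> 'zephyr////secrets.yaml'
# '/etc/passwd'                       -> 'etc/passwd'
# '//etc/passwd'                      -> 'etc/passwd'
# '....//secrets.yaml'                -> 'secrets.yaml'
# None of these exist under .esphome/build/test, hence the expected 404.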
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_ext_storage_path")
async def test_download_binary_handler_multiple_subdirectory_levels(
dashboard: DashboardTestHelper,
tmp_path: Path,
mock_storage_json: MagicMock,
) -> None:
"""Test downloading files from multiple subdirectory levels.
Verifies that joinpath correctly handles multi-level paths like 'build/output/firmware.bin'.
"""
# Create nested directory structure
build_dir = get_build_path(tmp_path, "test")
nested_dir = build_dir / "build" / "output"
nested_dir.mkdir(parents=True)
firmware_file = build_dir / "firmware.bin"
firmware_file.write_bytes(b"main")
nested_file = nested_dir / "firmware.bin"
nested_file.write_bytes(b"nested firmware content")
# Mock storage JSON
mock_storage = Mock()
mock_storage.name = "test_device"
mock_storage.firmware_bin_path = firmware_file
mock_storage_json.load.return_value = mock_storage
response = await dashboard.fetch(
"/download.bin?configuration=test.yaml&file=build/output/firmware.bin",
method="GET",
)
assert response.code == 200
assert response.body == b"nested firmware content"
@pytest.mark.asyncio
async def test_edit_request_handler_post_invalid_file(
dashboard: DashboardTestHelper,

View File

@@ -34,10 +34,9 @@ binary_sensor:
ESP_LOGD("test", "Button ON at %u", now);
}
return true;
} else {
// Only log state change
if (id(ir_remote_button).state) {
ESP_LOGD("test", "Button OFF at %u", now);
}
return false;
}
// Only log state change
if (id(ir_remote_button).state) {
ESP_LOGD("test", "Button OFF at %u", now);
}
return false;

View File

@@ -0,0 +1,170 @@
esphome:
name: test-script-queued
host:
api:
actions:
# Test 1: Queue depth with default max_runs=5
- action: test_queue_depth
then:
- logger.log: "=== TEST 1: Queue depth (max_runs=5 means 5 total, reject 6-7) ==="
- script.execute:
id: queue_depth_script
value: 1
- script.execute:
id: queue_depth_script
value: 2
- script.execute:
id: queue_depth_script
value: 3
- script.execute:
id: queue_depth_script
value: 4
- script.execute:
id: queue_depth_script
value: 5
- script.execute:
id: queue_depth_script
value: 6
- script.execute:
id: queue_depth_script
value: 7
# Test 2: Ring buffer wrap test
- action: test_ring_buffer
then:
- logger.log: "=== TEST 2: Ring buffer wrap (should process A, B, C in order) ==="
- script.execute:
id: wrap_script
msg: "A"
- script.execute:
id: wrap_script
msg: "B"
- script.execute:
id: wrap_script
msg: "C"
# Test 3: Stop clears queue
- action: test_stop_clears
then:
- logger.log: "=== TEST 3: Stop clears queue (should only see 1, then 'STOPPED') ==="
- script.execute:
id: stop_script
num: 1
- script.execute:
id: stop_script
num: 2
- script.execute:
id: stop_script
num: 3
- delay: 50ms
- logger.log: "STOPPING script now"
- script.stop: stop_script
# Test 4: Verify rejection (max_runs=3)
- action: test_rejection
then:
- logger.log: "=== TEST 4: Verify rejection (max_runs=3 means 3 total, reject 4-8) ==="
- script.execute:
id: rejection_script
val: 1
- script.execute:
id: rejection_script
val: 2
- script.execute:
id: rejection_script
val: 3
- script.execute:
id: rejection_script
val: 4
- script.execute:
id: rejection_script
val: 5
- script.execute:
id: rejection_script
val: 6
- script.execute:
id: rejection_script
val: 7
- script.execute:
id: rejection_script
val: 8
# Test 5: No parameters test
- action: test_no_params
then:
- logger.log: "=== TEST 5: No params (should process 3 times) ==="
- script.execute: no_params_script
- script.execute: no_params_script
- script.execute: no_params_script
logger:
level: DEBUG
script:
# Test script 1: Queue depth test (default max_runs=5)
- id: queue_depth_script
mode: queued
parameters:
value: int
then:
- logger.log:
format: "Queue test: START item %d"
args: ['value']
- delay: 100ms
- logger.log:
format: "Queue test: END item %d"
args: ['value']
# Test script 2: Ring buffer wrap test (max_runs=3)
- id: wrap_script
mode: queued
max_runs: 3
parameters:
msg: string
then:
- logger.log:
format: "Ring buffer: START '%s'"
args: ['msg.c_str()']
- delay: 50ms
- logger.log:
format: "Ring buffer: END '%s'"
args: ['msg.c_str()']
# Test script 3: Stop test
- id: stop_script
mode: queued
max_runs: 5
parameters:
num: int
then:
- logger.log:
format: "Stop test: START %d"
args: ['num']
- delay: 100ms
- logger.log:
format: "Stop test: END %d"
args: ['num']
# Test script 4: Rejection test (max_runs=3)
- id: rejection_script
mode: queued
max_runs: 3
parameters:
val: int
then:
- logger.log:
format: "Rejection test: START %d"
args: ['val']
- delay: 200ms
- logger.log:
format: "Rejection test: END %d"
args: ['val']
# Test script 5: No parameters
- id: no_params_script
mode: queued
then:
- logger.log: "No params: START"
- delay: 50ms
- logger.log: "No params: END"

View File

@@ -8,6 +8,7 @@ import asyncio
from typing import Any
from aioesphomeapi import LightState
from aioesphomeapi.model import ColorMode
import pytest
from .types import APIClientConnectedFactory, RunCompiledFunction
@@ -35,10 +36,51 @@ async def test_light_calls(
# Get the light entities
entities = await client.list_entities_services()
lights = [e for e in entities[0] if e.object_id.startswith("test_")]
assert len(lights) >= 2 # Should have RGBCW and RGB lights
assert len(lights) >= 3 # Should have RGBCW, RGB, and Binary lights
rgbcw_light = next(light for light in lights if "RGBCW" in light.name)
rgb_light = next(light for light in lights if "RGB Light" in light.name)
binary_light = next(light for light in lights if "Binary" in light.name)
# Test color mode encoding: Verify supported_color_modes contains actual ColorMode enum values
# not bit positions. This is critical - the iterator must convert bit positions to actual
# ColorMode enum values for API encoding.
# RGBCW light (rgbww platform) should support RGB_COLD_WARM_WHITE mode
assert ColorMode.RGB_COLD_WARM_WHITE in rgbcw_light.supported_color_modes, (
f"RGBCW light missing RGB_COLD_WARM_WHITE mode. Got: {rgbcw_light.supported_color_modes}"
)
# Verify it's the actual enum value, not bit position
assert ColorMode.RGB_COLD_WARM_WHITE.value in [
mode.value for mode in rgbcw_light.supported_color_modes
], (
f"RGBCW light has wrong color mode values. Expected {ColorMode.RGB_COLD_WARM_WHITE.value} "
f"(RGB_COLD_WARM_WHITE), got: {[mode.value for mode in rgbcw_light.supported_color_modes]}"
)
# RGB light should support RGB mode
assert ColorMode.RGB in rgb_light.supported_color_modes, (
f"RGB light missing RGB color mode. Got: {rgb_light.supported_color_modes}"
)
# Verify it's the actual enum value, not bit position
assert ColorMode.RGB.value in [
mode.value for mode in rgb_light.supported_color_modes
], (
f"RGB light has wrong color mode values. Expected {ColorMode.RGB.value} (RGB), got: "
f"{[mode.value for mode in rgb_light.supported_color_modes]}"
)
# Binary light (on/off only) should support ON_OFF mode
assert ColorMode.ON_OFF in binary_light.supported_color_modes, (
f"Binary light missing ON_OFF color mode. Got: {binary_light.supported_color_modes}"
)
# Verify it's the actual enum value, not bit position
assert ColorMode.ON_OFF.value in [
mode.value for mode in binary_light.supported_color_modes
], (
f"Binary light has wrong color mode values. Expected {ColorMode.ON_OFF.value} (ON_OFF), got: "
f"{[mode.value for mode in binary_light.supported_color_modes]}"
)
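These assertions guard against encoding bit positions instead of enum values; a hypothetical decoder sketch of the correct direction (the bitmask-over-ordered-modes representation is an assumption for illustration, not the ESPHome iterator itself):

from aioesphomeapi.model import ColorMode


def modes_from_mask(mask: int, ordered_modes: list[ColorMode]) -> list[ColorMode]:
    # Emit the ColorMode members themselves so the API encodes e.g. ColorMode.RGB.value,
    # never the bit index the mode happened to occupy in the mask.
    return [mode for bit, mode in enumerate(ordered_modes) if mask & (1 << bit)]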
async def wait_for_state_change(key: int, timeout: float = 1.0) -> Any:
"""Wait for a state change for the given entity key."""

View File

@@ -0,0 +1,203 @@
"""Test ESPHome queued script functionality."""
from __future__ import annotations
import asyncio
import re
import pytest
from .types import APIClientConnectedFactory, RunCompiledFunction
@pytest.mark.asyncio
async def test_script_queued(
yaml_config: str,
run_compiled: RunCompiledFunction,
api_client_connected: APIClientConnectedFactory,
) -> None:
"""Test comprehensive queued script functionality."""
loop = asyncio.get_running_loop()
# Track all test results
test_results = {
"queue_depth": {"processed": [], "rejections": 0},
"ring_buffer": {"start_order": [], "end_order": []},
"stop": {"processed": [], "stop_logged": False},
"rejection": {"processed": [], "rejections": 0},
"no_params": {"executions": 0},
}
# Patterns for Test 1: Queue depth
queue_start = re.compile(r"Queue test: START item (\d+)")
queue_end = re.compile(r"Queue test: END item (\d+)")
queue_reject = re.compile(r"Script 'queue_depth_script' max instances")
# Patterns for Test 2: Ring buffer
ring_start = re.compile(r"Ring buffer: START '([A-Z])'")
ring_end = re.compile(r"Ring buffer: END '([A-Z])'")
# Patterns for Test 3: Stop
stop_start = re.compile(r"Stop test: START (\d+)")
stop_log = re.compile(r"STOPPING script now")
# Patterns for Test 4: Rejection
reject_start = re.compile(r"Rejection test: START (\d+)")
reject_end = re.compile(r"Rejection test: END (\d+)")
reject_reject = re.compile(r"Script 'rejection_script' max instances")
# Patterns for Test 5: No params
no_params_end = re.compile(r"No params: END")
# Test completion futures
test1_complete = loop.create_future()
test2_complete = loop.create_future()
test3_complete = loop.create_future()
test4_complete = loop.create_future()
test5_complete = loop.create_future()
def check_output(line: str) -> None:
"""Check log output for all test messages."""
# Test 1: Queue depth
if match := queue_start.search(line):
item = int(match.group(1))
if item not in test_results["queue_depth"]["processed"]:
test_results["queue_depth"]["processed"].append(item)
if match := queue_end.search(line):
item = int(match.group(1))
if item == 5 and not test1_complete.done():
test1_complete.set_result(True)
if queue_reject.search(line):
test_results["queue_depth"]["rejections"] += 1
# Test 2: Ring buffer
if match := ring_start.search(line):
msg = match.group(1)
test_results["ring_buffer"]["start_order"].append(msg)
if match := ring_end.search(line):
msg = match.group(1)
test_results["ring_buffer"]["end_order"].append(msg)
if (
len(test_results["ring_buffer"]["end_order"]) == 3
and not test2_complete.done()
):
test2_complete.set_result(True)
# Test 3: Stop
if match := stop_start.search(line):
item = int(match.group(1))
if item not in test_results["stop"]["processed"]:
test_results["stop"]["processed"].append(item)
if stop_log.search(line):
test_results["stop"]["stop_logged"] = True
# Give time for any queued items to be cleared
if not test3_complete.done():
loop.call_later(
0.3,
lambda: test3_complete.set_result(True)
if not test3_complete.done()
else None,
)
# Test 4: Rejection
if match := reject_start.search(line):
item = int(match.group(1))
if item not in test_results["rejection"]["processed"]:
test_results["rejection"]["processed"].append(item)
if match := reject_end.search(line):
item = int(match.group(1))
if item == 3 and not test4_complete.done():
test4_complete.set_result(True)
if reject_reject.search(line):
test_results["rejection"]["rejections"] += 1
# Test 5: No params
if no_params_end.search(line):
test_results["no_params"]["executions"] += 1
if (
test_results["no_params"]["executions"] == 3
and not test5_complete.done()
):
test5_complete.set_result(True)
async with (
run_compiled(yaml_config, line_callback=check_output),
api_client_connected() as client,
):
# Get services
_, services = await client.list_entities_services()
# Test 1: Queue depth limit
test_service = next((s for s in services if s.name == "test_queue_depth"), None)
assert test_service is not None, "test_queue_depth service not found"
client.execute_service(test_service, {})
await asyncio.wait_for(test1_complete, timeout=2.0)
await asyncio.sleep(0.1) # Give time for rejections
# Verify Test 1
assert sorted(test_results["queue_depth"]["processed"]) == [1, 2, 3, 4, 5], (
f"Test 1: Expected to process items 1-5 (max_runs=5 means 5 total), got {sorted(test_results['queue_depth']['processed'])}"
)
assert test_results["queue_depth"]["rejections"] >= 2, (
"Test 1: Expected at least 2 rejection warnings (items 6-7 should be rejected)"
)
# Test 2: Ring buffer order
test_service = next((s for s in services if s.name == "test_ring_buffer"), None)
assert test_service is not None, "test_ring_buffer service not found"
client.execute_service(test_service, {})
await asyncio.wait_for(test2_complete, timeout=2.0)
# Verify Test 2
assert test_results["ring_buffer"]["start_order"] == ["A", "B", "C"], (
f"Test 2: Expected start order [A, B, C], got {test_results['ring_buffer']['start_order']}"
)
assert test_results["ring_buffer"]["end_order"] == ["A", "B", "C"], (
f"Test 2: Expected end order [A, B, C], got {test_results['ring_buffer']['end_order']}"
)
# Test 3: Stop clears queue
test_service = next((s for s in services if s.name == "test_stop_clears"), None)
assert test_service is not None, "test_stop_clears service not found"
client.execute_service(test_service, {})
await asyncio.wait_for(test3_complete, timeout=2.0)
# Verify Test 3
assert test_results["stop"]["stop_logged"], (
"Test 3: Stop command was not logged"
)
assert test_results["stop"]["processed"] == [1], (
f"Test 3: Expected only item 1 to process, got {test_results['stop']['processed']}"
)
# Test 4: Rejection enforcement (max_runs=3)
test_service = next((s for s in services if s.name == "test_rejection"), None)
assert test_service is not None, "test_rejection service not found"
client.execute_service(test_service, {})
await asyncio.wait_for(test4_complete, timeout=2.0)
await asyncio.sleep(0.1) # Give time for rejections
# Verify Test 4
assert sorted(test_results["rejection"]["processed"]) == [1, 2, 3], (
f"Test 4: Expected to process items 1-3 (max_runs=3 means 3 total), got {sorted(test_results['rejection']['processed'])}"
)
assert test_results["rejection"]["rejections"] == 5, (
f"Test 4: Expected 5 rejections (items 4-8), got {test_results['rejection']['rejections']}"
)
# Test 5: No parameters
test_service = next((s for s in services if s.name == "test_no_params"), None)
assert test_service is not None, "test_no_params service not found"
client.execute_service(test_service, {})
await asyncio.wait_for(test5_complete, timeout=2.0)
# Verify Test 5
assert test_results["no_params"]["executions"] == 3, (
f"Test 5: Expected 3 executions, got {test_results['no_params']['executions']}"
)

View File

@@ -8,6 +8,7 @@ substitutions:
area: 25
numberOne: 1
var1: 79
double_width: 14
test_list:
- The area is 56
- 56
@@ -25,3 +26,4 @@ test_list:
- ord("a") = 97
- chr(97) = a
- len([1,2,3]) = 3
- width = 7, double_width = 14

View File

@@ -8,6 +8,7 @@ substitutions:
area: 25
numberOne: 1
var1: 79
double_width: ${width * 2}
test_list:
- "The area is ${width * height}"
@@ -23,3 +24,4 @@ test_list:
- ord("a") = ${ ord("a") }
- chr(97) = ${ chr(97) }
- len([1,2,3]) = ${ len([1,2,3]) }
- width = ${width}, double_width = ${double_width}
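One fixture above holds the resolved expectation and the other the templated input, with double_width defined in terms of width; a toy, single-level resolver showing the idea, using plain eval where ESPHome's resolver is recursive and expression-based (width is 7 in the fixture):

import re

EXPR = re.compile(r"^\$\{(.+)\}$")


def resolve_once(substitutions: dict) -> dict:
    """Single-level toy: evaluate ${...} values against the other substitutions."""
    resolved = dict(substitutions)
    for key, value in substitutions.items():
        if isinstance(value, str) and (match := EXPR.match(value.strip())):
            resolved[key] = eval(match.group(1), {}, resolved)
    return resolved


print(resolve_once({"width": 7, "double_width": "${width * 2}"}))
# {'width': 7, 'double_width': 14}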

View File

@@ -0,0 +1,9 @@
substitutions:
A: component1
B: component2
C: component3
some_component:
- id: component1
value: 2
- id: component2
value: 5

View File

@@ -0,0 +1,22 @@
substitutions:
A: component1
B: component2
C: component3
packages:
- some_component:
- id: component1
value: 1
- id: !extend ${B}
value: 4
- id: !extend ${B}
value: 5
- id: component3
value: 6
some_component:
- id: !extend ${A}
value: 2
- id: component2
value: 3
- id: !remove ${C}

View File

@@ -570,6 +570,13 @@ class TestEsphomeCore:
assert target.address == "4.3.2.1"
def test_address__openthread(self, target):
target.name = "test-device"
target.config = {}
target.config[const.CONF_OPENTHREAD] = {}
assert target.address == "test-device.local"
def test_is_esp32(self, target):
target.data[const.KEY_CORE] = {const.KEY_TARGET_PLATFORM: "esp32"}

View File

@@ -17,10 +17,12 @@ from esphome import platformio_api
from esphome.__main__ import (
Purpose,
choose_upload_log_host,
command_analyze_memory,
command_clean_all,
command_rename,
command_update_all,
command_wizard,
detect_external_components,
get_port_type,
has_ip_address,
has_mqtt,
@@ -226,13 +228,47 @@ def mock_run_external_process() -> Generator[Mock]:
@pytest.fixture
def mock_run_external_command() -> Generator[Mock]:
"""Mock run_external_command for testing."""
def mock_run_external_command_main() -> Generator[Mock]:
"""Mock run_external_command in __main__ module (different from platformio_api)."""
with patch("esphome.__main__.run_external_command") as mock:
mock.return_value = 0 # Default to success
yield mock
@pytest.fixture
def mock_write_cpp() -> Generator[Mock]:
"""Mock write_cpp for testing."""
with patch("esphome.__main__.write_cpp") as mock:
mock.return_value = 0 # Default to success
yield mock
@pytest.fixture
def mock_compile_program() -> Generator[Mock]:
"""Mock compile_program for testing."""
with patch("esphome.__main__.compile_program") as mock:
mock.return_value = 0 # Default to success
yield mock
@pytest.fixture
def mock_get_esphome_components() -> Generator[Mock]:
"""Mock get_esphome_components for testing."""
with patch("esphome.analyze_memory.helpers.get_esphome_components") as mock:
mock.return_value = {"logger", "api", "ota"}
yield mock
@pytest.fixture
def mock_memory_analyzer_cli() -> Generator[Mock]:
"""Mock MemoryAnalyzerCLI for testing."""
with patch("esphome.analyze_memory.cli.MemoryAnalyzerCLI") as mock_class:
mock_analyzer = MagicMock()
mock_analyzer.generate_report.return_value = "Mock Memory Report"
mock_class.return_value = mock_analyzer
yield mock_class
def test_choose_upload_log_host_with_string_default() -> None:
"""Test with a single string default device."""
setup_core()
@@ -321,12 +357,14 @@ def test_choose_upload_log_host_with_serial_device_no_ports(
) -> None:
"""Test SERIAL device when no serial ports are found."""
setup_core()
result = choose_upload_log_host(
default="SERIAL",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
with pytest.raises(
EsphomeError, match="All specified devices .* could not be resolved"
):
choose_upload_log_host(
default="SERIAL",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert "No serial ports found, skipping SERIAL device" in caplog.text
@@ -367,12 +405,14 @@ def test_choose_upload_log_host_with_ota_device_with_api_config() -> None:
"""Test OTA device when API is configured (no upload without OTA in config)."""
setup_core(config={CONF_API: {}}, address="192.168.1.100")
result = choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
with pytest.raises(
EsphomeError, match="All specified devices .* could not be resolved"
):
choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
def test_choose_upload_log_host_with_ota_device_with_api_config_logging() -> None:
@@ -405,12 +445,14 @@ def test_choose_upload_log_host_with_ota_device_no_fallback() -> None:
"""Test OTA device with no valid fallback options."""
setup_core()
result = choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
with pytest.raises(
EsphomeError, match="All specified devices .* could not be resolved"
):
choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
@pytest.mark.usefixtures("mock_choose_prompt")
@@ -615,21 +657,19 @@ def test_choose_upload_log_host_empty_defaults_list() -> None:
@pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging")
def test_choose_upload_log_host_all_devices_unresolved(
caplog: pytest.LogCaptureFixture,
) -> None:
def test_choose_upload_log_host_all_devices_unresolved() -> None:
"""Test when all specified devices cannot be resolved."""
setup_core()
result = choose_upload_log_host(
default=["SERIAL", "OTA"],
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
assert (
"All specified devices: ['SERIAL', 'OTA'] could not be resolved." in caplog.text
)
with pytest.raises(
EsphomeError,
match=r"All specified devices \['SERIAL', 'OTA'\] could not be resolved",
):
choose_upload_log_host(
default=["SERIAL", "OTA"],
check_default=None,
purpose=Purpose.UPLOADING,
)
@pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging")
@@ -762,12 +802,14 @@ def test_choose_upload_log_host_no_address_with_ota_config() -> None:
"""Test OTA device when OTA is configured but no address is set."""
setup_core(config={CONF_OTA: {}})
result = choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
assert result == []
with pytest.raises(
EsphomeError, match="All specified devices .* could not be resolved"
):
choose_upload_log_host(
default="OTA",
check_default=None,
purpose=Purpose.UPLOADING,
)
@dataclass
@@ -833,7 +875,7 @@ def test_upload_program_serial_esp8266_with_file(
def test_upload_using_esptool_path_conversion(
tmp_path: Path,
mock_run_external_command: Mock,
mock_run_external_command_main: Mock,
mock_get_idedata: Mock,
) -> None:
"""Test upload_using_esptool properly converts Path objects to strings for esptool.
@@ -869,10 +911,10 @@ def test_upload_using_esptool_path_conversion(
assert result == 0
# Verify that run_external_command was called
assert mock_run_external_command.call_count == 1
assert mock_run_external_command_main.call_count == 1
# Get the actual call arguments
call_args = mock_run_external_command.call_args[0]
call_args = mock_run_external_command_main.call_args[0]
# The first argument should be esptool.main function,
# followed by the command arguments
@@ -911,7 +953,7 @@ def test_upload_using_esptool_path_conversion(
def test_upload_using_esptool_with_file_path(
tmp_path: Path,
mock_run_external_command: Mock,
mock_run_external_command_main: Mock,
) -> None:
"""Test upload_using_esptool with a custom file that's a Path object."""
setup_core(platform=PLATFORM_ESP8266, tmp_path=tmp_path, name="test")
@@ -928,10 +970,10 @@ def test_upload_using_esptool_with_file_path(
assert result == 0
# Verify that run_external_command was called
mock_run_external_command.assert_called_once()
mock_run_external_command_main.assert_called_once()
# Get the actual call arguments
call_args = mock_run_external_command.call_args[0]
call_args = mock_run_external_command_main.call_args[0]
cmd_list = list(call_args[1:]) # Skip the esptool.main function
# Find the firmware path in the command
@@ -2267,3 +2309,226 @@ def test_show_logs_api_mqtt_timeout_fallback(
# Verify run_logs was called with only the static IP (MQTT failed)
mock_run_logs.assert_called_once_with(CORE.config, ["192.168.1.100"])
def test_detect_external_components_no_external(
mock_get_esphome_components: Mock,
) -> None:
"""Test detect_external_components with no external components."""
config = {
CONF_ESPHOME: {CONF_NAME: "test_device"},
"logger": {},
"api": {},
}
result = detect_external_components(config)
assert result == set()
mock_get_esphome_components.assert_called_once()
def test_detect_external_components_with_external(
mock_get_esphome_components: Mock,
) -> None:
"""Test detect_external_components detects external components."""
config = {
CONF_ESPHOME: {CONF_NAME: "test_device"},
"logger": {}, # Built-in
"api": {}, # Built-in
"my_custom_sensor": {}, # External
"another_custom": {}, # External
"external_components": [], # Special key, not a component
"substitutions": {}, # Special key, not a component
}
result = detect_external_components(config)
assert result == {"my_custom_sensor", "another_custom"}
mock_get_esphome_components.assert_called_once()
def test_detect_external_components_filters_special_keys(
mock_get_esphome_components: Mock,
) -> None:
"""Test detect_external_components filters out special config keys."""
config = {
CONF_ESPHOME: {CONF_NAME: "test_device"},
"substitutions": {"key": "value"},
"packages": {},
"globals": [],
"external_components": [],
"<<": {}, # YAML merge key
}
result = detect_external_components(config)
assert result == set()
mock_get_esphome_components.assert_called_once()
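A compact restatement of the rule these three tests pin down (inferred from the tests, not the actual helper body in esphome/__main__.py):

# Keys treated as configuration machinery rather than components; "esphome"
# and the YAML merge key are included per the tests above.
SPECIAL_KEYS = {"esphome", "substitutions", "packages", "globals", "external_components", "<<"}


def detect_external_components_sketch(config: dict, builtin_components: set[str]) -> set[str]:
    """Return config keys that are neither built-in components nor special keys."""
    return {
        key
        for key in config
        if key not in builtin_components and key not in SPECIAL_KEYS
    }


print(detect_external_components_sketch(
    {"esphome": {}, "logger": {}, "my_custom_sensor": {}, "external_components": []},
    builtin_components={"logger", "api", "ota"},
))  # {'my_custom_sensor'}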
def test_command_analyze_memory_success(
tmp_path: Path,
capfd: CaptureFixture[str],
mock_write_cpp: Mock,
mock_compile_program: Mock,
mock_get_idedata: Mock,
mock_get_esphome_components: Mock,
mock_memory_analyzer_cli: Mock,
) -> None:
"""Test command_analyze_memory with successful compilation and analysis."""
setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
# Create firmware.elf file
firmware_path = (
tmp_path / ".esphome" / "build" / "test_device" / ".pioenvs" / "test_device"
)
firmware_path.mkdir(parents=True, exist_ok=True)
firmware_elf = firmware_path / "firmware.elf"
firmware_elf.write_text("mock elf file")
# Mock idedata
mock_idedata_obj = MagicMock(spec=platformio_api.IDEData)
mock_idedata_obj.firmware_elf_path = str(firmware_elf)
mock_idedata_obj.objdump_path = "/path/to/objdump"
mock_idedata_obj.readelf_path = "/path/to/readelf"
mock_get_idedata.return_value = mock_idedata_obj
config = {
CONF_ESPHOME: {CONF_NAME: "test_device"},
"logger": {},
}
args = MockArgs()
result = command_analyze_memory(args, config)
assert result == 0
# Verify compilation was done
mock_write_cpp.assert_called_once_with(config)
mock_compile_program.assert_called_once_with(args, config)
# Verify analyzer was created with correct parameters
mock_memory_analyzer_cli.assert_called_once_with(
str(firmware_elf),
"/path/to/objdump",
"/path/to/readelf",
set(), # No external components
)
# Verify analysis was run
mock_analyzer = mock_memory_analyzer_cli.return_value
mock_analyzer.analyze.assert_called_once()
mock_analyzer.generate_report.assert_called_once()
# Verify report was printed
captured = capfd.readouterr()
assert "Mock Memory Report" in captured.out
def test_command_analyze_memory_with_external_components(
tmp_path: Path,
mock_write_cpp: Mock,
mock_compile_program: Mock,
mock_get_idedata: Mock,
mock_get_esphome_components: Mock,
mock_memory_analyzer_cli: Mock,
) -> None:
"""Test command_analyze_memory detects external components."""
setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
# Create firmware.elf file
firmware_path = (
tmp_path / ".esphome" / "build" / "test_device" / ".pioenvs" / "test_device"
)
firmware_path.mkdir(parents=True, exist_ok=True)
firmware_elf = firmware_path / "firmware.elf"
firmware_elf.write_text("mock elf file")
# Mock idedata
mock_idedata_obj = MagicMock(spec=platformio_api.IDEData)
mock_idedata_obj.firmware_elf_path = str(firmware_elf)
mock_idedata_obj.objdump_path = "/path/to/objdump"
mock_idedata_obj.readelf_path = "/path/to/readelf"
mock_get_idedata.return_value = mock_idedata_obj
config = {
CONF_ESPHOME: {CONF_NAME: "test_device"},
"logger": {},
"my_custom_component": {"param": "value"}, # External component
"external_components": [{"source": "github://user/repo"}], # Not a component
}
args = MockArgs()
result = command_analyze_memory(args, config)
assert result == 0
# Verify analyzer was created with external components detected
mock_memory_analyzer_cli.assert_called_once_with(
str(firmware_elf),
"/path/to/objdump",
"/path/to/readelf",
{"my_custom_component"}, # External component detected
)
def test_command_analyze_memory_write_cpp_fails(
tmp_path: Path,
mock_write_cpp: Mock,
) -> None:
"""Test command_analyze_memory when write_cpp fails."""
setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
config = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
args = MockArgs()
mock_write_cpp.return_value = 1 # Failure
result = command_analyze_memory(args, config)
assert result == 1
mock_write_cpp.assert_called_once_with(config)
def test_command_analyze_memory_compile_fails(
tmp_path: Path,
mock_write_cpp: Mock,
mock_compile_program: Mock,
) -> None:
"""Test command_analyze_memory when compilation fails."""
setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
config = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
args = MockArgs()
mock_compile_program.return_value = 1 # Compilation failed
result = command_analyze_memory(args, config)
assert result == 1
mock_write_cpp.assert_called_once_with(config)
mock_compile_program.assert_called_once_with(args, config)
def test_command_analyze_memory_no_idedata(
tmp_path: Path,
caplog: pytest.LogCaptureFixture,
mock_write_cpp: Mock,
mock_compile_program: Mock,
mock_get_idedata: Mock,
) -> None:
"""Test command_analyze_memory when idedata cannot be retrieved."""
setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
config = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
args = MockArgs()
mock_get_idedata.return_value = None # Failed to get idedata
with caplog.at_level(logging.ERROR):
result = command_analyze_memory(args, config)
assert result == 1
assert "Failed to get IDE data for memory analysis" in caplog.text

View File

@@ -4,6 +4,7 @@ from pathlib import Path
from esphome import config as config_module, yaml_util
from esphome.components import substitutions
from esphome.config import resolve_extend_remove
from esphome.config_helpers import merge_config
from esphome.const import CONF_PACKAGES, CONF_SUBSTITUTIONS
from esphome.core import CORE
@@ -81,6 +82,8 @@ def test_substitutions_fixtures(fixture_path):
substitutions.do_substitution_pass(config, None)
resolve_extend_remove(config)
# Also load expected using ESPHome's loader, or use {} if missing and DEV_MODE
if expected_path.is_file():
expected = yaml_util.load_yaml(expected_path)