Mirror of https://github.com/esphome/esphome.git (synced 2026-02-11 18:21:52 +00:00)

Compare commits: automation ... cache_gith (8 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 4c9e4d30e9 |  |
|  | db42983f0c |  |
|  | 20c65f70ed |  |
|  | 38e31e328c |  |
|  | 58cecff778 |  |
|  | 1946656ea8 |  |
|  | c9700a0450 |  |
|  | 0eab64ffe5 |  |
.github/workflows/ci-memory-impact-comment.yml (vendored, 19 lines changed)
@@ -28,23 +28,20 @@ jobs:
run: |
# Get PR details by searching for PR with matching head SHA
# The workflow_run.pull_requests field is often empty for forks
# Use paginate to handle repos with many open PRs
head_sha="${{ github.event.workflow_run.head_sha }}"
pr_data=$(gh api --paginate "/repos/${{ github.repository }}/pulls" \
--jq ".[] | select(.head.sha == \"$head_sha\") | {number: .number, base_ref: .base.ref}" \
| head -n 1)

if [ -z "$pr_data" ]; then
pr_data=$(gh api "/repos/${{ github.repository }}/commits/$head_sha/pulls" \
--jq '.[0] | {number: .number, base_ref: .base.ref}')
if [ -z "$pr_data" ] || [ "$pr_data" == "null" ]; then
echo "No PR found for SHA $head_sha, skipping"
echo "skip=true" >> "$GITHUB_OUTPUT"
echo "skip=true" >> $GITHUB_OUTPUT
exit 0
fi

pr_number=$(echo "$pr_data" | jq -r '.number')
base_ref=$(echo "$pr_data" | jq -r '.base_ref')

echo "pr_number=$pr_number" >> "$GITHUB_OUTPUT"
echo "base_ref=$base_ref" >> "$GITHUB_OUTPUT"
echo "pr_number=$pr_number" >> $GITHUB_OUTPUT
echo "base_ref=$base_ref" >> $GITHUB_OUTPUT
echo "Found PR #$pr_number targeting base branch: $base_ref"

- name: Check out code from base repository
@@ -90,9 +87,9 @@ jobs:
if: steps.pr.outputs.skip != 'true'
run: |
if [ -f ./memory-analysis/memory-analysis-target.json ] && [ -f ./memory-analysis/memory-analysis-pr.json ]; then
echo "found=true" >> "$GITHUB_OUTPUT"
echo "found=true" >> $GITHUB_OUTPUT
else
echo "found=false" >> "$GITHUB_OUTPUT"
echo "found=false" >> $GITHUB_OUTPUT
echo "Memory analysis artifacts not found, skipping comment"
fi

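The step above resolves a workflow_run head SHA to its pull request via `gh api`. As a debugging aid, here is a minimal Python sketch of the same lookup against the public GitHub REST API; the environment variable names and the direct use of the `/commits/{sha}/pulls` endpoint are assumptions for illustration, not something taken from the workflow itself.

```python
# Hypothetical helper mirroring the workflow's PR lookup: given a commit SHA,
# ask GitHub which PR (if any) is associated with that commit.
import json
import os
import urllib.request


def find_pr_for_sha(repo: str, head_sha: str, token: str) -> dict | None:
    """Return {"number": ..., "base_ref": ...} for the first PR associated with head_sha, or None."""
    url = f"https://api.github.com/repos/{repo}/commits/{head_sha}/pulls"
    req = urllib.request.Request(url, headers={
        "Authorization": f"Bearer {token}",
        "Accept": "application/vnd.github+json",
    })
    with urllib.request.urlopen(req) as resp:
        pulls = json.load(resp)
    if not pulls:
        return None
    pr = pulls[0]
    return {"number": pr["number"], "base_ref": pr["base"]["ref"]}


if __name__ == "__main__":
    # Placeholder environment variables, matching the names used in Actions.
    result = find_pr_for_sha(
        os.environ["GITHUB_REPOSITORY"],
        os.environ["HEAD_SHA"],
        os.environ["GITHUB_TOKEN"],
    )
    print(result or "No PR found, skipping")
```
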
.github/workflows/ci.yml (vendored, 205 lines changed)
@@ -170,13 +170,11 @@ jobs:
outputs:
integration-tests: ${{ steps.determine.outputs.integration-tests }}
clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
clang-tidy-mode: ${{ steps.determine.outputs.clang-tidy-mode }}
python-linters: ${{ steps.determine.outputs.python-linters }}
changed-components: ${{ steps.determine.outputs.changed-components }}
changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }}
component-test-count: ${{ steps.determine.outputs.component-test-count }}
changed-cpp-file-count: ${{ steps.determine.outputs.changed-cpp-file-count }}
memory_impact: ${{ steps.determine.outputs.memory-impact }}
steps:
- name: Check out code from GitHub
@@ -202,13 +200,11 @@ jobs:
# Extract individual fields
echo "integration-tests=$(echo "$output" | jq -r '.integration_tests')" >> $GITHUB_OUTPUT
echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
echo "clang-tidy-mode=$(echo "$output" | jq -r '.clang_tidy_mode')" >> $GITHUB_OUTPUT
echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
echo "changed-cpp-file-count=$(echo "$output" | jq -r '.changed_cpp_file_count')" >> $GITHUB_OUTPUT
echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT

integration-tests:
@@ -247,7 +243,7 @@ jobs:
. venv/bin/activate
pytest -vv --no-cov --tb=native -n auto tests/integration/

clang-tidy-single:
clang-tidy:
name: ${{ matrix.name }}
runs-on: ubuntu-24.04
needs:
@@ -265,6 +261,22 @@ jobs:
name: Run script/clang-tidy for ESP8266
options: --environment esp8266-arduino-tidy --grep USE_ESP8266
pio_cache_key: tidyesp8266
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 1/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 2/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 3/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 4/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
pio_cache_key: tidyesp32
- id: clang-tidy
name: Run script/clang-tidy for ESP32 IDF
options: --environment esp32-idf-tidy --grep USE_ESP_IDF
@@ -345,166 +357,6 @@ jobs:
# yamllint disable-line rule:line-length
if: always()

clang-tidy-nosplit:
name: Run script/clang-tidy for ESP32 Arduino
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.clang-tidy-mode == 'nosplit'
env:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2

- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}

- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"

- name: Check if full clang-tidy scan needed
id: check_full_scan
run: |
. venv/bin/activate
if python script/clang_tidy_hash.py --check; then
echo "full_scan=true" >> $GITHUB_OUTPUT
echo "reason=hash_changed" >> $GITHUB_OUTPUT
else
echo "full_scan=false" >> $GITHUB_OUTPUT
echo "reason=normal" >> $GITHUB_OUTPUT
fi

- name: Run clang-tidy
run: |
. venv/bin/activate
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
echo "Running FULL clang-tidy scan (hash changed)"
script/clang-tidy --all-headers --fix --environment esp32-arduino-tidy
else
echo "Running clang-tidy on changed files only"
script/clang-tidy --all-headers --fix --changed --environment esp32-arduino-tidy
fi
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps

- name: Suggested changes
run: script/ci-suggest-changes
if: always()

clang-tidy-split:
name: ${{ matrix.name }}
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.clang-tidy-mode == 'split'
env:
GH_TOKEN: ${{ github.token }}
strategy:
fail-fast: false
max-parallel: 1
matrix:
include:
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 1/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 2/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 3/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 4/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4

steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2

- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}

- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"

- name: Check if full clang-tidy scan needed
id: check_full_scan
run: |
. venv/bin/activate
if python script/clang_tidy_hash.py --check; then
echo "full_scan=true" >> $GITHUB_OUTPUT
echo "reason=hash_changed" >> $GITHUB_OUTPUT
else
echo "full_scan=false" >> $GITHUB_OUTPUT
echo "reason=normal" >> $GITHUB_OUTPUT
fi

- name: Run clang-tidy
run: |
. venv/bin/activate
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
echo "Running FULL clang-tidy scan (hash changed)"
script/clang-tidy --all-headers --fix ${{ matrix.options }}
else
echo "Running clang-tidy on changed files only"
script/clang-tidy --all-headers --fix --changed ${{ matrix.options }}
fi
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps

- name: Suggested changes
run: script/ci-suggest-changes
if: always()

test-build-components-splitter:
name: Split components for intelligent grouping (40 weighted per batch)
runs-on: ubuntu-24.04
@@ -580,6 +432,21 @@ jobs:
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}

- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-test-${{ hashFiles('platformio.ini') }}

- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-test-${{ hashFiles('platformio.ini') }}

- name: Validate and compile components with intelligent grouping
run: |
. venv/bin/activate
@@ -912,13 +779,13 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Download target analysis JSON
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: memory-analysis-target
path: ./memory-analysis
continue-on-error: true
- name: Download PR analysis JSON
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: memory-analysis-pr
path: ./memory-analysis
@@ -945,9 +812,7 @@ jobs:
- pylint
- pytest
- integration-tests
- clang-tidy-single
- clang-tidy-nosplit
- clang-tidy-split
- clang-tidy
- determine-jobs
- test-build-components-splitter
- test-build-components-split

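The clang-tidy jobs above branch on `script/clang_tidy_hash.py --check` to decide between a full scan and a changed-files-only scan. That script is not part of this diff; the sketch below only illustrates the general hash-gating pattern it appears to implement, with made-up file names and hash location.

```python
# Illustrative sketch of a hash-gating check (not the real script/clang_tidy_hash.py):
# exit code 0 means "stored hash no longer matches, run a full scan", 1 otherwise,
# mirroring how the workflow's if/else branches on the command's success.
import hashlib
import sys
from pathlib import Path

CONFIG_FILES = [Path(".clang-tidy"), Path("platformio.ini")]  # assumed inputs
HASH_FILE = Path(".clang-tidy.hash")  # hypothetical location of the stored hash


def current_hash() -> str:
    h = hashlib.sha256()
    for f in CONFIG_FILES:
        h.update(f.read_bytes())
    return h.hexdigest()


def main() -> int:
    stored = HASH_FILE.read_text().strip() if HASH_FILE.exists() else ""
    return 0 if current_hash() != stored else 1


if __name__ == "__main__":
    sys.exit(main())
```
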
@@ -161,7 +161,6 @@ esphome/components/esp32_rmt_led_strip/* @jesserockz
esphome/components/esp8266/* @esphome/core
esphome/components/esp_ldo/* @clydebarrow
esphome/components/espnow/* @jesserockz
esphome/components/espnow/packet_transport/* @EasilyBoredEngineer
esphome/components/ethernet_info/* @gtjadsonsantos
esphome/components/event/* @nohat
esphome/components/exposure_notifications/* @OttoWinter

@@ -62,40 +62,6 @@ from esphome.util import (

_LOGGER = logging.getLogger(__name__)

# Special non-component keys that appear in configs
_NON_COMPONENT_KEYS = frozenset(
{
CONF_ESPHOME,
"substitutions",
"packages",
"globals",
"external_components",
"<<",
}
)


def detect_external_components(config: ConfigType) -> set[str]:
"""Detect external/custom components in the configuration.

External components are those that appear in the config but are not
part of ESPHome's built-in components and are not special config keys.

Args:
config: The ESPHome configuration dictionary

Returns:
A set of external component names
"""
from esphome.analyze_memory.helpers import get_esphome_components

builtin_components = get_esphome_components()
return {
key
for key in config
if key not in builtin_components and key not in _NON_COMPONENT_KEYS
}

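A minimal usage sketch for `detect_external_components` as defined above; the config dict is a made-up example standing in for a parsed ESPHome configuration, not a real one from this change.

```python
# Hypothetical parsed config; keys mirror a typical ESPHome YAML after loading.
# Assumes detect_external_components from the module shown above is importable.
config = {
    "esphome": {"name": "kitchen"},        # CONF_ESPHOME -> special key, ignored
    "substitutions": {"room": "kitchen"},  # special key, ignored
    "wifi": {},                            # built-in component, ignored
    "my_custom_fan": {},                   # unknown to ESPHome -> reported as external
}

external = detect_external_components(config)
print(external)  # expected: {"my_custom_fan"}
```
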
class ArgsProtocol(Protocol):
device: list[str] | None
@@ -731,13 +697,6 @@ def command_vscode(args: ArgsProtocol) -> int | None:


def command_compile(args: ArgsProtocol, config: ConfigType) -> int | None:
# Set memory analysis options in config
if args.analyze_memory:
config.setdefault(CONF_ESPHOME, {})["analyze_memory"] = True

if args.memory_report:
config.setdefault(CONF_ESPHOME, {})["memory_report_file"] = args.memory_report

exit_code = write_cpp(config)
if exit_code != 0:
return exit_code
@@ -933,54 +892,6 @@ def command_idedata(args: ArgsProtocol, config: ConfigType) -> int:
return 0


def command_analyze_memory(args: ArgsProtocol, config: ConfigType) -> int:
"""Analyze memory usage by component.

This command compiles the configuration and performs memory analysis.
Compilation is fast if sources haven't changed (just relinking).
"""
from esphome import platformio_api
from esphome.analyze_memory.cli import MemoryAnalyzerCLI

# Always compile to ensure fresh data (fast if no changes - just relinks)
exit_code = write_cpp(config)
if exit_code != 0:
return exit_code
exit_code = compile_program(args, config)
if exit_code != 0:
return exit_code
_LOGGER.info("Successfully compiled program.")

# Get idedata for analysis
idedata = platformio_api.get_idedata(config)
if idedata is None:
_LOGGER.error("Failed to get IDE data for memory analysis")
return 1

firmware_elf = Path(idedata.firmware_elf_path)

# Extract external components from config
external_components = detect_external_components(config)
_LOGGER.debug("Detected external components: %s", external_components)

# Perform memory analysis
_LOGGER.info("Analyzing memory usage...")
analyzer = MemoryAnalyzerCLI(
str(firmware_elf),
idedata.objdump_path,
idedata.readelf_path,
external_components,
)
analyzer.analyze()

# Generate and display report
report = analyzer.generate_report()
print()
print(report)

return 0


def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
new_name = args.name
for c in new_name:
@@ -1096,7 +1007,6 @@ POST_CONFIG_ACTIONS = {
"idedata": command_idedata,
"rename": command_rename,
"discover": command_discover,
"analyze-memory": command_analyze_memory,
}

SIMPLE_CONFIG_ACTIONS = [
@@ -1199,17 +1109,6 @@ def parse_args(argv):
help="Only generate source code, do not compile.",
action="store_true",
)
parser_compile.add_argument(
"--analyze-memory",
help="Analyze and display memory usage by component after compilation.",
action="store_true",
)
parser_compile.add_argument(
"--memory-report",
help="Save memory analysis report to a file (supports .json or .txt).",
type=str,
metavar="FILE",
)

parser_upload = subparsers.add_parser(
"upload",
@@ -1393,14 +1292,6 @@ def parse_args(argv):
)
parser_rename.add_argument("name", help="The new name for the device.", type=str)

parser_analyze_memory = subparsers.add_parser(
"analyze-memory",
help="Analyze memory usage by component.",
)
parser_analyze_memory.add_argument(
"configuration", help="Your YAML configuration file(s).", nargs="+"
)

# Keep backward compatibility with the old command line format of
# esphome <config> <command>.
#

@@ -1,7 +1,6 @@
"""CLI interface for memory analysis with report generation."""

from collections import defaultdict
import json
import sys

from . import (
@@ -271,28 +270,6 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):

return "\n".join(lines)

def to_json(self) -> str:
"""Export analysis results as JSON."""
data = {
"components": {
name: {
"text": mem.text_size,
"rodata": mem.rodata_size,
"data": mem.data_size,
"bss": mem.bss_size,
"flash_total": mem.flash_total,
"ram_total": mem.ram_total,
"symbol_count": mem.symbol_count,
}
for name, mem in self.components.items()
},
"totals": {
"flash": sum(c.flash_total for c in self.components.values()),
"ram": sum(c.ram_total for c in self.components.values()),
},
}
return json.dumps(data, indent=2)

def dump_uncategorized_symbols(self, output_file: str | None = None) -> None:
"""Dump uncategorized symbols for analysis."""
# Sort by size descending

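The `to_json` report above is presumably what the `memory-analysis-target` / `memory-analysis-pr` artifacts in the CI workflow carry. A small sketch of how two such reports could be compared, assuming exactly the schema shown (file paths are placeholders matching the artifact names):

```python
import json
from pathlib import Path


def flash_delta(target_path: str, pr_path: str) -> dict[str, int]:
    """Per-component flash change between two to_json() reports (positive = PR grew)."""
    target = json.loads(Path(target_path).read_text())["components"]
    pr = json.loads(Path(pr_path).read_text())["components"]
    deltas = {}
    for name in set(target) | set(pr):
        before = target.get(name, {}).get("flash_total", 0)
        after = pr.get(name, {}).get("flash_total", 0)
        if after != before:
            deltas[name] = after - before
    return deltas


# Example usage (placeholder paths):
# print(flash_delta("memory-analysis/memory-analysis-target.json",
#                   "memory-analysis/memory-analysis-pr.json"))
```
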
@@ -28,7 +28,7 @@ class Anova : public climate::Climate, public esphome::ble_client::BLEClientNode
void dump_config() override;
climate::ClimateTraits traits() override {
auto traits = climate::ClimateTraits();
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
traits.set_supports_current_temperature(true);
traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::ClimateMode::CLIMATE_MODE_HEAT});
traits.set_visual_min_temperature(25.0);
traits.set_visual_max_temperature(100.0);

@@ -155,17 +155,6 @@ def _validate_api_config(config: ConfigType) -> ConfigType:
return config


def _consume_api_sockets(config: ConfigType) -> ConfigType:
"""Register socket needs for API component."""
from esphome.components import socket

# API needs 1 listening socket + typically 3 concurrent client connections
# (not max_connections, which is the upper limit rarely reached)
sockets_needed = 1 + 3
socket.consume_sockets(sockets_needed, "api")(config)
return config


CONFIG_SCHEMA = cv.All(
cv.Schema(
{
@@ -233,7 +222,6 @@ CONFIG_SCHEMA = cv.All(
).extend(cv.COMPONENT_SCHEMA),
cv.rename_key(CONF_SERVICES, CONF_ACTIONS),
_validate_api_config,
_consume_api_sockets,
)

@@ -142,11 +142,6 @@ APIError APINoiseFrameHelper::loop() {
* errno API_ERROR_HANDSHAKE_PACKET_LEN: Packet too big for this phase.
*/
APIError APINoiseFrameHelper::try_read_frame_() {
// Clear buffer when starting a new frame (rx_buf_len_ == 0 means not resuming after WOULD_BLOCK)
if (this->rx_buf_len_ == 0) {
this->rx_buf_.clear();
}

// read header
if (rx_header_buf_len_ < 3) {
// no header information yet

@@ -54,11 +54,6 @@ APIError APIPlaintextFrameHelper::loop() {
* error API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
*/
APIError APIPlaintextFrameHelper::try_read_frame_() {
// Clear buffer when starting a new frame (rx_buf_len_ == 0 means not resuming after WOULD_BLOCK)
if (this->rx_buf_len_ == 0) {
this->rx_buf_.clear();
}

// read header
while (!rx_header_parsed_) {
// Now that we know when the socket is ready, we can read up to 3 bytes

@@ -6,9 +6,6 @@ namespace bang_bang {

static const char *const TAG = "bang_bang.climate";

BangBangClimate::BangBangClimate()
: idle_trigger_(new Trigger<>()), cool_trigger_(new Trigger<>()), heat_trigger_(new Trigger<>()) {}

void BangBangClimate::setup() {
this->sensor_->add_on_state_callback([this](float state) {
this->current_temperature = state;
@@ -34,63 +31,53 @@ void BangBangClimate::setup() {
restore->to_call(this).perform();
} else {
// restore from defaults, change_away handles those for us
if (this->supports_cool_ && this->supports_heat_) {
if (supports_cool_ && supports_heat_) {
this->mode = climate::CLIMATE_MODE_HEAT_COOL;
} else if (this->supports_cool_) {
} else if (supports_cool_) {
this->mode = climate::CLIMATE_MODE_COOL;
} else if (this->supports_heat_) {
} else if (supports_heat_) {
this->mode = climate::CLIMATE_MODE_HEAT;
}
this->change_away_(false);
}
}

void BangBangClimate::control(const climate::ClimateCall &call) {
if (call.get_mode().has_value()) {
if (call.get_mode().has_value())
this->mode = *call.get_mode();
}
if (call.get_target_temperature_low().has_value()) {
if (call.get_target_temperature_low().has_value())
this->target_temperature_low = *call.get_target_temperature_low();
}
if (call.get_target_temperature_high().has_value()) {
if (call.get_target_temperature_high().has_value())
this->target_temperature_high = *call.get_target_temperature_high();
}
if (call.get_preset().has_value()) {
if (call.get_preset().has_value())
this->change_away_(*call.get_preset() == climate::CLIMATE_PRESET_AWAY);
}

this->compute_state_();
this->publish_state();
}

climate::ClimateTraits BangBangClimate::traits() {
auto traits = climate::ClimateTraits();
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE |
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION);
if (this->humidity_sensor_ != nullptr) {
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
}
traits.set_supports_current_temperature(true);
if (this->humidity_sensor_ != nullptr)
traits.set_supports_current_humidity(true);
traits.set_supported_modes({
climate::CLIMATE_MODE_OFF,
});
if (this->supports_cool_) {
if (supports_cool_)
traits.add_supported_mode(climate::CLIMATE_MODE_COOL);
}
if (this->supports_heat_) {
if (supports_heat_)
traits.add_supported_mode(climate::CLIMATE_MODE_HEAT);
}
if (this->supports_cool_ && this->supports_heat_) {
if (supports_cool_ && supports_heat_)
traits.add_supported_mode(climate::CLIMATE_MODE_HEAT_COOL);
}
if (this->supports_away_) {
traits.set_supports_two_point_target_temperature(true);
if (supports_away_) {
traits.set_supported_presets({
climate::CLIMATE_PRESET_HOME,
climate::CLIMATE_PRESET_AWAY,
});
}
traits.set_supports_action(true);
return traits;
}

void BangBangClimate::compute_state_() {
if (this->mode == climate::CLIMATE_MODE_OFF) {
this->switch_to_action_(climate::CLIMATE_ACTION_OFF);
@@ -135,7 +122,6 @@ void BangBangClimate::compute_state_() {

this->switch_to_action_(target_action);
}

void BangBangClimate::switch_to_action_(climate::ClimateAction action) {
if (action == this->action) {
// already in target mode
@@ -180,7 +166,6 @@ void BangBangClimate::switch_to_action_(climate::ClimateAction action) {
this->prev_trigger_ = trig;
this->publish_state();
}

void BangBangClimate::change_away_(bool away) {
if (!away) {
this->target_temperature_low = this->normal_config_.default_temperature_low;
@@ -191,26 +176,22 @@ void BangBangClimate::change_away_(bool away) {
}
this->preset = away ? climate::CLIMATE_PRESET_AWAY : climate::CLIMATE_PRESET_HOME;
}

void BangBangClimate::set_normal_config(const BangBangClimateTargetTempConfig &normal_config) {
this->normal_config_ = normal_config;
}

void BangBangClimate::set_away_config(const BangBangClimateTargetTempConfig &away_config) {
this->supports_away_ = true;
this->away_config_ = away_config;
}

BangBangClimate::BangBangClimate()
: idle_trigger_(new Trigger<>()), cool_trigger_(new Trigger<>()), heat_trigger_(new Trigger<>()) {}
void BangBangClimate::set_sensor(sensor::Sensor *sensor) { this->sensor_ = sensor; }
void BangBangClimate::set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; }

Trigger<> *BangBangClimate::get_idle_trigger() const { return this->idle_trigger_; }
Trigger<> *BangBangClimate::get_cool_trigger() const { return this->cool_trigger_; }
Trigger<> *BangBangClimate::get_heat_trigger() const { return this->heat_trigger_; }

void BangBangClimate::set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; }
Trigger<> *BangBangClimate::get_heat_trigger() const { return this->heat_trigger_; }
void BangBangClimate::set_supports_heat(bool supports_heat) { this->supports_heat_ = supports_heat; }

void BangBangClimate::dump_config() {
LOG_CLIMATE("", "Bang Bang Climate", this);
ESP_LOGCONFIG(TAG,

@@ -25,15 +25,14 @@ class BangBangClimate : public climate::Climate, public Component {

void set_sensor(sensor::Sensor *sensor);
void set_humidity_sensor(sensor::Sensor *humidity_sensor);
Trigger<> *get_idle_trigger() const;
Trigger<> *get_cool_trigger() const;
void set_supports_cool(bool supports_cool);
Trigger<> *get_heat_trigger() const;
void set_supports_heat(bool supports_heat);
void set_normal_config(const BangBangClimateTargetTempConfig &normal_config);
void set_away_config(const BangBangClimateTargetTempConfig &away_config);

Trigger<> *get_idle_trigger() const;
Trigger<> *get_cool_trigger() const;
Trigger<> *get_heat_trigger() const;

protected:
/// Override control to change settings of the climate device.
void control(const climate::ClimateCall &call) override;
@@ -57,10 +56,16 @@ class BangBangClimate : public climate::Climate, public Component {
*
* In idle mode, the controller is assumed to have both heating and cooling disabled.
*/
Trigger<> *idle_trigger_{nullptr};
Trigger<> *idle_trigger_;
/** The trigger to call when the controller should switch to cooling mode.
*/
Trigger<> *cool_trigger_{nullptr};
Trigger<> *cool_trigger_;
/** Whether the controller supports cooling.
*
* A false value for this attribute means that the controller has no cooling action
* (for example a thermostat, where only heating and not-heating is possible).
*/
bool supports_cool_{false};
/** The trigger to call when the controller should switch to heating mode.
*
* A null value for this attribute means that the controller has no heating action
@@ -68,23 +73,15 @@ class BangBangClimate : public climate::Climate, public Component {
* (blinds open) is possible.
*/
Trigger<> *heat_trigger_{nullptr};
bool supports_heat_{false};
/** A reference to the trigger that was previously active.
*
* This is so that the previous trigger can be stopped before enabling a new one.
*/
Trigger<> *prev_trigger_{nullptr};

/** Whether the controller supports cooling/heating
*
* A false value for this attribute means that the controller has no respective action
* (for example a thermostat, where only heating and not-heating is possible).
*/
bool supports_cool_{false};
bool supports_heat_{false};

bool supports_away_{false};

BangBangClimateTargetTempConfig normal_config_{};
bool supports_away_{false};
BangBangClimateTargetTempConfig away_config_{};
};

@@ -33,7 +33,8 @@ class BedJetClimate : public climate::Climate, public BedJetClient, public Polli

climate::ClimateTraits traits() override {
auto traits = climate::ClimateTraits();
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION | climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
traits.set_supports_action(true);
traits.set_supports_current_temperature(true);
traits.set_supported_modes({
climate::CLIMATE_MODE_OFF,
climate::CLIMATE_MODE_HEAT,

@@ -51,7 +51,7 @@ void BinarySensor::add_filter(Filter *filter) {
last_filter->next_ = filter;
}
}
void BinarySensor::add_filters(std::initializer_list<Filter *> filters) {
void BinarySensor::add_filters(const std::vector<Filter *> &filters) {
for (Filter *filter : filters) {
this->add_filter(filter);
}

@@ -4,7 +4,7 @@
#include "esphome/core/helpers.h"
#include "esphome/components/binary_sensor/filter.h"

#include <initializer_list>
#include <vector>

namespace esphome {

@@ -48,7 +48,7 @@ class BinarySensor : public StatefulEntityBase<bool>, public EntityBase_DeviceCl
void publish_initial_state(bool new_state);

void add_filter(Filter *filter);
void add_filters(std::initializer_list<Filter *> filters);
void add_filters(const std::vector<Filter *> &filters);

// ========== INTERNAL METHODS ==========
// (In most use cases you won't need these)

@@ -6,42 +6,6 @@ namespace climate {

static const char *const TAG = "climate";

// Memory-efficient lookup tables
struct StringToUint8 {
const char *str;
const uint8_t value;
};

constexpr StringToUint8 CLIMATE_MODES_BY_STR[] = {
{"OFF", CLIMATE_MODE_OFF},
{"AUTO", CLIMATE_MODE_AUTO},
{"COOL", CLIMATE_MODE_COOL},
{"HEAT", CLIMATE_MODE_HEAT},
{"FAN_ONLY", CLIMATE_MODE_FAN_ONLY},
{"DRY", CLIMATE_MODE_DRY},
{"HEAT_COOL", CLIMATE_MODE_HEAT_COOL},
};

constexpr StringToUint8 CLIMATE_FAN_MODES_BY_STR[] = {
{"ON", CLIMATE_FAN_ON}, {"OFF", CLIMATE_FAN_OFF}, {"AUTO", CLIMATE_FAN_AUTO},
{"LOW", CLIMATE_FAN_LOW}, {"MEDIUM", CLIMATE_FAN_MEDIUM}, {"HIGH", CLIMATE_FAN_HIGH},
{"MIDDLE", CLIMATE_FAN_MIDDLE}, {"FOCUS", CLIMATE_FAN_FOCUS}, {"DIFFUSE", CLIMATE_FAN_DIFFUSE},
{"QUIET", CLIMATE_FAN_QUIET},
};

constexpr StringToUint8 CLIMATE_PRESETS_BY_STR[] = {
{"ECO", CLIMATE_PRESET_ECO}, {"AWAY", CLIMATE_PRESET_AWAY}, {"BOOST", CLIMATE_PRESET_BOOST},
{"COMFORT", CLIMATE_PRESET_COMFORT}, {"HOME", CLIMATE_PRESET_HOME}, {"SLEEP", CLIMATE_PRESET_SLEEP},
{"ACTIVITY", CLIMATE_PRESET_ACTIVITY}, {"NONE", CLIMATE_PRESET_NONE},
};

constexpr StringToUint8 CLIMATE_SWING_MODES_BY_STR[] = {
{"OFF", CLIMATE_SWING_OFF},
{"BOTH", CLIMATE_SWING_BOTH},
{"VERTICAL", CLIMATE_SWING_VERTICAL},
{"HORIZONTAL", CLIMATE_SWING_HORIZONTAL},
};

void ClimateCall::perform() {
this->parent_->control_callback_.call(*this);
ESP_LOGD(TAG, "'%s' - Setting", this->parent_->get_name().c_str());
@@ -86,46 +50,47 @@ void ClimateCall::perform() {
}
this->parent_->control(*this);
}

void ClimateCall::validate_() {
auto traits = this->parent_->get_traits();
if (this->mode_.has_value()) {
auto mode = *this->mode_;
if (!traits.supports_mode(mode)) {
ESP_LOGW(TAG, " Mode %s not supported", LOG_STR_ARG(climate_mode_to_string(mode)));
ESP_LOGW(TAG, " Mode %s is not supported by this device!", LOG_STR_ARG(climate_mode_to_string(mode)));
this->mode_.reset();
}
}
if (this->custom_fan_mode_.has_value()) {
auto custom_fan_mode = *this->custom_fan_mode_;
if (!traits.supports_custom_fan_mode(custom_fan_mode)) {
ESP_LOGW(TAG, " Fan Mode %s not supported", custom_fan_mode.c_str());
ESP_LOGW(TAG, " Fan Mode %s is not supported by this device!", custom_fan_mode.c_str());
this->custom_fan_mode_.reset();
}
} else if (this->fan_mode_.has_value()) {
auto fan_mode = *this->fan_mode_;
if (!traits.supports_fan_mode(fan_mode)) {
ESP_LOGW(TAG, " Fan Mode %s not supported", LOG_STR_ARG(climate_fan_mode_to_string(fan_mode)));
ESP_LOGW(TAG, " Fan Mode %s is not supported by this device!",
LOG_STR_ARG(climate_fan_mode_to_string(fan_mode)));
this->fan_mode_.reset();
}
}
if (this->custom_preset_.has_value()) {
auto custom_preset = *this->custom_preset_;
if (!traits.supports_custom_preset(custom_preset)) {
ESP_LOGW(TAG, " Preset %s not supported", custom_preset.c_str());
ESP_LOGW(TAG, " Preset %s is not supported by this device!", custom_preset.c_str());
this->custom_preset_.reset();
}
} else if (this->preset_.has_value()) {
auto preset = *this->preset_;
if (!traits.supports_preset(preset)) {
ESP_LOGW(TAG, " Preset %s not supported", LOG_STR_ARG(climate_preset_to_string(preset)));
ESP_LOGW(TAG, " Preset %s is not supported by this device!", LOG_STR_ARG(climate_preset_to_string(preset)));
this->preset_.reset();
}
}
if (this->swing_mode_.has_value()) {
auto swing_mode = *this->swing_mode_;
if (!traits.supports_swing_mode(swing_mode)) {
ESP_LOGW(TAG, " Swing Mode %s not supported", LOG_STR_ARG(climate_swing_mode_to_string(swing_mode)));
ESP_LOGW(TAG, " Swing Mode %s is not supported by this device!",
LOG_STR_ARG(climate_swing_mode_to_string(swing_mode)));
this->swing_mode_.reset();
}
}
@@ -134,127 +99,159 @@ void ClimateCall::validate_() {
if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
ESP_LOGW(TAG, " Cannot set target temperature for climate device "
"with two-point target temperature");
"with two-point target temperature!");
this->target_temperature_.reset();
} else if (std::isnan(target)) {
ESP_LOGW(TAG, " Target temperature must not be NAN");
ESP_LOGW(TAG, " Target temperature must not be NAN!");
this->target_temperature_.reset();
}
}
if (this->target_temperature_low_.has_value() || this->target_temperature_high_.has_value()) {
if (!traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
ESP_LOGW(TAG, " Cannot set low/high target temperature");
ESP_LOGW(TAG, " Cannot set low/high target temperature for this device!");
this->target_temperature_low_.reset();
this->target_temperature_high_.reset();
}
}
if (this->target_temperature_low_.has_value() && std::isnan(*this->target_temperature_low_)) {
ESP_LOGW(TAG, " Target temperature low must not be NAN");
ESP_LOGW(TAG, " Target temperature low must not be NAN!");
this->target_temperature_low_.reset();
}
if (this->target_temperature_high_.has_value() && std::isnan(*this->target_temperature_high_)) {
ESP_LOGW(TAG, " Target temperature high must not be NAN");
ESP_LOGW(TAG, " Target temperature low must not be NAN!");
this->target_temperature_high_.reset();
}
if (this->target_temperature_low_.has_value() && this->target_temperature_high_.has_value()) {
float low = *this->target_temperature_low_;
float high = *this->target_temperature_high_;
if (low > high) {
ESP_LOGW(TAG, " Target temperature low %.2f must be less than target temperature high %.2f", low, high);
ESP_LOGW(TAG, " Target temperature low %.2f must be smaller than target temperature high %.2f!", low, high);
this->target_temperature_low_.reset();
this->target_temperature_high_.reset();
}
}
}

ClimateCall &ClimateCall::set_mode(ClimateMode mode) {
this->mode_ = mode;
return *this;
}

ClimateCall &ClimateCall::set_mode(const std::string &mode) {
for (const auto &mode_entry : CLIMATE_MODES_BY_STR) {
if (str_equals_case_insensitive(mode, mode_entry.str)) {
this->set_mode(static_cast<ClimateMode>(mode_entry.value));
return *this;
}
if (str_equals_case_insensitive(mode, "OFF")) {
this->set_mode(CLIMATE_MODE_OFF);
} else if (str_equals_case_insensitive(mode, "AUTO")) {
this->set_mode(CLIMATE_MODE_AUTO);
} else if (str_equals_case_insensitive(mode, "COOL")) {
this->set_mode(CLIMATE_MODE_COOL);
} else if (str_equals_case_insensitive(mode, "HEAT")) {
this->set_mode(CLIMATE_MODE_HEAT);
} else if (str_equals_case_insensitive(mode, "FAN_ONLY")) {
this->set_mode(CLIMATE_MODE_FAN_ONLY);
} else if (str_equals_case_insensitive(mode, "DRY")) {
this->set_mode(CLIMATE_MODE_DRY);
} else if (str_equals_case_insensitive(mode, "HEAT_COOL")) {
this->set_mode(CLIMATE_MODE_HEAT_COOL);
} else {
ESP_LOGW(TAG, "'%s' - Unrecognized mode %s", this->parent_->get_name().c_str(), mode.c_str());
}
ESP_LOGW(TAG, "'%s' - Unrecognized mode %s", this->parent_->get_name().c_str(), mode.c_str());
return *this;
}

ClimateCall &ClimateCall::set_fan_mode(ClimateFanMode fan_mode) {
this->fan_mode_ = fan_mode;
this->custom_fan_mode_.reset();
return *this;
}

ClimateCall &ClimateCall::set_fan_mode(const std::string &fan_mode) {
for (const auto &mode_entry : CLIMATE_FAN_MODES_BY_STR) {
if (str_equals_case_insensitive(fan_mode, mode_entry.str)) {
this->set_fan_mode(static_cast<ClimateFanMode>(mode_entry.value));
return *this;
}
}
if (this->parent_->get_traits().supports_custom_fan_mode(fan_mode)) {
this->custom_fan_mode_ = fan_mode;
this->fan_mode_.reset();
if (str_equals_case_insensitive(fan_mode, "ON")) {
this->set_fan_mode(CLIMATE_FAN_ON);
} else if (str_equals_case_insensitive(fan_mode, "OFF")) {
this->set_fan_mode(CLIMATE_FAN_OFF);
} else if (str_equals_case_insensitive(fan_mode, "AUTO")) {
this->set_fan_mode(CLIMATE_FAN_AUTO);
} else if (str_equals_case_insensitive(fan_mode, "LOW")) {
this->set_fan_mode(CLIMATE_FAN_LOW);
} else if (str_equals_case_insensitive(fan_mode, "MEDIUM")) {
this->set_fan_mode(CLIMATE_FAN_MEDIUM);
} else if (str_equals_case_insensitive(fan_mode, "HIGH")) {
this->set_fan_mode(CLIMATE_FAN_HIGH);
} else if (str_equals_case_insensitive(fan_mode, "MIDDLE")) {
this->set_fan_mode(CLIMATE_FAN_MIDDLE);
} else if (str_equals_case_insensitive(fan_mode, "FOCUS")) {
this->set_fan_mode(CLIMATE_FAN_FOCUS);
} else if (str_equals_case_insensitive(fan_mode, "DIFFUSE")) {
this->set_fan_mode(CLIMATE_FAN_DIFFUSE);
} else if (str_equals_case_insensitive(fan_mode, "QUIET")) {
this->set_fan_mode(CLIMATE_FAN_QUIET);
} else {
ESP_LOGW(TAG, "'%s' - Unrecognized fan mode %s", this->parent_->get_name().c_str(), fan_mode.c_str());
if (this->parent_->get_traits().supports_custom_fan_mode(fan_mode)) {
this->custom_fan_mode_ = fan_mode;
this->fan_mode_.reset();
} else {
ESP_LOGW(TAG, "'%s' - Unrecognized fan mode %s", this->parent_->get_name().c_str(), fan_mode.c_str());
}
}
return *this;
}

ClimateCall &ClimateCall::set_fan_mode(optional<std::string> fan_mode) {
if (fan_mode.has_value()) {
this->set_fan_mode(fan_mode.value());
}
return *this;
}

ClimateCall &ClimateCall::set_preset(ClimatePreset preset) {
this->preset_ = preset;
this->custom_preset_.reset();
return *this;
}

ClimateCall &ClimateCall::set_preset(const std::string &preset) {
for (const auto &preset_entry : CLIMATE_PRESETS_BY_STR) {
if (str_equals_case_insensitive(preset, preset_entry.str)) {
this->set_preset(static_cast<ClimatePreset>(preset_entry.value));
return *this;
}
}
if (this->parent_->get_traits().supports_custom_preset(preset)) {
this->custom_preset_ = preset;
this->preset_.reset();
if (str_equals_case_insensitive(preset, "ECO")) {
this->set_preset(CLIMATE_PRESET_ECO);
} else if (str_equals_case_insensitive(preset, "AWAY")) {
this->set_preset(CLIMATE_PRESET_AWAY);
} else if (str_equals_case_insensitive(preset, "BOOST")) {
this->set_preset(CLIMATE_PRESET_BOOST);
} else if (str_equals_case_insensitive(preset, "COMFORT")) {
this->set_preset(CLIMATE_PRESET_COMFORT);
} else if (str_equals_case_insensitive(preset, "HOME")) {
this->set_preset(CLIMATE_PRESET_HOME);
} else if (str_equals_case_insensitive(preset, "SLEEP")) {
this->set_preset(CLIMATE_PRESET_SLEEP);
} else if (str_equals_case_insensitive(preset, "ACTIVITY")) {
this->set_preset(CLIMATE_PRESET_ACTIVITY);
} else if (str_equals_case_insensitive(preset, "NONE")) {
this->set_preset(CLIMATE_PRESET_NONE);
} else {
ESP_LOGW(TAG, "'%s' - Unrecognized preset %s", this->parent_->get_name().c_str(), preset.c_str());
if (this->parent_->get_traits().supports_custom_preset(preset)) {
this->custom_preset_ = preset;
this->preset_.reset();
} else {
ESP_LOGW(TAG, "'%s' - Unrecognized preset %s", this->parent_->get_name().c_str(), preset.c_str());
}
}
return *this;
}

ClimateCall &ClimateCall::set_preset(optional<std::string> preset) {
if (preset.has_value()) {
this->set_preset(preset.value());
}
return *this;
}

ClimateCall &ClimateCall::set_swing_mode(ClimateSwingMode swing_mode) {
this->swing_mode_ = swing_mode;
return *this;
}

ClimateCall &ClimateCall::set_swing_mode(const std::string &swing_mode) {
for (const auto &mode_entry : CLIMATE_SWING_MODES_BY_STR) {
if (str_equals_case_insensitive(swing_mode, mode_entry.str)) {
this->set_swing_mode(static_cast<ClimateSwingMode>(mode_entry.value));
return *this;
}
if (str_equals_case_insensitive(swing_mode, "OFF")) {
this->set_swing_mode(CLIMATE_SWING_OFF);
} else if (str_equals_case_insensitive(swing_mode, "BOTH")) {
this->set_swing_mode(CLIMATE_SWING_BOTH);
} else if (str_equals_case_insensitive(swing_mode, "VERTICAL")) {
this->set_swing_mode(CLIMATE_SWING_VERTICAL);
} else if (str_equals_case_insensitive(swing_mode, "HORIZONTAL")) {
this->set_swing_mode(CLIMATE_SWING_HORIZONTAL);
} else {
ESP_LOGW(TAG, "'%s' - Unrecognized swing mode %s", this->parent_->get_name().c_str(), swing_mode.c_str());
}
ESP_LOGW(TAG, "'%s' - Unrecognized swing mode %s", this->parent_->get_name().c_str(), swing_mode.c_str());
return *this;
}

@@ -262,71 +259,59 @@ ClimateCall &ClimateCall::set_target_temperature(float target_temperature) {
this->target_temperature_ = target_temperature;
return *this;
}

ClimateCall &ClimateCall::set_target_temperature_low(float target_temperature_low) {
this->target_temperature_low_ = target_temperature_low;
return *this;
}

ClimateCall &ClimateCall::set_target_temperature_high(float target_temperature_high) {
this->target_temperature_high_ = target_temperature_high;
return *this;
}

ClimateCall &ClimateCall::set_target_humidity(float target_humidity) {
this->target_humidity_ = target_humidity;
return *this;
}

const optional<ClimateMode> &ClimateCall::get_mode() const { return this->mode_; }
const optional<float> &ClimateCall::get_target_temperature() const { return this->target_temperature_; }
const optional<float> &ClimateCall::get_target_temperature_low() const { return this->target_temperature_low_; }
const optional<float> &ClimateCall::get_target_temperature_high() const { return this->target_temperature_high_; }
const optional<float> &ClimateCall::get_target_humidity() const { return this->target_humidity_; }

const optional<ClimateMode> &ClimateCall::get_mode() const { return this->mode_; }
const optional<ClimateFanMode> &ClimateCall::get_fan_mode() const { return this->fan_mode_; }
const optional<ClimateSwingMode> &ClimateCall::get_swing_mode() const { return this->swing_mode_; }
const optional<ClimatePreset> &ClimateCall::get_preset() const { return this->preset_; }
const optional<std::string> &ClimateCall::get_custom_fan_mode() const { return this->custom_fan_mode_; }
const optional<ClimatePreset> &ClimateCall::get_preset() const { return this->preset_; }
const optional<std::string> &ClimateCall::get_custom_preset() const { return this->custom_preset_; }

const optional<ClimateSwingMode> &ClimateCall::get_swing_mode() const { return this->swing_mode_; }
ClimateCall &ClimateCall::set_target_temperature_high(optional<float> target_temperature_high) {
this->target_temperature_high_ = target_temperature_high;
return *this;
}

ClimateCall &ClimateCall::set_target_temperature_low(optional<float> target_temperature_low) {
this->target_temperature_low_ = target_temperature_low;
return *this;
}

ClimateCall &ClimateCall::set_target_temperature(optional<float> target_temperature) {
this->target_temperature_ = target_temperature;
return *this;
}

ClimateCall &ClimateCall::set_target_humidity(optional<float> target_humidity) {
this->target_humidity_ = target_humidity;
return *this;
}

ClimateCall &ClimateCall::set_mode(optional<ClimateMode> mode) {
this->mode_ = mode;
return *this;
}

ClimateCall &ClimateCall::set_fan_mode(optional<ClimateFanMode> fan_mode) {
this->fan_mode_ = fan_mode;
this->custom_fan_mode_.reset();
return *this;
}

ClimateCall &ClimateCall::set_preset(optional<ClimatePreset> preset) {
this->preset_ = preset;
this->custom_preset_.reset();
return *this;
}

ClimateCall &ClimateCall::set_swing_mode(optional<ClimateSwingMode> swing_mode) {
this->swing_mode_ = swing_mode;
return *this;
@@ -351,7 +336,6 @@ optional<ClimateDeviceRestoreState> Climate::restore_state_() {
return {};
return recovered;
}

void Climate::save_state_() {
#if (defined(USE_ESP_IDF) || (defined(USE_ESP8266) && USE_ARDUINO_VERSION_CODE >= VERSION_CODE(3, 0, 0))) && \
!defined(CLANG_TIDY)
@@ -414,7 +398,6 @@ void Climate::save_state_() {

this->rtc_.save(&state);
}

void Climate::publish_state() {
ESP_LOGD(TAG, "'%s' - Sending state:", this->name_.c_str());
auto traits = this->get_traits();
@@ -486,20 +469,16 @@ ClimateTraits Climate::get_traits() {
void Climate::set_visual_min_temperature_override(float visual_min_temperature_override) {
this->visual_min_temperature_override_ = visual_min_temperature_override;
}

void Climate::set_visual_max_temperature_override(float visual_max_temperature_override) {
this->visual_max_temperature_override_ = visual_max_temperature_override;
}

void Climate::set_visual_temperature_step_override(float target, float current) {
this->visual_target_temperature_step_override_ = target;
this->visual_current_temperature_step_override_ = current;
}

void Climate::set_visual_min_humidity_override(float visual_min_humidity_override) {
this->visual_min_humidity_override_ = visual_min_humidity_override;
}

void Climate::set_visual_max_humidity_override(float visual_max_humidity_override) {
this->visual_max_humidity_override_ = visual_max_humidity_override;
}
@@ -531,7 +510,6 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) {
}
return call;
}

void ClimateDeviceRestoreState::apply(Climate *climate) {
auto traits = climate->get_traits();
climate->mode = this->mode;
@@ -601,68 +579,68 @@ void Climate::dump_traits_(const char *tag) {
auto traits = this->get_traits();
ESP_LOGCONFIG(tag, "ClimateTraits:");
ESP_LOGCONFIG(tag,
" Visual settings:\n"
" - Min temperature: %.1f\n"
" - Max temperature: %.1f\n"
" - Temperature step:\n"
" Target: %.1f",
" [x] Visual settings:\n"
" - Min temperature: %.1f\n"
" - Max temperature: %.1f\n"
" - Temperature step:\n"
" Target: %.1f",
traits.get_visual_min_temperature(), traits.get_visual_max_temperature(),
traits.get_visual_target_temperature_step());
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step());
ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step());
}
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY |
climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
ESP_LOGCONFIG(tag,
" - Min humidity: %.0f\n"
" - Max humidity: %.0f",
" - Min humidity: %.0f\n"
" - Max humidity: %.0f",
traits.get_visual_min_humidity(), traits.get_visual_max_humidity());
}
if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
ESP_LOGCONFIG(tag, " Supports two-point target temperature");
ESP_LOGCONFIG(tag, " [x] Supports two-point target temperature");
}
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
ESP_LOGCONFIG(tag, " Supports current temperature");
ESP_LOGCONFIG(tag, " [x] Supports current temperature");
}
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
ESP_LOGCONFIG(tag, " Supports target humidity");
ESP_LOGCONFIG(tag, " [x] Supports target humidity");
}
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
ESP_LOGCONFIG(tag, " Supports current humidity");
ESP_LOGCONFIG(tag, " [x] Supports current humidity");
}
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
ESP_LOGCONFIG(tag, " Supports action");
ESP_LOGCONFIG(tag, " [x] Supports action");
}
if (!traits.get_supported_modes().empty()) {
ESP_LOGCONFIG(tag, " Supported modes:");
ESP_LOGCONFIG(tag, " [x] Supported modes:");
for (ClimateMode m : traits.get_supported_modes())
ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_mode_to_string(m)));
ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_mode_to_string(m)));
}
if (!traits.get_supported_fan_modes().empty()) {
ESP_LOGCONFIG(tag, " Supported fan modes:");
ESP_LOGCONFIG(tag, " [x] Supported fan modes:");
for (ClimateFanMode m : traits.get_supported_fan_modes())
ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_fan_mode_to_string(m)));
ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_fan_mode_to_string(m)));
}
if (!traits.get_supported_custom_fan_modes().empty()) {
ESP_LOGCONFIG(tag, " Supported custom fan modes:");
ESP_LOGCONFIG(tag, " [x] Supported custom fan modes:");
for (const std::string &s : traits.get_supported_custom_fan_modes())
ESP_LOGCONFIG(tag, " - %s", s.c_str());
ESP_LOGCONFIG(tag, " - %s", s.c_str());
}
if (!traits.get_supported_presets().empty()) {
ESP_LOGCONFIG(tag, " Supported presets:");
ESP_LOGCONFIG(tag, " [x] Supported presets:");
for (ClimatePreset p : traits.get_supported_presets())
ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_preset_to_string(p)));
ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_preset_to_string(p)));
}
if (!traits.get_supported_custom_presets().empty()) {
ESP_LOGCONFIG(tag, " Supported custom presets:");
ESP_LOGCONFIG(tag, " [x] Supported custom presets:");
for (const std::string &s : traits.get_supported_custom_presets())
ESP_LOGCONFIG(tag, " - %s", s.c_str());
ESP_LOGCONFIG(tag, " - %s", s.c_str());
}
if (!traits.get_supported_swing_modes().empty()) {
ESP_LOGCONFIG(tag, " Supported swing modes:");
ESP_LOGCONFIG(tag, " [x] Supported swing modes:");
for (ClimateSwingMode m : traits.get_supported_swing_modes())
ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_swing_mode_to_string(m)));
ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_swing_mode_to_string(m)));
}
}

@@ -93,31 +93,30 @@ class ClimateCall {
|
||||
|
||||
void perform();
|
||||
|
||||
const optional<ClimateMode> &get_mode() const;
|
||||
const optional<float> &get_target_temperature() const;
|
||||
const optional<float> &get_target_temperature_low() const;
|
||||
const optional<float> &get_target_temperature_high() const;
|
||||
const optional<float> &get_target_humidity() const;
|
||||
|
||||
const optional<ClimateMode> &get_mode() const;
|
||||
const optional<ClimateFanMode> &get_fan_mode() const;
|
||||
const optional<ClimateSwingMode> &get_swing_mode() const;
|
||||
const optional<ClimatePreset> &get_preset() const;
|
||||
const optional<std::string> &get_custom_fan_mode() const;
|
||||
const optional<ClimatePreset> &get_preset() const;
|
||||
const optional<std::string> &get_custom_preset() const;
|
||||
|
||||
protected:
|
||||
void validate_();
|
||||
|
||||
Climate *const parent_;
|
||||
optional<ClimateMode> mode_;
|
||||
optional<float> target_temperature_;
|
||||
optional<float> target_temperature_low_;
|
||||
optional<float> target_temperature_high_;
|
||||
optional<float> target_humidity_;
|
||||
optional<ClimateMode> mode_;
|
||||
optional<ClimateFanMode> fan_mode_;
|
||||
optional<ClimateSwingMode> swing_mode_;
|
||||
optional<ClimatePreset> preset_;
|
||||
optional<std::string> custom_fan_mode_;
|
||||
optional<ClimatePreset> preset_;
|
||||
optional<std::string> custom_preset_;
|
||||
};
|
||||
|
||||
@@ -170,6 +169,47 @@ class Climate : public EntityBase {
|
||||
public:
|
||||
Climate() {}
|
||||
|
||||
/// The active mode of the climate device.
|
||||
ClimateMode mode{CLIMATE_MODE_OFF};
|
||||
|
||||
/// The active state of the climate device.
|
||||
ClimateAction action{CLIMATE_ACTION_OFF};
|
||||
|
||||
/// The current temperature of the climate device, as reported from the integration.
|
||||
float current_temperature{NAN};
|
||||
|
||||
/// The current humidity of the climate device, as reported from the integration.
|
||||
float current_humidity{NAN};
|
||||
|
||||
union {
|
||||
/// The target temperature of the climate device.
|
||||
float target_temperature;
|
||||
struct {
|
||||
/// The minimum target temperature of the climate device, for climate devices with split target temperature.
|
||||
float target_temperature_low{NAN};
|
||||
/// The maximum target temperature of the climate device, for climate devices with split target temperature.
|
||||
float target_temperature_high{NAN};
|
||||
};
|
||||
};
|
||||
|
||||
/// The target humidity of the climate device.
|
||||
float target_humidity;
|
||||
|
||||
/// The active fan mode of the climate device.
|
||||
optional<ClimateFanMode> fan_mode;
|
||||
|
||||
/// The active swing mode of the climate device.
|
||||
ClimateSwingMode swing_mode;
|
||||
|
||||
/// The active custom fan mode of the climate device.
|
||||
optional<std::string> custom_fan_mode;
|
||||
|
||||
/// The active preset of the climate device.
|
||||
optional<ClimatePreset> preset;
|
||||
|
||||
/// The active custom preset mode of the climate device.
|
||||
optional<std::string> custom_preset;
|
||||
|
||||
/** Add a callback for the climate device state, each time the state of the climate device is updated
|
||||
* (using publish_state), this callback will be called.
|
||||
*
|
||||
@@ -211,47 +251,6 @@ class Climate : public EntityBase {
|
||||
void set_visual_min_humidity_override(float visual_min_humidity_override);
|
||||
void set_visual_max_humidity_override(float visual_max_humidity_override);
|
||||
|
||||
/// The current temperature of the climate device, as reported from the integration.
|
||||
float current_temperature{NAN};
|
||||
|
||||
/// The current humidity of the climate device, as reported from the integration.
|
||||
float current_humidity{NAN};
|
||||
|
||||
union {
|
||||
/// The target temperature of the climate device.
|
||||
float target_temperature;
|
||||
struct {
|
||||
/// The minimum target temperature of the climate device, for climate devices with split target temperature.
|
||||
float target_temperature_low{NAN};
|
||||
/// The maximum target temperature of the climate device, for climate devices with split target temperature.
|
||||
float target_temperature_high{NAN};
|
||||
};
|
||||
};
|
||||
|
||||
/// The target humidity of the climate device.
|
||||
float target_humidity;
|
||||
|
||||
/// The active fan mode of the climate device.
|
||||
optional<ClimateFanMode> fan_mode;
|
||||
|
||||
/// The active preset of the climate device.
|
||||
optional<ClimatePreset> preset;
|
||||
|
||||
/// The active custom fan mode of the climate device.
|
||||
optional<std::string> custom_fan_mode;
|
||||
|
||||
/// The active custom preset mode of the climate device.
|
||||
optional<std::string> custom_preset;
|
||||
|
||||
/// The active mode of the climate device.
|
||||
ClimateMode mode{CLIMATE_MODE_OFF};
|
||||
|
||||
/// The active state of the climate device.
|
||||
ClimateAction action{CLIMATE_ACTION_OFF};
|
||||
|
||||
/// The active swing mode of the climate device.
|
||||
ClimateSwingMode swing_mode{CLIMATE_SWING_OFF};
|
||||
|
||||
protected:
|
||||
friend ClimateCall;
|
||||
|
||||
|
||||
@@ -8,10 +8,7 @@ static const char *const TAG = "climate_ir";
|
||||
|
||||
climate::ClimateTraits ClimateIR::traits() {
|
||||
auto traits = climate::ClimateTraits();
|
||||
if (this->sensor_ != nullptr) {
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
}
|
||||
|
||||
traits.set_supports_current_temperature(this->sensor_ != nullptr);
|
||||
traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT_COOL});
|
||||
if (this->supports_cool_)
|
||||
traits.add_supported_mode(climate::CLIMATE_MODE_COOL);
|
||||
@@ -22,6 +19,7 @@ climate::ClimateTraits ClimateIR::traits() {
|
||||
if (this->supports_fan_only_)
|
||||
traits.add_supported_mode(climate::CLIMATE_MODE_FAN_ONLY);
|
||||
|
||||
traits.set_supports_two_point_target_temperature(false);
|
||||
traits.set_visual_min_temperature(this->minimum_temperature_);
|
||||
traits.set_visual_max_temperature(this->maximum_temperature_);
|
||||
traits.set_visual_temperature_step(this->temperature_step_);
|
||||
|
||||
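
Note: the climate hunks above all follow one pattern, per-feature boolean setters on ClimateTraits giving way to a feature-flag bitmask queried with has_feature_flags(). A minimal, hedged sketch of what a component's traits() looks like after that migration, using only names that appear in the hunks (MyClimate and has_temp_sensor_ are illustrative, not from this changeset):

// Hedged sketch only, not the exact code of any component in this diff.
climate::ClimateTraits MyClimate::traits() {
  auto traits = climate::ClimateTraits();
  if (this->has_temp_sensor_) {
    // replaces the old traits.set_supports_current_temperature(true)
    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
  }
  // flags can be OR-ed into one call, mirroring has_feature_flags() in dump_config above
  traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION | climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);
  return traits;
}
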
@@ -1,6 +1,6 @@
#include "cover.h"
#include <strings.h>
#include "esphome/core/log.h"
#include <strings.h>

namespace esphome {
namespace cover {

@@ -144,7 +144,21 @@ CoverCall &CoverCall::set_stop(bool stop) {
bool CoverCall::get_stop() const { return this->stop_; }

CoverCall Cover::make_call() { return {this}; }

void Cover::open() {
  auto call = this->make_call();
  call.set_command_open();
  call.perform();
}
void Cover::close() {
  auto call = this->make_call();
  call.set_command_close();
  call.perform();
}
void Cover::stop() {
  auto call = this->make_call();
  call.set_command_stop();
  call.perform();
}
void Cover::add_on_state_callback(std::function<void()> &&f) { this->state_callback_.add(std::move(f)); }
void Cover::publish_state(bool save) {
  this->position = clamp(this->position, 0.0f, 1.0f);

@@ -4,7 +4,6 @@
#include "esphome/core/entity_base.h"
#include "esphome/core/helpers.h"
#include "esphome/core/preferences.h"

#include "cover_traits.h"

namespace esphome {

@@ -126,6 +125,25 @@ class Cover : public EntityBase, public EntityBase_DeviceClass {

  /// Construct a new cover call used to control the cover.
  CoverCall make_call();
  /** Open the cover.
   *
   * This is a legacy method and may be removed later, please use `.make_call()` instead.
   */
  ESPDEPRECATED("open() is deprecated, use make_call().set_command_open().perform() instead.", "2021.9")
  void open();
  /** Close the cover.
   *
   * This is a legacy method and may be removed later, please use `.make_call()` instead.
   */
  ESPDEPRECATED("close() is deprecated, use make_call().set_command_close().perform() instead.", "2021.9")
  void close();
  /** Stop the cover.
   *
   * This is a legacy method and may be removed later, please use `.make_call()` instead.
   * As per solution from issue #2885 the call should include perform()
   */
  ESPDEPRECATED("stop() is deprecated, use make_call().set_command_stop().perform() instead.", "2021.9")
  void stop();

  void add_on_state_callback(std::function<void()> &&f);
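
Note: the cover hunks reintroduce open()/close()/stop() only as deprecated helpers over CoverCall. A hedged usage sketch of the migration, taken directly from the ESPDEPRECATED messages above (my_cover is an illustrative Cover pointer from user code):

// Hedged sketch: deprecated helper versus the preferred CoverCall form.
my_cover->open();  // deprecated since 2021.9, still works via the shim above
// preferred, and exactly what the shim does internally:
my_cover->make_call().set_command_open().perform();
// or spelled out when more fields need to be set before performing:
auto call = my_cover->make_call();
call.set_command_open();
call.perform();
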
@@ -241,7 +241,9 @@ uint8_t DaikinArcClimate::humidity_() {

climate::ClimateTraits DaikinArcClimate::traits() {
  climate::ClimateTraits traits = climate_ir::ClimateIR::traits();
  traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);
  traits.set_supports_current_temperature(true);
  traits.set_supports_current_humidity(false);
  traits.set_supports_target_humidity(true);
  traits.set_visual_min_humidity(38);
  traits.set_visual_max_humidity(52);
  return traits;

@@ -82,14 +82,16 @@ class DemoClimate : public climate::Climate, public Component {
  climate::ClimateTraits traits{};
  switch (type_) {
    case DemoClimateType::TYPE_1:
      traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION);
      traits.set_supports_current_temperature(true);
      traits.set_supported_modes({
          climate::CLIMATE_MODE_OFF,
          climate::CLIMATE_MODE_HEAT,
      });
      traits.set_supports_action(true);
      traits.set_visual_temperature_step(0.5);
      break;
    case DemoClimateType::TYPE_2:
      traits.set_supports_current_temperature(false);
      traits.set_supported_modes({
          climate::CLIMATE_MODE_OFF,
          climate::CLIMATE_MODE_HEAT,

@@ -98,7 +100,7 @@ class DemoClimate : public climate::Climate, public Component {
          climate::CLIMATE_MODE_DRY,
          climate::CLIMATE_MODE_FAN_ONLY,
      });
      traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION);
      traits.set_supports_action(true);
      traits.set_supported_fan_modes({
          climate::CLIMATE_FAN_ON,
          climate::CLIMATE_FAN_OFF,

@@ -121,8 +123,8 @@ class DemoClimate : public climate::Climate, public Component {
      traits.set_supported_custom_presets({"My Preset"});
      break;
    case DemoClimateType::TYPE_3:
      traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE |
                               climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE);
      traits.set_supports_current_temperature(true);
      traits.set_supports_two_point_target_temperature(true);
      traits.set_supported_modes({
          climate::CLIMATE_MODE_OFF,
          climate::CLIMATE_MODE_COOL,

@@ -103,7 +103,7 @@ bool EPaperBase::is_idle_() {
  if (this->busy_pin_ == nullptr) {
    return true;
  }
  return this->busy_pin_->digital_read();
  return !this->busy_pin_->digital_read();
}

void EPaperBase::reset() {

@@ -1,4 +1,3 @@
import contextlib
from dataclasses import dataclass
import itertools
import logging

@@ -103,10 +102,6 @@ COMPILER_OPTIMIZATIONS = {
    "SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE",
}

# Socket limit configuration for ESP-IDF
# ESP-IDF CONFIG_LWIP_MAX_SOCKETS has range 1-253, default 10
DEFAULT_MAX_SOCKETS = 10  # ESP-IDF default

ARDUINO_ALLOWED_VARIANTS = [
    VARIANT_ESP32,
    VARIANT_ESP32C3,

@@ -751,72 +746,6 @@ CONFIG_SCHEMA = cv.All(
FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate)


def _configure_lwip_max_sockets(conf: dict) -> None:
    """Calculate and set CONFIG_LWIP_MAX_SOCKETS based on component needs.

    Socket component tracks consumer needs via consume_sockets() called during config validation.
    This function runs in to_code() after all components have registered their socket needs.
    User-provided sdkconfig_options take precedence.
    """
    from esphome.components.socket import KEY_SOCKET_CONSUMERS

    # Check if user manually specified CONFIG_LWIP_MAX_SOCKETS
    user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get(
        "CONFIG_LWIP_MAX_SOCKETS"
    )

    socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {})
    total_sockets = sum(socket_consumers.values())

    # Early return if no sockets registered and no user override
    if total_sockets == 0 and user_max_sockets is None:
        return

    components_list = ", ".join(
        f"{name}={count}" for name, count in sorted(socket_consumers.items())
    )

    # User specified their own value - respect it but warn if insufficient
    if user_max_sockets is not None:
        _LOGGER.info(
            "Using user-provided CONFIG_LWIP_MAX_SOCKETS: %s",
            user_max_sockets,
        )

        # Warn if user's value is less than what components need
        if total_sockets > 0:
            user_sockets_int = 0
            with contextlib.suppress(ValueError, TypeError):
                user_sockets_int = int(user_max_sockets)

            if user_sockets_int < total_sockets:
                _LOGGER.warning(
                    "CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration "
                    "needs %d sockets (registered: %s). You may experience socket "
                    "exhaustion errors. Consider increasing to at least %d.",
                    user_sockets_int,
                    total_sockets,
                    components_list,
                    total_sockets,
                )
        # User's value already added via sdkconfig_options processing
        return

    # Auto-calculate based on component needs
    # Use at least the ESP-IDF default (10), or the total needed by components
    max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets)

    log_level = logging.INFO if max_sockets > DEFAULT_MAX_SOCKETS else logging.DEBUG
    _LOGGER.log(
        log_level,
        "Setting CONFIG_LWIP_MAX_SOCKETS to %d (registered: %s)",
        max_sockets,
        components_list,
    )

    add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets)


async def to_code(config):
    cg.add_platformio_option("board", config[CONF_BOARD])
    cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE])

@@ -850,16 +779,6 @@ async def to_code(config):
        Path(__file__).parent / "post_build.py.script",
    )

    # In testing mode, add IRAM fix script to allow linking grouped component tests
    # Similar to ESP8266's approach but for ESP-IDF
    if CORE.testing_mode:
        cg.add_build_flag("-DESPHOME_TESTING_MODE")
        add_extra_script(
            "pre",
            "iram_fix.py",
            Path(__file__).parent / "iram_fix.py.script",
        )

    if conf[CONF_TYPE] == FRAMEWORK_ESP_IDF:
        cg.add_platformio_option("framework", "espidf")
        cg.add_build_flag("-DUSE_ESP_IDF")

@@ -886,7 +805,6 @@ async def to_code(config):
        add_idf_sdkconfig_option("CONFIG_AUTOSTART_ARDUINO", True)
        add_idf_sdkconfig_option("CONFIG_MBEDTLS_PSK_MODES", True)
        add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True)
        add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True)

        cg.add_build_flag("-Wno-nonnull-compare")

@@ -937,9 +855,6 @@ async def to_code(config):
        add_idf_sdkconfig_option("CONFIG_LWIP_DNS_SUPPORT_MDNS_QUERIES", False)
    if not advanced.get(CONF_ENABLE_LWIP_BRIDGE_INTERFACE, False):
        add_idf_sdkconfig_option("CONFIG_LWIP_BRIDGEIF_MAX_PORTS", 0)

    _configure_lwip_max_sockets(conf)

    if advanced.get(CONF_EXECUTE_FROM_PSRAM, False):
        add_idf_sdkconfig_option("CONFIG_SPIRAM_FETCH_INSTRUCTIONS", True)
        add_idf_sdkconfig_option("CONFIG_SPIRAM_RODATA", True)
@@ -1,71 +0,0 @@
import os
import re

# pylint: disable=E0602
Import("env") # noqa

# IRAM size for testing mode (2MB - large enough to accommodate grouped tests)
TESTING_IRAM_SIZE = 0x200000


def patch_idf_linker_script(source, target, env):
    """Patch ESP-IDF linker script to increase IRAM size for testing mode."""
    # Check if we're in testing mode by looking for the define
    build_flags = env.get("BUILD_FLAGS", [])
    testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)

    if not testing_mode:
        return

    # For ESP-IDF, the linker scripts are generated in the build directory
    build_dir = env.subst("$BUILD_DIR")

    # The memory.ld file is directly in the build directory
    memory_ld = os.path.join(build_dir, "memory.ld")

    if not os.path.exists(memory_ld):
        print(f"ESPHome: Warning - could not find linker script at {memory_ld}")
        return

    try:
        with open(memory_ld, "r") as f:
            content = f.read()
    except OSError as e:
        print(f"ESPHome: Error reading linker script: {e}")
        return

    # Check if this file contains iram0_0_seg
    if 'iram0_0_seg' not in content:
        print(f"ESPHome: Warning - iram0_0_seg not found in {memory_ld}")
        return

    # Look for iram0_0_seg definition and increase its length
    # ESP-IDF format can be:
    # iram0_0_seg (RX) : org = 0x40080000, len = 0x20000 + 0x0
    # or more complex with nested parentheses:
    # iram0_0_seg (RX) : org = (0x40370000 + 0x4000), len = (((0x403CB700 - (0x40378000 - 0x3FC88000)) - 0x3FC88000) + 0x8000 - 0x4000)
    # We want to change len to TESTING_IRAM_SIZE for testing

    # Use a more robust approach: find the line and manually parse it
    lines = content.split('\n')
    for i, line in enumerate(lines):
        if 'iram0_0_seg' in line and 'len' in line:
            # Find the position of "len = " and replace everything after it until the end of the statement
            match = re.search(r'(iram0_0_seg\s*\([^)]*\)\s*:\s*org\s*=\s*(?:\([^)]+\)|0x[0-9a-fA-F]+)\s*,\s*len\s*=\s*)(.+?)(\s*)$', line)
            if match:
                lines[i] = f"{match.group(1)}{TESTING_IRAM_SIZE:#x}{match.group(3)}"
                break

    updated = '\n'.join(lines)

    if updated != content:
        with open(memory_ld, "w") as f:
            f.write(updated)
        print(f"ESPHome: Patched IRAM size to {TESTING_IRAM_SIZE:#x} in {memory_ld} for testing mode")
    else:
        print(f"ESPHome: Warning - could not patch iram0_0_seg in {memory_ld}")


# Hook into the build process before linking
# For ESP-IDF, we need to run this after the linker scripts are generated
env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_idf_linker_script)
@@ -76,10 +76,6 @@ void ESP32BLE::advertising_set_service_data(const std::vector<uint8_t> &data) {
}

void ESP32BLE::advertising_set_manufacturer_data(const std::vector<uint8_t> &data) {
  this->advertising_set_manufacturer_data(std::span<const uint8_t>(data));
}

void ESP32BLE::advertising_set_manufacturer_data(std::span<const uint8_t> data) {
  this->advertising_init_();
  this->advertising_->set_manufacturer_data(data);
  this->advertising_start();

@@ -118,7 +118,6 @@ class ESP32BLE : public Component {
  void advertising_start();
  void advertising_set_service_data(const std::vector<uint8_t> &data);
  void advertising_set_manufacturer_data(const std::vector<uint8_t> &data);
  void advertising_set_manufacturer_data(std::span<const uint8_t> data);
  void advertising_set_appearance(uint16_t appearance) { this->appearance_ = appearance; }
  void advertising_set_service_data_and_name(std::span<const uint8_t> data, bool include_name);
  void advertising_add_service_uuid(ESPBTUUID uuid);

@@ -59,10 +59,6 @@ void BLEAdvertising::set_service_data(const std::vector<uint8_t> &data) {
}

void BLEAdvertising::set_manufacturer_data(const std::vector<uint8_t> &data) {
  this->set_manufacturer_data(std::span<const uint8_t>(data));
}

void BLEAdvertising::set_manufacturer_data(std::span<const uint8_t> data) {
  delete[] this->advertising_data_.p_manufacturer_data;
  this->advertising_data_.p_manufacturer_data = nullptr;
  this->advertising_data_.manufacturer_len = data.size();

@@ -37,7 +37,6 @@ class BLEAdvertising {
  void set_scan_response(bool scan_response) { this->scan_response_ = scan_response; }
  void set_min_preferred_interval(uint16_t interval) { this->advertising_data_.min_interval = interval; }
  void set_manufacturer_data(const std::vector<uint8_t> &data);
  void set_manufacturer_data(std::span<const uint8_t> data);
  void set_appearance(uint16_t appearance) { this->advertising_data_.appearance = appearance; }
  void set_service_data(const std::vector<uint8_t> &data);
  void set_service_data(std::span<const uint8_t> data);
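
Note: in these hunks the std::vector overloads stay only as thin forwarders and the work moves to std::span<const uint8_t>. A hedged caller-side sketch, assuming only the two overloads declared above (the advertising pointer and the payload bytes are illustrative):

// Hedged sketch: both call forms end up in the std::span overload shown above.
static const uint8_t MFG_DATA[] = {0xE5, 0x02, 0x01};              // illustrative payload
advertising->set_manufacturer_data(std::span<const uint8_t>(MFG_DATA));  // static buffer, no vector needed

std::vector<uint8_t> dynamic_data = {0xE5, 0x02, 0x02};
advertising->set_manufacturer_data(dynamic_data);                  // vector overload forwards to the span one
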
@@ -1,6 +1,5 @@
#include "esp32_ble_beacon.h"
#include "esphome/core/log.h"
#include "esphome/core/helpers.h"

#ifdef USE_ESP32

@@ -15,10 +15,7 @@ Trigger<std::vector<uint8_t>, uint16_t> *BLETriggers::create_characteristic_on_w
  Trigger<std::vector<uint8_t>, uint16_t> *on_write_trigger =  // NOLINT(cppcoreguidelines-owning-memory)
      new Trigger<std::vector<uint8_t>, uint16_t>();
  characteristic->on_write([on_write_trigger](std::span<const uint8_t> data, uint16_t id) {
    // Convert span to vector for trigger - copy is necessary because:
    // 1. Trigger stores the data for use in automation actions that execute later
    // 2. The span is only valid during this callback (points to temporary BLE stack data)
    // 3. User lambdas in automations need persistent data they can access asynchronously
    // Convert span to vector for trigger
    on_write_trigger->trigger(std::vector<uint8_t>(data.begin(), data.end()), id);
  });
  return on_write_trigger;

@@ -30,10 +27,7 @@ Trigger<std::vector<uint8_t>, uint16_t> *BLETriggers::create_descriptor_on_write
  Trigger<std::vector<uint8_t>, uint16_t> *on_write_trigger =  // NOLINT(cppcoreguidelines-owning-memory)
      new Trigger<std::vector<uint8_t>, uint16_t>();
  descriptor->on_write([on_write_trigger](std::span<const uint8_t> data, uint16_t id) {
    // Convert span to vector for trigger - copy is necessary because:
    // 1. Trigger stores the data for use in automation actions that execute later
    // 2. The span is only valid during this callback (points to temporary BLE stack data)
    // 3. User lambdas in automations need persistent data they can access asynchronously
    // Convert span to vector for trigger
    on_write_trigger->trigger(std::vector<uint8_t>(data.begin(), data.end()), id);
  });
  return on_write_trigger;

@@ -1,7 +1,6 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_MODE, CONF_PORT
from esphome.types import ConfigType

CODEOWNERS = ["@ayufan"]
AUTO_LOAD = ["camera"]

@@ -14,27 +13,13 @@ Mode = esp32_camera_web_server_ns.enum("Mode")

MODES = {"STREAM": Mode.STREAM, "SNAPSHOT": Mode.SNAPSHOT}


def _consume_camera_web_server_sockets(config: ConfigType) -> ConfigType:
    """Register socket needs for camera web server."""
    from esphome.components import socket

    # Each camera web server instance needs 1 listening socket + 2 client connections
    sockets_needed = 3
    socket.consume_sockets(sockets_needed, "esp32_camera_web_server")(config)
    return config


CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(CameraWebServer),
            cv.Required(CONF_PORT): cv.port,
            cv.Required(CONF_MODE): cv.enum(MODES, upper=True),
        },
    ).extend(cv.COMPONENT_SCHEMA),
    _consume_camera_web_server_sockets,
)
CONFIG_SCHEMA = cv.Schema(
    {
        cv.GenerateID(): cv.declare_id(CameraWebServer),
        cv.Required(CONF_PORT): cv.port,
        cv.Required(CONF_MODE): cv.enum(MODES, upper=True),
    },
).extend(cv.COMPONENT_SCHEMA)


async def to_code(config):

@@ -95,7 +95,7 @@ async def to_code(config):
    if framework_ver >= cv.Version(5, 5, 0):
        esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="1.1.5")
        esp32.add_idf_component(name="espressif/eppp_link", ref="1.1.3")
        esp32.add_idf_component(name="espressif/esp_hosted", ref="2.6.1")
        esp32.add_idf_component(name="espressif/esp_hosted", ref="2.5.11")
    else:
        esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="0.13.0")
        esp32.add_idf_component(name="espressif/eppp_link", ref="0.2.0")

@@ -1,11 +1,11 @@
from esphome import automation
import esphome.codegen as cg
from esphome.components import binary_sensor, esp32_ble, improv_base, output
from esphome.components import binary_sensor, esp32_ble, output
from esphome.components.esp32_ble import BTLoggers
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_ON_STATE, CONF_TRIGGER_ID

AUTO_LOAD = ["esp32_ble_server", "improv_base"]
AUTO_LOAD = ["esp32_ble_server"]
CODEOWNERS = ["@jesserockz"]
DEPENDENCIES = ["wifi", "esp32"]

@@ -20,7 +20,6 @@ CONF_ON_STOP = "on_stop"
CONF_STATUS_INDICATOR = "status_indicator"
CONF_WIFI_TIMEOUT = "wifi_timeout"


improv_ns = cg.esphome_ns.namespace("improv")
Error = improv_ns.enum("Error")
State = improv_ns.enum("State")

@@ -44,63 +43,55 @@ ESP32ImprovStoppedTrigger = esp32_improv_ns.class_(
)


CONFIG_SCHEMA = (
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(ESP32ImprovComponent),
            cv.Required(CONF_AUTHORIZER): cv.Any(
                cv.none, cv.use_id(binary_sensor.BinarySensor)
            ),
            cv.Optional(CONF_STATUS_INDICATOR): cv.use_id(output.BinaryOutput),
            cv.Optional(
                CONF_IDENTIFY_DURATION, default="10s"
            ): cv.positive_time_period_milliseconds,
            cv.Optional(
                CONF_AUTHORIZED_DURATION, default="1min"
            ): cv.positive_time_period_milliseconds,
            cv.Optional(
                CONF_WIFI_TIMEOUT, default="1min"
            ): cv.positive_time_period_milliseconds,
            cv.Optional(CONF_ON_PROVISIONED): automation.validate_automation(
                {
                    cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(
                        ESP32ImprovProvisionedTrigger
                    ),
                }
            ),
            cv.Optional(CONF_ON_PROVISIONING): automation.validate_automation(
                {
                    cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(
                        ESP32ImprovProvisioningTrigger
                    ),
                }
            ),
            cv.Optional(CONF_ON_START): automation.validate_automation(
                {
                    cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(
                        ESP32ImprovStartTrigger
                    ),
                }
            ),
            cv.Optional(CONF_ON_STATE): automation.validate_automation(
                {
                    cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(
                        ESP32ImprovStateTrigger
                    ),
                }
            ),
            cv.Optional(CONF_ON_STOP): automation.validate_automation(
                {
                    cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(
                        ESP32ImprovStoppedTrigger
                    ),
                }
            ),
        }
    )
    .extend(improv_base.IMPROV_SCHEMA)
    .extend(cv.COMPONENT_SCHEMA)
)
CONFIG_SCHEMA = cv.Schema(
    {
        cv.GenerateID(): cv.declare_id(ESP32ImprovComponent),
        cv.Required(CONF_AUTHORIZER): cv.Any(
            cv.none, cv.use_id(binary_sensor.BinarySensor)
        ),
        cv.Optional(CONF_STATUS_INDICATOR): cv.use_id(output.BinaryOutput),
        cv.Optional(
            CONF_IDENTIFY_DURATION, default="10s"
        ): cv.positive_time_period_milliseconds,
        cv.Optional(
            CONF_AUTHORIZED_DURATION, default="1min"
        ): cv.positive_time_period_milliseconds,
        cv.Optional(
            CONF_WIFI_TIMEOUT, default="1min"
        ): cv.positive_time_period_milliseconds,
        cv.Optional(CONF_ON_PROVISIONED): automation.validate_automation(
            {
                cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(
                    ESP32ImprovProvisionedTrigger
                ),
            }
        ),
        cv.Optional(CONF_ON_PROVISIONING): automation.validate_automation(
            {
                cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(
                    ESP32ImprovProvisioningTrigger
                ),
            }
        ),
        cv.Optional(CONF_ON_START): automation.validate_automation(
            {
                cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(ESP32ImprovStartTrigger),
            }
        ),
        cv.Optional(CONF_ON_STATE): automation.validate_automation(
            {
                cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(ESP32ImprovStateTrigger),
            }
        ),
        cv.Optional(CONF_ON_STOP): automation.validate_automation(
            {
                cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(
                    ESP32ImprovStoppedTrigger
                ),
            }
        ),
    }
).extend(cv.COMPONENT_SCHEMA)


async def to_code(config):

@@ -111,8 +102,7 @@ async def to_code(config):
    await cg.register_component(var, config)

    cg.add_define("USE_IMPROV")

    await improv_base.setup_improv_core(var, config)
    cg.add_library("improv/Improv", "1.2.4")

    cg.add(var.set_identify_duration(config[CONF_IDENTIFY_DURATION]))
    cg.add(var.set_authorized_duration(config[CONF_AUTHORIZED_DURATION]))
@@ -1,10 +1,10 @@
#include "esp32_improv_component.h"

#include "esphome/components/bytebuffer/bytebuffer.h"
#include "esphome/components/esp32_ble/ble.h"
#include "esphome/components/esp32_ble_server/ble_2902.h"
#include "esphome/core/application.h"
#include "esphome/core/log.h"
#include "esphome/components/bytebuffer/bytebuffer.h"

#ifdef USE_ESP32

@@ -384,32 +384,17 @@ void ESP32ImprovComponent::check_wifi_connection_() {
    this->connecting_sta_ = {};
    this->cancel_timeout("wifi-connect-timeout");

    // Build URL list with minimal allocations
    // Maximum 3 URLs: custom next_url + ESPHOME_MY_LINK + webserver URL
    std::string url_strings[3];
    size_t url_count = 0;

    // Add next_url if configured (should be first per Improv BLE spec)
    std::string next_url = this->get_formatted_next_url_();
    if (!next_url.empty()) {
      url_strings[url_count++] = std::move(next_url);
    }

    // Add default URLs for backward compatibility
    url_strings[url_count++] = ESPHOME_MY_LINK;
    std::vector<std::string> urls = {ESPHOME_MY_LINK};
#ifdef USE_WEBSERVER
    for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) {
      if (ip.is_ip4()) {
        char url_buffer[64];
        snprintf(url_buffer, sizeof(url_buffer), "http://%s:%d", ip.str().c_str(), USE_WEBSERVER_PORT);
        url_strings[url_count++] = url_buffer;
        std::string webserver_url = "http://" + ip.str() + ":" + to_string(USE_WEBSERVER_PORT);
        urls.push_back(webserver_url);
        break;
      }
    }
#endif
    // Pass to build_rpc_response using vector constructor from iterators to avoid extra copies
    std::vector<uint8_t> data = improv::build_rpc_response(
        improv::WIFI_SETTINGS, std::vector<std::string>(url_strings, url_strings + url_count));
    std::vector<uint8_t> data = improv::build_rpc_response(improv::WIFI_SETTINGS, urls);
    this->send_response_(data);
  } else if (this->is_active() && this->state_ != improv::STATE_PROVISIONED) {
    ESP_LOGD(TAG, "WiFi provisioned externally");

@@ -7,7 +7,6 @@

#include "esphome/components/esp32_ble_server/ble_characteristic.h"
#include "esphome/components/esp32_ble_server/ble_server.h"
#include "esphome/components/improv_base/improv_base.h"
#include "esphome/components/wifi/wifi_component.h"

#ifdef USE_ESP32_IMPROV_STATE_CALLBACK

@@ -33,7 +32,7 @@ namespace esp32_improv {

using namespace esp32_ble_server;

class ESP32ImprovComponent : public Component, public improv_base::ImprovBase {
class ESP32ImprovComponent : public Component {
 public:
  ESP32ImprovComponent();
  void dump_config() override;

@@ -190,9 +190,7 @@ async def to_code(config):
    cg.add_define("ESPHOME_VARIANT", "ESP8266")
    cg.add_define(ThreadModel.SINGLE)

    cg.add_platformio_option(
        "extra_scripts", ["pre:testing_mode.py", "post:post_build.py"]
    )
    cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"])

    conf = config[CONF_FRAMEWORK]
    cg.add_platformio_option("framework", "arduino")

@@ -232,9 +230,9 @@ async def to_code(config):
    # For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;`
    cg.add_build_flag("-DNEW_OOM_ABORT")

    # In testing mode, fake larger memory to allow linking grouped component tests
    # Real ESP8266 hardware only has 32KB IRAM and ~80KB RAM, but for CI testing
    # we pretend it has much larger memory to test that components compile together
    # In testing mode, fake a larger IRAM to allow linking grouped component tests
    # Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB
    # This is done via a pre-build script that generates a custom linker script
    if CORE.testing_mode:
        cg.add_build_flag("-DESPHOME_TESTING_MODE")

@@ -273,8 +271,8 @@ def copy_files():
        post_build_file,
        CORE.relative_build_path("post_build.py"),
    )
    testing_mode_file = dir / "testing_mode.py.script"
    iram_fix_file = dir / "iram_fix.py.script"
    copy_file_if_changed(
        testing_mode_file,
        CORE.relative_build_path("testing_mode.py"),
        iram_fix_file,
        CORE.relative_build_path("iram_fix.py"),
    )
44
esphome/components/esp8266/iram_fix.py.script
Normal file
@@ -0,0 +1,44 @@
import os
import re

# pylint: disable=E0602
Import("env") # noqa


def patch_linker_script_after_preprocess(source, target, env):
    """Patch the local linker script after PlatformIO preprocesses it."""
    # Check if we're in testing mode by looking for the define
    build_flags = env.get("BUILD_FLAGS", [])
    testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)

    if not testing_mode:
        return

    # Get the local linker script path
    build_dir = env.subst("$BUILD_DIR")
    local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld")

    if not os.path.exists(local_ld):
        return

    # Read the linker script
    with open(local_ld, "r") as f:
        content = f.read()

    # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB)
    # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000
    updated = re.sub(
        r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000",
        r"\g<1>0x200000",
        content,
    )

    if updated != content:
        with open(local_ld, "w") as f:
            f.write(updated)
        print("ESPHome: Patched IRAM size to 2MB for testing mode")


# Hook into the build process right before linking
# This runs after PlatformIO has already preprocessed the linker scripts
env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess)

@@ -1,166 +0,0 @@
import os
import re

# pylint: disable=E0602
Import("env") # noqa


# Memory sizes for testing mode (allow larger builds for CI component grouping)
TESTING_IRAM_SIZE = "0x200000" # 2MB
TESTING_DRAM_SIZE = "0x200000" # 2MB
TESTING_FLASH_SIZE = "0x2000000" # 32MB


def patch_segment_size(content, segment_name, new_size, label):
    """Patch a memory segment's length in linker script.

    Args:
        content: Linker script content
        segment_name: Name of the segment (e.g., 'iram1_0_seg')
        new_size: New size as hex string (e.g., '0x200000')
        label: Human-readable label for logging (e.g., 'IRAM')

    Returns:
        Tuple of (patched_content, was_patched)
    """
    # Match: segment_name : org = 0x..., len = 0x...
    pattern = rf"({segment_name}\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+"
    new_content = re.sub(pattern, rf"\g<1>{new_size}", content)
    return new_content, new_content != content


def apply_memory_patches(content):
    """Apply IRAM, DRAM, and Flash patches to linker script content.

    Args:
        content: Linker script content as string

    Returns:
        Patched content as string
    """
    patches_applied = []

    # Patch IRAM (for larger code in IRAM)
    content, patched = patch_segment_size(content, "iram1_0_seg", TESTING_IRAM_SIZE, "IRAM")
    if patched:
        patches_applied.append("IRAM")

    # Patch DRAM (for larger BSS/data sections)
    content, patched = patch_segment_size(content, "dram0_0_seg", TESTING_DRAM_SIZE, "DRAM")
    if patched:
        patches_applied.append("DRAM")

    # Patch Flash (for larger code sections)
    content, patched = patch_segment_size(content, "irom0_0_seg", TESTING_FLASH_SIZE, "Flash")
    if patched:
        patches_applied.append("Flash")

    if patches_applied:
        iram_mb = int(TESTING_IRAM_SIZE, 16) // (1024 * 1024)
        dram_mb = int(TESTING_DRAM_SIZE, 16) // (1024 * 1024)
        flash_mb = int(TESTING_FLASH_SIZE, 16) // (1024 * 1024)
        print(f"  Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: {iram_mb}MB, Flash: {flash_mb}MB)")

    return content


def patch_linker_script_file(filepath, description):
    """Patch a linker script file in the build directory with enlarged memory segments.

    This function modifies linker scripts in the build directory only (never SDK files).
    It patches IRAM, DRAM, and Flash segments to allow larger builds in testing mode.

    Args:
        filepath: Path to the linker script file in the build directory
        description: Human-readable description for logging

    Returns:
        True if the file was patched, False if already patched or not found
    """
    if not os.path.exists(filepath):
        print(f"ESPHome: {description} not found at {filepath}")
        return False

    print(f"ESPHome: Patching {description}...")
    with open(filepath, "r") as f:
        content = f.read()

    patched_content = apply_memory_patches(content)

    if patched_content != content:
        with open(filepath, "w") as f:
            f.write(patched_content)
        print(f"ESPHome: Successfully patched {description}")
        return True
    else:
        print(f"ESPHome: {description} already patched or no changes needed")
        return False


def patch_local_linker_script(source, target, env):
    """Patch the local.eagle.app.v6.common.ld in build directory.

    This patches the preprocessed linker script that PlatformIO creates in the build
    directory, enlarging IRAM, DRAM, and Flash segments for testing mode.

    Args:
        source: SCons source nodes
        target: SCons target nodes
        env: SCons environment
    """
    # Check if we're in testing mode
    build_flags = env.get("BUILD_FLAGS", [])
    testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)

    if not testing_mode:
        return

    # Patch the local linker script if it exists
    build_dir = env.subst("$BUILD_DIR")
    ld_dir = os.path.join(build_dir, "ld")
    if os.path.exists(ld_dir):
        local_ld = os.path.join(ld_dir, "local.eagle.app.v6.common.ld")
        if os.path.exists(local_ld):
            patch_linker_script_file(local_ld, "local.eagle.app.v6.common.ld")


# Check if we're in testing mode
build_flags = env.get("BUILD_FLAGS", [])
testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)

if testing_mode:
    # Create a custom linker script in the build directory with patched memory limits
    # This allows larger IRAM/DRAM/Flash for CI component grouping tests
    build_dir = env.subst("$BUILD_DIR")
    ldscript = env.GetProjectOption("board_build.ldscript", "")
    assert ldscript, "No linker script configured in board_build.ldscript"

    framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266")
    assert framework_dir is not None, "Could not find framework-arduinoespressif8266 package"

    # Read the original SDK linker script (read-only, SDK is never modified)
    sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript)
    # Create a custom version in the build directory (isolated, temporary)
    custom_ld = os.path.join(build_dir, f"testing_{ldscript}")

    if os.path.exists(sdk_ld) and not os.path.exists(custom_ld):
        # Read the SDK linker script
        with open(sdk_ld, "r") as f:
            content = f.read()

        # Apply memory patches (IRAM: 2MB, DRAM: 2MB, Flash: 32MB)
        patched_content = apply_memory_patches(content)

        # Write the patched linker script to the build directory
        with open(custom_ld, "w") as f:
            f.write(patched_content)

        print(f"ESPHome: Created custom linker script: {custom_ld}")

    # Tell the linker to use our custom script from the build directory
    assert os.path.exists(custom_ld), f"Custom linker script not found: {custom_ld}"
    env.Replace(LDSCRIPT_PATH=custom_ld)
    print(f"ESPHome: Using custom linker script with patched memory limits")

# Also patch local.eagle.app.v6.common.ld after PlatformIO creates it
env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_local_linker_script)
@@ -103,16 +103,7 @@ def ota_esphome_final_validate(config):
    )


def _consume_ota_sockets(config: ConfigType) -> ConfigType:
    """Register socket needs for OTA component."""
    from esphome.components import socket

    # OTA needs 1 listening socket (client connections are temporary during updates)
    socket.consume_sockets(1, "ota")(config)
    return config


CONFIG_SCHEMA = cv.All(
CONFIG_SCHEMA = (
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(ESPHomeOTAComponent),

@@ -139,8 +130,7 @@ CONFIG_SCHEMA = cv.All(
        }
    )
    .extend(BASE_OTA_SCHEMA)
    .extend(cv.COMPONENT_SCHEMA),
    _consume_ota_sockets,
    .extend(cv.COMPONENT_SCHEMA)
)

FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate
@@ -14,13 +14,13 @@ template<typename... Ts> class SendAction : public Action<Ts...>, public Parente
  TEMPLATABLE_VALUE(std::vector<uint8_t>, data);

 public:
  void add_on_sent(const std::initializer_list<Action<Ts...> *> &actions) {
  void add_on_sent(const std::vector<Action<Ts...> *> &actions) {
    this->sent_.add_actions(actions);
    if (this->flags_.wait_for_sent) {
      this->sent_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
    }
  }
  void add_on_error(const std::initializer_list<Action<Ts...> *> &actions) {
  void add_on_error(const std::vector<Action<Ts...> *> &actions) {
    this->error_.add_actions(actions);
    if (this->flags_.wait_for_sent) {
      this->error_.add_action(new LambdaAction<Ts...>([this](Ts... x) {
@@ -1,39 +0,0 @@
"""ESP-NOW transport platform for packet_transport component."""

import esphome.codegen as cg
from esphome.components.packet_transport import (
    PacketTransport,
    new_packet_transport,
    transport_schema,
)
import esphome.config_validation as cv
from esphome.core import HexInt
from esphome.cpp_types import PollingComponent

from .. import ESPNowComponent, espnow_ns

CODEOWNERS = ["@EasilyBoredEngineer"]
DEPENDENCIES = ["espnow"]

ESPNowTransport = espnow_ns.class_("ESPNowTransport", PacketTransport, PollingComponent)

CONF_ESPNOW_ID = "espnow_id"
CONF_PEER_ADDRESS = "peer_address"

CONFIG_SCHEMA = transport_schema(ESPNowTransport).extend(
    {
        cv.GenerateID(CONF_ESPNOW_ID): cv.use_id(ESPNowComponent),
        cv.Optional(CONF_PEER_ADDRESS, default="FF:FF:FF:FF:FF:FF"): cv.mac_address,
    }
)


async def to_code(config):
    """Set up the ESP-NOW transport component."""
    var, _ = await new_packet_transport(config)

    await cg.register_parented(var, config[CONF_ESPNOW_ID])

    # Set peer address - convert MAC to parts array like ESP-NOW does
    mac = config[CONF_PEER_ADDRESS]
    cg.add(var.set_peer_address([HexInt(x) for x in mac.parts]))

@@ -1,97 +0,0 @@
#include "espnow_transport.h"

#ifdef USE_ESP32

#include "esphome/core/application.h"
#include "esphome/core/log.h"

namespace esphome {
namespace espnow {

static const char *const TAG = "espnow.transport";

bool ESPNowTransport::should_send() { return this->parent_ != nullptr && !this->parent_->is_failed(); }

void ESPNowTransport::setup() {
  packet_transport::PacketTransport::setup();

  if (this->parent_ == nullptr) {
    ESP_LOGE(TAG, "ESPNow component not set");
    this->mark_failed();
    return;
  }

  ESP_LOGI(TAG, "Registering ESP-NOW handlers");
  ESP_LOGI(TAG, "Peer address: %02X:%02X:%02X:%02X:%02X:%02X", this->peer_address_[0], this->peer_address_[1],
           this->peer_address_[2], this->peer_address_[3], this->peer_address_[4], this->peer_address_[5]);

  // Register received handler
  this->parent_->register_received_handler(static_cast<ESPNowReceivedPacketHandler *>(this));

  // Register broadcasted handler
  this->parent_->register_broadcasted_handler(static_cast<ESPNowBroadcastedHandler *>(this));
}

void ESPNowTransport::update() {
  packet_transport::PacketTransport::update();
  this->updated_ = true;
}

void ESPNowTransport::send_packet(const std::vector<uint8_t> &buf) const {
  if (this->parent_ == nullptr) {
    ESP_LOGE(TAG, "ESPNow component not set");
    return;
  }

  if (buf.empty()) {
    ESP_LOGW(TAG, "Attempted to send empty packet");
    return;
  }

  if (buf.size() > ESP_NOW_MAX_DATA_LEN) {
    ESP_LOGE(TAG, "Packet too large: %zu bytes (max %d)", buf.size(), ESP_NOW_MAX_DATA_LEN);
    return;
  }

  // Send to configured peer address
  this->parent_->send(this->peer_address_.data(), buf.data(), buf.size(), [](esp_err_t err) {
    if (err != ESP_OK) {
      ESP_LOGW(TAG, "Send failed: %d", err);
    }
  });
}

bool ESPNowTransport::on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) {
  ESP_LOGV(TAG, "Received packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0],
           info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]);

  if (data == nullptr || size == 0) {
    ESP_LOGW(TAG, "Received empty or null packet");
    return false;
  }

  this->packet_buffer_.resize(size);
  memcpy(this->packet_buffer_.data(), data, size);
  this->process_(this->packet_buffer_);
  return false;  // Allow other handlers to run
}

bool ESPNowTransport::on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) {
  ESP_LOGV(TAG, "Received broadcast packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0],
           info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]);

  if (data == nullptr || size == 0) {
    ESP_LOGW(TAG, "Received empty or null broadcast packet");
    return false;
  }

  this->packet_buffer_.resize(size);
  memcpy(this->packet_buffer_.data(), data, size);
  this->process_(this->packet_buffer_);
  return false;  // Allow other handlers to run
}

} // namespace espnow
} // namespace esphome

#endif // USE_ESP32

@@ -1,44 +0,0 @@
#pragma once

#include "../espnow_component.h"

#ifdef USE_ESP32

#include "esphome/core/component.h"
#include "esphome/components/packet_transport/packet_transport.h"

#include <vector>

namespace esphome {
namespace espnow {

class ESPNowTransport : public packet_transport::PacketTransport,
                        public Parented<ESPNowComponent>,
                        public ESPNowReceivedPacketHandler,
                        public ESPNowBroadcastedHandler {
 public:
  void setup() override;
  void update() override;
  float get_setup_priority() const override { return setup_priority::AFTER_WIFI; }

  void set_peer_address(peer_address_t address) {
    memcpy(this->peer_address_.data(), address.data(), ESP_NOW_ETH_ALEN);
  }

  // ESPNow handler interface
  bool on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override;
  bool on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override;

 protected:
  void send_packet(const std::vector<uint8_t> &buf) const override;
  size_t get_max_packet_size() override { return ESP_NOW_MAX_DATA_LEN; }
  bool should_send() override;

  peer_address_t peer_address_{{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}};
  std::vector<uint8_t> packet_buffer_;
};

} // namespace espnow
} // namespace esphome

#endif // USE_ESP32
@@ -38,6 +38,7 @@ IS_PLATFORM_COMPONENT = True

fan_ns = cg.esphome_ns.namespace("fan")
Fan = fan_ns.class_("Fan", cg.EntityBase)
FanState = fan_ns.class_("Fan", Fan, cg.Component)

FanDirection = fan_ns.enum("FanDirection", is_class=True)
FAN_DIRECTION_ENUM = {

@@ -1,8 +1,8 @@
#pragma once

#include "esphome/core/automation.h"
#include "esphome/core/component.h"
#include "fan.h"
#include "esphome/core/automation.h"
#include "fan_state.h"

namespace esphome {
namespace fan {

16
esphome/components/fan/fan_state.cpp
Normal file
@@ -0,0 +1,16 @@
#include "fan_state.h"

namespace esphome {
namespace fan {

static const char *const TAG = "fan";

void FanState::setup() {
  auto restore = this->restore_state_();
  if (restore)
    restore->to_call(*this).perform();
}
float FanState::get_setup_priority() const { return setup_priority::DATA - 1.0f; }

} // namespace fan
} // namespace esphome

34
esphome/components/fan/fan_state.h
Normal file
@@ -0,0 +1,34 @@
#pragma once

#include "esphome/core/component.h"
#include "fan.h"

namespace esphome {
namespace fan {

enum ESPDEPRECATED("LegacyFanDirection members are deprecated, use FanDirection instead.",
                   "2022.2") LegacyFanDirection {
  FAN_DIRECTION_FORWARD = 0,
  FAN_DIRECTION_REVERSE = 1
};

class ESPDEPRECATED("FanState is deprecated, use Fan instead.", "2022.2") FanState : public Fan, public Component {
 public:
  FanState() = default;

  /// Get the traits of this fan.
  FanTraits get_traits() override { return this->traits_; }
  /// Set the traits of this fan (i.e. what features it supports).
  void set_traits(const FanTraits &traits) { this->traits_ = traits; }

  void setup() override;
  float get_setup_priority() const override;

 protected:
  void control(const FanCall &call) override { this->publish_state(); }

  FanTraits traits_{};
};

} // namespace fan
} // namespace esphome
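
Note: fan_state.h comes back only as a deprecated compatibility shim over Fan. A hedged sketch of what new code is expected to do instead, built only from the overrides FanState itself uses above (MyFan is an illustrative name, not part of this changeset):

// Hedged sketch: subclass Fan directly rather than the deprecated FanState.
class MyFan : public Fan, public Component {
 public:
  FanTraits get_traits() override { return this->traits_; }

 protected:
  // apply the requested state to hardware here, then report it back
  void control(const FanCall &call) override { this->publish_state(); }

  FanTraits traits_{};
};
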
@@ -65,7 +65,7 @@ HaierClimateBase::HaierClimateBase()
|
||||
{climate::CLIMATE_FAN_AUTO, climate::CLIMATE_FAN_LOW, climate::CLIMATE_FAN_MEDIUM, climate::CLIMATE_FAN_HIGH});
|
||||
this->traits_.set_supported_swing_modes({climate::CLIMATE_SWING_OFF, climate::CLIMATE_SWING_BOTH,
|
||||
climate::CLIMATE_SWING_VERTICAL, climate::CLIMATE_SWING_HORIZONTAL});
|
||||
this->traits_.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
this->traits_.set_supports_current_temperature(true);
|
||||
}
|
||||
|
||||
HaierClimateBase::~HaierClimateBase() {}
|
||||
|
||||
@@ -16,8 +16,7 @@ void HDC1080Component::setup() {
|
||||
|
||||
// if configuration fails - there is a problem
|
||||
if (this->write_register(HDC1080_CMD_CONFIGURATION, config, 2) != i2c::ERROR_OK) {
|
||||
ESP_LOGW(TAG, "Failed to configure HDC1080");
|
||||
this->status_set_warning();
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -107,7 +107,7 @@ void IDFI2CBus::dump_config() {
|
||||
if (s.second) {
|
||||
ESP_LOGCONFIG(TAG, "Found device at address 0x%02X", s.first);
|
||||
} else {
|
||||
ESP_LOGCONFIG(TAG, "Unknown error at address 0x%02X", s.first);
|
||||
ESP_LOGE(TAG, "Unknown error at address 0x%02X", s.first);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,42 +6,31 @@
namespace esphome {
namespace improv_base {

static constexpr const char DEVICE_NAME_PLACEHOLDER[] = "{{device_name}}";
static constexpr size_t DEVICE_NAME_PLACEHOLDER_LEN = sizeof(DEVICE_NAME_PLACEHOLDER) - 1;
static constexpr const char IP_ADDRESS_PLACEHOLDER[] = "{{ip_address}}";
static constexpr size_t IP_ADDRESS_PLACEHOLDER_LEN = sizeof(IP_ADDRESS_PLACEHOLDER) - 1;

static void replace_all_in_place(std::string &str, const char *placeholder, size_t placeholder_len,
const std::string &replacement) {
size_t pos = 0;
const size_t replacement_len = replacement.length();
while ((pos = str.find(placeholder, pos)) != std::string::npos) {
str.replace(pos, placeholder_len, replacement);
pos += replacement_len;
}
}

std::string ImprovBase::get_formatted_next_url_() {
if (this->next_url_.empty()) {
return "";
}
std::string copy = this->next_url_;
// Device name
std::size_t pos = this->next_url_.find("{{device_name}}");
if (pos != std::string::npos) {
const std::string &device_name = App.get_name();
copy.replace(pos, 15, device_name);
}

std::string formatted_url = this->next_url_;

// Replace all occurrences of {{device_name}}
replace_all_in_place(formatted_url, DEVICE_NAME_PLACEHOLDER, DEVICE_NAME_PLACEHOLDER_LEN, App.get_name());

// Replace all occurrences of {{ip_address}}
for (auto &ip : network::get_ip_addresses()) {
if (ip.is_ip4()) {
replace_all_in_place(formatted_url, IP_ADDRESS_PLACEHOLDER, IP_ADDRESS_PLACEHOLDER_LEN, ip.str());
break;
// Ip address
pos = this->next_url_.find("{{ip_address}}");
if (pos != std::string::npos) {
for (auto &ip : network::get_ip_addresses()) {
if (ip.is_ip4()) {
std::string ipa = ip.str();
copy.replace(pos, 14, ipa);
break;
}
}
}

// Note: {{esphome_version}} is replaced at code generation time in Python

return formatted_url;
return copy;
}

} // namespace improv_base
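The hunk above swaps the single find/replace on a copied string for the replace_all_in_place helper, which substitutes every occurrence of a placeholder. A minimal standalone sketch of that pattern, using only the standard library (the names and the example URL below are illustrative, not part of the component):

#include <string>

// Replace every occurrence of `placeholder` inside `str` with `replacement`.
// Advancing `pos` by the replacement length avoids rescanning freshly inserted text
// (and avoids an infinite loop when the replacement itself contains the placeholder).
static void replace_all_in_place(std::string &str, const std::string &placeholder, const std::string &replacement) {
  size_t pos = 0;
  while ((pos = str.find(placeholder, pos)) != std::string::npos) {
    str.replace(pos, placeholder.size(), replacement);
    pos += replacement.size();
  }
}

// Usage sketch: "http://{{device_name}}.local/{{device_name}}" -> "http://kitchen.local/kitchen"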
@@ -56,7 +56,7 @@ void MCP23016::pin_mode(uint8_t pin, gpio::Flags flags) {
this->update_reg_(pin, false, iodir);
}
}
float MCP23016::get_setup_priority() const { return setup_priority::IO; }
float MCP23016::get_setup_priority() const { return setup_priority::HARDWARE; }
bool MCP23016::read_reg_(uint8_t reg, uint8_t *value) {
if (this->is_failed())
return false;

@@ -13,7 +13,6 @@ from esphome.const import (
|
||||
)
|
||||
from esphome.core import CORE, Lambda, coroutine_with_priority
|
||||
from esphome.coroutine import CoroPriority
|
||||
from esphome.types import ConfigType
|
||||
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
DEPENDENCIES = ["network"]
|
||||
@@ -47,19 +46,6 @@ SERVICE_SCHEMA = cv.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _consume_mdns_sockets(config: ConfigType) -> ConfigType:
|
||||
"""Register socket needs for mDNS component."""
|
||||
if config.get(CONF_DISABLED):
|
||||
return config
|
||||
|
||||
from esphome.components import socket
|
||||
|
||||
# mDNS needs 2 sockets (IPv4 + IPv6 multicast)
|
||||
socket.consume_sockets(2, "mdns")(config)
|
||||
return config
|
||||
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
@@ -69,7 +55,6 @@ CONFIG_SCHEMA = cv.All(
|
||||
}
|
||||
),
|
||||
_remove_id_if_disabled,
|
||||
_consume_mdns_sockets,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -77,7 +77,7 @@ void AirConditioner::control(const ClimateCall &call) {

ClimateTraits AirConditioner::traits() {
auto traits = ClimateTraits();
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
traits.set_supports_current_temperature(true);
traits.set_visual_min_temperature(17);
traits.set_visual_max_temperature(30);
traits.set_visual_temperature_step(0.5);

@@ -30,19 +30,6 @@ wave_4_3 = DriverChip(
|
||||
"blue": [14, 38, 18, 17, 10],
|
||||
},
|
||||
)
|
||||
|
||||
wave_4_3.extend(
|
||||
"WAVESHARE-5-1024X600",
|
||||
width=1024,
|
||||
height=600,
|
||||
hsync_back_porch=145,
|
||||
hsync_front_porch=170,
|
||||
hsync_pulse_width=30,
|
||||
vsync_back_porch=23,
|
||||
vsync_front_porch=12,
|
||||
vsync_pulse_width=2,
|
||||
)
|
||||
|
||||
wave_4_3.extend(
|
||||
"ESP32-S3-TOUCH-LCD-7-800X480",
|
||||
enable_pin=[{"ch422g": None, "number": 2}, {"ch422g": None, "number": 6}],
|
||||
|
||||
@@ -52,9 +52,8 @@ const uint8_t MITSUBISHI_BYTE16 = 0x00;

climate::ClimateTraits MitsubishiClimate::traits() {
auto traits = climate::ClimateTraits();
if (this->sensor_ != nullptr) {
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
}
traits.set_supports_current_temperature(this->sensor_ != nullptr);
traits.set_supports_action(false);
traits.set_visual_min_temperature(MITSUBISHI_TEMP_MIN);
traits.set_visual_max_temperature(MITSUBISHI_TEMP_MAX);
traits.set_visual_temperature_step(1.0f);

@@ -58,7 +58,6 @@ from esphome.const import (
|
||||
PlatformFramework,
|
||||
)
|
||||
from esphome.core import CORE, CoroPriority, coroutine_with_priority
|
||||
from esphome.types import ConfigType
|
||||
|
||||
DEPENDENCIES = ["network"]
|
||||
|
||||
@@ -211,15 +210,6 @@ def validate_fingerprint(value):
|
||||
return value
|
||||
|
||||
|
||||
def _consume_mqtt_sockets(config: ConfigType) -> ConfigType:
|
||||
"""Register socket needs for MQTT component."""
|
||||
from esphome.components import socket
|
||||
|
||||
# MQTT needs 1 socket for the broker connection
|
||||
socket.consume_sockets(1, "mqtt")(config)
|
||||
return config
|
||||
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
@@ -316,7 +306,6 @@ CONFIG_SCHEMA = cv.All(
|
||||
),
|
||||
validate_config,
|
||||
cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266, PLATFORM_BK72XX]),
|
||||
_consume_mqtt_sockets,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
#ifdef USE_MQTT
#ifdef USE_FAN

#include "esphome/components/fan/fan.h"
#include "esphome/components/fan/fan_state.h"
#include "mqtt_component.h"

namespace esphome {

@@ -69,12 +69,6 @@ void MQTTJSONLightComponent::send_discovery(JsonObject root, mqtt::SendDiscovery
|
||||
if (traits.supports_color_capability(ColorCapability::BRIGHTNESS))
|
||||
root["brightness"] = true;
|
||||
|
||||
if (traits.supports_color_mode(ColorMode::COLOR_TEMPERATURE) ||
|
||||
traits.supports_color_mode(ColorMode::COLD_WARM_WHITE)) {
|
||||
root[MQTT_MIN_MIREDS] = traits.get_min_mireds();
|
||||
root[MQTT_MAX_MIREDS] = traits.get_max_mireds();
|
||||
}
|
||||
|
||||
if (this->state_->supports_effects()) {
|
||||
root["effect"] = true;
|
||||
JsonArray effect_list = root[MQTT_EFFECT_LIST].to<JsonArray>();
|
||||
|
||||
@@ -54,10 +54,11 @@ void PIDClimate::control(const climate::ClimateCall &call) {
|
||||
}
|
||||
climate::ClimateTraits PIDClimate::traits() {
|
||||
auto traits = climate::ClimateTraits();
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION);
|
||||
traits.set_supports_current_temperature(true);
|
||||
traits.set_supports_two_point_target_temperature(false);
|
||||
|
||||
if (this->humidity_sensor_ != nullptr)
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
|
||||
traits.set_supports_current_humidity(true);
|
||||
|
||||
traits.set_supported_modes({climate::CLIMATE_MODE_OFF});
|
||||
if (supports_cool_())
|
||||
@@ -67,6 +68,7 @@ climate::ClimateTraits PIDClimate::traits() {
|
||||
if (supports_heat_() && supports_cool_())
|
||||
traits.add_supported_mode(climate::CLIMATE_MODE_HEAT_COOL);
|
||||
|
||||
traits.set_supports_action(true);
|
||||
return traits;
|
||||
}
|
||||
void PIDClimate::dump_config() {
|
||||
|
||||
@@ -62,7 +62,7 @@ CONF_WARNING_MPPT_OVERLOAD = "warning_mppt_overload"
|
||||
CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE = "warning_battery_too_low_to_charge"
|
||||
CONF_FAULT_DC_DC_OVER_CURRENT = "fault_dc_dc_over_current"
|
||||
CONF_FAULT_CODE = "fault_code"
|
||||
CONF_WARNING_LOW_PV_ENERGY = "warning_low_pv_energy"
|
||||
CONF_WARNUNG_LOW_PV_ENERGY = "warnung_low_pv_energy"
|
||||
CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START = (
|
||||
"warning_high_ac_input_during_bus_soft_start"
|
||||
)
|
||||
@@ -122,7 +122,7 @@ TYPES = [
|
||||
CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE,
|
||||
CONF_FAULT_DC_DC_OVER_CURRENT,
|
||||
CONF_FAULT_CODE,
|
||||
CONF_WARNING_LOW_PV_ENERGY,
|
||||
CONF_WARNUNG_LOW_PV_ENERGY,
|
||||
CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START,
|
||||
CONF_WARNING_BATTERY_EQUALIZATION,
|
||||
]
|
||||
|
||||
@@ -13,7 +13,7 @@ void PipsolarOutput::write_state(float state) {

if (std::find(this->possible_values_.begin(), this->possible_values_.end(), state) != this->possible_values_.end()) {
ESP_LOGD(TAG, "Will write: %s out of value %f / %02.0f", tmp, state, state);
this->parent_->queue_command(std::string(tmp));
this->parent_->switch_command(std::string(tmp));
} else {
ESP_LOGD(TAG, "Will not write: %s as it is not in list of allowed values", tmp);
}

File diff suppressed because it is too large
@@ -7,7 +7,6 @@
|
||||
#include "esphome/components/uart/uart.h"
|
||||
#include "esphome/core/automation.h"
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace pipsolar {
|
||||
@@ -29,17 +28,10 @@ struct PollingCommand {
|
||||
bool needs_update;
|
||||
};
|
||||
|
||||
struct QFLAGValues {
|
||||
esphome::optional<bool> silence_buzzer_open_buzzer;
|
||||
esphome::optional<bool> overload_bypass_function;
|
||||
esphome::optional<bool> lcd_escape_to_default;
|
||||
esphome::optional<bool> overload_restart_function;
|
||||
esphome::optional<bool> over_temperature_restart_function;
|
||||
esphome::optional<bool> backlight_on;
|
||||
esphome::optional<bool> alarm_on_when_primary_source_interrupt;
|
||||
esphome::optional<bool> fault_code_record;
|
||||
esphome::optional<bool> power_saving;
|
||||
};
|
||||
#define PIPSOLAR_VALUED_ENTITY_(type, name, polling_command, value_type) \
|
||||
protected: \
|
||||
value_type value_##name##_; \
|
||||
PIPSOLAR_ENTITY_(type, name, polling_command)
|
||||
|
||||
#define PIPSOLAR_ENTITY_(type, name, polling_command) \
|
||||
protected: \
|
||||
@@ -51,123 +43,126 @@ struct QFLAGValues {
|
||||
this->add_polling_command_(#polling_command, POLLING_##polling_command); \
|
||||
}
|
||||
|
||||
#define PIPSOLAR_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(sensor::Sensor, name, polling_command)
|
||||
#define PIPSOLAR_SENSOR(name, polling_command, value_type) \
|
||||
PIPSOLAR_VALUED_ENTITY_(sensor::Sensor, name, polling_command, value_type)
|
||||
#define PIPSOLAR_SWITCH(name, polling_command) PIPSOLAR_ENTITY_(switch_::Switch, name, polling_command)
|
||||
#define PIPSOLAR_BINARY_SENSOR(name, polling_command) \
|
||||
PIPSOLAR_ENTITY_(binary_sensor::BinarySensor, name, polling_command)
|
||||
#define PIPSOLAR_BINARY_SENSOR(name, polling_command, value_type) \
|
||||
PIPSOLAR_VALUED_ENTITY_(binary_sensor::BinarySensor, name, polling_command, value_type)
|
||||
#define PIPSOLAR_VALUED_TEXT_SENSOR(name, polling_command, value_type) \
|
||||
PIPSOLAR_VALUED_ENTITY_(text_sensor::TextSensor, name, polling_command, value_type)
|
||||
#define PIPSOLAR_TEXT_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(text_sensor::TextSensor, name, polling_command)
|
||||
|
||||
class Pipsolar : public uart::UARTDevice, public PollingComponent {
|
||||
// QPIGS values
|
||||
PIPSOLAR_SENSOR(grid_voltage, QPIGS)
|
||||
PIPSOLAR_SENSOR(grid_frequency, QPIGS)
|
||||
PIPSOLAR_SENSOR(ac_output_voltage, QPIGS)
|
||||
PIPSOLAR_SENSOR(ac_output_frequency, QPIGS)
|
||||
PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS)
|
||||
PIPSOLAR_SENSOR(ac_output_active_power, QPIGS)
|
||||
PIPSOLAR_SENSOR(output_load_percent, QPIGS)
|
||||
PIPSOLAR_SENSOR(bus_voltage, QPIGS)
|
||||
PIPSOLAR_SENSOR(battery_voltage, QPIGS)
|
||||
PIPSOLAR_SENSOR(battery_charging_current, QPIGS)
|
||||
PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS)
|
||||
PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS)
|
||||
PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS)
|
||||
PIPSOLAR_SENSOR(pv_input_voltage, QPIGS)
|
||||
PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS)
|
||||
PIPSOLAR_SENSOR(battery_discharge_current, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(load_status, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS)
|
||||
PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS) //.1 scale
|
||||
PIPSOLAR_SENSOR(eeprom_version, QPIGS)
|
||||
PIPSOLAR_SENSOR(pv_charging_power, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS)
|
||||
PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS)
|
||||
PIPSOLAR_SENSOR(grid_voltage, QPIGS, float)
|
||||
PIPSOLAR_SENSOR(grid_frequency, QPIGS, float)
|
||||
PIPSOLAR_SENSOR(ac_output_voltage, QPIGS, float)
|
||||
PIPSOLAR_SENSOR(ac_output_frequency, QPIGS, float)
|
||||
PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS, int)
|
||||
PIPSOLAR_SENSOR(ac_output_active_power, QPIGS, int)
|
||||
PIPSOLAR_SENSOR(output_load_percent, QPIGS, int)
|
||||
PIPSOLAR_SENSOR(bus_voltage, QPIGS, int)
|
||||
PIPSOLAR_SENSOR(battery_voltage, QPIGS, float)
|
||||
PIPSOLAR_SENSOR(battery_charging_current, QPIGS, int)
|
||||
PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS, int)
|
||||
PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS, int)
|
||||
PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS, float)
|
||||
PIPSOLAR_SENSOR(pv_input_voltage, QPIGS, float)
|
||||
PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS, float)
|
||||
PIPSOLAR_SENSOR(battery_discharge_current, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(load_status, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS, int)
|
||||
PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS, int) //.1 scale
|
||||
PIPSOLAR_SENSOR(eeprom_version, QPIGS, int)
|
||||
PIPSOLAR_SENSOR(pv_charging_power, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS, int)
|
||||
|
||||
// QPIRI values
|
||||
PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI)
|
||||
PIPSOLAR_SENSOR(grid_rating_current, QPIRI)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI)
|
||||
PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI)
|
||||
PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI)
|
||||
PIPSOLAR_SENSOR(battery_under_voltage, QPIRI)
|
||||
PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI)
|
||||
PIPSOLAR_SENSOR(battery_float_voltage, QPIRI)
|
||||
PIPSOLAR_SENSOR(battery_type, QPIRI)
|
||||
PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI)
|
||||
PIPSOLAR_SENSOR(current_max_charging_current, QPIRI)
|
||||
PIPSOLAR_SENSOR(input_voltage_range, QPIRI)
|
||||
PIPSOLAR_SENSOR(output_source_priority, QPIRI)
|
||||
PIPSOLAR_SENSOR(charger_source_priority, QPIRI)
|
||||
PIPSOLAR_SENSOR(parallel_max_num, QPIRI)
|
||||
PIPSOLAR_SENSOR(machine_type, QPIRI)
|
||||
PIPSOLAR_SENSOR(topology, QPIRI)
|
||||
PIPSOLAR_SENSOR(output_mode, QPIRI)
|
||||
PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI)
|
||||
PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI)
|
||||
PIPSOLAR_SENSOR(pv_power_balance, QPIRI)
|
||||
PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(grid_rating_current, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(battery_under_voltage, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(battery_float_voltage, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(battery_type, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(current_max_charging_current, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(input_voltage_range, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(output_source_priority, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(charger_source_priority, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(parallel_max_num, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(machine_type, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(topology, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(output_mode, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI, float)
|
||||
PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI, int)
|
||||
PIPSOLAR_SENSOR(pv_power_balance, QPIRI, int)
|
||||
|
||||
// QMOD values
|
||||
PIPSOLAR_TEXT_SENSOR(device_mode, QMOD)
|
||||
PIPSOLAR_VALUED_TEXT_SENSOR(device_mode, QMOD, char)
|
||||
|
||||
// QFLAG values
|
||||
PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG)
|
||||
PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG)
|
||||
PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG)
|
||||
PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG)
|
||||
PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG)
|
||||
PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG)
|
||||
PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG)
|
||||
PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG)
|
||||
PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG, int)
|
||||
PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG, int)
|
||||
PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG, int)
|
||||
PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG, int)
|
||||
PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG, int)
|
||||
PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG, int)
|
||||
PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG, int)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG, int)
|
||||
PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG, int)
|
||||
|
||||
// QPIWS values
|
||||
PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_low_pv_energy, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS)
|
||||
PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS, int)
|
||||
PIPSOLAR_BINARY_SENSOR(warnung_low_pv_energy, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS, bool)
|
||||
PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS, bool)
|
||||
|
||||
PIPSOLAR_TEXT_SENSOR(last_qpigs, QPIGS)
|
||||
PIPSOLAR_TEXT_SENSOR(last_qpiri, QPIRI)
|
||||
@@ -185,14 +180,14 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
|
||||
PIPSOLAR_SWITCH(pv_ok_condition_for_parallel_switch, QPIRI)
|
||||
PIPSOLAR_SWITCH(pv_power_balance_switch, QPIRI)
|
||||
|
||||
void queue_command(const std::string &command);
|
||||
void switch_command(const std::string &command);
|
||||
void setup() override;
|
||||
void loop() override;
|
||||
void dump_config() override;
|
||||
void update() override;
|
||||
|
||||
protected:
|
||||
static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 128; // maximum supported answer length
|
||||
static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 110; // maximum supported answer length
|
||||
static const size_t COMMAND_QUEUE_LENGTH = 10;
|
||||
static const size_t COMMAND_TIMEOUT = 5000;
|
||||
static const size_t POLLING_COMMANDS_MAX = 15;
|
||||
@@ -203,26 +198,7 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
|
||||
uint16_t pipsolar_crc_(uint8_t *msg, uint8_t len);
|
||||
bool send_next_command_();
|
||||
bool send_next_poll_();
|
||||
|
||||
void handle_qpiri_(const char *message);
|
||||
void handle_qpigs_(const char *message);
|
||||
void handle_qmod_(const char *message);
|
||||
void handle_qflag_(const char *message);
|
||||
void handle_qpiws_(const char *message);
|
||||
void handle_qt_(const char *message);
|
||||
void handle_qmn_(const char *message);
|
||||
|
||||
void skip_start_(const char *message, size_t *pos);
|
||||
void skip_field_(const char *message, size_t *pos);
|
||||
std::string read_field_(const char *message, size_t *pos);
|
||||
|
||||
void read_float_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor);
|
||||
void read_int_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor);
|
||||
|
||||
void publish_binary_sensor_(esphome::optional<bool> b, binary_sensor::BinarySensor *sensor);
|
||||
|
||||
esphome::optional<bool> get_bit_(std::string bits, uint8_t bit_pos);
|
||||
|
||||
void queue_command_(const char *command, uint8_t length);
|
||||
std::string command_queue_[COMMAND_QUEUE_LENGTH];
|
||||
uint8_t command_queue_position_ = 0;
|
||||
uint8_t read_buffer_[PIPSOLAR_READ_BUFFER_LENGTH];
|
||||
@@ -237,10 +213,11 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
|
||||
STATE_POLL_COMPLETE = 3,
|
||||
STATE_COMMAND_COMPLETE = 4,
|
||||
STATE_POLL_CHECKED = 5,
|
||||
STATE_POLL_DECODED = 6,
|
||||
};
|
||||
|
||||
uint8_t last_polling_command_ = 0;
|
||||
PollingCommand enabled_polling_commands_[POLLING_COMMANDS_MAX];
|
||||
PollingCommand used_polling_commands_[POLLING_COMMANDS_MAX];
|
||||
};
|
||||
|
||||
} // namespace pipsolar
|
||||
|
||||
@@ -11,11 +11,11 @@ void PipsolarSwitch::dump_config() { LOG_SWITCH("", "Pipsolar Switch", this); }
void PipsolarSwitch::write_state(bool state) {
if (state) {
if (!this->on_command_.empty()) {
this->parent_->queue_command(this->on_command_);
this->parent_->switch_command(this->on_command_);
}
} else {
if (!this->off_command_.empty()) {
this->parent_->queue_command(this->off_command_);
this->parent_->switch_command(this->off_command_);
}
}
}

@@ -261,12 +261,9 @@ ThrottleAverageFilter = sensor_ns.class_("ThrottleAverageFilter", Filter, cg.Com
|
||||
LambdaFilter = sensor_ns.class_("LambdaFilter", Filter)
|
||||
OffsetFilter = sensor_ns.class_("OffsetFilter", Filter)
|
||||
MultiplyFilter = sensor_ns.class_("MultiplyFilter", Filter)
|
||||
ValueListFilter = sensor_ns.class_("ValueListFilter", Filter)
|
||||
FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", ValueListFilter)
|
||||
FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", Filter)
|
||||
ThrottleFilter = sensor_ns.class_("ThrottleFilter", Filter)
|
||||
ThrottleWithPriorityFilter = sensor_ns.class_(
|
||||
"ThrottleWithPriorityFilter", ValueListFilter
|
||||
)
|
||||
ThrottleWithPriorityFilter = sensor_ns.class_("ThrottleWithPriorityFilter", Filter)
|
||||
TimeoutFilter = sensor_ns.class_("TimeoutFilter", Filter, cg.Component)
|
||||
DebounceFilter = sensor_ns.class_("DebounceFilter", Filter, cg.Component)
|
||||
HeartbeatFilter = sensor_ns.class_("HeartbeatFilter", Filter, cg.Component)
|
||||
|
||||
@@ -228,40 +228,27 @@ MultiplyFilter::MultiplyFilter(TemplatableValue<float> multiplier) : multiplier_
|
||||
|
||||
optional<float> MultiplyFilter::new_value(float value) { return value * this->multiplier_.value(); }
|
||||
|
||||
// ValueListFilter (base class)
|
||||
ValueListFilter::ValueListFilter(std::initializer_list<TemplatableValue<float>> values) : values_(values) {}
|
||||
|
||||
bool ValueListFilter::value_matches_any_(float sensor_value) {
|
||||
int8_t accuracy = this->parent_->get_accuracy_decimals();
|
||||
float accuracy_mult = powf(10.0f, accuracy);
|
||||
float rounded_sensor = roundf(accuracy_mult * sensor_value);
|
||||
|
||||
for (auto &filter_value : this->values_) {
|
||||
float fv = filter_value.value();
|
||||
|
||||
// Handle NaN comparison
|
||||
if (std::isnan(fv)) {
|
||||
if (std::isnan(sensor_value))
|
||||
return true;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Compare rounded values
|
||||
if (roundf(accuracy_mult * fv) == rounded_sensor)
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
// FilterOutValueFilter
|
||||
FilterOutValueFilter::FilterOutValueFilter(std::initializer_list<TemplatableValue<float>> values_to_filter_out)
|
||||
: ValueListFilter(values_to_filter_out) {}
|
||||
FilterOutValueFilter::FilterOutValueFilter(std::vector<TemplatableValue<float>> values_to_filter_out)
|
||||
: values_to_filter_out_(std::move(values_to_filter_out)) {}
|
||||
|
||||
optional<float> FilterOutValueFilter::new_value(float value) {
|
||||
if (this->value_matches_any_(value))
|
||||
return {}; // Filter out
|
||||
return value; // Pass through
|
||||
int8_t accuracy = this->parent_->get_accuracy_decimals();
|
||||
float accuracy_mult = powf(10.0f, accuracy);
|
||||
for (auto filter_value : this->values_to_filter_out_) {
|
||||
if (std::isnan(filter_value.value())) {
|
||||
if (std::isnan(value)) {
|
||||
return {};
|
||||
}
|
||||
continue;
|
||||
}
|
||||
float rounded_filter_out = roundf(accuracy_mult * filter_value.value());
|
||||
float rounded_value = roundf(accuracy_mult * value);
|
||||
if (rounded_filter_out == rounded_value) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
// ThrottleFilter
|
||||
@@ -276,15 +263,33 @@ optional<float> ThrottleFilter::new_value(float value) {
|
||||
}
|
||||
|
||||
// ThrottleWithPriorityFilter
|
||||
ThrottleWithPriorityFilter::ThrottleWithPriorityFilter(
|
||||
uint32_t min_time_between_inputs, std::initializer_list<TemplatableValue<float>> prioritized_values)
|
||||
: ValueListFilter(prioritized_values), min_time_between_inputs_(min_time_between_inputs) {}
|
||||
ThrottleWithPriorityFilter::ThrottleWithPriorityFilter(uint32_t min_time_between_inputs,
|
||||
std::vector<TemplatableValue<float>> prioritized_values)
|
||||
: min_time_between_inputs_(min_time_between_inputs), prioritized_values_(std::move(prioritized_values)) {}
|
||||
|
||||
optional<float> ThrottleWithPriorityFilter::new_value(float value) {
|
||||
bool is_prioritized_value = false;
|
||||
int8_t accuracy = this->parent_->get_accuracy_decimals();
|
||||
float accuracy_mult = powf(10.0f, accuracy);
|
||||
const uint32_t now = App.get_loop_component_start_time();
|
||||
// Allow value through if: no previous input, time expired, or is prioritized
|
||||
if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ ||
|
||||
this->value_matches_any_(value)) {
|
||||
// First, determine if the new value is one of the prioritized values
|
||||
for (auto prioritized_value : this->prioritized_values_) {
|
||||
if (std::isnan(prioritized_value.value())) {
|
||||
if (std::isnan(value)) {
|
||||
is_prioritized_value = true;
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
float rounded_prioritized_value = roundf(accuracy_mult * prioritized_value.value());
|
||||
float rounded_value = roundf(accuracy_mult * value);
|
||||
if (rounded_prioritized_value == rounded_value) {
|
||||
is_prioritized_value = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// Finally, determine if the new value should be throttled and pass it through if not
|
||||
if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ || is_prioritized_value) {
|
||||
this->last_input_ = now;
|
||||
return value;
|
||||
}
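Both variants above compare the incoming reading against a configured value list after rounding to the sensor's accuracy, with a NaN entry matching a NaN reading. A minimal sketch of that comparison, assuming a plain list of floats rather than TemplatableValue:

#include <cmath>
#include <cstdint>
#include <vector>

// Returns true if `sensor_value` matches any entry in `values` once both sides are
// rounded to `accuracy_decimals` decimal places; a NaN entry matches a NaN reading.
bool value_matches_any(float sensor_value, const std::vector<float> &values, int8_t accuracy_decimals) {
  const float mult = powf(10.0f, accuracy_decimals);
  const float rounded_sensor = roundf(mult * sensor_value);
  for (float v : values) {
    if (std::isnan(v)) {
      if (std::isnan(sensor_value))
        return true;
      continue;
    }
    if (roundf(mult * v) == rounded_sensor)
      return true;
  }
  return false;
}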
|
||||
|
||||
@@ -317,28 +317,15 @@ class MultiplyFilter : public Filter {
|
||||
TemplatableValue<float> multiplier_;
|
||||
};
|
||||
|
||||
/** Base class for filters that compare sensor values against a list of configured values.
|
||||
*
|
||||
* This base class provides common functionality for filters that need to check if a sensor
|
||||
* value matches any value in a configured list, with proper handling of NaN values and
|
||||
* accuracy-based rounding for comparisons.
|
||||
*/
|
||||
class ValueListFilter : public Filter {
|
||||
protected:
|
||||
explicit ValueListFilter(std::initializer_list<TemplatableValue<float>> values);
|
||||
|
||||
/// Check if sensor value matches any configured value (with accuracy rounding)
|
||||
bool value_matches_any_(float sensor_value);
|
||||
|
||||
FixedVector<TemplatableValue<float>> values_;
|
||||
};
|
||||
|
||||
/// A simple filter that only forwards the filter chain if it doesn't receive `value_to_filter_out`.
|
||||
class FilterOutValueFilter : public ValueListFilter {
|
||||
class FilterOutValueFilter : public Filter {
|
||||
public:
|
||||
explicit FilterOutValueFilter(std::initializer_list<TemplatableValue<float>> values_to_filter_out);
|
||||
explicit FilterOutValueFilter(std::vector<TemplatableValue<float>> values_to_filter_out);
|
||||
|
||||
optional<float> new_value(float value) override;
|
||||
|
||||
protected:
|
||||
std::vector<TemplatableValue<float>> values_to_filter_out_;
|
||||
};
|
||||
|
||||
class ThrottleFilter : public Filter {
|
||||
@@ -353,16 +340,17 @@ class ThrottleFilter : public Filter {
|
||||
};
|
||||
|
||||
/// Same as 'throttle' but will immediately publish values contained in `value_to_prioritize`.
|
||||
class ThrottleWithPriorityFilter : public ValueListFilter {
|
||||
class ThrottleWithPriorityFilter : public Filter {
|
||||
public:
|
||||
explicit ThrottleWithPriorityFilter(uint32_t min_time_between_inputs,
|
||||
std::initializer_list<TemplatableValue<float>> prioritized_values);
|
||||
std::vector<TemplatableValue<float>> prioritized_values);
|
||||
|
||||
optional<float> new_value(float value) override;
|
||||
|
||||
protected:
|
||||
uint32_t last_input_{0};
|
||||
uint32_t min_time_between_inputs_;
|
||||
std::vector<TemplatableValue<float>> prioritized_values_;
|
||||
};
|
||||
|
||||
class TimeoutFilter : public Filter, public Component {
|
||||
|
||||
@@ -107,12 +107,12 @@ void Sensor::add_filter(Filter *filter) {
|
||||
}
|
||||
filter->initialize(this, nullptr);
|
||||
}
|
||||
void Sensor::add_filters(std::initializer_list<Filter *> filters) {
|
||||
void Sensor::add_filters(const std::vector<Filter *> &filters) {
|
||||
for (Filter *filter : filters) {
|
||||
this->add_filter(filter);
|
||||
}
|
||||
}
|
||||
void Sensor::set_filters(std::initializer_list<Filter *> filters) {
|
||||
void Sensor::set_filters(const std::vector<Filter *> &filters) {
|
||||
this->clear_filters();
|
||||
this->add_filters(filters);
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/components/sensor/filter.h"
|
||||
|
||||
#include <initializer_list>
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
|
||||
namespace esphome {
|
||||
@@ -77,10 +77,10 @@ class Sensor : public EntityBase, public EntityBase_DeviceClass, public EntityBa
|
||||
* SlidingWindowMovingAverageFilter(15, 15), // average over last 15 values
|
||||
* });
|
||||
*/
|
||||
void add_filters(std::initializer_list<Filter *> filters);
|
||||
void add_filters(const std::vector<Filter *> &filters);
|
||||
|
||||
/// Clear the filters and replace them by filters.
|
||||
void set_filters(std::initializer_list<Filter *> filters);
|
||||
void set_filters(const std::vector<Filter *> &filters);
|
||||
|
||||
/// Clear the entire filter chain.
|
||||
void clear_filters();
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from collections.abc import Callable, MutableMapping
|
||||
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.core import CORE
|
||||
@@ -11,32 +9,6 @@ IMPLEMENTATION_LWIP_TCP = "lwip_tcp"
|
||||
IMPLEMENTATION_LWIP_SOCKETS = "lwip_sockets"
|
||||
IMPLEMENTATION_BSD_SOCKETS = "bsd_sockets"
|
||||
|
||||
# Socket tracking infrastructure
|
||||
# Components register their socket needs and platforms read this to configure appropriately
|
||||
KEY_SOCKET_CONSUMERS = "socket_consumers"
|
||||
|
||||
|
||||
def consume_sockets(
|
||||
value: int, consumer: str
|
||||
) -> Callable[[MutableMapping], MutableMapping]:
|
||||
"""Register socket usage for a component.
|
||||
|
||||
Args:
|
||||
value: Number of sockets needed by the component
|
||||
consumer: Name of the component consuming the sockets
|
||||
|
||||
Returns:
|
||||
A validator function that records the socket usage
|
||||
"""
|
||||
|
||||
def _consume_sockets(config: MutableMapping) -> MutableMapping:
|
||||
consumers: dict[str, int] = CORE.data.setdefault(KEY_SOCKET_CONSUMERS, {})
|
||||
consumers[consumer] = consumers.get(consumer, 0) + value
|
||||
return config
|
||||
|
||||
return _consume_sockets
|
||||
|
||||
|
||||
CONFIG_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.SplitDefault(
|
||||
|
||||
@@ -6,7 +6,7 @@ import esphome.config_validation as cv
|
||||
from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS
|
||||
from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base
|
||||
|
||||
from .jinja import Jinja, JinjaError, JinjaStr, has_jinja
|
||||
from .jinja import Jinja, JinjaStr, TemplateError, TemplateRuntimeError, has_jinja
|
||||
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -57,12 +57,17 @@ def _expand_jinja(value, orig_value, path, jinja, ignore_missing):
|
||||
"->".join(str(x) for x in path),
|
||||
err.message,
|
||||
)
|
||||
except JinjaError as err:
|
||||
except (
|
||||
TemplateError,
|
||||
TemplateRuntimeError,
|
||||
RuntimeError,
|
||||
ArithmeticError,
|
||||
AttributeError,
|
||||
TypeError,
|
||||
) as err:
|
||||
raise cv.Invalid(
|
||||
f"{err.error_name()} Error evaluating jinja expression '{value}': {str(err.parent())}."
|
||||
f"\nEvaluation stack: (most recent evaluation last)\n{err.stack_trace_str()}"
|
||||
f"\nRelevant context:\n{err.context_trace_str()}"
|
||||
f"\nSee {'->'.join(str(x) for x in path)}",
|
||||
f"{type(err).__name__} Error evaluating jinja expression '{value}': {str(err)}."
|
||||
f" See {'->'.join(str(x) for x in path)}",
|
||||
path,
|
||||
)
|
||||
return value
|
||||
|
||||
@@ -6,8 +6,6 @@ import re
|
||||
import jinja2 as jinja
|
||||
from jinja2.sandbox import SandboxedEnvironment
|
||||
|
||||
from esphome.yaml_util import ESPLiteralValue
|
||||
|
||||
TemplateError = jinja.TemplateError
|
||||
TemplateSyntaxError = jinja.TemplateSyntaxError
|
||||
TemplateRuntimeError = jinja.TemplateRuntimeError
|
||||
@@ -28,20 +26,18 @@ def has_jinja(st):
|
||||
return detect_jinja_re.search(st) is not None
|
||||
|
||||
|
||||
# SAFE_GLOBALS defines a allowlist of built-in functions or modules that are considered safe to expose
|
||||
# SAFE_GLOBAL_FUNCTIONS defines a allowlist of built-in functions that are considered safe to expose
|
||||
# in Jinja templates or other sandboxed evaluation contexts. Only functions that do not allow
|
||||
# arbitrary code execution, file access, or other security risks are included.
|
||||
#
|
||||
# The following functions are considered safe:
|
||||
# - math: The entire math module is injected, allowing access to mathematical functions like sin, cos, sqrt, etc.
|
||||
# - ord: Converts a character to its Unicode code point integer.
|
||||
# - chr: Converts an integer to its corresponding Unicode character.
|
||||
# - len: Returns the length of a sequence or collection.
|
||||
#
|
||||
# These functions were chosen because they are pure, have no side effects, and do not provide access
|
||||
# to the file system, environment, or other potentially sensitive resources.
|
||||
SAFE_GLOBALS = {
|
||||
"math": math, # Inject entire math module
|
||||
SAFE_GLOBAL_FUNCTIONS = {
|
||||
"ord": ord,
|
||||
"chr": chr,
|
||||
"len": len,
|
||||
@@ -60,62 +56,22 @@ class JinjaStr(str):
|
||||
later in the main substitutions pass.
|
||||
"""
|
||||
|
||||
Undefined = object()
|
||||
|
||||
def __new__(cls, value: str, upvalues=None):
|
||||
if isinstance(value, JinjaStr):
|
||||
base = str(value)
|
||||
merged = {**value.upvalues, **(upvalues or {})}
|
||||
else:
|
||||
base = value
|
||||
merged = dict(upvalues or {})
|
||||
obj = super().__new__(cls, base)
|
||||
obj.upvalues = merged
|
||||
obj.result = JinjaStr.Undefined
|
||||
obj = super().__new__(cls, value)
|
||||
obj.upvalues = upvalues or {}
|
||||
return obj
|
||||
|
||||
|
||||
class JinjaError(Exception):
|
||||
def __init__(self, context_trace: dict, expr: str):
|
||||
self.context_trace = context_trace
|
||||
self.eval_stack = [expr]
|
||||
|
||||
def parent(self):
|
||||
return self.__context__
|
||||
|
||||
def error_name(self):
|
||||
return type(self.parent()).__name__
|
||||
|
||||
def context_trace_str(self):
|
||||
return "\n".join(
|
||||
f" {k} = {repr(v)} ({type(v).__name__})"
|
||||
for k, v in self.context_trace.items()
|
||||
)
|
||||
|
||||
def stack_trace_str(self):
|
||||
return "\n".join(
|
||||
f" {len(self.eval_stack) - i}: {expr}{i == 0 and ' <-- ' + self.error_name() or ''}"
|
||||
for i, expr in enumerate(self.eval_stack)
|
||||
)
|
||||
def __init__(self, value: str, upvalues=None):
|
||||
self.upvalues = upvalues or {}
|
||||
|
||||
|
||||
class TrackerContext(jinja.runtime.Context):
|
||||
def resolve_or_missing(self, key):
|
||||
val = super().resolve_or_missing(key)
|
||||
if isinstance(val, JinjaStr):
|
||||
self.environment.context_trace[key] = val
|
||||
val, _ = self.environment.expand(val)
|
||||
self.environment.context_trace[key] = val
|
||||
return val
|
||||
|
||||
|
||||
class Jinja(SandboxedEnvironment):
|
||||
class Jinja:
|
||||
"""
|
||||
Wraps a Jinja environment
|
||||
"""
|
||||
|
||||
def __init__(self, context_vars):
|
||||
super().__init__(
|
||||
self.env = SandboxedEnvironment(
|
||||
trim_blocks=True,
|
||||
lstrip_blocks=True,
|
||||
block_start_string="<%",
|
||||
@@ -126,20 +82,13 @@ class Jinja(SandboxedEnvironment):
|
||||
variable_end_string="}",
|
||||
undefined=jinja.StrictUndefined,
|
||||
)
|
||||
self.context_class = TrackerContext
|
||||
self.add_extension("jinja2.ext.do")
|
||||
self.context_trace = {}
|
||||
self.env.add_extension("jinja2.ext.do")
|
||||
self.env.globals["math"] = math # Inject entire math module
|
||||
self.context_vars = {**context_vars}
|
||||
for k, v in self.context_vars.items():
|
||||
if isinstance(v, ESPLiteralValue):
|
||||
continue
|
||||
if isinstance(v, str) and not isinstance(v, JinjaStr) and has_jinja(v):
|
||||
self.context_vars[k] = JinjaStr(v, self.context_vars)
|
||||
|
||||
self.globals = {
|
||||
**self.globals,
|
||||
self.env.globals = {
|
||||
**self.env.globals,
|
||||
**self.context_vars,
|
||||
**SAFE_GLOBALS,
|
||||
**SAFE_GLOBAL_FUNCTIONS,
|
||||
}
|
||||
|
||||
def safe_eval(self, expr):
|
||||
@@ -161,43 +110,23 @@ class Jinja(SandboxedEnvironment):
|
||||
result = None
|
||||
override_vars = {}
|
||||
if isinstance(content_str, JinjaStr):
|
||||
if content_str.result is not JinjaStr.Undefined:
|
||||
return content_str.result, None
|
||||
# If `value` is already a JinjaStr, it means we are trying to evaluate it again
|
||||
# in a parent pass.
|
||||
# Hopefully, all required variables are visible now.
|
||||
override_vars = content_str.upvalues
|
||||
|
||||
old_trace = self.context_trace
|
||||
self.context_trace = {}
|
||||
try:
|
||||
template = self.from_string(content_str)
|
||||
template = self.env.from_string(content_str)
|
||||
result = self.safe_eval(template.render(override_vars))
|
||||
if isinstance(result, Undefined):
|
||||
print("" + result) # force a UndefinedError exception
|
||||
# This happens when the expression is simply an undefined variable. Jinja does not
|
||||
# raise an exception, instead we get "Undefined".
|
||||
# Trigger an UndefinedError exception so we skip to below.
|
||||
print("" + result)
|
||||
except (TemplateSyntaxError, UndefinedError) as err:
|
||||
# `content_str` contains a Jinja expression that refers to a variable that is undefined
|
||||
# in this scope. Perhaps it refers to a root substitution that is not visible yet.
|
||||
# Therefore, return `content_str` as a JinjaStr, which contains the variables
|
||||
# Therefore, return the original `content_str` as a JinjaStr, which contains the variables
|
||||
# that are actually visible to it at this point to postpone evaluation.
|
||||
return JinjaStr(content_str, {**self.context_vars, **override_vars}), err
|
||||
except JinjaError as err:
|
||||
err.context_trace = {**self.context_trace, **err.context_trace}
|
||||
err.eval_stack.append(content_str)
|
||||
raise err
|
||||
except (
|
||||
TemplateError,
|
||||
TemplateRuntimeError,
|
||||
RuntimeError,
|
||||
ArithmeticError,
|
||||
AttributeError,
|
||||
TypeError,
|
||||
) as err:
|
||||
raise JinjaError(self.context_trace, content_str) from err
|
||||
finally:
|
||||
self.context_trace = old_trace
|
||||
|
||||
if isinstance(content_str, JinjaStr):
|
||||
content_str.result = result
|
||||
|
||||
return result, None
|
||||
|
||||
@@ -110,28 +110,17 @@ def validate_mapping(value):
|
||||
"substitute", SubstituteFilter, cv.ensure_list(validate_mapping)
|
||||
)
|
||||
async def substitute_filter_to_code(config, filter_id):
|
||||
substitutions = [
|
||||
cg.StructInitializer(
|
||||
cg.MockObj("Substitution", "esphome::text_sensor::"),
|
||||
("from", conf[CONF_FROM]),
|
||||
("to", conf[CONF_TO]),
|
||||
)
|
||||
for conf in config
|
||||
]
|
||||
return cg.new_Pvariable(filter_id, substitutions)
|
||||
from_strings = [conf[CONF_FROM] for conf in config]
|
||||
to_strings = [conf[CONF_TO] for conf in config]
|
||||
return cg.new_Pvariable(filter_id, from_strings, to_strings)
|
||||
|
||||
|
||||
@FILTER_REGISTRY.register("map", MapFilter, cv.ensure_list(validate_mapping))
|
||||
async def map_filter_to_code(config, filter_id):
|
||||
mappings = [
|
||||
cg.StructInitializer(
|
||||
cg.MockObj("Substitution", "esphome::text_sensor::"),
|
||||
("from", conf[CONF_FROM]),
|
||||
("to", conf[CONF_TO]),
|
||||
)
|
||||
for conf in config
|
||||
]
|
||||
return cg.new_Pvariable(filter_id, mappings)
|
||||
map_ = cg.std_ns.class_("map").template(cg.std_string, cg.std_string)
|
||||
return cg.new_Pvariable(
|
||||
filter_id, map_([(item[CONF_FROM], item[CONF_TO]) for item in config])
|
||||
)
|
||||
|
||||
|
||||
validate_device_class = cv.one_of(*DEVICE_CLASSES, lower=True, space="_")
|
||||
|
||||
@@ -62,26 +62,19 @@ optional<std::string> AppendFilter::new_value(std::string value) { return value
|
||||
optional<std::string> PrependFilter::new_value(std::string value) { return this->prefix_ + value; }
|
||||
|
||||
// Substitute
|
||||
SubstituteFilter::SubstituteFilter(std::initializer_list<Substitution> substitutions) : substitutions_(substitutions) {}
|
||||
|
||||
optional<std::string> SubstituteFilter::new_value(std::string value) {
|
||||
std::size_t pos;
|
||||
for (const auto &sub : this->substitutions_) {
|
||||
while ((pos = value.find(sub.from)) != std::string::npos)
|
||||
value.replace(pos, sub.from.size(), sub.to);
|
||||
for (size_t i = 0; i < this->from_strings_.size(); i++) {
|
||||
while ((pos = value.find(this->from_strings_[i])) != std::string::npos)
|
||||
value.replace(pos, this->from_strings_[i].size(), this->to_strings_[i]);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
// Map
|
||||
MapFilter::MapFilter(std::initializer_list<Substitution> mappings) : mappings_(mappings) {}
|
||||
|
||||
optional<std::string> MapFilter::new_value(std::string value) {
|
||||
for (const auto &mapping : this->mappings_) {
|
||||
if (mapping.from == value)
|
||||
return mapping.to;
|
||||
}
|
||||
return value; // Pass through if no match
|
||||
auto item = mappings_.find(value);
|
||||
return item == mappings_.end() ? value : item->second;
|
||||
}
|
||||
|
||||
} // namespace text_sensor
|
||||
|
||||
@@ -2,6 +2,10 @@
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include <queue>
|
||||
#include <utility>
|
||||
#include <map>
|
||||
#include <vector>
|
||||
|
||||
namespace esphome {
|
||||
namespace text_sensor {
|
||||
@@ -94,52 +98,26 @@ class PrependFilter : public Filter {
|
||||
std::string prefix_;
|
||||
};
|
||||
|
||||
struct Substitution {
|
||||
std::string from;
|
||||
std::string to;
|
||||
};
|
||||
|
||||
/// A simple filter that replaces a substring with another substring
|
||||
class SubstituteFilter : public Filter {
|
||||
public:
|
||||
explicit SubstituteFilter(std::initializer_list<Substitution> substitutions);
|
||||
SubstituteFilter(std::vector<std::string> from_strings, std::vector<std::string> to_strings)
|
||||
: from_strings_(std::move(from_strings)), to_strings_(std::move(to_strings)) {}
|
||||
optional<std::string> new_value(std::string value) override;
|
||||
|
||||
protected:
|
||||
FixedVector<Substitution> substitutions_;
|
||||
std::vector<std::string> from_strings_;
|
||||
std::vector<std::string> to_strings_;
|
||||
};
|
||||
|
||||
/** A filter that maps values from one set to another
|
||||
*
|
||||
* Uses linear search instead of std::map for typical small datasets (2-20 mappings).
|
||||
* Linear search on contiguous memory is faster than red-black tree lookups when:
|
||||
* - Dataset is small (< ~30 items)
|
||||
* - Memory is contiguous (cache-friendly, better CPU cache utilization)
|
||||
* - No pointer chasing overhead (tree node traversal)
|
||||
* - String comparison cost dominates lookup time
|
||||
*
|
||||
* Benchmark results (see benchmark_map_filter.cpp):
|
||||
* - 2 mappings: Linear 1.26x faster than std::map
|
||||
* - 5 mappings: Linear 2.25x faster than std::map
|
||||
* - 10 mappings: Linear 1.83x faster than std::map
|
||||
* - 20 mappings: Linear 1.59x faster than std::map
|
||||
* - 30 mappings: Linear 1.09x faster than std::map
|
||||
* - 40 mappings: std::map 1.27x faster than Linear (break-even)
|
||||
*
|
||||
* Benefits over std::map:
|
||||
* - ~2KB smaller flash (no red-black tree code)
|
||||
* - ~24-32 bytes less RAM per mapping (no tree node overhead)
|
||||
* - Faster for typical ESPHome usage (2-10 mappings common, 20+ rare)
|
||||
*
|
||||
* Break-even point: ~35-40 mappings, but ESPHome configs rarely exceed 20
|
||||
*/
|
||||
/// A filter that maps values from one set to another
|
||||
class MapFilter : public Filter {
|
||||
public:
|
||||
explicit MapFilter(std::initializer_list<Substitution> mappings);
|
||||
MapFilter(std::map<std::string, std::string> mappings) : mappings_(std::move(mappings)) {}
|
||||
optional<std::string> new_value(std::string value) override;
|
||||
|
||||
protected:
|
||||
FixedVector<Substitution> mappings_;
|
||||
std::map<std::string, std::string> mappings_;
|
||||
};
|
||||
|
||||
} // namespace text_sensor
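The comment block above argues for linear search over std::map at the mapping counts typical configs declare. A minimal sketch of that lookup over a contiguous list of pairs (standalone and illustrative, not the filter's actual interface):

#include <string>
#include <utility>
#include <vector>

// Linear lookup over contiguous pairs: cache-friendly and smaller in flash than a
// red-black tree for the handful of mappings a typical config declares.
std::string map_lookup(const std::vector<std::pair<std::string, std::string>> &mappings,
                       const std::string &value) {
  for (const auto &m : mappings) {
    if (m.first == value)
      return m.second;  // mapped value
  }
  return value;  // pass through when nothing matches
}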
|
||||
|
||||
@@ -51,12 +51,12 @@ void TextSensor::add_filter(Filter *filter) {
|
||||
}
|
||||
filter->initialize(this, nullptr);
|
||||
}
|
||||
void TextSensor::add_filters(std::initializer_list<Filter *> filters) {
|
||||
void TextSensor::add_filters(const std::vector<Filter *> &filters) {
|
||||
for (Filter *filter : filters) {
|
||||
this->add_filter(filter);
|
||||
}
|
||||
}
|
||||
void TextSensor::set_filters(std::initializer_list<Filter *> filters) {
|
||||
void TextSensor::set_filters(const std::vector<Filter *> &filters) {
|
||||
this->clear_filters();
|
||||
this->add_filters(filters);
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/components/text_sensor/filter.h"
|
||||
|
||||
#include <initializer_list>
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
|
||||
namespace esphome {
|
||||
@@ -37,10 +37,10 @@ class TextSensor : public EntityBase, public EntityBase_DeviceClass {
|
||||
void add_filter(Filter *filter);
|
||||
|
||||
/// Add a list of vectors to the back of the filter chain.
|
||||
void add_filters(std::initializer_list<Filter *> filters);
|
||||
void add_filters(const std::vector<Filter *> &filters);
|
||||
|
||||
/// Clear the filters and replace them by filters.
|
||||
void set_filters(std::initializer_list<Filter *> filters);
|
||||
void set_filters(const std::vector<Filter *> &filters);
|
||||
|
||||
/// Clear the entire filter chain.
|
||||
void clear_filters();
|
||||
|
||||
@@ -283,11 +283,8 @@ void TuyaClimate::control_fan_mode_(const climate::ClimateCall &call) {
|
||||
|
||||
climate::ClimateTraits TuyaClimate::traits() {
|
||||
auto traits = climate::ClimateTraits();
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION);
|
||||
if (this->current_temperature_id_.has_value()) {
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
}
|
||||
|
||||
traits.set_supports_action(true);
|
||||
traits.set_supports_current_temperature(this->current_temperature_id_.has_value());
|
||||
if (supports_heat_)
|
||||
traits.add_supported_mode(climate::CLIMATE_MODE_HEAT);
|
||||
if (supports_cool_)
|
||||
|
||||
@@ -17,12 +17,6 @@ UponorSmatrixDevice = uponor_smatrix_ns.class_(
|
||||
"UponorSmatrixDevice", cg.Parented.template(UponorSmatrixComponent)
|
||||
)
|
||||
|
||||
|
||||
device_address = cv.All(
|
||||
cv.hex_int,
|
||||
cv.Range(min=0x1000000, max=0xFFFFFFFF, msg="Expected a 32 bit device address"),
|
||||
)
|
||||
|
||||
CONF_UPONOR_SMATRIX_ID = "uponor_smatrix_id"
|
||||
CONF_TIME_DEVICE_ADDRESS = "time_device_address"
|
||||
|
||||
@@ -30,12 +24,9 @@ CONFIG_SCHEMA = (
|
||||
cv.Schema(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(UponorSmatrixComponent),
|
||||
cv.Optional(CONF_ADDRESS): cv.invalid(
|
||||
f"The '{CONF_ADDRESS}' option has been removed. "
|
||||
"Use full 32 bit addresses in the device definitions instead."
|
||||
),
|
||||
cv.Optional(CONF_ADDRESS): cv.hex_uint16_t,
|
||||
cv.Optional(CONF_TIME_ID): cv.use_id(time.RealTimeClock),
|
||||
cv.Optional(CONF_TIME_DEVICE_ADDRESS): device_address,
|
||||
cv.Optional(CONF_TIME_DEVICE_ADDRESS): cv.hex_uint16_t,
|
||||
}
|
||||
)
|
||||
.extend(cv.COMPONENT_SCHEMA)
|
||||
@@ -56,7 +47,7 @@ FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema(
|
||||
UPONOR_SMATRIX_DEVICE_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.GenerateID(CONF_UPONOR_SMATRIX_ID): cv.use_id(UponorSmatrixComponent),
|
||||
cv.Required(CONF_ADDRESS): device_address,
|
||||
cv.Required(CONF_ADDRESS): cv.hex_uint16_t,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -67,15 +58,17 @@ async def to_code(config):
|
||||
await cg.register_component(var, config)
|
||||
await uart.register_uart_device(var, config)
|
||||
|
||||
if address := config.get(CONF_ADDRESS):
|
||||
cg.add(var.set_system_address(address))
|
||||
if time_id := config.get(CONF_TIME_ID):
|
||||
time_ = await cg.get_variable(time_id)
|
||||
cg.add(var.set_time_id(time_))
|
||||
if time_device_address := config.get(CONF_TIME_DEVICE_ADDRESS):
|
||||
cg.add(var.set_time_device_address(time_device_address))
|
||||
if time_device_address := config.get(CONF_TIME_DEVICE_ADDRESS):
|
||||
cg.add(var.set_time_device_address(time_device_address))
|
||||
|
||||
|
||||
async def register_uponor_smatrix_device(var, config):
|
||||
parent = await cg.get_variable(config[CONF_UPONOR_SMATRIX_ID])
|
||||
cg.add(var.set_parent(parent))
|
||||
cg.add(var.set_address(config[CONF_ADDRESS]))
|
||||
cg.add(var.set_device_address(config[CONF_ADDRESS]))
|
||||
cg.add(parent.register_device(var))
|
||||
|
||||
@@ -10,7 +10,7 @@ static const char *const TAG = "uponor_smatrix.climate";
|
||||
|
||||
void UponorSmatrixClimate::dump_config() {
|
||||
LOG_CLIMATE("", "Uponor Smatrix Climate", this);
|
||||
ESP_LOGCONFIG(TAG, " Device address: 0x%08X", this->address_);
|
||||
ESP_LOGCONFIG(TAG, " Device address: 0x%04X", this->address_);
|
||||
}
|
||||
|
||||
void UponorSmatrixClimate::loop() {
|
||||
@@ -30,9 +30,10 @@ void UponorSmatrixClimate::loop() {
|
||||
|
||||
climate::ClimateTraits UponorSmatrixClimate::traits() {
|
||||
auto traits = climate::ClimateTraits();
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY |
|
||||
climate::CLIMATE_SUPPORTS_ACTION);
|
||||
traits.set_supports_current_temperature(true);
|
||||
traits.set_supports_current_humidity(true);
|
||||
traits.set_supported_modes({climate::CLIMATE_MODE_HEAT});
|
||||
traits.set_supports_action(true);
|
||||
traits.set_supported_presets({climate::CLIMATE_PRESET_ECO});
|
||||
traits.set_visual_min_temperature(this->min_temperature_);
|
||||
traits.set_visual_max_temperature(this->max_temperature_);
|
||||
|
||||
@@ -9,7 +9,7 @@ static const char *const TAG = "uponor_smatrix.sensor";
void UponorSmatrixSensor::dump_config() {
ESP_LOGCONFIG(TAG,
"Uponor Smatrix Sensor\n"
" Device address: 0x%08X",
" Device address: 0x%04X",
this->address_);
LOG_SENSOR(" ", "Temperature", this->temperature_sensor_);
LOG_SENSOR(" ", "External Temperature", this->external_temperature_sensor_);

@@ -18,10 +18,11 @@ void UponorSmatrixComponent::setup() {
|
||||
|
||||
void UponorSmatrixComponent::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, "Uponor Smatrix");
|
||||
ESP_LOGCONFIG(TAG, " System address: 0x%04X", this->address_);
|
||||
#ifdef USE_TIME
|
||||
if (this->time_id_ != nullptr) {
|
||||
ESP_LOGCONFIG(TAG, " Time synchronization: YES");
|
||||
ESP_LOGCONFIG(TAG, " Time master device address: 0x%08X", this->time_device_address_);
|
||||
ESP_LOGCONFIG(TAG, " Time master device address: 0x%04X", this->time_device_address_);
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -30,7 +31,7 @@ void UponorSmatrixComponent::dump_config() {
|
||||
if (!this->unknown_devices_.empty()) {
|
||||
ESP_LOGCONFIG(TAG, " Detected unknown device addresses:");
|
||||
for (auto device_address : this->unknown_devices_) {
|
||||
ESP_LOGCONFIG(TAG, " 0x%08X", device_address);
|
||||
ESP_LOGCONFIG(TAG, " 0x%04X", device_address);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -88,7 +89,8 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) {
|
||||
return false;
|
||||
}
|
||||
|
||||
uint32_t device_address = encode_uint32(packet[0], packet[1], packet[2], packet[3]);
|
||||
uint16_t system_address = encode_uint16(packet[0], packet[1]);
|
||||
uint16_t device_address = encode_uint16(packet[2], packet[3]);
|
||||
uint16_t crc = encode_uint16(packet[packet_len - 1], packet[packet_len - 2]);
|
||||
|
||||
uint16_t computed_crc = crc16(packet, packet_len - 2);
|
||||
@@ -97,14 +99,24 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ESP_LOGV(TAG, "Received packet: addr=%08X, data=%s, crc=%04X", device_address,
|
||||
ESP_LOGV(TAG, "Received packet: sys=%04X, dev=%04X, data=%s, crc=%04X", system_address, device_address,
|
||||
format_hex(&packet[4], packet_len - 6).c_str(), crc);
|
||||
|
||||
// Detect or check system address
|
||||
if (this->address_ == 0) {
|
||||
ESP_LOGI(TAG, "Using detected system address 0x%04X", system_address);
|
||||
this->address_ = system_address;
|
||||
} else if (this->address_ != system_address) {
|
||||
// This should never happen unless the system address was set or detected incorrectly, so warn the user.
|
||||
ESP_LOGW(TAG, "Received packet from unknown system address 0x%04X", system_address);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Handle packet
|
||||
size_t data_len = (packet_len - 6) / 3;
|
||||
if (data_len == 0) {
|
||||
if (packet[4] == UPONOR_ID_REQUEST)
|
||||
ESP_LOGVV(TAG, "Ignoring request packet for device 0x%08X", device_address);
|
||||
ESP_LOGVV(TAG, "Ignoring request packet for device 0x%04X", device_address);
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -129,7 +141,7 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) {
|
||||
if (data[i].id == UPONOR_ID_DATETIME1)
|
||||
found_time = true;
|
||||
if (found_temperature && found_time) {
|
||||
ESP_LOGI(TAG, "Using detected time device address 0x%08X", device_address);
|
||||
ESP_LOGI(TAG, "Using detected time device address 0x%04X", device_address);
|
||||
this->time_device_address_ = device_address;
|
||||
break;
|
||||
}
|
||||
@@ -148,7 +160,7 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) {
|
||||
|
||||
// Log unknown device addresses
|
||||
if (!found && !this->unknown_devices_.count(device_address)) {
|
||||
ESP_LOGI(TAG, "Received packet for unknown device address 0x%08X ", device_address);
|
||||
ESP_LOGI(TAG, "Received packet for unknown device address 0x%04X ", device_address);
|
||||
this->unknown_devices_.insert(device_address);
|
||||
}
|
||||
|
||||
@@ -156,16 +168,16 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) {
|
||||
return true;
|
||||
}
|
||||
|
||||
bool UponorSmatrixComponent::send(uint32_t device_address, const UponorSmatrixData *data, size_t data_len) {
|
||||
if (device_address == 0 || data == nullptr || data_len == 0)
|
||||
bool UponorSmatrixComponent::send(uint16_t device_address, const UponorSmatrixData *data, size_t data_len) {
|
||||
if (this->address_ == 0 || device_address == 0 || data == nullptr || data_len == 0)
|
||||
return false;
|
||||
|
||||
// Assemble packet for send queue. All fields are big-endian except for the little-endian checksum.
|
||||
std::vector<uint8_t> packet;
|
||||
packet.reserve(6 + 3 * data_len);
|
||||
|
||||
packet.push_back(device_address >> 24);
|
||||
packet.push_back(device_address >> 16);
|
||||
packet.push_back(this->address_ >> 8);
|
||||
packet.push_back(this->address_ >> 0);
|
||||
packet.push_back(device_address >> 8);
|
||||
packet.push_back(device_address >> 0);
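For reference, the frame layout implied by this hunk is: a 16-bit system address followed by a 16-bit device address (both big-endian), the data items, and a CRC16 appended low byte first. A minimal Python sketch of that assembly, assuming a generic reflected CRC-16 whose polynomial and initial value are placeholders (the component's actual CRC parameters are not shown in this diff), with made-up addresses and payload:

def crc16(data: bytes, poly: int = 0xA001, init: int = 0x0000) -> int:
    # Placeholder CRC-16; poly/init are assumptions, not taken from the component.
    crc = init
    for byte in data:
        crc ^= byte
        for _ in range(8):
            crc = (crc >> 1) ^ poly if crc & 1 else crc >> 1
    return crc & 0xFFFF

def build_frame(system_address: int, device_address: int, payload: bytes) -> bytes:
    frame = bytearray()
    frame += system_address.to_bytes(2, "big")   # packet[0..1]
    frame += device_address.to_bytes(2, "big")   # packet[2..3]
    frame += payload                             # data items, 3 bytes each
    frame += crc16(bytes(frame)).to_bytes(2, "little")  # checksum is the only little-endian field
    return bytes(frame)

print(build_frame(0x110B, 0xDE4A, bytes([0x3C, 0x01, 0x23])).hex())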
|
||||
|
||||
|
||||
@@ -71,21 +71,23 @@ class UponorSmatrixComponent : public uart::UARTDevice, public Component {
|
||||
void dump_config() override;
|
||||
void loop() override;
|
||||
|
||||
void set_system_address(uint16_t address) { this->address_ = address; }
|
||||
void register_device(UponorSmatrixDevice *device) { this->devices_.push_back(device); }
|
||||
|
||||
bool send(uint32_t device_address, const UponorSmatrixData *data, size_t data_len);
|
||||
bool send(uint16_t device_address, const UponorSmatrixData *data, size_t data_len);
|
||||
|
||||
#ifdef USE_TIME
|
||||
void set_time_id(time::RealTimeClock *time_id) { this->time_id_ = time_id; }
|
||||
void set_time_device_address(uint32_t address) { this->time_device_address_ = address; }
|
||||
void set_time_device_address(uint16_t address) { this->time_device_address_ = address; }
|
||||
void send_time() { this->send_time_requested_ = true; }
|
||||
#endif
|
||||
|
||||
protected:
|
||||
bool parse_byte_(uint8_t byte);
|
||||
|
||||
uint16_t address_;
|
||||
std::vector<UponorSmatrixDevice *> devices_;
|
||||
std::set<uint32_t> unknown_devices_;
|
||||
std::set<uint16_t> unknown_devices_;
|
||||
|
||||
std::vector<uint8_t> rx_buffer_;
|
||||
std::queue<std::vector<uint8_t>> tx_queue_;
|
||||
@@ -94,7 +96,7 @@ class UponorSmatrixComponent : public uart::UARTDevice, public Component {
|
||||
|
||||
#ifdef USE_TIME
|
||||
time::RealTimeClock *time_id_{nullptr};
|
||||
uint32_t time_device_address_;
|
||||
uint16_t time_device_address_;
|
||||
bool send_time_requested_;
|
||||
bool do_send_time_();
|
||||
#endif
|
||||
@@ -102,7 +104,7 @@ class UponorSmatrixComponent : public uart::UARTDevice, public Component {
|
||||
|
||||
class UponorSmatrixDevice : public Parented<UponorSmatrixComponent> {
|
||||
public:
|
||||
void set_address(uint32_t address) { this->address_ = address; }
|
||||
void set_device_address(uint16_t address) { this->address_ = address; }
|
||||
|
||||
virtual void on_device_data(const UponorSmatrixData *data, size_t data_len) = 0;
|
||||
bool send(const UponorSmatrixData *data, size_t data_len) {
|
||||
@@ -111,7 +113,7 @@ class UponorSmatrixDevice : public Parented<UponorSmatrixComponent> {
|
||||
|
||||
protected:
|
||||
friend UponorSmatrixComponent;
|
||||
uint32_t address_;
|
||||
uint16_t address_;
|
||||
};
|
||||
|
||||
inline float raw_to_celsius(uint16_t raw) {
|
||||
|
||||
@@ -136,18 +136,6 @@ def _final_validate_sorting(config: ConfigType) -> ConfigType:
|
||||
|
||||
FINAL_VALIDATE_SCHEMA = _final_validate_sorting
|
||||
|
||||
|
||||
def _consume_web_server_sockets(config: ConfigType) -> ConfigType:
|
||||
"""Register socket needs for web_server component."""
|
||||
from esphome.components import socket
|
||||
|
||||
# Web server needs 1 listening socket + typically 2 concurrent client connections
|
||||
# (browser makes 2 connections for page + event stream)
|
||||
sockets_needed = 3
|
||||
socket.consume_sockets(sockets_needed, "web_server")(config)
|
||||
return config
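The same hook shape works for any component that has to reserve sockets at validation time; a short sketch with a hypothetical component name, using consume_sockets exactly as above:

from esphome.components import socket


def _consume_my_component_sockets(config):
    # Hypothetical component: 1 listening socket + 1 expected client connection
    socket.consume_sockets(2, "my_component")(config)
    return config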
|
||||
|
||||
|
||||
sorting_group = {
|
||||
cv.Required(CONF_ID): cv.declare_id(cg.int_),
|
||||
cv.Required(CONF_NAME): cv.string,
|
||||
@@ -217,7 +205,6 @@ CONFIG_SCHEMA = cv.All(
|
||||
validate_local,
|
||||
validate_sorting_groups,
|
||||
validate_ota,
|
||||
_consume_web_server_sockets,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -81,9 +81,7 @@ const uint32_t YASHIMA_CARRIER_FREQUENCY = 38000;
|
||||
|
||||
climate::ClimateTraits YashimaClimate::traits() {
|
||||
auto traits = climate::ClimateTraits();
|
||||
if (this->sensor_ != nullptr) {
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
}
|
||||
traits.set_supports_current_temperature(this->sensor_ != nullptr);
|
||||
|
||||
traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT_COOL});
|
||||
if (supports_cool_)
|
||||
@@ -91,6 +89,7 @@ climate::ClimateTraits YashimaClimate::traits() {
|
||||
if (supports_heat_)
|
||||
traits.add_supported_mode(climate::CLIMATE_MODE_HEAT);
|
||||
|
||||
traits.set_supports_two_point_target_temperature(false);
|
||||
traits.set_visual_min_temperature(YASHIMA_TEMP_MIN);
|
||||
traits.set_visual_max_temperature(YASHIMA_TEMP_MAX);
|
||||
traits.set_visual_temperature_step(1);
|
||||
|
||||
@@ -11,7 +11,6 @@ from esphome.const import (
|
||||
CONF_COMMENT,
|
||||
CONF_ESPHOME,
|
||||
CONF_ETHERNET,
|
||||
CONF_OPENTHREAD,
|
||||
CONF_PORT,
|
||||
CONF_USE_ADDRESS,
|
||||
CONF_WEB_SERVER,
|
||||
@@ -642,9 +641,6 @@ class EsphomeCore:
|
||||
if CONF_ETHERNET in self.config:
|
||||
return self.config[CONF_ETHERNET][CONF_USE_ADDRESS]
|
||||
|
||||
if CONF_OPENTHREAD in self.config:
|
||||
return f"{self.name}.local"
|
||||
|
||||
return None
|
||||
|
||||
@property
|
||||
@@ -709,15 +705,6 @@ class EsphomeCore:
|
||||
def relative_piolibdeps_path(self, *path: str | Path) -> Path:
|
||||
return self.relative_build_path(".piolibdeps", *path)
|
||||
|
||||
@property
|
||||
def platformio_cache_dir(self) -> str:
|
||||
"""Get the PlatformIO cache directory path."""
|
||||
# Check if running in Docker/HA addon with custom cache dir
|
||||
if (cache_dir := os.environ.get("PLATFORMIO_CACHE_DIR")) and cache_dir.strip():
|
||||
return cache_dir
|
||||
# Default PlatformIO cache location
|
||||
return os.path.expanduser("~/.platformio/.cache")
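A quick illustration of the lookup order in this property (environment variable first, PlatformIO default second); the override value is made up:

import os
from os.path import expanduser

os.environ["PLATFORMIO_CACHE_DIR"] = "/data/cache/platformio"  # e.g. set by a container or add-on
cache_dir = os.environ.get("PLATFORMIO_CACHE_DIR") or expanduser("~/.platformio/.cache")
print(cache_dir)  # -> /data/cache/platformio (blank values fall back to the default, as in the property above)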
|
||||
|
||||
@property
|
||||
def firmware_bin(self) -> Path:
|
||||
if self.is_libretiny:
|
||||
|
||||
@@ -39,7 +39,7 @@
|
||||
#include "esphome/components/text_sensor/text_sensor.h"
|
||||
#endif
|
||||
#ifdef USE_FAN
|
||||
#include "esphome/components/fan/fan.h"
|
||||
#include "esphome/components/fan/fan_state.h"
|
||||
#endif
|
||||
#ifdef USE_CLIMATE
|
||||
#include "esphome/components/climate/climate.h"
|
||||
|
||||
@@ -243,7 +243,7 @@ template<typename... Ts> class ActionList {
|
||||
}
|
||||
this->actions_end_ = action;
|
||||
}
|
||||
void add_actions(const std::initializer_list<Action<Ts...> *> &actions) {
|
||||
void add_actions(const std::vector<Action<Ts...> *> &actions) {
|
||||
for (auto *action : actions) {
|
||||
this->add_action(action);
|
||||
}
|
||||
@@ -286,7 +286,7 @@ template<typename... Ts> class Automation {
|
||||
explicit Automation(Trigger<Ts...> *trigger) : trigger_(trigger) { this->trigger_->set_automation_parent(this); }
|
||||
|
||||
void add_action(Action<Ts...> *action) { this->actions_.add_action(action); }
|
||||
void add_actions(const std::initializer_list<Action<Ts...> *> &actions) { this->actions_.add_actions(actions); }
|
||||
void add_actions(const std::vector<Action<Ts...> *> &actions) { this->actions_.add_actions(actions); }
|
||||
|
||||
void stop() { this->actions_.stop(); }
|
||||
|
||||
|
||||
@@ -194,12 +194,12 @@ template<typename... Ts> class IfAction : public Action<Ts...> {
|
||||
public:
|
||||
explicit IfAction(Condition<Ts...> *condition) : condition_(condition) {}
|
||||
|
||||
void add_then(const std::initializer_list<Action<Ts...> *> &actions) {
|
||||
void add_then(const std::vector<Action<Ts...> *> &actions) {
|
||||
this->then_.add_actions(actions);
|
||||
this->then_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
|
||||
}
|
||||
|
||||
void add_else(const std::initializer_list<Action<Ts...> *> &actions) {
|
||||
void add_else(const std::vector<Action<Ts...> *> &actions) {
|
||||
this->else_.add_actions(actions);
|
||||
this->else_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
|
||||
}
|
||||
@@ -240,7 +240,7 @@ template<typename... Ts> class WhileAction : public Action<Ts...> {
|
||||
public:
|
||||
WhileAction(Condition<Ts...> *condition) : condition_(condition) {}
|
||||
|
||||
void add_then(const std::initializer_list<Action<Ts...> *> &actions) {
|
||||
void add_then(const std::vector<Action<Ts...> *> &actions) {
|
||||
this->then_.add_actions(actions);
|
||||
this->then_.add_action(new LambdaAction<Ts...>([this](Ts... x) {
|
||||
if (this->num_running_ > 0 && this->condition_->check_tuple(this->var_)) {
|
||||
@@ -287,7 +287,7 @@ template<typename... Ts> class RepeatAction : public Action<Ts...> {
|
||||
public:
|
||||
TEMPLATABLE_VALUE(uint32_t, count)
|
||||
|
||||
void add_then(const std::initializer_list<Action<uint32_t, Ts...> *> &actions) {
|
||||
void add_then(const std::vector<Action<uint32_t, Ts...> *> &actions) {
|
||||
this->then_.add_actions(actions);
|
||||
this->then_.add_action(new LambdaAction<uint32_t, Ts...>([this](uint32_t iteration, Ts... x) {
|
||||
iteration++;
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
#include "esphome/components/binary_sensor/binary_sensor.h"
|
||||
#endif
|
||||
#ifdef USE_FAN
|
||||
#include "esphome/components/fan/fan.h"
|
||||
#include "esphome/components/fan/fan_state.h"
|
||||
#endif
|
||||
#ifdef USE_LIGHT
|
||||
#include "esphome/components/light/light_state.h"
|
||||
|
||||
@@ -273,8 +273,6 @@
|
||||
|
||||
#ifdef USE_NRF52
|
||||
#define USE_NRF52_DFU
|
||||
#define USE_SOFTDEVICE_ID 7
|
||||
#define USE_SOFTDEVICE_VERSION 1
|
||||
#endif
|
||||
|
||||
// Disabled feature flags
|
||||
|
||||
@@ -414,8 +414,10 @@ int8_t step_to_accuracy_decimals(float step) {
|
||||
return str.length() - dot_pos - 1;
|
||||
}
|
||||
|
||||
// Store BASE64 characters as array - automatically placed in flash/ROM on embedded platforms
|
||||
static const char BASE64_CHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
||||
// Use C-style string constant to store in ROM instead of RAM (saves 24 bytes)
|
||||
static constexpr const char *BASE64_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
|
||||
"abcdefghijklmnopqrstuvwxyz"
|
||||
"0123456789+/";
|
||||
|
||||
// Helper function to find the index of a base64 character in the lookup table.
|
||||
// Returns the character's position (0-63) if found, or 0 if not found.
|
||||
@@ -425,8 +427,8 @@ static const char BASE64_CHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqr
|
||||
// stops processing at the first invalid character due to the is_base64() check in its
|
||||
// while loop condition, making this edge case harmless in practice.
|
||||
static inline uint8_t base64_find_char(char c) {
|
||||
const void *ptr = memchr(BASE64_CHARS, c, sizeof(BASE64_CHARS));
|
||||
return ptr ? (static_cast<const char *>(ptr) - BASE64_CHARS) : 0;
|
||||
const char *pos = strchr(BASE64_CHARS, c);
|
||||
return pos ? (pos - BASE64_CHARS) : 0;
|
||||
}
|
||||
|
||||
static inline bool is_base64(char c) { return (isalnum(c) || (c == '+') || (c == '/')); }
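The behaviour described in the comments above (positions 0-63 for valid characters, 0 for anything else, with is_base64() gating the decode loop) can be mirrored in a few lines of Python for illustration:

BASE64_CHARS = ("ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                "abcdefghijklmnopqrstuvwxyz"
                "0123456789+/")

def base64_find_char(c: str) -> int:
    pos = BASE64_CHARS.find(c)
    return pos if pos >= 0 else 0  # 0 for invalid input; harmless because decoding stops earlier

def is_base64(c: str) -> bool:
    return c.isalnum() or c in "+/"

print(base64_find_char("A"), base64_find_char("/"), base64_find_char("!"))  # 0 63 0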
|
||||
|
||||
@@ -143,9 +143,6 @@ template<typename T, size_t N> class StaticVector {
|
||||
size_t size() const { return count_; }
|
||||
bool empty() const { return count_ == 0; }
|
||||
|
||||
// Direct access to size counter for efficient in-place construction
|
||||
size_t &count() { return count_; }
|
||||
|
||||
T &operator[](size_t i) { return data_[i]; }
|
||||
const T &operator[](size_t i) const { return data_[i]; }
|
||||
|
||||
|
||||
@@ -95,9 +95,10 @@ class Scheduler {
|
||||
} name_;
|
||||
uint32_t interval;
|
||||
// Split time to handle millis() rollover. The scheduler combines the 32-bit millis()
|
||||
// with a 16-bit rollover counter to create a 48-bit time space (stored as 64-bit
|
||||
// for compatibility). With 49.7 days per 32-bit rollover, the 16-bit counter
|
||||
// supports 49.7 days × 65536 = ~8900 years. This ensures correct scheduling
|
||||
// with a 16-bit rollover counter to create a 48-bit time space (using 32+16 bits).
|
||||
// This is intentionally limited to 48 bits, not stored as a full 64-bit value.
|
||||
// With 49.7 days per 32-bit rollover, the 16-bit counter supports
|
||||
// 49.7 days × 65536 = ~8900 years. This ensures correct scheduling
|
||||
// even when devices run for months. Split into two fields for better memory
|
||||
// alignment on 32-bit systems.
|
||||
uint32_t next_execution_low_; // Lower 32 bits of execution time (millis value)
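The arithmetic described in the comment, written out as a small Python sketch (names are illustrative):

MILLIS_BITS = 32

def combined_time(rollover_count: int, millis_now: int) -> int:
    # 16-bit rollover counter in the upper bits, 32-bit millis() in the lower bits -> 48-bit time
    return ((rollover_count & 0xFFFF) << MILLIS_BITS) | (millis_now & 0xFFFFFFFF)

days_per_rollover = 2**32 / 1000 / 60 / 60 / 24      # ~49.7 days per 32-bit millis() wrap
years_covered = days_per_rollover * 2**16 / 365.25   # ~8900 years for the 48-bit space
print(round(days_per_rollover, 1), round(years_covered))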
|
||||
|
||||
362
esphome/github_cache.py
Normal file
362
esphome/github_cache.py
Normal file
@@ -0,0 +1,362 @@
|
||||
"""GitHub download cache for ESPHome.
|
||||
|
||||
This module provides caching functionality for GitHub release downloads
|
||||
to avoid redundant network I/O when switching between platforms.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import time
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GitHubCache:
|
||||
"""Manages caching of GitHub release downloads."""
|
||||
|
||||
# Cache expiration time in seconds (30 days)
|
||||
CACHE_EXPIRATION_SECONDS = 30 * 24 * 60 * 60
|
||||
|
||||
def __init__(self, cache_dir: Path | None = None):
|
||||
"""Initialize the cache manager.
|
||||
|
||||
Args:
|
||||
cache_dir: Directory to store cached files.
|
||||
Defaults to ~/.esphome_cache/github
|
||||
"""
|
||||
if cache_dir is None:
|
||||
cache_dir = Path.home() / ".esphome_cache" / "github"
|
||||
self.cache_dir = cache_dir
|
||||
self.cache_dir.mkdir(parents=True, exist_ok=True)
|
||||
self.metadata_file = self.cache_dir / "cache_metadata.json"
|
||||
# Prune old files on initialization
|
||||
try:
|
||||
self._prune_old_files()
|
||||
except Exception as e:
|
||||
_LOGGER.debug("Failed to prune old cache files: %s", e)
|
||||
|
||||
def _load_metadata(self) -> dict:
|
||||
"""Load cache metadata from disk."""
|
||||
if self.metadata_file.exists():
|
||||
try:
|
||||
with open(self.metadata_file) as f:
|
||||
return json.load(f)
|
||||
except (OSError, ValueError, json.JSONDecodeError):
|
||||
return {}
|
||||
return {}
|
||||
|
||||
def _save_metadata(self, metadata: dict) -> None:
|
||||
"""Save cache metadata to disk."""
|
||||
try:
|
||||
with open(self.metadata_file, "w") as f:
|
||||
json.dump(metadata, f, indent=2)
|
||||
except OSError as e:
|
||||
_LOGGER.debug("Failed to save cache metadata: %s", e)
|
||||
|
||||
@staticmethod
|
||||
def is_github_url(url: str) -> bool:
|
||||
"""Check if URL is a GitHub release download."""
|
||||
return "github.com" in url.lower() and url.endswith(".zip")
|
||||
|
||||
def _get_cache_key(self, url: str) -> str:
|
||||
"""Get cache key (hash) for a URL."""
|
||||
return hashlib.sha256(url.encode()).hexdigest()
|
||||
|
||||
def _get_cache_path(self, url: str) -> Path:
|
||||
"""Get cache file path for a URL."""
|
||||
cache_key = self._get_cache_key(url)
|
||||
ext = Path(url.split("?")[0]).suffix
|
||||
return self.cache_dir / f"{cache_key}{ext}"
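A worked example of the key and path derivation above (the URL is hypothetical):

import hashlib
from pathlib import Path

url = "https://github.com/example-org/toolchain/releases/download/v1.0/toolchain.zip"  # made up
cache_key = hashlib.sha256(url.encode()).hexdigest()
ext = Path(url.split("?")[0]).suffix  # ".zip" (any query string is stripped first)
print(cache_key[:12] + ext)           # stored in cache_dir as <full-sha256-of-url>.zip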
|
||||
|
||||
def _check_if_modified(
|
||||
self,
|
||||
url: str,
|
||||
last_modified: str | None = None,
|
||||
etag: str | None = None,
|
||||
) -> bool:
|
||||
"""Check if a URL has been modified using HTTP 304.
|
||||
|
||||
Args:
|
||||
url: URL to check
|
||||
last_modified: Last-Modified header from previous response
|
||||
etag: ETag header from previous response
|
||||
|
||||
Returns:
|
||||
True if modified, False if not modified (or offline/unreachable)
|
||||
"""
|
||||
if not last_modified and not etag:
|
||||
# No cache headers available, assume modified
|
||||
return True
|
||||
|
||||
try:
|
||||
request = urllib.request.Request(url)
|
||||
request.get_method = lambda: "HEAD"
|
||||
|
||||
if last_modified:
|
||||
request.add_header("If-Modified-Since", last_modified)
|
||||
if etag:
|
||||
request.add_header("If-None-Match", etag)
|
||||
|
||||
try:
|
||||
urllib.request.urlopen(request, timeout=10)
|
||||
# 200 OK = file was modified
|
||||
return True
|
||||
except urllib.error.HTTPError as e:
|
||||
if e.code == 304:
|
||||
# Not modified
|
||||
_LOGGER.debug("File not modified (HTTP 304): %s", url)
|
||||
return False
|
||||
# Other errors, assume modified to be safe
|
||||
return True
|
||||
except (OSError, urllib.error.URLError):
|
||||
# If check fails (offline/network error), assume not modified (use cache)
|
||||
_LOGGER.info("Cannot reach server (offline?), using cached file: %s", url)
|
||||
return False
|
||||
|
||||
def get_cached_path(self, url: str, check_updates: bool = True) -> Path | None:
|
||||
"""Get path to cached file if available and valid.
|
||||
|
||||
Args:
|
||||
url: URL to check
|
||||
check_updates: Whether to check for updates using HTTP 304
|
||||
|
||||
Returns:
|
||||
Path to cached file if valid, None if needs download
|
||||
"""
|
||||
if not self.is_github_url(url):
|
||||
return None
|
||||
|
||||
cache_path = self._get_cache_path(url)
|
||||
if not cache_path.exists():
|
||||
return None
|
||||
|
||||
# Load metadata
|
||||
metadata = self._load_metadata()
|
||||
cache_key = self._get_cache_key(url)
|
||||
|
||||
# Check if file should be re-downloaded
|
||||
should_redownload = False
|
||||
if check_updates and cache_key in metadata:
|
||||
last_modified = metadata[cache_key].get("last_modified")
|
||||
etag = metadata[cache_key].get("etag")
|
||||
if self._check_if_modified(url, last_modified, etag):
|
||||
# File was modified, need to re-download
|
||||
_LOGGER.debug("Cached file is outdated: %s", url)
|
||||
should_redownload = True
|
||||
|
||||
if should_redownload:
|
||||
return None
|
||||
|
||||
# File is valid, update cached_at timestamp to keep it fresh
|
||||
if cache_key in metadata:
|
||||
metadata[cache_key]["cached_at"] = time.time()
|
||||
self._save_metadata(metadata)
|
||||
|
||||
# Log appropriate message
|
||||
if not check_updates:
|
||||
_LOGGER.debug("Using cached file (no update check): %s", url)
|
||||
elif cache_key not in metadata:
|
||||
_LOGGER.debug("Using cached file (no metadata): %s", url)
|
||||
else:
|
||||
_LOGGER.debug("Using cached file: %s", url)
|
||||
|
||||
return cache_path
|
||||
|
||||
def save_to_cache(self, url: str, source_path: Path) -> None:
|
||||
"""Save a downloaded file to cache.
|
||||
|
||||
Args:
|
||||
url: URL the file was downloaded from
|
||||
source_path: Path to the downloaded file
|
||||
"""
|
||||
if not self.is_github_url(url):
|
||||
return
|
||||
|
||||
try:
|
||||
cache_path = self._get_cache_path(url)
|
||||
# Only copy if source and destination are different
|
||||
if source_path.resolve() != cache_path.resolve():
|
||||
shutil.copy2(source_path, cache_path)
|
||||
|
||||
# Try to get HTTP headers for caching
|
||||
last_modified = None
|
||||
etag = None
|
||||
try:
|
||||
request = urllib.request.Request(url)
|
||||
request.get_method = lambda: "HEAD"
|
||||
response = urllib.request.urlopen(request, timeout=10)
|
||||
last_modified = response.headers.get("Last-Modified")
|
||||
etag = response.headers.get("ETag")
|
||||
except (OSError, urllib.error.URLError):
|
||||
pass
|
||||
|
||||
# Update metadata
|
||||
metadata = self._load_metadata()
|
||||
cache_key = self._get_cache_key(url)
|
||||
|
||||
metadata[cache_key] = {
|
||||
"url": url,
|
||||
"size": cache_path.stat().st_size,
|
||||
"cached_at": time.time(),
|
||||
"last_modified": last_modified,
|
||||
"etag": etag,
|
||||
}
|
||||
self._save_metadata(metadata)
|
||||
|
||||
_LOGGER.debug("Saved to cache: %s", url)
|
||||
|
||||
except OSError as e:
|
||||
_LOGGER.debug("Failed to save to cache: %s", e)
|
||||
|
||||
def copy_from_cache(self, url: str, destination: Path) -> bool:
|
||||
"""Copy a cached file to destination.
|
||||
|
||||
Args:
|
||||
url: URL of the cached file
|
||||
destination: Where to copy the file
|
||||
|
||||
Returns:
|
||||
True if successful, False otherwise
|
||||
"""
|
||||
cached_path = self.get_cached_path(url, check_updates=True)
|
||||
if not cached_path:
|
||||
return False
|
||||
|
||||
try:
|
||||
shutil.copy2(cached_path, destination)
|
||||
_LOGGER.info("Using cached download for %s", url)
|
||||
return True
|
||||
except OSError as e:
|
||||
_LOGGER.warning("Failed to use cache: %s", e)
|
||||
return False
|
||||
|
||||
def cache_size(self) -> int:
|
||||
"""Get total size of cached files in bytes."""
|
||||
total = 0
|
||||
try:
|
||||
for file_path in self.cache_dir.glob("*"):
|
||||
if file_path.is_file() and file_path != self.metadata_file:
|
||||
total += file_path.stat().st_size
|
||||
except OSError:
|
||||
pass
|
||||
return total
|
||||
|
||||
def list_cached(self) -> list[dict]:
|
||||
"""List all cached files with metadata."""
|
||||
cached_files = []
|
||||
metadata = self._load_metadata()
|
||||
|
||||
for cache_key, meta in metadata.items():
|
||||
cache_path = (
|
||||
self.cache_dir / f"{cache_key}{Path(meta['url'].split('?')[0]).suffix}"
|
||||
)
|
||||
if cache_path.exists():
|
||||
cached_files.append(
|
||||
{
|
||||
"url": meta["url"],
|
||||
"path": cache_path,
|
||||
"size": meta["size"],
|
||||
"cached_at": meta.get("cached_at"),
|
||||
"last_modified": meta.get("last_modified"),
|
||||
"etag": meta.get("etag"),
|
||||
}
|
||||
)
|
||||
|
||||
return cached_files
|
||||
|
||||
def clear_cache(self) -> None:
|
||||
"""Clear all cached files."""
|
||||
try:
|
||||
for file_path in self.cache_dir.glob("*"):
|
||||
if file_path.is_file():
|
||||
file_path.unlink()
|
||||
_LOGGER.info("Cache cleared: %s", self.cache_dir)
|
||||
except OSError as e:
|
||||
_LOGGER.warning("Failed to clear cache: %s", e)
|
||||
|
||||
def _prune_old_files(self) -> None:
|
||||
"""Remove cache files older than CACHE_EXPIRATION_SECONDS."""
|
||||
current_time = time.time()
|
||||
metadata = self._load_metadata()
|
||||
removed_count = 0
|
||||
removed_size = 0
|
||||
|
||||
# Check each file in metadata
|
||||
for cache_key, meta in list(metadata.items()):
|
||||
cached_at = meta.get("cached_at", 0)
|
||||
age_seconds = current_time - cached_at
|
||||
|
||||
if age_seconds > self.CACHE_EXPIRATION_SECONDS:
|
||||
# File is too old, remove it
|
||||
cache_path = (
|
||||
self.cache_dir
|
||||
/ f"{cache_key}{Path(meta['url'].split('?')[0]).suffix}"
|
||||
)
|
||||
if cache_path.exists():
|
||||
file_size = cache_path.stat().st_size
|
||||
cache_path.unlink()
|
||||
removed_size += file_size
|
||||
removed_count += 1
|
||||
_LOGGER.debug(
|
||||
"Pruned old cache file (age: %.1f days): %s",
|
||||
age_seconds / (24 * 60 * 60),
|
||||
meta["url"],
|
||||
)
|
||||
|
||||
# Remove from metadata
|
||||
del metadata[cache_key]
|
||||
|
||||
# Also check for orphaned files (files without metadata)
|
||||
for file_path in self.cache_dir.glob("*.zip"):
|
||||
if file_path == self.metadata_file:
|
||||
continue
|
||||
|
||||
# Check if file is in metadata
|
||||
found_in_metadata = False
|
||||
for cache_key in metadata:
|
||||
if file_path.name.startswith(cache_key):
|
||||
found_in_metadata = True
|
||||
break
|
||||
|
||||
if not found_in_metadata:
|
||||
# Orphaned file - check age by modification time
|
||||
file_age = current_time - file_path.stat().st_mtime
|
||||
if file_age > self.CACHE_EXPIRATION_SECONDS:
|
||||
file_size = file_path.stat().st_size
|
||||
file_path.unlink()
|
||||
removed_size += file_size
|
||||
removed_count += 1
|
||||
_LOGGER.debug(
|
||||
"Pruned orphaned cache file (age: %.1f days): %s",
|
||||
file_age / (24 * 60 * 60),
|
||||
file_path.name,
|
||||
)
|
||||
|
||||
# Save updated metadata if anything was removed
|
||||
if removed_count > 0:
|
||||
self._save_metadata(metadata)
|
||||
removed_mb = removed_size / (1024 * 1024)
|
||||
_LOGGER.info(
|
||||
"Pruned %d old cache file(s), freed %.2f MB",
|
||||
removed_count,
|
||||
removed_mb,
|
||||
)
|
||||
|
||||
|
||||
# Global cache instance
|
||||
_cache: GitHubCache | None = None
|
||||
|
||||
|
||||
def get_cache() -> GitHubCache:
|
||||
"""Get the global GitHub cache instance."""
|
||||
global _cache # noqa: PLW0603
|
||||
if _cache is None:
|
||||
_cache = GitHubCache()
|
||||
return _cache
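A sketch of how a caller might use this module end to end; download_to() is a stand-in for whatever download step the caller already has, not part of this module:

from pathlib import Path
from esphome.github_cache import get_cache


def fetch(url: str, dest: Path, download_to) -> None:
    cache = get_cache()
    # Reuse the cached copy when the server reports it unchanged (or cannot be reached)
    if cache.copy_from_cache(url, dest):
        return
    download_to(url, dest)          # caller-provided download function (hypothetical)
    cache.save_to_cache(url, dest)  # remember it for the next platform switch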
|
||||
@@ -5,7 +5,6 @@ import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
from typing import Any
|
||||
|
||||
from esphome.const import CONF_COMPILE_PROCESS_LIMIT, CONF_ESPHOME, KEY_CORE
|
||||
from esphome.core import CORE, EsphomeError
|
||||
@@ -44,32 +43,168 @@ def patch_structhash():
|
||||
|
||||
|
||||
def patch_file_downloader():
|
||||
"""Patch PlatformIO's FileDownloader to retry on PackageException errors."""
|
||||
from platformio.package.download import FileDownloader
|
||||
from platformio.package.exception import PackageException
|
||||
"""Patch PlatformIO's FileDownloader to add caching and retry on PackageException errors.
|
||||
|
||||
original_init = FileDownloader.__init__
|
||||
This function attempts to patch PlatformIO's internal download mechanism.
|
||||
If patching fails (due to API changes), it gracefully falls back to no caching.
|
||||
"""
|
||||
try:
|
||||
from platformio.package.download import FileDownloader
|
||||
from platformio.package.exception import PackageException
|
||||
except ImportError as e:
|
||||
_LOGGER.debug("Could not import PlatformIO modules for patching: %s", e)
|
||||
return
|
||||
|
||||
def patched_init(self, *args: Any, **kwargs: Any) -> None:
|
||||
max_retries = 3
|
||||
# Import our cache module
|
||||
from esphome.github_cache import GitHubCache
|
||||
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
return original_init(self, *args, **kwargs)
|
||||
except PackageException as e:
|
||||
if attempt < max_retries - 1:
|
||||
_LOGGER.warning(
|
||||
"Package download failed: %s. Retrying... (attempt %d/%d)",
|
||||
str(e),
|
||||
attempt + 1,
|
||||
max_retries,
|
||||
_LOGGER.debug("Applying GitHub download cache patch...")
|
||||
|
||||
# Verify the classes have the expected methods before patching
|
||||
if not hasattr(FileDownloader, "__init__") or not hasattr(FileDownloader, "start"):
|
||||
_LOGGER.warning(
|
||||
"PlatformIO FileDownloader API has changed, skipping cache patch"
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
original_init = FileDownloader.__init__
|
||||
original_start = FileDownloader.start
|
||||
|
||||
# Initialize cache in .platformio directory so it benefits from GitHub Actions cache
|
||||
platformio_dir = Path.home() / ".platformio"
|
||||
cache_dir = platformio_dir / "esphome_download_cache"
|
||||
cache_dir_existed = cache_dir.exists()
|
||||
cache = GitHubCache(cache_dir=cache_dir)
|
||||
if not cache_dir_existed:
|
||||
_LOGGER.info("Created GitHub download cache at: %s", cache.cache_dir)
|
||||
except Exception as e:
|
||||
_LOGGER.warning("Failed to initialize GitHub download cache: %s", e)
|
||||
return
|
||||
|
||||
def patched_init(self, *args, **kwargs):
|
||||
"""Patched init that checks cache before making HTTP connection."""
|
||||
try:
|
||||
# Extract URL from args (first positional argument)
|
||||
url = args[0] if args else kwargs.get("url")
|
||||
dest_dir = args[1] if len(args) > 1 else kwargs.get("dest_dir")
|
||||
|
||||
# Debug: Log all downloads
|
||||
_LOGGER.debug("[GitHub Cache] Download request for: %s", url)
|
||||
|
||||
# Store URL for later use (original FileDownloader doesn't store it)
|
||||
self._esphome_cache_url = url if cache.is_github_url(url) else None
|
||||
|
||||
# Check cache for GitHub URLs BEFORE making HTTP request
|
||||
if self._esphome_cache_url:
|
||||
_LOGGER.debug("[GitHub Cache] This is a GitHub URL, checking cache...")
|
||||
self._esphome_use_cache = cache.get_cached_path(url, check_updates=True)
|
||||
if self._esphome_use_cache:
|
||||
_LOGGER.info(
|
||||
"Found %s in cache, will restore instead of downloading",
|
||||
Path(url.split("?")[0]).name,
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"[GitHub Cache] Found in cache: %s", self._esphome_use_cache
|
||||
)
|
||||
else:
|
||||
# Final attempt - re-raise
|
||||
raise
|
||||
return None
|
||||
_LOGGER.debug(
|
||||
"[GitHub Cache] Not in cache, will download and cache"
|
||||
)
|
||||
else:
|
||||
self._esphome_use_cache = None
|
||||
if url and str(url).startswith("http"):
|
||||
_LOGGER.debug("[GitHub Cache] Not a GitHub URL, skipping cache")
|
||||
|
||||
FileDownloader.__init__ = patched_init
|
||||
# Only make HTTP connection if we don't have cached file
|
||||
if self._esphome_use_cache:
|
||||
# Skip HTTP connection, we'll handle this in start()
|
||||
# Set minimal attributes to satisfy FileDownloader
|
||||
# Create a mock session that can be safely closed in __del__
|
||||
class MockSession:
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
self._http_session = MockSession()
|
||||
self._http_response = None
|
||||
self._fname = Path(url.split("?")[0]).name
|
||||
self._destination = self._fname
|
||||
if dest_dir:
|
||||
from os.path import join
|
||||
|
||||
self._destination = join(dest_dir, self._fname)
|
||||
# Note: Actual restoration logged in patched_start
|
||||
return None # Don't call original_init
|
||||
|
||||
# Normal initialization with retry logic
|
||||
max_retries = 3
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
return original_init(self, *args, **kwargs)
|
||||
except PackageException as e:
|
||||
if attempt < max_retries - 1:
|
||||
_LOGGER.warning(
|
||||
"Package download failed: %s. Retrying... (attempt %d/%d)",
|
||||
str(e),
|
||||
attempt + 1,
|
||||
max_retries,
|
||||
)
|
||||
else:
|
||||
# Final attempt - re-raise
|
||||
raise
|
||||
return None
|
||||
except Exception as e:
|
||||
# If anything goes wrong in our cache logic, fall back to normal download
|
||||
_LOGGER.debug("Cache check failed, falling back to normal download: %s", e)
|
||||
self._esphome_cache_url = None
|
||||
self._esphome_use_cache = None
|
||||
return original_init(self, *args, **kwargs)
|
||||
|
||||
def patched_start(self, *args, **kwargs):
|
||||
"""Patched start that uses cache when available."""
|
||||
try:
|
||||
import shutil
|
||||
|
||||
# Get the cache URL and path that were set in __init__
|
||||
cache_url = getattr(self, "_esphome_cache_url", None)
|
||||
cached_file = getattr(self, "_esphome_use_cache", None)
|
||||
|
||||
# If we're using cache, copy file instead of downloading
|
||||
if cached_file:
|
||||
try:
|
||||
shutil.copy2(cached_file, self._destination)
|
||||
_LOGGER.info(
|
||||
"Restored %s from cache (avoided download)",
|
||||
Path(cached_file).name,
|
||||
)
|
||||
return True
|
||||
except OSError as e:
|
||||
_LOGGER.warning("Failed to copy from cache: %s", e)
|
||||
# Fall through to re-download
|
||||
|
||||
# Perform normal download
|
||||
result = original_start(self, *args, **kwargs)
|
||||
|
||||
# Save to cache if it was a GitHub URL
|
||||
if cache_url:
|
||||
try:
|
||||
cache.save_to_cache(cache_url, Path(self._destination))
|
||||
except OSError as e:
|
||||
_LOGGER.debug("Failed to save to cache: %s", e)
|
||||
|
||||
return result
|
||||
except Exception as e:
|
||||
# If anything goes wrong, fall back to normal download
|
||||
_LOGGER.debug("Cache restoration failed, using normal download: %s", e)
|
||||
return original_start(self, *args, **kwargs)
|
||||
|
||||
# Apply the patches
|
||||
try:
|
||||
FileDownloader.__init__ = patched_init
|
||||
FileDownloader.start = patched_start
|
||||
_LOGGER.debug("GitHub download cache patch applied successfully")
|
||||
except Exception as e:
|
||||
_LOGGER.warning("Failed to apply GitHub download cache patch: %s", e)
|
||||
|
||||
|
||||
IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
|
||||
@@ -87,6 +222,8 @@ FILTER_PLATFORMIO_LINES = [
|
||||
r"Memory Usage -> https://bit.ly/pio-memory-usage",
|
||||
r"Found: https://platformio.org/lib/show/.*",
|
||||
r"Using cache: .*",
|
||||
# Don't filter our cache messages - let users see when cache is being used
|
||||
# r"Using cached download for .*",
|
||||
r"Installing dependencies",
|
||||
r"Library Manager: Already installed, built-in library",
|
||||
r"Building in .* mode",
|
||||
@@ -145,16 +282,7 @@ def run_compile(config, verbose):
|
||||
args = []
|
||||
if CONF_COMPILE_PROCESS_LIMIT in config[CONF_ESPHOME]:
|
||||
args += [f"-j{config[CONF_ESPHOME][CONF_COMPILE_PROCESS_LIMIT]}"]
|
||||
result = run_platformio_cli_run(config, verbose, *args)
|
||||
|
||||
# Run memory analysis if enabled
|
||||
if config.get(CONF_ESPHOME, {}).get("analyze_memory", False):
|
||||
try:
|
||||
analyze_memory_usage(config)
|
||||
except Exception as e:
|
||||
_LOGGER.warning("Failed to analyze memory usage: %s", e)
|
||||
|
||||
return result
|
||||
return run_platformio_cli_run(config, verbose, *args)
|
||||
|
||||
|
||||
def _run_idedata(config):
|
||||
@@ -403,74 +531,3 @@ class IDEData:
|
||||
if path.endswith(".exe")
|
||||
else f"{path[:-3]}readelf"
|
||||
)
|
||||
|
||||
|
||||
def analyze_memory_usage(config: dict[str, Any]) -> None:
|
||||
"""Analyze memory usage by component after compilation."""
|
||||
# Lazy import to avoid overhead when not needed
|
||||
from esphome.analyze_memory.cli import MemoryAnalyzerCLI
|
||||
from esphome.analyze_memory.helpers import get_esphome_components
|
||||
|
||||
idedata = get_idedata(config)
|
||||
|
||||
# Get paths to tools
|
||||
elf_path = idedata.firmware_elf_path
|
||||
objdump_path = idedata.objdump_path
|
||||
readelf_path = idedata.readelf_path
|
||||
|
||||
# Debug logging
|
||||
_LOGGER.debug("ELF path from idedata: %s", elf_path)
|
||||
|
||||
# Check if file exists
|
||||
if not Path(elf_path).exists():
|
||||
# Try alternate path
|
||||
alt_path = Path(CORE.relative_build_path(".pioenvs", CORE.name, "firmware.elf"))
|
||||
if alt_path.exists():
|
||||
elf_path = str(alt_path)
|
||||
_LOGGER.debug("Using alternate ELF path: %s", elf_path)
|
||||
else:
|
||||
_LOGGER.warning("ELF file not found at %s or %s", elf_path, alt_path)
|
||||
return
|
||||
|
||||
# Extract external components from config
|
||||
external_components = set()
|
||||
|
||||
# Get the list of built-in ESPHome components
|
||||
builtin_components = get_esphome_components()
|
||||
|
||||
# Special non-component keys that appear in configs
|
||||
NON_COMPONENT_KEYS = {
|
||||
CONF_ESPHOME,
|
||||
"substitutions",
|
||||
"packages",
|
||||
"globals",
|
||||
"<<",
|
||||
}
|
||||
|
||||
# Check all top-level keys in config
|
||||
for key in config:
|
||||
if key not in builtin_components and key not in NON_COMPONENT_KEYS:
|
||||
# This is an external component
|
||||
external_components.add(key)
|
||||
|
||||
_LOGGER.debug("Detected external components: %s", external_components)
|
||||
|
||||
# Create analyzer and run analysis
|
||||
analyzer = MemoryAnalyzerCLI(
|
||||
elf_path, objdump_path, readelf_path, external_components
|
||||
)
|
||||
analyzer.analyze()
|
||||
|
||||
# Generate and print report
|
||||
report = analyzer.generate_report()
|
||||
_LOGGER.info("\n%s", report)
|
||||
|
||||
# Optionally save to file
|
||||
if config.get(CONF_ESPHOME, {}).get("memory_report_file"):
|
||||
report_file = Path(config[CONF_ESPHOME]["memory_report_file"])
|
||||
if report_file.suffix == ".json":
|
||||
report_file.write_text(analyzer.to_json())
|
||||
_LOGGER.info("Memory report saved to %s", report_file)
|
||||
else:
|
||||
report_file.write_text(report)
|
||||
_LOGGER.info("Memory report saved to %s", report_file)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
pylint==4.0.2
|
||||
pylint==4.0.1
|
||||
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
|
||||
ruff==0.14.1 # also change in .pre-commit-config.yaml when updating
|
||||
pyupgrade==3.21.0 # also change in .pre-commit-config.yaml when updating
|
||||
|
||||
164
script/cache_platformio_downloads.py
Executable file
164
script/cache_platformio_downloads.py
Executable file
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Pre-cache PlatformIO GitHub Downloads
|
||||
|
||||
This script extracts GitHub URLs from platformio.ini and pre-caches them
|
||||
to avoid redundant downloads when switching between ESP8266 and ESP32 builds.
|
||||
|
||||
Usage:
|
||||
python3 script/cache_platformio_downloads.py [platformio.ini]
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import configparser
|
||||
from pathlib import Path
|
||||
import re
|
||||
import sys
|
||||
|
||||
# Import the cache manager
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
from esphome.github_cache import GitHubCache
|
||||
|
||||
|
||||
def extract_github_urls(platformio_ini: Path) -> list[str]:
|
||||
"""Extract all GitHub URLs from platformio.ini.
|
||||
|
||||
Args:
|
||||
platformio_ini: Path to platformio.ini file
|
||||
|
||||
Returns:
|
||||
List of GitHub URLs found
|
||||
"""
|
||||
config = configparser.ConfigParser(inline_comment_prefixes=(";",))
|
||||
config.read(platformio_ini)
|
||||
|
||||
urls = []
|
||||
github_pattern = re.compile(r"https://github\.com/[^\s;]+\.zip")
|
||||
|
||||
for section in config.sections():
|
||||
conf = config[section]
|
||||
|
||||
# Check platform
|
||||
if "platform" in conf:
|
||||
platform_value = conf["platform"]
|
||||
matches = github_pattern.findall(platform_value)
|
||||
urls.extend(matches)
|
||||
|
||||
# Check platform_packages
|
||||
if "platform_packages" in conf:
|
||||
for line in conf["platform_packages"].splitlines():
|
||||
line = line.strip()
|
||||
if not line or line.startswith("#"):
|
||||
continue
|
||||
matches = github_pattern.findall(line)
|
||||
urls.extend(matches)
|
||||
|
||||
# Remove duplicates while preserving order using dict
|
||||
return list(dict.fromkeys(urls))
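For example, with the function above in scope, run against a minimal platformio.ini written to a temporary file (the URLs are made up):

import tempfile
from pathlib import Path

sample = """
[env:demo]
platform = https://github.com/example-org/platform-foo/releases/download/1.0.0/platform-foo.zip
platform_packages =
    framework-bar @ https://github.com/example-org/framework-bar/releases/download/2.0.0/framework-bar.zip
"""

with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as f:
    f.write(sample)

print(extract_github_urls(Path(f.name)))  # both URLs, duplicates removed, order preserved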
|
||||
|
||||
|
||||
def main():
|
||||
"""Main entry point."""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Pre-cache PlatformIO GitHub downloads",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog="""
|
||||
This script scans platformio.ini for GitHub URLs and pre-caches them.
|
||||
This avoids redundant downloads when switching between platforms (e.g., ESP8266 and ESP32).
|
||||
|
||||
Examples:
|
||||
# Cache downloads from default platformio.ini
|
||||
%(prog)s
|
||||
|
||||
# Cache downloads from specific file
|
||||
%(prog)s custom_platformio.ini
|
||||
|
||||
# Show what would be cached without downloading
|
||||
%(prog)s --dry-run
|
||||
""",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"platformio_ini",
|
||||
nargs="?",
|
||||
default="platformio.ini",
|
||||
help="Path to platformio.ini (default: platformio.ini)",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Show what would be cached without downloading",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--cache-dir",
|
||||
type=Path,
|
||||
help="Cache directory (default: ~/.platformio/esphome_download_cache)",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--force",
|
||||
action="store_true",
|
||||
help="Force re-download even if cached",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
platformio_ini = Path(args.platformio_ini)
|
||||
|
||||
if not platformio_ini.exists():
|
||||
print(f"Error: {platformio_ini} not found", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Extract URLs
|
||||
print(f"Scanning {platformio_ini} for GitHub URLs...")
|
||||
urls = extract_github_urls(platformio_ini)
|
||||
|
||||
if not urls:
|
||||
print("No GitHub URLs found in platformio.ini")
|
||||
return 0
|
||||
|
||||
print(f"Found {len(urls)} unique GitHub URL(s):")
|
||||
for url in urls:
|
||||
print(f" - {url}")
|
||||
print()
|
||||
|
||||
if args.dry_run:
|
||||
print("Dry run - not downloading")
|
||||
return 0
|
||||
|
||||
# Initialize cache (use PlatformIO directory by default)
|
||||
cache_dir = args.cache_dir
|
||||
if cache_dir is None:
|
||||
cache_dir = Path.home() / ".platformio" / "esphome_download_cache"
|
||||
cache = GitHubCache(cache_dir)
|
||||
|
||||
# Cache each URL
|
||||
success_count = 0
|
||||
for i, url in enumerate(urls, 1):
|
||||
print(f"[{i}/{len(urls)}] Checking {url}")
|
||||
try:
|
||||
# Use the download_with_progress from github_download_cache CLI
|
||||
from script.github_download_cache import download_with_progress
|
||||
|
||||
download_with_progress(cache, url, force=args.force, check_updates=True)
|
||||
success_count += 1
|
||||
print()
|
||||
except Exception as e:
|
||||
print(f"Error caching {url}: {e}", file=sys.stderr)
|
||||
print()
|
||||
|
||||
# Show cache stats
|
||||
total_size = cache.cache_size()
|
||||
size_mb = total_size / (1024 * 1024)
|
||||
print("\nCache summary:")
|
||||
print(f" Successfully cached: {success_count}/{len(urls)}")
|
||||
print(f" Total cache size: {size_mb:.2f} MB")
|
||||
print(f" Cache location: {cache.cache_dir}")
|
||||
|
||||
return 0 if success_count == len(urls) else 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
@@ -61,11 +61,6 @@ from helpers import (
|
||||
root_path,
|
||||
)
|
||||
|
||||
# Threshold for splitting clang-tidy jobs
|
||||
# For small PRs (< 65 files), use nosplit for faster CI
|
||||
# For large PRs (>= 65 files), use split for better parallelization
|
||||
CLANG_TIDY_SPLIT_THRESHOLD = 65
|
||||
|
||||
|
||||
class Platform(StrEnum):
|
||||
"""Platform identifiers for memory impact analysis."""
|
||||
@@ -83,16 +78,11 @@ MEMORY_IMPACT_FALLBACK_COMPONENT = "api" # Representative component for core ch
|
||||
MEMORY_IMPACT_FALLBACK_PLATFORM = Platform.ESP32_IDF # Most representative platform
|
||||
|
||||
# Platform preference order for memory impact analysis
|
||||
# This order is used when no platform-specific hints are detected from filenames
|
||||
# Priority rationale:
|
||||
# 1. ESP32-C6 IDF - Newest platform, supports Thread/Zigbee
|
||||
# 2. ESP8266 Arduino - Most memory constrained (best for detecting memory impact),
|
||||
# fastest build times, most sensitive to code size changes
|
||||
# 3. ESP32 IDF - Primary ESP32 platform, most representative of modern ESPHome
|
||||
# 4-6. Other ESP32 variants - Less commonly used but still supported
|
||||
# Prefer newer platforms first as they represent the future of ESPHome
|
||||
# ESP8266 is most constrained but many new features don't support it
|
||||
MEMORY_IMPACT_PLATFORM_PREFERENCE = [
|
||||
Platform.ESP32_C6_IDF, # ESP32-C6 IDF (newest, supports Thread/Zigbee)
|
||||
Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained, fastest builds)
|
||||
Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained - best for impact analysis)
|
||||
Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative)
|
||||
Platform.ESP32_C3_IDF, # ESP32-C3 IDF
|
||||
Platform.ESP32_S2_IDF, # ESP32-S2 IDF
|
||||
@@ -220,22 +210,6 @@ def should_run_clang_tidy(branch: str | None = None) -> bool:
|
||||
return _any_changed_file_endswith(branch, CPP_FILE_EXTENSIONS)
|
||||
|
||||
|
||||
def count_changed_cpp_files(branch: str | None = None) -> int:
|
||||
"""Count the number of changed C++ files.
|
||||
|
||||
This is used to determine whether to split clang-tidy jobs or run them as a single job.
|
||||
For PRs with < 65 changed C++ files, running a single job is faster than splitting.
|
||||
|
||||
Args:
|
||||
branch: Branch to compare against. If None, uses default.
|
||||
|
||||
Returns:
|
||||
Number of changed C++ files.
|
||||
"""
|
||||
files = changed_files(branch)
|
||||
return sum(1 for file in files if file.endswith(CPP_FILE_EXTENSIONS))
|
||||
|
||||
|
||||
def should_run_clang_format(branch: str | None = None) -> bool:
|
||||
"""Determine if clang-format should run based on changed files.
|
||||
|
||||
@@ -290,91 +264,6 @@ def _component_has_tests(component: str) -> bool:
|
||||
return bool(get_component_test_files(component))
|
||||
|
||||
|
||||
def _select_platform_by_preference(
|
||||
platforms: list[Platform] | set[Platform],
|
||||
) -> Platform:
|
||||
"""Select the most preferred platform from a list/set based on MEMORY_IMPACT_PLATFORM_PREFERENCE.
|
||||
|
||||
Args:
|
||||
platforms: List or set of platforms to choose from
|
||||
|
||||
Returns:
|
||||
The most preferred platform (earliest in MEMORY_IMPACT_PLATFORM_PREFERENCE)
|
||||
"""
|
||||
return min(platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index)
|
||||
|
||||
|
||||
def _select_platform_by_count(
|
||||
platform_counts: Counter[Platform],
|
||||
) -> Platform:
|
||||
"""Select platform by count, using MEMORY_IMPACT_PLATFORM_PREFERENCE as tiebreaker.
|
||||
|
||||
Args:
|
||||
platform_counts: Counter mapping platforms to their counts
|
||||
|
||||
Returns:
|
||||
Platform with highest count, breaking ties by preference order
|
||||
"""
|
||||
return min(
|
||||
platform_counts.keys(),
|
||||
key=lambda p: (
|
||||
-platform_counts[p], # Negative to prefer higher counts
|
||||
MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _detect_platform_hint_from_filename(filename: str) -> Platform | None:
|
||||
"""Detect platform hint from filename patterns.
|
||||
|
||||
Detects platform-specific files using patterns like:
|
||||
- wifi_component_esp_idf.cpp, *_idf.h -> ESP32 IDF variants
|
||||
- wifi_component_esp8266.cpp, *_esp8266.h -> ESP8266_ARD
|
||||
- *_esp32*.cpp -> ESP32 IDF (generic)
|
||||
- *_libretiny.cpp, *_retiny.* -> LibreTiny (not in preference list)
|
||||
- *_pico.cpp, *_rp2040.* -> RP2040 (not in preference list)
|
||||
|
||||
Args:
|
||||
filename: File path to check
|
||||
|
||||
Returns:
|
||||
Platform enum if a specific platform is detected, None otherwise
|
||||
"""
|
||||
filename_lower = filename.lower()
|
||||
|
||||
# ESP-IDF platforms (check specific variants first)
|
||||
if "esp_idf" in filename_lower or "_idf" in filename_lower:
|
||||
# Check for specific ESP32 variants
|
||||
if "c6" in filename_lower or "esp32c6" in filename_lower:
|
||||
return Platform.ESP32_C6_IDF
|
||||
if "c3" in filename_lower or "esp32c3" in filename_lower:
|
||||
return Platform.ESP32_C3_IDF
|
||||
if "s2" in filename_lower or "esp32s2" in filename_lower:
|
||||
return Platform.ESP32_S2_IDF
|
||||
if "s3" in filename_lower or "esp32s3" in filename_lower:
|
||||
return Platform.ESP32_S3_IDF
|
||||
# Default to ESP32 IDF for generic esp_idf files
|
||||
return Platform.ESP32_IDF
|
||||
|
||||
# ESP8266 Arduino
|
||||
if "esp8266" in filename_lower:
|
||||
return Platform.ESP8266_ARD
|
||||
|
||||
# Generic ESP32 (without _idf suffix, could be Arduino or shared code)
|
||||
# Prefer IDF as it's the modern platform
|
||||
if "esp32" in filename_lower:
|
||||
return Platform.ESP32_IDF
|
||||
|
||||
# LibreTiny and RP2040 are not in MEMORY_IMPACT_PLATFORM_PREFERENCE
|
||||
# so we don't return them as hints
|
||||
# if "retiny" in filename_lower or "libretiny" in filename_lower:
|
||||
# return None # No specific LibreTiny platform preference
|
||||
# if "pico" in filename_lower or "rp2040" in filename_lower:
|
||||
# return None # No RP2040 platform preference
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def detect_memory_impact_config(
|
||||
branch: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
@@ -384,12 +273,6 @@ def detect_memory_impact_config(
|
||||
building a merged configuration with all changed components (like
|
||||
test_build_components.py does) to get comprehensive memory analysis.
|
||||
|
||||
When platform-specific files are detected (e.g., wifi_component_esp_idf.cpp),
|
||||
prefers that platform for testing to ensure the most relevant memory analysis.
|
||||
|
||||
For core C++ file changes without component changes, runs a fallback
|
||||
analysis using a representative component to measure the impact.
|
||||
|
||||
Args:
|
||||
branch: Branch to compare against
|
||||
|
||||
@@ -405,10 +288,8 @@ def detect_memory_impact_config(
|
||||
files = changed_files(branch)
|
||||
|
||||
# Find all changed components (excluding core and base bus components)
|
||||
# Also collect platform hints from platform-specific filenames
|
||||
changed_component_set: set[str] = set()
|
||||
has_core_cpp_changes = False
|
||||
platform_hints: list[Platform] = []
|
||||
has_core_changes = False
|
||||
|
||||
for file in files:
|
||||
component = get_component_from_path(file)
|
||||
@@ -416,27 +297,22 @@ def detect_memory_impact_config(
|
||||
# Skip base bus components as they're used across many builds
|
||||
if component not in BASE_BUS_COMPONENTS:
|
||||
changed_component_set.add(component)
|
||||
# Check if this is a platform-specific file
|
||||
platform_hint = _detect_platform_hint_from_filename(file)
|
||||
if platform_hint:
|
||||
platform_hints.append(platform_hint)
|
||||
elif file.startswith("esphome/") and file.endswith(CPP_FILE_EXTENSIONS):
|
||||
# Core ESPHome C++ files changed (not component-specific)
|
||||
# Only C++ files affect memory usage
|
||||
has_core_cpp_changes = True
|
||||
elif file.startswith("esphome/"):
|
||||
# Core ESPHome files changed (not component-specific)
|
||||
has_core_changes = True
|
||||
|
||||
# If no components changed but core C++ changed, test representative component
|
||||
# If no components changed but core changed, test representative component
|
||||
force_fallback_platform = False
|
||||
if not changed_component_set and has_core_cpp_changes:
|
||||
if not changed_component_set and has_core_changes:
|
||||
print(
|
||||
f"Memory impact: No components changed, but core C++ files changed. "
|
||||
f"Memory impact: No components changed, but core files changed. "
|
||||
f"Testing {MEMORY_IMPACT_FALLBACK_COMPONENT} component on {MEMORY_IMPACT_FALLBACK_PLATFORM}.",
|
||||
file=sys.stderr,
|
||||
)
|
||||
changed_component_set.add(MEMORY_IMPACT_FALLBACK_COMPONENT)
|
||||
force_fallback_platform = True # Use fallback platform (most representative)
|
||||
elif not changed_component_set:
|
||||
# No components and no core C++ changes
|
||||
# No components and no core changes
|
||||
return {"should_run": "false"}
|
||||
|
||||
# Find components that have tests and collect their supported platforms
|
||||
@@ -476,42 +352,27 @@ def detect_memory_impact_config(
|
||||
common_platforms &= platforms
|
||||
|
||||
# Select the most preferred platform from the common set
|
||||
# Priority order:
|
||||
# 1. Platform hints from filenames (e.g., wifi_component_esp_idf.cpp suggests ESP32_IDF)
|
||||
# 2. Core changes use fallback platform (most representative of codebase)
|
||||
# 3. Common platforms supported by all components
|
||||
# 4. Most commonly supported platform
|
||||
if platform_hints:
|
||||
# Use most common platform hint that's also supported by all components
|
||||
hint_counts = Counter(platform_hints)
|
||||
# Filter to only hints that are in common_platforms (if any common platforms exist)
|
||||
valid_hints = (
|
||||
[h for h in hint_counts if h in common_platforms]
|
||||
if common_platforms
|
||||
else list(hint_counts.keys())
|
||||
)
|
||||
if valid_hints:
|
||||
platform = _select_platform_by_count(
|
||||
Counter({p: hint_counts[p] for p in valid_hints})
|
||||
)
|
||||
elif common_platforms:
|
||||
# Hints exist but none match common platforms, use common platform logic
|
||||
platform = _select_platform_by_preference(common_platforms)
|
||||
else:
|
||||
# Use the most common hint even if it's not in common platforms
|
||||
platform = _select_platform_by_count(hint_counts)
|
||||
elif force_fallback_platform:
|
||||
# Exception: for core changes, use fallback platform (most representative of codebase)
|
||||
if force_fallback_platform:
|
||||
platform = MEMORY_IMPACT_FALLBACK_PLATFORM
|
||||
elif common_platforms:
|
||||
# Pick the most preferred platform that all components support
|
||||
platform = _select_platform_by_preference(common_platforms)
|
||||
platform = min(common_platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index)
|
||||
else:
|
||||
# No common platform - pick the most commonly supported platform
|
||||
# This allows testing components individually even if they can't be merged
|
||||
# Count how many components support each platform
|
||||
platform_counts = Counter(
|
||||
p for platforms in component_platforms_map.values() for p in platforms
|
||||
)
|
||||
platform = _select_platform_by_count(platform_counts)
|
||||
# Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE
|
||||
platform = max(
|
||||
platform_counts.keys(),
|
||||
key=lambda p: (
|
||||
platform_counts[p],
|
||||
-MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p),
|
||||
),
|
||||
)
|
||||
|
||||
# Debug output
|
||||
print("Memory impact analysis:", file=sys.stderr)
|
||||
@@ -521,7 +382,6 @@ def detect_memory_impact_config(
|
||||
f" Component platforms: {dict(sorted(component_platforms_map.items()))}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
print(f" Platform hints from filenames: {platform_hints}", file=sys.stderr)
|
||||
print(f" Common platforms: {sorted(common_platforms)}", file=sys.stderr)
|
||||
print(f" Selected platform: {platform}", file=sys.stderr)
|
||||
|
||||
@@ -548,7 +408,6 @@ def main() -> None:
|
||||
run_clang_tidy = should_run_clang_tidy(args.branch)
|
||||
run_clang_format = should_run_clang_format(args.branch)
|
||||
run_python_linters = should_run_python_linters(args.branch)
|
||||
changed_cpp_file_count = count_changed_cpp_files(args.branch)
|
||||
|
||||
# Get both directly changed and all changed components (with dependencies) in one call
|
||||
script_path = Path(__file__).parent / "list-components.py"
|
||||
@@ -586,19 +445,10 @@ def main() -> None:
|
||||
# Detect components for memory impact analysis (merged config)
|
||||
memory_impact = detect_memory_impact_config(args.branch)
|
||||
|
||||
if run_clang_tidy:
|
||||
if changed_cpp_file_count < CLANG_TIDY_SPLIT_THRESHOLD:
|
||||
clang_tidy_mode = "nosplit"
|
||||
else:
|
||||
clang_tidy_mode = "split"
|
||||
else:
|
||||
clang_tidy_mode = "disabled"
|
||||
|
||||
# Build output
|
||||
output: dict[str, Any] = {
|
||||
"integration_tests": run_integration,
|
||||
"clang_tidy": run_clang_tidy,
|
||||
"clang_tidy_mode": clang_tidy_mode,
|
||||
"clang_format": run_clang_format,
|
||||
"python_linters": run_python_linters,
|
||||
"changed_components": changed_components,
|
||||
@@ -608,7 +458,6 @@ def main() -> None:
|
||||
"component_test_count": len(changed_components_with_tests),
|
||||
"directly_changed_count": len(directly_changed_with_tests),
|
||||
"dependency_only_count": len(dependency_only_components),
|
||||
"changed_cpp_file_count": changed_cpp_file_count,
|
||||
"memory_impact": memory_impact,
|
||||
}
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.