Mirror of https://github.com/esphome/esphome.git (synced 2025-09-26 07:02:21 +01:00)

Commit: Merge branch 'dev' into remove_esp32_arduino_ble_limiations

.github/workflows/sync-device-classes.yml (vendored)
@@ -30,11 +30,16 @@ jobs:
        run: |
          python -m pip install --upgrade pip
+         pip install -e lib/home-assistant
          pip install -r requirements_test.txt pre-commit

      - name: Sync
        run: |
          python ./script/sync-device_class.py

+     - name: Run pre-commit hooks
+       run: |
+         python script/run-in-env.py pre-commit run --all-files
+
      - name: Commit changes
        uses: peter-evans/create-pull-request@v7.0.8
        with:
@@ -11,7 +11,7 @@ ci:
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
-   rev: v0.13.0
+   rev: v0.13.1
    hooks:
      # Run the linter.
      - id: ruff
@@ -548,3 +548,4 @@ esphome/components/xxtea/* @clydebarrow
esphome/components/zephyr/* @tomaszduda23
esphome/components/zhlt01/* @cfeenstra1024
esphome/components/zio_ultrasonic/* @kahrendt
+esphome/components/zwave_proxy/* @kbx81
@@ -6,6 +6,7 @@ import getpass
import importlib
import logging
import os
+from pathlib import Path
import re
import sys
import time
@@ -114,6 +115,14 @@ class Purpose(StrEnum):
    LOGGING = "logging"


+def _resolve_with_cache(address: str, purpose: Purpose) -> list[str]:
+    """Resolve an address using cache if available, otherwise return the address itself."""
+    if CORE.address_cache and (cached := CORE.address_cache.get_addresses(address)):
+        _LOGGER.debug("Using cached addresses for %s: %s", purpose.value, cached)
+        return cached
+    return [address]
+
+
def choose_upload_log_host(
    default: list[str] | str | None,
    check_default: str | None,
@@ -142,7 +151,7 @@ def choose_upload_log_host(
        (purpose == Purpose.LOGGING and has_api())
        or (purpose == Purpose.UPLOADING and has_ota())
    ):
-        resolved.append(CORE.address)
+        resolved.extend(_resolve_with_cache(CORE.address, purpose))

    if purpose == Purpose.LOGGING:
        if has_api() and has_mqtt_ip_lookup():
@@ -152,15 +161,14 @@ def choose_upload_log_host(
            resolved.append("MQTT")

        if has_api() and has_non_ip_address():
-            resolved.append(CORE.address)
+            resolved.extend(_resolve_with_cache(CORE.address, purpose))

    elif purpose == Purpose.UPLOADING:
        if has_ota() and has_mqtt_ip_lookup():
            resolved.append("MQTTIP")

        if has_ota() and has_non_ip_address():
-            resolved.append(CORE.address)
-
+            resolved.extend(_resolve_with_cache(CORE.address, purpose))
    else:
        resolved.append(device)
    if not resolved:
@@ -445,7 +453,7 @@ def upload_using_esptool(
        "detect",
    ]
    for img in flash_images:
-        cmd += [img.offset, img.path]
+        cmd += [img.offset, str(img.path)]

    if os.environ.get("ESPHOME_USE_SUBPROCESS") is None:
        import esptool
@@ -531,7 +539,10 @@ def upload_program(
    remote_port = int(ota_conf[CONF_PORT])
    password = ota_conf.get(CONF_PASSWORD, "")
-    binary = args.file if getattr(args, "file", None) is not None else CORE.firmware_bin
+    if getattr(args, "file", None) is not None:
+        binary = Path(args.file)
+    else:
+        binary = CORE.firmware_bin

    # MQTT address resolution
    if get_port_type(host) in ("MQTT", "MQTTIP"):
@@ -598,7 +609,7 @@ def clean_mqtt(config: ConfigType, args: ArgsProtocol) -> int | None:
def command_wizard(args: ArgsProtocol) -> int | None:
    from esphome import wizard

-    return wizard.wizard(args.configuration)
+    return wizard.wizard(Path(args.configuration))


def command_config(args: ArgsProtocol, config: ConfigType) -> int | None:
@@ -761,7 +772,7 @@ def command_update_all(args: ArgsProtocol) -> int | None:
        safe_print(f"{half_line}{middle_text}{half_line}")

    for f in files:
-        safe_print(f"Updating {color(AnsiFore.CYAN, f)}")
+        safe_print(f"Updating {color(AnsiFore.CYAN, str(f))}")
        safe_print("-" * twidth)
        safe_print()
        if CORE.dashboard:
@@ -773,10 +784,10 @@ def command_update_all(args: ArgsProtocol) -> int | None:
                "esphome", "run", f, "--no-logs", "--device", "OTA"
            )
            if rc == 0:
-                print_bar(f"[{color(AnsiFore.BOLD_GREEN, 'SUCCESS')}] {f}")
+                print_bar(f"[{color(AnsiFore.BOLD_GREEN, 'SUCCESS')}] {str(f)}")
                success[f] = True
            else:
-                print_bar(f"[{color(AnsiFore.BOLD_RED, 'ERROR')}] {f}")
+                print_bar(f"[{color(AnsiFore.BOLD_RED, 'ERROR')}] {str(f)}")
                success[f] = False

        safe_print()
@@ -787,9 +798,9 @@ def command_update_all(args: ArgsProtocol) -> int | None:
    failed = 0
    for f in files:
        if success[f]:
-            safe_print(f"  - {f}: {color(AnsiFore.GREEN, 'SUCCESS')}")
+            safe_print(f"  - {str(f)}: {color(AnsiFore.GREEN, 'SUCCESS')}")
        else:
-            safe_print(f"  - {f}: {color(AnsiFore.BOLD_RED, 'FAILED')}")
+            safe_print(f"  - {str(f)}: {color(AnsiFore.BOLD_RED, 'FAILED')}")
            failed += 1
    return failed
@@ -811,7 +822,8 @@ def command_idedata(args: ArgsProtocol, config: ConfigType) -> int:


def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
-    for c in args.name:
+    new_name = args.name
+    for c in new_name:
        if c not in ALLOWED_NAME_CHARS:
            print(
                color(
@@ -822,8 +834,7 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
            )
            return 1
    # Load existing yaml file
-    with open(CORE.config_path, mode="r+", encoding="utf-8") as raw_file:
-        raw_contents = raw_file.read()
+    raw_contents = CORE.config_path.read_text(encoding="utf-8")

    yaml = yaml_util.load_yaml(CORE.config_path)
    if CONF_ESPHOME not in yaml or CONF_NAME not in yaml[CONF_ESPHOME]:
@@ -838,7 +849,7 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
    if match is None:
        new_raw = re.sub(
            rf"name:\s+[\"']?{old_name}[\"']?",
-            f'name: "{args.name}"',
+            f'name: "{new_name}"',
            raw_contents,
        )
    else:
@@ -858,29 +869,28 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:

        new_raw = re.sub(
            rf"^(\s+{match.group(1)}):\s+[\"']?{old_name}[\"']?",
-            f'\\1: "{args.name}"',
+            f'\\1: "{new_name}"',
            raw_contents,
            flags=re.MULTILINE,
        )

-    new_path = os.path.join(CORE.config_dir, args.name + ".yaml")
+    new_path: Path = CORE.config_dir / (new_name + ".yaml")
    print(
-        f"Updating {color(AnsiFore.CYAN, CORE.config_path)} to {color(AnsiFore.CYAN, new_path)}"
+        f"Updating {color(AnsiFore.CYAN, str(CORE.config_path))} to {color(AnsiFore.CYAN, str(new_path))}"
    )
    print()

-    with open(new_path, mode="w", encoding="utf-8") as new_file:
-        new_file.write(new_raw)
+    new_path.write_text(new_raw, encoding="utf-8")

-    rc = run_external_process("esphome", "config", new_path)
+    rc = run_external_process("esphome", "config", str(new_path))
    if rc != 0:
        print(color(AnsiFore.BOLD_RED, "Rename failed. Reverting changes."))
-        os.remove(new_path)
+        new_path.unlink()
        return 1

    cli_args = [
        "run",
-        new_path,
+        str(new_path),
        "--no-logs",
        "--device",
        CORE.address,
@@ -894,11 +904,11 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
    except KeyboardInterrupt:
        rc = 1
    if rc != 0:
-        os.remove(new_path)
+        new_path.unlink()
        return 1

    if CORE.config_path != new_path:
-        os.remove(CORE.config_path)
+        CORE.config_path.unlink()

    print(color(AnsiFore.BOLD_GREEN, "SUCCESS"))
    print()
@@ -965,6 +975,18 @@ def parse_args(argv):
        help="Add a substitution",
        metavar=("key", "value"),
    )
+    options_parser.add_argument(
+        "--mdns-address-cache",
+        help="mDNS address cache mapping in format 'hostname=ip1,ip2'",
+        action="append",
+        default=[],
+    )
+    options_parser.add_argument(
+        "--dns-address-cache",
+        help="DNS address cache mapping in format 'hostname=ip1,ip2'",
+        action="append",
+        default=[],
+    )

    parser = argparse.ArgumentParser(
        description=f"ESPHome {const.__version__}", parents=[options_parser]
    )
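For context (an illustrative invocation, not part of the diff; the hostname and IPs are made up): because both options use action="append", a wrapper such as the dashboard can pass several pre-resolved mappings, e.g.

    esphome --mdns-address-cache 'living-room.local=192.168.1.42' \
            --dns-address-cache 'printer.example.com=10.0.0.5,10.0.0.6' \
            logs living-room.yaml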
@@ -1212,9 +1234,15 @@ def parse_args(argv):


def run_esphome(argv):
+    from esphome.address_cache import AddressCache
+
    args = parse_args(argv)
    CORE.dashboard = args.dashboard

+    # Create address cache from command-line arguments
+    CORE.address_cache = AddressCache.from_cli_args(
+        args.mdns_address_cache, args.dns_address_cache
+    )
    # Override log level if verbose is set
    if args.verbose:
        args.log_level = "DEBUG"
@@ -1237,14 +1265,20 @@ def run_esphome(argv):
    _LOGGER.info("ESPHome %s", const.__version__)

    for conf_path in args.configuration:
-        if any(os.path.basename(conf_path) == x for x in SECRETS_FILES):
+        conf_path = Path(conf_path)
+        if any(conf_path.name == x for x in SECRETS_FILES):
            _LOGGER.warning("Skipping secrets file %s", conf_path)
            continue

        CORE.config_path = conf_path
        CORE.dashboard = args.dashboard

-        config = read_config(dict(args.substitution) if args.substitution else {})
+        # For logs command, skip updating external components
+        skip_external = args.command == "logs"
+        config = read_config(
+            dict(args.substitution) if args.substitution else {},
+            skip_external_update=skip_external,
+        )
        if config is None:
            return 2
        CORE.config = config
esphome/address_cache.py (new file, 142 lines)
@@ -0,0 +1,142 @@
+"""Address cache for DNS and mDNS lookups."""
+
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def normalize_hostname(hostname: str) -> str:
+    """Normalize hostname for cache lookups.
+
+    Removes trailing dots and converts to lowercase.
+    """
+    return hostname.rstrip(".").lower()
+
+
+class AddressCache:
+    """Cache for DNS and mDNS address lookups.
+
+    This cache stores pre-resolved addresses from command-line arguments
+    to avoid slow DNS/mDNS lookups during builds.
+    """
+
+    def __init__(
+        self,
+        mdns_cache: dict[str, list[str]] | None = None,
+        dns_cache: dict[str, list[str]] | None = None,
+    ) -> None:
+        """Initialize the address cache.
+
+        Args:
+            mdns_cache: Pre-populated mDNS addresses (hostname -> IPs)
+            dns_cache: Pre-populated DNS addresses (hostname -> IPs)
+        """
+        self.mdns_cache = mdns_cache or {}
+        self.dns_cache = dns_cache or {}
+
+    def _get_cached_addresses(
+        self, hostname: str, cache: dict[str, list[str]], cache_type: str
+    ) -> list[str] | None:
+        """Get cached addresses from a specific cache.
+
+        Args:
+            hostname: The hostname to look up
+            cache: The cache dictionary to check
+            cache_type: Type of cache for logging ("mDNS" or "DNS")
+
+        Returns:
+            List of IP addresses if found in cache, None otherwise
+        """
+        normalized = normalize_hostname(hostname)
+        if addresses := cache.get(normalized):
+            _LOGGER.debug("Using %s cache for %s: %s", cache_type, hostname, addresses)
+            return addresses
+        return None
+
+    def get_mdns_addresses(self, hostname: str) -> list[str] | None:
+        """Get cached mDNS addresses for a hostname.
+
+        Args:
+            hostname: The hostname to look up (should end with .local)
+
+        Returns:
+            List of IP addresses if found in cache, None otherwise
+        """
+        return self._get_cached_addresses(hostname, self.mdns_cache, "mDNS")
+
+    def get_dns_addresses(self, hostname: str) -> list[str] | None:
+        """Get cached DNS addresses for a hostname.
+
+        Args:
+            hostname: The hostname to look up
+
+        Returns:
+            List of IP addresses if found in cache, None otherwise
+        """
+        return self._get_cached_addresses(hostname, self.dns_cache, "DNS")
+
+    def get_addresses(self, hostname: str) -> list[str] | None:
+        """Get cached addresses for a hostname.
+
+        Checks mDNS cache for .local domains, DNS cache otherwise.
+
+        Args:
+            hostname: The hostname to look up
+
+        Returns:
+            List of IP addresses if found in cache, None otherwise
+        """
+        normalized = normalize_hostname(hostname)
+        if normalized.endswith(".local"):
+            return self.get_mdns_addresses(hostname)
+        return self.get_dns_addresses(hostname)
+
+    def has_cache(self) -> bool:
+        """Check if any cache entries exist."""
+        return bool(self.mdns_cache or self.dns_cache)
+
+    @classmethod
+    def from_cli_args(
+        cls, mdns_args: Iterable[str], dns_args: Iterable[str]
+    ) -> AddressCache:
+        """Create cache from command-line arguments.
+
+        Args:
+            mdns_args: List of mDNS cache entries like ['host=ip1,ip2']
+            dns_args: List of DNS cache entries like ['host=ip1,ip2']
+
+        Returns:
+            Configured AddressCache instance
+        """
+        mdns_cache = cls._parse_cache_args(mdns_args)
+        dns_cache = cls._parse_cache_args(dns_args)
+        return cls(mdns_cache=mdns_cache, dns_cache=dns_cache)
+
+    @staticmethod
+    def _parse_cache_args(cache_args: Iterable[str]) -> dict[str, list[str]]:
+        """Parse cache arguments into a dictionary.
+
+        Args:
+            cache_args: List of cache mappings like ['host1=ip1,ip2', 'host2=ip3']
+
+        Returns:
+            Dictionary mapping normalized hostnames to list of IP addresses
+        """
+        cache: dict[str, list[str]] = {}
+        for arg in cache_args:
+            if "=" not in arg:
+                _LOGGER.warning(
+                    "Invalid cache format: %s (expected 'hostname=ip1,ip2')", arg
+                )
+                continue
+            hostname, ips = arg.split("=", 1)
+            # Normalize hostname for consistent lookups
+            normalized = normalize_hostname(hostname)
+            cache[normalized] = [ip.strip() for ip in ips.split(",")]
+        return cache
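A quick sketch of how the cache behaves (illustrative values only, derived from the code above):

    from esphome.address_cache import AddressCache

    # Mirrors what run_esphome() does with the new CLI flags
    cache = AddressCache.from_cli_args(
        ["Living-Room.local.=192.168.1.42"], ["printer.example.com=10.0.0.5"]
    )
    cache.get_addresses("living-room.local")  # -> ["192.168.1.42"] (key normalized)
    cache.get_addresses("missing.local")      # -> None; callers fall back to live lookup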
@@ -1,5 +1,3 @@
-import os
-
from esphome.const import __version__
from esphome.core import CORE
from esphome.helpers import mkdir_p, read_file, write_file_if_changed
@@ -63,7 +61,7 @@ def write_ini(content):
    update_storage_json()
    path = CORE.relative_build_path("platformio.ini")

-    if os.path.isfile(path):
+    if path.is_file():
        text = read_file(path)
        content_format = find_begin_end(
            text, INI_AUTO_GENERATE_BEGIN, INI_AUTO_GENERATE_END
@@ -66,6 +66,9 @@ service APIConnection {
  rpc voice_assistant_set_configuration(VoiceAssistantSetConfiguration) returns (void) {}

  rpc alarm_control_panel_command (AlarmControlPanelCommandRequest) returns (void) {}
+
+  rpc zwave_proxy_frame(ZWaveProxyFrame) returns (void) {}
+  rpc zwave_proxy_request(ZWaveProxyRequest) returns (void) {}
}
@@ -254,6 +257,10 @@ message DeviceInfoResponse {

  // Top-level area info to phase out suggested_area
  AreaInfo area = 22 [(field_ifdef) = "USE_AREAS"];
+
+  // Indicates if Z-Wave proxy support is available and features supported
+  uint32 zwave_proxy_feature_flags = 23 [(field_ifdef) = "USE_ZWAVE_PROXY"];
+  uint32 zwave_home_id = 24 [(field_ifdef) = "USE_ZWAVE_PROXY"];
}

message ListEntitiesRequest {
@@ -2276,3 +2283,26 @@ message UpdateCommandRequest {
  UpdateCommand command = 2;
  uint32 device_id = 3 [(field_ifdef) = "USE_DEVICES"];
}
+
+// ==================== Z-WAVE ====================
+
+message ZWaveProxyFrame {
+  option (id) = 128;
+  option (source) = SOURCE_BOTH;
+  option (ifdef) = "USE_ZWAVE_PROXY";
+  option (no_delay) = true;
+
+  bytes data = 1 [(fixed_array_size) = 257];
+}
+
+enum ZWaveProxyRequestType {
+  ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE = 0;
+  ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE = 1;
+}
+
+message ZWaveProxyRequest {
+  option (id) = 129;
+  option (source) = SOURCE_CLIENT;
+  option (ifdef) = "USE_ZWAVE_PROXY";
+  ZWaveProxyRequestType type = 1;
+}
@@ -30,6 +30,9 @@
#ifdef USE_VOICE_ASSISTANT
#include "esphome/components/voice_assistant/voice_assistant.h"
#endif
+#ifdef USE_ZWAVE_PROXY
+#include "esphome/components/zwave_proxy/zwave_proxy.h"
+#endif

namespace esphome::api {
@@ -1203,7 +1206,16 @@ void APIConnection::voice_assistant_set_configuration(const VoiceAssistantSetCon
    voice_assistant::global_voice_assistant->on_set_configuration(msg.active_wake_words);
  }
}
#endif

+#ifdef USE_ZWAVE_PROXY
+void APIConnection::zwave_proxy_frame(const ZWaveProxyFrame &msg) {
+  zwave_proxy::global_zwave_proxy->send_frame(msg.data, msg.data_len);
+}
+
+void APIConnection::zwave_proxy_request(const ZWaveProxyRequest &msg) {
+  zwave_proxy::global_zwave_proxy->zwave_proxy_request(this, msg.type);
+}
+#endif

#ifdef USE_ALARM_CONTROL_PANEL
@@ -1460,6 +1472,10 @@ bool APIConnection::send_device_info_response(const DeviceInfoRequest &msg) {
#ifdef USE_VOICE_ASSISTANT
  resp.voice_assistant_feature_flags = voice_assistant::global_voice_assistant->get_feature_flags();
#endif
+#ifdef USE_ZWAVE_PROXY
+  resp.zwave_proxy_feature_flags = zwave_proxy::global_zwave_proxy->get_feature_flags();
+  resp.zwave_home_id = zwave_proxy::global_zwave_proxy->get_home_id();
+#endif
#ifdef USE_API_NOISE
  resp.api_encryption_supported = true;
#endif
@@ -171,6 +171,11 @@ class APIConnection final : public APIServerConnection {
  void voice_assistant_set_configuration(const VoiceAssistantSetConfiguration &msg) override;
#endif

+#ifdef USE_ZWAVE_PROXY
+  void zwave_proxy_frame(const ZWaveProxyFrame &msg) override;
+  void zwave_proxy_request(const ZWaveProxyRequest &msg) override;
+#endif
+
#ifdef USE_ALARM_CONTROL_PANEL
  bool send_alarm_control_panel_state(alarm_control_panel::AlarmControlPanel *a_alarm_control_panel);
  void alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) override;
@@ -129,6 +129,12 @@ void DeviceInfoResponse::encode(ProtoWriteBuffer buffer) const {
#ifdef USE_AREAS
  buffer.encode_message(22, this->area);
#endif
+#ifdef USE_ZWAVE_PROXY
+  buffer.encode_uint32(23, this->zwave_proxy_feature_flags);
+#endif
+#ifdef USE_ZWAVE_PROXY
+  buffer.encode_uint32(24, this->zwave_home_id);
+#endif
}
void DeviceInfoResponse::calculate_size(ProtoSize &size) const {
#ifdef USE_API_PASSWORD
@@ -181,6 +187,12 @@ void DeviceInfoResponse::calculate_size(ProtoSize &size) const {
#ifdef USE_AREAS
  size.add_message_object(2, this->area);
#endif
+#ifdef USE_ZWAVE_PROXY
+  size.add_uint32(2, this->zwave_proxy_feature_flags);
+#endif
+#ifdef USE_ZWAVE_PROXY
+  size.add_uint32(2, this->zwave_home_id);
+#endif
}
#ifdef USE_BINARY_SENSOR
void ListEntitiesBinarySensorResponse::encode(ProtoWriteBuffer buffer) const {
@@ -3013,5 +3025,35 @@ bool UpdateCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) {
  return true;
}
#endif
+#ifdef USE_ZWAVE_PROXY
+bool ZWaveProxyFrame::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
+  switch (field_id) {
+    case 1: {
+      const std::string &data_str = value.as_string();
+      this->data_len = data_str.size();
+      if (this->data_len > 257) {
+        this->data_len = 257;
+      }
+      memcpy(this->data, data_str.data(), this->data_len);
+      break;
+    }
+    default:
+      return false;
+  }
+  return true;
+}
+void ZWaveProxyFrame::encode(ProtoWriteBuffer buffer) const { buffer.encode_bytes(1, this->data, this->data_len); }
+void ZWaveProxyFrame::calculate_size(ProtoSize &size) const { size.add_length(1, this->data_len); }
+bool ZWaveProxyRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
+  switch (field_id) {
+    case 1:
+      this->type = static_cast<enums::ZWaveProxyRequestType>(value.as_uint32());
+      break;
+    default:
+      return false;
+  }
+  return true;
+}
+#endif

}  // namespace esphome::api
@@ -276,6 +276,12 @@ enum UpdateCommand : uint32_t {
  UPDATE_COMMAND_CHECK = 2,
};
#endif
+#ifdef USE_ZWAVE_PROXY
+enum ZWaveProxyRequestType : uint32_t {
+  ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE = 0,
+  ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE = 1,
+};
+#endif

}  // namespace enums
||||
@@ -492,7 +498,7 @@ class DeviceInfo final : public ProtoMessage {
|
||||
class DeviceInfoResponse final : public ProtoMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 10;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 247;
|
||||
static constexpr uint16_t ESTIMATED_SIZE = 257;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "device_info_response"; }
|
||||
#endif
|
||||
@@ -552,6 +558,12 @@ class DeviceInfoResponse final : public ProtoMessage {
#endif
#ifdef USE_AREAS
  AreaInfo area{};
#endif
+#ifdef USE_ZWAVE_PROXY
+  uint32_t zwave_proxy_feature_flags{0};
+#endif
+#ifdef USE_ZWAVE_PROXY
+  uint32_t zwave_home_id{0};
+#endif
  void encode(ProtoWriteBuffer buffer) const override;
  void calculate_size(ProtoSize &size) const override;
@@ -2913,5 +2925,40 @@ class UpdateCommandRequest final : public CommandProtoMessage {
  bool decode_varint(uint32_t field_id, ProtoVarInt value) override;
};
#endif
+#ifdef USE_ZWAVE_PROXY
+class ZWaveProxyFrame final : public ProtoDecodableMessage {
+ public:
+  static constexpr uint8_t MESSAGE_TYPE = 128;
+  static constexpr uint8_t ESTIMATED_SIZE = 33;
+#ifdef HAS_PROTO_MESSAGE_DUMP
+  const char *message_name() const override { return "z_wave_proxy_frame"; }
+#endif
+  uint8_t data[257]{};
+  uint16_t data_len{0};
+  void encode(ProtoWriteBuffer buffer) const override;
+  void calculate_size(ProtoSize &size) const override;
+#ifdef HAS_PROTO_MESSAGE_DUMP
+  void dump_to(std::string &out) const override;
+#endif
+
+ protected:
+  bool decode_length(uint32_t field_id, ProtoLengthDelimited value) override;
+};
+class ZWaveProxyRequest final : public ProtoDecodableMessage {
+ public:
+  static constexpr uint8_t MESSAGE_TYPE = 129;
+  static constexpr uint8_t ESTIMATED_SIZE = 2;
+#ifdef HAS_PROTO_MESSAGE_DUMP
+  const char *message_name() const override { return "z_wave_proxy_request"; }
+#endif
+  enums::ZWaveProxyRequestType type{};
+#ifdef HAS_PROTO_MESSAGE_DUMP
+  void dump_to(std::string &out) const override;
+#endif
+
+ protected:
+  bool decode_varint(uint32_t field_id, ProtoVarInt value) override;
+};
+#endif

}  // namespace esphome::api
@@ -655,6 +655,18 @@ template<> const char *proto_enum_to_string<enums::UpdateCommand>(enums::UpdateC
  }
}
#endif
+#ifdef USE_ZWAVE_PROXY
+template<> const char *proto_enum_to_string<enums::ZWaveProxyRequestType>(enums::ZWaveProxyRequestType value) {
+  switch (value) {
+    case enums::ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE:
+      return "ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE";
+    case enums::ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE:
+      return "ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE";
+    default:
+      return "UNKNOWN";
+  }
+}
+#endif

void HelloRequest::dump_to(std::string &out) const {
  MessageDumpHelper helper(out, "HelloRequest");
@@ -754,6 +766,12 @@ void DeviceInfoResponse::dump_to(std::string &out) const {
  this->area.dump_to(out);
  out.append("\n");
#endif
+#ifdef USE_ZWAVE_PROXY
+  dump_field(out, "zwave_proxy_feature_flags", this->zwave_proxy_feature_flags);
+#endif
+#ifdef USE_ZWAVE_PROXY
+  dump_field(out, "zwave_home_id", this->zwave_home_id);
+#endif
}
void ListEntitiesRequest::dump_to(std::string &out) const { out.append("ListEntitiesRequest {}"); }
void ListEntitiesDoneResponse::dump_to(std::string &out) const { out.append("ListEntitiesDoneResponse {}"); }
@@ -2107,6 +2125,18 @@ void UpdateCommandRequest::dump_to(std::string &out) const {
#endif
}
#endif
+#ifdef USE_ZWAVE_PROXY
+void ZWaveProxyFrame::dump_to(std::string &out) const {
+  MessageDumpHelper helper(out, "ZWaveProxyFrame");
+  out.append("  data: ");
+  out.append(format_hex_pretty(this->data, this->data_len));
+  out.append("\n");
+}
+void ZWaveProxyRequest::dump_to(std::string &out) const {
+  MessageDumpHelper helper(out, "ZWaveProxyRequest");
+  dump_field(out, "type", static_cast<enums::ZWaveProxyRequestType>(this->type));
+}
+#endif

}  // namespace esphome::api
@@ -588,6 +588,28 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
      this->on_bluetooth_scanner_set_mode_request(msg);
      break;
    }
#endif
+#ifdef USE_ZWAVE_PROXY
+    case ZWaveProxyFrame::MESSAGE_TYPE: {
+      ZWaveProxyFrame msg;
+      msg.decode(msg_data, msg_size);
+#ifdef HAS_PROTO_MESSAGE_DUMP
+      ESP_LOGVV(TAG, "on_z_wave_proxy_frame: %s", msg.dump().c_str());
+#endif
+      this->on_z_wave_proxy_frame(msg);
+      break;
+    }
+#endif
+#ifdef USE_ZWAVE_PROXY
+    case ZWaveProxyRequest::MESSAGE_TYPE: {
+      ZWaveProxyRequest msg;
+      msg.decode(msg_data, msg_size);
+#ifdef HAS_PROTO_MESSAGE_DUMP
+      ESP_LOGVV(TAG, "on_z_wave_proxy_request: %s", msg.dump().c_str());
+#endif
+      this->on_z_wave_proxy_request(msg);
+      break;
+    }
+#endif
    default:
      break;
@@ -899,5 +921,19 @@ void APIServerConnection::on_alarm_control_panel_command_request(const AlarmCont
  }
}
#endif
+#ifdef USE_ZWAVE_PROXY
+void APIServerConnection::on_z_wave_proxy_frame(const ZWaveProxyFrame &msg) {
+  if (this->check_authenticated_()) {
+    this->zwave_proxy_frame(msg);
+  }
+}
+#endif
+#ifdef USE_ZWAVE_PROXY
+void APIServerConnection::on_z_wave_proxy_request(const ZWaveProxyRequest &msg) {
+  if (this->check_authenticated_()) {
+    this->zwave_proxy_request(msg);
+  }
+}
+#endif

}  // namespace esphome::api
@@ -207,6 +207,12 @@ class APIServerConnectionBase : public ProtoService {

#ifdef USE_UPDATE
  virtual void on_update_command_request(const UpdateCommandRequest &value){};
#endif
+#ifdef USE_ZWAVE_PROXY
+  virtual void on_z_wave_proxy_frame(const ZWaveProxyFrame &value){};
+#endif
+#ifdef USE_ZWAVE_PROXY
+  virtual void on_z_wave_proxy_request(const ZWaveProxyRequest &value){};
+#endif
 protected:
  void read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) override;
@@ -335,6 +341,12 @@ class APIServerConnection : public APIServerConnectionBase {
#endif
#ifdef USE_ALARM_CONTROL_PANEL
  virtual void alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) = 0;
#endif
+#ifdef USE_ZWAVE_PROXY
+  virtual void zwave_proxy_frame(const ZWaveProxyFrame &msg) = 0;
+#endif
+#ifdef USE_ZWAVE_PROXY
+  virtual void zwave_proxy_request(const ZWaveProxyRequest &msg) = 0;
+#endif
 protected:
  void on_hello_request(const HelloRequest &msg) override;
@@ -459,6 +471,12 @@ class APIServerConnection : public APIServerConnectionBase {
#ifdef USE_ALARM_CONTROL_PANEL
  void on_alarm_control_panel_command_request(const AlarmControlPanelCommandRequest &msg) override;
#endif
+#ifdef USE_ZWAVE_PROXY
+  void on_z_wave_proxy_frame(const ZWaveProxyFrame &msg) override;
+#endif
+#ifdef USE_ZWAVE_PROXY
+  void on_z_wave_proxy_request(const ZWaveProxyRequest &msg) override;
+#endif
};

}  // namespace esphome::api
@@ -10,7 +10,8 @@ from esphome.const import (
    PLATFORM_LN882X,
    PLATFORM_RTL87XX,
)
-from esphome.core import CORE, CoroPriority, coroutine_with_priority
+from esphome.core import CORE, coroutine_with_priority
+from esphome.coroutine import CoroPriority

AUTO_LOAD = ["web_server_base", "ota.web_server"]
DEPENDENCIES = ["wifi"]
@@ -40,7 +41,7 @@ CONFIG_SCHEMA = cv.All(
)


-@coroutine_with_priority(CoroPriority.COMMUNICATION)
+@coroutine_with_priority(CoroPriority.CAPTIVE_PORTAL)
async def to_code(config):
    paren = await cg.get_variable(config[CONF_WEB_SERVER_BASE_ID])
@@ -2,7 +2,7 @@ from esphome import pins
import esphome.codegen as cg
from esphome.components import i2c, touchscreen
import esphome.config_validation as cv
-from esphome.const import CONF_ID, CONF_INTERRUPT_PIN
+from esphome.const import CONF_ID, CONF_INTERRUPT_PIN, CONF_RESET_PIN

CODEOWNERS = ["@jesserockz"]
DEPENDENCIES = ["i2c"]
@@ -15,7 +15,7 @@ EKTF2232Touchscreen = ektf2232_ns.class_(
)

CONF_EKTF2232_ID = "ektf2232_id"
-CONF_RTS_PIN = "rts_pin"
+CONF_RTS_PIN = "rts_pin"  # To be removed before 2026.4.0

CONFIG_SCHEMA = touchscreen.TOUCHSCREEN_SCHEMA.extend(
    cv.Schema(
@@ -24,7 +24,10 @@ CONFIG_SCHEMA = touchscreen.TOUCHSCREEN_SCHEMA.extend(
            cv.Required(CONF_INTERRUPT_PIN): cv.All(
                pins.internal_gpio_input_pin_schema
            ),
-            cv.Required(CONF_RTS_PIN): pins.gpio_output_pin_schema,
+            cv.Required(CONF_RESET_PIN): pins.gpio_output_pin_schema,
+            cv.Optional(CONF_RTS_PIN): cv.invalid(
+                f"{CONF_RTS_PIN} has been renamed to {CONF_RESET_PIN}"
+            ),
        }
    ).extend(i2c.i2c_device_schema(0x15))
)
@@ -37,5 +40,5 @@ async def to_code(config):

    interrupt_pin = await cg.gpio_pin_expression(config[CONF_INTERRUPT_PIN])
    cg.add(var.set_interrupt_pin(interrupt_pin))
-    rts_pin = await cg.gpio_pin_expression(config[CONF_RTS_PIN])
-    cg.add(var.set_rts_pin(rts_pin))
+    reset_pin = await cg.gpio_pin_expression(config[CONF_RESET_PIN])
+    cg.add(var.set_reset_pin(reset_pin))
@@ -21,7 +21,7 @@ void EKTF2232Touchscreen::setup() {

  this->attach_interrupt_(this->interrupt_pin_, gpio::INTERRUPT_FALLING_EDGE);

-  this->rts_pin_->setup();
+  this->reset_pin_->setup();

  this->hard_reset_();
  if (!this->soft_reset_()) {
@@ -98,9 +98,9 @@ bool EKTF2232Touchscreen::get_power_state() {
}

void EKTF2232Touchscreen::hard_reset_() {
-  this->rts_pin_->digital_write(false);
+  this->reset_pin_->digital_write(false);
  delay(15);
-  this->rts_pin_->digital_write(true);
+  this->reset_pin_->digital_write(true);
  delay(15);
}

@@ -127,7 +127,7 @@ void EKTF2232Touchscreen::dump_config() {
  ESP_LOGCONFIG(TAG, "EKT2232 Touchscreen:");
  LOG_I2C_DEVICE(this);
  LOG_PIN("  Interrupt Pin: ", this->interrupt_pin_);
-  LOG_PIN("  RTS Pin: ", this->rts_pin_);
+  LOG_PIN("  Reset Pin: ", this->reset_pin_);
}

}  // namespace ektf2232
@@ -17,7 +17,7 @@ class EKTF2232Touchscreen : public Touchscreen, public i2c::I2CDevice {
  void dump_config() override;

  void set_interrupt_pin(InternalGPIOPin *pin) { this->interrupt_pin_ = pin; }
-  void set_rts_pin(GPIOPin *pin) { this->rts_pin_ = pin; }
+  void set_reset_pin(GPIOPin *pin) { this->reset_pin_ = pin; }

  void set_power_state(bool enable);
  bool get_power_state();
@@ -28,7 +28,7 @@ class EKTF2232Touchscreen : public Touchscreen, public i2c::I2CDevice {
  void update_touches() override;

  InternalGPIOPin *interrupt_pin_;
-  GPIOPin *rts_pin_;
+  GPIOPin *reset_pin_;
};

}  // namespace ektf2232
@@ -37,7 +37,7 @@ from esphome.const import (
)
from esphome.core import CORE, HexInt, TimePeriod
import esphome.final_validate as fv
-from esphome.helpers import copy_file_if_changed, mkdir_p, write_file_if_changed
+from esphome.helpers import copy_file_if_changed, write_file_if_changed
from esphome.types import ConfigType
from esphome.writer import clean_cmake_cache

@@ -272,14 +272,14 @@ def add_idf_component(
    }


-def add_extra_script(stage: str, filename: str, path: str):
+def add_extra_script(stage: str, filename: str, path: Path):
    """Add an extra script to the project."""
    key = f"{stage}:{filename}"
    if add_extra_build_file(filename, path):
        cg.add_platformio_option("extra_scripts", [key])


-def add_extra_build_file(filename: str, path: str) -> bool:
+def add_extra_build_file(filename: str, path: Path) -> bool:
    """Add an extra build file to the project."""
    if filename not in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES]:
        CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES][filename] = {
@@ -818,7 +818,7 @@ async def to_code(config):
    add_extra_script(
        "post",
        "post_build.py",
-        os.path.join(os.path.dirname(__file__), "post_build.py.script"),
+        Path(__file__).parent / "post_build.py.script",
    )

    if conf[CONF_TYPE] == FRAMEWORK_ESP_IDF:
@@ -1040,7 +1040,7 @@ def _write_sdkconfig():


def _write_idf_component_yml():
-    yml_path = Path(CORE.relative_build_path("src/idf_component.yml"))
+    yml_path = CORE.relative_build_path("src/idf_component.yml")
    if CORE.data[KEY_ESP32][KEY_COMPONENTS]:
        components: dict = CORE.data[KEY_ESP32][KEY_COMPONENTS]
        dependencies = {}
@@ -1058,8 +1058,8 @@ def _write_idf_component_yml():
        contents = ""
    if write_file_if_changed(yml_path, contents):
        dependencies_lock = CORE.relative_build_path("dependencies.lock")
-        if os.path.isfile(dependencies_lock):
-            os.remove(dependencies_lock)
+        if dependencies_lock.is_file():
+            dependencies_lock.unlink()
        clean_cmake_cache()


@@ -1093,14 +1093,13 @@ def copy_files():
    )

    for file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].values():
-        if file[KEY_PATH].startswith("http"):
+        name: str = file[KEY_NAME]
+        path: Path = file[KEY_PATH]
+        if str(path).startswith("http"):
            import requests

-            mkdir_p(CORE.relative_build_path(os.path.dirname(file[KEY_NAME])))
-            with open(CORE.relative_build_path(file[KEY_NAME]), "wb") as f:
-                f.write(requests.get(file[KEY_PATH], timeout=30).content)
+            CORE.relative_build_path(name).parent.mkdir(parents=True, exist_ok=True)
+            content = requests.get(path, timeout=30).content
+            CORE.relative_build_path(name).write_bytes(content)
        else:
-            copy_file_if_changed(
-                file[KEY_PATH],
-                CORE.relative_build_path(file[KEY_NAME]),
-            )
+            copy_file_if_changed(path, CORE.relative_build_path(name))
@@ -17,7 +17,14 @@ static const char *const TAG = "esp32.preferences";

struct NVSData {
  std::string key;
-  std::vector<uint8_t> data;
+  std::unique_ptr<uint8_t[]> data;
+  size_t len;
+
+  void set_data(const uint8_t *src, size_t size) {
+    data = std::make_unique<uint8_t[]>(size);
+    memcpy(data.get(), src, size);
+    len = size;
+  }
};

static std::vector<NVSData> s_pending_save;  // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
@@ -30,26 +37,26 @@ class ESP32PreferenceBackend : public ESPPreferenceBackend {
    // try find in pending saves and update that
    for (auto &obj : s_pending_save) {
      if (obj.key == key) {
-        obj.data.assign(data, data + len);
+        obj.set_data(data, len);
        return true;
      }
    }
    NVSData save{};
    save.key = key;
-    save.data.assign(data, data + len);
-    s_pending_save.emplace_back(save);
-    ESP_LOGVV(TAG, "s_pending_save: key: %s, len: %d", key.c_str(), len);
+    save.set_data(data, len);
+    s_pending_save.emplace_back(std::move(save));
+    ESP_LOGVV(TAG, "s_pending_save: key: %s, len: %zu", key.c_str(), len);
    return true;
  }
  bool load(uint8_t *data, size_t len) override {
    // try find in pending saves and load from that
    for (auto &obj : s_pending_save) {
      if (obj.key == key) {
-        if (obj.data.size() != len) {
+        if (obj.len != len) {
          // size mismatch
          return false;
        }
-        memcpy(data, obj.data.data(), len);
+        memcpy(data, obj.data.get(), len);
        return true;
      }
    }
@@ -61,7 +68,7 @@ class ESP32PreferenceBackend : public ESPPreferenceBackend {
      return false;
    }
    if (actual_len != len) {
-      ESP_LOGVV(TAG, "NVS length does not match (%u!=%u)", actual_len, len);
+      ESP_LOGVV(TAG, "NVS length does not match (%zu!=%zu)", actual_len, len);
      return false;
    }
    err = nvs_get_blob(nvs_handle, key.c_str(), data, &len);
@@ -69,7 +76,7 @@ class ESP32PreferenceBackend : public ESPPreferenceBackend {
      ESP_LOGV(TAG, "nvs_get_blob('%s') failed: %s", key.c_str(), esp_err_to_name(err));
      return false;
    } else {
-      ESP_LOGVV(TAG, "nvs_get_blob: key: %s, len: %d", key.c_str(), len);
+      ESP_LOGVV(TAG, "nvs_get_blob: key: %s, len: %zu", key.c_str(), len);
    }
    return true;
  }
@@ -112,7 +119,7 @@ class ESP32Preferences : public ESPPreferences {
    if (s_pending_save.empty())
      return true;

-    ESP_LOGV(TAG, "Saving %d items...", s_pending_save.size());
+    ESP_LOGV(TAG, "Saving %zu items...", s_pending_save.size());
    // goal try write all pending saves even if one fails
    int cached = 0, written = 0, failed = 0;
    esp_err_t last_err = ESP_OK;
@@ -123,11 +130,10 @@ class ESP32Preferences : public ESPPreferences {
      const auto &save = s_pending_save[i];
      ESP_LOGVV(TAG, "Checking if NVS data %s has changed", save.key.c_str());
      if (is_changed(nvs_handle, save)) {
-        esp_err_t err = nvs_set_blob(nvs_handle, save.key.c_str(), save.data.data(), save.data.size());
-        ESP_LOGV(TAG, "sync: key: %s, len: %d", save.key.c_str(), save.data.size());
+        esp_err_t err = nvs_set_blob(nvs_handle, save.key.c_str(), save.data.get(), save.len);
+        ESP_LOGV(TAG, "sync: key: %s, len: %zu", save.key.c_str(), save.len);
        if (err != 0) {
-          ESP_LOGV(TAG, "nvs_set_blob('%s', len=%u) failed: %s", save.key.c_str(), save.data.size(),
-                   esp_err_to_name(err));
+          ESP_LOGV(TAG, "nvs_set_blob('%s', len=%zu) failed: %s", save.key.c_str(), save.len, esp_err_to_name(err));
          failed++;
          last_err = err;
          last_key = save.key;
@@ -135,7 +141,7 @@ class ESP32Preferences : public ESPPreferences {
        }
        written++;
      } else {
-        ESP_LOGV(TAG, "NVS data not changed skipping %s len=%u", save.key.c_str(), save.data.size());
+        ESP_LOGV(TAG, "NVS data not changed skipping %s len=%zu", save.key.c_str(), save.len);
        cached++;
      }
      s_pending_save.erase(s_pending_save.begin() + i);
@@ -164,7 +170,7 @@ class ESP32Preferences : public ESPPreferences {
      return true;
    }
    // Check size first before allocating memory
-    if (actual_len != to_save.data.size()) {
+    if (actual_len != to_save.len) {
      return true;
    }
    auto stored_data = std::make_unique<uint8_t[]>(actual_len);
@@ -173,7 +179,7 @@ class ESP32Preferences : public ESPPreferences {
      ESP_LOGV(TAG, "nvs_get_blob('%s') failed: %s", to_save.key.c_str(), esp_err_to_name(err));
      return true;
    }
-    return memcmp(to_save.data.data(), stored_data.get(), to_save.data.size()) != 0;
+    return memcmp(to_save.data.get(), stored_data.get(), to_save.len) != 0;
  }

  bool reset() override {
@@ -1,4 +1,5 @@
import os
+from pathlib import Path

from esphome import pins
from esphome.components import esp32
@@ -97,5 +98,5 @@ async def to_code(config):
    esp32.add_extra_script(
        "post",
        "esp32_hosted.py",
-        os.path.join(os.path.dirname(__file__), "esp32_hosted.py.script"),
+        Path(__file__).parent / "esp32_hosted.py.script",
    )
@@ -31,6 +31,9 @@ void ESP32ImprovComponent::setup() {
#endif
  global_ble_server->on(BLEServerEvt::EmptyEvt::ON_DISCONNECT,
                        [this](uint16_t conn_id) { this->set_error_(improv::ERROR_NONE); });
+
+  // Start with loop disabled - will be enabled by start() when needed
+  this->disable_loop();
}

void ESP32ImprovComponent::setup_characteristics() {
@@ -1,5 +1,5 @@
import logging
-import os
+from pathlib import Path

import esphome.codegen as cg
import esphome.config_validation as cv
@@ -259,8 +259,8 @@ async def to_code(config):

# Called by writer.py
def copy_files():
-    dir = os.path.dirname(__file__)
-    post_build_file = os.path.join(dir, "post_build.py.script")
+    dir = Path(__file__).parent
+    post_build_file = dir / "post_build.py.script"
    copy_file_if_changed(
        post_build_file,
        CORE.relative_build_path("post_build.py"),
@@ -16,7 +16,8 @@ from esphome.const import (
    CONF_SAFE_MODE,
    CONF_VERSION,
)
-from esphome.core import CoroPriority, coroutine_with_priority
+from esphome.core import coroutine_with_priority
+from esphome.coroutine import CoroPriority
import esphome.final_validate as fv

_LOGGER = logging.getLogger(__name__)
@@ -121,7 +122,7 @@ CONFIG_SCHEMA = (
FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate


-@coroutine_with_priority(CoroPriority.COMMUNICATION)
+@coroutine_with_priority(CoroPriority.OTA_UPDATES)
async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    cg.add(var.set_port(config[CONF_PORT]))
@@ -117,19 +117,15 @@ ManualIP = ethernet_ns.struct("ManualIP")

def _is_framework_spi_polling_mode_supported():
    # SPI Ethernet without IRQ feature is added in
-    # esp-idf >= (5.3+ ,5.2.1+, 5.1.4) and arduino-esp32 >= 3.0.0
+    # esp-idf >= (5.3+ ,5.2.1+, 5.1.4)
+    # Note: Arduino now uses ESP-IDF as a component, so we only check IDF version
    framework_version = CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION]
-    if CORE.using_esp_idf:
-        if framework_version >= cv.Version(5, 3, 0):
-            return True
-        if cv.Version(5, 3, 0) > framework_version >= cv.Version(5, 2, 1):
-            return True
-        if cv.Version(5, 2, 0) > framework_version >= cv.Version(5, 1, 4):  # noqa: SIM103
-            return True
-        return False
-    if CORE.using_arduino:
-        return framework_version >= cv.Version(3, 0, 0)
-    # fail safe: Unknown framework
+    if framework_version >= cv.Version(5, 3, 0):
+        return True
+    if cv.Version(5, 3, 0) > framework_version >= cv.Version(5, 2, 1):
+        return True
+    if cv.Version(5, 2, 0) > framework_version >= cv.Version(5, 1, 4):  # noqa: SIM103
+        return True
    return False
@@ -39,11 +39,13 @@ async def to_code(config):
    pass


-def _process_git_config(config: dict, refresh) -> str:
+def _process_git_config(config: dict, refresh, skip_update: bool = False) -> str:
+    # When skip_update is True, use NEVER_REFRESH to prevent updates
+    actual_refresh = git.NEVER_REFRESH if skip_update else refresh
    repo_dir, _ = git.clone_or_update(
        url=config[CONF_URL],
        ref=config.get(CONF_REF),
-        refresh=refresh,
+        refresh=actual_refresh,
        domain=DOMAIN,
        username=config.get(CONF_USERNAME),
        password=config.get(CONF_PASSWORD),
@@ -70,12 +72,12 @@ def _process_git_config(config: dict, refresh) -> str:
    return components_dir


-def _process_single_config(config: dict):
+def _process_single_config(config: dict, skip_update: bool = False):
    conf = config[CONF_SOURCE]
    if conf[CONF_TYPE] == TYPE_GIT:
        with cv.prepend_path([CONF_SOURCE]):
            components_dir = _process_git_config(
-                config[CONF_SOURCE], config[CONF_REFRESH]
+                config[CONF_SOURCE], config[CONF_REFRESH], skip_update
            )
    elif conf[CONF_TYPE] == TYPE_LOCAL:
        components_dir = Path(CORE.relative_config_path(conf[CONF_PATH]))
@@ -105,7 +107,7 @@ def _process_single_config(config: dict):
    loader.install_meta_finder(components_dir, allowed_components=allowed_components)


-def do_external_components_pass(config: dict) -> None:
+def do_external_components_pass(config: dict, skip_update: bool = False) -> None:
    conf = config.get(DOMAIN)
    if conf is None:
        return
@@ -113,4 +115,4 @@ def do_external_components_pass(config: dict) -> None:
    conf = CONFIG_SCHEMA(conf)
    for i, c in enumerate(conf):
        with cv.prepend_path(i):
-            _process_single_config(c)
+            _process_single_config(c, skip_update)
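For illustration (a hypothetical call site, not part of the diff): combined with the logs handling shown earlier (skip_external = args.command == "logs"), the new parameter effectively amounts to

    # Use git-sourced external components as-is (git.NEVER_REFRESH)
    # instead of re-fetching them when only streaming logs
    do_external_components_pass(config, skip_update=True)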
@@ -3,7 +3,6 @@ import functools
import hashlib
from itertools import accumulate
import logging
-import os
from pathlib import Path
import re

@@ -38,6 +37,7 @@ from esphome.const import (
)
from esphome.core import CORE, HexInt
from esphome.helpers import cpp_string_escape
+from esphome.types import ConfigType

_LOGGER = logging.getLogger(__name__)

@@ -253,11 +253,11 @@ def validate_truetype_file(value):
    return CORE.relative_config_path(cv.file_(value))


-def add_local_file(value):
+def add_local_file(value: ConfigType) -> ConfigType:
    if value in FONT_CACHE:
        return value
-    path = value[CONF_PATH]
-    if not os.path.isfile(path):
+    path = Path(value[CONF_PATH])
+    if not path.is_file():
        raise cv.Invalid(f"File '{path}' not found.")
    FONT_CACHE[value] = path
    return value
@@ -318,7 +318,7 @@ def download_gfont(value):
        external_files.compute_local_file_dir(DOMAIN)
        / f"{value[CONF_FAMILY]}@{value[CONF_WEIGHT]}@{value[CONF_ITALIC]}@v1.ttf"
    )
-    if not external_files.is_file_recent(str(path), value[CONF_REFRESH]):
+    if not external_files.is_file_recent(path, value[CONF_REFRESH]):
        _LOGGER.debug("download_gfont: path=%s", path)
        try:
            req = requests.get(url, timeout=external_files.NETWORK_TIMEOUT)
@@ -6,6 +6,7 @@ namespace gpio {

static const char *const TAG = "gpio.binary_sensor";

+#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_DEBUG
static const LogString *interrupt_type_to_string(gpio::InterruptType type) {
  switch (type) {
    case gpio::INTERRUPT_RISING_EDGE:
@@ -22,6 +23,7 @@ static const LogString *interrupt_type_to_string(gpio::InterruptType type) {
static const LogString *gpio_mode_to_string(bool use_interrupt) {
  return use_interrupt ? LOG_STR("interrupt") : LOG_STR("polling");
}
+#endif

void IRAM_ATTR GPIOBinarySensorStore::gpio_intr(GPIOBinarySensorStore *arg) {
  bool new_state = arg->isr_pin_.digital_read();
@@ -194,7 +194,7 @@ async def to_code(config):
        cg.add_define("CPPHTTPLIB_OPENSSL_SUPPORT")
    elif path := config.get(CONF_CA_CERTIFICATE_PATH):
        cg.add_define("CPPHTTPLIB_OPENSSL_SUPPORT")
-        cg.add(var.set_ca_path(path))
+        cg.add(var.set_ca_path(str(path)))
        cg.add_build_flag("-lssl")
        cg.add_build_flag("-lcrypto")
@@ -3,7 +3,8 @@ import esphome.codegen as cg
from esphome.components.ota import BASE_OTA_SCHEMA, OTAComponent, ota_to_code
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_PASSWORD, CONF_URL, CONF_USERNAME
-from esphome.core import CoroPriority, coroutine_with_priority
+from esphome.core import coroutine_with_priority
+from esphome.coroutine import CoroPriority

from .. import CONF_HTTP_REQUEST_ID, HttpRequestComponent, http_request_ns

@@ -40,7 +41,7 @@ CONFIG_SCHEMA = cv.All(
)


-@coroutine_with_priority(CoroPriority.COMMUNICATION)
+@coroutine_with_priority(CoroPriority.OTA_UPDATES)
async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await ota_to_code(var, config)
@@ -8,52 +8,12 @@ namespace json {

static const char *const TAG = "json";

-#ifdef USE_PSRAM
-// Build an allocator for the JSON Library using the RAMAllocator class
-// This is only compiled when PSRAM is enabled
-struct SpiRamAllocator : ArduinoJson::Allocator {
-  void *allocate(size_t size) override { return this->allocator_.allocate(size); }
-
-  void deallocate(void *pointer) override {
-    // ArduinoJson's Allocator interface doesn't provide the size parameter in deallocate.
-    // RAMAllocator::deallocate() requires the size, which we don't have access to here.
-    // RAMAllocator::deallocate implementation just calls free() regardless of whether
-    // the memory was allocated with heap_caps_malloc or malloc.
-    // This is safe because ESP-IDF's heap implementation internally tracks the memory region
-    // and routes free() to the appropriate heap.
-    free(pointer);  // NOLINT(cppcoreguidelines-owning-memory,cppcoreguidelines-no-malloc)
-  }
-
-  void *reallocate(void *ptr, size_t new_size) override {
-    return this->allocator_.reallocate(static_cast<uint8_t *>(ptr), new_size);
-  }
-
- protected:
-  RAMAllocator<uint8_t> allocator_{RAMAllocator<uint8_t>(RAMAllocator<uint8_t>::NONE)};
-};
-#endif
-
std::string build_json(const json_build_t &f) {
  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
-#ifdef USE_PSRAM
-  auto doc_allocator = SpiRamAllocator();
-  JsonDocument json_document(&doc_allocator);
-#else
-  JsonDocument json_document;
-#endif
-  if (json_document.overflowed()) {
-    ESP_LOGE(TAG, "Could not allocate memory for JSON document!");
-    return "{}";
-  }
-  JsonObject root = json_document.to<JsonObject>();
+  JsonBuilder builder;
+  JsonObject root = builder.root();
  f(root);
-  if (json_document.overflowed()) {
-    ESP_LOGE(TAG, "Could not allocate memory for JSON document!");
-    return "{}";
-  }
-  std::string output;
-  serializeJson(json_document, output);
-  return output;
+  return builder.serialize();
  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
}

@@ -84,5 +44,15 @@ bool parse_json(const std::string &data, const json_parse_t &f) {
  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
}

+std::string JsonBuilder::serialize() {
+  if (doc_.overflowed()) {
+    ESP_LOGE(TAG, "JSON document overflow");
+    return "{}";
+  }
+  std::string output;
+  serializeJson(doc_, output);
+  return output;
+}
+
}  // namespace json
}  // namespace esphome
@@ -13,6 +13,31 @@
namespace esphome {
namespace json {

+#ifdef USE_PSRAM
+// Build an allocator for the JSON Library using the RAMAllocator class
+// This is only compiled when PSRAM is enabled
+struct SpiRamAllocator : ArduinoJson::Allocator {
+  void *allocate(size_t size) override { return allocator_.allocate(size); }
+
+  void deallocate(void *ptr) override {
+    // ArduinoJson's Allocator interface doesn't provide the size parameter in deallocate.
+    // RAMAllocator::deallocate() requires the size, which we don't have access to here.
+    // RAMAllocator::deallocate implementation just calls free() regardless of whether
+    // the memory was allocated with heap_caps_malloc or malloc.
+    // This is safe because ESP-IDF's heap implementation internally tracks the memory region
+    // and routes free() to the appropriate heap.
+    free(ptr);  // NOLINT(cppcoreguidelines-owning-memory,cppcoreguidelines-no-malloc)
+  }
+
+  void *reallocate(void *ptr, size_t new_size) override {
+    return allocator_.reallocate(static_cast<uint8_t *>(ptr), new_size);
+  }
+
+ protected:
+  RAMAllocator<uint8_t> allocator_{RAMAllocator<uint8_t>::NONE};
+};
+#endif
+
/// Callback function typedef for parsing JsonObjects.
using json_parse_t = std::function<bool(JsonObject)>;

@@ -25,5 +50,29 @@ std::string build_json(const json_build_t &f);
/// Parse a JSON string and run the provided json parse function if it's valid.
bool parse_json(const std::string &data, const json_parse_t &f);

+/// Builder class for creating JSON documents without lambdas
+class JsonBuilder {
+ public:
+  JsonObject root() {
+    if (!root_created_) {
+      root_ = doc_.to<JsonObject>();
+      root_created_ = true;
+    }
+    return root_;
+  }
+
+  std::string serialize();
+
+ private:
+#ifdef USE_PSRAM
+  SpiRamAllocator allocator_;
+  JsonDocument doc_{&allocator_};
+#else
+  JsonDocument doc_;
+#endif
+  JsonObject root_;
+  bool root_created_{false};
+};
+
}  // namespace json
}  // namespace esphome
@@ -1,6 +1,5 @@
import json
import logging
-from os.path import dirname, isfile, join

import esphome.codegen as cg
import esphome.config_validation as cv
@@ -24,6 +23,7 @@ from esphome.const import (
    __version__,
)
from esphome.core import CORE
+from esphome.storage_json import StorageJSON

from . import gpio  # noqa
from .const import (
@@ -129,7 +129,7 @@ def only_on_family(*, supported=None, unsupported=None):
    return validator_


-def get_download_types(storage_json=None):
+def get_download_types(storage_json: StorageJSON = None):
    types = [
        {
            "title": "UF2 package (recommended)",
@@ -139,11 +139,11 @@ def get_download_types(storage_json=None):
        },
    ]

-    build_dir = dirname(storage_json.firmware_bin_path)
-    outputs = join(build_dir, "firmware.json")
-    if not isfile(outputs):
+    build_dir = storage_json.firmware_bin_path.parent
+    outputs = build_dir / "firmware.json"
+    if not outputs.is_file():
        return types
-    with open(outputs, encoding="utf-8") as f:
+    with outputs.open(encoding="utf-8") as f:
        outputs = json.load(f)
    for output in outputs:
        if not output["public"]:
@@ -15,7 +15,14 @@ static const char *const TAG = "lt.preferences";
|
||||
|
||||
struct NVSData {
|
||||
std::string key;
|
||||
std::vector<uint8_t> data;
|
||||
std::unique_ptr<uint8_t[]> data;
|
||||
size_t len;
|
||||
|
||||
void set_data(const uint8_t *src, size_t size) {
|
||||
data = std::make_unique<uint8_t[]>(size);
|
||||
memcpy(data.get(), src, size);
|
||||
len = size;
|
||||
}
|
||||
};
|
||||
|
||||
static std::vector<NVSData> s_pending_save; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
|
||||
@@ -30,15 +37,15 @@ class LibreTinyPreferenceBackend : public ESPPreferenceBackend {
|
||||
// try find in pending saves and update that
|
||||
for (auto &obj : s_pending_save) {
|
||||
if (obj.key == key) {
|
||||
obj.data.assign(data, data + len);
|
||||
obj.set_data(data, len);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
NVSData save{};
|
||||
save.key = key;
|
||||
save.data.assign(data, data + len);
|
||||
s_pending_save.emplace_back(save);
|
||||
ESP_LOGVV(TAG, "s_pending_save: key: %s, len: %d", key.c_str(), len);
|
||||
save.set_data(data, len);
|
||||
s_pending_save.emplace_back(std::move(save));
|
||||
ESP_LOGVV(TAG, "s_pending_save: key: %s, len: %zu", key.c_str(), len);
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -46,11 +53,11 @@ class LibreTinyPreferenceBackend : public ESPPreferenceBackend {
|
||||
// try find in pending saves and load from that
|
||||
for (auto &obj : s_pending_save) {
|
||||
if (obj.key == key) {
|
||||
if (obj.data.size() != len) {
|
||||
if (obj.len != len) {
|
||||
// size mismatch
|
||||
return false;
|
||||
}
|
||||
memcpy(data, obj.data.data(), len);
|
||||
memcpy(data, obj.data.get(), len);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -58,10 +65,10 @@ class LibreTinyPreferenceBackend : public ESPPreferenceBackend {
|
||||
fdb_blob_make(blob, data, len);
|
||||
size_t actual_len = fdb_kv_get_blob(db, key.c_str(), blob);
|
||||
if (actual_len != len) {
|
||||
ESP_LOGVV(TAG, "NVS length does not match (%u!=%u)", actual_len, len);
|
||||
ESP_LOGVV(TAG, "NVS length does not match (%zu!=%zu)", actual_len, len);
|
||||
return false;
|
||||
} else {
|
||||
ESP_LOGVV(TAG, "fdb_kv_get_blob: key: %s, len: %d", key.c_str(), len);
|
||||
ESP_LOGVV(TAG, "fdb_kv_get_blob: key: %s, len: %zu", key.c_str(), len);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@@ -101,7 +108,7 @@ class LibreTinyPreferences : public ESPPreferences {
|
||||
if (s_pending_save.empty())
|
||||
return true;
|
||||
|
||||
ESP_LOGV(TAG, "Saving %d items...", s_pending_save.size());
|
||||
ESP_LOGV(TAG, "Saving %zu items...", s_pending_save.size());
|
||||
// goal try write all pending saves even if one fails
|
||||
int cached = 0, written = 0, failed = 0;
|
||||
fdb_err_t last_err = FDB_NO_ERR;
|
||||
@@ -112,11 +119,11 @@ class LibreTinyPreferences : public ESPPreferences {
|
||||
const auto &save = s_pending_save[i];
|
||||
ESP_LOGVV(TAG, "Checking if FDB data %s has changed", save.key.c_str());
|
||||
if (is_changed(&db, save)) {
|
||||
ESP_LOGV(TAG, "sync: key: %s, len: %d", save.key.c_str(), save.data.size());
|
||||
fdb_blob_make(&blob, save.data.data(), save.data.size());
|
||||
ESP_LOGV(TAG, "sync: key: %s, len: %zu", save.key.c_str(), save.len);
|
||||
fdb_blob_make(&blob, save.data.get(), save.len);
|
||||
fdb_err_t err = fdb_kv_set_blob(&db, save.key.c_str(), &blob);
|
||||
if (err != FDB_NO_ERR) {
|
||||
ESP_LOGV(TAG, "fdb_kv_set_blob('%s', len=%u) failed: %d", save.key.c_str(), save.data.size(), err);
|
||||
ESP_LOGV(TAG, "fdb_kv_set_blob('%s', len=%zu) failed: %d", save.key.c_str(), save.len, err);
|
||||
failed++;
|
||||
last_err = err;
|
||||
last_key = save.key;
|
||||
@@ -124,7 +131,7 @@ class LibreTinyPreferences : public ESPPreferences {
|
||||
}
|
||||
written++;
|
||||
} else {
|
||||
ESP_LOGD(TAG, "FDB data not changed; skipping %s len=%u", save.key.c_str(), save.data.size());
|
||||
ESP_LOGD(TAG, "FDB data not changed; skipping %s len=%zu", save.key.c_str(), save.len);
|
||||
cached++;
|
||||
}
|
||||
s_pending_save.erase(s_pending_save.begin() + i);
|
||||
@@ -147,7 +154,7 @@ class LibreTinyPreferences : public ESPPreferences {
|
||||
}
|
||||
|
||||
// Check size first - if different, data has changed
|
||||
if (kv.value_len != to_save.data.size()) {
|
||||
if (kv.value_len != to_save.len) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -161,7 +168,7 @@ class LibreTinyPreferences : public ESPPreferences {
|
||||
}
|
||||
|
||||
// Compare the actual data
|
||||
return memcmp(to_save.data.data(), stored_data.get(), kv.value_len) != 0;
|
||||
return memcmp(to_save.data.get(), stored_data.get(), kv.value_len) != 0;
|
||||
}
|
||||
|
||||
bool reset() override {
|
||||
|
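The NVSData change swaps std::vector<uint8_t> for a unique_ptr-owned buffer plus an explicit length, dropping the vector's capacity bookkeeping on a struct that is held in bulk. A short sketch of the resulting call pattern (recapping the diff above; the key string is hypothetical):

NVSData save{};
save.key = "wifi_sta";                         // hypothetical key
save.set_data(src, src_len);                   // allocate once, memcpy, record len
s_pending_save.emplace_back(std::move(save));  // move: transfers the buffer, no deep copy

Because the length is now carried in a size_t field, the log statements switch from %d/%u to %zu to match.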
@@ -11,7 +11,8 @@ from esphome.const import (
CONF_SERVICES,
PlatformFramework,
)
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.core import CORE, coroutine_with_priority
from esphome.coroutine import CoroPriority

CODEOWNERS = ["@esphome/core"]
DEPENDENCIES = ["network"]
@@ -72,7 +73,7 @@ def mdns_service(
)


@coroutine_with_priority(CoroPriority.COMMUNICATION)
@coroutine_with_priority(CoroPriority.NETWORK_SERVICES)
async def to_code(config):
if config[CONF_DISABLED] is True:
return

@@ -128,21 +128,21 @@ void MMC5603Component::update() {
raw_x |= buffer[1] << 4;
raw_x |= buffer[2] << 0;

const float x = 0.0625 * (raw_x - 524288);
const float x = 0.00625 * (raw_x - 524288);

int32_t raw_y = 0;
raw_y |= buffer[3] << 12;
raw_y |= buffer[4] << 4;
raw_y |= buffer[5] << 0;

const float y = 0.0625 * (raw_y - 524288);
const float y = 0.00625 * (raw_y - 524288);

int32_t raw_z = 0;
raw_z |= buffer[6] << 12;
raw_z |= buffer[7] << 4;
raw_z |= buffer[8] << 0;

const float z = 0.0625 * (raw_z - 524288);
const float z = 0.00625 * (raw_z - 524288);

const float heading = atan2f(0.0f - x, y) * 180.0f / M_PI;
ESP_LOGD(TAG, "Got x=%0.02fµT y=%0.02fµT z=%0.02fµT heading=%0.01f°", x, y, z, heading);
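The scale-factor change from 0.0625 to 0.00625 is a unit fix. Assuming the MMC5603's 20-bit sensitivity of 0.0625 mG/LSB (a datasheet value, not restated in this diff) and 1 G = 100 µT, so 1 mG = 0.1 µT:

$$0.0625\,\frac{\text{mG}}{\text{LSB}} \times 0.1\,\frac{\mu\text{T}}{\text{mG}} = 0.00625\,\frac{\mu\text{T}}{\text{LSB}}$$

The debug line prints µT, so the old factor overstated the reported field components by a factor of ten; the heading is unaffected, since the common factor cancels inside atan2f.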
@@ -51,6 +51,7 @@ from esphome.const import (
DEVICE_CLASS_OZONE,
DEVICE_CLASS_PH,
DEVICE_CLASS_PM1,
DEVICE_CLASS_PM4,
DEVICE_CLASS_PM10,
DEVICE_CLASS_PM25,
DEVICE_CLASS_POWER,
@@ -116,6 +117,7 @@ DEVICE_CLASSES = [
DEVICE_CLASS_PM1,
DEVICE_CLASS_PM10,
DEVICE_CLASS_PM25,
DEVICE_CLASS_PM4,
DEVICE_CLASS_POWER,
DEVICE_CLASS_POWER_FACTOR,
DEVICE_CLASS_PRECIPITATION,

@@ -10,7 +10,8 @@ from esphome.const import (
CONF_TRIGGER_ID,
PlatformFramework,
)
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.core import CORE, coroutine_with_priority
from esphome.coroutine import CoroPriority

CODEOWNERS = ["@esphome/core"]
AUTO_LOAD = ["md5", "safe_mode"]
@@ -82,7 +83,7 @@ BASE_OTA_SCHEMA = cv.Schema(
)


@coroutine_with_priority(CoroPriority.COMMUNICATION)
@coroutine_with_priority(CoroPriority.OTA_UPDATES)
async def to_code(config):
cg.add_define("USE_OTA")


@@ -106,11 +106,13 @@ CONFIG_SCHEMA = cv.Any(
)


def _process_base_package(config: dict) -> dict:
def _process_base_package(config: dict, skip_update: bool = False) -> dict:
# When skip_update is True, use NEVER_REFRESH to prevent updates
actual_refresh = git.NEVER_REFRESH if skip_update else config[CONF_REFRESH]
repo_dir, revert = git.clone_or_update(
url=config[CONF_URL],
ref=config.get(CONF_REF),
refresh=config[CONF_REFRESH],
refresh=actual_refresh,
domain=DOMAIN,
username=config.get(CONF_USERNAME),
password=config.get(CONF_PASSWORD),
@@ -180,16 +182,16 @@ def _process_base_package(config: dict) -> dict:
return {"packages": packages}


def _process_package(package_config, config):
def _process_package(package_config, config, skip_update: bool = False):
recursive_package = package_config
if CONF_URL in package_config:
package_config = _process_base_package(package_config)
package_config = _process_base_package(package_config, skip_update)
if isinstance(package_config, dict):
recursive_package = do_packages_pass(package_config)
recursive_package = do_packages_pass(package_config, skip_update)
return merge_config(recursive_package, config)


def do_packages_pass(config: dict):
def do_packages_pass(config: dict, skip_update: bool = False):
if CONF_PACKAGES not in config:
return config
packages = config[CONF_PACKAGES]
@@ -198,10 +200,10 @@ def do_packages_pass(config: dict):
if isinstance(packages, dict):
for package_name, package_config in reversed(packages.items()):
with cv.prepend_path(package_name):
config = _process_package(package_config, config)
config = _process_package(package_config, config, skip_update)
elif isinstance(packages, list):
for package_config in reversed(packages):
config = _process_package(package_config, config)
config = _process_package(package_config, config, skip_update)
else:
raise cv.Invalid(
f"Packages must be a key to value mapping or list, got {type(packages)} instead"

@@ -121,15 +121,11 @@ def transport_schema(cls):
return TRANSPORT_SCHEMA.extend({cv.GenerateID(): cv.declare_id(cls)})


# Build a list of sensors for this platform
CORE.data[DOMAIN] = {CONF_SENSORS: []}


def get_sensors(transport_id):
"""Return the list of sensors for this platform."""
return (
sensor
for sensor in CORE.data[DOMAIN][CONF_SENSORS]
for sensor in CORE.data.setdefault(DOMAIN, {}).setdefault(CONF_SENSORS, [])
if sensor[CONF_TRANSPORT_ID] == transport_id
)

@@ -137,7 +133,8 @@ def get_sensors(transport_id):
def validate_packet_transport_sensor(config):
if CONF_NAME in config and CONF_INTERNAL not in config:
raise cv.Invalid("Must provide internal: config when using name:")
CORE.data[DOMAIN][CONF_SENSORS].append(config)
conf_sensors = CORE.data.setdefault(DOMAIN, {}).setdefault(CONF_SENSORS, [])
conf_sensors.append(config)
return config


@@ -1,5 +1,5 @@
import logging
import os
from pathlib import Path
from string import ascii_letters, digits

import esphome.codegen as cg
@@ -19,7 +19,7 @@ from esphome.const import (
ThreadModel,
)
from esphome.core import CORE, CoroPriority, EsphomeError, coroutine_with_priority
from esphome.helpers import copy_file_if_changed, mkdir_p, read_file, write_file
from esphome.helpers import copy_file_if_changed, read_file, write_file_if_changed

from .const import KEY_BOARD, KEY_PIO_FILES, KEY_RP2040, rp2040_ns

@@ -221,18 +221,18 @@ def generate_pio_files() -> bool:
if not files:
return False
for key, data in files.items():
pio_path = CORE.relative_build_path(f"src/pio/{key}.pio")
mkdir_p(os.path.dirname(pio_path))
write_file(pio_path, data)
pio_path = CORE.build_path / "src" / "pio" / f"{key}.pio"
pio_path.parent.mkdir(parents=True, exist_ok=True)
write_file_if_changed(pio_path, data)
includes.append(f"pio/{key}.pio.h")

write_file(
write_file_if_changed(
CORE.relative_build_path("src/pio_includes.h"),
"#pragma once\n" + "\n".join([f'#include "{include}"' for include in includes]),
)

dir = os.path.dirname(__file__)
build_pio_file = os.path.join(dir, "build_pio.py.script")
dir = Path(__file__).parent
build_pio_file = dir / "build_pio.py.script"
copy_file_if_changed(
build_pio_file,
CORE.relative_build_path("build_pio.py"),
@@ -243,8 +243,8 @@ def generate_pio_files() -> bool:

# Called by writer.py
def copy_files():
dir = os.path.dirname(__file__)
post_build_file = os.path.join(dir, "post_build.py.script")
dir = Path(__file__).parent
post_build_file = dir / "post_build.py.script"
copy_file_if_changed(
post_build_file,
CORE.relative_build_path("post_build.py"),
@@ -252,4 +252,4 @@ def copy_files():
if generate_pio_files():
path = CORE.relative_src_path("esphome.h")
content = read_file(path).rstrip("\n")
write_file(path, content + '\n#include "pio_includes.h"\n')
write_file_if_changed(path, content + '\n#include "pio_includes.h"\n')

@@ -76,7 +76,8 @@ bool SensirionI2CDevice::write_command_(uint16_t command, CommandLen command_len
temp[raw_idx++] = data[i] >> 8;
#endif
// Use MSB first since Sensirion devices use CRC-8 with MSB first
temp[raw_idx++] = crc8(&temp[raw_idx - 2], 2, 0xFF, CRC_POLYNOMIAL, true);
uint8_t crc = crc8(&temp[raw_idx - 2], 2, 0xFF, CRC_POLYNOMIAL, true);
temp[raw_idx++] = crc;
}
this->last_error_ = this->write(temp, raw_idx);
return this->last_error_ == i2c::ERROR_OK;
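The two-statement form in the Sensirion hunk fixes an order-of-evaluation hazard: the old line read raw_idx on the right-hand side (via &temp[raw_idx - 2]) while the left-hand side incremented it, and before C++17 those two operations are unsequenced; static analyzers flag the pattern even on newer standards. Splitting it makes the order explicit:

// old: read and increment of raw_idx in one expression -- unsequenced before C++17
temp[raw_idx++] = crc8(&temp[raw_idx - 2], 2, 0xFF, CRC_POLYNOMIAL, true);
// new: compute the CRC over the two data bytes just written, then append it
uint8_t crc = crc8(&temp[raw_idx - 2], 2, 0xFF, CRC_POLYNOMIAL, true);
temp[raw_idx++] = crc;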
@@ -74,6 +74,7 @@ from esphome.const import (
DEVICE_CLASS_OZONE,
DEVICE_CLASS_PH,
DEVICE_CLASS_PM1,
DEVICE_CLASS_PM4,
DEVICE_CLASS_PM10,
DEVICE_CLASS_PM25,
DEVICE_CLASS_POWER,
@@ -143,6 +144,7 @@ DEVICE_CLASSES = [
DEVICE_CLASS_PM1,
DEVICE_CLASS_PM10,
DEVICE_CLASS_PM25,
DEVICE_CLASS_PM4,
DEVICE_CLASS_POWER,
DEVICE_CLASS_POWER_FACTOR,
DEVICE_CLASS_PRECIPITATION,

@@ -4,7 +4,7 @@ from esphome import core
from esphome.config_helpers import Extend, Remove, merge_config
import esphome.config_validation as cv
from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS
from esphome.yaml_util import ESPHomeDataBase, make_data_base
from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base

from .jinja import Jinja, JinjaStr, TemplateError, TemplateRuntimeError, has_jinja

@@ -127,6 +127,8 @@ def _expand_substitutions(substitutions, value, path, jinja, ignore_missing):


def _substitute_item(substitutions, item, path, jinja, ignore_missing):
if isinstance(item, ESPLiteralValue):
return None  # do not substitute inside literal blocks
if isinstance(item, list):
for i, it in enumerate(item):
sub = _substitute_item(substitutions, it, path + [i], jinja, ignore_missing)

@@ -215,12 +215,37 @@ void Tuya::handle_command_(uint8_t command, uint8_t version, const uint8_t *buff
this->send_empty_command_(TuyaCommandType::DATAPOINT_QUERY);
}
break;
case TuyaCommandType::WIFI_RESET:
ESP_LOGE(TAG, "WIFI_RESET is not handled");
break;
case TuyaCommandType::WIFI_SELECT:
ESP_LOGE(TAG, "WIFI_SELECT is not handled");
case TuyaCommandType::WIFI_RESET: {
const bool is_select = (len >= 1);
// Acknowledge the received command (WIFI_SELECT carries a payload byte, WIFI_RESET does not)
TuyaCommand ack;
ack.cmd = is_select ? TuyaCommandType::WIFI_SELECT : TuyaCommandType::WIFI_RESET;
ack.payload.clear();
this->send_command_(ack);
// Establish pairing mode for correct first WIFI_STATE byte, EZ (0x00) default
uint8_t first = 0x00;
const char *mode_str = "EZ";
if (is_select && buffer[0] == 0x01) {
first = 0x01;
mode_str = "AP";
}
// Send WIFI_STATE response, MCU exits pairing mode
TuyaCommand st;
st.cmd = TuyaCommandType::WIFI_STATE;
st.payload.resize(1);
st.payload[0] = first;
this->send_command_(st);
st.payload[0] = 0x02;
this->send_command_(st);
st.payload[0] = 0x03;
this->send_command_(st);
st.payload[0] = 0x04;
this->send_command_(st);
ESP_LOGI(TAG, "%s received (%s), replied with WIFI_STATE confirming connection established",
is_select ? "WIFI_SELECT" : "WIFI_RESET", mode_str);
break;
}
case TuyaCommandType::DATAPOINT_DELIVER:
break;
case TuyaCommandType::DATAPOINT_REPORT_ASYNC:
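For context, the WIFI_STATE bytes walked through above follow the commonly documented Tuya MCU serial protocol (an assumption — the table below is not part of this diff):

// WIFI_STATE payload byte -> reported Wi-Fi status (per Tuya MCU protocol docs)
//   0x00  smartconfig (EZ) pairing        0x01  AP pairing
//   0x02  configured, not yet connected   0x03  connected to the router
//   0x04  connected to router and cloud

Stepping the payload from the pairing byte up through 0x04 tells the MCU that provisioning finished and the connection is up, which lets it leave pairing mode instead of the previous behavior of logging an error and ignoring the request.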
@@ -1,3 +1,4 @@
import math
import re

from esphome import automation, pins
@@ -14,6 +15,7 @@ from esphome.const import (
CONF_DIRECTION,
CONF_DUMMY_RECEIVER,
CONF_DUMMY_RECEIVER_ID,
CONF_FLOW_CONTROL_PIN,
CONF_ID,
CONF_INVERT,
CONF_LAMBDA,
@@ -152,6 +154,8 @@ UART_PARITY_OPTIONS = {
CONF_STOP_BITS = "stop_bits"
CONF_DATA_BITS = "data_bits"
CONF_PARITY = "parity"
CONF_RX_FULL_THRESHOLD = "rx_full_threshold"
CONF_RX_TIMEOUT = "rx_timeout"

UARTDirection = uart_ns.enum("UARTDirection")
UART_DIRECTIONS = {
@@ -219,8 +223,17 @@ CONFIG_SCHEMA = cv.All(
cv.Required(CONF_BAUD_RATE): cv.int_range(min=1),
cv.Optional(CONF_TX_PIN): pins.internal_gpio_output_pin_schema,
cv.Optional(CONF_RX_PIN): validate_rx_pin,
cv.Optional(CONF_FLOW_CONTROL_PIN): cv.All(
cv.only_on_esp32, pins.internal_gpio_output_pin_schema
),
cv.Optional(CONF_PORT): cv.All(validate_port, cv.only_on(PLATFORM_HOST)),
cv.Optional(CONF_RX_BUFFER_SIZE, default=256): cv.validate_bytes,
cv.Optional(CONF_RX_FULL_THRESHOLD): cv.All(
cv.only_on_esp32, cv.validate_bytes, cv.int_range(min=1, max=120)
),
cv.SplitDefault(CONF_RX_TIMEOUT, esp32=2): cv.All(
cv.only_on_esp32, cv.validate_bytes, cv.int_range(min=0, max=92)
),
cv.Optional(CONF_STOP_BITS, default=1): cv.one_of(1, 2, int=True),
cv.Optional(CONF_DATA_BITS, default=8): cv.int_range(min=5, max=8),
cv.Optional(CONF_PARITY, default="NONE"): cv.enum(
@@ -275,9 +288,27 @@ async def to_code(config):
if CONF_RX_PIN in config:
rx_pin = await cg.gpio_pin_expression(config[CONF_RX_PIN])
cg.add(var.set_rx_pin(rx_pin))
if CONF_FLOW_CONTROL_PIN in config:
flow_control_pin = await cg.gpio_pin_expression(config[CONF_FLOW_CONTROL_PIN])
cg.add(var.set_flow_control_pin(flow_control_pin))
if CONF_PORT in config:
cg.add(var.set_name(config[CONF_PORT]))
cg.add(var.set_rx_buffer_size(config[CONF_RX_BUFFER_SIZE]))
if CORE.is_esp32:
if CONF_RX_FULL_THRESHOLD not in config:
# Calculate a default rx_full_threshold equivalent to ~10 ms of data at the configured baud rate
bytelength = config[CONF_DATA_BITS] + config[CONF_STOP_BITS] + 1
if config[CONF_PARITY] != "NONE":
bytelength += 1
config[CONF_RX_FULL_THRESHOLD] = max(
1,
min(
120,
math.floor((config[CONF_BAUD_RATE] / (bytelength * 1000 / 10)) - 1),
),
)
cg.add(var.set_rx_full_threshold(config[CONF_RX_FULL_THRESHOLD]))
cg.add(var.set_rx_timeout(config[CONF_RX_TIMEOUT]))
cg.add(var.set_stop_bits(config[CONF_STOP_BITS]))
cg.add(var.set_data_bits(config[CONF_DATA_BITS]))
cg.add(var.set_parity(config[CONF_PARITY]))
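The default-threshold expression aims the RX-FIFO-full interrupt at roughly 10 ms of traffic. With bits/byte = data + stop + 1 start bit (+1 when parity is enabled), bytes arrive at baud / (bits/byte) per second, so the count per 10 ms is baud / (bits/byte × 100); one byte is then subtracted as headroom. For a hypothetical 115200-baud 8N1 port (10 bits/byte):

$$\left\lfloor \frac{115200}{10 \times 100} - 1 \right\rfloor = 114$$

which the surrounding min/max keeps inside the same 1–120 range the schema enforces; assuming the ESP32's 128-byte RX FIFO, capping at 120 leaves slack for bytes that arrive while the interrupt is being serviced.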
@@ -18,6 +18,12 @@ class UARTDevice {

void write_byte(uint8_t data) { this->parent_->write_byte(data); }

void set_rx_full_threshold(size_t rx_full_threshold) { this->parent_->set_rx_full_threshold(rx_full_threshold); }
void set_rx_full_threshold_ms(size_t time) { this->parent_->set_rx_full_threshold_ms(time); }
size_t get_rx_full_threshold() { return this->parent_->get_rx_full_threshold(); }
void set_rx_timeout(size_t rx_timeout) { this->parent_->set_rx_timeout(rx_timeout); }
size_t get_rx_timeout() { return this->parent_->get_rx_timeout(); }

void write_array(const uint8_t *data, size_t len) { this->parent_->write_array(data, len); }
void write_array(const std::vector<uint8_t> &data) { this->parent_->write_array(data); }
template<size_t N> void write_array(const std::array<uint8_t, N> &data) {

@@ -20,5 +20,13 @@ bool UARTComponent::check_read_timeout_(size_t len) {
return true;
}

void UARTComponent::set_rx_full_threshold_ms(uint8_t time) {
uint8_t bytelength = this->data_bits_ + this->stop_bits_ + 1;
if (this->parity_ != UARTParityOptions::UART_CONFIG_PARITY_NONE)
bytelength += 1;
int32_t val = clamp<int32_t>((this->baud_rate_ / (bytelength * 1000 / time)) - 1, 1, 120);
this->set_rx_full_threshold(val);
}

} // namespace uart
} // namespace esphome
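A driver built on UARTDevice can tune the same knobs at runtime; a minimal sketch (hypothetical call site, assuming only the forwarding accessors added above):

// e.g. in a UARTDevice subclass, after changing framing or baud rate:
this->set_rx_full_threshold_ms(10);  // re-derive the byte threshold from ~10 ms of data
ESP_LOGD(TAG, "rx threshold: %zu bytes, rx timeout: %zu byte-times",
this->get_rx_full_threshold(), this->get_rx_timeout());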
@@ -6,6 +6,7 @@
#include "esphome/core/component.h"
#include "esphome/core/hal.h"
#include "esphome/core/log.h"
#include "esphome/core/helpers.h"
#ifdef USE_UART_DEBUGGER
#include "esphome/core/automation.h"
#endif
@@ -82,6 +83,10 @@ class UARTComponent {
// @param rx_pin Pointer to the internal GPIO pin used for reception.
void set_rx_pin(InternalGPIOPin *rx_pin) { this->rx_pin_ = rx_pin; }

// Sets the flow control pin for the UART bus.
// @param flow_control_pin Pointer to the internal GPIO pin used for flow control.
void set_flow_control_pin(InternalGPIOPin *flow_control_pin) { this->flow_control_pin_ = flow_control_pin; }

// Sets the size of the RX buffer.
// @param rx_buffer_size Size of the RX buffer in bytes.
void set_rx_buffer_size(size_t rx_buffer_size) { this->rx_buffer_size_ = rx_buffer_size; }
@@ -90,6 +95,26 @@ class UARTComponent {
// @return Size of the RX buffer in bytes.
size_t get_rx_buffer_size() { return this->rx_buffer_size_; }

// Sets the RX FIFO full interrupt threshold.
// @param rx_full_threshold RX full interrupt threshold in bytes.
virtual void set_rx_full_threshold(size_t rx_full_threshold) {}

// Sets the RX FIFO full interrupt threshold.
// @param time RX full interrupt threshold in ms.
void set_rx_full_threshold_ms(uint8_t time);

// Gets the RX FIFO full interrupt threshold.
// @return RX full interrupt threshold in bytes.
size_t get_rx_full_threshold() { return this->rx_full_threshold_; }

// Sets the RX timeout interrupt threshold.
// @param rx_timeout RX timeout interrupt threshold (unit: time of sending one byte).
virtual void set_rx_timeout(size_t rx_timeout) {}

// Gets the RX timeout interrupt threshold.
// @return RX timeout interrupt threshold (unit: time of sending one byte).
size_t get_rx_timeout() { return this->rx_timeout_; }

// Sets the number of stop bits used in UART communication.
// @param stop_bits Number of stop bits.
void set_stop_bits(uint8_t stop_bits) { this->stop_bits_ = stop_bits; }
@@ -161,7 +186,10 @@ class UARTComponent {

InternalGPIOPin *tx_pin_;
InternalGPIOPin *rx_pin_;
InternalGPIOPin *flow_control_pin_;
size_t rx_buffer_size_;
size_t rx_full_threshold_{1};
size_t rx_timeout_{0};
uint32_t baud_rate_;
uint8_t stop_bits_;
uint8_t data_bits_;

@@ -90,6 +90,12 @@ void IDFUARTComponent::setup() {

xSemaphoreTake(this->lock_, portMAX_DELAY);

this->load_settings(false);

xSemaphoreGive(this->lock_);
}

void IDFUARTComponent::load_settings(bool dump_config) {
uart_config_t uart_config = this->get_config_();
esp_err_t err = uart_param_config(this->uart_num_, &uart_config);
if (err != ESP_OK) {
@@ -100,6 +106,7 @@ void IDFUARTComponent::setup() {

int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;
int8_t rx = this->rx_pin_ != nullptr ? this->rx_pin_->get_pin() : -1;
int8_t flow_control = this->flow_control_pin_ != nullptr ? this->flow_control_pin_->get_pin() : -1;

uint32_t invert = 0;
if (this->tx_pin_ != nullptr && this->tx_pin_->is_inverted())
@@ -114,13 +121,21 @@ void IDFUARTComponent::setup() {
return;
}

err = uart_set_pin(this->uart_num_, tx, rx, UART_PIN_NO_CHANGE, UART_PIN_NO_CHANGE);
err = uart_set_pin(this->uart_num_, tx, rx, flow_control, UART_PIN_NO_CHANGE);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_set_pin failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}

if (uart_is_driver_installed(this->uart_num_)) {
uart_driver_delete(this->uart_num_);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_driver_delete failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}
}
err = uart_driver_install(this->uart_num_, /* UART RX ring buffer size. */ this->rx_buffer_size_,
/* UART TX ring buffer size. If set to zero, driver will not use TX buffer, TX function will
block task until all data have been sent out.*/
@@ -133,17 +148,29 @@ void IDFUARTComponent::setup() {
return;
}

xSemaphoreGive(this->lock_);
}

void IDFUARTComponent::load_settings(bool dump_config) {
uart_config_t uart_config = this->get_config_();
esp_err_t err = uart_param_config(this->uart_num_, &uart_config);
err = uart_set_rx_full_threshold(this->uart_num_, this->rx_full_threshold_);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_param_config failed: %s", esp_err_to_name(err));
ESP_LOGW(TAG, "uart_set_rx_full_threshold failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
} else if (dump_config) {
}

err = uart_set_rx_timeout(this->uart_num_, this->rx_timeout_);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_set_rx_timeout failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}

auto mode = this->flow_control_pin_ != nullptr ? UART_MODE_RS485_HALF_DUPLEX : UART_MODE_UART;
err = uart_set_mode(this->uart_num_, mode);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_set_mode failed: %s", esp_err_to_name(err));
this->mark_failed();
return;
}

if (dump_config) {
ESP_LOGCONFIG(TAG, "UART %u was reloaded.", this->uart_num_);
this->dump_config();
}
@@ -153,8 +180,13 @@ void IDFUARTComponent::dump_config() {
ESP_LOGCONFIG(TAG, "UART Bus %u:", this->uart_num_);
LOG_PIN("  TX Pin: ", tx_pin_);
LOG_PIN("  RX Pin: ", rx_pin_);
LOG_PIN("  Flow Control Pin: ", flow_control_pin_);
if (this->rx_pin_ != nullptr) {
ESP_LOGCONFIG(TAG, "  RX Buffer Size: %u", this->rx_buffer_size_);
ESP_LOGCONFIG(TAG,
"  RX Buffer Size: %u\n"
"  RX Full Threshold: %u\n"
"  RX Timeout: %u",
this->rx_buffer_size_, this->rx_full_threshold_, this->rx_timeout_);
}
ESP_LOGCONFIG(TAG,
"  Baud Rate: %" PRIu32 " baud\n"
@@ -165,6 +197,28 @@ void IDFUARTComponent::dump_config() {
this->check_logger_conflict();
}

void IDFUARTComponent::set_rx_full_threshold(size_t rx_full_threshold) {
if (this->is_ready()) {
esp_err_t err = uart_set_rx_full_threshold(this->uart_num_, rx_full_threshold);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_set_rx_full_threshold failed: %s", esp_err_to_name(err));
return;
}
}
this->rx_full_threshold_ = rx_full_threshold;
}

void IDFUARTComponent::set_rx_timeout(size_t rx_timeout) {
if (this->is_ready()) {
esp_err_t err = uart_set_rx_timeout(this->uart_num_, rx_timeout);
if (err != ESP_OK) {
ESP_LOGW(TAG, "uart_set_rx_timeout failed: %s", esp_err_to_name(err));
return;
}
}
this->rx_timeout_ = rx_timeout;
}

void IDFUARTComponent::write_array(const uint8_t *data, size_t len) {
xSemaphoreTake(this->lock_, portMAX_DELAY);
uart_write_bytes(this->uart_num_, data, len);

@@ -15,6 +15,9 @@ class IDFUARTComponent : public UARTComponent, public Component {
void dump_config() override;
float get_setup_priority() const override { return setup_priority::BUS; }

void set_rx_full_threshold(size_t rx_full_threshold) override;
void set_rx_timeout(size_t rx_timeout) override;

void write_array(const uint8_t *data, size_t len) override;

bool peek_byte(uint8_t *data) override;

@@ -3,7 +3,8 @@ from esphome.components.esp32 import add_idf_component
from esphome.components.ota import BASE_OTA_SCHEMA, OTAComponent, ota_to_code
import esphome.config_validation as cv
from esphome.const import CONF_ID
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.core import CORE, coroutine_with_priority
from esphome.coroutine import CoroPriority

CODEOWNERS = ["@esphome/core"]
DEPENDENCIES = ["network", "web_server_base"]
@@ -22,7 +23,7 @@ CONFIG_SCHEMA = (
)


@coroutine_with_priority(CoroPriority.COMMUNICATION)
@coroutine_with_priority(CoroPriority.WEB_SERVER_OTA)
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await ota_to_code(var, config)

@@ -228,10 +228,11 @@ void DeferredUpdateEventSourceList::on_client_connect_(WebServer *ws, DeferredUp

#ifdef USE_WEBSERVER_SORTING
for (auto &group : ws->sorting_groups_) {
message = json::build_json([group](JsonObject root) {
root["name"] = group.second.name;
root["sorting_weight"] = group.second.weight;
});
json::JsonBuilder builder;
JsonObject root = builder.root();
root["name"] = group.second.name;
root["sorting_weight"] = group.second.weight;
message = builder.serialize();

// up to 31 groups should be able to be queued initially without defer
source->try_send_nodefer(message.c_str(), "sorting_group");
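The remainder of this file repeats one mechanical transformation: each json::build_json() call with a capturing lambda becomes a stack-allocated JsonBuilder, the lambda body is inlined, and the function returns builder.serialize(). Schematically (a sketch, not a specific call site from the diff):

// before: body runs inside a std::function handed to json::build_json
return json::build_json([this, obj](JsonObject root) { /* fill root */ });
// after: build in place, serialize once -- no lambda or std::function overhead
json::JsonBuilder builder;
JsonObject root = builder.root();
/* fill root */
return builder.serialize();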
@@ -265,17 +266,20 @@ void WebServer::set_js_include(const char *js_include) { this->js_include_ = js_
#endif

std::string WebServer::get_config_json() {
return json::build_json([this](JsonObject root) {
root["title"] = App.get_friendly_name().empty() ? App.get_name() : App.get_friendly_name();
root["comment"] = App.get_comment();
json::JsonBuilder builder;
JsonObject root = builder.root();

root["title"] = App.get_friendly_name().empty() ? App.get_name() : App.get_friendly_name();
root["comment"] = App.get_comment();
#if defined(USE_WEBSERVER_OTA_DISABLED) || !defined(USE_WEBSERVER_OTA)
root["ota"] = false;  // Note: USE_WEBSERVER_OTA_DISABLED only affects web_server, not captive_portal
root["ota"] = false;  // Note: USE_WEBSERVER_OTA_DISABLED only affects web_server, not captive_portal
#else
root["ota"] = true;
root["ota"] = true;
#endif
root["log"] = this->expose_log_;
root["lang"] = "en";
});
root["log"] = this->expose_log_;
root["lang"] = "en";

return builder.serialize();
}

void WebServer::setup() {
@@ -435,22 +439,26 @@ std::string WebServer::sensor_all_json_generator(WebServer *web_server, void *so
return web_server->sensor_json((sensor::Sensor *) (source), ((sensor::Sensor *) (source))->state, DETAIL_ALL);
}
std::string WebServer::sensor_json(sensor::Sensor *obj, float value, JsonDetail start_config) {
return json::build_json([this, obj, value, start_config](JsonObject root) {
std::string state;
if (std::isnan(value)) {
state = "NA";
} else {
state = value_accuracy_to_string(value, obj->get_accuracy_decimals());
if (!obj->get_unit_of_measurement().empty())
state += " " + obj->get_unit_of_measurement();
}
set_json_icon_state_value(root, obj, "sensor-" + obj->get_object_id(), state, value, start_config);
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
if (!obj->get_unit_of_measurement().empty())
root["uom"] = obj->get_unit_of_measurement();
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

// Build JSON directly inline
std::string state;
if (std::isnan(value)) {
state = "NA";
} else {
state = value_accuracy_to_string(value, obj->get_accuracy_decimals());
if (!obj->get_unit_of_measurement().empty())
state += " " + obj->get_unit_of_measurement();
}
set_json_icon_state_value(root, obj, "sensor-" + obj->get_object_id(), state, value, start_config);
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
if (!obj->get_unit_of_measurement().empty())
root["uom"] = obj->get_unit_of_measurement();
}

return builder.serialize();
}
#endif

@@ -483,12 +491,15 @@ std::string WebServer::text_sensor_all_json_generator(WebServer *web_server, voi
}
std::string WebServer::text_sensor_json(text_sensor::TextSensor *obj, const std::string &value,
JsonDetail start_config) {
return json::build_json([this, obj, value, start_config](JsonObject root) {
set_json_icon_state_value(root, obj, "text_sensor-" + obj->get_object_id(), value, value, start_config);
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_icon_state_value(root, obj, "text_sensor-" + obj->get_object_id(), value, value, start_config);
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif

@@ -553,13 +564,16 @@ std::string WebServer::switch_all_json_generator(WebServer *web_server, void *so
return web_server->switch_json((switch_::Switch *) (source), ((switch_::Switch *) (source))->state, DETAIL_ALL);
}
std::string WebServer::switch_json(switch_::Switch *obj, bool value, JsonDetail start_config) {
return json::build_json([this, obj, value, start_config](JsonObject root) {
set_json_icon_state_value(root, obj, "switch-" + obj->get_object_id(), value ? "ON" : "OFF", value, start_config);
if (start_config == DETAIL_ALL) {
root["assumed_state"] = obj->assumed_state();
this->add_sorting_info_(root, obj);
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_icon_state_value(root, obj, "switch-" + obj->get_object_id(), value ? "ON" : "OFF", value, start_config);
if (start_config == DETAIL_ALL) {
root["assumed_state"] = obj->assumed_state();
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif

@@ -590,12 +604,15 @@ std::string WebServer::button_all_json_generator(WebServer *web_server, void *so
return web_server->button_json((button::Button *) (source), DETAIL_ALL);
}
std::string WebServer::button_json(button::Button *obj, JsonDetail start_config) {
return json::build_json([this, obj, start_config](JsonObject root) {
set_json_id(root, obj, "button-" + obj->get_object_id(), start_config);
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_id(root, obj, "button-" + obj->get_object_id(), start_config);
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif

@@ -627,13 +644,16 @@ std::string WebServer::binary_sensor_all_json_generator(WebServer *web_server, v
((binary_sensor::BinarySensor *) (source))->state, DETAIL_ALL);
}
std::string WebServer::binary_sensor_json(binary_sensor::BinarySensor *obj, bool value, JsonDetail start_config) {
return json::build_json([this, obj, value, start_config](JsonObject root) {
set_json_icon_state_value(root, obj, "binary_sensor-" + obj->get_object_id(), value ? "ON" : "OFF", value,
start_config);
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_icon_state_value(root, obj, "binary_sensor-" + obj->get_object_id(), value ? "ON" : "OFF", value,
start_config);
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif

@@ -694,20 +714,23 @@ std::string WebServer::fan_all_json_generator(WebServer *web_server, void *sourc
return web_server->fan_json((fan::Fan *) (source), DETAIL_ALL);
}
std::string WebServer::fan_json(fan::Fan *obj, JsonDetail start_config) {
return json::build_json([this, obj, start_config](JsonObject root) {
set_json_icon_state_value(root, obj, "fan-" + obj->get_object_id(), obj->state ? "ON" : "OFF", obj->state,
start_config);
const auto traits = obj->get_traits();
if (traits.supports_speed()) {
root["speed_level"] = obj->speed;
root["speed_count"] = traits.supported_speed_count();
}
if (obj->get_traits().supports_oscillation())
root["oscillation"] = obj->oscillating;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_icon_state_value(root, obj, "fan-" + obj->get_object_id(), obj->state ? "ON" : "OFF", obj->state,
start_config);
const auto traits = obj->get_traits();
if (traits.supports_speed()) {
root["speed_level"] = obj->speed;
root["speed_count"] = traits.supported_speed_count();
}
if (obj->get_traits().supports_oscillation())
root["oscillation"] = obj->oscillating;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif

@@ -767,20 +790,23 @@ std::string WebServer::light_all_json_generator(WebServer *web_server, void *sou
return web_server->light_json((light::LightState *) (source), DETAIL_ALL);
}
std::string WebServer::light_json(light::LightState *obj, JsonDetail start_config) {
return json::build_json([this, obj, start_config](JsonObject root) {
set_json_id(root, obj, "light-" + obj->get_object_id(), start_config);
root["state"] = obj->remote_values.is_on() ? "ON" : "OFF";
json::JsonBuilder builder;
JsonObject root = builder.root();

light::LightJSONSchema::dump_json(*obj, root);
if (start_config == DETAIL_ALL) {
JsonArray opt = root["effects"].to<JsonArray>();
opt.add("None");
for (auto const &option : obj->get_effects()) {
opt.add(option->get_name());
}
this->add_sorting_info_(root, obj);
set_json_id(root, obj, "light-" + obj->get_object_id(), start_config);
root["state"] = obj->remote_values.is_on() ? "ON" : "OFF";

light::LightJSONSchema::dump_json(*obj, root);
if (start_config == DETAIL_ALL) {
JsonArray opt = root["effects"].to<JsonArray>();
opt.add("None");
for (auto const &option : obj->get_effects()) {
opt.add(option->get_name());
}
});
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif

@@ -839,19 +865,22 @@ std::string WebServer::cover_all_json_generator(WebServer *web_server, void *sou
return web_server->cover_json((cover::Cover *) (source), DETAIL_ALL);
}
std::string WebServer::cover_json(cover::Cover *obj, JsonDetail start_config) {
return json::build_json([this, obj, start_config](JsonObject root) {
set_json_icon_state_value(root, obj, "cover-" + obj->get_object_id(), obj->is_fully_closed() ? "CLOSED" : "OPEN",
obj->position, start_config);
root["current_operation"] = cover::cover_operation_to_str(obj->current_operation);
json::JsonBuilder builder;
JsonObject root = builder.root();

if (obj->get_traits().get_supports_position())
root["position"] = obj->position;
if (obj->get_traits().get_supports_tilt())
root["tilt"] = obj->tilt;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}
});
set_json_icon_state_value(root, obj, "cover-" + obj->get_object_id(), obj->is_fully_closed() ? "CLOSED" : "OPEN",
obj->position, start_config);
root["current_operation"] = cover::cover_operation_to_str(obj->current_operation);

if (obj->get_traits().get_supports_position())
root["position"] = obj->position;
if (obj->get_traits().get_supports_tilt())
root["tilt"] = obj->tilt;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif

@@ -894,31 +923,33 @@ std::string WebServer::number_all_json_generator(WebServer *web_server, void *so
return web_server->number_json((number::Number *) (source), ((number::Number *) (source))->state, DETAIL_ALL);
}
std::string WebServer::number_json(number::Number *obj, float value, JsonDetail start_config) {
return json::build_json([this, obj, value, start_config](JsonObject root) {
set_json_id(root, obj, "number-" + obj->get_object_id(), start_config);
if (start_config == DETAIL_ALL) {
root["min_value"] =
value_accuracy_to_string(obj->traits.get_min_value(), step_to_accuracy_decimals(obj->traits.get_step()));
root["max_value"] =
value_accuracy_to_string(obj->traits.get_max_value(), step_to_accuracy_decimals(obj->traits.get_step()));
root["step"] =
value_accuracy_to_string(obj->traits.get_step(), step_to_accuracy_decimals(obj->traits.get_step()));
root["mode"] = (int) obj->traits.get_mode();
if (!obj->traits.get_unit_of_measurement().empty())
root["uom"] = obj->traits.get_unit_of_measurement();
this->add_sorting_info_(root, obj);
}
if (std::isnan(value)) {
root["value"] = "\"NaN\"";
root["state"] = "NA";
} else {
root["value"] = value_accuracy_to_string(value, step_to_accuracy_decimals(obj->traits.get_step()));
std::string state = value_accuracy_to_string(value, step_to_accuracy_decimals(obj->traits.get_step()));
if (!obj->traits.get_unit_of_measurement().empty())
state += " " + obj->traits.get_unit_of_measurement();
root["state"] = state;
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_id(root, obj, "number-" + obj->get_object_id(), start_config);
if (start_config == DETAIL_ALL) {
root["min_value"] =
value_accuracy_to_string(obj->traits.get_min_value(), step_to_accuracy_decimals(obj->traits.get_step()));
root["max_value"] =
value_accuracy_to_string(obj->traits.get_max_value(), step_to_accuracy_decimals(obj->traits.get_step()));
root["step"] = value_accuracy_to_string(obj->traits.get_step(), step_to_accuracy_decimals(obj->traits.get_step()));
root["mode"] = (int) obj->traits.get_mode();
if (!obj->traits.get_unit_of_measurement().empty())
root["uom"] = obj->traits.get_unit_of_measurement();
this->add_sorting_info_(root, obj);
}
if (std::isnan(value)) {
root["value"] = "\"NaN\"";
root["state"] = "NA";
} else {
root["value"] = value_accuracy_to_string(value, step_to_accuracy_decimals(obj->traits.get_step()));
std::string state = value_accuracy_to_string(value, step_to_accuracy_decimals(obj->traits.get_step()));
if (!obj->traits.get_unit_of_measurement().empty())
state += " " + obj->traits.get_unit_of_measurement();
root["state"] = state;
}

return builder.serialize();
}
#endif

@@ -966,15 +997,18 @@ std::string WebServer::date_all_json_generator(WebServer *web_server, void *sour
return web_server->date_json((datetime::DateEntity *) (source), DETAIL_ALL);
}
std::string WebServer::date_json(datetime::DateEntity *obj, JsonDetail start_config) {
return json::build_json([this, obj, start_config](JsonObject root) {
set_json_id(root, obj, "date-" + obj->get_object_id(), start_config);
std::string value = str_sprintf("%d-%02d-%02d", obj->year, obj->month, obj->day);
root["value"] = value;
root["state"] = value;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_id(root, obj, "date-" + obj->get_object_id(), start_config);
std::string value = str_sprintf("%d-%02d-%02d", obj->year, obj->month, obj->day);
root["value"] = value;
root["state"] = value;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif // USE_DATETIME_DATE

@@ -1021,15 +1055,18 @@ std::string WebServer::time_all_json_generator(WebServer *web_server, void *sour
return web_server->time_json((datetime::TimeEntity *) (source), DETAIL_ALL);
}
std::string WebServer::time_json(datetime::TimeEntity *obj, JsonDetail start_config) {
return json::build_json([this, obj, start_config](JsonObject root) {
set_json_id(root, obj, "time-" + obj->get_object_id(), start_config);
std::string value = str_sprintf("%02d:%02d:%02d", obj->hour, obj->minute, obj->second);
root["value"] = value;
root["state"] = value;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_id(root, obj, "time-" + obj->get_object_id(), start_config);
std::string value = str_sprintf("%02d:%02d:%02d", obj->hour, obj->minute, obj->second);
root["value"] = value;
root["state"] = value;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif // USE_DATETIME_TIME

@@ -1076,16 +1113,19 @@ std::string WebServer::datetime_all_json_generator(WebServer *web_server, void *
return web_server->datetime_json((datetime::DateTimeEntity *) (source), DETAIL_ALL);
}
std::string WebServer::datetime_json(datetime::DateTimeEntity *obj, JsonDetail start_config) {
return json::build_json([this, obj, start_config](JsonObject root) {
set_json_id(root, obj, "datetime-" + obj->get_object_id(), start_config);
std::string value = str_sprintf("%d-%02d-%02d %02d:%02d:%02d", obj->year, obj->month, obj->day, obj->hour,
obj->minute, obj->second);
root["value"] = value;
root["state"] = value;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_id(root, obj, "datetime-" + obj->get_object_id(), start_config);
std::string value =
str_sprintf("%d-%02d-%02d %02d:%02d:%02d", obj->year, obj->month, obj->day, obj->hour, obj->minute, obj->second);
root["value"] = value;
root["state"] = value;
if (start_config == DETAIL_ALL) {
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif // USE_DATETIME_DATETIME

@@ -1128,22 +1168,25 @@ std::string WebServer::text_all_json_generator(WebServer *web_server, void *sour
return web_server->text_json((text::Text *) (source), ((text::Text *) (source))->state, DETAIL_ALL);
}
std::string WebServer::text_json(text::Text *obj, const std::string &value, JsonDetail start_config) {
return json::build_json([this, obj, value, start_config](JsonObject root) {
set_json_id(root, obj, "text-" + obj->get_object_id(), start_config);
root["min_length"] = obj->traits.get_min_length();
root["max_length"] = obj->traits.get_max_length();
root["pattern"] = obj->traits.get_pattern();
if (obj->traits.get_mode() == text::TextMode::TEXT_MODE_PASSWORD) {
root["state"] = "********";
} else {
root["state"] = value;
}
root["value"] = value;
if (start_config == DETAIL_ALL) {
root["mode"] = (int) obj->traits.get_mode();
this->add_sorting_info_(root, obj);
}
});
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_id(root, obj, "text-" + obj->get_object_id(), start_config);
root["min_length"] = obj->traits.get_min_length();
root["max_length"] = obj->traits.get_max_length();
root["pattern"] = obj->traits.get_pattern();
if (obj->traits.get_mode() == text::TextMode::TEXT_MODE_PASSWORD) {
root["state"] = "********";
} else {
root["state"] = value;
}
root["value"] = value;
if (start_config == DETAIL_ALL) {
root["mode"] = (int) obj->traits.get_mode();
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif

@@ -1186,16 +1229,19 @@ std::string WebServer::select_all_json_generator(WebServer *web_server, void *so
return web_server->select_json((select::Select *) (source), ((select::Select *) (source))->state, DETAIL_ALL);
}
std::string WebServer::select_json(select::Select *obj, const std::string &value, JsonDetail start_config) {
return json::build_json([this, obj, value, start_config](JsonObject root) {
set_json_icon_state_value(root, obj, "select-" + obj->get_object_id(), value, value, start_config);
if (start_config == DETAIL_ALL) {
JsonArray opt = root["option"].to<JsonArray>();
for (auto &option : obj->traits.get_options()) {
opt.add(option);
}
this->add_sorting_info_(root, obj);
json::JsonBuilder builder;
JsonObject root = builder.root();

set_json_icon_state_value(root, obj, "select-" + obj->get_object_id(), value, value, start_config);
if (start_config == DETAIL_ALL) {
JsonArray opt = root["option"].to<JsonArray>();
for (auto &option : obj->traits.get_options()) {
opt.add(option);
}
});
this->add_sorting_info_(root, obj);
}

return builder.serialize();
}
#endif

@@ -1244,98 +1290,102 @@ void WebServer::handle_climate_request(AsyncWebServerRequest *request, const Url
request->send(404);
}
std::string WebServer::climate_state_json_generator(WebServer *web_server, void *source) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return web_server->climate_json((climate::Climate *) (source), DETAIL_STATE);
}
std::string WebServer::climate_all_json_generator(WebServer *web_server, void *source) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return web_server->climate_json((climate::Climate *) (source), DETAIL_ALL);
}
std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_config) {
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return json::build_json([this, obj, start_config](JsonObject root) {
set_json_id(root, obj, "climate-" + obj->get_object_id(), start_config);
const auto traits = obj->get_traits();
int8_t target_accuracy = traits.get_target_temperature_accuracy_decimals();
int8_t current_accuracy = traits.get_current_temperature_accuracy_decimals();
char buf[16];
json::JsonBuilder builder;
JsonObject root = builder.root();
set_json_id(root, obj, "climate-" + obj->get_object_id(), start_config);
const auto traits = obj->get_traits();
int8_t target_accuracy = traits.get_target_temperature_accuracy_decimals();
int8_t current_accuracy = traits.get_current_temperature_accuracy_decimals();
char buf[16];

if (start_config == DETAIL_ALL) {
JsonArray opt = root["modes"].to<JsonArray>();
for (climate::ClimateMode m : traits.get_supported_modes())
opt.add(PSTR_LOCAL(climate::climate_mode_to_string(m)));
if (!traits.get_supported_custom_fan_modes().empty()) {
JsonArray opt = root["fan_modes"].to<JsonArray>();
for (climate::ClimateFanMode m : traits.get_supported_fan_modes())
opt.add(PSTR_LOCAL(climate::climate_fan_mode_to_string(m)));
}

if (!traits.get_supported_custom_fan_modes().empty()) {
JsonArray opt = root["custom_fan_modes"].to<JsonArray>();
for (auto const &custom_fan_mode : traits.get_supported_custom_fan_modes())
opt.add(custom_fan_mode);
}
if (traits.get_supports_swing_modes()) {
JsonArray opt = root["swing_modes"].to<JsonArray>();
for (auto swing_mode : traits.get_supported_swing_modes())
opt.add(PSTR_LOCAL(climate::climate_swing_mode_to_string(swing_mode)));
}
if (traits.get_supports_presets() && obj->preset.has_value()) {
JsonArray opt = root["presets"].to<JsonArray>();
for (climate::ClimatePreset m : traits.get_supported_presets())
opt.add(PSTR_LOCAL(climate::climate_preset_to_string(m)));
}
if (!traits.get_supported_custom_presets().empty() && obj->custom_preset.has_value()) {
JsonArray opt = root["custom_presets"].to<JsonArray>();
for (auto const &custom_preset : traits.get_supported_custom_presets())
opt.add(custom_preset);
}
this->add_sorting_info_(root, obj);
if (start_config == DETAIL_ALL) {
JsonArray opt = root["modes"].to<JsonArray>();
for (climate::ClimateMode m : traits.get_supported_modes())
opt.add(PSTR_LOCAL(climate::climate_mode_to_string(m)));
if (!traits.get_supported_custom_fan_modes().empty()) {
JsonArray opt = root["fan_modes"].to<JsonArray>();
for (climate::ClimateFanMode m : traits.get_supported_fan_modes())
opt.add(PSTR_LOCAL(climate::climate_fan_mode_to_string(m)));
}

bool has_state = false;
root["mode"] = PSTR_LOCAL(climate_mode_to_string(obj->mode));
root["max_temp"] = value_accuracy_to_string(traits.get_visual_max_temperature(), target_accuracy);
root["min_temp"] = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
root["step"] = traits.get_visual_target_temperature_step();
if (traits.get_supports_action()) {
root["action"] = PSTR_LOCAL(climate_action_to_string(obj->action));
root["state"] = root["action"];
has_state = true;
}
if (traits.get_supports_fan_modes() && obj->fan_mode.has_value()) {
root["fan_mode"] = PSTR_LOCAL(climate_fan_mode_to_string(obj->fan_mode.value()));
}
if (!traits.get_supported_custom_fan_modes().empty() && obj->custom_fan_mode.has_value()) {
root["custom_fan_mode"] = obj->custom_fan_mode.value().c_str();
}
if (traits.get_supports_presets() && obj->preset.has_value()) {
root["preset"] = PSTR_LOCAL(climate_preset_to_string(obj->preset.value()));
}
if (!traits.get_supported_custom_presets().empty() && obj->custom_preset.has_value()) {
root["custom_preset"] = obj->custom_preset.value().c_str();
if (!traits.get_supported_custom_fan_modes().empty()) {
JsonArray opt = root["custom_fan_modes"].to<JsonArray>();
for (auto const &custom_fan_mode : traits.get_supported_custom_fan_modes())
opt.add(custom_fan_mode);
}
if (traits.get_supports_swing_modes()) {
root["swing_mode"] = PSTR_LOCAL(climate_swing_mode_to_string(obj->swing_mode));
JsonArray opt = root["swing_modes"].to<JsonArray>();
for (auto swing_mode : traits.get_supported_swing_modes())
opt.add(PSTR_LOCAL(climate::climate_swing_mode_to_string(swing_mode)));
}
if (traits.get_supports_current_temperature()) {
if (!std::isnan(obj->current_temperature)) {
root["current_temperature"] = value_accuracy_to_string(obj->current_temperature, current_accuracy);
} else {
root["current_temperature"] = "NA";
}
if (traits.get_supports_presets() && obj->preset.has_value()) {
JsonArray opt = root["presets"].to<JsonArray>();
for (climate::ClimatePreset m : traits.get_supported_presets())
opt.add(PSTR_LOCAL(climate::climate_preset_to_string(m)));
}
if (traits.get_supports_two_point_target_temperature()) {
root["target_temperature_low"] = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
root["target_temperature_high"] = value_accuracy_to_string(obj->target_temperature_high, target_accuracy);
if (!has_state) {
root["state"] = value_accuracy_to_string((obj->target_temperature_high + obj->target_temperature_low) / 2.0f,
target_accuracy);
}
if (!traits.get_supported_custom_presets().empty() && obj->custom_preset.has_value()) {
JsonArray opt = root["custom_presets"].to<JsonArray>();
for (auto const &custom_preset : traits.get_supported_custom_presets())
opt.add(custom_preset);
}
this->add_sorting_info_(root, obj);
}

bool has_state = false;
root["mode"] = PSTR_LOCAL(climate_mode_to_string(obj->mode));
root["max_temp"] = value_accuracy_to_string(traits.get_visual_max_temperature(), target_accuracy);
root["min_temp"] = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
root["step"] = traits.get_visual_target_temperature_step();
if (traits.get_supports_action()) {
root["action"] = PSTR_LOCAL(climate_action_to_string(obj->action));
root["state"] = root["action"];
has_state = true;
}
if (traits.get_supports_fan_modes() && obj->fan_mode.has_value()) {
root["fan_mode"] = PSTR_LOCAL(climate_fan_mode_to_string(obj->fan_mode.value()));
}
if (!traits.get_supported_custom_fan_modes().empty() && obj->custom_fan_mode.has_value()) {
||||
root["custom_fan_mode"] = obj->custom_fan_mode.value().c_str();
|
||||
}
|
||||
if (traits.get_supports_presets() && obj->preset.has_value()) {
|
||||
root["preset"] = PSTR_LOCAL(climate_preset_to_string(obj->preset.value()));
|
||||
}
|
||||
if (!traits.get_supported_custom_presets().empty() && obj->custom_preset.has_value()) {
|
||||
root["custom_preset"] = obj->custom_preset.value().c_str();
|
||||
}
|
||||
if (traits.get_supports_swing_modes()) {
|
||||
root["swing_mode"] = PSTR_LOCAL(climate_swing_mode_to_string(obj->swing_mode));
|
||||
}
|
||||
if (traits.get_supports_current_temperature()) {
|
||||
if (!std::isnan(obj->current_temperature)) {
|
||||
root["current_temperature"] = value_accuracy_to_string(obj->current_temperature, current_accuracy);
|
||||
} else {
|
||||
root["target_temperature"] = value_accuracy_to_string(obj->target_temperature, target_accuracy);
|
||||
if (!has_state)
|
||||
root["state"] = root["target_temperature"];
|
||||
root["current_temperature"] = "NA";
|
||||
}
|
||||
});
|
||||
}
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
root["target_temperature_low"] = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
|
||||
root["target_temperature_high"] = value_accuracy_to_string(obj->target_temperature_high, target_accuracy);
|
||||
if (!has_state) {
|
||||
root["state"] = value_accuracy_to_string((obj->target_temperature_high + obj->target_temperature_low) / 2.0f,
|
||||
target_accuracy);
|
||||
}
|
||||
} else {
|
||||
root["target_temperature"] = value_accuracy_to_string(obj->target_temperature, target_accuracy);
|
||||
if (!has_state)
|
||||
root["state"] = root["target_temperature"];
|
||||
}
|
||||
|
||||
return builder.serialize();
|
||||
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
|
||||
}
|
||||
#endif
|
||||
@@ -1401,13 +1451,16 @@ std::string WebServer::lock_all_json_generator(WebServer *web_server, void *sour
  return web_server->lock_json((lock::Lock *) (source), ((lock::Lock *) (source))->state, DETAIL_ALL);
}
std::string WebServer::lock_json(lock::Lock *obj, lock::LockState value, JsonDetail start_config) {
  return json::build_json([this, obj, value, start_config](JsonObject root) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();

  set_json_icon_state_value(root, obj, "lock-" + obj->get_object_id(), lock::lock_state_to_string(value), value,
                            start_config);
  if (start_config == DETAIL_ALL) {
    this->add_sorting_info_(root, obj);
  }
  });

  return builder.serialize();
}
#endif
@@ -1464,17 +1517,20 @@ std::string WebServer::valve_all_json_generator(WebServer *web_server, void *sou
  return web_server->valve_json((valve::Valve *) (source), DETAIL_ALL);
}
std::string WebServer::valve_json(valve::Valve *obj, JsonDetail start_config) {
  return json::build_json([this, obj, start_config](JsonObject root) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();

  set_json_icon_state_value(root, obj, "valve-" + obj->get_object_id(), obj->is_fully_closed() ? "CLOSED" : "OPEN",
                            obj->position, start_config);
  root["current_operation"] = valve::valve_operation_to_str(obj->current_operation);

  if (obj->get_traits().get_supports_position())
    root["position"] = obj->position;
  if (start_config == DETAIL_ALL) {
    this->add_sorting_info_(root, obj);
  }
  });

  return builder.serialize();
}
#endif
@@ -1533,14 +1589,17 @@ std::string WebServer::alarm_control_panel_all_json_generator(WebServer *web_ser
std::string WebServer::alarm_control_panel_json(alarm_control_panel::AlarmControlPanel *obj,
                                                alarm_control_panel::AlarmControlPanelState value,
                                                JsonDetail start_config) {
  return json::build_json([this, obj, value, start_config](JsonObject root) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();

  char buf[16];
  set_json_icon_state_value(root, obj, "alarm-control-panel-" + obj->get_object_id(),
                            PSTR_LOCAL(alarm_control_panel_state_to_string(value)), value, start_config);
  if (start_config == DETAIL_ALL) {
    this->add_sorting_info_(root, obj);
  }
  });

  return builder.serialize();
}
#endif
@@ -1577,20 +1636,23 @@ std::string WebServer::event_all_json_generator(WebServer *web_server, void *sou
  return web_server->event_json(event, get_event_type(event), DETAIL_ALL);
}
std::string WebServer::event_json(event::Event *obj, const std::string &event_type, JsonDetail start_config) {
  return json::build_json([this, obj, event_type, start_config](JsonObject root) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();

  set_json_id(root, obj, "event-" + obj->get_object_id(), start_config);
  if (!event_type.empty()) {
    root["event_type"] = event_type;
  }
  if (start_config == DETAIL_ALL) {
    JsonArray event_types = root["event_types"].to<JsonArray>();
    for (auto const &event_type : obj->get_event_types()) {
      event_types.add(event_type);
    }
    root["device_class"] = obj->get_device_class();
    this->add_sorting_info_(root, obj);
  }
  });

  return builder.serialize();
}
#endif
@@ -1637,25 +1699,30 @@ void WebServer::handle_update_request(AsyncWebServerRequest *request, const UrlM
  request->send(404);
}
std::string WebServer::update_state_json_generator(WebServer *web_server, void *source) {
  // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  return web_server->update_json((update::UpdateEntity *) (source), DETAIL_STATE);
}
std::string WebServer::update_all_json_generator(WebServer *web_server, void *source) {
  // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  return web_server->update_json((update::UpdateEntity *) (source), DETAIL_STATE);
}
std::string WebServer::update_json(update::UpdateEntity *obj, JsonDetail start_config) {
  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  return json::build_json([this, obj, start_config](JsonObject root) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();

  set_json_id(root, obj, "update-" + obj->get_object_id(), start_config);
  root["value"] = obj->update_info.latest_version;
  root["state"] = update_state_to_string(obj->state);
  if (start_config == DETAIL_ALL) {
    root["current_version"] = obj->update_info.current_version;
    root["title"] = obj->update_info.title;
    root["summary"] = obj->update_info.summary;
    root["release_url"] = obj->update_info.release_url;
    this->add_sorting_info_(root, obj);
  }
  });

  return builder.serialize();
  // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
}
#endif
@@ -1,7 +1,8 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import CONF_ID
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.core import CORE, coroutine_with_priority
from esphome.coroutine import CoroPriority

CODEOWNERS = ["@esphome/core"]
DEPENDENCIES = ["network"]
@@ -26,7 +27,7 @@ CONFIG_SCHEMA = cv.Schema(
)


@coroutine_with_priority(CoroPriority.COMMUNICATION)
@coroutine_with_priority(CoroPriority.WEB_SERVER_BASE)
async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)
@@ -317,8 +317,8 @@ AsyncEventSource::~AsyncEventSource() {
}

void AsyncEventSource::handleRequest(AsyncWebServerRequest *request) {
  auto *rsp =  // NOLINT(cppcoreguidelines-owning-memory)
      new AsyncEventSourceResponse(request, this, this->web_server_);
  // NOLINTNEXTLINE(cppcoreguidelines-owning-memory,clang-analyzer-cplusplus.NewDeleteLeaks)
  auto *rsp = new AsyncEventSourceResponse(request, this, this->web_server_);
  if (this->on_connect_) {
    this->on_connect_(rsp);
  }
@@ -392,10 +392,11 @@ AsyncEventSourceResponse::AsyncEventSourceResponse(const AsyncWebServerRequest *
#ifdef USE_WEBSERVER_SORTING
  for (auto &group : ws->sorting_groups_) {
    // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
    message = json::build_json([group](JsonObject root) {
      root["name"] = group.second.name;
      root["sorting_weight"] = group.second.weight;
    });
    json::JsonBuilder builder;
    JsonObject root = builder.root();
    root["name"] = group.second.name;
    root["sorting_weight"] = group.second.weight;
    message = builder.serialize();
    // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)

    // a (very) large number of these should be able to be queued initially without defer
@@ -1,4 +1,4 @@
import os
from pathlib import Path
from typing import TypedDict

import esphome.codegen as cg
@@ -48,7 +48,7 @@ class ZephyrData(TypedDict):
    bootloader: str
    prj_conf: dict[str, tuple[PrjConfValueType, bool]]
    overlay: str
    extra_build_files: dict[str, str]
    extra_build_files: dict[str, Path]
    pm_static: list[Section]
    user: dict[str, list[str]]

@@ -93,7 +93,7 @@ def zephyr_add_overlay(content):
    zephyr_data()[KEY_OVERLAY] += content


def add_extra_build_file(filename: str, path: str) -> bool:
def add_extra_build_file(filename: str, path: Path) -> bool:
    """Add an extra build file to the project."""
    extra_build_files = zephyr_data()[KEY_EXTRA_BUILD_FILES]
    if filename not in extra_build_files:
@@ -102,7 +102,7 @@ def add_extra_build_file(filename: str, path: str) -> bool:
    return False


def add_extra_script(stage: str, filename: str, path: str):
def add_extra_script(stage: str, filename: str, path: Path) -> None:
    """Add an extra script to the project."""
    key = f"{stage}:{filename}"
    if add_extra_build_file(filename, path):
@@ -144,7 +144,7 @@ def zephyr_to_code(config):
    add_extra_script(
        "pre",
        "pre_build.py",
        os.path.join(os.path.dirname(__file__), "pre_build.py.script"),
        Path(__file__).parent / "pre_build.py.script",
    )
43
esphome/components/zwave_proxy/__init__.py
Normal file
@@ -0,0 +1,43 @@
import esphome.codegen as cg
from esphome.components import uart
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_POWER_SAVE_MODE, CONF_WIFI
import esphome.final_validate as fv

CODEOWNERS = ["@kbx81"]
DEPENDENCIES = ["api", "uart"]

zwave_proxy_ns = cg.esphome_ns.namespace("zwave_proxy")
ZWaveProxy = zwave_proxy_ns.class_("ZWaveProxy", cg.Component, uart.UARTDevice)


def final_validate(config):
    full_config = fv.full_config.get()
    if (wifi_conf := full_config.get(CONF_WIFI)) and (
        wifi_conf.get(CONF_POWER_SAVE_MODE).lower() != "none"
    ):
        raise cv.Invalid(
            f"{CONF_WIFI} {CONF_POWER_SAVE_MODE} must be set to 'none' when using Z-Wave proxy"
        )

    return config


CONFIG_SCHEMA = (
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(ZWaveProxy),
        }
    )
    .extend(cv.COMPONENT_SCHEMA)
    .extend(uart.UART_DEVICE_SCHEMA)
)

FINAL_VALIDATE_SCHEMA = final_validate


async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)
    await uart.register_uart_device(var, config)
    cg.add_define("USE_ZWAVE_PROXY")
262
esphome/components/zwave_proxy/zwave_proxy.cpp
Normal file
@@ -0,0 +1,262 @@
#include "zwave_proxy.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#include "esphome/core/util.h"

namespace esphome {
namespace zwave_proxy {

static const char *const TAG = "zwave_proxy";

static constexpr uint8_t ZWAVE_COMMAND_GET_NETWORK_IDS = 0x20;
// GET_NETWORK_IDS response: [SOF][LENGTH][TYPE][CMD][HOME_ID(4)][NODE_ID][...]
static constexpr uint8_t ZWAVE_COMMAND_TYPE_RESPONSE = 0x01;     // Response type field value
static constexpr uint8_t ZWAVE_MIN_GET_NETWORK_IDS_LENGTH = 9;   // TYPE + CMD + HOME_ID(4) + NODE_ID + checksum

static uint8_t calculate_frame_checksum(const uint8_t *data, uint8_t length) {
  // Calculate Z-Wave frame checksum
  // XOR all bytes between SOF and checksum position (exclusive)
  // Initial value is 0xFF per Z-Wave protocol specification
  uint8_t checksum = 0xFF;
  for (uint8_t i = 1; i < length - 1; i++) {
    checksum ^= data[i];
  }
  return checksum;
}

ZWaveProxy::ZWaveProxy() { global_zwave_proxy = this; }

void ZWaveProxy::setup() { this->send_simple_command_(ZWAVE_COMMAND_GET_NETWORK_IDS); }

void ZWaveProxy::loop() {
  if (this->response_handler_()) {
    ESP_LOGV(TAG, "Handled late response");
  }
  if (this->api_connection_ != nullptr && (!this->api_connection_->is_connection_setup() || !api_is_connected())) {
    ESP_LOGW(TAG, "Subscriber disconnected");
    this->api_connection_ = nullptr;  // Unsubscribe if disconnected
  }

  while (this->available()) {
    uint8_t byte;
    if (!this->read_byte(&byte)) {
      this->status_set_warning("UART read failed");
      return;
    }
    if (this->parse_byte_(byte)) {
      // Check if this is a GET_NETWORK_IDS response frame
      // Frame format: [SOF][LENGTH][TYPE][CMD][HOME_ID(4)][NODE_ID][...]
      // We verify:
      // - buffer_[0]: Start of frame marker (0x01)
      // - buffer_[1]: Length field must be >= 9 to contain all required data
      // - buffer_[2]: Command type (0x01 for response)
      // - buffer_[3]: Command ID (0x20 for GET_NETWORK_IDS)
      if (this->buffer_[3] == ZWAVE_COMMAND_GET_NETWORK_IDS && this->buffer_[2] == ZWAVE_COMMAND_TYPE_RESPONSE &&
          this->buffer_[1] >= ZWAVE_MIN_GET_NETWORK_IDS_LENGTH && this->buffer_[0] == ZWAVE_FRAME_TYPE_START) {
        // Extract the 4-byte Home ID starting at offset 4
        // The frame parser has already validated the checksum and ensured all bytes are present
        std::memcpy(this->home_id_.data(), this->buffer_.data() + 4, this->home_id_.size());
        ESP_LOGI(TAG, "Home ID: %s",
                 format_hex_pretty(this->home_id_.data(), this->home_id_.size(), ':', false).c_str());
      }
      ESP_LOGV(TAG, "Sending to client: %s", YESNO(this->api_connection_ != nullptr));
      if (this->api_connection_ != nullptr) {
        // minimize copying to reduce CPU overhead
        if (this->in_bootloader_) {
          this->outgoing_proto_msg_.data_len = this->buffer_index_;
        } else {
          // If this is a data frame, use frame length indicator + 2 (for SoF + checksum), else assume 1 for ACK/NAK/CAN
          this->outgoing_proto_msg_.data_len = this->buffer_[0] == ZWAVE_FRAME_TYPE_START ? this->buffer_[1] + 2 : 1;
        }
        std::memcpy(this->outgoing_proto_msg_.data, this->buffer_.data(), this->outgoing_proto_msg_.data_len);
        this->api_connection_->send_message(this->outgoing_proto_msg_, api::ZWaveProxyFrame::MESSAGE_TYPE);
      }
    }
  }
  this->status_clear_warning();
}

void ZWaveProxy::dump_config() { ESP_LOGCONFIG(TAG, "Z-Wave Proxy"); }

void ZWaveProxy::zwave_proxy_request(api::APIConnection *api_connection, api::enums::ZWaveProxyRequestType type) {
  switch (type) {
    case api::enums::ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE:
      if (this->api_connection_ != nullptr) {
        ESP_LOGE(TAG, "Only one API subscription is allowed at a time");
        return;
      }
      this->api_connection_ = api_connection;
      ESP_LOGV(TAG, "API connection is now subscribed");
      break;
    case api::enums::ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE:
      if (this->api_connection_ != api_connection) {
        ESP_LOGV(TAG, "API connection is not subscribed");
        return;
      }
      this->api_connection_ = nullptr;
      break;
    default:
      ESP_LOGW(TAG, "Unknown request type: %d", type);
      break;
  }
}

void ZWaveProxy::send_frame(const uint8_t *data, size_t length) {
  if (length == 1 && data[0] == this->last_response_) {
    ESP_LOGV(TAG, "Skipping sending duplicate response: 0x%02X", data[0]);
    return;
  }
  ESP_LOGVV(TAG, "Sending: %s", format_hex_pretty(data, length).c_str());
  this->write_array(data, length);
}

void ZWaveProxy::send_simple_command_(const uint8_t command_id) {
  // Send a simple Z-Wave command with no parameters
  // Frame format: [SOF][LENGTH][TYPE][CMD][CHECKSUM]
  // Where LENGTH=0x03 (3 bytes: TYPE + CMD + CHECKSUM)
  uint8_t cmd[] = {0x01, 0x03, 0x00, command_id, 0x00};
  cmd[4] = calculate_frame_checksum(cmd, sizeof(cmd));
  this->send_frame(cmd, sizeof(cmd));
}

bool ZWaveProxy::parse_byte_(uint8_t byte) {
  bool frame_completed = false;
  // Basic parsing logic for received frames
  switch (this->parsing_state_) {
    case ZWAVE_PARSING_STATE_WAIT_START:
      this->parse_start_(byte);
      break;
    case ZWAVE_PARSING_STATE_WAIT_LENGTH:
      if (!byte) {
        ESP_LOGW(TAG, "Invalid LENGTH: %u", byte);
        this->parsing_state_ = ZWAVE_PARSING_STATE_SEND_NAK;
        return false;
      }
      ESP_LOGVV(TAG, "Received LENGTH: %u", byte);
      this->end_frame_after_ = this->buffer_index_ + byte;
      ESP_LOGVV(TAG, "Calculated EOF: %u", this->end_frame_after_);
      this->buffer_[this->buffer_index_++] = byte;
      this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_TYPE;
      break;
    case ZWAVE_PARSING_STATE_WAIT_TYPE:
      this->buffer_[this->buffer_index_++] = byte;
      ESP_LOGVV(TAG, "Received TYPE: 0x%02X", byte);
      this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_COMMAND_ID;
      break;
    case ZWAVE_PARSING_STATE_WAIT_COMMAND_ID:
      this->buffer_[this->buffer_index_++] = byte;
      ESP_LOGVV(TAG, "Received COMMAND ID: 0x%02X", byte);
      this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_PAYLOAD;
      break;
    case ZWAVE_PARSING_STATE_WAIT_PAYLOAD:
      this->buffer_[this->buffer_index_++] = byte;
      ESP_LOGVV(TAG, "Received PAYLOAD: 0x%02X", byte);
      if (this->buffer_index_ >= this->end_frame_after_) {
        this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_CHECKSUM;
      }
      break;
    case ZWAVE_PARSING_STATE_WAIT_CHECKSUM: {
      this->buffer_[this->buffer_index_++] = byte;
      auto checksum = calculate_frame_checksum(this->buffer_.data(), this->buffer_index_);
      ESP_LOGVV(TAG, "CHECKSUM Received: 0x%02X - Calculated: 0x%02X", byte, checksum);
      if (checksum != byte) {
        ESP_LOGW(TAG, "Bad checksum: expected 0x%02X, got 0x%02X", checksum, byte);
        this->parsing_state_ = ZWAVE_PARSING_STATE_SEND_NAK;
      } else {
        this->parsing_state_ = ZWAVE_PARSING_STATE_SEND_ACK;
        ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(this->buffer_.data(), this->buffer_index_).c_str());
        frame_completed = true;
      }
      this->response_handler_();
      break;
    }
    case ZWAVE_PARSING_STATE_READ_BL_MENU:
      this->buffer_[this->buffer_index_++] = byte;
      if (!byte) {
        this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_START;
        frame_completed = true;
      }
      break;
    case ZWAVE_PARSING_STATE_SEND_ACK:
    case ZWAVE_PARSING_STATE_SEND_NAK:
      break;  // Should not happen, handled in loop()
    default:
      ESP_LOGW(TAG, "Bad parsing state; resetting");
      this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_START;
      break;
  }
  return frame_completed;
}

void ZWaveProxy::parse_start_(uint8_t byte) {
  this->buffer_index_ = 0;
  this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_START;
  switch (byte) {
    case ZWAVE_FRAME_TYPE_START:
      ESP_LOGVV(TAG, "Received START");
      if (this->in_bootloader_) {
        ESP_LOGD(TAG, "Exited bootloader mode");
        this->in_bootloader_ = false;
      }
      this->buffer_[this->buffer_index_++] = byte;
      this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_LENGTH;
      return;
    case ZWAVE_FRAME_TYPE_BL_MENU:
      ESP_LOGVV(TAG, "Received BL_MENU");
      if (!this->in_bootloader_) {
        ESP_LOGD(TAG, "Entered bootloader mode");
        this->in_bootloader_ = true;
      }
      this->buffer_[this->buffer_index_++] = byte;
      this->parsing_state_ = ZWAVE_PARSING_STATE_READ_BL_MENU;
      return;
    case ZWAVE_FRAME_TYPE_BL_BEGIN_UPLOAD:
      ESP_LOGVV(TAG, "Received BL_BEGIN_UPLOAD");
      break;
    case ZWAVE_FRAME_TYPE_ACK:
      ESP_LOGVV(TAG, "Received ACK");
      break;
    case ZWAVE_FRAME_TYPE_NAK:
      ESP_LOGW(TAG, "Received NAK");
      break;
    case ZWAVE_FRAME_TYPE_CAN:
      ESP_LOGW(TAG, "Received CAN");
      break;
    default:
      ESP_LOGW(TAG, "Unrecognized START: 0x%02X", byte);
      return;
  }
  // Forward response (ACK/NAK/CAN) back to client for processing
  if (this->api_connection_ != nullptr) {
    this->outgoing_proto_msg_.data[0] = byte;
    this->outgoing_proto_msg_.data_len = 1;
    this->api_connection_->send_message(this->outgoing_proto_msg_, api::ZWaveProxyFrame::MESSAGE_TYPE);
  }
}

bool ZWaveProxy::response_handler_() {
  switch (this->parsing_state_) {
    case ZWAVE_PARSING_STATE_SEND_ACK:
      this->last_response_ = ZWAVE_FRAME_TYPE_ACK;
      break;
    case ZWAVE_PARSING_STATE_SEND_CAN:
      this->last_response_ = ZWAVE_FRAME_TYPE_CAN;
      break;
    case ZWAVE_PARSING_STATE_SEND_NAK:
      this->last_response_ = ZWAVE_FRAME_TYPE_NAK;
      break;
    default:
      return false;  // No response handled
  }

  ESP_LOGVV(TAG, "Sending %s (0x%02X)", this->last_response_ == ZWAVE_FRAME_TYPE_ACK ? "ACK" : "NAK/CAN",
            this->last_response_);
  this->write_byte(this->last_response_);
  this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_START;
  return true;
}

ZWaveProxy *global_zwave_proxy = nullptr;  // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)

}  // namespace zwave_proxy
}  // namespace esphome
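The checksum convention above is easy to sanity-check off-device. Below is a minimal Python sketch (not part of the component) that mirrors calculate_frame_checksum and rebuilds the GET_NETWORK_IDS request frame produced by send_simple_command_; the expected value 0xDC follows directly from XOR-ing the bytes shown above.

def calculate_frame_checksum(frame: bytes) -> int:
    # XOR of all bytes between SOF and the checksum slot, seeded with 0xFF,
    # matching the C++ implementation above.
    checksum = 0xFF
    for byte in frame[1:-1]:
        checksum ^= byte
    return checksum

# GET_NETWORK_IDS request: [SOF][LENGTH][TYPE][CMD][CHECKSUM]
frame = bytearray([0x01, 0x03, 0x00, 0x20, 0x00])
frame[-1] = calculate_frame_checksum(frame)
assert frame[-1] == 0xDC  # 0xFF ^ 0x03 ^ 0x00 ^ 0x20
print(frame.hex(" "))     # 01 03 00 20 dc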
81
esphome/components/zwave_proxy/zwave_proxy.h
Normal file
@@ -0,0 +1,81 @@
#pragma once

#include "esphome/components/api/api_connection.h"
#include "esphome/components/api/api_pb2.h"
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include "esphome/components/uart/uart.h"

#include <array>

namespace esphome {
namespace zwave_proxy {

enum ZWaveResponseTypes : uint8_t {
  ZWAVE_FRAME_TYPE_ACK = 0x06,
  ZWAVE_FRAME_TYPE_CAN = 0x18,
  ZWAVE_FRAME_TYPE_NAK = 0x15,
  ZWAVE_FRAME_TYPE_START = 0x01,
  ZWAVE_FRAME_TYPE_BL_MENU = 0x0D,
  ZWAVE_FRAME_TYPE_BL_BEGIN_UPLOAD = 0x43,
};

enum ZWaveParsingState : uint8_t {
  ZWAVE_PARSING_STATE_WAIT_START,
  ZWAVE_PARSING_STATE_WAIT_LENGTH,
  ZWAVE_PARSING_STATE_WAIT_TYPE,
  ZWAVE_PARSING_STATE_WAIT_COMMAND_ID,
  ZWAVE_PARSING_STATE_WAIT_PAYLOAD,
  ZWAVE_PARSING_STATE_WAIT_CHECKSUM,
  ZWAVE_PARSING_STATE_SEND_ACK,
  ZWAVE_PARSING_STATE_SEND_CAN,
  ZWAVE_PARSING_STATE_SEND_NAK,
  ZWAVE_PARSING_STATE_READ_BL_MENU,
};

enum ZWaveProxyFeature : uint32_t {
  FEATURE_ZWAVE_PROXY_ENABLED = 1 << 0,
};

class ZWaveProxy : public uart::UARTDevice, public Component {
 public:
  ZWaveProxy();

  void setup() override;
  void loop() override;
  void dump_config() override;

  void zwave_proxy_request(api::APIConnection *api_connection, api::enums::ZWaveProxyRequestType type);
  api::APIConnection *get_api_connection() { return this->api_connection_; }

  uint32_t get_feature_flags() const { return ZWaveProxyFeature::FEATURE_ZWAVE_PROXY_ENABLED; }
  uint32_t get_home_id() {
    return encode_uint32(this->home_id_[0], this->home_id_[1], this->home_id_[2], this->home_id_[3]);
  }

  void send_frame(const uint8_t *data, size_t length);

 protected:
  void send_simple_command_(uint8_t command_id);
  bool parse_byte_(uint8_t byte);  // Returns true if frame parsing was completed (a frame is ready in the buffer)
  void parse_start_(uint8_t byte);
  bool response_handler_();

  api::APIConnection *api_connection_{nullptr};  // Current subscribed client

  std::array<uint8_t, 4> home_id_{0, 0, 0, 0};  // Fixed buffer for home ID
  std::array<uint8_t, sizeof(api::ZWaveProxyFrame::data)> buffer_;  // Fixed buffer for incoming data
  uint8_t buffer_index_{0};    // Index for populating the data buffer
  uint8_t end_frame_after_{0};  // Payload reception ends after this index
  uint8_t last_response_{0};   // Last response type sent
  ZWaveParsingState parsing_state_{ZWAVE_PARSING_STATE_WAIT_START};
  bool in_bootloader_{false};  // True if the device is detected to be in bootloader mode

  // Pre-allocated message - always ready to send
  api::ZWaveProxyFrame outgoing_proto_msg_;
};

extern ZWaveProxy *global_zwave_proxy;  // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)

}  // namespace zwave_proxy
}  // namespace esphome
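For readers who want to trace the serial-frame handling without hardware, here is a hedged Python sketch of the same parse loop for data frames: SOF, then a LENGTH byte that covers TYPE + CMD + payload + checksum, then checksum verification deciding between ACK and NAK. The names mirror the constants above, but the (frame, response) return convention and the sample bytes are this sketch's own, not the component's API.

from functools import reduce

SOF = 0x01  # ZWAVE_FRAME_TYPE_START

def parse_frame(stream: bytes):
    """Return (frame, "ACK") for a valid data frame, (None, "NAK") on a bad checksum."""
    if not stream or stream[0] != SOF:
        return None, None
    length = stream[1]            # counts TYPE + CMD + payload + checksum
    frame = stream[: length + 2]  # +2 for the SOF and LENGTH bytes themselves
    checksum = reduce(lambda c, b: c ^ b, frame[1:-1], 0xFF)
    if checksum != frame[-1]:
        return None, "NAK"
    return frame, "ACK"

# Hypothetical GET_NETWORK_IDS response:
# [SOF][LENGTH=9][TYPE=0x01][CMD=0x20][HOME_ID(4)][NODE_ID][extra][CHECKSUM]
body = bytes([SOF, 0x09, 0x01, 0x20, 0xDE, 0xAD, 0xBE, 0xEF, 0x05, 0x00])
raw = body + bytes([reduce(lambda c, b: c ^ b, body[1:], 0xFF)])
frame, response = parse_frame(raw)
print(frame.hex(" "), response)  # valid frame -> ACK
print(frame[4:8].hex(":"))       # home ID bytes, as loop() extracts at offset 4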
@@ -32,7 +32,7 @@ from esphome.log import AnsiFore, color
from esphome.types import ConfigFragmentType, ConfigType
from esphome.util import OrderedDict, safe_print
from esphome.voluptuous_schema import ExtraKeysInvalid
from esphome.yaml_util import ESPForceValue, ESPHomeDataBase, is_secret
from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, is_secret

_LOGGER = logging.getLogger(__name__)

@@ -306,7 +306,7 @@ def recursive_check_replaceme(value):
        return cv.Schema([recursive_check_replaceme])(value)
    if isinstance(value, dict):
        return cv.Schema({cv.valid: recursive_check_replaceme})(value)
    if isinstance(value, ESPForceValue):
    if isinstance(value, ESPLiteralValue):
        pass
    if isinstance(value, str) and value == "REPLACEME":
        raise cv.Invalid(
@@ -314,7 +314,7 @@ def recursive_check_replaceme(value):
            "Please make sure you have replaced all fields from the sample "
            "configuration.\n"
            "If you want to use the literal REPLACEME string, "
            'please use "!force REPLACEME"'
            'please use "!literal REPLACEME"'
        )
    return value

@@ -846,7 +846,9 @@ class PinUseValidationCheck(ConfigValidationStep):


def validate_config(
    config: dict[str, Any], command_line_substitutions: dict[str, Any]
    config: dict[str, Any],
    command_line_substitutions: dict[str, Any],
    skip_external_update: bool = False,
) -> Config:
    result = Config()

@@ -859,7 +861,7 @@ def validate_config(

    result.add_output_path([CONF_PACKAGES], CONF_PACKAGES)
    try:
        config = do_packages_pass(config)
        config = do_packages_pass(config, skip_update=skip_external_update)
    except vol.Invalid as err:
        result.update(config)
        result.add_error(err)
@@ -896,7 +898,7 @@ def validate_config(

    result.add_output_path([CONF_EXTERNAL_COMPONENTS], CONF_EXTERNAL_COMPONENTS)
    try:
        do_external_components_pass(config)
        do_external_components_pass(config, skip_update=skip_external_update)
    except vol.Invalid as err:
        result.update(config)
        result.add_error(err)
@@ -1020,7 +1022,9 @@ class InvalidYAMLError(EsphomeError):
        self.base_exc = base_exc


def _load_config(command_line_substitutions: dict[str, Any]) -> Config:
def _load_config(
    command_line_substitutions: dict[str, Any], skip_external_update: bool = False
) -> Config:
    """Load the configuration file."""
    try:
        config = yaml_util.load_yaml(CORE.config_path)
@@ -1028,7 +1032,7 @@ def _load_config(command_line_substitutions: dict[str, Any]) -> Config:
        raise InvalidYAMLError(e) from e

    try:
        return validate_config(config, command_line_substitutions)
        return validate_config(config, command_line_substitutions, skip_external_update)
    except EsphomeError:
        raise
    except Exception:
@@ -1036,9 +1040,11 @@ def _load_config(command_line_substitutions: dict[str, Any]) -> Config:
        raise


def load_config(command_line_substitutions: dict[str, Any]) -> Config:
def load_config(
    command_line_substitutions: dict[str, Any], skip_external_update: bool = False
) -> Config:
    try:
        return _load_config(command_line_substitutions)
        return _load_config(command_line_substitutions, skip_external_update)
    except vol.Invalid as err:
        raise EsphomeError(f"Error while parsing config: {err}") from err

@@ -1178,10 +1184,10 @@ def strip_default_ids(config):
    return config


def read_config(command_line_substitutions):
def read_config(command_line_substitutions, skip_external_update=False):
    _LOGGER.info("Reading configuration %s...", CORE.config_path)
    try:
        res = load_config(command_line_substitutions)
        res = load_config(command_line_substitutions, skip_external_update)
    except EsphomeError as err:
        _LOGGER.error("Error while reading config: %s", err)
        return None
@@ -15,7 +15,7 @@ from ipaddress import (
    ip_network,
)
import logging
import os
from pathlib import Path
import re
from string import ascii_letters, digits
import uuid as uuid_
@@ -1609,34 +1609,32 @@ def dimensions(value):
        return dimensions([match.group(1), match.group(2)])


def directory(value):
def directory(value: object) -> Path:
    value = string(value)
    path = CORE.relative_config_path(value)

    if not os.path.exists(path):
    if not path.exists():
        raise Invalid(
            f"Could not find directory '{path}'. Please make sure it exists (full path: {os.path.abspath(path)})."
            f"Could not find directory '{path}'. Please make sure it exists (full path: {path.resolve()})."
        )
    if not os.path.isdir(path):
    if not path.is_dir():
        raise Invalid(
            f"Path '{path}' is not a directory (full path: {os.path.abspath(path)})."
            f"Path '{path}' is not a directory (full path: {path.resolve()})."
        )
    return value
    return path


def file_(value):
def file_(value: object) -> Path:
    value = string(value)
    path = CORE.relative_config_path(value)

    if not os.path.exists(path):
    if not path.exists():
        raise Invalid(
            f"Could not find file '{path}'. Please make sure it exists (full path: {os.path.abspath(path)})."
            f"Could not find file '{path}'. Please make sure it exists (full path: {path.resolve()})."
        )
    if not os.path.isfile(path):
        raise Invalid(
            f"Path '{path}' is not a file (full path: {os.path.abspath(path)})."
        )
    return value
    if not path.is_file():
        raise Invalid(f"Path '{path}' is not a file (full path: {path.resolve()}).")
    return path


ENTITY_ID_CHARACTERS = "abcdefghijklmnopqrstuvwxyz0123456789_"
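The directory/file_ validators now return pathlib.Path objects instead of echoing the original string back. A minimal standalone sketch of the new behavior, with a plain config_dir argument standing in for CORE.relative_config_path (which is not importable in isolation):

from pathlib import Path

def directory(value: str, config_dir: Path) -> Path:
    """Simplified mirror of cv.directory after the Path migration."""
    path = config_dir / Path(value).expanduser()
    if not path.exists():
        raise ValueError(f"Could not find directory '{path}' (full path: {path.resolve()}).")
    if not path.is_dir():
        raise ValueError(f"Path '{path}' is not a directory (full path: {path.resolve()}).")
    return path  # callers now receive a Path, not the original string

print(directory(".", Path.cwd()))  # the current directory, validated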
@@ -1269,6 +1269,7 @@ DEVICE_CLASS_PLUG = "plug"
|
||||
DEVICE_CLASS_PM1 = "pm1"
|
||||
DEVICE_CLASS_PM10 = "pm10"
|
||||
DEVICE_CLASS_PM25 = "pm25"
|
||||
DEVICE_CLASS_PM4 = "pm4"
|
||||
DEVICE_CLASS_POWER = "power"
|
||||
DEVICE_CLASS_POWER_FACTOR = "power_factor"
|
||||
DEVICE_CLASS_PRECIPITATION = "precipitation"
|
||||
|
@@ -3,6 +3,7 @@ from contextlib import contextmanager
import logging
import math
import os
from pathlib import Path
import re
from typing import TYPE_CHECKING

@@ -39,6 +40,8 @@ from esphome.helpers import ensure_unique_string, get_str_env, is_ha_addon
from esphome.util import OrderedDict

if TYPE_CHECKING:
    from esphome.address_cache import AddressCache

    from ..cpp_generator import MockObj, MockObjClass, Statement
    from ..types import ConfigType, EntityMetadata

@@ -381,7 +384,7 @@ class DocumentLocation:

    @classmethod
    def from_mark(cls, mark):
        return cls(mark.name, mark.line, mark.column)
        return cls(str(mark.name), mark.line, mark.column)

    def __str__(self):
        return f"{self.document} {self.line}:{self.column}"
@@ -536,9 +539,9 @@ class EsphomeCore:
        # The first key to this dict should always be the integration name
        self.data = {}
        # The relative path to the configuration YAML
        self.config_path: str | None = None
        self.config_path: Path | None = None
        # The relative path to where all build files are stored
        self.build_path: str | None = None
        self.build_path: Path | None = None
        # The validated configuration, this is None until the config has been validated
        self.config: ConfigType | None = None
        # The pending tasks in the task queue (mostly for C++ generation)
@@ -583,6 +586,8 @@ class EsphomeCore:
        self.id_classes = {}
        # The current component being processed during validation
        self.current_component: str | None = None
        # Address cache for DNS and mDNS lookups from command line arguments
        self.address_cache: AddressCache | None = None

    def reset(self):
        from esphome.pins import PIN_SCHEMA_REGISTRY
@@ -610,6 +615,7 @@ class EsphomeCore:
        self.platform_counts = defaultdict(int)
        self.unique_ids = {}
        self.current_component = None
        self.address_cache = None
        PIN_SCHEMA_REGISTRY.reset()

    @contextmanager
@@ -659,43 +665,46 @@ class EsphomeCore:
        return None

    @property
    def config_dir(self):
        return os.path.abspath(os.path.dirname(self.config_path))
    def config_dir(self) -> Path:
        if self.config_path.is_dir():
            return self.config_path.absolute()
        return self.config_path.absolute().parent

    @property
    def data_dir(self):
    def data_dir(self) -> Path:
        if is_ha_addon():
            return os.path.join("/data")
            return Path("/data")
        if "ESPHOME_DATA_DIR" in os.environ:
            return get_str_env("ESPHOME_DATA_DIR", None)
            return Path(get_str_env("ESPHOME_DATA_DIR", None))
        return self.relative_config_path(".esphome")

    @property
    def config_filename(self):
        return os.path.basename(self.config_path)
    def config_filename(self) -> str:
        return self.config_path.name

    def relative_config_path(self, *path):
        path_ = os.path.expanduser(os.path.join(*path))
        return os.path.join(self.config_dir, path_)
    def relative_config_path(self, *path: str | Path) -> Path:
        path_ = Path(*path).expanduser()
        return self.config_dir / path_

    def relative_internal_path(self, *path: str) -> str:
        return os.path.join(self.data_dir, *path)
    def relative_internal_path(self, *path: str | Path) -> Path:
        path_ = Path(*path).expanduser()
        return self.data_dir / path_

    def relative_build_path(self, *path):
        path_ = os.path.expanduser(os.path.join(*path))
        return os.path.join(self.build_path, path_)
    def relative_build_path(self, *path: str | Path) -> Path:
        path_ = Path(*path).expanduser()
        return self.build_path / path_

    def relative_src_path(self, *path):
    def relative_src_path(self, *path: str | Path) -> Path:
        return self.relative_build_path("src", *path)

    def relative_pioenvs_path(self, *path):
    def relative_pioenvs_path(self, *path: str | Path) -> Path:
        return self.relative_build_path(".pioenvs", *path)

    def relative_piolibdeps_path(self, *path):
    def relative_piolibdeps_path(self, *path: str | Path) -> Path:
        return self.relative_build_path(".piolibdeps", *path)

    @property
    def firmware_bin(self):
    def firmware_bin(self) -> Path:
        if self.is_libretiny:
            return self.relative_pioenvs_path(self.name, "firmware.uf2")
        return self.relative_pioenvs_path(self.name, "firmware.bin")
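A quick standalone illustration of the new Path semantics; DemoCore is a stand-in for EsphomeCore, reduced to the two helpers shown above, and the file name is a placeholder:

from pathlib import Path

class DemoCore:
    """Reduced stand-in for EsphomeCore: just the Path-based helpers above."""
    def __init__(self, config_path: Path) -> None:
        self.config_path = config_path

    @property
    def config_dir(self) -> Path:
        if self.config_path.is_dir():
            return self.config_path.absolute()
        return self.config_path.absolute().parent

    def relative_config_path(self, *path: str | Path) -> Path:
        return self.config_dir / Path(*path).expanduser()

core = DemoCore(Path("living_room.yaml"))
print(core.relative_config_path("secrets.yaml"))  # <cwd>/secrets.yaml, as a Path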
@@ -136,21 +136,21 @@ def validate_ids_and_references(config: ConfigType) -> ConfigType:
|
||||
return config
|
||||
|
||||
|
||||
def valid_include(value):
|
||||
def valid_include(value: str) -> str:
|
||||
# Look for "<...>" includes
|
||||
if value.startswith("<") and value.endswith(">"):
|
||||
return value
|
||||
try:
|
||||
return cv.directory(value)
|
||||
return str(cv.directory(value))
|
||||
except cv.Invalid:
|
||||
pass
|
||||
value = cv.file_(value)
|
||||
_, ext = os.path.splitext(value)
|
||||
path = cv.file_(value)
|
||||
ext = path.suffix
|
||||
if ext not in VALID_INCLUDE_EXTS:
|
||||
raise cv.Invalid(
|
||||
f"Include has invalid file extension {ext} - valid extensions are {', '.join(VALID_INCLUDE_EXTS)}"
|
||||
)
|
||||
return value
|
||||
return str(path)
|
||||
|
||||
|
||||
def valid_project_name(value: str):
|
||||
@@ -311,9 +311,9 @@ def preload_core_config(config, result) -> str:
|
||||
CORE.data[KEY_CORE] = {}
|
||||
|
||||
if CONF_BUILD_PATH not in conf:
|
||||
build_path = get_str_env("ESPHOME_BUILD_PATH", "build")
|
||||
conf[CONF_BUILD_PATH] = os.path.join(build_path, CORE.name)
|
||||
CORE.build_path = CORE.relative_internal_path(conf[CONF_BUILD_PATH])
|
||||
build_path = Path(get_str_env("ESPHOME_BUILD_PATH", "build"))
|
||||
conf[CONF_BUILD_PATH] = str(build_path / CORE.name)
|
||||
CORE.build_path = CORE.data_dir / conf[CONF_BUILD_PATH]
|
||||
|
||||
target_platforms = []
|
||||
|
||||
@@ -339,12 +339,12 @@ def preload_core_config(config, result) -> str:
|
||||
return target_platforms[0]
|
||||
|
||||
|
||||
def include_file(path, basename):
|
||||
parts = basename.split(os.path.sep)
|
||||
def include_file(path: Path, basename: Path):
|
||||
parts = basename.parts
|
||||
dst = CORE.relative_src_path(*parts)
|
||||
copy_file_if_changed(path, dst)
|
||||
|
||||
_, ext = os.path.splitext(path)
|
||||
ext = path.suffix
|
||||
if ext in [".h", ".hpp", ".tcc"]:
|
||||
# Header, add include statement
|
||||
cg.add_global(cg.RawStatement(f'#include "{basename}"'))
|
||||
@@ -377,18 +377,18 @@ async def add_arduino_global_workaround():
|
||||
|
||||
|
||||
@coroutine_with_priority(CoroPriority.FINAL)
|
||||
async def add_includes(includes):
|
||||
async def add_includes(includes: list[str]) -> None:
|
||||
# Add includes at the very end, so that the included files can access global variables
|
||||
for include in includes:
|
||||
path = CORE.relative_config_path(include)
|
||||
if os.path.isdir(path):
|
||||
if path.is_dir():
|
||||
# Directory, copy tree
|
||||
for p in walk_files(path):
|
||||
basename = os.path.relpath(p, os.path.dirname(path))
|
||||
basename = p.relative_to(path.parent)
|
||||
include_file(p, basename)
|
||||
else:
|
||||
# Copy file
|
||||
basename = os.path.basename(path)
|
||||
basename = Path(path.name)
|
||||
include_file(path, basename)
|
||||
|
||||
|
||||
|
@@ -100,6 +100,7 @@
#define USE_UART_DEBUGGER
#define USE_UPDATE
#define USE_VALVE
#define USE_ZWAVE_PROXY

// Feature flags which do not work for zephyr
#ifndef USE_ZEPHYR
@@ -90,11 +90,30 @@ class CoroPriority(enum.IntEnum):
    # Examples: status_led (80)
    STATUS = 80

    # Web server infrastructure
    # Examples: web_server_base (65)
    WEB_SERVER_BASE = 65

    # Network portal services
    # Examples: captive_portal (64)
    CAPTIVE_PORTAL = 64

    # Communication protocols and services
    # Examples: web_server_base (65), captive_portal (64), wifi (60), ethernet (60),
    # mdns (55), ota_updates (54), web_server_ota (52)
    # Examples: wifi (60), ethernet (60)
    COMMUNICATION = 60

    # Network discovery and management services
    # Examples: mdns (55)
    NETWORK_SERVICES = 55

    # OTA update services
    # Examples: ota_updates (54)
    OTA_UPDATES = 54

    # Web-based OTA services
    # Examples: web_server_ota (52)
    WEB_SERVER_OTA = 52

    # Application-level services
    # Examples: safe_mode (50)
    APPLICATION = 50
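A small self-contained sketch of how these tiers order setup work, assuming (as the 80-to-50 numbering above implies) that higher values run earlier:

import enum

class CoroPriority(enum.IntEnum):
    # Values copied from the diff above; comments trimmed
    STATUS = 80
    WEB_SERVER_BASE = 65
    CAPTIVE_PORTAL = 64
    COMMUNICATION = 60
    NETWORK_SERVICES = 55
    OTA_UPDATES = 54
    WEB_SERVER_OTA = 52
    APPLICATION = 50

pending = [CoroPriority.OTA_UPDATES, CoroPriority.WEB_SERVER_BASE, CoroPriority.COMMUNICATION]
for p in sorted(pending, reverse=True):  # higher priority first
    print(p.name, int(p))
# WEB_SERVER_BASE (65) -> COMMUNICATION (60) -> OTA_UPDATES (54): with the
# decorator change above, web_server_base now sets up before wifi/ethernet
# instead of sharing the generic COMMUNICATION tier with them.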
@@ -7,7 +7,6 @@ from dataclasses import dataclass
from functools import partial
import json
import logging
from pathlib import Path
import threading
from typing import Any

@@ -108,7 +107,7 @@ class ESPHomeDashboard:
        await self.loop.run_in_executor(None, self.load_ignored_devices)

    def load_ignored_devices(self) -> None:
        storage_path = Path(ignored_devices_storage_path())
        storage_path = ignored_devices_storage_path()
        try:
            with storage_path.open("r", encoding="utf-8") as f_handle:
                data = json.load(f_handle)
@@ -117,7 +116,7 @@ class ESPHomeDashboard:
            pass

    def save_ignored_devices(self) -> None:
        storage_path = Path(ignored_devices_storage_path())
        storage_path = ignored_devices_storage_path()
        with storage_path.open("w", encoding="utf-8") as f_handle:
            json.dump(
                {"ignored_devices": sorted(self.ignored_devices)}, indent=2, fp=f_handle
@@ -28,6 +28,21 @@ class DNSCache:
        self._cache: dict[str, tuple[float, list[str] | Exception]] = {}
        self._ttl = ttl

    def get_cached_addresses(
        self, hostname: str, now_monotonic: float
    ) -> list[str] | None:
        """Get cached addresses without triggering resolution.

        Returns None if not in cache, list of addresses if found.
        """
        # Normalize hostname for consistent lookups
        normalized = hostname.rstrip(".").lower()
        if expire_time_addresses := self._cache.get(normalized):
            expire_time, addresses = expire_time_addresses
            if expire_time > now_monotonic and not isinstance(addresses, Exception):
                return addresses
        return None

    async def async_resolve(
        self, hostname: str, now_monotonic: float
    ) -> list[str] | Exception:
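The lookup is purely time-based, so it can be exercised without any network I/O. A hedged sketch that re-implements the cache-read path locally rather than importing the dashboard class, assuming (as the code above shows) that entries are (expiry, addresses) tuples keyed by normalized hostname; the ttl default and hostnames are placeholders:

import time

class DNSCache:
    """Local re-implementation of the cache-read path shown above."""
    def __init__(self, ttl: float = 120.0) -> None:
        self._cache: dict[str, tuple[float, list[str] | Exception]] = {}
        self._ttl = ttl

    def get_cached_addresses(self, hostname: str, now_monotonic: float) -> list[str] | None:
        normalized = hostname.rstrip(".").lower()
        if entry := self._cache.get(normalized):
            expire_time, addresses = entry
            if expire_time > now_monotonic and not isinstance(addresses, Exception):
                return addresses
        return None

cache = DNSCache()
now = time.monotonic()
cache._cache["printer.example"] = (now + 120, ["192.168.1.23"])
print(cache.get_cached_addresses("PRINTER.example.", now))       # ['192.168.1.23']
print(cache.get_cached_addresses("printer.example", now + 300))  # None: expired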
@@ -5,7 +5,7 @@ from collections import defaultdict
from dataclasses import dataclass
from functools import lru_cache
import logging
import os
from pathlib import Path
from typing import TYPE_CHECKING, Any

from esphome import const, util
@@ -287,12 +287,12 @@ class DashboardEntries:
        for file in util.list_yaml_files([self._config_dir]):
            try:
                # Prefer the json storage path if it exists
                stat = os.stat(ext_storage_path(os.path.basename(file)))
                stat = ext_storage_path(file.name).stat()
            except OSError:
                try:
                    # Fallback to the yaml file if the storage
                    # file does not exist or could not be generated
                    stat = os.stat(file)
                    stat = file.stat()
                except OSError:
                    # File was deleted, ignore
                    continue
@@ -329,10 +329,10 @@ class DashboardEntry:
        "_to_dict",
    )

    def __init__(self, path: str, cache_key: DashboardCacheKeyType) -> None:
    def __init__(self, path: Path, cache_key: DashboardCacheKeyType) -> None:
        """Initialize the DashboardEntry."""
        self.path = path
        self.filename: str = os.path.basename(path)
        self.filename: str = path.name
        self._storage_path = ext_storage_path(self.filename)
        self.cache_key = cache_key
        self.storage: StorageJSON | None = None
@@ -365,7 +365,7 @@ class DashboardEntry:
            "loaded_integrations": sorted(self.loaded_integrations),
            "deployed_version": self.update_old,
            "current_version": self.update_new,
            "path": self.path,
            "path": str(self.path),
            "comment": self.comment,
            "address": self.address,
            "web_port": self.web_port,
@@ -27,7 +27,7 @@ class DashboardSettings:

    def __init__(self) -> None:
        """Initialize the dashboard settings."""
        self.config_dir: str = ""
        self.config_dir: Path = None
        self.password_hash: str = ""
        self.username: str = ""
        self.using_password: bool = False
@@ -45,10 +45,10 @@ class DashboardSettings:
        self.using_password = bool(password)
        if self.using_password:
            self.password_hash = password_hash(password)
        self.config_dir = args.configuration
        self.absolute_config_dir = Path(self.config_dir).resolve()
        self.config_dir = Path(args.configuration)
        self.absolute_config_dir = self.config_dir.resolve()
        self.verbose = args.verbose
        CORE.config_path = os.path.join(self.config_dir, ".")
        CORE.config_path = self.config_dir / "."

    @property
    def relative_url(self) -> str:
@@ -81,9 +81,9 @@ class DashboardSettings:
        # Compare password in constant running time (to prevent timing attacks)
        return hmac.compare_digest(self.password_hash, password_hash(password))

    def rel_path(self, *args: Any) -> str:
    def rel_path(self, *args: Any) -> Path:
        """Return a path relative to the ESPHome config folder."""
        joined_path = os.path.join(self.config_dir, *args)
        joined_path = self.config_dir / Path(*args)
        # Raises ValueError if not relative to ESPHome config folder
        Path(joined_path).resolve().relative_to(self.absolute_config_dir)
        joined_path.resolve().relative_to(self.absolute_config_dir)
        return joined_path
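The resolve().relative_to(...) call doubles as a path-traversal guard: any argument that escapes the config directory raises ValueError before the path is ever used. A standalone sketch of the same pattern, with placeholder paths:

from pathlib import Path

def rel_path(config_dir: Path, *parts: str) -> Path:
    """Simplified mirror of DashboardSettings.rel_path."""
    joined = config_dir / Path(*parts)
    # Raises ValueError if the resolved path escapes config_dir (e.g. via "..")
    joined.resolve().relative_to(config_dir.resolve())
    return joined

config = Path("/config")
print(rel_path(config, "living_room.yaml"))   # /config/living_room.yaml
try:
    rel_path(config, "..", "etc", "passwd")
except ValueError as err:
    print("rejected:", err)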
@@ -4,6 +4,9 @@ import asyncio
import logging
import typing

from zeroconf import AddressResolver, IPVersion

from esphome.address_cache import normalize_hostname
from esphome.zeroconf import (
    ESPHOME_SERVICE_TYPE,
    AsyncEsphomeZeroconf,
@@ -50,6 +53,30 @@ class MDNSStatus:
            return await aiozc.async_resolve_host(host_name)
        return None

    def get_cached_addresses(self, host_name: str) -> list[str] | None:
        """Get cached addresses for a host without triggering resolution.

        Returns None if not in cache or no zeroconf available.
        """
        if not self.aiozc:
            _LOGGER.debug("No zeroconf instance available for %s", host_name)
            return None

        # Normalize hostname and get the base name
        normalized = normalize_hostname(host_name)
        base_name = normalized.partition(".")[0]

        # Try to load from zeroconf cache without triggering resolution
        resolver_name = f"{base_name}.local."
        info = AddressResolver(resolver_name)
        # Let zeroconf use its own current time for cache checking
        if info.load_from_cache(self.aiozc.zeroconf):
            addresses = info.parsed_scoped_addresses(IPVersion.All)
            _LOGGER.debug("Found %s in zeroconf cache: %s", resolver_name, addresses)
            return addresses
        _LOGGER.debug("Not found in zeroconf cache: %s", resolver_name)
        return None

    async def async_refresh_hosts(self) -> None:
        """Refresh the hosts to track."""
        dashboard = self.dashboard
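The same cache-only lookup can be tried outside the dashboard. This sketch drives the python-zeroconf calls used above directly (a running Zeroconf instance is required, and the hostname is a placeholder):

from zeroconf import AddressResolver, IPVersion, Zeroconf

zc = Zeroconf()
try:
    # "mydevice" is a placeholder; only previously-seen records can answer,
    # since load_from_cache() never sends a query on the network
    info = AddressResolver("mydevice.local.")
    if info.load_from_cache(zc):
        print(info.parsed_scoped_addresses(IPVersion.All))
    else:
        print("not in cache")
finally:
    zc.close()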
@@ -1,63 +0,0 @@
import logging
import os
from pathlib import Path
import tempfile

_LOGGER = logging.getLogger(__name__)


def write_utf8_file(
    filename: Path,
    utf8_str: str,
    private: bool = False,
) -> None:
    """Write a file and rename it into place.

    Writes all or nothing.
    """
    write_file(filename, utf8_str.encode("utf-8"), private)


# from https://github.com/home-assistant/core/blob/dev/homeassistant/util/file.py
def write_file(
    filename: Path,
    utf8_data: bytes,
    private: bool = False,
) -> None:
    """Write a file and rename it into place.

    Writes all or nothing.
    """

    tmp_filename = ""
    missing_fchmod = False
    try:
        # Modern versions of Python tempfile create this file with mode 0o600
        with tempfile.NamedTemporaryFile(
            mode="wb", dir=os.path.dirname(filename), delete=False
        ) as fdesc:
            fdesc.write(utf8_data)
            tmp_filename = fdesc.name
            if not private:
                try:
                    os.fchmod(fdesc.fileno(), 0o644)
                except AttributeError:
                    # os.fchmod is not available on Windows
                    missing_fchmod = True

        os.replace(tmp_filename, filename)
        if missing_fchmod:
            os.chmod(filename, 0o644)
    finally:
        if os.path.exists(tmp_filename):
            try:
                os.remove(tmp_filename)
            except OSError as err:
                # If we are cleaning up then something else went wrong, so
                # we should suppress likely follow-on errors in the cleanup
                _LOGGER.error(
                    "File replacement cleanup failed for %s while saving %s: %s",
                    tmp_filename,
                    filename,
                    err,
                )
@@ -49,10 +49,10 @@ from esphome.storage_json import (
from esphome.util import get_serial_ports, shlex_quote
from esphome.yaml_util import FastestAvailableSafeLoader

from ..helpers import write_file
from .const import DASHBOARD_COMMAND
from .core import DASHBOARD
from .entries import UNKNOWN_STATE, entry_state_to_bool
from .util.file import write_file
from .core import DASHBOARD, ESPHomeDashboard
from .entries import UNKNOWN_STATE, DashboardEntry, entry_state_to_bool
from .util.subprocess import async_run_system_command
from .util.text import friendly_name_slugify
@@ -314,6 +314,73 @@ class EsphomeCommandWebSocket(tornado.websocket.WebSocketHandler):
        raise NotImplementedError


def build_cache_arguments(
    entry: DashboardEntry | None,
    dashboard: ESPHomeDashboard,
    now: float,
) -> list[str]:
    """Build cache arguments for passing to CLI.

    Args:
        entry: Dashboard entry for the configuration
        dashboard: Dashboard instance with cache access
        now: Current monotonic time for DNS cache expiry checks

    Returns:
        List of cache arguments to pass to CLI
    """
    cache_args: list[str] = []

    if not entry:
        return cache_args

    _LOGGER.debug(
        "Building cache for entry (address=%s, name=%s)",
        entry.address,
        entry.name,
    )

    def add_cache_entry(hostname: str, addresses: list[str], cache_type: str) -> None:
        """Add a cache entry to the command arguments."""
        if not addresses:
            return
        normalized = hostname.rstrip(".").lower()
        cache_args.extend(
            [
                f"--{cache_type}-address-cache",
                f"{normalized}={','.join(sort_ip_addresses(addresses))}",
            ]
        )

    # Check entry.address for cached addresses
    if use_address := entry.address:
        if use_address.endswith(".local"):
            # mDNS cache for .local addresses
            if (mdns := dashboard.mdns_status) and (
                cached := mdns.get_cached_addresses(use_address)
            ):
                _LOGGER.debug("mDNS cache hit for %s: %s", use_address, cached)
                add_cache_entry(use_address, cached, "mdns")
        # DNS cache for non-.local addresses
        elif cached := dashboard.dns_cache.get_cached_addresses(use_address, now):
            _LOGGER.debug("DNS cache hit for %s: %s", use_address, cached)
            add_cache_entry(use_address, cached, "dns")

    # Check entry.name if we haven't already cached via address
    # For mDNS devices, entry.name typically doesn't have .local suffix
    if entry.name and not use_address:
        mdns_name = (
            f"{entry.name}.local" if not entry.name.endswith(".local") else entry.name
        )
        if (mdns := dashboard.mdns_status) and (
            cached := mdns.get_cached_addresses(mdns_name)
        ):
            _LOGGER.debug("mDNS cache hit for %s: %s", mdns_name, cached)
            add_cache_entry(mdns_name, cached, "mdns")

    return cache_args


class EsphomePortCommandWebSocket(EsphomeCommandWebSocket):
    """Base class for commands that require a port."""
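For illustration, the flag pairs this helper emits look like the following (host names and addresses invented):

```python
# One "--<type>-address-cache host=ip[,ip...]" pair per cache hit:
cache_args = [
    "--mdns-address-cache", "livingroom.local=192.168.1.23",
    "--dns-address-cache", "sensor.example.com=10.0.0.5,10.0.0.6",
]
```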
@@ -326,52 +393,22 @@ class EsphomePortCommandWebSocket(EsphomeCommandWebSocket):
        configuration = json_message["configuration"]
        config_file = settings.rel_path(configuration)
        port = json_message["port"]
        addresses: list[str] = []

        # Build cache arguments to pass to CLI
        cache_args: list[str] = []

        if (
            port == "OTA"  # pylint: disable=too-many-boolean-expressions
            and (entry := entries.get(config_file))
            and entry.loaded_integrations
            and "api" in entry.loaded_integrations
        ):
            # First priority: entry.address AKA use_address
            if (
                (use_address := entry.address)
                and (
                    address_list := await dashboard.dns_cache.async_resolve(
                        use_address, time.monotonic()
                    )
                )
                and not isinstance(address_list, Exception)
            ):
                addresses.extend(sort_ip_addresses(address_list))
            cache_args = build_cache_arguments(entry, dashboard, time.monotonic())

            # Second priority: mDNS
            if (
                (mdns := dashboard.mdns_status)
                and (address_list := await mdns.async_resolve_host(entry.name))
                and (
                    new_addresses := [
                        addr for addr in address_list if addr not in addresses
                    ]
                )
            ):
                # Use the IP address if available but only
                # if the API is loaded and the device is online
                # since MQTT logging will not work otherwise
                addresses.extend(sort_ip_addresses(new_addresses))

        if not addresses:
            # If no address was found, use the port directly
            # as otherwise they will get the chooser which
            # does not work with the dashboard as there is no
            # interactive way to get keyboard input
            addresses = [port]

        device_args: list[str] = [
            arg for address in addresses for arg in ("--device", address)
        ]

        return [*DASHBOARD_COMMAND, *args, config_file, *device_args]
        # Cache arguments must come before the subcommand
        cmd = [*DASHBOARD_COMMAND, *cache_args, *args, config_file, "--device", port]
        _LOGGER.debug("Built command: %s", cmd)
        return cmd


class EsphomeLogsHandler(EsphomePortCommandWebSocket):
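The ordering matters because the cache flags are global CLI options; a resulting command looks roughly like this (entry names invented, and the exact shape of `DASHBOARD_COMMAND` is assumed here):

```python
# Global cache options first, then the subcommand and its arguments:
cmd = [
    "esphome", "--dashboard",
    "--mdns-address-cache", "livingroom.local=192.168.1.23",
    "logs", "livingroom.yaml", "--device", "OTA",
]
```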
@@ -544,7 +581,7 @@ class WizardRequestHandler(BaseHandler):
        destination = settings.rel_path(filename)

        # Check if destination file already exists
        if os.path.exists(destination):
        if destination.exists():
            self.set_status(409)  # Conflict status code
            self.set_header("content-type", "application/json")
            self.write(
@@ -761,10 +798,9 @@ class DownloadBinaryRequestHandler(BaseHandler):
            "download",
            f"{storage_json.name}-{file_name}",
        )
        path = os.path.dirname(storage_json.firmware_bin_path)
        path = os.path.join(path, file_name)
        path = storage_json.firmware_bin_path.with_name(file_name)

        if not Path(path).is_file():
        if not path.is_file():
            args = ["esphome", "idedata", settings.rel_path(configuration)]
            rc, stdout, _ = await async_run_system_command(args)

@@ -979,7 +1015,7 @@ class EditRequestHandler(BaseHandler):
            return

        filename = settings.rel_path(configuration)
        if Path(filename).resolve().parent != settings.absolute_config_dir:
        if filename.resolve().parent != settings.absolute_config_dir:
            self.send_error(404)
            return

@@ -1002,10 +1038,6 @@ class EditRequestHandler(BaseHandler):
            self.set_status(404)
            return None

    def _write_file(self, filename: str, content: bytes) -> None:
        """Write a file with the given content."""
        write_file(filename, content)

    @authenticated
    @bind_config
    async def post(self, configuration: str | None = None) -> None:
@@ -1015,12 +1047,12 @@ class EditRequestHandler(BaseHandler):
            return

        filename = settings.rel_path(configuration)
        if Path(filename).resolve().parent != settings.absolute_config_dir:
        if filename.resolve().parent != settings.absolute_config_dir:
            self.send_error(404)
            return

        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, self._write_file, filename, self.request.body)
        await loop.run_in_executor(None, write_file, filename, self.request.body)
        # Ensure the StorageJSON is updated as well
        DASHBOARD.entries.async_schedule_storage_json_update(filename)
        self.set_status(200)
@@ -1035,7 +1067,7 @@ class ArchiveRequestHandler(BaseHandler):

        archive_path = archive_storage_path()
        mkdir_p(archive_path)
        shutil.move(config_file, os.path.join(archive_path, configuration))
        shutil.move(config_file, archive_path / configuration)

        storage_json = StorageJSON.load(storage_path)
        if storage_json is not None and storage_json.build_path:
@@ -1049,7 +1081,7 @@ class UnArchiveRequestHandler(BaseHandler):
    def post(self, configuration: str | None = None) -> None:
        config_file = settings.rel_path(configuration)
        archive_path = archive_storage_path()
        shutil.move(os.path.join(archive_path, configuration), config_file)
        shutil.move(archive_path / configuration, config_file)


class LoginHandler(BaseHandler):
@@ -1136,7 +1168,7 @@ class SecretKeysRequestHandler(BaseHandler):

        for secret_filename in const.SECRETS_FILES:
            relative_filename = settings.rel_path(secret_filename)
            if os.path.isfile(relative_filename):
            if relative_filename.is_file():
                filename = relative_filename
                break

@@ -1169,16 +1201,17 @@ class JsonConfigRequestHandler(BaseHandler):
    @bind_config
    async def get(self, configuration: str | None = None) -> None:
        filename = settings.rel_path(configuration)
        if not os.path.isfile(filename):
        if not filename.is_file():
            self.send_error(404)
            return

        args = ["esphome", "config", filename, "--show-secrets"]
        args = ["esphome", "config", str(filename), "--show-secrets"]

        rc, stdout, _ = await async_run_system_command(args)
        rc, stdout, stderr = await async_run_system_command(args)

        if rc != 0:
            self.send_error(422)
            self.set_status(422)
            self.write(stderr)
            return

        data = yaml.load(stdout, Loader=SafeLoaderIgnoreUnknown)
@@ -1187,7 +1220,7 @@ class JsonConfigRequestHandler(BaseHandler):
        self.finish()


def get_base_frontend_path() -> str:
def get_base_frontend_path() -> Path:
    if ENV_DEV not in os.environ:
        import esphome_dashboard

@@ -1198,11 +1231,12 @@ def get_base_frontend_path() -> str:
        static_path += "/"

    # This path can be relative, so resolve against the root or else templates don't work
    return os.path.abspath(os.path.join(os.getcwd(), static_path, "esphome_dashboard"))
    path = Path(os.getcwd()) / static_path / "esphome_dashboard"
    return path.resolve()


def get_static_path(*args: Iterable[str]) -> str:
    return os.path.join(get_base_frontend_path(), "static", *args)
def get_static_path(*args: Iterable[str]) -> Path:
    return get_base_frontend_path() / "static" / Path(*args)


@functools.cache
@@ -1219,8 +1253,7 @@ def get_static_file_url(name: str) -> str:
        return base.replace("index.js", esphome_dashboard.entrypoint())

    path = get_static_path(name)
    with open(path, "rb") as f_handle:
        hash_ = hashlib.md5(f_handle.read()).hexdigest()[:8]
    hash_ = hashlib.md5(path.read_bytes()).hexdigest()[:8]
    return f"{base}?hash={hash_}"


@@ -1320,7 +1353,7 @@ def start_web_server(
    """Start the web server listener."""

    trash_path = trash_storage_path()
    if os.path.exists(trash_path):
    if trash_path.is_dir() and trash_path.exists():
        _LOGGER.info("Renaming 'trash' folder to 'archive'")
        archive_path = archive_storage_path()
        shutil.move(trash_path, archive_path)
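The remaining hunks are largely a mechanical os.path-to-pathlib migration; the correspondences relied on throughout are the standard ones:

```python
import os.path
from pathlib import Path

p = Path("configs") / "kitchen.yaml"
assert str(p) == os.path.join("configs", "kitchen.yaml")
assert p.name == os.path.basename(p)        # "kitchen.yaml"
assert str(p.parent) == os.path.dirname(p)  # "configs"
assert p.suffix == os.path.splitext(p)[1]   # ".yaml"
```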
@@ -4,6 +4,7 @@ import gzip
import hashlib
import io
import logging
from pathlib import Path
import random
import socket
import sys
@@ -191,7 +192,7 @@ def send_check(sock, data, msg):


def perform_ota(
    sock: socket.socket, password: str, file_handle: io.IOBase, filename: str
    sock: socket.socket, password: str, file_handle: io.IOBase, filename: Path
) -> None:
    file_contents = file_handle.read()
    file_size = len(file_contents)
@@ -309,12 +310,16 @@ def perform_ota(


def run_ota_impl_(
    remote_host: str | list[str], remote_port: int, password: str, filename: str
    remote_host: str | list[str], remote_port: int, password: str, filename: Path
) -> tuple[int, str | None]:
    from esphome.core import CORE

    # Handle both single host and list of hosts
    try:
        # Resolve all hosts at once for parallel DNS resolution
        res = resolve_ip_address(remote_host, remote_port)
        res = resolve_ip_address(
            remote_host, remote_port, address_cache=CORE.address_cache
        )
    except EsphomeError as err:
        _LOGGER.error(
            "Error resolving IP address of %s. Is it connected to WiFi?",
@@ -356,7 +361,7 @@ def run_ota_impl_(


def run_ota(
    remote_host: str | list[str], remote_port: int, password: str, filename: str
    remote_host: str | list[str], remote_port: int, password: str, filename: Path
) -> tuple[int, str | None]:
    try:
        return run_ota_impl_(remote_host, remote_port, password, filename)
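A hedged sketch of calling the updated OTA entry point (host, port, and the firmware path are invented for illustration):

```python
from pathlib import Path

# filename is now a Path; resolution may consult CORE.address_cache.
rc, used_host = run_ota(
    ["livingroom.local"],  # candidate host(s)
    3232,                  # OTA port
    "",                    # password; empty when none is configured
    Path(".esphome/build/livingroom/firmware.bin"),
)
```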
@@ -2,7 +2,6 @@ from __future__ import annotations

from datetime import datetime
import logging
import os
from pathlib import Path

import requests
@@ -23,11 +22,11 @@ CONTENT_DISPOSITION = "content-disposition"
TEMP_DIR = "temp"


def has_remote_file_changed(url, local_file_path):
    if os.path.exists(local_file_path):
def has_remote_file_changed(url: str, local_file_path: Path) -> bool:
    if local_file_path.exists():
        _LOGGER.debug("has_remote_file_changed: File exists at %s", local_file_path)
        try:
            local_modification_time = os.path.getmtime(local_file_path)
            local_modification_time = local_file_path.stat().st_mtime
            local_modification_time_str = datetime.utcfromtimestamp(
                local_modification_time
            ).strftime("%a, %d %b %Y %H:%M:%S GMT")
@@ -65,9 +64,9 @@ def has_remote_file_changed(url, local_file_path):
    return True


def is_file_recent(file_path: str, refresh: TimePeriodSeconds) -> bool:
    if os.path.exists(file_path):
        creation_time = os.path.getctime(file_path)
def is_file_recent(file_path: Path, refresh: TimePeriodSeconds) -> bool:
    if file_path.exists():
        creation_time = file_path.stat().st_ctime
        current_time = datetime.now().timestamp()
        return current_time - creation_time <= refresh.total_seconds
    return False
@@ -13,6 +13,9 @@ from esphome.core import CORE, TimePeriodSeconds

_LOGGER = logging.getLogger(__name__)

# Special value to indicate never refresh
NEVER_REFRESH = TimePeriodSeconds(seconds=-1)


def run_git_command(cmd, cwd=None) -> str:
    _LOGGER.debug("Running git command: %s", " ".join(cmd))
@@ -85,6 +88,11 @@ def clone_or_update(

    else:
        # Check refresh needed
        # Skip refresh if NEVER_REFRESH is specified
        if refresh == NEVER_REFRESH:
            _LOGGER.debug("Skipping update for %s (refresh disabled)", key)
            return repo_dir, None

        file_timestamp = Path(repo_dir / ".git" / "FETCH_HEAD")
        # On first clone, FETCH_HEAD does not exist
        if not file_timestamp.exists():
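A short sketch of the sentinel semantics (values taken from the hunk above):

```python
# NEVER_REFRESH short-circuits clone_or_update() before the FETCH_HEAD
# age check, so a cached checkout is reused indefinitely.
from esphome.core import TimePeriodSeconds

no_refresh = NEVER_REFRESH                # TimePeriodSeconds(seconds=-1)
daily = TimePeriodSeconds(seconds=86400)  # re-fetch at most once a day
```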
@@ -1,6 +1,5 @@
from __future__ import annotations

import codecs
from contextlib import suppress
import ipaddress
import logging
@@ -8,11 +7,16 @@ import os
from pathlib import Path
import platform
import re
import shutil
import tempfile
from typing import TYPE_CHECKING
from urllib.parse import urlparse

from esphome.const import __version__ as ESPHOME_VERSION

if TYPE_CHECKING:
    from esphome.address_cache import AddressCache

# Type aliases for socket address information
AddrInfo = tuple[
    int,  # family (AF_INET, AF_INET6, etc.)
@@ -136,16 +140,16 @@ def run_system_command(*args):
    return rc, stdout, stderr


def mkdir_p(path):
def mkdir_p(path: Path):
    if not path:
        # Empty path - means create current dir
        return
    try:
        os.makedirs(path)
        path.mkdir(parents=True, exist_ok=True)
    except OSError as err:
        import errno

        if err.errno == errno.EEXIST and os.path.isdir(path):
        if err.errno == errno.EEXIST and path.is_dir():
            pass
        else:
            from esphome.core import EsphomeError
@@ -173,7 +177,24 @@ def addr_preference_(res: AddrInfo) -> int:
    return 1


def resolve_ip_address(host: str | list[str], port: int) -> list[AddrInfo]:
def _add_ip_addresses_to_addrinfo(
    addresses: list[str], port: int, res: list[AddrInfo]
) -> None:
    """Helper to add IP addresses to addrinfo results with error handling."""
    import socket

    for addr in addresses:
        try:
            res += socket.getaddrinfo(
                addr, port, proto=socket.IPPROTO_TCP, flags=socket.AI_NUMERICHOST
            )
        except OSError:
            _LOGGER.debug("Failed to parse IP address '%s'", addr)


def resolve_ip_address(
    host: str | list[str], port: int, address_cache: AddressCache | None = None
) -> list[AddrInfo]:
    import socket

    # There are five cases here. The host argument could be one of:
@@ -194,47 +215,69 @@ def resolve_ip_address(host: str | list[str], port: int) -> list[AddrInfo]:
        hosts = [host]

    res: list[AddrInfo] = []

    # Fast path: if all hosts are already IP addresses
    if all(is_ip_address(h) for h in hosts):
        # Fast path: all are IP addresses, use socket.getaddrinfo with AI_NUMERICHOST
        for addr in hosts:
            try:
                res += socket.getaddrinfo(
                    addr, port, proto=socket.IPPROTO_TCP, flags=socket.AI_NUMERICHOST
                )
            except OSError:
                _LOGGER.debug("Failed to parse IP address '%s'", addr)
        _add_ip_addresses_to_addrinfo(hosts, port, res)
        # Sort by preference
        res.sort(key=addr_preference_)
        return res

    from esphome.resolver import AsyncResolver
    # Process hosts
    cached_addresses: list[str] = []
    uncached_hosts: list[str] = []
    has_cache = address_cache is not None

    resolver = AsyncResolver(hosts, port)
    addr_infos = resolver.resolve()
    # Convert aioesphomeapi AddrInfo to our format
    for addr_info in addr_infos:
        sockaddr = addr_info.sockaddr
        if addr_info.family == socket.AF_INET6:
            # IPv6
            sockaddr_tuple = (
                sockaddr.address,
                sockaddr.port,
                sockaddr.flowinfo,
                sockaddr.scope_id,
            )
    for h in hosts:
        if is_ip_address(h):
            if has_cache:
                # If we have a cache, treat IPs as cached
                cached_addresses.append(h)
            else:
                # If no cache, pass IPs through to resolver with hostnames
                uncached_hosts.append(h)
        elif address_cache and (cached := address_cache.get_addresses(h)):
            # Found in cache
            cached_addresses.extend(cached)
        else:
            # IPv4
            sockaddr_tuple = (sockaddr.address, sockaddr.port)
            # Not cached, need to resolve
            if address_cache and address_cache.has_cache():
                _LOGGER.info("Host %s not in cache, will need to resolve", h)
            uncached_hosts.append(h)

        res.append(
            (
                addr_info.family,
                addr_info.type,
                addr_info.proto,
                "",  # canonname
                sockaddr_tuple,
    # Process cached addresses (includes direct IPs and cached lookups)
    _add_ip_addresses_to_addrinfo(cached_addresses, port, res)

    # If we have uncached hosts (only non-IP hostnames), resolve them
    if uncached_hosts:
        from esphome.resolver import AsyncResolver

        resolver = AsyncResolver(uncached_hosts, port)
        addr_infos = resolver.resolve()
        # Convert aioesphomeapi AddrInfo to our format
        for addr_info in addr_infos:
            sockaddr = addr_info.sockaddr
            if addr_info.family == socket.AF_INET6:
                # IPv6
                sockaddr_tuple = (
                    sockaddr.address,
                    sockaddr.port,
                    sockaddr.flowinfo,
                    sockaddr.scope_id,
                )
            else:
                # IPv4
                sockaddr_tuple = (sockaddr.address, sockaddr.port)

            res.append(
                (
                    addr_info.family,
                    addr_info.type,
                    addr_info.proto,
                    "",  # canonname
                    sockaddr_tuple,
                )
            )
        )

    # Sort by preference
    res.sort(key=addr_preference_)
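Condensed, the branching above sorts each host into one of two buckets before any resolver work happens; a restatement of the logic, not new behavior:

```python
def split_hosts(hosts, address_cache):
    # cached: literal IPs (when a cache was passed) and cache hits;
    # uncached: everything that still needs a real DNS/mDNS lookup.
    cached, uncached = [], []
    for h in hosts:
        if is_ip_address(h) and address_cache is not None:
            cached.append(h)
        elif address_cache and (hit := address_cache.get_addresses(h)):
            cached.extend(hit)
        else:
            uncached.append(h)
    return cached, uncached
```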
@@ -256,14 +299,7 @@ def sort_ip_addresses(address_list: list[str]) -> list[str]:
    # First "resolve" all the IP addresses to getaddrinfo() tuples of the form
    # (family, type, proto, canonname, sockaddr)
    res: list[AddrInfo] = []
    for addr in address_list:
        # This should always work as these are supposed to be IP addresses
        try:
            res += socket.getaddrinfo(
                addr, 0, proto=socket.IPPROTO_TCP, flags=socket.AI_NUMERICHOST
            )
        except OSError:
            _LOGGER.info("Failed to parse IP address '%s'", addr)
    _add_ip_addresses_to_addrinfo(address_list, 0, res)

    # Now use that information to sort them.
    res.sort(key=addr_preference_)
@@ -295,16 +331,15 @@ def is_ha_addon():
    return get_bool_env("ESPHOME_IS_HA_ADDON")


def walk_files(path):
def walk_files(path: Path):
    for root, _, files in os.walk(path):
        for name in files:
            yield os.path.join(root, name)
            yield Path(root) / name


def read_file(path):
def read_file(path: Path) -> str:
    try:
        with codecs.open(path, "r", encoding="utf-8") as f_handle:
            return f_handle.read()
        return path.read_text(encoding="utf-8")
    except OSError as err:
        from esphome.core import EsphomeError

@@ -315,13 +350,15 @@ def read_file(path):
        raise EsphomeError(f"Error reading file {path}: {err}") from err


def _write_file(path: Path | str, text: str | bytes):
def _write_file(
    path: Path,
    text: str | bytes,
    private: bool = False,
) -> None:
    """Atomically writes `text` to the given path.

    Automatically creates all parent directories.
    """
    if not isinstance(path, Path):
        path = Path(path)
    data = text
    if isinstance(text, str):
        data = text.encode()
@@ -329,42 +366,54 @@ def _write_file(path: Path | str, text: str | bytes):
    directory = path.parent
    directory.mkdir(exist_ok=True, parents=True)

    tmp_path = None
    tmp_filename: Path | None = None
    missing_fchmod = False
    try:
        # Modern versions of Python tempfile create this file with mode 0o600
        with tempfile.NamedTemporaryFile(
            mode="wb", dir=directory, delete=False
        ) as f_handle:
            tmp_path = f_handle.name
            f_handle.write(data)
        # Newer tempfile implementations create the file with mode 0o600
        os.chmod(tmp_path, 0o644)
        # If destination exists, will be overwritten
        os.replace(tmp_path, path)
            tmp_filename = Path(f_handle.name)

            if not private:
                try:
                    os.fchmod(f_handle.fileno(), 0o644)
                except AttributeError:
                    # os.fchmod is not available on Windows
                    missing_fchmod = True
        shutil.move(tmp_filename, path)
        if missing_fchmod:
            path.chmod(0o644)
    finally:
        if tmp_path is not None and os.path.exists(tmp_path):
        if tmp_filename and tmp_filename.exists():
            try:
                os.remove(tmp_path)
                tmp_filename.unlink()
            except OSError as err:
                _LOGGER.error("Write file cleanup failed: %s", err)
                # If we are cleaning up then something else went wrong, so
                # we should suppress likely follow-on errors in the cleanup
                _LOGGER.error(
                    "File replacement cleanup failed for %s while saving %s: %s",
                    tmp_filename,
                    path,
                    err,
                )


def write_file(path: Path | str, text: str):
def write_file(path: Path, text: str | bytes, private: bool = False) -> None:
    try:
        _write_file(path, text)
        _write_file(path, text, private=private)
    except OSError as err:
        from esphome.core import EsphomeError

        raise EsphomeError(f"Could not write file at {path}") from err


def write_file_if_changed(path: Path | str, text: str) -> bool:
def write_file_if_changed(path: Path, text: str) -> bool:
    """Write text to the given path, but not if the contents match already.

    Returns true if the file was changed.
    """
    if not isinstance(path, Path):
        path = Path(path)

    src_content = None
    if path.is_file():
        src_content = read_file(path)
@@ -374,12 +423,10 @@ def write_file_if_changed(path: Path | str, text: str) -> bool:
    return True


def copy_file_if_changed(src: os.PathLike, dst: os.PathLike) -> None:
    import shutil

def copy_file_if_changed(src: Path, dst: Path) -> None:
    if file_compare(src, dst):
        return
    mkdir_p(os.path.dirname(dst))
    dst.parent.mkdir(parents=True, exist_ok=True)
    try:
        shutil.copyfile(src, dst)
    except OSError as err:
@@ -404,12 +451,12 @@ def list_starts_with(list_, sub):
    return len(sub) <= len(list_) and all(list_[i] == x for i, x in enumerate(sub))


def file_compare(path1: os.PathLike, path2: os.PathLike) -> bool:
def file_compare(path1: Path, path2: Path) -> bool:
    """Return True if the files path1 and path2 have the same contents."""
    import stat

    try:
        stat1, stat2 = os.stat(path1), os.stat(path2)
        stat1, stat2 = path1.stat(), path2.stat()
    except OSError:
        # File doesn't exist or another error -> not equal
        return False
@@ -426,7 +473,7 @@ def file_compare(path1: os.PathLike, path2: os.PathLike) -> bool:

    bufsize = 8 * 1024
    # Read files in blocks until a mismatch is found
    with open(path1, "rb") as fh1, open(path2, "rb") as fh2:
    with path1.open("rb") as fh1, path2.open("rb") as fh2:
        while True:
            blob1, blob2 = fh1.read(bufsize), fh2.read(bufsize)
            if blob1 != blob2:
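The rewritten `_write_file` keeps the classic atomic-replace recipe; stripped to its core, the pattern is (a sketch, not the full implementation above):

```python
import os
import tempfile
from pathlib import Path

def atomic_write(path: Path, data: bytes) -> None:
    # Write to a temp file in the destination directory so the final
    # rename stays on one filesystem, then swap it into place.
    with tempfile.NamedTemporaryFile(dir=path.parent, delete=False) as fh:
        fh.write(data)
        tmp = Path(fh.name)
    os.replace(tmp, path)  # readers never see a half-written file
```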
@@ -18,23 +18,25 @@ def patch_structhash():
    # removed/added. This might have unintended consequences, but this improves compile
    # times greatly when adding/removing components and a simple clean build solves
    # all issues
    from os import makedirs
    from os.path import getmtime, isdir, join

    from platformio.run import cli, helpers

    def patched_clean_build_dir(build_dir, *args):
        from platformio import fs
        from platformio.project.helpers import get_project_dir

        platformio_ini = join(get_project_dir(), "platformio.ini")
        platformio_ini = Path(get_project_dir()) / "platformio.ini"

        build_dir = Path(build_dir)

        # if project's config is modified
        if isdir(build_dir) and getmtime(platformio_ini) > getmtime(build_dir):
        if (
            build_dir.is_dir()
            and platformio_ini.stat().st_mtime > build_dir.stat().st_mtime
        ):
            fs.rmtree(build_dir)

        if not isdir(build_dir):
            makedirs(build_dir)
        if not build_dir.is_dir():
            build_dir.mkdir(parents=True)

    helpers.clean_build_dir = patched_clean_build_dir
    cli.clean_build_dir = patched_clean_build_dir
@@ -77,9 +79,9 @@ FILTER_PLATFORMIO_LINES = [

def run_platformio_cli(*args, **kwargs) -> str | int:
    os.environ["PLATFORMIO_FORCE_COLOR"] = "true"
    os.environ["PLATFORMIO_BUILD_DIR"] = os.path.abspath(CORE.relative_pioenvs_path())
    os.environ["PLATFORMIO_BUILD_DIR"] = str(CORE.relative_pioenvs_path().absolute())
    os.environ.setdefault(
        "PLATFORMIO_LIBDEPS_DIR", os.path.abspath(CORE.relative_piolibdeps_path())
        "PLATFORMIO_LIBDEPS_DIR", str(CORE.relative_piolibdeps_path().absolute())
    )
    # Suppress Python syntax warnings from third-party scripts during compilation
    os.environ.setdefault("PYTHONWARNINGS", "ignore::SyntaxWarning")
@@ -98,7 +100,7 @@ def run_platformio_cli(*args, **kwargs) -> str | int:


def run_platformio_cli_run(config, verbose, *args, **kwargs) -> str | int:
    command = ["run", "-d", CORE.build_path]
    command = ["run", "-d", str(CORE.build_path)]
    if verbose:
        command += ["-v"]
    command += list(args)
@@ -130,8 +132,8 @@ def _run_idedata(config):


def _load_idedata(config):
    platformio_ini = Path(CORE.relative_build_path("platformio.ini"))
    temp_idedata = Path(CORE.relative_internal_path("idedata", f"{CORE.name}.json"))
    platformio_ini = CORE.relative_build_path("platformio.ini")
    temp_idedata = CORE.relative_internal_path("idedata", f"{CORE.name}.json")

    changed = False
    if (
@@ -301,7 +303,7 @@ def process_stacktrace(config, line, backtrace_state):

@dataclass
class FlashImage:
    path: str
    path: Path
    offset: str


@@ -310,17 +312,17 @@ class IDEData:
        self.raw = raw

    @property
    def firmware_elf_path(self):
        return self.raw["prog_path"]
    def firmware_elf_path(self) -> Path:
        return Path(self.raw["prog_path"])

    @property
    def firmware_bin_path(self) -> str:
        return str(Path(self.firmware_elf_path).with_suffix(".bin"))
    def firmware_bin_path(self) -> Path:
        return self.firmware_elf_path.with_suffix(".bin")

    @property
    def extra_flash_images(self) -> list[FlashImage]:
        return [
            FlashImage(path=entry["path"], offset=entry["offset"])
            FlashImage(path=Path(entry["path"]), offset=entry["offset"])
            for entry in self.raw["extra"]["flash_images"]
        ]

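With `firmware_elf_path` now returning a `Path`, derived artifacts fall out of `with_suffix`/`with_name` instead of string splicing (paths invented):

```python
from pathlib import Path

elf = Path(".pioenvs/livingroom/firmware.elf")
assert elf.with_suffix(".bin") == Path(".pioenvs/livingroom/firmware.bin")
assert elf.with_name("bootloader.bin").name == "bootloader.bin"
```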
@@ -1,11 +1,11 @@
from __future__ import annotations

import binascii
import codecs
from datetime import datetime
import json
import logging
import os
from pathlib import Path

from esphome import const
from esphome.const import CONF_DISABLED, CONF_MDNS
@@ -16,30 +16,35 @@ from esphome.types import CoreType
_LOGGER = logging.getLogger(__name__)


def storage_path() -> str:
    return os.path.join(CORE.data_dir, "storage", f"{CORE.config_filename}.json")
def storage_path() -> Path:
    return CORE.data_dir / "storage" / f"{CORE.config_filename}.json"


def ext_storage_path(config_filename: str) -> str:
    return os.path.join(CORE.data_dir, "storage", f"{config_filename}.json")
def ext_storage_path(config_filename: str) -> Path:
    return CORE.data_dir / "storage" / f"{config_filename}.json"


def esphome_storage_path() -> str:
    return os.path.join(CORE.data_dir, "esphome.json")
def esphome_storage_path() -> Path:
    return CORE.data_dir / "esphome.json"


def ignored_devices_storage_path() -> str:
    return os.path.join(CORE.data_dir, "ignored-devices.json")
def ignored_devices_storage_path() -> Path:
    return CORE.data_dir / "ignored-devices.json"


def trash_storage_path() -> str:
def trash_storage_path() -> Path:
    return CORE.relative_config_path("trash")


def archive_storage_path() -> str:
def archive_storage_path() -> Path:
    return CORE.relative_config_path("archive")


def _to_path_if_not_none(value: str | None) -> Path | None:
    """Convert a string to Path if it's not None."""
    return Path(value) if value is not None else None


class StorageJSON:
    def __init__(
        self,
@@ -52,8 +57,8 @@ class StorageJSON:
        address: str,
        web_port: int | None,
        target_platform: str,
        build_path: str | None,
        firmware_bin_path: str | None,
        build_path: Path | None,
        firmware_bin_path: Path | None,
        loaded_integrations: set[str],
        loaded_platforms: set[str],
        no_mdns: bool,
@@ -107,8 +112,8 @@ class StorageJSON:
            "address": self.address,
            "web_port": self.web_port,
            "esp_platform": self.target_platform,
            "build_path": self.build_path,
            "firmware_bin_path": self.firmware_bin_path,
            "build_path": str(self.build_path),
            "firmware_bin_path": str(self.firmware_bin_path),
            "loaded_integrations": sorted(self.loaded_integrations),
            "loaded_platforms": sorted(self.loaded_platforms),
            "no_mdns": self.no_mdns,
@@ -176,8 +181,8 @@ class StorageJSON:
        )

    @staticmethod
    def _load_impl(path: str) -> StorageJSON | None:
        with codecs.open(path, "r", encoding="utf-8") as f_handle:
    def _load_impl(path: Path) -> StorageJSON | None:
        with path.open("r", encoding="utf-8") as f_handle:
            storage = json.load(f_handle)
        storage_version = storage["storage_version"]
        name = storage.get("name")
@@ -190,8 +195,8 @@ class StorageJSON:
        address = storage.get("address")
        web_port = storage.get("web_port")
        esp_platform = storage.get("esp_platform")
        build_path = storage.get("build_path")
        firmware_bin_path = storage.get("firmware_bin_path")
        build_path = _to_path_if_not_none(storage.get("build_path"))
        firmware_bin_path = _to_path_if_not_none(storage.get("firmware_bin_path"))
        loaded_integrations = set(storage.get("loaded_integrations", []))
        loaded_platforms = set(storage.get("loaded_platforms", []))
        no_mdns = storage.get("no_mdns", False)
@@ -217,7 +222,7 @@ class StorageJSON:
        )

    @staticmethod
    def load(path: str) -> StorageJSON | None:
    def load(path: Path) -> StorageJSON | None:
        try:
            return StorageJSON._load_impl(path)
        except Exception:  # pylint: disable=broad-except
@@ -268,7 +273,7 @@ class EsphomeStorageJSON:

    @staticmethod
    def _load_impl(path: str) -> EsphomeStorageJSON | None:
        with codecs.open(path, "r", encoding="utf-8") as f_handle:
        with Path(path).open("r", encoding="utf-8") as f_handle:
            storage = json.load(f_handle)
        storage_version = storage["storage_version"]
        cookie_secret = storage.get("cookie_secret")
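The serialization round trip implied by these hunks is str-on-disk, Path-in-memory; a self-contained sketch of the helper:

```python
from pathlib import Path

def to_path_if_not_none(value: str | None) -> Path | None:
    # Mirrors _to_path_if_not_none() above: None stays None.
    return Path(value) if value is not None else None

assert to_path_if_not_none(None) is None
assert to_path_if_not_none(".esphome/build") == Path(".esphome/build")
```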
@@ -1,7 +1,6 @@
import collections
import io
import logging
import os
from pathlib import Path
import re
import subprocess
@@ -86,7 +85,10 @@ def safe_input(prompt=""):
    return input()


def shlex_quote(s):
def shlex_quote(s: str | Path) -> str:
    # Convert Path objects to strings
    if isinstance(s, Path):
        s = str(s)
    if not s:
        return "''"
    if re.search(r"[^\w@%+=:,./-]", s) is None:
@@ -272,25 +274,28 @@ class OrderedDict(collections.OrderedDict):
        return dict(self).__repr__()


def list_yaml_files(configs: list[str]) -> list[str]:
    files: list[str] = []
def list_yaml_files(configs: list[str | Path]) -> list[Path]:
    files: list[Path] = []
    for config in configs:
        if os.path.isfile(config):
        config = Path(config)
        if not config.exists():
            raise FileNotFoundError(f"Config path '{config}' does not exist!")
        if config.is_file():
            files.append(config)
        else:
            files.extend(os.path.join(config, p) for p in os.listdir(config))
            files.extend(config.glob("*"))
    files = filter_yaml_files(files)
    return sorted(files)


def filter_yaml_files(files: list[str]) -> list[str]:
def filter_yaml_files(files: list[Path]) -> list[Path]:
    return [
        f
        for f in files
        if (
            os.path.splitext(f)[1] in (".yaml", ".yml")
            and os.path.basename(f) not in ("secrets.yaml", "secrets.yml")
            and not os.path.basename(f).startswith(".")
            f.suffix in (".yaml", ".yml")
            and f.name not in ("secrets.yaml", "secrets.yml")
            and not f.name.startswith(".")
        )
    ]

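The filter keeps only real config files; given the rules above, a hedged example of what survives:

```python
from pathlib import Path

candidates = [
    Path("kitchen.yaml"),  # kept
    Path("garage.yml"),    # kept
    Path("secrets.yaml"),  # dropped: secrets file
    Path(".hidden.yml"),   # dropped: dotfile
    Path("notes.txt"),     # dropped: wrong suffix
]
kept = [
    f for f in candidates
    if f.suffix in (".yaml", ".yml")
    and f.name not in ("secrets.yaml", "secrets.yml")
    and not f.name.startswith(".")
]
assert kept == [Path("kitchen.yaml"), Path("garage.yml")]
```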
@@ -2,7 +2,7 @@ from __future__ import annotations
from io import StringIO
import json
import os
from pathlib import Path
from typing import Any

from esphome.config import Config, _format_vol_invalid, validate_config
@@ -67,24 +67,24 @@ def _read_file_content_from_json_on_stdin() -> str:
    return data["content"]


def _print_file_read_event(path: str) -> None:
def _print_file_read_event(path: Path) -> None:
    """Print a file read event."""
    print(
        json.dumps(
            {
                "type": "read_file",
                "path": path,
                "path": str(path),
            }
        )
    )


def _request_and_get_stream_on_stdin(fname: str) -> StringIO:
def _request_and_get_stream_on_stdin(fname: Path) -> StringIO:
    _print_file_read_event(fname)
    return StringIO(_read_file_content_from_json_on_stdin())


def _vscode_loader(fname: str) -> dict[str, Any]:
def _vscode_loader(fname: Path) -> dict[str, Any]:
    raw_yaml_stream = _request_and_get_stream_on_stdin(fname)
    # it is required to set the name on StringIO so document on start_mark
    # is set properly. Otherwise it is initialized with "<file>"
@@ -92,7 +92,7 @@ def _vscode_loader(fname: str) -> dict[str, Any]:
    return parse_yaml(fname, raw_yaml_stream, _vscode_loader)


def _ace_loader(fname: str) -> dict[str, Any]:
def _ace_loader(fname: Path) -> dict[str, Any]:
    raw_yaml_stream = _request_and_get_stream_on_stdin(fname)
    return parse_yaml(fname, raw_yaml_stream)

@@ -120,10 +120,10 @@ def read_config(args):
        return
    CORE.vscode = True
    if args.ace:  # Running from ESPHome Compiler dashboard, not vscode
        CORE.config_path = os.path.join(args.configuration, data["file"])
        CORE.config_path = Path(args.configuration) / data["file"]
        loader = _ace_loader
    else:
        CORE.config_path = data["file"]
        CORE.config_path = Path(data["file"])
        loader = _vscode_loader

    file_name = CORE.config_path
@@ -1,4 +1,4 @@
import os
from pathlib import Path
import random
import string
from typing import Literal, NotRequired, TypedDict, Unpack
@@ -213,7 +213,7 @@ class WizardWriteKwargs(TypedDict):
    file_text: NotRequired[str]


def wizard_write(path: str, **kwargs: Unpack[WizardWriteKwargs]) -> bool:
def wizard_write(path: Path, **kwargs: Unpack[WizardWriteKwargs]) -> bool:
    from esphome.components.bk72xx import boards as bk72xx_boards
    from esphome.components.esp32 import boards as esp32_boards
    from esphome.components.esp8266 import boards as esp8266_boards
@@ -256,13 +256,13 @@ def wizard_write(path: str, **kwargs: Unpack[WizardWriteKwargs]) -> bool:
    file_text = wizard_file(**kwargs)

    # Check if file already exists to prevent overwriting
    if os.path.exists(path) and os.path.isfile(path):
    if path.exists() and path.is_file():
        safe_print(color(AnsiFore.RED, f'The file "{path}" already exists.'))
        return False

    write_file(path, file_text)
    storage = StorageJSON.from_wizard(name, name, f"{name}.local", hardware)
    storage_path = ext_storage_path(os.path.basename(path))
    storage_path = ext_storage_path(path.name)
    storage.save(storage_path)

    return True
@@ -301,7 +301,7 @@ def strip_accents(value: str) -> str:
    )


def wizard(path: str) -> int:
def wizard(path: Path) -> int:
    from esphome.components.bk72xx import boards as bk72xx_boards
    from esphome.components.esp32 import boards as esp32_boards
    from esphome.components.esp8266 import boards as esp8266_boards
@@ -309,14 +309,14 @@ def wizard(path: str) -> int:
    from esphome.components.rp2040 import boards as rp2040_boards
    from esphome.components.rtl87xx import boards as rtl87xx_boards

    if not path.endswith(".yaml") and not path.endswith(".yml"):
    if path.suffix not in (".yaml", ".yml"):
        safe_print(
            f"Please make your configuration file {color(AnsiFore.CYAN, path)} have the extension .yaml or .yml"
            f"Please make your configuration file {color(AnsiFore.CYAN, str(path))} have the extension .yaml or .yml"
        )
        return 1
    if os.path.exists(path):
    if path.exists():
        safe_print(
            f"Uh oh, it seems like {color(AnsiFore.CYAN, path)} already exists, please delete that file first or choose another configuration file."
            f"Uh oh, it seems like {color(AnsiFore.CYAN, str(path))} already exists, please delete that file first or choose another configuration file."
        )
        return 2

@@ -549,7 +549,7 @@ def wizard(path: str) -> int:
    safe_print()
    safe_print(
        color(AnsiFore.CYAN, "DONE! I've now written a new configuration file to ")
        + color(AnsiFore.BOLD_CYAN, path)
        + color(AnsiFore.BOLD_CYAN, str(path))
    )
    safe_print()
    safe_print("Next steps:")
@@ -266,7 +266,7 @@ def generate_version_h():

def write_cpp(code_s):
    path = CORE.relative_src_path("main.cpp")
    if os.path.isfile(path):
    if path.is_file():
        text = read_file(path)
        code_format = find_begin_end(
            text, CPP_AUTO_GENERATE_BEGIN, CPP_AUTO_GENERATE_END
@@ -292,28 +292,33 @@ def write_cpp(code_s):

def clean_cmake_cache():
    pioenvs = CORE.relative_pioenvs_path()
    if os.path.isdir(pioenvs):
        pioenvs_cmake_path = CORE.relative_pioenvs_path(CORE.name, "CMakeCache.txt")
        if os.path.isfile(pioenvs_cmake_path):
    if pioenvs.is_dir():
        pioenvs_cmake_path = pioenvs / CORE.name / "CMakeCache.txt"
        if pioenvs_cmake_path.is_file():
            _LOGGER.info("Deleting %s", pioenvs_cmake_path)
            os.remove(pioenvs_cmake_path)
            pioenvs_cmake_path.unlink()


def clean_build():
    import shutil

    # Allow skipping cache cleaning for integration tests
    if os.environ.get("ESPHOME_SKIP_CLEAN_BUILD"):
        _LOGGER.warning("Skipping build cleaning (ESPHOME_SKIP_CLEAN_BUILD set)")
        return

    pioenvs = CORE.relative_pioenvs_path()
    if os.path.isdir(pioenvs):
    if pioenvs.is_dir():
        _LOGGER.info("Deleting %s", pioenvs)
        shutil.rmtree(pioenvs)
    piolibdeps = CORE.relative_piolibdeps_path()
    if os.path.isdir(piolibdeps):
    if piolibdeps.is_dir():
        _LOGGER.info("Deleting %s", piolibdeps)
        shutil.rmtree(piolibdeps)
    dependencies_lock = CORE.relative_build_path("dependencies.lock")
    if os.path.isfile(dependencies_lock):
    if dependencies_lock.is_file():
        _LOGGER.info("Deleting %s", dependencies_lock)
        os.remove(dependencies_lock)
        dependencies_lock.unlink()

    # Clean PlatformIO cache to resolve CMake compiler detection issues
    # This helps when toolchain paths change or get corrupted
@@ -324,9 +329,11 @@ def clean_build():
        pass
    else:
        cache_dir = get_project_cache_dir()
        if cache_dir and cache_dir.strip() and os.path.isdir(cache_dir):
            _LOGGER.info("Deleting PlatformIO cache %s", cache_dir)
            shutil.rmtree(cache_dir)
        if cache_dir and cache_dir.strip():
            cache_path = Path(cache_dir)
            if cache_path.is_dir():
                _LOGGER.info("Deleting PlatformIO cache %s", cache_dir)
                shutil.rmtree(cache_dir)


GITIGNORE_CONTENT = """# Gitignore settings for ESPHome
@@ -339,6 +346,5 @@ GITIGNORE_CONTENT = """# Gitignore settings for ESPHome

def write_gitignore():
    path = CORE.relative_config_path(".gitignore")
    if not os.path.isfile(path):
        with open(file=path, mode="w", encoding="utf-8") as f:
            f.write(GITIGNORE_CONTENT)
    if not path.is_file():
        path.write_text(GITIGNORE_CONTENT, encoding="utf-8")
@@ -1,7 +1,6 @@
from __future__ import annotations

from collections.abc import Callable
import fnmatch
import functools
import inspect
from io import BytesIO, TextIOBase, TextIOWrapper
@@ -9,6 +8,7 @@ from ipaddress import _BaseAddress, _BaseNetwork
import logging
import math
import os
from pathlib import Path
from typing import Any
import uuid

@@ -69,7 +69,7 @@ class ESPHomeDataBase:
        self._content_offset = database.content_offset


class ESPForceValue:
class ESPLiteralValue:
    pass


@@ -109,7 +109,9 @@ def _add_data_ref(fn):
class ESPHomeLoaderMixin:
    """Loader class that keeps track of line numbers."""

    def __init__(self, name: str, yaml_loader: Callable[[str], dict[str, Any]]) -> None:
    def __init__(
        self, name: Path, yaml_loader: Callable[[Path], dict[str, Any]]
    ) -> None:
        """Initialize the loader."""
        self.name = name
        self.yaml_loader = yaml_loader
@@ -254,12 +256,8 @@ class ESPHomeLoaderMixin:
                f"Environment variable '{node.value}' not defined", node.start_mark
            )

    @property
    def _directory(self) -> str:
        return os.path.dirname(self.name)

    def _rel_path(self, *args: str) -> str:
        return os.path.join(self._directory, *args)
    def _rel_path(self, *args: str) -> Path:
        return self.name.parent / Path(*args)

    @_add_data_ref
    def construct_secret(self, node: yaml.Node) -> str:
@@ -269,8 +267,8 @@ class ESPHomeLoaderMixin:
        if self.name == CORE.config_path:
            raise e
        try:
            main_config_dir = os.path.dirname(CORE.config_path)
            main_secret_yml = os.path.join(main_config_dir, SECRET_YAML)
            main_config_dir = CORE.config_path.parent
            main_secret_yml = main_config_dir / SECRET_YAML
            secrets = self.yaml_loader(main_secret_yml)
        except EsphomeError as er:
            raise EsphomeError(f"{e}\n{er}") from er
@@ -329,7 +327,7 @@ class ESPHomeLoaderMixin:
        files = filter_yaml_files(_find_files(self._rel_path(node.value), "*.yaml"))
        mapping = OrderedDict()
        for fname in files:
            filename = os.path.splitext(os.path.basename(fname))[0]
            filename = fname.stem
            mapping[filename] = self.yaml_loader(fname)
        return mapping

@@ -350,9 +348,15 @@ class ESPHomeLoaderMixin:
        return Lambda(str(node.value))

    @_add_data_ref
    def construct_force(self, node: yaml.Node) -> ESPForceValue:
        obj = self.construct_scalar(node)
        return add_class_to_obj(obj, ESPForceValue)
    def construct_literal(self, node: yaml.Node) -> ESPLiteralValue:
        obj = None
        if isinstance(node, yaml.ScalarNode):
            obj = self.construct_scalar(node)
        elif isinstance(node, yaml.SequenceNode):
            obj = self.construct_sequence(node)
        elif isinstance(node, yaml.MappingNode):
            obj = self.construct_mapping(node)
        return add_class_to_obj(obj, ESPLiteralValue)

    @_add_data_ref
    def construct_extend(self, node: yaml.Node) -> Extend:
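`construct_literal` generalizes the old scalar-only `!force`: it dispatches on the node kind, so sequences and mappings get wrapped too. A standalone restatement of the dispatch, minus the ESPHome wrapper:

```python
import yaml

def construct_any(loader, node):
    # Same shape as construct_literal() above.
    if isinstance(node, yaml.ScalarNode):
        return loader.construct_scalar(node)
    if isinstance(node, yaml.SequenceNode):
        return loader.construct_sequence(node)
    if isinstance(node, yaml.MappingNode):
        return loader.construct_mapping(node)
    return None
```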
@@ -369,8 +373,8 @@ class ESPHomeLoader(ESPHomeLoaderMixin, FastestAvailableSafeLoader):
    def __init__(
        self,
        stream: TextIOBase | BytesIO,
        name: str,
        yaml_loader: Callable[[str], dict[str, Any]],
        name: Path,
        yaml_loader: Callable[[Path], dict[str, Any]],
    ) -> None:
        FastestAvailableSafeLoader.__init__(self, stream)
        ESPHomeLoaderMixin.__init__(self, name, yaml_loader)
@@ -382,8 +386,8 @@ class ESPHomePurePythonLoader(ESPHomeLoaderMixin, PurePythonLoader):
    def __init__(
        self,
        stream: TextIOBase | BytesIO,
        name: str,
        yaml_loader: Callable[[str], dict[str, Any]],
        name: Path,
        yaml_loader: Callable[[Path], dict[str, Any]],
    ) -> None:
        PurePythonLoader.__init__(self, stream)
        ESPHomeLoaderMixin.__init__(self, name, yaml_loader)
@@ -409,29 +413,29 @@ for _loader in (ESPHomeLoader, ESPHomePurePythonLoader):
        "!include_dir_merge_named", _loader.construct_include_dir_merge_named
    )
    _loader.add_constructor("!lambda", _loader.construct_lambda)
    _loader.add_constructor("!force", _loader.construct_force)
    _loader.add_constructor("!literal", _loader.construct_literal)
    _loader.add_constructor("!extend", _loader.construct_extend)
    _loader.add_constructor("!remove", _loader.construct_remove)


def load_yaml(fname: str, clear_secrets: bool = True) -> Any:
def load_yaml(fname: Path, clear_secrets: bool = True) -> Any:
    if clear_secrets:
        _SECRET_VALUES.clear()
        _SECRET_CACHE.clear()
    return _load_yaml_internal(fname)


def _load_yaml_internal(fname: str) -> Any:
def _load_yaml_internal(fname: Path) -> Any:
    """Load a YAML file."""
    try:
        with open(fname, encoding="utf-8") as f_handle:
        with fname.open(encoding="utf-8") as f_handle:
            return parse_yaml(fname, f_handle)
    except (UnicodeDecodeError, OSError) as err:
        raise EsphomeError(f"Error reading file {fname}: {err}") from err


def parse_yaml(
    file_name: str, file_handle: TextIOWrapper, yaml_loader=_load_yaml_internal
    file_name: Path, file_handle: TextIOWrapper, yaml_loader=_load_yaml_internal
) -> Any:
    """Parse a YAML file."""
    try:
@@ -483,9 +487,9 @@ def substitute_vars(config, vars):

def _load_yaml_internal_with_type(
    loader_type: type[ESPHomeLoader] | type[ESPHomePurePythonLoader],
    fname: str,
    fname: Path,
    content: TextIOWrapper,
    yaml_loader: Any,
    yaml_loader: Callable[[Path], dict[str, Any]],
) -> Any:
    """Load a YAML file."""
    loader = loader_type(content, fname, yaml_loader)
@@ -512,13 +516,14 @@ def _is_file_valid(name: str) -> bool:
    return not name.startswith(".")


def _find_files(directory, pattern):
def _find_files(directory: Path, pattern):
    """Recursively load files in a directory."""
    for root, dirs, files in os.walk(directory, topdown=True):
    for root, dirs, files in os.walk(directory):
        dirs[:] = [d for d in dirs if _is_file_valid(d)]
        for basename in files:
            if _is_file_valid(basename) and fnmatch.fnmatch(basename, pattern):
                filename = os.path.join(root, basename)
        for f in files:
            filename = Path(f)
            if _is_file_valid(f) and filename.match(pattern):
                filename = Path(root) / filename
                yield filename


@@ -627,3 +632,4 @@ ESPHomeDumper.add_multi_representer(TimePeriod, ESPHomeDumper.represent_stringif
ESPHomeDumper.add_multi_representer(Lambda, ESPHomeDumper.represent_lambda)
ESPHomeDumper.add_multi_representer(core.ID, ESPHomeDumper.represent_id)
ESPHomeDumper.add_multi_representer(uuid.UUID, ESPHomeDumper.represent_stringify)
ESPHomeDumper.add_multi_representer(Path, ESPHomeDumper.represent_stringify)
@@ -9,13 +9,14 @@ tzlocal==5.3.1  # from time
tzdata>=2021.1  # from time
pyserial==3.5
platformio==6.1.18  # When updating platformio, also update /docker/Dockerfile
esptool==5.0.2
esptool==5.1.0
click==8.1.7
esphome-dashboard==20250904.0
aioesphomeapi==41.1.0
aioesphomeapi==41.7.0
zeroconf==0.147.2
puremagic==1.30
ruamel.yaml==0.18.15  # dashboard_import
ruamel.yaml.clib==0.2.12  # dashboard_import
esphome-glyphsets==0.2.0
pillow==10.4.0
cairosvg==2.8.2
@@ -1,6 +1,6 @@
pylint==3.3.8
flake8==7.3.0  # also change in .pre-commit-config.yaml when updating
ruff==0.13.0  # also change in .pre-commit-config.yaml when updating
ruff==0.13.1  # also change in .pre-commit-config.yaml when updating
pyupgrade==3.20.0  # also change in .pre-commit-config.yaml when updating
pre-commit

@@ -3,7 +3,6 @@ from __future__ import annotations

from abc import ABC, abstractmethod
from enum import IntEnum
import os
from pathlib import Path
import re
from subprocess import call
@@ -1750,13 +1749,16 @@ def build_message_type(

    # Add estimated size constant
    estimated_size = calculate_message_estimated_size(desc)
    # Validate that estimated_size fits in uint8_t
    if estimated_size > 255:
        raise ValueError(
            f"Estimated size {estimated_size} for {desc.name} exceeds uint8_t maximum (255)"
        )
    # Use a type appropriate for estimated_size
    estimated_size_type = (
        "uint8_t"
        if estimated_size <= 255
        else "uint16_t"
        if estimated_size <= 65535
        else "size_t"
    )
    public_content.append(
        f"static constexpr uint8_t ESTIMATED_SIZE = {estimated_size};"
        f"static constexpr {estimated_size_type} ESTIMATED_SIZE = {estimated_size};"
    )

    # Add message_name method inline in header
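The chained conditional picks the narrowest C type that can represent the estimate, replacing the old hard failure above 255; in plain Python form (thresholds from the hunk above):

```python
def size_type(estimated_size: int) -> str:
    if estimated_size <= 255:
        return "uint8_t"
    if estimated_size <= 65535:
        return "uint16_t"
    return "size_t"

assert size_type(200) == "uint8_t"
assert size_type(1_024) == "uint16_t"
assert size_type(70_000) == "size_t"
```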
@@ -2701,8 +2703,8 @@ static const char *const TAG = "api.service";
    import clang_format

    def exec_clang_format(path: Path) -> None:
        clang_format_path = os.path.join(
            os.path.dirname(clang_format.__file__), "data", "bin", "clang-format"
        clang_format_path = (
            Path(clang_format.__file__).parent / "data" / "bin" / "clang-format"
        )
        call([clang_format_path, "-i", path])
@@ -39,7 +39,7 @@ esphome/core/* @esphome/core
parts = [BASE]

# Fake some directory so that get_component works
CORE.config_path = str(root)
CORE.config_path = root
CORE.data[KEY_CORE] = {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None}

codeowners = defaultdict(list)
@@ -1,9 +1,9 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import glob
|
||||
import inspect
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -70,14 +70,14 @@ def get_component_names():
|
||||
component_names = ["esphome", "sensor", "esp32", "esp8266"]
|
||||
skip_components = []
|
||||
|
||||
for d in os.listdir(CORE_COMPONENTS_PATH):
|
||||
for d in CORE_COMPONENTS_PATH.iterdir():
|
||||
if (
|
||||
not d.startswith("__")
|
||||
and os.path.isdir(os.path.join(CORE_COMPONENTS_PATH, d))
|
||||
and d not in component_names
|
||||
and d not in skip_components
|
||||
not d.name.startswith("__")
|
||||
and d.is_dir()
|
||||
and d.name not in component_names
|
||||
and d.name not in skip_components
|
||||
):
|
||||
component_names.append(d)
|
||||
component_names.append(d.name)
|
||||
|
||||
return sorted(component_names)
|
||||
|
||||
@@ -121,7 +121,7 @@ from esphome.util import Registry # noqa: E402
|
||||
|
||||
|
||||
def write_file(name, obj):
|
||||
full_path = os.path.join(args.output_path, name + ".json")
|
||||
full_path = Path(args.output_path) / f"{name}.json"
|
||||
if JSON_DUMP_PRETTY:
|
||||
json_str = json.dumps(obj, indent=2)
|
||||
else:
|
||||
@@ -131,9 +131,10 @@ def write_file(name, obj):
|
||||
|
||||
|
||||
def delete_extra_files(keep_names):
|
||||
for d in os.listdir(args.output_path):
|
||||
if d.endswith(".json") and d[:-5] not in keep_names:
|
||||
os.remove(os.path.join(args.output_path, d))
|
||||
output_path = Path(args.output_path)
|
||||
for d in output_path.iterdir():
|
||||
if d.suffix == ".json" and d.stem not in keep_names:
|
||||
d.unlink()
|
||||
print(f"Deleted {d}")
|
||||
|
||||
|
||||
@@ -367,13 +368,11 @@ def get_logger_tags():
         "scheduler",
         "api.service",
     ]
-    for x in os.walk(CORE_COMPONENTS_PATH):
-        for y in glob.glob(os.path.join(x[0], "*.cpp")):
-            with open(y, encoding="utf-8") as file:
-                data = file.read()
-                match = pattern.search(data)
-                if match:
-                    tags.append(match.group(1))
+    for file in CORE_COMPONENTS_PATH.rglob("*.cpp"):
+        data = file.read_text()
+        match = pattern.search(data)
+        if match:
+            tags.append(match.group(1))
     return tags
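Path.rglob("*.cpp") collapses the nested os.walk + glob.glob loops into one recursive iterator, which is why the glob import could be dropped from this script. A self-contained sketch of the same shape:

from pathlib import Path
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    src = Path(tmp) / "pkg" / "sub"
    src.mkdir(parents=True)
    (src / "a.cpp").write_text("// example")
    # One recursive iterator; each hit is a Path, so its contents can be
    # read directly with read_text() instead of open()/read().
    hits = [p.name for p in Path(tmp).rglob("*.cpp")]
    assert hits == ["a.cpp"]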
@@ -6,6 +6,7 @@ import collections
|
||||
import fnmatch
|
||||
import functools
|
||||
import os.path
|
||||
from pathlib import Path
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
@@ -75,12 +76,12 @@ ignore_types = (
 LINT_FILE_CHECKS = []
 LINT_CONTENT_CHECKS = []
 LINT_POST_CHECKS = []
-EXECUTABLE_BIT = {}
+EXECUTABLE_BIT: dict[str, int] = {}

-errors = collections.defaultdict(list)
+errors: collections.defaultdict[Path, list] = collections.defaultdict(list)


-def add_errors(fname, errs):
+def add_errors(fname: Path, errs: list[tuple[int, int, str] | None]) -> None:
     if not isinstance(errs, list):
         errs = [errs]
     for err in errs:
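These annotations use the PEP 585 style available on Python 3.9+, where built-in and collections types are subscripted directly instead of going through typing.Dict / typing.DefaultDict. A minimal sketch with illustrative values:

import collections
from pathlib import Path

bits: dict[str, int] = {"script/ci-custom.py": 100755}  # illustrative entry
errs: collections.defaultdict[Path, list] = collections.defaultdict(list)
errs[Path("esphome/core.py")].append((1, 1, "example lint message"))
assert len(errs[Path("esphome/core.py")]) == 1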
@@ -246,8 +247,8 @@ def lint_ext_check(fname):
         ".github/copilot-instructions.md",
     ]
 )
-def lint_executable_bit(fname):
-    ex = EXECUTABLE_BIT[fname]
+def lint_executable_bit(fname: Path) -> str | None:
+    ex = EXECUTABLE_BIT[str(fname)]
     if ex != 100644:
         return (
             f"File has invalid executable bit {ex}. If running from a windows machine please "
@@ -506,8 +507,8 @@ def lint_constants_usage():
     return errs


-def relative_cpp_search_text(fname, content):
-    parts = fname.split("/")
+def relative_cpp_search_text(fname: Path, content) -> str:
+    parts = fname.parts
     integration = parts[2]
     return f'#include "esphome/components/{integration}'
@@ -524,8 +525,8 @@ def lint_relative_cpp_import(fname, line, col, content):
     )


-def relative_py_search_text(fname, content):
-    parts = fname.split("/")
+def relative_py_search_text(fname: Path, content: str) -> str:
+    parts = fname.parts
     integration = parts[2]
     return f"esphome.components.{integration}"
@@ -591,10 +592,8 @@ def lint_relative_py_import(fname, line, col, content):
         "esphome/components/http_request/httplib.h",
     ],
 )
-def lint_namespace(fname, content):
-    expected_name = re.match(
-        r"^esphome/components/([^/]+)/.*", fname.replace(os.path.sep, "/")
-    ).group(1)
+def lint_namespace(fname: Path, content: str) -> str | None:
+    expected_name = fname.parts[2]
     # Check for both old style and C++17 nested namespace syntax
     search_old = f"namespace {expected_name}"
     search_new = f"namespace esphome::{expected_name}"
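fname.parts[2] works because every linted component file has the fixed prefix esphome/components/<name>/..., the same segment the removed regex captured. For example:

from pathlib import Path

fname = Path("esphome/components/wifi/wifi_component.cpp")
assert fname.parts == ("esphome", "components", "wifi", "wifi_component.cpp")
assert fname.parts[2] == "wifi"  # expected namespace for this file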
@@ -733,9 +732,9 @@ def main():
     files.sort()

     for fname in files:
-        _, ext = os.path.splitext(fname)
+        fname = Path(fname)
         run_checks(LINT_FILE_CHECKS, fname, fname)
-        if ext in ignore_types:
+        if fname.suffix in ignore_types:
             continue
         try:
             with codecs.open(fname, "r", encoding="utf-8") as f_handle:
@@ -52,10 +52,10 @@ def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str:
     return prefix + msg + suffix


-def print_error_for_file(file: str, body: str | None) -> None:
+def print_error_for_file(file: str | Path, body: str | None) -> None:
     print(
         styled(colorama.Fore.GREEN, "### File ")
-        + styled((colorama.Fore.GREEN, colorama.Style.BRIGHT), file)
+        + styled((colorama.Fore.GREEN, colorama.Style.BRIGHT), str(file))
     )
     print()
     if body is not None:
@@ -513,7 +513,7 @@ def get_all_dependencies(component_names: set[str]) -> set[str]:

     # Set up fake config path for component loading
     root = Path(__file__).parent.parent
-    CORE.config_path = str(root)
+    CORE.config_path = root
     CORE.data[KEY_CORE] = {}

     # Keep finding dependencies until no new ones are found
@@ -553,7 +553,7 @@ def get_components_from_integration_fixtures() -> set[str]:
     fixtures_dir = Path(__file__).parent.parent / "tests" / "integration" / "fixtures"

     for yaml_file in fixtures_dir.glob("*.yaml"):
-        config: dict[str, any] | None = yaml_util.load_yaml(str(yaml_file))
+        config: dict[str, any] | None = yaml_util.load_yaml(yaml_file)
         if not config:
             continue
@@ -50,7 +50,7 @@ def create_components_graph():
     root = Path(__file__).parent.parent
     components_dir = root / "esphome" / "components"
     # Fake some directory so that get_component works
-    CORE.config_path = str(root)
+    CORE.config_path = root
     # Various configuration to capture different outcomes used by `AUTO_LOAD` function.
     TARGET_CONFIGURATIONS = [
         {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None},
@@ -6,6 +6,7 @@ from collections.abc import Callable, Generator
 from pathlib import Path
 import sys
+from typing import Any
 from unittest import mock

 import pytest
@@ -17,6 +18,7 @@ from esphome.const import (
     PlatformFramework,
 )
 from esphome.types import ConfigType
+from esphome.util import OrderedDict

 # Add package root to python path
 here = Path(__file__).parent
@@ -40,9 +42,9 @@ def config_path(request: pytest.FixtureRequest) -> Generator[None]:
     if config_dir.exists():
         # Set config_path to a dummy yaml file in the config directory
         # This ensures CORE.config_dir points to the config directory
-        CORE.config_path = str(config_dir / "dummy.yaml")
+        CORE.config_path = config_dir / "dummy.yaml"
     else:
-        CORE.config_path = str(Path(request.fspath).parent / "dummy.yaml")
+        CORE.config_path = Path(request.fspath).parent / "dummy.yaml"

     yield
     CORE.config_path = original_path
@@ -129,9 +131,35 @@ def generate_main() -> Generator[Callable[[str | Path], str]]:
     """Generates the C++ main.cpp from a given yaml file and returns it in string form."""

     def generator(path: str | Path) -> str:
-        CORE.config_path = str(path)
+        CORE.config_path = Path(path)
         CORE.config = read_config({})
         generate_cpp_contents(CORE.config)
         return CORE.cpp_main_section

     yield generator
+
+
+@pytest.fixture
+def mock_clone_or_update() -> Generator[Any]:
+    """Mock git.clone_or_update for testing."""
+    with mock.patch("esphome.git.clone_or_update") as mock_func:
+        # Default return value
+        mock_func.return_value = (Path("/tmp/test"), None)
+        yield mock_func
+
+
+@pytest.fixture
+def mock_load_yaml() -> Generator[Any]:
+    """Mock yaml_util.load_yaml for testing."""
+
+    with mock.patch("esphome.yaml_util.load_yaml") as mock_func:
+        # Default return value
+        mock_func.return_value = OrderedDict({"sensor": []})
+        yield mock_func
+
+
+@pytest.fixture
+def mock_install_meta_finder() -> Generator[Any]:
+    """Mock loader.install_meta_finder for testing."""
+    with mock.patch("esphome.loader.install_meta_finder") as mock_func:
+        yield mock_func
134 tests/component_tests/external_components/test_init.py Normal file
@@ -0,0 +1,134 @@
+"""Tests for the external_components skip_update functionality."""
+
+from pathlib import Path
+from typing import Any
+from unittest.mock import MagicMock
+
+from esphome.components.external_components import do_external_components_pass
+from esphome.const import (
+    CONF_EXTERNAL_COMPONENTS,
+    CONF_REFRESH,
+    CONF_SOURCE,
+    CONF_URL,
+    TYPE_GIT,
+)
+
+
+def test_external_components_skip_update_true(
+    tmp_path: Path, mock_clone_or_update: MagicMock, mock_install_meta_finder: MagicMock
+) -> None:
+    """Test that external components don't update when skip_update=True."""
+    # Create a components directory structure
+    components_dir = tmp_path / "components"
+    components_dir.mkdir()
+
+    # Create a test component
+    test_component_dir = components_dir / "test_component"
+    test_component_dir.mkdir()
+    (test_component_dir / "__init__.py").write_text("# Test component")
+
+    # Set up mock to return our tmp_path
+    mock_clone_or_update.return_value = (tmp_path, None)
+
+    config: dict[str, Any] = {
+        CONF_EXTERNAL_COMPONENTS: [
+            {
+                CONF_SOURCE: {
+                    "type": TYPE_GIT,
+                    CONF_URL: "https://github.com/test/components",
+                },
+                CONF_REFRESH: "1d",
+                "components": "all",
+            }
+        ]
+    }
+
+    # Call with skip_update=True
+    do_external_components_pass(config, skip_update=True)
+
+    # Verify clone_or_update was called with NEVER_REFRESH
+    mock_clone_or_update.assert_called_once()
+    call_args = mock_clone_or_update.call_args
+    from esphome import git
+
+    assert call_args.kwargs["refresh"] == git.NEVER_REFRESH
+
+
+def test_external_components_skip_update_false(
+    tmp_path: Path, mock_clone_or_update: MagicMock, mock_install_meta_finder: MagicMock
+) -> None:
+    """Test that external components update when skip_update=False."""
+    # Create a components directory structure
+    components_dir = tmp_path / "components"
+    components_dir.mkdir()
+
+    # Create a test component
+    test_component_dir = components_dir / "test_component"
+    test_component_dir.mkdir()
+    (test_component_dir / "__init__.py").write_text("# Test component")
+
+    # Set up mock to return our tmp_path
+    mock_clone_or_update.return_value = (tmp_path, None)
+
+    config: dict[str, Any] = {
+        CONF_EXTERNAL_COMPONENTS: [
+            {
+                CONF_SOURCE: {
+                    "type": TYPE_GIT,
+                    CONF_URL: "https://github.com/test/components",
+                },
+                CONF_REFRESH: "1d",
+                "components": "all",
+            }
+        ]
+    }
+
+    # Call with skip_update=False
+    do_external_components_pass(config, skip_update=False)
+
+    # Verify clone_or_update was called with actual refresh value
+    mock_clone_or_update.assert_called_once()
+    call_args = mock_clone_or_update.call_args
+    from esphome.core import TimePeriodSeconds
+
+    assert call_args.kwargs["refresh"] == TimePeriodSeconds(days=1)
+
+
+def test_external_components_default_no_skip(
+    tmp_path: Path, mock_clone_or_update: MagicMock, mock_install_meta_finder: MagicMock
+) -> None:
+    """Test that external components update by default when skip_update not specified."""
+    # Create a components directory structure
+    components_dir = tmp_path / "components"
+    components_dir.mkdir()
+
+    # Create a test component
+    test_component_dir = components_dir / "test_component"
+    test_component_dir.mkdir()
+    (test_component_dir / "__init__.py").write_text("# Test component")
+
+    # Set up mock to return our tmp_path
+    mock_clone_or_update.return_value = (tmp_path, None)
+
+    config: dict[str, Any] = {
+        CONF_EXTERNAL_COMPONENTS: [
+            {
+                CONF_SOURCE: {
+                    "type": TYPE_GIT,
+                    CONF_URL: "https://github.com/test/components",
+                },
+                CONF_REFRESH: "1d",
+                "components": "all",
+            }
+        ]
+    }
+
+    # Call without skip_update parameter
+    do_external_components_pass(config)
+
+    # Verify clone_or_update was called with actual refresh value
+    mock_clone_or_update.assert_called_once()
+    call_args = mock_clone_or_update.call_args
+    from esphome.core import TimePeriodSeconds
+
+    assert call_args.kwargs["refresh"] == TimePeriodSeconds(days=1)
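Taken together, these tests pin down the contract: skip_update=True must force git.NEVER_REFRESH, while otherwise the configured interval ("1d", i.e. TimePeriodSeconds(days=1)) is passed through to clone_or_update. A hypothetical one-liner sketching that selection, not the actual implementation in either component:

from esphome import git


def effective_refresh(configured_refresh, skip_update: bool):
    # Hypothetical helper mirroring the behavior the assertions above verify:
    # skipping updates pins the checkout, otherwise honor the config value.
    return git.NEVER_REFRESH if skip_update else configured_refresh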
114 tests/component_tests/packages/test_init.py Normal file
@@ -0,0 +1,114 @@
+"""Tests for the packages component skip_update functionality."""
+
+from pathlib import Path
+from typing import Any
+from unittest.mock import MagicMock
+
+from esphome.components.packages import do_packages_pass
+from esphome.const import CONF_FILES, CONF_PACKAGES, CONF_REFRESH, CONF_URL
+from esphome.util import OrderedDict
+
+
+def test_packages_skip_update_true(
+    tmp_path: Path, mock_clone_or_update: MagicMock, mock_load_yaml: MagicMock
+) -> None:
+    """Test that packages don't update when skip_update=True."""
+    # Set up mock to return our tmp_path
+    mock_clone_or_update.return_value = (tmp_path, None)
+
+    # Create the test yaml file
+    test_file = tmp_path / "test.yaml"
+    test_file.write_text("sensor: []")
+
+    # Set mock_load_yaml to return some valid config
+    mock_load_yaml.return_value = OrderedDict({"sensor": []})
+
+    config: dict[str, Any] = {
+        CONF_PACKAGES: {
+            "test_package": {
+                CONF_URL: "https://github.com/test/repo",
+                CONF_FILES: ["test.yaml"],
+                CONF_REFRESH: "1d",
+            }
+        }
+    }
+
+    # Call with skip_update=True
+    do_packages_pass(config, skip_update=True)
+
+    # Verify clone_or_update was called with NEVER_REFRESH
+    mock_clone_or_update.assert_called_once()
+    call_args = mock_clone_or_update.call_args
+    from esphome import git
+
+    assert call_args.kwargs["refresh"] == git.NEVER_REFRESH
+
+
+def test_packages_skip_update_false(
+    tmp_path: Path, mock_clone_or_update: MagicMock, mock_load_yaml: MagicMock
+) -> None:
+    """Test that packages update when skip_update=False."""
+    # Set up mock to return our tmp_path
+    mock_clone_or_update.return_value = (tmp_path, None)
+
+    # Create the test yaml file
+    test_file = tmp_path / "test.yaml"
+    test_file.write_text("sensor: []")
+
+    # Set mock_load_yaml to return some valid config
+    mock_load_yaml.return_value = OrderedDict({"sensor": []})
+
+    config: dict[str, Any] = {
+        CONF_PACKAGES: {
+            "test_package": {
+                CONF_URL: "https://github.com/test/repo",
+                CONF_FILES: ["test.yaml"],
+                CONF_REFRESH: "1d",
+            }
+        }
+    }
+
+    # Call with skip_update=False (default)
+    do_packages_pass(config, skip_update=False)
+
+    # Verify clone_or_update was called with actual refresh value
+    mock_clone_or_update.assert_called_once()
+    call_args = mock_clone_or_update.call_args
+    from esphome.core import TimePeriodSeconds
+
+    assert call_args.kwargs["refresh"] == TimePeriodSeconds(days=1)
+
+
+def test_packages_default_no_skip(
+    tmp_path: Path, mock_clone_or_update: MagicMock, mock_load_yaml: MagicMock
+) -> None:
+    """Test that packages update by default when skip_update not specified."""
+    # Set up mock to return our tmp_path
+    mock_clone_or_update.return_value = (tmp_path, None)
+
+    # Create the test yaml file
+    test_file = tmp_path / "test.yaml"
+    test_file.write_text("sensor: []")
+
+    # Set mock_load_yaml to return some valid config
+    mock_load_yaml.return_value = OrderedDict({"sensor": []})
+
+    config: dict[str, Any] = {
+        CONF_PACKAGES: {
+            "test_package": {
+                CONF_URL: "https://github.com/test/repo",
+                CONF_FILES: ["test.yaml"],
+                CONF_REFRESH: "1d",
+            }
+        }
+    }
+
+    # Call without skip_update parameter
+    do_packages_pass(config)
+
+    # Verify clone_or_update was called with actual refresh value
+    mock_clone_or_update.assert_called_once()
+    call_args = mock_clone_or_update.call_args
+    from esphome.core import TimePeriodSeconds
+
+    assert call_args.kwargs["refresh"] == TimePeriodSeconds(days=1)
@@ -7,7 +7,7 @@ display:
   - platform: ssd1306_i2c
     id: ssd1306_display
    model: SSD1306_128X64
-    reset_pin: ${reset_pin}
+    reset_pin: ${display_reset_pin}
     pages:
       - id: page1
         lambda: |-
@@ -16,7 +16,7 @@ display:
 touchscreen:
   - platform: ektf2232
     interrupt_pin: ${interrupt_pin}
-    rts_pin: ${rts_pin}
+    reset_pin: ${touch_reset_pin}
     display: ssd1306_display
     on_touch:
       - logger.log:
@@ -1,8 +1,8 @@
 substitutions:
   scl_pin: GPIO16
   sda_pin: GPIO17
-  reset_pin: GPIO13
+  display_reset_pin: GPIO13
   interrupt_pin: GPIO14
-  rts_pin: GPIO15
+  touch_reset_pin: GPIO15

 <<: !include common.yaml
@@ -1,8 +1,8 @@
 substitutions:
   scl_pin: GPIO5
   sda_pin: GPIO4
-  reset_pin: GPIO3
+  display_reset_pin: GPIO3
   interrupt_pin: GPIO6
-  rts_pin: GPIO7
+  touch_reset_pin: GPIO7

 <<: !include common.yaml
@@ -1,8 +1,8 @@
 substitutions:
   scl_pin: GPIO5
   sda_pin: GPIO4
-  reset_pin: GPIO3
+  display_reset_pin: GPIO3
   interrupt_pin: GPIO6
-  rts_pin: GPIO7
+  touch_reset_pin: GPIO7

 <<: !include common.yaml
@@ -1,8 +1,8 @@
 substitutions:
   scl_pin: GPIO16
   sda_pin: GPIO17
-  reset_pin: GPIO13
+  display_reset_pin: GPIO13
   interrupt_pin: GPIO14
-  rts_pin: GPIO15
+  touch_reset_pin: GPIO15

 <<: !include common.yaml
Some files were not shown because too many files have changed in this diff.