Mirror of https://github.com/esphome/esphome.git, synced 2025-11-01 15:41:52 +00:00

Compare commits: 61 commits, authors from dependabot ... jesserockz.

Commits in this comparison:
6bf78e2e82, 40823df7bc, 5e1019a6fa, f3cdbd0a05, ddf1b67e49, b4d9fddd07, 25f03074ab, 590f6ff70b, a33ed5e47b, c11a9bb97f, acef2085d9, 865663ce5f, ae010fd6f1, 91a10d0e36, d5c36eaf2a, 85f1019d90, bfeade1e2b, b134d42e3b, b3b65316f0, b61cec8e77, 24243fb22c, ba6c8c87c2, f5774cc138, 6d09e68b2e, fe9db75c27, 2b832e9ee8, 661e9f9991, 39e23c323d, bdfbac0301, 9646653e57, c6c202e4f7, 62f73c768e, cd1215347e, b8353b3117, 5d3574c81f, 364e5ffd79, c38c2a1daf, 070b0882b8, 7e2ccb7bc3, 7f1173fcba, a75ccf841c, 56eb605ec9, 2c4818de00, 2b94de8732, f71aed3a5c, 353e097085, 14d76e9e4e, f2e0a412db, 6943b1d985, 18062d154f, 2b0b82b2fb, 3e1c8f37c5, 236ca12d3e, 42f1b61e31, 708f8a95e5, 10ca86ae8d, 22056e0809, fe4857fabb, 3054c2bc29, b190f37ae7, 817ee70db0
.github/workflows/ci.yml (vendored): 62 lines changed
@@ -379,7 +379,16 @@ jobs:

# Use intelligent splitter that groups components with same bus configs
components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}'
directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}'

# Only isolate directly changed components when targeting dev branch
# For beta/release branches, group everything for faster CI
if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
directly_changed='[]'
echo "Target branch: ${{ github.base_ref }} - grouping all components"
else
directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}'
echo "Target branch: ${{ github.base_ref }} - isolating directly changed components"
fi

echo "Splitting components intelligently..."
output=$(python3 script/split_components_for_ci.py --components "$components" --directly-changed "$directly_changed" --batch-size 40 --output github)

@@ -396,7 +405,7 @@ jobs:
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
strategy:
fail-fast: false
max-parallel: ${{ (github.base_ref == 'beta' || github.base_ref == 'release') && 8 || 4 }}
max-parallel: ${{ (startsWith(github.base_ref, 'beta') || startsWith(github.base_ref, 'release')) && 8 || 4 }}
matrix:
components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
steps:

@@ -424,18 +433,31 @@ jobs:
- name: Validate and compile components with intelligent grouping
run: |
. venv/bin/activate
# Use /mnt for build files (70GB available vs ~29GB on /)
# Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
sudo mkdir -p /mnt/platformio
sudo chown $USER:$USER /mnt/platformio
mkdir -p ~/.platformio
sudo mount --bind /mnt/platformio ~/.platformio

# Bind mount test build directory to /mnt
sudo mkdir -p /mnt/test_build_components_build
sudo chown $USER:$USER /mnt/test_build_components_build
mkdir -p tests/test_build_components/build
sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
# Check if /mnt has more free space than / before bind mounting
# Extract available space in KB for comparison
root_avail=$(df -k / | awk 'NR==2 {print $4}')
mnt_avail=$(df -k /mnt 2>/dev/null | awk 'NR==2 {print $4}')

echo "Available space: / has ${root_avail}KB, /mnt has ${mnt_avail}KB"

# Only use /mnt if it has more space than /
if [ -n "$mnt_avail" ] && [ "$mnt_avail" -gt "$root_avail" ]; then
echo "Using /mnt for build files (more space available)"
# Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
sudo mkdir -p /mnt/platformio
sudo chown $USER:$USER /mnt/platformio
mkdir -p ~/.platformio
sudo mount --bind /mnt/platformio ~/.platformio

# Bind mount test build directory to /mnt
sudo mkdir -p /mnt/test_build_components_build
sudo chown $USER:$USER /mnt/test_build_components_build
mkdir -p tests/test_build_components/build
sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
else
echo "Using / for build files (more space available than /mnt or /mnt unavailable)"
fi

# Convert space-separated components to comma-separated for Python script
components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')

@@ -448,7 +470,7 @@ jobs:
# - This catches pin conflicts and other issues in directly changed code
# - Grouped tests use --testing-mode to allow config merging (disables some checks)
# - Dependencies are safe to group since they weren't modified in this PR
if [ "${{ github.base_ref }}" = "beta" ] || [ "${{ github.base_ref }}" = "release" ]; then
if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
directly_changed_csv=""
echo "Testing components: $components_csv"
echo "Target branch: ${{ github.base_ref }} - grouping all components"

@@ -459,6 +481,11 @@ jobs:
fi
echo ""

# Show disk space before validation (after bind mounts setup)
echo "Disk space before config validation:"
df -h
echo ""

# Run config validation with grouping and isolation
python3 script/test_build_components.py -e config -c "$components_csv" -f --isolate "$directly_changed_csv"

@@ -466,6 +493,11 @@ jobs:
echo "Config validation passed! Starting compilation..."
echo ""

# Show disk space before compilation
echo "Disk space before compilation:"
df -h
echo ""

# Run compilation with grouping and isolation
python3 script/test_build_components.py -e compile -c "$components_csv" -f --isolate "$directly_changed_csv"

@@ -474,7 +506,7 @@ jobs:
runs-on: ubuntu-latest
needs:
- common
if: github.event_name == 'pull_request' && github.base_ref != 'beta' && github.base_ref != 'release'
if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
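The workflow change above stops bind-mounting /mnt unconditionally and instead compares free space on / and /mnt before deciding where build files go. A minimal sketch of that decision in Python (shutil-based; the function name and fallback behaviour are assumptions made for illustration, not part of the workflow):

import shutil

def pick_build_root(primary: str = "/", alternate: str = "/mnt") -> str:
    """Return the mount point with more free space, preferring the primary on ties or errors."""
    try:
        alt_free = shutil.disk_usage(alternate).free
    except OSError:
        return primary  # alternate mount unavailable, keep building on the primary
    root_free = shutil.disk_usage(primary).free
    return alternate if alt_free > root_free else primary

print(pick_build_root())  # prints "/mnt" only when it actually has more room than "/"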
.github/workflows/codeql.yml (vendored): 4 lines changed
@@ -58,7 +58,7 @@ jobs:

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}

@@ -86,6 +86,6 @@ jobs:
exit 1

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
with:
category: "/language:${{matrix.language}}"
@@ -11,7 +11,7 @@ ci:
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.0
rev: v0.14.1
hooks:
# Run the linter.
- id: ruff
@@ -62,6 +62,7 @@ esphome/components/bedjet/fan/* @jhansche
esphome/components/bedjet/sensor/* @javawizard @jhansche
esphome/components/beken_spi_led_strip/* @Mat931
esphome/components/bh1750/* @OttoWinter
esphome/components/bh1900nux/* @B48D81EFCC
esphome/components/binary_sensor/* @esphome/core
esphome/components/bk72xx/* @kuba2k2
esphome/components/bl0906/* @athom-tech @jesserockz @tarontop

@@ -69,6 +70,7 @@ esphome/components/bl0939/* @ziceva
esphome/components/bl0940/* @dan-s-github @tobias-
esphome/components/bl0942/* @dbuezas @dwmw2
esphome/components/ble_client/* @buxtronix @clydebarrow
esphome/components/ble_nus/* @tomaszduda23
esphome/components/bluetooth_proxy/* @bdraco @jesserockz
esphome/components/bme280_base/* @esphome/core
esphome/components/bme280_spi/* @apbodrov
@@ -117,6 +117,17 @@ class Purpose(StrEnum):
LOGGING = "logging"


class PortType(StrEnum):
SERIAL = "SERIAL"
NETWORK = "NETWORK"
MQTT = "MQTT"
MQTTIP = "MQTTIP"


# Magic MQTT port types that require special handling
_MQTT_PORT_TYPES = frozenset({PortType.MQTT, PortType.MQTTIP})


def _resolve_with_cache(address: str, purpose: Purpose) -> list[str]:
"""Resolve an address using cache if available, otherwise return the address itself."""
if CORE.address_cache and (cached := CORE.address_cache.get_addresses(address)):

@@ -280,16 +291,67 @@ def mqtt_get_ip(config: ConfigType, username: str, password: str, client_id: str
return mqtt.get_esphome_device_ip(config, username, password, client_id)


_PORT_TO_PORT_TYPE = {
"MQTT": "MQTT",
"MQTTIP": "MQTTIP",
}
def _resolve_network_devices(
devices: list[str], config: ConfigType, args: ArgsProtocol
) -> list[str]:
"""Resolve device list, converting MQTT magic strings to actual IP addresses.

This function filters the devices list to:
- Replace MQTT/MQTTIP magic strings with actual IP addresses via MQTT lookup
- Deduplicate addresses while preserving order
- Only resolve MQTT once even if multiple MQTT strings are present
- If MQTT resolution fails, log a warning and continue with other devices

Args:
devices: List of device identifiers (IPs, hostnames, or magic strings)
config: ESPHome configuration
args: Command-line arguments containing MQTT credentials

Returns:
List of network addresses suitable for connection attempts
"""
network_devices: list[str] = []
mqtt_resolved: bool = False

for device in devices:
port_type = get_port_type(device)
if port_type in _MQTT_PORT_TYPES:
# Only resolve MQTT once, even if multiple MQTT entries
if not mqtt_resolved:
try:
mqtt_ips = mqtt_get_ip(
config, args.username, args.password, args.client_id
)
network_devices.extend(mqtt_ips)
except EsphomeError as err:
_LOGGER.warning(
"MQTT IP discovery failed (%s), will try other devices if available",
err,
)
mqtt_resolved = True
elif device not in network_devices:
# Regular network address or IP - add if not already present
network_devices.append(device)

return network_devices
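In the new helper above, MQTT magic strings collapse to a single broker lookup and duplicate addresses are dropped while order is preserved. A minimal standalone sketch of that filtering behaviour (the resolver callback stands in for mqtt_get_ip; the names here are illustrative, not ESPHome APIs):

from typing import Callable

def resolve_devices(devices: list[str], mqtt_lookup: Callable[[], list[str]]) -> list[str]:
    resolved: list[str] = []
    mqtt_done = False
    for dev in devices:
        if dev in ("MQTT", "MQTTIP"):
            if not mqtt_done:            # the broker is queried at most once
                try:
                    resolved.extend(mqtt_lookup())
                except RuntimeError:
                    pass                 # keep going with the remaining devices
                mqtt_done = True
        elif dev not in resolved:        # de-duplicate, keep first-seen order
            resolved.append(dev)
    return resolved

print(resolve_devices(["MQTT", "192.168.1.10", "MQTT", "192.168.1.10"],
                      lambda: ["192.168.1.23"]))
# ['192.168.1.23', '192.168.1.10']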
def get_port_type(port: str) -> str:
def get_port_type(port: str) -> PortType:
"""Determine the type of port/device identifier.

Returns:
PortType.SERIAL for serial ports (/dev/ttyUSB0, COM1, etc.)
PortType.MQTT for MQTT logging
PortType.MQTTIP for MQTT IP lookup
PortType.NETWORK for IP addresses, hostnames, or mDNS names
"""
if port.startswith("/") or port.startswith("COM"):
return "SERIAL"
return _PORT_TO_PORT_TYPE.get(port, "NETWORK")
return PortType.SERIAL
if port == "MQTT":
return PortType.MQTT
if port == "MQTTIP":
return PortType.MQTTIP
return PortType.NETWORK
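The rewritten classifier above returns a PortType enum instead of bare strings, which keeps comparisons typo-safe at the call sites further down. A small self-contained reimplementation with the same classification rules (illustrative only, not the ESPHome module):

from enum import Enum

class PortType(str, Enum):
    SERIAL = "SERIAL"
    NETWORK = "NETWORK"
    MQTT = "MQTT"
    MQTTIP = "MQTTIP"

def classify(port: str) -> PortType:
    if port.startswith("/") or port.startswith("COM"):
        return PortType.SERIAL           # /dev/ttyUSB0, COM3, ...
    if port == "MQTT":
        return PortType.MQTT             # MQTT log streaming
    if port == "MQTTIP":
        return PortType.MQTTIP           # IP discovery over MQTT
    return PortType.NETWORK              # IP, hostname, or mDNS name

assert classify("/dev/ttyUSB0") is PortType.SERIAL
assert classify("COM3") is PortType.SERIAL
assert classify("MQTTIP") is PortType.MQTTIP
assert classify("livingroom.local") is PortType.NETWORK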
def run_miniterm(config: ConfigType, port: str, args) -> int:

@@ -489,7 +551,7 @@ def upload_using_platformio(config: ConfigType, port: str):


def check_permissions(port: str):
if os.name == "posix" and get_port_type(port) == "SERIAL":
if os.name == "posix" and get_port_type(port) == PortType.SERIAL:
# Check if we can open selected serial port
if not os.access(port, os.F_OK):
raise EsphomeError(

@@ -517,7 +579,7 @@ def upload_program(
except AttributeError:
pass

if get_port_type(host) == "SERIAL":
if get_port_type(host) == PortType.SERIAL:
check_permissions(host)

exit_code = 1

@@ -544,17 +606,16 @@ def upload_program(
from esphome import espota2

remote_port = int(ota_conf[CONF_PORT])
password = ota_conf.get(CONF_PASSWORD, "")
password = ota_conf.get(CONF_PASSWORD)
if getattr(args, "file", None) is not None:
binary = Path(args.file)
else:
binary = CORE.firmware_bin

# MQTT address resolution
if get_port_type(host) in ("MQTT", "MQTTIP"):
devices = mqtt_get_ip(config, args.username, args.password, args.client_id)
# Resolve MQTT magic strings to actual IP addresses
network_devices = _resolve_network_devices(devices, config, args)

return espota2.run_ota(devices, remote_port, password, binary)
return espota2.run_ota(network_devices, remote_port, password, binary)


def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int | None:

@@ -569,33 +630,22 @@ def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int
raise EsphomeError("Logger is not configured!")

port = devices[0]
port_type = get_port_type(port)

if get_port_type(port) == "SERIAL":
if port_type == PortType.SERIAL:
check_permissions(port)
return run_miniterm(config, port, args)

port_type = get_port_type(port)

# Check if we should use API for logging
if has_api():
addresses_to_use: list[str] | None = None
# Resolve MQTT magic strings to actual IP addresses
if has_api() and (
network_devices := _resolve_network_devices(devices, config, args)
):
from esphome.components.api.client import run_logs

if port_type == "NETWORK":
# Network addresses (IPs, mDNS names, or regular DNS hostnames) can be used
# The resolve_ip_address() function in helpers.py handles all types
addresses_to_use = devices
elif port_type in ("MQTT", "MQTTIP") and has_mqtt_ip_lookup():
# Use MQTT IP lookup for MQTT/MQTTIP types
addresses_to_use = mqtt_get_ip(
config, args.username, args.password, args.client_id
)
return run_logs(config, network_devices)

if addresses_to_use is not None:
from esphome.components.api.client import run_logs

return run_logs(config, addresses_to_use)

if port_type in ("NETWORK", "MQTT") and has_mqtt_logging():
if port_type in (PortType.NETWORK, PortType.MQTT) and has_mqtt_logging():
from esphome import mqtt

return mqtt.show_logs(
@@ -380,12 +380,19 @@ async def homeassistant_service_to_code(
var = cg.new_Pvariable(action_id, template_arg, serv, False)
templ = await cg.templatable(config[CONF_ACTION], args, None)
cg.add(var.set_service(templ))

# Initialize FixedVectors with exact sizes from config
cg.add(var.init_data(len(config[CONF_DATA])))
for key, value in config[CONF_DATA].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_data(key, templ))

cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
for key, value in config[CONF_DATA_TEMPLATE].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_data_template(key, templ))

cg.add(var.init_variables(len(config[CONF_VARIABLES])))
for key, value in config[CONF_VARIABLES].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_variable(key, templ))

@@ -458,15 +465,23 @@ async def homeassistant_event_to_code(config, action_id, template_arg, args):
var = cg.new_Pvariable(action_id, template_arg, serv, True)
templ = await cg.templatable(config[CONF_EVENT], args, None)
cg.add(var.set_service(templ))

# Initialize FixedVectors with exact sizes from config
cg.add(var.init_data(len(config[CONF_DATA])))
for key, value in config[CONF_DATA].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_data(key, templ))

cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
for key, value in config[CONF_DATA_TEMPLATE].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_data_template(key, templ))

cg.add(var.init_variables(len(config[CONF_VARIABLES])))
for key, value in config[CONF_VARIABLES].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_variable(key, templ))

return var


@@ -489,6 +504,8 @@ async def homeassistant_tag_scanned_to_code(config, action_id, template_arg, arg
serv = await cg.get_variable(config[CONF_ID])
var = cg.new_Pvariable(action_id, template_arg, serv, True)
cg.add(var.set_service("esphome.tag_scanned"))
# Initialize FixedVector with exact size (1 data field)
cg.add(var.init_data(1))
templ = await cg.templatable(config[CONF_TAG], args, cg.std_string)
cg.add(var.add_data("tag_id", templ))
return var
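The codegen above sizes each FixedVector before any add_* call, so the generated C++ never grows storage at runtime. A minimal Python model of that init-then-fill contract (purely illustrative; the real FixedVector is the C++ container used elsewhere in this diff, and the key/value pairs are hypothetical):

class FixedVector:
    """Toy model: capacity is set once by init(); emplace_back() must stay within it."""
    def __init__(self) -> None:
        self._capacity = 0
        self._items: list = []
    def init(self, count: int) -> None:
        self._capacity = count
        self._items = []
    def emplace_back(self, item) -> None:
        if len(self._items) >= self._capacity:
            raise RuntimeError("capacity exceeded; init() must be called with the exact size first")
        self._items.append(item)

data = FixedVector()
data.init(2)                                          # len(config[CONF_DATA]) known at codegen time
data.emplace_back(("entity_id", "light.kitchen"))     # hypothetical key/value pair
data.emplace_back(("brightness", "50"))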
@@ -876,10 +876,10 @@ message ExecuteServiceArgument {
string string_ = 4;
// ESPHome 1.14 (api v1.3) make int a signed value
sint32 int_ = 5;
repeated bool bool_array = 6 [packed=false];
repeated sint32 int_array = 7 [packed=false];
repeated float float_array = 8 [packed=false];
repeated string string_array = 9;
repeated bool bool_array = 6 [packed=false, (fixed_vector) = true];
repeated sint32 int_array = 7 [packed=false, (fixed_vector) = true];
repeated float float_array = 8 [packed=false, (fixed_vector) = true];
repeated string string_array = 9 [(fixed_vector) = true];
}
message ExecuteServiceRequest {
option (id) = 42;

@@ -888,7 +888,7 @@ message ExecuteServiceRequest {
option (ifdef) = "USE_API_SERVICES";

fixed32 key = 1;
repeated ExecuteServiceArgument args = 2;
repeated ExecuteServiceArgument args = 2 [(fixed_vector) = true];
}

// ==================== CAMERA ====================

@@ -987,8 +987,8 @@ message ListEntitiesClimateResponse {
string name = 3;
reserved 4; // Deprecated: was string unique_id

bool supports_current_temperature = 5;
bool supports_two_point_target_temperature = 6;
bool supports_current_temperature = 5; // Deprecated: use feature_flags
bool supports_two_point_target_temperature = 6; // Deprecated: use feature_flags
repeated ClimateMode supported_modes = 7 [(container_pointer) = "std::set<climate::ClimateMode>"];
float visual_min_temperature = 8;
float visual_max_temperature = 9;

@@ -997,7 +997,7 @@ message ListEntitiesClimateResponse {
// is if CLIMATE_PRESET_AWAY exists is supported_presets
// Deprecated in API version 1.5
bool legacy_supports_away = 11 [deprecated=true];
bool supports_action = 12;
bool supports_action = 12; // Deprecated: use feature_flags
repeated ClimateFanMode supported_fan_modes = 13 [(container_pointer) = "std::set<climate::ClimateFanMode>"];
repeated ClimateSwingMode supported_swing_modes = 14 [(container_pointer) = "std::set<climate::ClimateSwingMode>"];
repeated string supported_custom_fan_modes = 15 [(container_pointer) = "std::set"];

@@ -1007,11 +1007,12 @@ message ListEntitiesClimateResponse {
string icon = 19 [(field_ifdef) = "USE_ENTITY_ICON"];
EntityCategory entity_category = 20;
float visual_current_temperature_step = 21;
bool supports_current_humidity = 22;
bool supports_target_humidity = 23;
bool supports_current_humidity = 22; // Deprecated: use feature_flags
bool supports_target_humidity = 23; // Deprecated: use feature_flags
float visual_min_humidity = 24;
float visual_max_humidity = 25;
uint32 device_id = 26 [(field_ifdef) = "USE_DEVICES"];
uint32 feature_flags = 27;
}
message ClimateStateResponse {
option (id) = 47;
@@ -27,6 +27,9 @@
#ifdef USE_BLUETOOTH_PROXY
#include "esphome/components/bluetooth_proxy/bluetooth_proxy.h"
#endif
#ifdef USE_CLIMATE
#include "esphome/components/climate/climate_mode.h"
#endif
#ifdef USE_VOICE_ASSISTANT
#include "esphome/components/voice_assistant/voice_assistant.h"
#endif

@@ -623,9 +626,10 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
auto traits = climate->get_traits();
resp.mode = static_cast<enums::ClimateMode>(climate->mode);
resp.action = static_cast<enums::ClimateAction>(climate->action);
if (traits.get_supports_current_temperature())
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE))
resp.current_temperature = climate->current_temperature;
if (traits.get_supports_two_point_target_temperature()) {
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
resp.target_temperature_low = climate->target_temperature_low;
resp.target_temperature_high = climate->target_temperature_high;
} else {

@@ -644,9 +648,9 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
}
if (traits.get_supports_swing_modes())
resp.swing_mode = static_cast<enums::ClimateSwingMode>(climate->swing_mode);
if (traits.get_supports_current_humidity())
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY))
resp.current_humidity = climate->current_humidity;
if (traits.get_supports_target_humidity())
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY))
resp.target_humidity = climate->target_humidity;
return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);

@@ -656,10 +660,15 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
auto *climate = static_cast<climate::Climate *>(entity);
ListEntitiesClimateResponse msg;
auto traits = climate->get_traits();
msg.supports_current_temperature = traits.get_supports_current_temperature();
msg.supports_current_humidity = traits.get_supports_current_humidity();
msg.supports_two_point_target_temperature = traits.get_supports_two_point_target_temperature();
msg.supports_target_humidity = traits.get_supports_target_humidity();
// Flags set for backward compatibility, deprecated in 2025.11.0
msg.supports_current_temperature = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
msg.supports_current_humidity = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
msg.supports_two_point_target_temperature = traits.has_feature_flags(
climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
msg.supports_target_humidity = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);
msg.supports_action = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION);
// Current feature flags and other supported parameters
msg.feature_flags = traits.get_feature_flags();
msg.supported_modes = &traits.get_supported_modes_for_api_();
msg.visual_min_temperature = traits.get_visual_min_temperature();
msg.visual_max_temperature = traits.get_visual_max_temperature();

@@ -667,7 +676,6 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
msg.visual_current_temperature_step = traits.get_visual_current_temperature_step();
msg.visual_min_humidity = traits.get_visual_min_humidity();
msg.visual_max_humidity = traits.get_visual_max_humidity();
msg.supports_action = traits.get_supports_action();
msg.supported_fan_modes = &traits.get_supported_fan_modes_for_api_();
msg.supported_custom_fan_modes = &traits.get_supported_custom_fan_modes_for_api_();
msg.supported_presets = &traits.get_supported_presets_for_api_();

@@ -1406,7 +1414,7 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) {

HelloResponse resp;
resp.api_version_major = 1;
resp.api_version_minor = 12;
resp.api_version_minor = 13;
// Send only the version string - the client only logs this for debugging and doesn't use it otherwise
resp.set_server_info(ESPHOME_VERSION_REF);
resp.set_name(StringRef(App.get_name()));
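The climate changes above fold the individual supports_* booleans into one feature_flags bitmask that both the state and info messages consult. A small Python illustration of how such a mask answers several capability questions at once (the bit positions below are made up for the example, not the real climate enum values):

CLIMATE_SUPPORTS_CURRENT_TEMPERATURE = 1 << 0            # hypothetical bit assignments
CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE = 1 << 1
CLIMATE_SUPPORTS_ACTION = 1 << 2
CLIMATE_SUPPORTS_CURRENT_HUMIDITY = 1 << 3

def has_feature_flags(feature_flags: int, mask: int) -> bool:
    """True when any bit of the mask is set, mirroring the OR-style checks above."""
    return (feature_flags & mask) != 0

traits = CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | CLIMATE_SUPPORTS_ACTION
assert has_feature_flags(traits, CLIMATE_SUPPORTS_ACTION)
assert not has_feature_flags(traits, CLIMATE_SUPPORTS_CURRENT_HUMIDITY)
# An OR'd mask passes when either capability is present:
assert has_feature_flags(traits, CLIMATE_SUPPORTS_CURRENT_TEMPERATURE
                         | CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE)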
@@ -242,7 +242,6 @@ APIError APINoiseFrameHelper::state_action_() {
const std::string &name = App.get_name();
const std::string &mac = get_mac_address();

std::vector<uint8_t> msg;
// Calculate positions and sizes
size_t name_len = name.size() + 1; // including null terminator
size_t mac_len = mac.size() + 1; // including null terminator

@@ -250,17 +249,17 @@ APIError APINoiseFrameHelper::state_action_() {
size_t mac_offset = name_offset + name_len;
size_t total_size = 1 + name_len + mac_len;

msg.resize(total_size);
auto msg = std::make_unique<uint8_t[]>(total_size);

// chosen proto
msg[0] = 0x01;

// node name, terminated by null byte
std::memcpy(msg.data() + name_offset, name.c_str(), name_len);
std::memcpy(msg.get() + name_offset, name.c_str(), name_len);
// node mac, terminated by null byte
std::memcpy(msg.data() + mac_offset, mac.c_str(), mac_len);
std::memcpy(msg.get() + mac_offset, mac.c_str(), mac_len);

aerr = write_frame_(msg.data(), msg.size());
aerr = write_frame_(msg.get(), total_size);
if (aerr != APIError::OK)
return aerr;

@@ -339,32 +338,32 @@ void APINoiseFrameHelper::send_explicit_handshake_reject_(const LogString *reaso
#ifdef USE_STORE_LOG_STR_IN_FLASH
// On ESP8266 with flash strings, we need to use PROGMEM-aware functions
size_t reason_len = strlen_P(reinterpret_cast<PGM_P>(reason));
std::vector<uint8_t> data;
data.resize(reason_len + 1);
size_t data_size = reason_len + 1;
auto data = std::make_unique<uint8_t[]>(data_size);
data[0] = 0x01; // failure

// Copy error message from PROGMEM
if (reason_len > 0) {
memcpy_P(data.data() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
memcpy_P(data.get() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
}
#else
// Normal memory access
const char *reason_str = LOG_STR_ARG(reason);
size_t reason_len = strlen(reason_str);
std::vector<uint8_t> data;
data.resize(reason_len + 1);
size_t data_size = reason_len + 1;
auto data = std::make_unique<uint8_t[]>(data_size);
data[0] = 0x01; // failure

// Copy error message in bulk
if (reason_len > 0) {
std::memcpy(data.data() + 1, reason_str, reason_len);
std::memcpy(data.get() + 1, reason_str, reason_len);
}
#endif

// temporarily remove failed state
auto orig_state = state_;
state_ = State::EXPLICIT_REJECT;
write_frame_(data.data(), data.size());
write_frame_(data.get(), data_size);
state_ = orig_state;
}
APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
@@ -1064,6 +1064,17 @@ bool ExecuteServiceArgument::decode_32bit(uint32_t field_id, Proto32Bit value) {
}
return true;
}
void ExecuteServiceArgument::decode(const uint8_t *buffer, size_t length) {
uint32_t count_bool_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 6);
this->bool_array.init(count_bool_array);
uint32_t count_int_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 7);
this->int_array.init(count_int_array);
uint32_t count_float_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 8);
this->float_array.init(count_float_array);
uint32_t count_string_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 9);
this->string_array.init(count_string_array);
ProtoDecodableMessage::decode(buffer, length);
}
bool ExecuteServiceRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 2:

@@ -1085,6 +1096,11 @@ bool ExecuteServiceRequest::decode_32bit(uint32_t field_id, Proto32Bit value) {
}
return true;
}
void ExecuteServiceRequest::decode(const uint8_t *buffer, size_t length) {
uint32_t count_args = ProtoDecodableMessage::count_repeated_field(buffer, length, 2);
this->args.init(count_args);
ProtoDecodableMessage::decode(buffer, length);
}
#endif
#ifdef USE_CAMERA
void ListEntitiesCameraResponse::encode(ProtoWriteBuffer buffer) const {

@@ -1185,6 +1201,7 @@ void ListEntitiesClimateResponse::encode(ProtoWriteBuffer buffer) const {
#ifdef USE_DEVICES
buffer.encode_uint32(26, this->device_id);
#endif
buffer.encode_uint32(27, this->feature_flags);
}
void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
size.add_length(1, this->object_id_ref_.size());

@@ -1239,6 +1256,7 @@ void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
#ifdef USE_DEVICES
size.add_uint32(2, this->device_id);
#endif
size.add_uint32(2, this->feature_flags);
}
void ClimateStateResponse::encode(ProtoWriteBuffer buffer) const {
buffer.encode_fixed32(1, this->key);
@@ -1279,10 +1279,11 @@ class ExecuteServiceArgument final : public ProtoDecodableMessage {
float float_{0.0f};
std::string string_{};
int32_t int_{0};
std::vector<bool> bool_array{};
std::vector<int32_t> int_array{};
std::vector<float> float_array{};
std::vector<std::string> string_array{};
FixedVector<bool> bool_array{};
FixedVector<int32_t> int_array{};
FixedVector<float> float_array{};
FixedVector<std::string> string_array{};
void decode(const uint8_t *buffer, size_t length) override;
#ifdef HAS_PROTO_MESSAGE_DUMP
void dump_to(std::string &out) const override;
#endif

@@ -1300,7 +1301,8 @@ class ExecuteServiceRequest final : public ProtoDecodableMessage {
const char *message_name() const override { return "execute_service_request"; }
#endif
uint32_t key{0};
std::vector<ExecuteServiceArgument> args{};
FixedVector<ExecuteServiceArgument> args{};
void decode(const uint8_t *buffer, size_t length) override;
#ifdef HAS_PROTO_MESSAGE_DUMP
void dump_to(std::string &out) const override;
#endif

@@ -1369,7 +1371,7 @@ class CameraImageRequest final : public ProtoDecodableMessage {
class ListEntitiesClimateResponse final : public InfoResponseProtoMessage {
public:
static constexpr uint8_t MESSAGE_TYPE = 46;
static constexpr uint8_t ESTIMATED_SIZE = 145;
static constexpr uint8_t ESTIMATED_SIZE = 150;
#ifdef HAS_PROTO_MESSAGE_DUMP
const char *message_name() const override { return "list_entities_climate_response"; }
#endif

@@ -1390,6 +1392,7 @@ class ListEntitiesClimateResponse final : public InfoResponseProtoMessage {
bool supports_target_humidity{false};
float visual_min_humidity{0.0f};
float visual_max_humidity{0.0f};
uint32_t feature_flags{0};
void encode(ProtoWriteBuffer buffer) const override;
void calculate_size(ProtoSize &size) const override;
#ifdef HAS_PROTO_MESSAGE_DUMP
@@ -1292,6 +1292,7 @@ void ListEntitiesClimateResponse::dump_to(std::string &out) const {
#ifdef USE_DEVICES
dump_field(out, "device_id", this->device_id);
#endif
dump_field(out, "feature_flags", this->feature_flags);
}
void ClimateStateResponse::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "ClimateStateResponse");
@@ -41,10 +41,14 @@ template<typename... X> class TemplatableStringValue : public TemplatableValue<s

template<typename... Ts> class TemplatableKeyValuePair {
public:
// Default constructor needed for FixedVector::emplace_back()
TemplatableKeyValuePair() = default;

// Keys are always string literals from YAML dictionary keys (e.g., "code", "event")
// and never templatable values or lambdas. Only the value parameter can be a lambda/template.
// Using pass-by-value with std::move allows optimal performance for both lvalues and rvalues.
template<typename T> TemplatableKeyValuePair(std::string key, T value) : key(std::move(key)), value(value) {}

std::string key;
TemplatableStringValue<Ts...> value;
};

@@ -93,15 +97,22 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts

template<typename T> void set_service(T service) { this->service_ = service; }

// Initialize FixedVector members - called from Python codegen with compile-time known sizes.
// Must be called before any add_* methods; capacity must match the number of subsequent add_* calls.
void init_data(size_t count) { this->data_.init(count); }
void init_data_template(size_t count) { this->data_template_.init(count); }
void init_variables(size_t count) { this->variables_.init(count); }

// Keys are always string literals from the Python code generation (e.g., cg.add(var.add_data("tag_id", templ))).
// The value parameter can be a lambda/template, but keys are never templatable.
// Using pass-by-value allows the compiler to optimize for both lvalues and rvalues.
template<typename T> void add_data(std::string key, T value) { this->data_.emplace_back(std::move(key), value); }
template<typename T> void add_data_template(std::string key, T value) {
this->data_template_.emplace_back(std::move(key), value);
template<typename K, typename V> void add_data(K &&key, V &&value) {
this->add_kv_(this->data_, std::forward<K>(key), std::forward<V>(value));
}
template<typename T> void add_variable(std::string key, T value) {
this->variables_.emplace_back(std::move(key), value);
template<typename K, typename V> void add_data_template(K &&key, V &&value) {
this->add_kv_(this->data_template_, std::forward<K>(key), std::forward<V>(value));
}
template<typename K, typename V> void add_variable(K &&key, V &&value) {
this->add_kv_(this->variables_, std::forward<K>(key), std::forward<V>(value));
}

#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES

@@ -174,6 +185,13 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts
}

protected:
// Helper to add key-value pairs to FixedVectors with perfect forwarding to avoid copies
template<typename K, typename V> void add_kv_(FixedVector<TemplatableKeyValuePair<Ts...>> &vec, K &&key, V &&value) {
auto &kv = vec.emplace_back();
kv.key = std::forward<K>(key);
kv.value = std::forward<V>(value);
}

template<typename VectorType, typename SourceType>
static void populate_service_map(VectorType &dest, SourceType &source, Ts... x) {
dest.init(source.size());

@@ -186,9 +204,9 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts

APIServer *parent_;
TemplatableStringValue<Ts...> service_{};
std::vector<TemplatableKeyValuePair<Ts...>> data_;
std::vector<TemplatableKeyValuePair<Ts...>> data_template_;
std::vector<TemplatableKeyValuePair<Ts...>> variables_;
FixedVector<TemplatableKeyValuePair<Ts...>> data_;
FixedVector<TemplatableKeyValuePair<Ts...>> data_template_;
FixedVector<TemplatableKeyValuePair<Ts...>> variables_;
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
TemplatableStringValue<Ts...> response_template_{""};
@@ -7,6 +7,69 @@ namespace esphome::api {

static const char *const TAG = "api.proto";

uint32_t ProtoDecodableMessage::count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id) {
uint32_t count = 0;
const uint8_t *ptr = buffer;
const uint8_t *end = buffer + length;

while (ptr < end) {
uint32_t consumed;

// Parse field header (tag)
auto res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
if (!res.has_value()) {
break; // Invalid data, stop counting
}

uint32_t tag = res->as_uint32();
uint32_t field_type = tag & WIRE_TYPE_MASK;
uint32_t field_id = tag >> 3;
ptr += consumed;

// Count if this is the target field
if (field_id == target_field_id) {
count++;
}

// Skip field data based on wire type
switch (field_type) {
case WIRE_TYPE_VARINT: { // VarInt - parse and skip
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
if (!res.has_value()) {
return count; // Invalid data, return what we have
}
ptr += consumed;
break;
}
case WIRE_TYPE_LENGTH_DELIMITED: { // Length-delimited - parse length and skip data
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
if (!res.has_value()) {
return count;
}
uint32_t field_length = res->as_uint32();
ptr += consumed;
if (ptr + field_length > end) {
return count; // Out of bounds
}
ptr += field_length;
break;
}
case WIRE_TYPE_FIXED32: { // 32-bit - skip 4 bytes
if (ptr + 4 > end) {
return count;
}
ptr += 4;
break;
}
default:
// Unknown wire type, can't continue
return count;
}
}

return count;
}

void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
const uint8_t *ptr = buffer;
const uint8_t *end = buffer + length;

@@ -22,12 +85,12 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
}

uint32_t tag = res->as_uint32();
uint32_t field_type = tag & 0b111;
uint32_t field_type = tag & WIRE_TYPE_MASK;
uint32_t field_id = tag >> 3;
ptr += consumed;

switch (field_type) {
case 0: { // VarInt
case WIRE_TYPE_VARINT: { // VarInt
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
if (!res.has_value()) {
ESP_LOGV(TAG, "Invalid VarInt at offset %ld", (long) (ptr - buffer));

@@ -39,7 +102,7 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
ptr += consumed;
break;
}
case 2: { // Length-delimited
case WIRE_TYPE_LENGTH_DELIMITED: { // Length-delimited
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
if (!res.has_value()) {
ESP_LOGV(TAG, "Invalid Length Delimited at offset %ld", (long) (ptr - buffer));

@@ -57,7 +120,7 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
ptr += field_length;
break;
}
case 5: { // 32-bit
case WIRE_TYPE_FIXED32: { // 32-bit
if (ptr + 4 > end) {
ESP_LOGV(TAG, "Out-of-bounds Fixed32-bit at offset %ld", (long) (ptr - buffer));
return;
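count_repeated_field above makes a first, tag-only pass so each FixedVector can be init()'d to the exact element count before the real decode pass fills it. A compact Python sketch of that counting scan over protobuf wire format (a standalone illustration, not the ESPHome implementation):

def count_repeated_field(buf: bytes, target_field_id: int) -> int:
    """Count occurrences of a field by walking tags and skipping payloads."""
    def read_varint(i: int) -> tuple[int, int]:
        value = shift = 0
        while True:
            b = buf[i]; i += 1
            value |= (b & 0x7F) << shift
            if not b & 0x80:
                return value, i
            shift += 7
    count, i = 0, 0
    while i < len(buf):
        tag, i = read_varint(i)
        field_id, wire_type = tag >> 3, tag & 0b111
        if field_id == target_field_id:
            count += 1
        if wire_type == 0:            # varint payload
            _, i = read_varint(i)
        elif wire_type == 2:          # length-delimited payload
            length, i = read_varint(i)
            i += length
        elif wire_type == 5:          # fixed 32-bit payload
            i += 4
        else:                         # unknown wire type: stop, like the C++ above
            break
    return count

# repeated sint32 int_array = 7 [packed=false]: two varint entries with tag byte 0x38
assert count_repeated_field(bytes([0x38, 0x02, 0x38, 0x04]), 7) == 2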
@@ -15,6 +15,13 @@

namespace esphome::api {

// Protocol Buffer wire type constants
// See https://protobuf.dev/programming-guides/encoding/#structure
constexpr uint8_t WIRE_TYPE_VARINT = 0; // int32, int64, uint32, uint64, sint32, sint64, bool, enum
constexpr uint8_t WIRE_TYPE_LENGTH_DELIMITED = 2; // string, bytes, embedded messages, packed repeated fields
constexpr uint8_t WIRE_TYPE_FIXED32 = 5; // fixed32, sfixed32, float
constexpr uint8_t WIRE_TYPE_MASK = 0b111; // Mask to extract wire type from tag

// Helper functions for ZigZag encoding/decoding
inline constexpr uint32_t encode_zigzag32(int32_t value) {
return (static_cast<uint32_t>(value) << 1) ^ (static_cast<uint32_t>(value >> 31));

@@ -241,7 +248,7 @@ class ProtoWriteBuffer {
* Following https://protobuf.dev/programming-guides/encoding/#structure
*/
void encode_field_raw(uint32_t field_id, uint32_t type) {
uint32_t val = (field_id << 3) | (type & 0b111);
uint32_t val = (field_id << 3) | (type & WIRE_TYPE_MASK);
this->encode_varint_raw(val);
}
void encode_string(uint32_t field_id, const char *string, size_t len, bool force = false) {

@@ -354,7 +361,18 @@ class ProtoMessage {
// Base class for messages that support decoding
class ProtoDecodableMessage : public ProtoMessage {
public:
void decode(const uint8_t *buffer, size_t length);
virtual void decode(const uint8_t *buffer, size_t length);

/**
* Count occurrences of a repeated field in a protobuf buffer.
* This is a lightweight scan that only parses tags and skips field data.
*
* @param buffer Pointer to the protobuf buffer
* @param length Length of the buffer in bytes
* @param target_field_id The field ID to count
* @return Number of times the field appears in the buffer
*/
static uint32_t count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id);

protected:
virtual bool decode_varint(uint32_t field_id, ProtoVarInt value) { return false; }

@@ -482,7 +500,7 @@ class ProtoSize {
* @return The number of bytes needed to encode the field ID and wire type
*/
static constexpr uint32_t field(uint32_t field_id, uint32_t type) {
uint32_t tag = (field_id << 3) | (type & WIRE_TYPE_MASK);
return varint(tag);
}
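The tag helpers above pack the field ID and wire type into one varint: tag = (field_id << 3) | wire_type. A short worked example of that packing and of the zigzag mapping defined above (plain Python arithmetic, mirroring the formulas in this header):

def make_tag(field_id: int, wire_type: int) -> int:
    return (field_id << 3) | (wire_type & 0b111)

def encode_zigzag32(value: int) -> int:
    return ((value << 1) ^ (value >> 31)) & 0xFFFFFFFF

# feature_flags is field 27 with varint wire type 0: tag = 27 * 8 = 216,
# which needs a two-byte varint (0xD8 0x01) because it exceeds 127.
assert make_tag(27, 0) == 216
# zigzag maps small negative ints to small unsigned ones: -1 -> 1, -2 -> 3, 2 -> 4
assert encode_zigzag32(-1) == 1 and encode_zigzag32(-2) == 3 and encode_zigzag32(2) == 4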
@@ -12,16 +12,16 @@ template<> int32_t get_execute_arg_value<int32_t>(const ExecuteServiceArgument &
template<> float get_execute_arg_value<float>(const ExecuteServiceArgument &arg) { return arg.float_; }
template<> std::string get_execute_arg_value<std::string>(const ExecuteServiceArgument &arg) { return arg.string_; }
template<> std::vector<bool> get_execute_arg_value<std::vector<bool>>(const ExecuteServiceArgument &arg) {
return arg.bool_array;
return std::vector<bool>(arg.bool_array.begin(), arg.bool_array.end());
}
template<> std::vector<int32_t> get_execute_arg_value<std::vector<int32_t>>(const ExecuteServiceArgument &arg) {
return arg.int_array;
return std::vector<int32_t>(arg.int_array.begin(), arg.int_array.end());
}
template<> std::vector<float> get_execute_arg_value<std::vector<float>>(const ExecuteServiceArgument &arg) {
return arg.float_array;
return std::vector<float>(arg.float_array.begin(), arg.float_array.end());
}
template<> std::vector<std::string> get_execute_arg_value<std::vector<std::string>>(const ExecuteServiceArgument &arg) {
return arg.string_array;
return std::vector<std::string>(arg.string_array.begin(), arg.string_array.end());
}

template<> enums::ServiceArgType to_service_arg_type<bool>() { return enums::SERVICE_ARG_TYPE_BOOL; }

@@ -55,7 +55,7 @@ template<typename... Ts> class UserServiceBase : public UserServiceDescriptor {

protected:
virtual void execute(Ts... x) = 0;
template<int... S> void execute_(const std::vector<ExecuteServiceArgument> &args, seq<S...> type) {
template<typename ArgsContainer, int... S> void execute_(const ArgsContainer &args, seq<S...> type) {
this->execute((get_execute_arg_value<Ts>(args[S]))...);
}
esphome/components/bh1900nux/__init__.py (new file, 0 lines)
esphome/components/bh1900nux/bh1900nux.cpp (new file, 54 lines)
@@ -0,0 +1,54 @@
#include "esphome/core/log.h"
#include "bh1900nux.h"

namespace esphome {
namespace bh1900nux {

static const char *const TAG = "bh1900nux.sensor";

// I2C Registers
static const uint8_t TEMPERATURE_REG = 0x00;
static const uint8_t CONFIG_REG = 0x01; // Not used or supported yet
static const uint8_t TEMPERATURE_LOW_REG = 0x02; // Not used or supported yet
static const uint8_t TEMPERATURE_HIGH_REG = 0x03; // Not used or supported yet
static const uint8_t SOFT_RESET_REG = 0x04;

// I2C Command payloads
static const uint8_t SOFT_RESET_PAYLOAD = 0x01; // Soft Reset value

static const float SENSOR_RESOLUTION = 0.0625f; // Sensor resolution per bit in degrees celsius

void BH1900NUXSensor::setup() {
// Initialize I2C device
i2c::ErrorCode result_code =
this->write_register(SOFT_RESET_REG, &SOFT_RESET_PAYLOAD, 1); // Software Reset to check communication
if (result_code != i2c::ERROR_OK) {
this->mark_failed(ESP_LOG_MSG_COMM_FAIL);
return;
}
}

void BH1900NUXSensor::update() {
uint8_t temperature_raw[2];
if (this->read_register(TEMPERATURE_REG, temperature_raw, 2) != i2c::ERROR_OK) {
ESP_LOGE(TAG, ESP_LOG_MSG_COMM_FAIL);
return;
}

// Combined raw value, unsigned and unaligned 16 bit
// Temperature is represented in just 12 bits, shift needed
int16_t raw_temperature_register_value = encode_uint16(temperature_raw[0], temperature_raw[1]);
raw_temperature_register_value >>= 4;
float temperature_value = raw_temperature_register_value * SENSOR_RESOLUTION; // Apply sensor resolution

this->publish_state(temperature_value);
}

void BH1900NUXSensor::dump_config() {
LOG_SENSOR("", "BH1900NUX", this);
LOG_I2C_DEVICE(this);
LOG_UPDATE_INTERVAL(this);
}

} // namespace bh1900nux
} // namespace esphome
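update() above reads two bytes, keeps the 12-bit temperature that sits in the upper bits, and scales by 0.0625 degrees Celsius per LSB. A worked example of that conversion in Python (the sample bytes are hypothetical, and the sign handling assumes the usual two's-complement layout):

def bh1900nux_raw_to_celsius(msb: int, lsb: int) -> float:
    raw = (msb << 8) | lsb          # combine the two I2C bytes, MSB first
    raw >>= 4                       # the temperature occupies the top 12 bits
    if raw & 0x800:                 # sign-extend negative temperatures (assumption)
        raw -= 1 << 12
    return raw * 0.0625             # sensor resolution per bit

# 0x17E0 -> shift right 4 -> 0x17E = 382 -> 382 * 0.0625 = 23.875 degrees
assert bh1900nux_raw_to_celsius(0x17, 0xE0) == 23.875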
esphome/components/bh1900nux/bh1900nux.h (new file, 18 lines)
@@ -0,0 +1,18 @@
#pragma once

#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/i2c/i2c.h"

namespace esphome {
namespace bh1900nux {

class BH1900NUXSensor : public sensor::Sensor, public PollingComponent, public i2c::I2CDevice {
public:
void setup() override;
void update() override;
void dump_config() override;
};

} // namespace bh1900nux
} // namespace esphome
esphome/components/bh1900nux/sensor.py (new file, 34 lines)
@@ -0,0 +1,34 @@
import esphome.codegen as cg
from esphome.components import i2c, sensor
import esphome.config_validation as cv
from esphome.const import (
DEVICE_CLASS_TEMPERATURE,
STATE_CLASS_MEASUREMENT,
UNIT_CELSIUS,
)

DEPENDENCIES = ["i2c"]
CODEOWNERS = ["@B48D81EFCC"]

sensor_ns = cg.esphome_ns.namespace("bh1900nux")
BH1900NUXSensor = sensor_ns.class_(
"BH1900NUXSensor", cg.PollingComponent, i2c.I2CDevice
)

CONFIG_SCHEMA = (
sensor.sensor_schema(
BH1900NUXSensor,
accuracy_decimals=1,
unit_of_measurement=UNIT_CELSIUS,
device_class=DEVICE_CLASS_TEMPERATURE,
state_class=STATE_CLASS_MEASUREMENT,
)
.extend(cv.polling_component_schema("60s"))
.extend(i2c.i2c_device_schema(0x48))
)


async def to_code(config):
var = await sensor.new_sensor(config)
await cg.register_component(var, config)
await i2c.register_i2c_device(var, config)
esphome/components/ble_nus/__init__.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import esphome.codegen as cg
from esphome.components.zephyr import zephyr_add_prj_conf
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_LOGS, CONF_TYPE

AUTO_LOAD = ["zephyr_ble_server"]
CODEOWNERS = ["@tomaszduda23"]

ble_nus_ns = cg.esphome_ns.namespace("ble_nus")
BLENUS = ble_nus_ns.class_("BLENUS", cg.Component)

CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(BLENUS),
cv.Optional(CONF_TYPE, default=CONF_LOGS): cv.one_of(
*[CONF_LOGS], lower=True
),
}
).extend(cv.COMPONENT_SCHEMA),
cv.only_with_framework("zephyr"),
)


async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
zephyr_add_prj_conf("BT_NUS", True)
cg.add(var.set_expose_log(config[CONF_TYPE] == CONF_LOGS))
await cg.register_component(var, config)
esphome/components/ble_nus/ble_nus.cpp (new file, 157 lines)
@@ -0,0 +1,157 @@
#ifdef USE_ZEPHYR
#include "ble_nus.h"
#include <zephyr/kernel.h>
#include <bluetooth/services/nus.h>
#include "esphome/core/log.h"
#ifdef USE_LOGGER
#include "esphome/components/logger/logger.h"
#include "esphome/core/application.h"
#endif
#include <zephyr/sys/ring_buffer.h>

namespace esphome::ble_nus {

constexpr size_t BLE_TX_BUF_SIZE = 2048;

// NOLINTBEGIN(cppcoreguidelines-avoid-non-const-global-variables)
BLENUS *global_ble_nus;
RING_BUF_DECLARE(global_ble_tx_ring_buf, BLE_TX_BUF_SIZE);
// NOLINTEND(cppcoreguidelines-avoid-non-const-global-variables)

static const char *const TAG = "ble_nus";

size_t BLENUS::write_array(const uint8_t *data, size_t len) {
if (atomic_get(&this->tx_status_) == TX_DISABLED) {
return 0;
}
return ring_buf_put(&global_ble_tx_ring_buf, data, len);
}

void BLENUS::connected(bt_conn *conn, uint8_t err) {
if (err == 0) {
global_ble_nus->conn_.store(bt_conn_ref(conn));
}
}

void BLENUS::disconnected(bt_conn *conn, uint8_t reason) {
if (global_ble_nus->conn_) {
bt_conn_unref(global_ble_nus->conn_.load());
// Connection array is global static.
// Reference can be kept even if disconnected.
}
}

void BLENUS::tx_callback(bt_conn *conn) {
atomic_cas(&global_ble_nus->tx_status_, TX_BUSY, TX_ENABLED);
ESP_LOGVV(TAG, "Send operation completed");
}

void BLENUS::send_enabled_callback(bt_nus_send_status status) {
switch (status) {
case BT_NUS_SEND_STATUS_ENABLED:
atomic_set(&global_ble_nus->tx_status_, TX_ENABLED);
#ifdef USE_LOGGER
if (global_ble_nus->expose_log_) {
App.schedule_dump_config();
}
#endif
ESP_LOGD(TAG, "NUS notification has been enabled");
break;
case BT_NUS_SEND_STATUS_DISABLED:
atomic_set(&global_ble_nus->tx_status_, TX_DISABLED);
ESP_LOGD(TAG, "NUS notification has been disabled");
break;
}
}

void BLENUS::rx_callback(bt_conn *conn, const uint8_t *const data, uint16_t len) {
ESP_LOGD(TAG, "Received %d bytes.", len);
}

void BLENUS::setup() {
bt_nus_cb callbacks = {
.received = rx_callback,
.sent = tx_callback,
.send_enabled = send_enabled_callback,
};

bt_nus_init(&callbacks);

static bt_conn_cb conn_callbacks = {
.connected = BLENUS::connected,
.disconnected = BLENUS::disconnected,
};

bt_conn_cb_register(&conn_callbacks);

global_ble_nus = this;
#ifdef USE_LOGGER
if (logger::global_logger != nullptr && this->expose_log_) {
logger::global_logger->add_on_log_callback(
[this](int level, const char *tag, const char *message, size_t message_len) {
this->write_array(reinterpret_cast<const uint8_t *>(message), message_len);
const char c = '\n';
this->write_array(reinterpret_cast<const uint8_t *>(&c), 1);
});
}

#endif
}

void BLENUS::dump_config() {
ESP_LOGCONFIG(TAG, "ble nus:");
ESP_LOGCONFIG(TAG, " log: %s", YESNO(this->expose_log_));
uint32_t mtu = 0;
bt_conn *conn = this->conn_.load();
if (conn) {
mtu = bt_nus_get_mtu(conn);
}
ESP_LOGCONFIG(TAG, " MTU: %u", mtu);
}

void BLENUS::loop() {
if (ring_buf_is_empty(&global_ble_tx_ring_buf)) {
return;
}

if (!atomic_cas(&this->tx_status_, TX_ENABLED, TX_BUSY)) {
if (atomic_get(&this->tx_status_) == TX_DISABLED) {
ring_buf_reset(&global_ble_tx_ring_buf);
}
return;
}

bt_conn *conn = this->conn_.load();
if (conn) {
conn = bt_conn_ref(conn);
}

if (nullptr == conn) {
atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED);
return;
}

uint32_t req_len = bt_nus_get_mtu(conn);

uint8_t *buf;
uint32_t size = ring_buf_get_claim(&global_ble_tx_ring_buf, &buf, req_len);

int err, err2;

err = bt_nus_send(conn, buf, size);
err2 = ring_buf_get_finish(&global_ble_tx_ring_buf, size);
if (err2) {
// This should not happen.
ESP_LOGE(TAG, "Size %u exceeds valid bytes in the ring buffer (%d error)", size, err2);
}
if (err == 0) {
ESP_LOGVV(TAG, "Sent %d bytes", size);
} else {
ESP_LOGE(TAG, "Failed to send %d bytes (%d error)", size, err);
atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED);
}
bt_conn_unref(conn);
}

} // namespace esphome::ble_nus
#endif
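loop() above drains the ring buffer only when the notification path is idle: ENABLED flips to BUSY via compare-and-swap, the TX-done callback flips it back, and DISABLED drops buffered bytes. A tiny Python model of those transitions (single-threaded illustration only, no Zephyr atomics involved):

from enum import Enum, auto

class TxStatus(Enum):
    DISABLED = auto()
    ENABLED = auto()
    BUSY = auto()

class TxGate:
    """Stand-in for the atomic_cas/atomic_set calls used by the component above."""
    def __init__(self) -> None:
        self.status = TxStatus.DISABLED
    def cas(self, expected: TxStatus, new: TxStatus) -> bool:
        if self.status is expected:
            self.status = new
            return True
        return False

gate = TxGate()
assert not gate.cas(TxStatus.ENABLED, TxStatus.BUSY)   # nothing sent while notifications are off
gate.status = TxStatus.ENABLED                          # send_enabled_callback(ENABLED)
assert gate.cas(TxStatus.ENABLED, TxStatus.BUSY)        # loop() claims the TX path
assert gate.cas(TxStatus.BUSY, TxStatus.ENABLED)        # tx_callback() releases it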
esphome/components/ble_nus/ble_nus.h (new file, 37 lines)
@@ -0,0 +1,37 @@
#pragma once
#ifdef USE_ZEPHYR
#include "esphome/core/defines.h"
#include "esphome/core/component.h"
#include <shell/shell_bt_nus.h>
#include <atomic>

namespace esphome::ble_nus {

class BLENUS : public Component {
enum TxStatus {
TX_DISABLED,
TX_ENABLED,
TX_BUSY,
};

public:
void setup() override;
void dump_config() override;
void loop() override;
size_t write_array(const uint8_t *data, size_t len);
void set_expose_log(bool expose_log) { this->expose_log_ = expose_log; }

protected:
static void send_enabled_callback(bt_nus_send_status status);
static void tx_callback(bt_conn *conn);
static void rx_callback(bt_conn *conn, const uint8_t *data, uint16_t len);
static void connected(bt_conn *conn, uint8_t err);
static void disconnected(bt_conn *conn, uint8_t reason);

std::atomic<bt_conn *> conn_ = nullptr;
bool expose_log_ = false;
atomic_t tx_status_ = ATOMIC_INIT(TX_DISABLED);
};

} // namespace esphome::ble_nus
#endif
@@ -41,7 +41,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(BME680BSECComponent),
|
||||
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
|
||||
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
|
||||
cv.Optional(CONF_IAQ_MODE, default="STATIC"): cv.enum(
|
||||
IAQ_MODE_OPTIONS, upper=True
|
||||
),
|
||||
|
||||
@@ -139,7 +139,7 @@ CONFIG_SCHEMA_BASE = (
|
||||
cv.Optional(CONF_SUPPLY_VOLTAGE, default="3.3V"): cv.enum(
|
||||
VOLTAGE_OPTIONS, upper=True
|
||||
),
|
||||
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
|
||||
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
|
||||
cv.Optional(
|
||||
CONF_STATE_SAVE_INTERVAL, default="6hours"
|
||||
): cv.positive_time_period_minutes,
|
||||
|
||||
@@ -8,17 +8,30 @@ namespace cap1188 {
|
||||
static const char *const TAG = "cap1188";
|
||||
|
||||
void CAP1188Component::setup() {
|
||||
// Reset device using the reset pin
|
||||
if (this->reset_pin_ != nullptr) {
|
||||
this->reset_pin_->setup();
|
||||
this->reset_pin_->digital_write(false);
|
||||
delay(100); // NOLINT
|
||||
this->reset_pin_->digital_write(true);
|
||||
delay(100); // NOLINT
|
||||
this->reset_pin_->digital_write(false);
|
||||
delay(100); // NOLINT
|
||||
this->disable_loop();
|
||||
|
||||
// no reset pin
|
||||
if (this->reset_pin_ == nullptr) {
|
||||
this->finish_setup_();
|
||||
return;
|
||||
}
|
||||
|
||||
// reset pin configured so reset before finishing setup
|
||||
this->reset_pin_->setup();
|
||||
this->reset_pin_->digital_write(false);
|
||||
// delay after reset pin write
|
||||
this->set_timeout(100, [this]() {
|
||||
this->reset_pin_->digital_write(true);
|
||||
// delay after reset pin write
|
||||
this->set_timeout(100, [this]() {
|
||||
this->reset_pin_->digital_write(false);
|
||||
// delay after reset pin write
|
||||
this->set_timeout(100, [this]() { this->finish_setup_(); });
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
void CAP1188Component::finish_setup_() {
|
||||
// Check if CAP1188 is actually connected
|
||||
this->read_byte(CAP1188_PRODUCT_ID, &this->cap1188_product_id_);
|
||||
this->read_byte(CAP1188_MANUFACTURE_ID, &this->cap1188_manufacture_id_);
|
||||
@@ -44,6 +57,9 @@ void CAP1188Component::setup() {
|
||||
|
||||
// Speed up a bit
|
||||
this->write_byte(CAP1188_STAND_BY_CONFIGURATION, 0x30);
|
||||
|
||||
// Setup successful, so enable loop
|
||||
this->enable_loop();
|
||||
}
|
||||
|
||||
void CAP1188Component::dump_config() {
|
||||
|
||||
@@ -49,6 +49,8 @@ class CAP1188Component : public Component, public i2c::I2CDevice {
|
||||
void loop() override;
|
||||
|
||||
protected:
|
||||
void finish_setup_();
|
||||
|
||||
std::vector<CAP1188Channel *> channels_{};
|
||||
uint8_t touch_threshold_{0x20};
|
||||
uint8_t allow_multiple_touches_{0x80};
|
||||
|
||||
@@ -96,7 +96,8 @@ void ClimateCall::validate_() {
|
||||
}
|
||||
if (this->target_temperature_.has_value()) {
|
||||
auto target = *this->target_temperature_;
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
ESP_LOGW(TAG, " Cannot set target temperature for climate device "
|
||||
"with two-point target temperature!");
|
||||
this->target_temperature_.reset();
|
||||
@@ -106,7 +107,8 @@ void ClimateCall::validate_() {
|
||||
}
|
||||
}
|
||||
if (this->target_temperature_low_.has_value() || this->target_temperature_high_.has_value()) {
|
||||
if (!traits.get_supports_two_point_target_temperature()) {
|
||||
if (!traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
ESP_LOGW(TAG, " Cannot set low/high target temperature for this device!");
|
||||
this->target_temperature_low_.reset();
|
||||
this->target_temperature_high_.reset();
|
||||
@@ -350,13 +352,14 @@ void Climate::save_state_() {
|
||||
|
||||
state.mode = this->mode;
|
||||
auto traits = this->get_traits();
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
state.target_temperature_low = this->target_temperature_low;
|
||||
state.target_temperature_high = this->target_temperature_high;
|
||||
} else {
|
||||
state.target_temperature = this->target_temperature;
|
||||
}
|
||||
if (traits.get_supports_target_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
|
||||
state.target_humidity = this->target_humidity;
|
||||
}
|
||||
if (traits.get_supports_fan_modes() && fan_mode.has_value()) {
|
||||
@@ -400,7 +403,7 @@ void Climate::publish_state() {
|
||||
auto traits = this->get_traits();
|
||||
|
||||
ESP_LOGD(TAG, " Mode: %s", LOG_STR_ARG(climate_mode_to_string(this->mode)));
|
||||
if (traits.get_supports_action()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
|
||||
ESP_LOGD(TAG, " Action: %s", LOG_STR_ARG(climate_action_to_string(this->action)));
|
||||
}
|
||||
if (traits.get_supports_fan_modes() && this->fan_mode.has_value()) {
|
||||
@@ -418,19 +421,20 @@ void Climate::publish_state() {
|
||||
if (traits.get_supports_swing_modes()) {
|
||||
ESP_LOGD(TAG, " Swing Mode: %s", LOG_STR_ARG(climate_swing_mode_to_string(this->swing_mode)));
|
||||
}
|
||||
if (traits.get_supports_current_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
|
||||
ESP_LOGD(TAG, " Current Temperature: %.2f°C", this->current_temperature);
|
||||
}
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
ESP_LOGD(TAG, " Target Temperature: Low: %.2f°C High: %.2f°C", this->target_temperature_low,
|
||||
this->target_temperature_high);
|
||||
} else {
|
||||
ESP_LOGD(TAG, " Target Temperature: %.2f°C", this->target_temperature);
|
||||
}
|
||||
if (traits.get_supports_current_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
|
||||
ESP_LOGD(TAG, " Current Humidity: %.0f%%", this->current_humidity);
|
||||
}
|
||||
if (traits.get_supports_target_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
|
||||
ESP_LOGD(TAG, " Target Humidity: %.0f%%", this->target_humidity);
|
||||
}
|
||||
|
||||
@@ -485,13 +489,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) {
|
||||
auto call = climate->make_call();
|
||||
auto traits = climate->get_traits();
|
||||
call.set_mode(this->mode);
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
call.set_target_temperature_low(this->target_temperature_low);
|
||||
call.set_target_temperature_high(this->target_temperature_high);
|
||||
} else {
|
||||
call.set_target_temperature(this->target_temperature);
|
||||
}
|
||||
if (traits.get_supports_target_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
|
||||
call.set_target_humidity(this->target_humidity);
|
||||
}
|
||||
if (traits.get_supports_fan_modes() || !traits.get_supported_custom_fan_modes().empty()) {
|
||||
@@ -508,13 +513,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) {
|
||||
void ClimateDeviceRestoreState::apply(Climate *climate) {
|
||||
auto traits = climate->get_traits();
|
||||
climate->mode = this->mode;
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
climate->target_temperature_low = this->target_temperature_low;
|
||||
climate->target_temperature_high = this->target_temperature_high;
|
||||
} else {
|
||||
climate->target_temperature = this->target_temperature;
|
||||
}
|
||||
if (traits.get_supports_target_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
|
||||
climate->target_humidity = this->target_humidity;
|
||||
}
|
||||
if (traits.get_supports_fan_modes() && !this->uses_custom_fan_mode) {
|
||||
@@ -580,28 +586,30 @@ void Climate::dump_traits_(const char *tag) {
|
||||
" Target: %.1f",
|
||||
traits.get_visual_min_temperature(), traits.get_visual_max_temperature(),
|
||||
traits.get_visual_target_temperature_step());
|
||||
if (traits.get_supports_current_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
|
||||
ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step());
|
||||
}
|
||||
if (traits.get_supports_target_humidity() || traits.get_supports_current_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY |
|
||||
climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
|
||||
ESP_LOGCONFIG(tag,
|
||||
" - Min humidity: %.0f\n"
|
||||
" - Max humidity: %.0f",
|
||||
traits.get_visual_min_humidity(), traits.get_visual_max_humidity());
|
||||
}
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
ESP_LOGCONFIG(tag, " [x] Supports two-point target temperature");
|
||||
}
|
||||
if (traits.get_supports_current_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
|
||||
ESP_LOGCONFIG(tag, " [x] Supports current temperature");
|
||||
}
|
||||
if (traits.get_supports_target_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
|
||||
ESP_LOGCONFIG(tag, " [x] Supports target humidity");
|
||||
}
|
||||
if (traits.get_supports_current_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
|
||||
ESP_LOGCONFIG(tag, " [x] Supports current humidity");
|
||||
}
|
||||
if (traits.get_supports_action()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
|
||||
ESP_LOGCONFIG(tag, " [x] Supports action");
|
||||
}
|
||||
if (!traits.get_supported_modes().empty()) {
|
||||
|
||||
@@ -98,6 +98,21 @@ enum ClimatePreset : uint8_t {
|
||||
CLIMATE_PRESET_ACTIVITY = 7,
|
||||
};
|
||||
|
||||
enum ClimateFeature : uint32_t {
|
||||
// Reporting current temperature is supported
|
||||
CLIMATE_SUPPORTS_CURRENT_TEMPERATURE = 1 << 0,
|
||||
// Setting two target temperatures is supported (used in conjunction with CLIMATE_MODE_HEAT_COOL)
|
||||
CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE = 1 << 1,
|
||||
// Single-point mode is NOT supported (UI always displays two handles, setting 'target_temperature' is not supported)
|
||||
CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE = 1 << 2,
|
||||
// Reporting current humidity is supported
|
||||
CLIMATE_SUPPORTS_CURRENT_HUMIDITY = 1 << 3,
|
||||
// Setting a target humidity is supported
|
||||
CLIMATE_SUPPORTS_TARGET_HUMIDITY = 1 << 4,
|
||||
// Reporting current climate action is supported
|
||||
CLIMATE_SUPPORTS_ACTION = 1 << 5,
|
||||
};
|
||||
|
||||
/// Convert the given ClimateMode to a human-readable string.
|
||||
const LogString *climate_mode_to_string(ClimateMode mode);
|
||||
|
||||
|
||||
@@ -21,48 +21,92 @@ namespace climate {
|
||||
* - Target Temperature
|
||||
*
|
||||
* All other properties and modes are optional and the integration must mark
|
||||
* each of them as supported by setting the appropriate flag here.
|
||||
* each of them as supported by setting the appropriate flag(s) here.
|
||||
*
|
||||
* - supports current temperature - if the climate device supports reporting a current temperature
|
||||
* - supports two point target temperature - if the climate device's target temperature should be
|
||||
* split in target_temperature_low and target_temperature_high instead of just the single target_temperature
|
||||
* - feature flags: see ClimateFeatures enum in climate_mode.h
|
||||
* - supports modes:
|
||||
* - auto mode (automatic control)
|
||||
* - cool mode (lowers current temperature)
|
||||
* - heat mode (increases current temperature)
|
||||
* - dry mode (removes humidity from air)
|
||||
* - fan mode (only turns on fan)
|
||||
* - supports action - if the climate device supports reporting the active
|
||||
* current action of the device with the action property.
|
||||
* - supports fan modes - optionally, if it has a fan which can be configured in different ways:
|
||||
* - on, off, auto, high, medium, low, middle, focus, diffuse, quiet
|
||||
* - supports swing modes - optionally, if it has a swing which can be configured in different ways:
|
||||
* - off, both, vertical, horizontal
|
||||
*
|
||||
* This class also contains static data for the climate device display:
|
||||
* - visual min/max temperature - tells the frontend what range of temperatures the climate device
|
||||
* should display (gauge min/max values)
|
||||
* - visual min/max temperature/humidity - tells the frontend what range of temperature/humidity the
|
||||
* climate device should display (gauge min/max values)
|
||||
* - temperature step - the step with which to increase/decrease target temperature.
|
||||
* This also affects with how many decimal places the temperature is shown
|
||||
*/
|
||||
class ClimateTraits {
|
||||
public:
|
||||
bool get_supports_current_temperature() const { return this->supports_current_temperature_; }
|
||||
/// Get/set feature flags (see ClimateFeatures enum in climate_mode.h)
|
||||
uint32_t get_feature_flags() const { return this->feature_flags_; }
|
||||
void add_feature_flags(uint32_t feature_flags) { this->feature_flags_ |= feature_flags; }
|
||||
void clear_feature_flags(uint32_t feature_flags) { this->feature_flags_ &= ~feature_flags; }
|
||||
bool has_feature_flags(uint32_t feature_flags) const { return this->feature_flags_ & feature_flags; }
|
||||
void set_feature_flags(uint32_t feature_flags) { this->feature_flags_ = feature_flags; }
|
||||
|
||||
ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
|
||||
bool get_supports_current_temperature() const {
|
||||
return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
}
|
||||
ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
|
||||
void set_supports_current_temperature(bool supports_current_temperature) {
|
||||
this->supports_current_temperature_ = supports_current_temperature;
|
||||
if (supports_current_temperature) {
|
||||
this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
} else {
|
||||
this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
}
|
||||
}
|
||||
bool get_supports_current_humidity() const { return this->supports_current_humidity_; }
|
||||
ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
|
||||
bool get_supports_current_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY); }
|
||||
ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
|
||||
void set_supports_current_humidity(bool supports_current_humidity) {
|
||||
this->supports_current_humidity_ = supports_current_humidity;
|
||||
if (supports_current_humidity) {
|
||||
this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
|
||||
} else {
|
||||
this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
|
||||
}
|
||||
}
|
||||
bool get_supports_two_point_target_temperature() const { return this->supports_two_point_target_temperature_; }
|
||||
ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
|
||||
bool get_supports_two_point_target_temperature() const {
|
||||
return this->has_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
|
||||
}
|
||||
ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
|
||||
void set_supports_two_point_target_temperature(bool supports_two_point_target_temperature) {
|
||||
this->supports_two_point_target_temperature_ = supports_two_point_target_temperature;
|
||||
if (supports_two_point_target_temperature)
|
||||
// Use CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE to mimic previous behavior
|
||||
{
|
||||
this->add_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
|
||||
} else {
|
||||
this->clear_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
|
||||
}
|
||||
}
|
||||
bool get_supports_target_humidity() const { return this->supports_target_humidity_; }
|
||||
ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
|
||||
bool get_supports_target_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY); }
|
||||
ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
|
||||
void set_supports_target_humidity(bool supports_target_humidity) {
|
||||
this->supports_target_humidity_ = supports_target_humidity;
|
||||
if (supports_target_humidity) {
|
||||
this->add_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
|
||||
} else {
|
||||
this->clear_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
|
||||
}
|
||||
}
|
||||
ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
|
||||
bool get_supports_action() const { return this->has_feature_flags(CLIMATE_SUPPORTS_ACTION); }
|
||||
ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
|
||||
void set_supports_action(bool supports_action) {
|
||||
if (supports_action) {
|
||||
this->add_feature_flags(CLIMATE_SUPPORTS_ACTION);
|
||||
} else {
|
||||
this->clear_feature_flags(CLIMATE_SUPPORTS_ACTION);
|
||||
}
|
||||
}
|
||||
|
||||
void set_supported_modes(std::set<ClimateMode> modes) { this->supported_modes_ = std::move(modes); }
|
||||
void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); }
|
||||
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
|
||||
@@ -82,9 +126,6 @@ class ClimateTraits {
|
||||
bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); }
|
||||
const std::set<ClimateMode> &get_supported_modes() const { return this->supported_modes_; }
|
||||
|
||||
void set_supports_action(bool supports_action) { this->supports_action_ = supports_action; }
|
||||
bool get_supports_action() const { return this->supports_action_; }
|
||||
|
||||
void set_supported_fan_modes(std::set<ClimateFanMode> modes) { this->supported_fan_modes_ = std::move(modes); }
|
||||
void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); }
|
||||
void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); }
|
||||
@@ -219,24 +260,20 @@ class ClimateTraits {
|
||||
}
|
||||
}
|
||||
|
||||
bool supports_current_temperature_{false};
|
||||
bool supports_current_humidity_{false};
|
||||
bool supports_two_point_target_temperature_{false};
|
||||
bool supports_target_humidity_{false};
|
||||
std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
|
||||
bool supports_action_{false};
|
||||
std::set<climate::ClimateFanMode> supported_fan_modes_;
|
||||
std::set<climate::ClimateSwingMode> supported_swing_modes_;
|
||||
std::set<climate::ClimatePreset> supported_presets_;
|
||||
std::set<std::string> supported_custom_fan_modes_;
|
||||
std::set<std::string> supported_custom_presets_;
|
||||
|
||||
uint32_t feature_flags_{0};
|
||||
float visual_min_temperature_{10};
|
||||
float visual_max_temperature_{30};
|
||||
float visual_target_temperature_step_{0.1};
|
||||
float visual_current_temperature_step_{0.1};
|
||||
float visual_min_humidity_{30};
|
||||
float visual_max_humidity_{99};
|
||||
|
||||
std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
|
||||
std::set<climate::ClimateFanMode> supported_fan_modes_;
|
||||
std::set<climate::ClimateSwingMode> supported_swing_modes_;
|
||||
std::set<climate::ClimatePreset> supported_presets_;
|
||||
std::set<std::string> supported_custom_fan_modes_;
|
||||
std::set<std::string> supported_custom_presets_;
|
||||
};
|
||||
|
||||
} // namespace climate
|
||||
|
||||
@@ -30,14 +30,12 @@ class DateTimeBase : public EntityBase {
|
||||
#endif
|
||||
};
|
||||
|
||||
#ifdef USE_TIME
|
||||
class DateTimeStateTrigger : public Trigger<ESPTime> {
|
||||
public:
|
||||
explicit DateTimeStateTrigger(DateTimeBase *parent) {
|
||||
parent->add_on_state_callback([this, parent]() { this->trigger(parent->state_as_esptime()); });
|
||||
}
|
||||
};
|
||||
#endif
|
||||
|
||||
} // namespace datetime
|
||||
} // namespace esphome
|
||||
|
||||
@@ -11,8 +11,6 @@
|
||||
#include <esp_chip_info.h>
|
||||
#include <esp_partition.h>
|
||||
|
||||
#include <map>
|
||||
|
||||
#ifdef USE_ARDUINO
|
||||
#include <Esp.h>
|
||||
#endif
|
||||
@@ -125,7 +123,12 @@ void DebugComponent::log_partition_info_() {
|
||||
|
||||
uint32_t DebugComponent::get_free_heap_() { return heap_caps_get_free_size(MALLOC_CAP_INTERNAL); }
|
||||
|
||||
static const std::map<int, const char *> CHIP_FEATURES = {
|
||||
struct ChipFeature {
|
||||
int bit;
|
||||
const char *name;
|
||||
};
|
||||
|
||||
static constexpr ChipFeature CHIP_FEATURES[] = {
|
||||
{CHIP_FEATURE_BLE, "BLE"},
|
||||
{CHIP_FEATURE_BT, "BT"},
|
||||
{CHIP_FEATURE_EMB_FLASH, "EMB Flash"},
|
||||
@@ -170,11 +173,13 @@ void DebugComponent::get_device_info_(std::string &device_info) {
|
||||
esp_chip_info(&info);
|
||||
const char *model = ESPHOME_VARIANT;
|
||||
std::string features;
|
||||
for (auto feature : CHIP_FEATURES) {
|
||||
if (info.features & feature.first) {
|
||||
features += feature.second;
|
||||
|
||||
// Check each known feature bit
|
||||
for (const auto &feature : CHIP_FEATURES) {
|
||||
if (info.features & feature.bit) {
|
||||
features += feature.name;
|
||||
features += ", ";
|
||||
info.features &= ~feature.first;
|
||||
info.features &= ~feature.bit;
|
||||
}
|
||||
}
|
||||
if (info.features != 0)
|
||||
|
||||
@@ -25,10 +25,37 @@ static void show_reset_reason(std::string &reset_reason, bool set, const char *r
|
||||
reset_reason += reason;
|
||||
}
|
||||
|
||||
inline uint32_t read_mem_u32(uintptr_t addr) {
|
||||
static inline uint32_t read_mem_u32(uintptr_t addr) {
|
||||
return *reinterpret_cast<volatile uint32_t *>(addr); // NOLINT(performance-no-int-to-ptr)
|
||||
}
|
||||
|
||||
static inline uint8_t read_mem_u8(uintptr_t addr) {
|
||||
return *reinterpret_cast<volatile uint8_t *>(addr); // NOLINT(performance-no-int-to-ptr)
|
||||
}
|
||||
|
||||
// defines from https://github.com/adafruit/Adafruit_nRF52_Bootloader which prints those information
|
||||
constexpr uint32_t SD_MAGIC_NUMBER = 0x51B1E5DB;
|
||||
constexpr uintptr_t MBR_SIZE = 0x1000;
|
||||
constexpr uintptr_t SOFTDEVICE_INFO_STRUCT_OFFSET = 0x2000;
|
||||
constexpr uintptr_t SD_ID_OFFSET = SOFTDEVICE_INFO_STRUCT_OFFSET + 0x10;
|
||||
constexpr uintptr_t SD_VERSION_OFFSET = SOFTDEVICE_INFO_STRUCT_OFFSET + 0x14;
|
||||
|
||||
static inline bool is_sd_present() {
|
||||
return read_mem_u32(SOFTDEVICE_INFO_STRUCT_OFFSET + MBR_SIZE + 4) == SD_MAGIC_NUMBER;
|
||||
}
|
||||
static inline uint32_t sd_id_get() {
|
||||
if (read_mem_u8(MBR_SIZE + SOFTDEVICE_INFO_STRUCT_OFFSET) > (SD_ID_OFFSET - SOFTDEVICE_INFO_STRUCT_OFFSET)) {
|
||||
return read_mem_u32(MBR_SIZE + SD_ID_OFFSET);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
static inline uint32_t sd_version_get() {
|
||||
if (read_mem_u8(MBR_SIZE + SOFTDEVICE_INFO_STRUCT_OFFSET) > (SD_VERSION_OFFSET - SOFTDEVICE_INFO_STRUCT_OFFSET)) {
|
||||
return read_mem_u32(MBR_SIZE + SD_VERSION_OFFSET);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
std::string DebugComponent::get_reset_reason_() {
|
||||
uint32_t cause;
|
||||
auto ret = hwinfo_get_reset_cause(&cause);
|
||||
@@ -271,6 +298,29 @@ void DebugComponent::get_device_info_(std::string &device_info) {
|
||||
NRF_UICR->NRFFW[0]);
|
||||
ESP_LOGD(TAG, "MBR param page addr 0x%08x, UICR param page addr 0x%08x", read_mem_u32(MBR_PARAM_PAGE_ADDR),
|
||||
NRF_UICR->NRFFW[1]);
|
||||
if (is_sd_present()) {
|
||||
uint32_t const sd_id = sd_id_get();
|
||||
uint32_t const sd_version = sd_version_get();
|
||||
|
||||
uint32_t ver[3];
|
||||
ver[0] = sd_version / 1000000;
|
||||
ver[1] = (sd_version - ver[0] * 1000000) / 1000;
|
||||
ver[2] = (sd_version - ver[0] * 1000000 - ver[1] * 1000);
|
||||
|
||||
ESP_LOGD(TAG, "SoftDevice: S%u %u.%u.%u", sd_id, ver[0], ver[1], ver[2]);
|
||||
#ifdef USE_SOFTDEVICE_ID
|
||||
#ifdef USE_SOFTDEVICE_VERSION
|
||||
if (USE_SOFTDEVICE_ID != sd_id || USE_SOFTDEVICE_VERSION != ver[0]) {
|
||||
ESP_LOGE(TAG, "Built for SoftDevice S%u %u.x.y. It may crash due to mismatch of bootloader version.",
|
||||
USE_SOFTDEVICE_ID, USE_SOFTDEVICE_VERSION);
|
||||
}
|
||||
#else
|
||||
if (USE_SOFTDEVICE_ID != sd_id) {
|
||||
ESP_LOGE(TAG, "Built for SoftDevice S%u. It may crash due to mismatch of bootloader version.", USE_SOFTDEVICE_ID);
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
@@ -828,6 +828,9 @@ async def to_code(config):
|
||||
# Disable dynamic log level control to save memory
|
||||
add_idf_sdkconfig_option("CONFIG_LOG_DYNAMIC_LEVEL_CONTROL", False)
|
||||
|
||||
# Reduce PHY TX power in the event of a brownout
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True)
|
||||
|
||||
# Set default CPU frequency
|
||||
add_idf_sdkconfig_option(
|
||||
f"CONFIG_ESP_DEFAULT_CPU_FREQ_MHZ_{config[CONF_CPU_FREQUENCY][:-3]}", True
|
||||
|
||||
@@ -108,8 +108,13 @@ class BTLoggers(Enum):
|
||||
"""ESP32 WiFi provisioning over Bluetooth"""
|
||||
|
||||
|
||||
# Set to track which loggers are needed by components
|
||||
_required_loggers: set[BTLoggers] = set()
|
||||
# Key for storing required loggers in CORE.data
|
||||
ESP32_BLE_REQUIRED_LOGGERS_KEY = "esp32_ble_required_loggers"
|
||||
|
||||
|
||||
def _get_required_loggers() -> set[BTLoggers]:
|
||||
"""Get the set of required Bluetooth loggers from CORE.data."""
|
||||
return CORE.data.setdefault(ESP32_BLE_REQUIRED_LOGGERS_KEY, set())
|
||||
|
||||
|
||||
# Dataclass for handler registration counts
|
||||
@@ -170,12 +175,13 @@ def register_bt_logger(*loggers: BTLoggers) -> None:
|
||||
Args:
|
||||
*loggers: One or more BTLoggers enum members
|
||||
"""
|
||||
required_loggers = _get_required_loggers()
|
||||
for logger in loggers:
|
||||
if not isinstance(logger, BTLoggers):
|
||||
raise TypeError(
|
||||
f"Logger must be a BTLoggers enum member, got {type(logger)}"
|
||||
)
|
||||
_required_loggers.add(logger)
|
||||
required_loggers.add(logger)
|
||||
|
||||
|
||||
CONF_BLE_ID = "ble_id"
|
||||
@@ -488,8 +494,9 @@ async def to_code(config):
|
||||
# Apply logger settings if log disabling is enabled
|
||||
if config.get(CONF_DISABLE_BT_LOGS, False):
|
||||
# Disable all Bluetooth loggers that are not required
|
||||
required_loggers = _get_required_loggers()
|
||||
for logger in BTLoggers:
|
||||
if logger not in _required_loggers:
|
||||
if logger not in required_loggers:
|
||||
add_idf_sdkconfig_option(f"{logger.value}_NONE", True)
|
||||
|
||||
# Set BLE connection establishment timeout to match aioesphomeapi/bleak-retry-connector
|
||||
|
||||
@@ -60,11 +60,21 @@ class RegistrationCounts:
|
||||
clients: int = 0
|
||||
|
||||
|
||||
# Set to track which features are needed by components
|
||||
_required_features: set[BLEFeatures] = set()
|
||||
# CORE.data keys for state management
|
||||
ESP32_BLE_TRACKER_REQUIRED_FEATURES_KEY = "esp32_ble_tracker_required_features"
|
||||
ESP32_BLE_TRACKER_REGISTRATION_COUNTS_KEY = "esp32_ble_tracker_registration_counts"
|
||||
|
||||
# Track registration counts for StaticVector sizing
|
||||
_registration_counts = RegistrationCounts()
|
||||
|
||||
def _get_required_features() -> set[BLEFeatures]:
|
||||
"""Get the set of required BLE features from CORE.data."""
|
||||
return CORE.data.setdefault(ESP32_BLE_TRACKER_REQUIRED_FEATURES_KEY, set())
|
||||
|
||||
|
||||
def _get_registration_counts() -> RegistrationCounts:
|
||||
"""Get the registration counts from CORE.data."""
|
||||
return CORE.data.setdefault(
|
||||
ESP32_BLE_TRACKER_REGISTRATION_COUNTS_KEY, RegistrationCounts()
|
||||
)
|
||||
|
||||
|
||||
def register_ble_features(features: set[BLEFeatures]) -> None:
|
||||
@@ -73,7 +83,7 @@ def register_ble_features(features: set[BLEFeatures]) -> None:
|
||||
Args:
|
||||
features: Set of BLEFeatures enum members
|
||||
"""
|
||||
_required_features.update(features)
|
||||
_get_required_features().update(features)
|
||||
|
||||
|
||||
esp32_ble_tracker_ns = cg.esphome_ns.namespace("esp32_ble_tracker")
|
||||
@@ -267,15 +277,17 @@ async def to_code(config):
|
||||
):
|
||||
register_ble_features({BLEFeatures.ESP_BT_DEVICE})
|
||||
|
||||
registration_counts = _get_registration_counts()
|
||||
|
||||
for conf in config.get(CONF_ON_BLE_ADVERTISE, []):
|
||||
_registration_counts.listeners += 1
|
||||
registration_counts.listeners += 1
|
||||
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
|
||||
if CONF_MAC_ADDRESS in conf:
|
||||
addr_list = [it.as_hex for it in conf[CONF_MAC_ADDRESS]]
|
||||
cg.add(trigger.set_addresses(addr_list))
|
||||
await automation.build_automation(trigger, [(ESPBTDeviceConstRef, "x")], conf)
|
||||
for conf in config.get(CONF_ON_BLE_SERVICE_DATA_ADVERTISE, []):
|
||||
_registration_counts.listeners += 1
|
||||
registration_counts.listeners += 1
|
||||
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
|
||||
if len(conf[CONF_SERVICE_UUID]) == len(bt_uuid16_format):
|
||||
cg.add(trigger.set_service_uuid16(as_hex(conf[CONF_SERVICE_UUID])))
|
||||
@@ -288,7 +300,7 @@ async def to_code(config):
|
||||
cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex))
|
||||
await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf)
|
||||
for conf in config.get(CONF_ON_BLE_MANUFACTURER_DATA_ADVERTISE, []):
|
||||
_registration_counts.listeners += 1
|
||||
registration_counts.listeners += 1
|
||||
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
|
||||
if len(conf[CONF_MANUFACTURER_ID]) == len(bt_uuid16_format):
|
||||
cg.add(trigger.set_manufacturer_uuid16(as_hex(conf[CONF_MANUFACTURER_ID])))
|
||||
@@ -301,7 +313,7 @@ async def to_code(config):
|
||||
cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex))
|
||||
await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf)
|
||||
for conf in config.get(CONF_ON_SCAN_END, []):
|
||||
_registration_counts.listeners += 1
|
||||
registration_counts.listeners += 1
|
||||
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
|
||||
await automation.build_automation(trigger, [], conf)
|
||||
|
||||
@@ -331,19 +343,21 @@ async def to_code(config):
|
||||
@coroutine_with_priority(CoroPriority.FINAL)
|
||||
async def _add_ble_features():
|
||||
# Add feature-specific defines based on what's needed
|
||||
if BLEFeatures.ESP_BT_DEVICE in _required_features:
|
||||
required_features = _get_required_features()
|
||||
if BLEFeatures.ESP_BT_DEVICE in required_features:
|
||||
cg.add_define("USE_ESP32_BLE_DEVICE")
|
||||
cg.add_define("USE_ESP32_BLE_UUID")
|
||||
|
||||
# Add defines for StaticVector sizing based on registration counts
|
||||
# Only define if count > 0 to avoid allocating unnecessary memory
|
||||
if _registration_counts.listeners > 0:
|
||||
registration_counts = _get_registration_counts()
|
||||
if registration_counts.listeners > 0:
|
||||
cg.add_define(
|
||||
"ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT", _registration_counts.listeners
|
||||
"ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT", registration_counts.listeners
|
||||
)
|
||||
if _registration_counts.clients > 0:
|
||||
if registration_counts.clients > 0:
|
||||
cg.add_define(
|
||||
"ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT", _registration_counts.clients
|
||||
"ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT", registration_counts.clients
|
||||
)
|
||||
|
||||
|
||||
@@ -395,7 +409,7 @@ async def register_ble_device(
|
||||
var: cg.SafeExpType, config: ConfigType
|
||||
) -> cg.SafeExpType:
|
||||
register_ble_features({BLEFeatures.ESP_BT_DEVICE})
|
||||
_registration_counts.listeners += 1
|
||||
_get_registration_counts().listeners += 1
|
||||
paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
|
||||
cg.add(paren.register_listener(var))
|
||||
return var
|
||||
@@ -403,7 +417,7 @@ async def register_ble_device(
|
||||
|
||||
async def register_client(var: cg.SafeExpType, config: ConfigType) -> cg.SafeExpType:
|
||||
register_ble_features({BLEFeatures.ESP_BT_DEVICE})
|
||||
_registration_counts.clients += 1
|
||||
_get_registration_counts().clients += 1
|
||||
paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
|
||||
cg.add(paren.register_client(var))
|
||||
return var
|
||||
@@ -417,7 +431,7 @@ async def register_raw_ble_device(
|
||||
This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice
|
||||
will not be compiled in if this is the only registration method used.
|
||||
"""
|
||||
_registration_counts.listeners += 1
|
||||
_get_registration_counts().listeners += 1
|
||||
paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
|
||||
cg.add(paren.register_listener(var))
|
||||
return var
|
||||
@@ -431,7 +445,7 @@ async def register_raw_client(
|
||||
This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice
|
||||
will not be compiled in if this is the only registration method used.
|
||||
"""
|
||||
_registration_counts.clients += 1
|
||||
_get_registration_counts().clients += 1
|
||||
paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
|
||||
cg.add(paren.register_client(var))
|
||||
return var
|
||||
|
||||
@@ -190,7 +190,7 @@ async def to_code(config):
|
||||
cg.add_define("ESPHOME_VARIANT", "ESP8266")
|
||||
cg.add_define(ThreadModel.SINGLE)
|
||||
|
||||
cg.add_platformio_option("extra_scripts", ["post:post_build.py"])
|
||||
cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"])
|
||||
|
||||
conf = config[CONF_FRAMEWORK]
|
||||
cg.add_platformio_option("framework", "arduino")
|
||||
@@ -230,6 +230,12 @@ async def to_code(config):
|
||||
# For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;`
|
||||
cg.add_build_flag("-DNEW_OOM_ABORT")
|
||||
|
||||
# In testing mode, fake a larger IRAM to allow linking grouped component tests
|
||||
# Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB
|
||||
# This is done via a pre-build script that generates a custom linker script
|
||||
if CORE.testing_mode:
|
||||
cg.add_build_flag("-DESPHOME_TESTING_MODE")
|
||||
|
||||
cg.add_platformio_option("board_build.flash_mode", config[CONF_BOARD_FLASH_MODE])
|
||||
|
||||
ver: cv.Version = CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION]
|
||||
@@ -265,3 +271,8 @@ def copy_files():
|
||||
post_build_file,
|
||||
CORE.relative_build_path("post_build.py"),
|
||||
)
|
||||
iram_fix_file = dir / "iram_fix.py.script"
|
||||
copy_file_if_changed(
|
||||
iram_fix_file,
|
||||
CORE.relative_build_path("iram_fix.py"),
|
||||
)
|
||||
|
||||
44
esphome/components/esp8266/iram_fix.py.script
Normal file
44
esphome/components/esp8266/iram_fix.py.script
Normal file
@@ -0,0 +1,44 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
# pylint: disable=E0602
|
||||
Import("env") # noqa
|
||||
|
||||
|
||||
def patch_linker_script_after_preprocess(source, target, env):
|
||||
"""Patch the local linker script after PlatformIO preprocesses it."""
|
||||
# Check if we're in testing mode by looking for the define
|
||||
build_flags = env.get("BUILD_FLAGS", [])
|
||||
testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)
|
||||
|
||||
if not testing_mode:
|
||||
return
|
||||
|
||||
# Get the local linker script path
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld")
|
||||
|
||||
if not os.path.exists(local_ld):
|
||||
return
|
||||
|
||||
# Read the linker script
|
||||
with open(local_ld, "r") as f:
|
||||
content = f.read()
|
||||
|
||||
# Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB)
|
||||
# The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000
|
||||
updated = re.sub(
|
||||
r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000",
|
||||
r"\g<1>0x200000",
|
||||
content,
|
||||
)
|
||||
|
||||
if updated != content:
|
||||
with open(local_ld, "w") as f:
|
||||
f.write(updated)
|
||||
print("ESPHome: Patched IRAM size to 2MB for testing mode")
|
||||
|
||||
|
||||
# Hook into the build process right before linking
|
||||
# This runs after PlatformIO has already preprocessed the linker scripts
|
||||
env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess)
|
||||
@@ -19,6 +19,7 @@ from esphome.const import (
|
||||
from esphome.core import CORE, coroutine_with_priority
|
||||
from esphome.coroutine import CoroPriority
|
||||
import esphome.final_validate as fv
|
||||
from esphome.types import ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -136,11 +137,12 @@ FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate
|
||||
|
||||
|
||||
@coroutine_with_priority(CoroPriority.OTA_UPDATES)
|
||||
async def to_code(config):
|
||||
async def to_code(config: ConfigType) -> None:
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
cg.add(var.set_port(config[CONF_PORT]))
|
||||
|
||||
if CONF_PASSWORD in config:
|
||||
# Password could be set to an empty string and we can assume that means no password
|
||||
if config.get(CONF_PASSWORD):
|
||||
cg.add(var.set_auth_password(config[CONF_PASSWORD]))
|
||||
cg.add_define("USE_OTA_PASSWORD")
|
||||
# Only include hash algorithms when password is configured
|
||||
|
||||
@@ -9,8 +9,8 @@ static const char *const TAG = "htu21d";
|
||||
|
||||
static const uint8_t HTU21D_ADDRESS = 0x40;
|
||||
static const uint8_t HTU21D_REGISTER_RESET = 0xFE;
|
||||
static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xE3;
|
||||
static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xE5;
|
||||
static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xF3;
|
||||
static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xF5;
|
||||
static const uint8_t HTU21D_WRITERHT_REG_CMD = 0xE6; /**< Write RH/T User Register 1 */
|
||||
static const uint8_t HTU21D_REGISTER_STATUS = 0xE7;
|
||||
static const uint8_t HTU21D_WRITEHEATER_REG_CMD = 0x51; /**< Write Heater Control Register */
|
||||
|
||||
@@ -143,7 +143,18 @@ def validate_mclk_divisible_by_3(config):
|
||||
return config
|
||||
|
||||
|
||||
_use_legacy_driver = None
|
||||
# Key for storing legacy driver setting in CORE.data
|
||||
I2S_USE_LEGACY_DRIVER_KEY = "i2s_use_legacy_driver"
|
||||
|
||||
|
||||
def _get_use_legacy_driver():
|
||||
"""Get the legacy driver setting from CORE.data."""
|
||||
return CORE.data.get(I2S_USE_LEGACY_DRIVER_KEY)
|
||||
|
||||
|
||||
def _set_use_legacy_driver(value: bool) -> None:
|
||||
"""Set the legacy driver setting in CORE.data."""
|
||||
CORE.data[I2S_USE_LEGACY_DRIVER_KEY] = value
|
||||
|
||||
|
||||
def i2s_audio_component_schema(
|
||||
@@ -209,17 +220,15 @@ async def register_i2s_audio_component(var, config):
|
||||
|
||||
|
||||
def validate_use_legacy(value):
|
||||
global _use_legacy_driver # noqa: PLW0603
|
||||
if CONF_USE_LEGACY in value:
|
||||
if (_use_legacy_driver is not None) and (
|
||||
_use_legacy_driver != value[CONF_USE_LEGACY]
|
||||
):
|
||||
existing_value = _get_use_legacy_driver()
|
||||
if (existing_value is not None) and (existing_value != value[CONF_USE_LEGACY]):
|
||||
raise cv.Invalid(
|
||||
f"All i2s_audio components must set {CONF_USE_LEGACY} to the same value."
|
||||
)
|
||||
if (not value[CONF_USE_LEGACY]) and (CORE.using_arduino):
|
||||
raise cv.Invalid("Arduino supports only the legacy i2s driver")
|
||||
_use_legacy_driver = value[CONF_USE_LEGACY]
|
||||
_set_use_legacy_driver(value[CONF_USE_LEGACY])
|
||||
return value
|
||||
|
||||
|
||||
@@ -249,7 +258,8 @@ def _final_validate(_):
|
||||
|
||||
|
||||
def use_legacy():
|
||||
return not (CORE.using_esp_idf and not _use_legacy_driver)
|
||||
legacy_driver = _get_use_legacy_driver()
|
||||
return not (CORE.using_esp_idf and not legacy_driver)
|
||||
|
||||
|
||||
FINAL_VALIDATE_SCHEMA = _final_validate
|
||||
|
||||
@@ -35,6 +35,7 @@ CONF_CHARGE = "charge"
|
||||
CONF_CHARGE_COULOMBS = "charge_coulombs"
|
||||
CONF_ENERGY_JOULES = "energy_joules"
|
||||
CONF_TEMPERATURE_COEFFICIENT = "temperature_coefficient"
|
||||
CONF_RESET_ON_BOOT = "reset_on_boot"
|
||||
UNIT_AMPERE_HOURS = "Ah"
|
||||
UNIT_COULOMB = "C"
|
||||
UNIT_JOULE = "J"
|
||||
@@ -113,6 +114,7 @@ INA2XX_SCHEMA = cv.Schema(
|
||||
cv.Optional(CONF_TEMPERATURE_COEFFICIENT, default=0): cv.int_range(
|
||||
min=0, max=16383
|
||||
),
|
||||
cv.Optional(CONF_RESET_ON_BOOT, default=True): cv.boolean,
|
||||
cv.Optional(CONF_SHUNT_VOLTAGE): cv.maybe_simple_value(
|
||||
sensor.sensor_schema(
|
||||
unit_of_measurement=UNIT_MILLIVOLT,
|
||||
@@ -206,6 +208,7 @@ async def setup_ina2xx(var, config):
|
||||
cg.add(var.set_adc_range(config[CONF_ADC_RANGE]))
|
||||
cg.add(var.set_adc_avg_samples(config[CONF_ADC_AVERAGING]))
|
||||
cg.add(var.set_shunt_tempco(config[CONF_TEMPERATURE_COEFFICIENT]))
|
||||
cg.add(var.set_reset_on_boot(config[CONF_RESET_ON_BOOT]))
|
||||
|
||||
adc_time_config = config[CONF_ADC_TIME]
|
||||
if isinstance(adc_time_config, dict):
|
||||
|
||||
@@ -257,7 +257,12 @@ bool INA2XX::reset_energy_counters() {
|
||||
bool INA2XX::reset_config_() {
|
||||
ESP_LOGV(TAG, "Reset");
|
||||
ConfigurationRegister cfg{0};
|
||||
cfg.RST = true;
|
||||
if (!this->reset_on_boot_) {
|
||||
ESP_LOGI(TAG, "Skipping on-boot device reset");
|
||||
cfg.RST = false;
|
||||
} else {
|
||||
cfg.RST = true;
|
||||
}
|
||||
return this->write_unsigned_16_(RegisterMap::REG_CONFIG, cfg.raw_u16);
|
||||
}
|
||||
|
||||
|
||||
@@ -127,6 +127,7 @@ class INA2XX : public PollingComponent {
|
||||
void set_adc_time_die_temperature(AdcTime time) { this->adc_time_die_temperature_ = time; }
|
||||
void set_adc_avg_samples(AdcAvgSamples samples) { this->adc_avg_samples_ = samples; }
|
||||
void set_shunt_tempco(uint16_t coeff) { this->shunt_tempco_ppm_c_ = coeff; }
|
||||
void set_reset_on_boot(bool reset) { this->reset_on_boot_ = reset; }
|
||||
|
||||
void set_shunt_voltage_sensor(sensor::Sensor *sensor) { this->shunt_voltage_sensor_ = sensor; }
|
||||
void set_bus_voltage_sensor(sensor::Sensor *sensor) { this->bus_voltage_sensor_ = sensor; }
|
||||
@@ -172,6 +173,7 @@ class INA2XX : public PollingComponent {
|
||||
AdcTime adc_time_die_temperature_{AdcTime::ADC_TIME_4120US};
|
||||
AdcAvgSamples adc_avg_samples_{AdcAvgSamples::ADC_AVG_SAMPLES_128};
|
||||
uint16_t shunt_tempco_ppm_c_{0};
|
||||
bool reset_on_boot_{true};
|
||||
|
||||
//
|
||||
// Calculated coefficients
|
||||
|
||||
@@ -68,6 +68,9 @@ static constexpr char LOG_LEVEL_LETTER_CHARS[] = {
|
||||
// Maximum header size: 35 bytes fixed + 32 bytes tag + 16 bytes thread name = 83 bytes (45 byte safety margin)
|
||||
static constexpr uint16_t MAX_HEADER_SIZE = 128;
|
||||
|
||||
// "0x" + 2 hex digits per byte + '\0'
|
||||
static constexpr size_t MAX_POINTER_REPRESENTATION = 2 + sizeof(void *) * 2 + 1;
|
||||
|
||||
#if defined(USE_ESP32) || defined(USE_ESP8266) || defined(USE_RP2040) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
|
||||
/** Enum for logging UART selection
|
||||
*
|
||||
@@ -177,8 +180,11 @@ class Logger : public Component {
|
||||
inline void HOT format_log_to_buffer_with_terminator_(uint8_t level, const char *tag, int line, const char *format,
|
||||
va_list args, char *buffer, uint16_t *buffer_at,
|
||||
uint16_t buffer_size) {
|
||||
#if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
|
||||
#if defined(USE_ESP32) || defined(USE_LIBRETINY)
|
||||
this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(), buffer, buffer_at, buffer_size);
|
||||
#elif defined(USE_ZEPHYR)
|
||||
char buff[MAX_POINTER_REPRESENTATION];
|
||||
this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(buff), buffer, buffer_at, buffer_size);
|
||||
#else
|
||||
this->write_header_to_buffer_(level, tag, line, nullptr, buffer, buffer_at, buffer_size);
|
||||
#endif
|
||||
@@ -277,7 +283,11 @@ class Logger : public Component {
|
||||
#endif
|
||||
|
||||
#if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR)
|
||||
const char *HOT get_thread_name_() {
|
||||
const char *HOT get_thread_name_(
|
||||
#ifdef USE_ZEPHYR
|
||||
char *buff
|
||||
#endif
|
||||
) {
|
||||
#ifdef USE_ZEPHYR
|
||||
k_tid_t current_task = k_current_get();
|
||||
#else
|
||||
@@ -291,7 +301,13 @@ class Logger : public Component {
|
||||
#elif defined(USE_LIBRETINY)
|
||||
return pcTaskGetTaskName(current_task);
|
||||
#elif defined(USE_ZEPHYR)
|
||||
return k_thread_name_get(current_task);
|
||||
const char *name = k_thread_name_get(current_task);
|
||||
if (name) {
|
||||
// zephyr print task names only if debug component is present
|
||||
return name;
|
||||
}
|
||||
std::snprintf(buff, MAX_POINTER_REPRESENTATION, "%p", current_task);
|
||||
return buff;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,11 +17,11 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
|
||||
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
auto traits = this->device_->get_traits();
|
||||
// current_temperature_topic
|
||||
if (traits.get_supports_current_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
|
||||
root[MQTT_CURRENT_TEMPERATURE_TOPIC] = this->get_current_temperature_state_topic();
|
||||
}
|
||||
// current_humidity_topic
|
||||
if (traits.get_supports_current_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
|
||||
root[MQTT_CURRENT_HUMIDITY_TOPIC] = this->get_current_humidity_state_topic();
|
||||
}
|
||||
// mode_command_topic
|
||||
@@ -45,7 +45,8 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
|
||||
if (traits.supports_mode(CLIMATE_MODE_HEAT_COOL))
|
||||
modes.add("heat_cool");
|
||||
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
// temperature_low_command_topic
|
||||
root[MQTT_TEMPERATURE_LOW_COMMAND_TOPIC] = this->get_target_temperature_low_command_topic();
|
||||
// temperature_low_state_topic
|
||||
@@ -61,7 +62,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
|
||||
root[MQTT_TEMPERATURE_STATE_TOPIC] = this->get_target_temperature_state_topic();
|
||||
}
|
||||
|
||||
if (traits.get_supports_target_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
|
||||
// target_humidity_command_topic
|
||||
root[MQTT_TARGET_HUMIDITY_COMMAND_TOPIC] = this->get_target_humidity_command_topic();
|
||||
// target_humidity_state_topic
|
||||
@@ -109,7 +110,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
|
||||
presets.add(preset);
|
||||
}
|
||||
|
||||
if (traits.get_supports_action()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
|
||||
// action_topic
|
||||
root[MQTT_ACTION_TOPIC] = this->get_action_state_topic();
|
||||
}
|
||||
@@ -174,7 +175,8 @@ void MQTTClimateComponent::setup() {
|
||||
call.perform();
|
||||
});
|
||||
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
this->subscribe(this->get_target_temperature_low_command_topic(),
|
||||
[this](const std::string &topic, const std::string &payload) {
|
||||
auto val = parse_number<float>(payload);
|
||||
@@ -211,7 +213,7 @@ void MQTTClimateComponent::setup() {
|
||||
});
|
||||
}
|
||||
|
||||
if (traits.get_supports_target_humidity()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
|
||||
this->subscribe(this->get_target_humidity_command_topic(),
|
||||
[this](const std::string &topic, const std::string &payload) {
|
||||
auto val = parse_number<float>(payload);
|
||||
@@ -290,12 +292,14 @@ bool MQTTClimateComponent::publish_state_() {
|
||||
success = false;
|
||||
int8_t target_accuracy = traits.get_target_temperature_accuracy_decimals();
|
||||
int8_t current_accuracy = traits.get_current_temperature_accuracy_decimals();
|
||||
if (traits.get_supports_current_temperature() && !std::isnan(this->device_->current_temperature)) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE) &&
|
||||
!std::isnan(this->device_->current_temperature)) {
|
||||
std::string payload = value_accuracy_to_string(this->device_->current_temperature, current_accuracy);
|
||||
if (!this->publish(this->get_current_temperature_state_topic(), payload))
|
||||
success = false;
|
||||
}
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
std::string payload = value_accuracy_to_string(this->device_->target_temperature_low, target_accuracy);
|
||||
if (!this->publish(this->get_target_temperature_low_state_topic(), payload))
|
||||
success = false;
|
||||
@@ -308,12 +312,14 @@ bool MQTTClimateComponent::publish_state_() {
|
||||
success = false;
|
||||
}
|
||||
|
||||
if (traits.get_supports_current_humidity() && !std::isnan(this->device_->current_humidity)) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY) &&
|
||||
!std::isnan(this->device_->current_humidity)) {
|
||||
std::string payload = value_accuracy_to_string(this->device_->current_humidity, 0);
|
||||
if (!this->publish(this->get_current_humidity_state_topic(), payload))
|
||||
success = false;
|
||||
}
|
||||
if (traits.get_supports_target_humidity() && !std::isnan(this->device_->target_humidity)) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY) &&
|
||||
!std::isnan(this->device_->target_humidity)) {
|
||||
std::string payload = value_accuracy_to_string(this->device_->target_humidity, 0);
|
||||
if (!this->publish(this->get_target_humidity_state_topic(), payload))
|
||||
success = false;
|
||||
@@ -357,7 +363,7 @@ bool MQTTClimateComponent::publish_state_() {
|
||||
success = false;
|
||||
}
|
||||
|
||||
if (traits.get_supports_action()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
|
||||
const char *payload;
|
||||
switch (this->device_->action) {
|
||||
case CLIMATE_ACTION_OFF:
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from esphome import pins
|
||||
@@ -48,6 +49,7 @@ from .gpio import nrf52_pin_to_code # noqa
|
||||
CODEOWNERS = ["@tomaszduda23"]
|
||||
AUTO_LOAD = ["zephyr"]
|
||||
IS_TARGET_PLATFORM = True
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def set_core_data(config: ConfigType) -> ConfigType:
|
||||
@@ -127,6 +129,10 @@ def _validate_mcumgr(config):
|
||||
def _final_validate(config):
|
||||
if CONF_DFU in config:
|
||||
_validate_mcumgr(config)
|
||||
if config[KEY_BOOTLOADER] == BOOTLOADER_ADAFRUIT:
|
||||
_LOGGER.warning(
|
||||
"Selected generic Adafruit bootloader. The board might crash. Consider settings `bootloader:`"
|
||||
)
|
||||
|
||||
|
||||
FINAL_VALIDATE_SCHEMA = _final_validate
|
||||
@@ -157,6 +163,13 @@ async def to_code(config: ConfigType) -> None:
|
||||
if config[KEY_BOOTLOADER] == BOOTLOADER_MCUBOOT:
|
||||
cg.add_define("USE_BOOTLOADER_MCUBOOT")
|
||||
else:
|
||||
if "_sd" in config[KEY_BOOTLOADER]:
|
||||
bootloader = config[KEY_BOOTLOADER].split("_")
|
||||
sd_id = bootloader[2][2:]
|
||||
cg.add_define("USE_SOFTDEVICE_ID", int(sd_id))
|
||||
if (len(bootloader)) > 3:
|
||||
sd_version = bootloader[3][1:]
|
||||
cg.add_define("USE_SOFTDEVICE_VERSION", int(sd_version))
|
||||
# make sure that firmware.zip is created
|
||||
# for Adafruit_nRF52_Bootloader
|
||||
cg.add_platformio_option("board_upload.protocol", "nrfutil")
|
||||
|
||||
@@ -11,10 +11,18 @@ from .const import (
|
||||
BOARDS_ZEPHYR = {
|
||||
"adafruit_itsybitsy_nrf52840": {
|
||||
KEY_BOOTLOADER: [
|
||||
BOOTLOADER_ADAFRUIT_NRF52_SD140_V6,
|
||||
BOOTLOADER_ADAFRUIT,
|
||||
BOOTLOADER_ADAFRUIT_NRF52_SD132,
|
||||
BOOTLOADER_ADAFRUIT_NRF52_SD140_V7,
|
||||
]
|
||||
},
|
||||
"xiao_ble": {
|
||||
KEY_BOOTLOADER: [
|
||||
BOOTLOADER_ADAFRUIT_NRF52_SD140_V7,
|
||||
BOOTLOADER_ADAFRUIT,
|
||||
BOOTLOADER_ADAFRUIT_NRF52_SD132,
|
||||
BOOTLOADER_ADAFRUIT_NRF52_SD140_V6,
|
||||
BOOTLOADER_ADAFRUIT_NRF52_SD140_V7,
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
@@ -66,6 +66,7 @@ from esphome.const import (
|
||||
DEVICE_CLASS_SPEED,
|
||||
DEVICE_CLASS_SULPHUR_DIOXIDE,
|
||||
DEVICE_CLASS_TEMPERATURE,
|
||||
DEVICE_CLASS_TEMPERATURE_DELTA,
|
||||
DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
|
||||
DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS_PARTS,
|
||||
DEVICE_CLASS_VOLTAGE,
|
||||
@@ -130,6 +131,7 @@ DEVICE_CLASSES = [
|
||||
DEVICE_CLASS_SPEED,
|
||||
DEVICE_CLASS_SULPHUR_DIOXIDE,
|
||||
DEVICE_CLASS_TEMPERATURE,
|
||||
DEVICE_CLASS_TEMPERATURE_DELTA,
|
||||
DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
|
||||
DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS_PARTS,
|
||||
DEVICE_CLASS_VOLTAGE,
|
||||
|
||||
@@ -916,7 +916,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
  auto min_temp_value = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
  climate_value_row_(stream, obj, area, node, friendly_name, min_temp, min_temp_value);
  // now check optional traits
  if (traits.get_supports_current_temperature()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
    std::string current_temp = "current_temperature";
    if (std::isnan(obj->current_temperature)) {
      climate_failed_row_(stream, obj, area, node, friendly_name, current_temp, true);
@@ -927,7 +927,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
      climate_failed_row_(stream, obj, area, node, friendly_name, current_temp, false);
    }
  }
  if (traits.get_supports_current_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
    std::string current_humidity = "current_humidity";
    if (std::isnan(obj->current_humidity)) {
      climate_failed_row_(stream, obj, area, node, friendly_name, current_humidity, true);
@@ -938,7 +938,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
      climate_failed_row_(stream, obj, area, node, friendly_name, current_humidity, false);
    }
  }
  if (traits.get_supports_target_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
    std::string target_humidity = "target_humidity";
    if (std::isnan(obj->target_humidity)) {
      climate_failed_row_(stream, obj, area, node, friendly_name, target_humidity, true);
@@ -949,7 +949,8 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
      climate_failed_row_(stream, obj, area, node, friendly_name, target_humidity, false);
    }
  }
  if (traits.get_supports_two_point_target_temperature()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
    std::string target_temp_low = "target_temperature_low";
    auto target_temp_low_value = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
    climate_value_row_(stream, obj, area, node, friendly_name, target_temp_low, target_temp_low_value);
@@ -961,7 +962,7 @@ void PrometheusHandler::climate_row_(AsyncResponseStream *stream, climate::Clima
    auto target_temp_value = value_accuracy_to_string(obj->target_temperature, target_accuracy);
    climate_value_row_(stream, obj, area, node, friendly_name, target_temp, target_temp_value);
  }
  if (traits.get_supports_action()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
    std::string climate_trait_category = "action";
    const auto *climate_trait_value = climate::climate_action_to_string(obj->action);
    climate_setting_row_(stream, obj, area, node, friendly_name, climate_trait_category, climate_trait_value);

@@ -1056,6 +1056,52 @@ async def sony_action(var, config, args):
    cg.add(var.set_nbits(template_))


# Symphony
SymphonyData, SymphonyBinarySensor, SymphonyTrigger, SymphonyAction, SymphonyDumper = (
    declare_protocol("Symphony")
)
SYMPHONY_SCHEMA = cv.Schema(
    {
        cv.Required(CONF_DATA): cv.hex_uint32_t,
        cv.Required(CONF_NBITS): cv.int_range(min=1, max=32),
        cv.Optional(CONF_COMMAND_REPEATS, default=2): cv.uint8_t,
    }
)


@register_binary_sensor("symphony", SymphonyBinarySensor, SYMPHONY_SCHEMA)
def symphony_binary_sensor(var, config):
    cg.add(
        var.set_data(
            cg.StructInitializer(
                SymphonyData,
                ("data", config[CONF_DATA]),
                ("nbits", config[CONF_NBITS]),
            )
        )
    )


@register_trigger("symphony", SymphonyTrigger, SymphonyData)
def symphony_trigger(var, config):
    pass


@register_dumper("symphony", SymphonyDumper)
def symphony_dumper(var, config):
    pass


@register_action("symphony", SymphonyAction, SYMPHONY_SCHEMA)
async def symphony_action(var, config, args):
    template_ = await cg.templatable(config[CONF_DATA], args, cg.uint32)
    cg.add(var.set_data(template_))
    template_ = await cg.templatable(config[CONF_NBITS], args, cg.uint32)
    cg.add(var.set_nbits(template_))
    template_ = await cg.templatable(config[CONF_COMMAND_REPEATS], args, cg.uint8)
    cg.add(var.set_repeats(template_))


# Raw
def validate_raw_alternating(value):
    assert isinstance(value, list)

120  esphome/components/remote_base/symphony_protocol.cpp  Normal file
@@ -0,0 +1,120 @@
#include "symphony_protocol.h"
#include "esphome/core/log.h"

namespace esphome {
namespace remote_base {

static const char *const TAG = "remote.symphony";

// Reference implementation and timing details:
// IRremoteESP8266 ir_Symphony.cpp
// https://github.com/crankyoldgit/IRremoteESP8266/blob/master/src/ir_Symphony.cpp
// The implementation below mirrors the constant bit-time mapping and
// footer-gap handling used there.

// Symphony protocol timing specifications (tuned to handset captures)
static const uint32_t BIT_ZERO_HIGH_US = 460;   // short
static const uint32_t BIT_ZERO_LOW_US = 1260;   // long
static const uint32_t BIT_ONE_HIGH_US = 1260;   // long
static const uint32_t BIT_ONE_LOW_US = 460;     // short
static const uint32_t CARRIER_FREQUENCY = 38000;

// IRremoteESP8266 reference: kSymphonyFooterGap = 4 * (mark + space)
static const uint32_t FOOTER_GAP_US = 4 * (BIT_ZERO_HIGH_US + BIT_ZERO_LOW_US);
// Typical inter-frame gap (~34.8 ms observed)
static const uint32_t INTER_FRAME_GAP_US = 34760;

void SymphonyProtocol::encode(RemoteTransmitData *dst, const SymphonyData &data) {
  dst->set_carrier_frequency(CARRIER_FREQUENCY);
  ESP_LOGD(TAG, "Sending Symphony: data=0x%0*X nbits=%u repeats=%u", (data.nbits + 3) / 4, (uint32_t) data.data,
           data.nbits, data.repeats);
  // Each bit produces a mark+space (2 entries). We fold the inter-frame/footer gap
  // into the last bit's space of each frame to avoid over-length gaps.
  dst->reserve(data.nbits * 2u * data.repeats);

  for (uint8_t repeats = 0; repeats < data.repeats; repeats++) {
    // Data bits (MSB first)
    for (uint32_t mask = 1UL << (data.nbits - 1); mask != 0; mask >>= 1) {
      const bool is_last_bit = (mask == 1);
      const bool is_last_frame = (repeats == (data.repeats - 1));
      if (is_last_bit) {
        // Emit last bit's mark; replace its space with the proper gap
        if (data.data & mask) {
          dst->mark(BIT_ONE_HIGH_US);
        } else {
          dst->mark(BIT_ZERO_HIGH_US);
        }
        dst->space(is_last_frame ? FOOTER_GAP_US : INTER_FRAME_GAP_US);
      } else {
        if (data.data & mask) {
          dst->item(BIT_ONE_HIGH_US, BIT_ONE_LOW_US);
        } else {
          dst->item(BIT_ZERO_HIGH_US, BIT_ZERO_LOW_US);
        }
      }
    }
  }
}

optional<SymphonyData> SymphonyProtocol::decode(RemoteReceiveData src) {
  auto is_valid_len = [](uint8_t nbits) -> bool { return nbits == 8 || nbits == 12 || nbits == 16; };

  RemoteReceiveData s = src;  // copy
  SymphonyData out{0, 0, 1};

  for (; out.nbits < 32; out.nbits++) {
    if (s.expect_mark(BIT_ONE_HIGH_US)) {
      if (!s.expect_space(BIT_ONE_LOW_US)) {
        // Allow footer gap immediately after the last mark
        if (s.peek_space_at_least(FOOTER_GAP_US)) {
          uint8_t bits_with_this = out.nbits + 1;
          if (is_valid_len(bits_with_this)) {
            out.data = (out.data << 1UL) | 1UL;
            out.nbits = bits_with_this;
            return out;
          }
        }
        return {};
      }
      // Successfully consumed a '1' bit (mark + space)
      out.data = (out.data << 1UL) | 1UL;
      continue;
    } else if (s.expect_mark(BIT_ZERO_HIGH_US)) {
      if (!s.expect_space(BIT_ZERO_LOW_US)) {
        // Allow footer gap immediately after the last mark
        if (s.peek_space_at_least(FOOTER_GAP_US)) {
          uint8_t bits_with_this = out.nbits + 1;
          if (is_valid_len(bits_with_this)) {
            out.data = (out.data << 1UL) | 0UL;
            out.nbits = bits_with_this;
            return out;
          }
        }
        return {};
      }
      // Successfully consumed a '0' bit (mark + space)
      out.data = (out.data << 1UL) | 0UL;
      continue;
    } else {
      // Completed a valid-length frame followed by a footer gap
      if (is_valid_len(out.nbits) && s.peek_space_at_least(FOOTER_GAP_US)) {
        return out;
      }
      return {};
    }
  }

  if (is_valid_len(out.nbits) && s.peek_space_at_least(FOOTER_GAP_US)) {
    return out;
  }

  return {};
}

void SymphonyProtocol::dump(const SymphonyData &data) {
  const int32_t hex_width = (data.nbits + 3) / 4;  // pad to nibble width
  ESP_LOGI(TAG, "Received Symphony: data=0x%0*X, nbits=%d", hex_width, (uint32_t) data.data, data.nbits);
}

}  // namespace remote_base
}  // namespace esphome

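To make the timing bookkeeping above concrete, here is a minimal sketch (plain Python, not part of the component) that derives the on-air duration of a transmission from the constants defined in this file; the all-zero 12-bit payload and two repeats are illustrative assumptions only:

    # Hypothetical worked example using the constants from symphony_protocol.cpp
    BIT_SHORT_US, BIT_LONG_US = 460, 1260               # every bit is one short + one long segment
    FOOTER_GAP_US = 4 * (BIT_SHORT_US + BIT_LONG_US)    # 6880 us, matches kSymphonyFooterGap
    INTER_FRAME_GAP_US = 34760

    nbits, repeats = 12, 2
    bit_us = BIT_SHORT_US + BIT_LONG_US                  # 1720 us per full bit
    # encode() drops the last bit's space and substitutes the inter-frame/footer gap;
    # for all-zero data the last mark is the short (460 us) segment
    frame_body_us = (nbits - 1) * bit_us + BIT_SHORT_US  # 19380 us per frame body
    total_us = repeats * frame_body_us + (repeats - 1) * INTER_FRAME_GAP_US + FOOTER_GAP_US
    print(frame_body_us, total_us)                       # 19380, 80400 (~80 ms for two repeats)
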
44  esphome/components/remote_base/symphony_protocol.h  Normal file
@@ -0,0 +1,44 @@
#pragma once

#include "esphome/core/component.h"
#include "remote_base.h"

#include <cinttypes>

namespace esphome {
namespace remote_base {

struct SymphonyData {
  uint32_t data;
  uint8_t nbits;
  uint8_t repeats{1};

  bool operator==(const SymphonyData &rhs) const { return data == rhs.data && nbits == rhs.nbits; }
};

class SymphonyProtocol : public RemoteProtocol<SymphonyData> {
 public:
  void encode(RemoteTransmitData *dst, const SymphonyData &data) override;
  optional<SymphonyData> decode(RemoteReceiveData src) override;
  void dump(const SymphonyData &data) override;
};

DECLARE_REMOTE_PROTOCOL(Symphony)

template<typename... Ts> class SymphonyAction : public RemoteTransmitterActionBase<Ts...> {
 public:
  TEMPLATABLE_VALUE(uint32_t, data)
  TEMPLATABLE_VALUE(uint8_t, nbits)
  TEMPLATABLE_VALUE(uint8_t, repeats)

  void encode(RemoteTransmitData *dst, Ts... x) override {
    SymphonyData data{};
    data.data = this->data_.value(x...);
    data.nbits = this->nbits_.value(x...);
    data.repeats = this->repeats_.value(x...);
    SymphonyProtocol().encode(dst, data);
  }
};

}  // namespace remote_base
}  // namespace esphome

@@ -81,7 +81,7 @@ CONFIG_SCHEMA = (
                cv.int_range(min=0, max=0xFFFF, max_included=False),
            ),
            cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION): cv.pressure,
            cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature,
            cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature_delta,
            cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION_SOURCE): cv.use_id(
                sensor.Sensor
            ),

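The one-line change above (cv.temperature to cv.temperature_delta) matters when the offset is entered in Fahrenheit: an offset is a temperature difference, so it has to be converted with the delta formula rather than the absolute-temperature formula. A minimal sketch of the arithmetic, assuming the usual °F/°C conversions (illustration only, not the esphome validator code):

    # Hypothetical illustration: interpreting "7.2°F" as an offset
    f_value = 7.2
    as_absolute_temp = (f_value - 32) * 5 / 9   # -13.8 °C  (wrong for an offset)
    as_temperature_delta = f_value * 5 / 9      #   4.0 °C  (correct for an offset)
    print(round(as_absolute_temp, 1), round(as_temperature_delta, 1))
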
@@ -89,6 +89,7 @@ from esphome.const import (
    DEVICE_CLASS_SPEED,
    DEVICE_CLASS_SULPHUR_DIOXIDE,
    DEVICE_CLASS_TEMPERATURE,
    DEVICE_CLASS_TEMPERATURE_DELTA,
    DEVICE_CLASS_TIMESTAMP,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS_PARTS,
@@ -157,6 +158,7 @@ DEVICE_CLASSES = [
    DEVICE_CLASS_SPEED,
    DEVICE_CLASS_SULPHUR_DIOXIDE,
    DEVICE_CLASS_TEMPERATURE,
    DEVICE_CLASS_TEMPERATURE_DELTA,
    DEVICE_CLASS_TIMESTAMP,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS_PARTS,

@@ -28,21 +28,6 @@
namespace esphome {
namespace statsd {

using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR };

using sensors_t = struct {
  const char *name;
  sensor_type_t type;
  union {
#ifdef USE_SENSOR
    esphome::sensor::Sensor *sensor;
#endif
#ifdef USE_BINARY_SENSOR
    esphome::binary_sensor::BinarySensor *binary_sensor;
#endif
  };
};

class StatsdComponent : public PollingComponent {
 public:
  ~StatsdComponent();
@@ -71,6 +56,20 @@ class StatsdComponent : public PollingComponent {
  const char *prefix_;
  uint16_t port_;

  using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR };
  using sensors_t = struct {
    const char *name;
    sensor_type_t type;
    union {
#ifdef USE_SENSOR
      esphome::sensor::Sensor *sensor;
#endif
#ifdef USE_BINARY_SENSOR
      esphome::binary_sensor::BinarySensor *binary_sensor;
#endif
    };
  };

  std::vector<sensors_t> sensors_;

#ifdef USE_ESP8266

@@ -1,7 +1,7 @@
import logging

from esphome import core
from esphome.config_helpers import Extend, Remove, merge_config
from esphome.config_helpers import Extend, Remove, merge_config, merge_dicts_ordered
import esphome.config_validation as cv
from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS
from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base
@@ -170,10 +170,10 @@ def do_substitution_pass(config, command_line_substitutions, ignore_missing=Fals
        return

    # Merge substitutions in config, overriding with substitutions coming from command line:
    substitutions = {
        **config.get(CONF_SUBSTITUTIONS, {}),
        **(command_line_substitutions or {}),
    }
    # Use merge_dicts_ordered to preserve OrderedDict type for move_to_end()
    substitutions = merge_dicts_ordered(
        config.get(CONF_SUBSTITUTIONS, {}), command_line_substitutions or {}
    )
    with cv.prepend_path("substitutions"):
        if not isinstance(substitutions, dict):
            raise cv.Invalid(

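The comment above is the whole point of the change: `{**a, **b}` always builds a plain dict, so an OrderedDict-only method such as move_to_end() is lost after the merge. A minimal sketch of the behaviour, with merge_dicts_ordered approximated by a hypothetical stand-in (the real helper lives in esphome.config_helpers):

    from collections import OrderedDict

    def merge_ordered(a, b):  # hypothetical stand-in for merge_dicts_ordered
        merged = OrderedDict(a)
        merged.update(b)
        return merged

    cfg = OrderedDict(name="kitchen", board="esp32dev")
    cli = {"name": "kitchen-test"}

    plain = {**cfg, **cli}             # plain dict: no move_to_end()
    ordered = merge_ordered(cfg, cli)  # OrderedDict: move_to_end() still works
    ordered.move_to_end("name")
    print(type(plain).__name__, list(ordered))
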
@@ -71,9 +71,14 @@ from esphome.const import (
|
||||
CONF_VISUAL,
|
||||
)
|
||||
|
||||
CONF_PRESET_CHANGE = "preset_change"
|
||||
CONF_DEFAULT_PRESET = "default_preset"
|
||||
CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION = "humidity_control_dehumidify_action"
|
||||
CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION = "humidity_control_humidify_action"
|
||||
CONF_HUMIDITY_CONTROL_OFF_ACTION = "humidity_control_off_action"
|
||||
CONF_HUMIDITY_HYSTERESIS = "humidity_hysteresis"
|
||||
CONF_ON_BOOT_RESTORE_FROM = "on_boot_restore_from"
|
||||
CONF_PRESET_CHANGE = "preset_change"
|
||||
CONF_TARGET_HUMIDITY_CHANGE_ACTION = "target_humidity_change_action"
|
||||
|
||||
CODEOWNERS = ["@kbx81"]
|
||||
|
||||
@@ -241,6 +246,14 @@ def validate_thermostat(config):
|
||||
CONF_MAX_HEATING_RUN_TIME,
|
||||
CONF_SUPPLEMENTAL_HEATING_ACTION,
|
||||
],
|
||||
CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION: [
|
||||
CONF_HUMIDITY_CONTROL_OFF_ACTION,
|
||||
CONF_HUMIDITY_SENSOR,
|
||||
],
|
||||
CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION: [
|
||||
CONF_HUMIDITY_CONTROL_OFF_ACTION,
|
||||
CONF_HUMIDITY_SENSOR,
|
||||
],
|
||||
}
|
||||
for config_trigger, req_triggers in requirements.items():
|
||||
for req_trigger in req_triggers:
|
||||
@@ -338,7 +351,7 @@ def validate_thermostat(config):
|
||||
# Warn about using the removed CONF_DEFAULT_MODE and advise users
|
||||
if CONF_DEFAULT_MODE in config and config[CONF_DEFAULT_MODE] is not None:
|
||||
raise cv.Invalid(
|
||||
f"{CONF_DEFAULT_MODE} is no longer valid. Please switch to using presets and specify a {CONF_DEFAULT_PRESET}."
|
||||
f"{CONF_DEFAULT_MODE} is no longer valid. Please switch to using presets and specify a {CONF_DEFAULT_PRESET}"
|
||||
)
|
||||
|
||||
default_mode = config[CONF_DEFAULT_MODE]
|
||||
@@ -588,9 +601,24 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Optional(CONF_SWING_VERTICAL_ACTION): automation.validate_automation(
|
||||
single=True
|
||||
),
|
||||
cv.Optional(
|
||||
CONF_TARGET_HUMIDITY_CHANGE_ACTION
|
||||
): automation.validate_automation(single=True),
|
||||
cv.Optional(
|
||||
CONF_TARGET_TEMPERATURE_CHANGE_ACTION
|
||||
): automation.validate_automation(single=True),
|
||||
cv.Exclusive(
|
||||
CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION,
|
||||
group_of_exclusion="humidity_control",
|
||||
): automation.validate_automation(single=True),
|
||||
cv.Exclusive(
|
||||
CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION,
|
||||
group_of_exclusion="humidity_control",
|
||||
): automation.validate_automation(single=True),
|
||||
cv.Optional(
|
||||
CONF_HUMIDITY_CONTROL_OFF_ACTION
|
||||
): automation.validate_automation(single=True),
|
||||
cv.Optional(CONF_HUMIDITY_HYSTERESIS, default=1.0): cv.percentage,
|
||||
cv.Optional(CONF_DEFAULT_MODE, default=None): cv.valid,
|
||||
cv.Optional(CONF_DEFAULT_PRESET): cv.templatable(cv.string),
|
||||
cv.Optional(CONF_DEFAULT_TARGET_TEMPERATURE_HIGH): cv.temperature,
|
||||
@@ -882,12 +910,39 @@ async def to_code(config):
|
||||
config[CONF_SWING_VERTICAL_ACTION],
|
||||
)
|
||||
cg.add(var.set_supports_swing_mode_vertical(True))
|
||||
if CONF_TARGET_HUMIDITY_CHANGE_ACTION in config:
|
||||
await automation.build_automation(
|
||||
var.get_humidity_change_trigger(),
|
||||
[],
|
||||
config[CONF_TARGET_HUMIDITY_CHANGE_ACTION],
|
||||
)
|
||||
if CONF_TARGET_TEMPERATURE_CHANGE_ACTION in config:
|
||||
await automation.build_automation(
|
||||
var.get_temperature_change_trigger(),
|
||||
[],
|
||||
config[CONF_TARGET_TEMPERATURE_CHANGE_ACTION],
|
||||
)
|
||||
if CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION in config:
|
||||
cg.add(var.set_supports_dehumidification(True))
|
||||
await automation.build_automation(
|
||||
var.get_humidity_control_dehumidify_action_trigger(),
|
||||
[],
|
||||
config[CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION],
|
||||
)
|
||||
if CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION in config:
|
||||
cg.add(var.set_supports_humidification(True))
|
||||
await automation.build_automation(
|
||||
var.get_humidity_control_humidify_action_trigger(),
|
||||
[],
|
||||
config[CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION],
|
||||
)
|
||||
if CONF_HUMIDITY_CONTROL_OFF_ACTION in config:
|
||||
await automation.build_automation(
|
||||
var.get_humidity_control_off_action_trigger(),
|
||||
[],
|
||||
config[CONF_HUMIDITY_CONTROL_OFF_ACTION],
|
||||
)
|
||||
cg.add(var.set_humidity_hysteresis(config[CONF_HUMIDITY_HYSTERESIS]))
|
||||
|
||||
if CONF_PRESET in config:
|
||||
for preset_config in config[CONF_PRESET]:
|
||||
|
||||
@@ -32,6 +32,7 @@ void ThermostatClimate::setup() {
|
||||
if (this->humidity_sensor_ != nullptr) {
|
||||
this->humidity_sensor_->add_on_state_callback([this](float state) {
|
||||
this->current_humidity = state;
|
||||
this->switch_to_humidity_control_action_(this->compute_humidity_control_action_());
|
||||
this->publish_state();
|
||||
});
|
||||
this->current_humidity = this->humidity_sensor_->state;
|
||||
@@ -84,6 +85,8 @@ void ThermostatClimate::refresh() {
|
||||
this->switch_to_supplemental_action_(this->compute_supplemental_action_());
|
||||
this->switch_to_fan_mode_(this->fan_mode.value(), false);
|
||||
this->switch_to_swing_mode_(this->swing_mode, false);
|
||||
this->switch_to_humidity_control_action_(this->compute_humidity_control_action_());
|
||||
this->check_humidity_change_trigger_();
|
||||
this->check_temperature_change_trigger_();
|
||||
this->publish_state();
|
||||
}
|
||||
@@ -129,6 +132,11 @@ bool ThermostatClimate::hysteresis_valid() {
|
||||
return true;
|
||||
}
|
||||
|
||||
bool ThermostatClimate::humidity_hysteresis_valid() {
|
||||
return !std::isnan(this->humidity_hysteresis_) && this->humidity_hysteresis_ >= 0.0f &&
|
||||
this->humidity_hysteresis_ < 100.0f;
|
||||
}
|
||||
|
||||
bool ThermostatClimate::limit_setpoints_for_heat_cool() {
|
||||
return this->mode == climate::CLIMATE_MODE_HEAT_COOL ||
|
||||
(this->mode == climate::CLIMATE_MODE_AUTO && this->supports_heat_cool_);
|
||||
@@ -189,6 +197,16 @@ void ThermostatClimate::validate_target_temperature_high() {
|
||||
}
|
||||
}
|
||||
|
||||
void ThermostatClimate::validate_target_humidity() {
|
||||
if (std::isnan(this->target_humidity)) {
|
||||
this->target_humidity =
|
||||
(this->get_traits().get_visual_max_humidity() - this->get_traits().get_visual_min_humidity()) / 2.0f;
|
||||
} else {
|
||||
this->target_humidity = clamp<float>(this->target_humidity, this->get_traits().get_visual_min_humidity(),
|
||||
this->get_traits().get_visual_max_humidity());
|
||||
}
|
||||
}
|
||||
|
||||
void ThermostatClimate::control(const climate::ClimateCall &call) {
|
||||
bool target_temperature_high_changed = false;
|
||||
|
||||
@@ -235,15 +253,27 @@ void ThermostatClimate::control(const climate::ClimateCall &call) {
|
||||
this->validate_target_temperature();
|
||||
}
|
||||
}
|
||||
if (call.get_target_humidity().has_value()) {
|
||||
this->target_humidity = call.get_target_humidity().value();
|
||||
this->validate_target_humidity();
|
||||
}
|
||||
// make any changes happen
|
||||
this->refresh();
|
||||
}
|
||||
|
||||
climate::ClimateTraits ThermostatClimate::traits() {
|
||||
auto traits = climate::ClimateTraits();
|
||||
traits.set_supports_current_temperature(true);
|
||||
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION | climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
|
||||
if (this->supports_two_points_)
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE);
|
||||
|
||||
if (this->humidity_sensor_ != nullptr)
|
||||
traits.set_supports_current_humidity(true);
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
|
||||
|
||||
if (this->supports_humidification_ || this->supports_dehumidification_)
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);
|
||||
|
||||
if (this->supports_auto_)
|
||||
traits.add_supported_mode(climate::CLIMATE_MODE_AUTO);
|
||||
@@ -294,9 +324,6 @@ climate::ClimateTraits ThermostatClimate::traits() {
|
||||
for (auto &it : this->custom_preset_config_) {
|
||||
traits.add_supported_custom_preset(it.first);
|
||||
}
|
||||
|
||||
traits.set_supports_two_point_target_temperature(this->supports_two_points_);
|
||||
traits.set_supports_action(true);
|
||||
return traits;
|
||||
}
|
||||
|
||||
@@ -421,6 +448,28 @@ climate::ClimateAction ThermostatClimate::compute_supplemental_action_() {
|
||||
return target_action;
|
||||
}
|
||||
|
||||
HumidificationAction ThermostatClimate::compute_humidity_control_action_() {
|
||||
auto target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
|
||||
// if hysteresis value or current_humidity is not valid, we go to OFF
|
||||
if (std::isnan(this->current_humidity) || !this->humidity_hysteresis_valid()) {
|
||||
return THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
|
||||
}
|
||||
|
||||
// ensure set point is valid before computing the action
|
||||
this->validate_target_humidity();
|
||||
// everything has been validated so we can now safely compute the action
|
||||
if (this->dehumidification_required_() && this->humidification_required_()) {
|
||||
// this is bad and should never happen, so just stop.
|
||||
// target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
|
||||
} else if (this->supports_dehumidification_ && this->dehumidification_required_()) {
|
||||
target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY;
|
||||
} else if (this->supports_humidification_ && this->humidification_required_()) {
|
||||
target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY;
|
||||
}
|
||||
|
||||
return target_action;
|
||||
}
|
||||
|
||||
void ThermostatClimate::switch_to_action_(climate::ClimateAction action, bool publish_state) {
|
||||
// setup_complete_ helps us ensure an action is called immediately after boot
|
||||
if ((action == this->action) && this->setup_complete_) {
|
||||
@@ -594,6 +643,44 @@ void ThermostatClimate::trigger_supplemental_action_() {
|
||||
}
|
||||
}
|
||||
|
||||
void ThermostatClimate::switch_to_humidity_control_action_(HumidificationAction action) {
|
||||
// setup_complete_ helps us ensure an action is called immediately after boot
|
||||
if ((action == this->humidification_action_) && this->setup_complete_) {
|
||||
// already in target mode
|
||||
return;
|
||||
}
|
||||
|
||||
Trigger<> *trig = this->humidity_control_off_action_trigger_;
|
||||
switch (action) {
|
||||
case THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF:
|
||||
// trig = this->humidity_control_off_action_trigger_;
|
||||
ESP_LOGVV(TAG, "Switching to HUMIDIFICATION_OFF action");
|
||||
break;
|
||||
case THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY:
|
||||
trig = this->humidity_control_dehumidify_action_trigger_;
|
||||
ESP_LOGVV(TAG, "Switching to DEHUMIDIFY action");
|
||||
break;
|
||||
case THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY:
|
||||
trig = this->humidity_control_humidify_action_trigger_;
|
||||
ESP_LOGVV(TAG, "Switching to HUMIDIFY action");
|
||||
break;
|
||||
case THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE:
|
||||
default:
|
||||
action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
|
||||
// trig = this->humidity_control_off_action_trigger_;
|
||||
}
|
||||
|
||||
if (this->prev_humidity_control_trigger_ != nullptr) {
|
||||
this->prev_humidity_control_trigger_->stop_action();
|
||||
this->prev_humidity_control_trigger_ = nullptr;
|
||||
}
|
||||
this->humidification_action_ = action;
|
||||
this->prev_humidity_control_trigger_ = trig;
|
||||
if (trig != nullptr) {
|
||||
trig->trigger();
|
||||
}
|
||||
}
|
||||
|
||||
void ThermostatClimate::switch_to_fan_mode_(climate::ClimateFanMode fan_mode, bool publish_state) {
|
||||
// setup_complete_ helps us ensure an action is called immediately after boot
|
||||
if ((fan_mode == this->prev_fan_mode_) && this->setup_complete_) {
|
||||
@@ -885,6 +972,20 @@ void ThermostatClimate::idle_on_timer_callback_() {
|
||||
this->switch_to_supplemental_action_(this->compute_supplemental_action_());
|
||||
}
|
||||
|
||||
void ThermostatClimate::check_humidity_change_trigger_() {
|
||||
if ((this->prev_target_humidity_ == this->target_humidity) && this->setup_complete_) {
|
||||
return; // nothing changed, no reason to trigger
|
||||
} else {
|
||||
// save the new humidity so we can check it again later; the trigger will fire below
|
||||
this->prev_target_humidity_ = this->target_humidity;
|
||||
}
|
||||
// trigger the action
|
||||
Trigger<> *trig = this->humidity_change_trigger_;
|
||||
if (trig != nullptr) {
|
||||
trig->trigger();
|
||||
}
|
||||
}
|
||||
|
||||
void ThermostatClimate::check_temperature_change_trigger_() {
|
||||
if (this->supports_two_points_) {
|
||||
// setup_complete_ helps us ensure an action is called immediately after boot
|
||||
@@ -994,6 +1095,32 @@ bool ThermostatClimate::supplemental_heating_required_() {
|
||||
(this->supplemental_action_ == climate::CLIMATE_ACTION_HEATING));
|
||||
}
|
||||
|
||||
bool ThermostatClimate::dehumidification_required_() {
|
||||
if (this->current_humidity > this->target_humidity + this->humidity_hysteresis_) {
|
||||
// if the current humidity exceeds the target + hysteresis, dehumidification is required
|
||||
return true;
|
||||
} else if (this->current_humidity < this->target_humidity - this->humidity_hysteresis_) {
|
||||
// if the current humidity is less than the target - hysteresis, dehumidification should stop
|
||||
return false;
|
||||
}
|
||||
// if we get here, the current humidity is between target + hysteresis and target - hysteresis,
|
||||
// so the action should not change
|
||||
return this->humidification_action_ == THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY;
|
||||
}
|
||||
|
||||
bool ThermostatClimate::humidification_required_() {
|
||||
if (this->current_humidity < this->target_humidity - this->humidity_hysteresis_) {
|
||||
// if the current humidity is below the target - hysteresis, humidification is required
|
||||
return true;
|
||||
} else if (this->current_humidity > this->target_humidity + this->humidity_hysteresis_) {
|
||||
// if the current humidity is above the target + hysteresis, humidification should stop
|
||||
return false;
|
||||
}
|
||||
// if we get here, the current humidity is between target - hysteresis and target + hysteresis,
|
||||
// so the action should not change
|
||||
return this->humidification_action_ == THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY;
|
||||
}
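The two helpers above implement a symmetric hysteresis band around the humidity set point: outside target ± hysteresis the action flips, inside the band the previous action is kept. A small sketch of that decision (plain Python mirroring the C++ logic for the dehumidify case only; the numbers are illustrative):

    def dehumidify_required(current, target, hysteresis, currently_dehumidifying):
        # mirrors ThermostatClimate::dehumidification_required_()
        if current > target + hysteresis:
            return True                      # too humid: start/keep dehumidifying
        if current < target - hysteresis:
            return False                     # dry enough: stop
        return currently_dehumidifying       # inside the band: keep previous action

    # target 50 %, hysteresis 2 %: turns on above 52 %, off below 48 %, holds in between
    print(dehumidify_required(53, 50, 2, False))  # True
    print(dehumidify_required(49, 50, 2, True))   # True (still inside the band)
    print(dehumidify_required(47, 50, 2, True))   # False
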
|
||||
|
||||
void ThermostatClimate::dump_preset_config_(const char *preset_name, const ThermostatClimateTargetTempConfig &config) {
|
||||
if (this->supports_heat_) {
|
||||
ESP_LOGCONFIG(TAG, " Default Target Temperature Low: %.1f°C",
|
||||
@@ -1150,8 +1277,12 @@ ThermostatClimate::ThermostatClimate()
|
||||
swing_mode_off_trigger_(new Trigger<>()),
|
||||
swing_mode_horizontal_trigger_(new Trigger<>()),
|
||||
swing_mode_vertical_trigger_(new Trigger<>()),
|
||||
humidity_change_trigger_(new Trigger<>()),
|
||||
temperature_change_trigger_(new Trigger<>()),
|
||||
preset_change_trigger_(new Trigger<>()) {}
|
||||
preset_change_trigger_(new Trigger<>()),
|
||||
humidity_control_dehumidify_action_trigger_(new Trigger<>()),
|
||||
humidity_control_humidify_action_trigger_(new Trigger<>()),
|
||||
humidity_control_off_action_trigger_(new Trigger<>()) {}
|
||||
|
||||
void ThermostatClimate::set_default_preset(const std::string &custom_preset) {
|
||||
this->default_custom_preset_ = custom_preset;
|
||||
@@ -1215,6 +1346,9 @@ void ThermostatClimate::set_sensor(sensor::Sensor *sensor) { this->sensor_ = sen
|
||||
void ThermostatClimate::set_humidity_sensor(sensor::Sensor *humidity_sensor) {
|
||||
this->humidity_sensor_ = humidity_sensor;
|
||||
}
|
||||
void ThermostatClimate::set_humidity_hysteresis(float humidity_hysteresis) {
|
||||
this->humidity_hysteresis_ = std::clamp<float>(humidity_hysteresis, 0.0f, 100.0f);
|
||||
}
|
||||
void ThermostatClimate::set_use_startup_delay(bool use_startup_delay) { this->use_startup_delay_ = use_startup_delay; }
|
||||
void ThermostatClimate::set_supports_heat_cool(bool supports_heat_cool) {
|
||||
this->supports_heat_cool_ = supports_heat_cool;
|
||||
@@ -1282,6 +1416,18 @@ void ThermostatClimate::set_supports_swing_mode_vertical(bool supports_swing_mod
|
||||
void ThermostatClimate::set_supports_two_points(bool supports_two_points) {
|
||||
this->supports_two_points_ = supports_two_points;
|
||||
}
|
||||
void ThermostatClimate::set_supports_dehumidification(bool supports_dehumidification) {
|
||||
this->supports_dehumidification_ = supports_dehumidification;
|
||||
if (supports_dehumidification) {
|
||||
this->supports_humidification_ = false;
|
||||
}
|
||||
}
|
||||
void ThermostatClimate::set_supports_humidification(bool supports_humidification) {
|
||||
this->supports_humidification_ = supports_humidification;
|
||||
if (supports_humidification) {
|
||||
this->supports_dehumidification_ = false;
|
||||
}
|
||||
}
|
||||
|
||||
Trigger<> *ThermostatClimate::get_cool_action_trigger() const { return this->cool_action_trigger_; }
|
||||
Trigger<> *ThermostatClimate::get_supplemental_cool_action_trigger() const {
|
||||
@@ -1315,8 +1461,18 @@ Trigger<> *ThermostatClimate::get_swing_mode_both_trigger() const { return this-
|
||||
Trigger<> *ThermostatClimate::get_swing_mode_off_trigger() const { return this->swing_mode_off_trigger_; }
|
||||
Trigger<> *ThermostatClimate::get_swing_mode_horizontal_trigger() const { return this->swing_mode_horizontal_trigger_; }
|
||||
Trigger<> *ThermostatClimate::get_swing_mode_vertical_trigger() const { return this->swing_mode_vertical_trigger_; }
|
||||
Trigger<> *ThermostatClimate::get_humidity_change_trigger() const { return this->humidity_change_trigger_; }
|
||||
Trigger<> *ThermostatClimate::get_temperature_change_trigger() const { return this->temperature_change_trigger_; }
|
||||
Trigger<> *ThermostatClimate::get_preset_change_trigger() const { return this->preset_change_trigger_; }
|
||||
Trigger<> *ThermostatClimate::get_humidity_control_dehumidify_action_trigger() const {
|
||||
return this->humidity_control_dehumidify_action_trigger_;
|
||||
}
|
||||
Trigger<> *ThermostatClimate::get_humidity_control_humidify_action_trigger() const {
|
||||
return this->humidity_control_humidify_action_trigger_;
|
||||
}
|
||||
Trigger<> *ThermostatClimate::get_humidity_control_off_action_trigger() const {
|
||||
return this->humidity_control_off_action_trigger_;
|
||||
}
|
||||
|
||||
void ThermostatClimate::dump_config() {
|
||||
LOG_CLIMATE("", "Thermostat", this);
|
||||
@@ -1420,7 +1576,12 @@ void ThermostatClimate::dump_config() {
|
||||
" OFF: %s\n"
|
||||
" HORIZONTAL: %s\n"
|
||||
" VERTICAL: %s\n"
|
||||
" Supports TWO SET POINTS: %s",
|
||||
" Supports TWO SET POINTS: %s\n"
|
||||
" Supported Humidity Parameters:\n"
|
||||
" CURRENT: %s\n"
|
||||
" TARGET: %s\n"
|
||||
" DEHUMIDIFICATION: %s\n"
|
||||
" HUMIDIFICATION: %s",
|
||||
YESNO(this->supports_fan_mode_on_), YESNO(this->supports_fan_mode_off_),
|
||||
YESNO(this->supports_fan_mode_auto_), YESNO(this->supports_fan_mode_low_),
|
||||
YESNO(this->supports_fan_mode_medium_), YESNO(this->supports_fan_mode_high_),
|
||||
@@ -1428,7 +1589,10 @@ void ThermostatClimate::dump_config() {
|
||||
YESNO(this->supports_fan_mode_diffuse_), YESNO(this->supports_fan_mode_quiet_),
|
||||
YESNO(this->supports_swing_mode_both_), YESNO(this->supports_swing_mode_off_),
|
||||
YESNO(this->supports_swing_mode_horizontal_), YESNO(this->supports_swing_mode_vertical_),
|
||||
YESNO(this->supports_two_points_));
|
||||
YESNO(this->supports_two_points_),
|
||||
YESNO(this->get_traits().has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)),
|
||||
YESNO(this->supports_dehumidification_ || this->supports_humidification_),
|
||||
YESNO(this->supports_dehumidification_), YESNO(this->supports_humidification_));
|
||||
|
||||
if (!this->preset_config_.empty()) {
|
||||
ESP_LOGCONFIG(TAG, " Supported PRESETS:");
|
||||
|
||||
@@ -13,6 +13,13 @@
|
||||
namespace esphome {
|
||||
namespace thermostat {
|
||||
|
||||
enum HumidificationAction : uint8_t {
|
||||
THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF = 0,
|
||||
THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY = 1,
|
||||
THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY = 2,
|
||||
THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE,
|
||||
};
|
||||
|
||||
enum ThermostatClimateTimerIndex : uint8_t {
|
||||
THERMOSTAT_TIMER_COOLING_MAX_RUN_TIME = 0,
|
||||
THERMOSTAT_TIMER_COOLING_OFF = 1,
|
||||
@@ -90,6 +97,7 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
void set_idle_minimum_time_in_sec(uint32_t time);
|
||||
void set_sensor(sensor::Sensor *sensor);
|
||||
void set_humidity_sensor(sensor::Sensor *humidity_sensor);
|
||||
void set_humidity_hysteresis(float humidity_hysteresis);
|
||||
void set_use_startup_delay(bool use_startup_delay);
|
||||
void set_supports_auto(bool supports_auto);
|
||||
void set_supports_heat_cool(bool supports_heat_cool);
|
||||
@@ -115,6 +123,8 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
void set_supports_swing_mode_horizontal(bool supports_swing_mode_horizontal);
|
||||
void set_supports_swing_mode_off(bool supports_swing_mode_off);
|
||||
void set_supports_swing_mode_vertical(bool supports_swing_mode_vertical);
|
||||
void set_supports_dehumidification(bool supports_dehumidification);
|
||||
void set_supports_humidification(bool supports_humidification);
|
||||
void set_supports_two_points(bool supports_two_points);
|
||||
|
||||
void set_preset_config(climate::ClimatePreset preset, const ThermostatClimateTargetTempConfig &config);
|
||||
@@ -148,8 +158,12 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
Trigger<> *get_swing_mode_horizontal_trigger() const;
|
||||
Trigger<> *get_swing_mode_off_trigger() const;
|
||||
Trigger<> *get_swing_mode_vertical_trigger() const;
|
||||
Trigger<> *get_humidity_change_trigger() const;
|
||||
Trigger<> *get_temperature_change_trigger() const;
|
||||
Trigger<> *get_preset_change_trigger() const;
|
||||
Trigger<> *get_humidity_control_dehumidify_action_trigger() const;
|
||||
Trigger<> *get_humidity_control_humidify_action_trigger() const;
|
||||
Trigger<> *get_humidity_control_off_action_trigger() const;
|
||||
/// Get current hysteresis values
|
||||
float cool_deadband();
|
||||
float cool_overrun();
|
||||
@@ -166,11 +180,13 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
climate::ClimateFanMode locked_fan_mode();
|
||||
/// Set point and hysteresis validation
|
||||
bool hysteresis_valid(); // returns true if valid
|
||||
bool humidity_hysteresis_valid(); // returns true if valid
|
||||
bool limit_setpoints_for_heat_cool(); // returns true if set points should be further limited within visual range
|
||||
void validate_target_temperature();
|
||||
void validate_target_temperatures(bool pin_target_temperature_high);
|
||||
void validate_target_temperature_low();
|
||||
void validate_target_temperature_high();
|
||||
void validate_target_humidity();
|
||||
|
||||
protected:
|
||||
/// Override control to change settings of the climate device.
|
||||
@@ -192,11 +208,13 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
/// Re-compute the required action of this climate controller.
|
||||
climate::ClimateAction compute_action_(bool ignore_timers = false);
|
||||
climate::ClimateAction compute_supplemental_action_();
|
||||
HumidificationAction compute_humidity_control_action_();
|
||||
|
||||
/// Switch the climate device to the given climate action.
|
||||
void switch_to_action_(climate::ClimateAction action, bool publish_state = true);
|
||||
void switch_to_supplemental_action_(climate::ClimateAction action);
|
||||
void trigger_supplemental_action_();
|
||||
void switch_to_humidity_control_action_(HumidificationAction action);
|
||||
|
||||
/// Switch the climate device to the given climate fan mode.
|
||||
void switch_to_fan_mode_(climate::ClimateFanMode fan_mode, bool publish_state = true);
|
||||
@@ -207,6 +225,9 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
/// Switch the climate device to the given climate swing mode.
|
||||
void switch_to_swing_mode_(climate::ClimateSwingMode swing_mode, bool publish_state = true);
|
||||
|
||||
/// Check if the humidity change trigger should be called.
|
||||
void check_humidity_change_trigger_();
|
||||
|
||||
/// Check if the temperature change trigger should be called.
|
||||
void check_temperature_change_trigger_();
|
||||
|
||||
@@ -243,6 +264,8 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
bool heating_required_();
|
||||
bool supplemental_cooling_required_();
|
||||
bool supplemental_heating_required_();
|
||||
bool dehumidification_required_();
|
||||
bool humidification_required_();
|
||||
|
||||
void dump_preset_config_(const char *preset_name, const ThermostatClimateTargetTempConfig &config);
|
||||
|
||||
@@ -259,6 +282,9 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
/// The current supplemental action
|
||||
climate::ClimateAction supplemental_action_{climate::CLIMATE_ACTION_OFF};
|
||||
|
||||
/// The current humidification action
|
||||
HumidificationAction humidification_action_{THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE};
|
||||
|
||||
/// Default standard preset to use on start up
|
||||
climate::ClimatePreset default_preset_{};
|
||||
|
||||
@@ -321,6 +347,12 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
/// A false value means that the controller has no such support.
|
||||
bool supports_two_points_{false};
|
||||
|
||||
/// Whether the controller supports dehumidification and/or humidification
|
||||
///
|
||||
/// A false value means that the controller has no such support.
|
||||
bool supports_dehumidification_{false};
|
||||
bool supports_humidification_{false};
|
||||
|
||||
/// Flags indicating if maximum allowable run time was exceeded
|
||||
bool cooling_max_runtime_exceeded_{false};
|
||||
bool heating_max_runtime_exceeded_{false};
|
||||
@@ -331,9 +363,10 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
/// setup_complete_ blocks modifying/resetting the temps immediately after boot
|
||||
bool setup_complete_{false};
|
||||
|
||||
/// Store previously-known temperatures
|
||||
/// Store previously-known humidity and temperatures
|
||||
///
|
||||
/// These are used to determine when the temperature change trigger/action needs to be called
|
||||
/// These are used to determine when a temperature/humidity has changed
|
||||
float prev_target_humidity_{NAN};
|
||||
float prev_target_temperature_{NAN};
|
||||
float prev_target_temperature_low_{NAN};
|
||||
float prev_target_temperature_high_{NAN};
|
||||
@@ -347,6 +380,9 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
float heating_deadband_{0};
|
||||
float heating_overrun_{0};
|
||||
|
||||
/// Hysteresis values used for computing humidification action
|
||||
float humidity_hysteresis_{0};
|
||||
|
||||
/// Maximum allowable temperature deltas before engaging supplemental cooling/heating actions
|
||||
float supplemental_cool_delta_{0};
|
||||
float supplemental_heat_delta_{0};
|
||||
@@ -448,12 +484,24 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
/// The trigger to call when the controller should switch the swing mode to "vertical".
|
||||
Trigger<> *swing_mode_vertical_trigger_{nullptr};
|
||||
|
||||
/// The trigger to call when the target humidity changes.
|
||||
Trigger<> *humidity_change_trigger_{nullptr};
|
||||
|
||||
/// The trigger to call when the target temperature(s) change.
|
||||
Trigger<> *temperature_change_trigger_{nullptr};
|
||||
|
||||
/// The trigger to call when the preset mode changes
|
||||
Trigger<> *preset_change_trigger_{nullptr};
|
||||
|
||||
/// The trigger to call when dehumidification is required
|
||||
Trigger<> *humidity_control_dehumidify_action_trigger_{nullptr};
|
||||
|
||||
/// The trigger to call when humidification is required
|
||||
Trigger<> *humidity_control_humidify_action_trigger_{nullptr};
|
||||
|
||||
/// The trigger to call when (de)humidification should stop
|
||||
Trigger<> *humidity_control_off_action_trigger_{nullptr};
|
||||
|
||||
/// A reference to the trigger that was previously active.
|
||||
///
|
||||
/// This is so that the previous trigger can be stopped before enabling a new one
|
||||
@@ -462,6 +510,7 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
Trigger<> *prev_fan_mode_trigger_{nullptr};
|
||||
Trigger<> *prev_mode_trigger_{nullptr};
|
||||
Trigger<> *prev_swing_mode_trigger_{nullptr};
|
||||
Trigger<> *prev_humidity_control_trigger_{nullptr};
|
||||
|
||||
/// Default custom preset to use on start up
|
||||
std::string default_custom_preset_{};
|
||||
|
||||
@@ -14,6 +14,7 @@ MODELS = {
|
||||
"GENERIC": Model.MODEL_GENERIC,
|
||||
"RAC-PT1411HWRU-C": Model.MODEL_RAC_PT1411HWRU_C,
|
||||
"RAC-PT1411HWRU-F": Model.MODEL_RAC_PT1411HWRU_F,
|
||||
"RAS-2819T": Model.MODEL_RAS_2819T,
|
||||
}
|
||||
|
||||
CONFIG_SCHEMA = climate_ir.climate_ir_with_receiver_schema(ToshibaClimate).extend(
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#include "toshiba.h"
|
||||
#include "esphome/components/remote_base/toshiba_ac_protocol.h"
|
||||
|
||||
#include <vector>
|
||||
|
||||
@@ -97,6 +98,282 @@ const std::vector<uint8_t> RAC_PT1411HWRU_TEMPERATURE_F{0x10, 0x30, 0x00, 0x20,
|
||||
0x22, 0x06, 0x26, 0x07, 0x05, 0x25, 0x04, 0x24, 0x0C,
|
||||
0x2C, 0x0D, 0x2D, 0x09, 0x08, 0x28, 0x0A, 0x2A, 0x0B};
|
||||
|
||||
// RAS-2819T protocol constants
|
||||
const uint16_t RAS_2819T_HEADER1 = 0xC23D;
|
||||
const uint8_t RAS_2819T_HEADER2 = 0xD5;
|
||||
const uint8_t RAS_2819T_MESSAGE_LENGTH = 6;
|
||||
|
||||
// RAS-2819T fan speed codes for rc_code_1 (bytes 2-3)
|
||||
const uint16_t RAS_2819T_FAN_AUTO = 0xBF40;
|
||||
const uint16_t RAS_2819T_FAN_QUIET = 0xFF00;
|
||||
const uint16_t RAS_2819T_FAN_LOW = 0x9F60;
|
||||
const uint16_t RAS_2819T_FAN_MEDIUM = 0x5FA0;
|
||||
const uint16_t RAS_2819T_FAN_HIGH = 0x3FC0;
|
||||
|
||||
// RAS-2819T fan speed codes for rc_code_2 (byte 1)
|
||||
const uint8_t RAS_2819T_FAN2_AUTO = 0x66;
|
||||
const uint8_t RAS_2819T_FAN2_QUIET = 0x01;
|
||||
const uint8_t RAS_2819T_FAN2_LOW = 0x28;
|
||||
const uint8_t RAS_2819T_FAN2_MEDIUM = 0x3C;
|
||||
const uint8_t RAS_2819T_FAN2_HIGH = 0x50;
|
||||
|
||||
// RAS-2819T second packet suffix bytes for rc_code_2 (bytes 3-5)
|
||||
// These are fixed patterns, not actual checksums
|
||||
struct Ras2819tPacketSuffix {
|
||||
uint8_t byte3;
|
||||
uint8_t byte4;
|
||||
uint8_t byte5;
|
||||
};
|
||||
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_AUTO{0x00, 0x02, 0x3D};
|
||||
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_QUIET{0x00, 0x02, 0xD8};
|
||||
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_LOW{0x00, 0x02, 0xFF};
|
||||
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_MEDIUM{0x00, 0x02, 0x13};
|
||||
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_HIGH{0x00, 0x02, 0x27};
|
||||
|
||||
// RAS-2819T swing toggle command
|
||||
const uint64_t RAS_2819T_SWING_TOGGLE = 0xC23D6B94E01F;
|
||||
|
||||
// RAS-2819T single-packet commands
|
||||
const uint64_t RAS_2819T_POWER_OFF_COMMAND = 0xC23D7B84E01F;
|
||||
|
||||
// RAS-2819T known valid command patterns for validation
|
||||
const std::array<uint64_t, 2> RAS_2819T_VALID_SINGLE_COMMANDS = {
|
||||
RAS_2819T_POWER_OFF_COMMAND, // Power off
|
||||
RAS_2819T_SWING_TOGGLE, // Swing toggle
|
||||
};
|
||||
|
||||
const uint16_t RAS_2819T_VALID_HEADER1 = 0xC23D;
|
||||
const uint8_t RAS_2819T_VALID_HEADER2 = 0xD5;
|
||||
|
||||
const uint8_t RAS_2819T_DRY_BYTE2 = 0x1F;
|
||||
const uint8_t RAS_2819T_DRY_BYTE3 = 0xE0;
|
||||
const uint8_t RAS_2819T_DRY_TEMP_OFFSET = 0x24;
|
||||
|
||||
const uint8_t RAS_2819T_AUTO_BYTE2 = 0x1F;
|
||||
const uint8_t RAS_2819T_AUTO_BYTE3 = 0xE0;
|
||||
const uint8_t RAS_2819T_AUTO_TEMP_OFFSET = 0x08;
|
||||
|
||||
const uint8_t RAS_2819T_FAN_ONLY_TEMP = 0xE4;
|
||||
const uint8_t RAS_2819T_FAN_ONLY_TEMP_INV = 0x1B;
|
||||
|
||||
const uint8_t RAS_2819T_HEAT_TEMP_OFFSET = 0x0C;
|
||||
|
||||
// RAS-2819T second packet fixed values
|
||||
const uint8_t RAS_2819T_AUTO_DRY_FAN_BYTE = 0x65;
|
||||
const uint8_t RAS_2819T_AUTO_DRY_SUFFIX = 0x3A;
|
||||
const uint8_t RAS_2819T_HEAT_SUFFIX = 0x3B;
|
||||
|
||||
// RAS-2819T temperature codes for 18-30°C
|
||||
static const uint8_t RAS_2819T_TEMP_CODES[] = {
|
||||
0x10, // 18°C
|
||||
0x30, // 19°C
|
||||
0x20, // 20°C
|
||||
0x60, // 21°C
|
||||
0x70, // 22°C
|
||||
0x50, // 23°C
|
||||
0x40, // 24°C
|
||||
0xC0, // 25°C
|
||||
0xD0, // 26°C
|
||||
0x90, // 27°C
|
||||
0x80, // 28°C
|
||||
0xA0, // 29°C
|
||||
0xB0 // 30°C
|
||||
};
|
||||
|
||||
// Helper functions for RAS-2819T protocol
|
||||
//
|
||||
// ===== RAS-2819T PROTOCOL DOCUMENTATION =====
|
||||
//
|
||||
// The RAS-2819T uses a two-packet IR protocol with some exceptions for simple commands.
|
||||
//
|
||||
// PACKET STRUCTURE:
|
||||
// All packets are 6 bytes (48 bits) transmitted with standard Toshiba timing.
|
||||
//
|
||||
// TWO-PACKET COMMANDS (Mode/Temperature/Fan changes):
|
||||
//
|
||||
// First Packet (rc_code_1): [C2 3D] [FAN_HI FAN_LO] [TEMP] [~TEMP]
|
||||
// Byte 0-1: Header (always 0xC23D)
|
||||
// Byte 2-3: Fan speed encoding (varies by mode, see fan tables below)
|
||||
// Byte 4: Temperature + mode encoding
|
||||
// Byte 5: Bitwise complement of temperature byte
|
||||
//
|
||||
// Second Packet (rc_code_2): [D5] [FAN2] [00] [SUF1] [SUF2] [SUF3]
|
||||
// Byte 0: Header (always 0xD5)
|
||||
// Byte 1: Fan speed secondary encoding
|
||||
// Byte 2: Always 0x00
|
||||
// Byte 3-5: Fixed suffix pattern (depends on fan speed and mode)
|
||||
//
|
||||
// TEMPERATURE ENCODING:
|
||||
// Base temp codes: 18°C=0x10, 19°C=0x30, 20°C=0x20, 21°C=0x60, 22°C=0x70,
|
||||
// 23°C=0x50, 24°C=0x40, 25°C=0xC0, 26°C=0xD0, 27°C=0x90,
|
||||
// 28°C=0x80, 29°C=0xA0, 30°C=0xB0
|
||||
// Mode offsets added to base temp:
|
||||
// COOL: No offset
|
||||
// HEAT: +0x0C (e.g., 24°C heat = 0x40 | 0x0C = 0x4C)
|
||||
// AUTO: +0x08 (e.g., 24°C auto = 0x40 | 0x08 = 0x48)
|
||||
// DRY: +0x24 (e.g., 24°C dry = 0x40 | 0x24 = 0x64)
|
||||
//
|
||||
// FAN SPEED ENCODING (First packet bytes 2-3):
|
||||
// AUTO: 0xBF40, QUIET: 0xFF00, LOW: 0x9F60, MEDIUM: 0x5FA0, HIGH: 0x3FC0
|
||||
// Special cases: AUTO/DRY modes use 0x1FE0 instead
|
||||
//
|
||||
// SINGLE-PACKET COMMANDS:
|
||||
// Power Off: 0xC23D7B84E01F (6 bytes, no second packet)
|
||||
// Swing Toggle: 0xC23D6B94E01F (6 bytes, no second packet)
|
||||
//
|
||||
// MODE DETECTION (from first packet):
|
||||
// - Check bytes 2-3: if 0x7B84 → OFF mode
|
||||
// - Check bytes 2-3: if 0x1FE0 → AUTO/DRY/low-temp-COOL (distinguish by temp code)
|
||||
// - Otherwise: COOL/HEAT/FAN_ONLY (distinguish by temp code and byte 5)
|
||||
|
||||
/**
|
||||
* Get fan speed encoding for RAS-2819T first packet (rc_code_1, bytes 2-3)
|
||||
*/
|
||||
static uint16_t get_ras_2819t_fan_code(climate::ClimateFanMode fan_mode) {
|
||||
switch (fan_mode) {
|
||||
case climate::CLIMATE_FAN_QUIET:
|
||||
return RAS_2819T_FAN_QUIET;
|
||||
case climate::CLIMATE_FAN_LOW:
|
||||
return RAS_2819T_FAN_LOW;
|
||||
case climate::CLIMATE_FAN_MEDIUM:
|
||||
return RAS_2819T_FAN_MEDIUM;
|
||||
case climate::CLIMATE_FAN_HIGH:
|
||||
return RAS_2819T_FAN_HIGH;
|
||||
case climate::CLIMATE_FAN_AUTO:
|
||||
default:
|
||||
return RAS_2819T_FAN_AUTO;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get fan speed encoding for RAS-2819T rc_code_2 packet (second packet)
|
||||
*/
|
||||
struct Ras2819tSecondPacketCodes {
|
||||
uint8_t fan_byte;
|
||||
Ras2819tPacketSuffix suffix;
|
||||
};
|
||||
|
||||
static Ras2819tSecondPacketCodes get_ras_2819t_second_packet_codes(climate::ClimateFanMode fan_mode) {
|
||||
switch (fan_mode) {
|
||||
case climate::CLIMATE_FAN_QUIET:
|
||||
return {RAS_2819T_FAN2_QUIET, RAS_2819T_SUFFIX_QUIET};
|
||||
case climate::CLIMATE_FAN_LOW:
|
||||
return {RAS_2819T_FAN2_LOW, RAS_2819T_SUFFIX_LOW};
|
||||
case climate::CLIMATE_FAN_MEDIUM:
|
||||
return {RAS_2819T_FAN2_MEDIUM, RAS_2819T_SUFFIX_MEDIUM};
|
||||
case climate::CLIMATE_FAN_HIGH:
|
||||
return {RAS_2819T_FAN2_HIGH, RAS_2819T_SUFFIX_HIGH};
|
||||
case climate::CLIMATE_FAN_AUTO:
|
||||
default:
|
||||
return {RAS_2819T_FAN2_AUTO, RAS_2819T_SUFFIX_AUTO};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get temperature code for RAS-2819T protocol
|
||||
*/
|
||||
static uint8_t get_ras_2819t_temp_code(float temperature) {
|
||||
int temp_index = static_cast<int>(temperature) - 18;
|
||||
if (temp_index < 0 || temp_index >= static_cast<int>(sizeof(RAS_2819T_TEMP_CODES))) {
|
||||
ESP_LOGW(TAG, "Temperature %.1f°C out of range [18-30°C], defaulting to 24°C", temperature);
|
||||
return 0x40; // Default to 24°C
|
||||
}
|
||||
|
||||
return RAS_2819T_TEMP_CODES[temp_index];
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode temperature from RAS-2819T temp code
|
||||
*/
|
||||
static float decode_ras_2819t_temperature(uint8_t temp_code) {
|
||||
uint8_t base_temp_code = temp_code & 0xF0;
|
||||
|
||||
// Find the code in the temperature array
|
||||
for (size_t temp_index = 0; temp_index < sizeof(RAS_2819T_TEMP_CODES); temp_index++) {
|
||||
if (RAS_2819T_TEMP_CODES[temp_index] == base_temp_code) {
|
||||
return static_cast<float>(temp_index + 18); // 18°C is the minimum
|
||||
}
|
||||
}
|
||||
|
||||
ESP_LOGW(TAG, "Unknown temp code: 0x%02X, defaulting to 24°C", base_temp_code);
|
||||
return 24.0f; // Default to 24°C
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode fan speed from RAS-2819T IR codes
|
||||
*/
|
||||
static climate::ClimateFanMode decode_ras_2819t_fan_mode(uint16_t fan_code) {
|
||||
switch (fan_code) {
|
||||
case RAS_2819T_FAN_QUIET:
|
||||
return climate::CLIMATE_FAN_QUIET;
|
||||
case RAS_2819T_FAN_LOW:
|
||||
return climate::CLIMATE_FAN_LOW;
|
||||
case RAS_2819T_FAN_MEDIUM:
|
||||
return climate::CLIMATE_FAN_MEDIUM;
|
||||
case RAS_2819T_FAN_HIGH:
|
||||
return climate::CLIMATE_FAN_HIGH;
|
||||
case RAS_2819T_FAN_AUTO:
|
||||
default:
|
||||
return climate::CLIMATE_FAN_AUTO;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate RAS-2819T IR command structure and content
|
||||
*/
|
||||
static bool is_valid_ras_2819t_command(uint64_t rc_code_1, uint64_t rc_code_2 = 0) {
|
||||
// Check header of first packet
|
||||
uint16_t header1 = (rc_code_1 >> 32) & 0xFFFF;
|
||||
if (header1 != RAS_2819T_VALID_HEADER1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Single packet commands
|
||||
if (rc_code_2 == 0) {
|
||||
for (uint64_t valid_cmd : RAS_2819T_VALID_SINGLE_COMMANDS) {
|
||||
if (rc_code_1 == valid_cmd) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
// Additional validation for unknown single packets
|
||||
return false;
|
||||
}
|
||||
|
||||
// Two-packet commands - validate second packet header
|
||||
uint8_t header2 = (rc_code_2 >> 40) & 0xFF;
|
||||
if (header2 != RAS_2819T_VALID_HEADER2) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate temperature complement in first packet (byte 4 should be ~byte 5)
|
||||
uint8_t temp_byte = (rc_code_1 >> 8) & 0xFF;
|
||||
uint8_t temp_complement = rc_code_1 & 0xFF;
|
||||
if (temp_byte != static_cast<uint8_t>(~temp_complement)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate fan speed combinations make sense
|
||||
uint16_t fan_code = (rc_code_1 >> 16) & 0xFFFF;
|
||||
uint8_t fan2_byte = (rc_code_2 >> 32) & 0xFF;
|
||||
|
||||
// Check if fan codes are from known valid patterns
|
||||
bool valid_fan_combo = false;
|
||||
if (fan_code == RAS_2819T_FAN_AUTO && fan2_byte == RAS_2819T_FAN2_AUTO)
|
||||
valid_fan_combo = true;
|
||||
if (fan_code == RAS_2819T_FAN_QUIET && fan2_byte == RAS_2819T_FAN2_QUIET)
|
||||
valid_fan_combo = true;
|
||||
if (fan_code == RAS_2819T_FAN_LOW && fan2_byte == RAS_2819T_FAN2_LOW)
|
||||
valid_fan_combo = true;
|
||||
if (fan_code == RAS_2819T_FAN_MEDIUM && fan2_byte == RAS_2819T_FAN2_MEDIUM)
|
||||
valid_fan_combo = true;
|
||||
if (fan_code == RAS_2819T_FAN_HIGH && fan2_byte == RAS_2819T_FAN2_HIGH)
|
||||
valid_fan_combo = true;
|
||||
if (fan_code == 0x1FE0 && fan2_byte == RAS_2819T_AUTO_DRY_FAN_BYTE)
|
||||
valid_fan_combo = true; // AUTO/DRY
|
||||
|
||||
return valid_fan_combo;
|
||||
}
|
||||
|
||||
void ToshibaClimate::setup() {
|
||||
if (this->sensor_) {
|
||||
this->sensor_->add_on_state_callback([this](float state) {
|
||||
@@ -126,16 +403,43 @@ void ToshibaClimate::setup() {
|
||||
this->minimum_temperature_ = this->temperature_min_();
|
||||
this->maximum_temperature_ = this->temperature_max_();
|
||||
this->swing_modes_ = this->toshiba_swing_modes_();
|
||||
|
||||
// Ensure swing mode is always initialized to a valid value
|
||||
if (this->swing_modes_.empty() || this->swing_modes_.find(this->swing_mode) == this->swing_modes_.end()) {
|
||||
// No swing support for this model or current swing mode not supported, reset to OFF
|
||||
this->swing_mode = climate::CLIMATE_SWING_OFF;
|
||||
}
|
||||
|
||||
// Ensure mode is valid - ESPHome should only use standard climate modes
|
||||
if (this->mode != climate::CLIMATE_MODE_OFF && this->mode != climate::CLIMATE_MODE_HEAT &&
|
||||
this->mode != climate::CLIMATE_MODE_COOL && this->mode != climate::CLIMATE_MODE_HEAT_COOL &&
|
||||
this->mode != climate::CLIMATE_MODE_DRY && this->mode != climate::CLIMATE_MODE_FAN_ONLY) {
|
||||
ESP_LOGW(TAG, "Invalid mode detected during setup, resetting to OFF");
|
||||
this->mode = climate::CLIMATE_MODE_OFF;
|
||||
}
|
||||
|
||||
// Ensure fan mode is valid
|
||||
if (!this->fan_mode.has_value()) {
|
||||
ESP_LOGW(TAG, "Fan mode not set during setup, defaulting to AUTO");
|
||||
this->fan_mode = climate::CLIMATE_FAN_AUTO;
|
||||
}
|
||||
|
||||
// Never send nan to HA
|
||||
if (std::isnan(this->target_temperature))
|
||||
this->target_temperature = 24;
|
||||
// Log final state for debugging HA errors
|
||||
ESP_LOGV(TAG, "Setup complete - Mode: %d, Fan: %s, Swing: %d, Temp: %.1f", static_cast<int>(this->mode),
|
||||
this->fan_mode.has_value() ? std::to_string(static_cast<int>(this->fan_mode.value())).c_str() : "NONE",
|
||||
static_cast<int>(this->swing_mode), this->target_temperature);
|
||||
}
|
||||
|
||||
void ToshibaClimate::transmit_state() {
|
||||
if (this->model_ == MODEL_RAC_PT1411HWRU_C || this->model_ == MODEL_RAC_PT1411HWRU_F) {
|
||||
transmit_rac_pt1411hwru_();
|
||||
this->transmit_rac_pt1411hwru_();
|
||||
} else if (this->model_ == MODEL_RAS_2819T) {
|
||||
this->transmit_ras_2819t_();
|
||||
} else {
|
||||
transmit_generic_();
|
||||
this->transmit_generic_();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -230,7 +534,7 @@ void ToshibaClimate::transmit_generic_() {
|
||||
auto transmit = this->transmitter_->transmit();
|
||||
auto *data = transmit.get_data();
|
||||
|
||||
encode_(data, message, message_length, 1);
|
||||
this->encode_(data, message, message_length, 1);
|
||||
|
||||
transmit.perform();
|
||||
}
|
||||
@@ -348,15 +652,12 @@ void ToshibaClimate::transmit_rac_pt1411hwru_() {
|
||||
message[11] += message[index];
|
||||
}
|
||||
}
|
||||
ESP_LOGV(TAG, "*** Generated codes: 0x%.2X%.2X%.2X%.2X%.2X%.2X 0x%.2X%.2X%.2X%.2X%.2X%.2X", message[0], message[1],
|
||||
message[2], message[3], message[4], message[5], message[6], message[7], message[8], message[9], message[10],
|
||||
message[11]);
|
||||
|
||||
// load first block of IR code and repeat it once
|
||||
encode_(data, &message[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
this->encode_(data, &message[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
// load second block of IR code, if present
|
||||
if (message[6] != 0) {
|
||||
encode_(data, &message[6], RAC_PT1411HWRU_MESSAGE_LENGTH, 0);
|
||||
this->encode_(data, &message[6], RAC_PT1411HWRU_MESSAGE_LENGTH, 0);
|
||||
}
|
||||
|
||||
transmit.perform();
|
||||
@@ -366,19 +667,19 @@ void ToshibaClimate::transmit_rac_pt1411hwru_() {
|
||||
data->space(TOSHIBA_PACKET_SPACE);
|
||||
switch (this->swing_mode) {
|
||||
case climate::CLIMATE_SWING_VERTICAL:
|
||||
encode_(data, &RAC_PT1411HWRU_SWING_VERTICAL[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
this->encode_(data, &RAC_PT1411HWRU_SWING_VERTICAL[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
break;
|
||||
|
||||
case climate::CLIMATE_SWING_OFF:
|
||||
default:
|
||||
encode_(data, &RAC_PT1411HWRU_SWING_OFF[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
this->encode_(data, &RAC_PT1411HWRU_SWING_OFF[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
}
|
||||
|
||||
data->space(TOSHIBA_PACKET_SPACE);
|
||||
transmit.perform();
|
||||
|
||||
if (this->sensor_) {
|
||||
transmit_rac_pt1411hwru_temp_(true, false);
|
||||
this->transmit_rac_pt1411hwru_temp_(true, false);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -430,15 +731,217 @@ void ToshibaClimate::transmit_rac_pt1411hwru_temp_(const bool cs_state, const bo
|
||||
// Byte 5: Footer lower/bitwise complement of byte 4
|
||||
message[5] = ~message[4];
|
||||
|
||||
ESP_LOGV(TAG, "*** Generated code: 0x%.2X%.2X%.2X%.2X%.2X%.2X", message[0], message[1], message[2], message[3],
|
||||
message[4], message[5]);
|
||||
// load IR code and repeat it once
|
||||
encode_(data, message, RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
this->encode_(data, message, RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
|
||||
transmit.perform();
|
||||
}
|
||||
}
|
||||
|
||||
void ToshibaClimate::transmit_ras_2819t_() {
|
||||
// Handle swing mode transmission for RAS-2819T
|
||||
// Note: RAS-2819T uses a toggle command, so we need to track state changes
|
||||
|
||||
// Check if ONLY swing mode changed (and no other climate parameters)
|
||||
bool swing_changed = (this->swing_mode != this->last_swing_mode_);
|
||||
bool mode_changed = (this->mode != this->last_mode_);
|
||||
bool fan_changed = (this->fan_mode != this->last_fan_mode_);
|
||||
bool temp_changed = (abs(this->target_temperature - this->last_target_temperature_) > 0.1f);
|
||||
|
||||
bool only_swing_changed = swing_changed && !mode_changed && !fan_changed && !temp_changed;
|
||||
|
||||
if (only_swing_changed) {
|
||||
// Send ONLY swing toggle command (like the physical remote does)
|
||||
auto swing_transmit = this->transmitter_->transmit();
|
||||
auto *swing_data = swing_transmit.get_data();
|
||||
|
||||
// Convert toggle command to bytes for transmission
|
||||
uint8_t swing_message[RAS_2819T_MESSAGE_LENGTH];
|
||||
swing_message[0] = (RAS_2819T_SWING_TOGGLE >> 40) & 0xFF;
|
||||
swing_message[1] = (RAS_2819T_SWING_TOGGLE >> 32) & 0xFF;
|
||||
swing_message[2] = (RAS_2819T_SWING_TOGGLE >> 24) & 0xFF;
|
||||
swing_message[3] = (RAS_2819T_SWING_TOGGLE >> 16) & 0xFF;
|
||||
swing_message[4] = (RAS_2819T_SWING_TOGGLE >> 8) & 0xFF;
|
||||
swing_message[5] = RAS_2819T_SWING_TOGGLE & 0xFF;
|
||||
|
||||
// Use single packet transmission WITH repeat (like regular commands)
|
||||
this->encode_(swing_data, swing_message, RAS_2819T_MESSAGE_LENGTH, 1);
|
||||
swing_transmit.perform();
|
||||
|
||||
// Update all state tracking
|
||||
this->last_swing_mode_ = this->swing_mode;
|
||||
this->last_mode_ = this->mode;
|
||||
this->last_fan_mode_ = this->fan_mode;
|
||||
this->last_target_temperature_ = this->target_temperature;
|
||||
|
||||
// Immediately publish the state change to Home Assistant
|
||||
this->publish_state();
|
||||
|
||||
return; // Exit early - don't send climate command
|
||||
}
|
||||
|
||||
// If we get here, send the regular climate command (temperature/mode/fan)
|
||||
uint8_t message1[RAS_2819T_MESSAGE_LENGTH] = {0};
|
||||
uint8_t message2[RAS_2819T_MESSAGE_LENGTH] = {0};
|
||||
float temperature =
|
||||
clamp<float>(this->target_temperature, TOSHIBA_RAS_2819T_TEMP_C_MIN, TOSHIBA_RAS_2819T_TEMP_C_MAX);
|
||||
|
||||
// Build first packet (RAS_2819T_HEADER1 + 4 bytes)
|
||||
message1[0] = (RAS_2819T_HEADER1 >> 8) & 0xFF;
|
||||
message1[1] = RAS_2819T_HEADER1 & 0xFF;
|
||||
|
||||
// Handle OFF mode
|
||||
if (this->mode == climate::CLIMATE_MODE_OFF) {
|
||||
// Extract bytes from power off command constant
|
||||
message1[2] = (RAS_2819T_POWER_OFF_COMMAND >> 24) & 0xFF;
|
||||
message1[3] = (RAS_2819T_POWER_OFF_COMMAND >> 16) & 0xFF;
|
||||
message1[4] = (RAS_2819T_POWER_OFF_COMMAND >> 8) & 0xFF;
|
||||
message1[5] = RAS_2819T_POWER_OFF_COMMAND & 0xFF;
|
||||
// No second packet for OFF
|
||||
} else {
|
||||
// Get temperature and fan encoding
|
||||
uint8_t temp_code = get_ras_2819t_temp_code(temperature);
|
||||
|
||||
// Get fan speed encoding for rc_code_1
|
||||
climate::ClimateFanMode effective_fan_mode = this->fan_mode.value();
|
||||
|
||||
// Dry mode only supports AUTO fan speed
|
||||
if (this->mode == climate::CLIMATE_MODE_DRY) {
|
||||
effective_fan_mode = climate::CLIMATE_FAN_AUTO;
|
||||
if (this->fan_mode.value() != climate::CLIMATE_FAN_AUTO) {
|
||||
ESP_LOGW(TAG, "Dry mode only supports AUTO fan speed, forcing AUTO");
|
||||
}
|
||||
}
|
||||
|
||||
uint16_t fan_code = get_ras_2819t_fan_code(effective_fan_mode);
|
||||
|
||||
// Mode and temperature encoding
|
||||
switch (this->mode) {
|
||||
case climate::CLIMATE_MODE_COOL:
|
||||
// All cooling temperatures support fan speed control
|
||||
message1[2] = (fan_code >> 8) & 0xFF;
|
||||
message1[3] = fan_code & 0xFF;
|
||||
message1[4] = temp_code;
|
||||
message1[5] = ~temp_code;
|
||||
break;
|
||||
|
||||
case climate::CLIMATE_MODE_HEAT:
|
||||
// Heating supports fan speed control
|
||||
message1[2] = (fan_code >> 8) & 0xFF;
|
||||
message1[3] = fan_code & 0xFF;
|
||||
// Heat mode adds offset to temperature code
|
||||
message1[4] = temp_code | RAS_2819T_HEAT_TEMP_OFFSET;
|
||||
message1[5] = ~(temp_code | RAS_2819T_HEAT_TEMP_OFFSET);
|
||||
break;
|
||||
|
||||
case climate::CLIMATE_MODE_HEAT_COOL:
|
||||
// Auto mode uses fixed encoding
|
||||
message1[2] = RAS_2819T_AUTO_BYTE2;
|
||||
message1[3] = RAS_2819T_AUTO_BYTE3;
|
||||
message1[4] = temp_code | RAS_2819T_AUTO_TEMP_OFFSET;
|
||||
message1[5] = ~(temp_code | RAS_2819T_AUTO_TEMP_OFFSET);
|
||||
break;
|
||||
|
||||
case climate::CLIMATE_MODE_DRY:
|
||||
// Dry mode uses fixed encoding and forces AUTO fan
|
||||
message1[2] = RAS_2819T_DRY_BYTE2;
|
||||
message1[3] = RAS_2819T_DRY_BYTE3;
|
||||
message1[4] = temp_code | RAS_2819T_DRY_TEMP_OFFSET;
|
||||
message1[5] = ~message1[4];
|
||||
break;
|
||||
|
||||
case climate::CLIMATE_MODE_FAN_ONLY:
|
||||
// Fan only mode supports fan speed control
|
||||
message1[2] = (fan_code >> 8) & 0xFF;
|
||||
message1[3] = fan_code & 0xFF;
|
||||
message1[4] = RAS_2819T_FAN_ONLY_TEMP;
|
||||
message1[5] = RAS_2819T_FAN_ONLY_TEMP_INV;
|
||||
break;
|
||||
|
||||
default:
|
||||
// Default case supports fan speed control
|
||||
message1[2] = (fan_code >> 8) & 0xFF;
|
||||
message1[3] = fan_code & 0xFF;
|
||||
message1[4] = temp_code;
|
||||
message1[5] = ~temp_code;
|
||||
break;
|
||||
}
|
||||
|
||||
// Build second packet (RAS_2819T_HEADER2 + 4 bytes)
|
||||
message2[0] = RAS_2819T_HEADER2;
|
||||
|
||||
// Get fan speed encoding for rc_code_2
|
||||
Ras2819tSecondPacketCodes second_packet_codes = get_ras_2819t_second_packet_codes(effective_fan_mode);
|
||||
|
||||
// Determine header byte 2 and fan encoding based on mode
|
||||
switch (this->mode) {
|
||||
case climate::CLIMATE_MODE_COOL:
|
||||
message2[1] = second_packet_codes.fan_byte;
|
||||
message2[2] = 0x00;
|
||||
message2[3] = second_packet_codes.suffix.byte3;
|
||||
message2[4] = second_packet_codes.suffix.byte4;
|
||||
message2[5] = second_packet_codes.suffix.byte5;
|
||||
break;
|
||||
|
||||
case climate::CLIMATE_MODE_HEAT:
|
||||
message2[1] = second_packet_codes.fan_byte;
|
||||
message2[2] = 0x00;
|
||||
message2[3] = second_packet_codes.suffix.byte3;
|
||||
message2[4] = 0x00;
|
||||
message2[5] = RAS_2819T_HEAT_SUFFIX;
|
||||
break;
|
||||
|
||||
case climate::CLIMATE_MODE_HEAT_COOL:
|
||||
case climate::CLIMATE_MODE_DRY:
|
||||
// Auto/Dry modes use fixed values regardless of fan setting
|
||||
message2[1] = RAS_2819T_AUTO_DRY_FAN_BYTE;
|
||||
message2[2] = 0x00;
|
||||
message2[3] = 0x00;
|
||||
message2[4] = 0x00;
|
||||
message2[5] = RAS_2819T_AUTO_DRY_SUFFIX;
|
||||
break;
|
||||
|
||||
case climate::CLIMATE_MODE_FAN_ONLY:
|
||||
message2[1] = second_packet_codes.fan_byte;
|
||||
message2[2] = 0x00;
|
||||
message2[3] = second_packet_codes.suffix.byte3;
|
||||
message2[4] = 0x00;
|
||||
message2[5] = RAS_2819T_HEAT_SUFFIX;
|
||||
break;
|
||||
|
||||
default:
|
||||
message2[1] = second_packet_codes.fan_byte;
|
||||
message2[2] = 0x00;
|
||||
message2[3] = second_packet_codes.suffix.byte3;
|
||||
message2[4] = second_packet_codes.suffix.byte4;
|
||||
message2[5] = second_packet_codes.suffix.byte5;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Log final messages being transmitted
|
||||
|
||||
// Transmit using proper Toshiba protocol timing
|
||||
auto transmit = this->transmitter_->transmit();
|
||||
auto *data = transmit.get_data();
|
||||
|
||||
// Use existing Toshiba encode function for proper timing
|
||||
this->encode_(data, message1, RAS_2819T_MESSAGE_LENGTH, 1);
|
||||
|
||||
if (this->mode != climate::CLIMATE_MODE_OFF) {
|
||||
// Send second packet with gap
|
||||
this->encode_(data, message2, RAS_2819T_MESSAGE_LENGTH, 0);
|
||||
}
|
||||
|
||||
transmit.perform();
|
||||
|
||||
// Update all state tracking after successful transmission
|
||||
this->last_swing_mode_ = this->swing_mode;
|
||||
this->last_mode_ = this->mode;
|
||||
this->last_fan_mode_ = this->fan_mode;
|
||||
this->last_target_temperature_ = this->target_temperature;
|
||||
}
|
||||
|
||||
uint8_t ToshibaClimate::is_valid_rac_pt1411hwru_header_(const uint8_t *message) {
|
||||
const std::vector<uint8_t> header{RAC_PT1411HWRU_MESSAGE_HEADER0, RAC_PT1411HWRU_CS_HEADER,
|
||||
RAC_PT1411HWRU_SWING_HEADER};
|
||||
@@ -464,11 +967,11 @@ bool ToshibaClimate::compare_rac_pt1411hwru_packets_(const uint8_t *message1, co
|
||||
bool ToshibaClimate::is_valid_rac_pt1411hwru_message_(const uint8_t *message) {
|
||||
uint8_t checksum = 0;
|
||||
|
||||
switch (is_valid_rac_pt1411hwru_header_(message)) {
|
||||
switch (this->is_valid_rac_pt1411hwru_header_(message)) {
|
||||
case RAC_PT1411HWRU_MESSAGE_HEADER0:
|
||||
case RAC_PT1411HWRU_CS_HEADER:
|
||||
case RAC_PT1411HWRU_SWING_HEADER:
|
||||
if (is_valid_rac_pt1411hwru_header_(message) && (message[2] == static_cast<uint8_t>(~message[3])) &&
|
||||
if (this->is_valid_rac_pt1411hwru_header_(message) && (message[2] == static_cast<uint8_t>(~message[3])) &&
|
||||
(message[4] == static_cast<uint8_t>(~message[5]))) {
|
||||
return true;
|
||||
}
|
||||
@@ -490,7 +993,103 @@ bool ToshibaClimate::is_valid_rac_pt1411hwru_message_(const uint8_t *message) {
|
||||
return false;
|
||||
}
|
||||
|
||||
bool ToshibaClimate::process_ras_2819t_command_(const remote_base::ToshibaAcData &toshiba_data) {
|
||||
// Check for power-off command (single packet)
|
||||
if (toshiba_data.rc_code_2 == 0 && toshiba_data.rc_code_1 == RAS_2819T_POWER_OFF_COMMAND) {
|
||||
this->mode = climate::CLIMATE_MODE_OFF;
|
||||
ESP_LOGI(TAG, "Mode: OFF");
|
||||
this->publish_state();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for swing toggle command (single packet)
|
||||
if (toshiba_data.rc_code_2 == 0 && toshiba_data.rc_code_1 == RAS_2819T_SWING_TOGGLE) {
|
||||
// Toggle swing mode
|
||||
if (this->swing_mode == climate::CLIMATE_SWING_VERTICAL) {
|
||||
this->swing_mode = climate::CLIMATE_SWING_OFF;
|
||||
ESP_LOGI(TAG, "Swing: OFF");
|
||||
} else {
|
||||
this->swing_mode = climate::CLIMATE_SWING_VERTICAL;
|
||||
ESP_LOGI(TAG, "Swing: VERTICAL");
|
||||
}
|
||||
this->publish_state();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Handle regular two-packet commands (mode/temperature/fan changes)
|
||||
if (toshiba_data.rc_code_2 != 0) {
|
||||
// Convert to byte array for easier processing
|
||||
uint8_t message1[6], message2[6];
|
||||
for (uint8_t i = 0; i < 6; i++) {
|
||||
message1[i] = (toshiba_data.rc_code_1 >> (40 - i * 8)) & 0xFF;
|
||||
message2[i] = (toshiba_data.rc_code_2 >> (40 - i * 8)) & 0xFF;
|
||||
}
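For reference, the byte extraction in this loop is a plain big-endian split of a 48-bit code, the mirror image of the packing done in transmit_ras_2819t_(). A minimal Python sketch of the same arithmetic, using a made-up code value rather than any real RAS-2819T remote code:

```python
# Illustration only: big-endian split/join of a 48-bit IR code into 6 bytes,
# mirroring the (rc_code >> (40 - i * 8)) & 0xFF indexing used above.
def split_48(code: int) -> list[int]:
    return [(code >> (40 - i * 8)) & 0xFF for i in range(6)]

def join_48(data: list[int]) -> int:
    value = 0
    for byte in data:
        value = (value << 8) | byte  # most significant byte first
    return value

code = 0x123456789ABC  # placeholder value, not an actual remote code
assert split_48(code) == [0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC]
assert join_48(split_48(code)) == code
```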
// Decode the protocol using message1 (rc_code_1)
|
||||
uint8_t temp_code = message1[4];
|
||||
|
||||
// Decode mode - check bytes 2-3 pattern and temperature code
|
||||
if ((message1[2] == 0x7B) && (message1[3] == 0x84)) {
|
||||
// OFF mode has specific pattern
|
||||
this->mode = climate::CLIMATE_MODE_OFF;
|
||||
ESP_LOGI(TAG, "Mode: OFF");
|
||||
} else if ((message1[2] == 0x1F) && (message1[3] == 0xE0)) {
|
||||
// 0x1FE0 pattern is used for AUTO, DRY, and low-temp COOL
|
||||
if ((temp_code & 0x0F) == 0x08) {
|
||||
this->mode = climate::CLIMATE_MODE_HEAT_COOL;
|
||||
ESP_LOGI(TAG, "Mode: AUTO");
|
||||
} else if ((temp_code & 0x0F) == 0x04) {
|
||||
this->mode = climate::CLIMATE_MODE_DRY;
|
||||
ESP_LOGI(TAG, "Mode: DRY");
|
||||
} else {
|
||||
this->mode = climate::CLIMATE_MODE_COOL;
|
||||
ESP_LOGI(TAG, "Mode: COOL (low temp)");
|
||||
}
|
||||
} else {
|
||||
// Variable fan speed patterns - decode by temperature code
|
||||
if ((temp_code & 0x0F) == 0x0C) {
|
||||
this->mode = climate::CLIMATE_MODE_HEAT;
|
||||
ESP_LOGI(TAG, "Mode: HEAT");
|
||||
} else if (message1[5] == 0x1B) {
|
||||
this->mode = climate::CLIMATE_MODE_FAN_ONLY;
|
||||
ESP_LOGI(TAG, "Mode: FAN_ONLY");
|
||||
} else {
|
||||
this->mode = climate::CLIMATE_MODE_COOL;
|
||||
ESP_LOGI(TAG, "Mode: COOL");
|
||||
}
|
||||
}
|
||||
|
||||
// Decode fan speed from rc_code_1
|
||||
uint16_t fan_code = (message1[2] << 8) | message1[3];
|
||||
this->fan_mode = decode_ras_2819t_fan_mode(fan_code);
|
||||
|
||||
// Decode temperature
|
||||
if (this->mode != climate::CLIMATE_MODE_OFF && this->mode != climate::CLIMATE_MODE_FAN_ONLY) {
|
||||
this->target_temperature = decode_ras_2819t_temperature(temp_code);
|
||||
}
|
||||
|
||||
this->publish_state();
|
||||
return true;
|
||||
} else {
|
||||
ESP_LOGD(TAG, "Unknown single-packet RAS-2819T command: 0x%" PRIX64, toshiba_data.rc_code_1);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
|
||||
// Try modern ToshibaAcProtocol decoder first (handles RAS-2819T and potentially others)
|
||||
remote_base::ToshibaAcProtocol toshiba_protocol;
|
||||
auto decode_result = toshiba_protocol.decode(data);
|
||||
|
||||
if (decode_result.has_value()) {
|
||||
auto toshiba_data = decode_result.value();
|
||||
// Validate and process RAS-2819T commands
|
||||
if (is_valid_ras_2819t_command(toshiba_data.rc_code_1, toshiba_data.rc_code_2)) {
|
||||
return this->process_ras_2819t_command_(toshiba_data);
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to generic processing for older protocols
|
||||
uint8_t message[18] = {0};
|
||||
uint8_t message_length = TOSHIBA_HEADER_LENGTH, temperature_code = 0;
|
||||
|
||||
@@ -499,11 +1098,11 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
|
||||
return false;
|
||||
}
|
||||
// Read incoming bits into buffer
|
||||
if (!decode_(&data, message, message_length)) {
|
||||
if (!this->decode_(&data, message, message_length)) {
|
||||
return false;
|
||||
}
|
||||
// Determine incoming message protocol version and/or length
|
||||
if (is_valid_rac_pt1411hwru_header_(message)) {
|
||||
if (this->is_valid_rac_pt1411hwru_header_(message)) {
|
||||
// We already received four bytes
|
||||
message_length = RAC_PT1411HWRU_MESSAGE_LENGTH - 4;
|
||||
} else if ((message[0] ^ message[1] ^ message[2]) != message[3]) {
|
||||
@@ -514,11 +1113,11 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
|
||||
message_length = message[2] + 2;
|
||||
}
|
||||
// Decode the remaining bytes
|
||||
if (!decode_(&data, &message[4], message_length)) {
|
||||
if (!this->decode_(&data, &message[4], message_length)) {
|
||||
return false;
|
||||
}
|
||||
// If this is a RAC-PT1411HWRU message, we expect the first packet a second time and also possibly a third packet
|
||||
if (is_valid_rac_pt1411hwru_header_(message)) {
|
||||
if (this->is_valid_rac_pt1411hwru_header_(message)) {
|
||||
// There is always a space between packets
|
||||
if (!data.expect_item(TOSHIBA_BIT_MARK, TOSHIBA_GAP_SPACE)) {
|
||||
return false;
|
||||
@@ -527,7 +1126,7 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
|
||||
if (!data.expect_item(TOSHIBA_HEADER_MARK, TOSHIBA_HEADER_SPACE)) {
|
||||
return false;
|
||||
}
|
||||
if (!decode_(&data, &message[6], RAC_PT1411HWRU_MESSAGE_LENGTH)) {
|
||||
if (!this->decode_(&data, &message[6], RAC_PT1411HWRU_MESSAGE_LENGTH)) {
|
||||
return false;
|
||||
}
|
||||
// If this is a RAC-PT1411HWRU message, there may also be a third packet.
|
||||
@@ -535,25 +1134,25 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
|
||||
if (data.expect_item(TOSHIBA_BIT_MARK, TOSHIBA_GAP_SPACE)) {
|
||||
// Validate header 3
|
||||
data.expect_item(TOSHIBA_HEADER_MARK, TOSHIBA_HEADER_SPACE);
|
||||
if (decode_(&data, &message[12], RAC_PT1411HWRU_MESSAGE_LENGTH)) {
|
||||
if (!is_valid_rac_pt1411hwru_message_(&message[12])) {
|
||||
if (this->decode_(&data, &message[12], RAC_PT1411HWRU_MESSAGE_LENGTH)) {
|
||||
if (!this->is_valid_rac_pt1411hwru_message_(&message[12])) {
|
||||
// If a third packet was received but the checksum is not valid, fail
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!compare_rac_pt1411hwru_packets_(&message[0], &message[6])) {
|
||||
if (!this->compare_rac_pt1411hwru_packets_(&message[0], &message[6])) {
|
||||
// If the first two packets don't match each other, fail
|
||||
return false;
|
||||
}
|
||||
if (!is_valid_rac_pt1411hwru_message_(&message[0])) {
|
||||
if (!this->is_valid_rac_pt1411hwru_message_(&message[0])) {
|
||||
// If the first packet isn't valid, fail
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Header has been verified, now determine protocol version and set the climate component properties
|
||||
switch (is_valid_rac_pt1411hwru_header_(message)) {
|
||||
switch (this->is_valid_rac_pt1411hwru_header_(message)) {
|
||||
// Power, temperature, mode, fan speed
|
||||
case RAC_PT1411HWRU_MESSAGE_HEADER0:
|
||||
// Get the mode
|
||||
@@ -608,7 +1207,7 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
|
||||
break;
|
||||
}
|
||||
// Get the target temperature
|
||||
if (is_valid_rac_pt1411hwru_message_(&message[12])) {
|
||||
if (this->is_valid_rac_pt1411hwru_message_(&message[12])) {
|
||||
temperature_code =
|
||||
(message[4] >> 4) | (message[14] & RAC_PT1411HWRU_FLAG_FRAC) | (message[15] & RAC_PT1411HWRU_FLAG_NEG);
|
||||
if (message[15] & RAC_PT1411HWRU_FLAG_FAH) {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/components/climate_ir/climate_ir.h"
|
||||
#include "esphome/components/remote_base/toshiba_ac_protocol.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace toshiba {
|
||||
@@ -10,6 +11,7 @@ enum Model {
|
||||
MODEL_GENERIC = 0, // Temperature range is from 17 to 30
|
||||
MODEL_RAC_PT1411HWRU_C = 1, // Temperature range is from 16 to 30
|
||||
MODEL_RAC_PT1411HWRU_F = 2, // Temperature range is from 16 to 30
|
||||
MODEL_RAS_2819T = 3, // RAS-2819T protocol variant, temperature range 18 to 30
|
||||
};
|
||||
|
||||
// Supported temperature ranges
|
||||
@@ -19,6 +21,8 @@ const float TOSHIBA_RAC_PT1411HWRU_TEMP_C_MIN = 16.0;
|
||||
const float TOSHIBA_RAC_PT1411HWRU_TEMP_C_MAX = 30.0;
|
||||
const float TOSHIBA_RAC_PT1411HWRU_TEMP_F_MIN = 60.0;
|
||||
const float TOSHIBA_RAC_PT1411HWRU_TEMP_F_MAX = 86.0;
|
||||
const float TOSHIBA_RAS_2819T_TEMP_C_MIN = 18.0;
|
||||
const float TOSHIBA_RAS_2819T_TEMP_C_MAX = 30.0;
|
||||
|
||||
class ToshibaClimate : public climate_ir::ClimateIR {
|
||||
public:
|
||||
@@ -35,6 +39,9 @@ class ToshibaClimate : public climate_ir::ClimateIR {
|
||||
void transmit_generic_();
|
||||
void transmit_rac_pt1411hwru_();
|
||||
void transmit_rac_pt1411hwru_temp_(bool cs_state = true, bool cs_send_update = true);
|
||||
void transmit_ras_2819t_();
|
||||
// Process RAS-2819T IR command data
|
||||
bool process_ras_2819t_command_(const remote_base::ToshibaAcData &toshiba_data);
|
||||
// Returns the header if valid, else returns zero
|
||||
uint8_t is_valid_rac_pt1411hwru_header_(const uint8_t *message);
|
||||
// Returns true if message is a valid RAC-PT1411HWRU IR message, regardless if first or second packet
|
||||
@@ -43,11 +50,26 @@ class ToshibaClimate : public climate_ir::ClimateIR {
|
||||
bool compare_rac_pt1411hwru_packets_(const uint8_t *message1, const uint8_t *message2);
|
||||
bool on_receive(remote_base::RemoteReceiveData data) override;
|
||||
|
||||
private:
|
||||
// RAS-2819T state tracking for swing mode optimization
|
||||
climate::ClimateSwingMode last_swing_mode_{climate::CLIMATE_SWING_OFF};
|
||||
climate::ClimateMode last_mode_{climate::CLIMATE_MODE_OFF};
|
||||
optional<climate::ClimateFanMode> last_fan_mode_{};
|
||||
float last_target_temperature_{24.0f};
|
||||
|
||||
float temperature_min_() {
|
||||
return (this->model_ == MODEL_GENERIC) ? TOSHIBA_GENERIC_TEMP_C_MIN : TOSHIBA_RAC_PT1411HWRU_TEMP_C_MIN;
|
||||
if (this->model_ == MODEL_RAC_PT1411HWRU_C || this->model_ == MODEL_RAC_PT1411HWRU_F)
|
||||
return TOSHIBA_RAC_PT1411HWRU_TEMP_C_MIN;
|
||||
if (this->model_ == MODEL_RAS_2819T)
|
||||
return TOSHIBA_RAS_2819T_TEMP_C_MIN;
|
||||
return TOSHIBA_GENERIC_TEMP_C_MIN; // Default to GENERIC for unknown models
|
||||
}
|
||||
float temperature_max_() {
|
||||
return (this->model_ == MODEL_GENERIC) ? TOSHIBA_GENERIC_TEMP_C_MAX : TOSHIBA_RAC_PT1411HWRU_TEMP_C_MAX;
|
||||
if (this->model_ == MODEL_RAC_PT1411HWRU_C || this->model_ == MODEL_RAC_PT1411HWRU_F)
|
||||
return TOSHIBA_RAC_PT1411HWRU_TEMP_C_MAX;
|
||||
if (this->model_ == MODEL_RAS_2819T)
|
||||
return TOSHIBA_RAS_2819T_TEMP_C_MAX;
|
||||
return TOSHIBA_GENERIC_TEMP_C_MAX; // Default to GENERIC for unknown models
|
||||
}
|
||||
std::set<climate::ClimateSwingMode> toshiba_swing_modes_() {
|
||||
return (this->model_ == MODEL_GENERIC)
|
||||
|
||||
@@ -7,13 +7,24 @@ namespace touchscreen {
|
||||
|
||||
static const char *const TAG = "touchscreen";
|
||||
|
||||
void TouchscreenInterrupt::gpio_intr(TouchscreenInterrupt *store) { store->touched = true; }
|
||||
void TouchscreenInterrupt::gpio_intr(TouchscreenInterrupt *store) {
|
||||
bool new_state = store->isr_pin_.digital_read();
|
||||
if (new_state != store->inverted) {
|
||||
store->touched = true;
|
||||
if (store->component_ != nullptr) {
|
||||
store->component_->enable_loop_soon_any_context();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Touchscreen::attach_interrupt_(InternalGPIOPin *irq_pin, esphome::gpio::InterruptType type) {
|
||||
this->store_.isr_pin_ = irq_pin->to_isr();
|
||||
this->store_.component_ = this;
|
||||
this->store_.inverted = irq_pin->is_inverted();
|
||||
irq_pin->attach_interrupt(TouchscreenInterrupt::gpio_intr, &this->store_, type);
|
||||
this->store_.init = true;
|
||||
this->store_.touched = false;
|
||||
ESP_LOGD(TAG, "Attach Touch Interupt");
|
||||
ESP_LOGD(TAG, "Attach Touch Interrupt");
|
||||
}
|
||||
|
||||
void Touchscreen::call_setup() {
|
||||
@@ -71,6 +82,8 @@ void Touchscreen::loop() {
|
||||
}
|
||||
}
|
||||
}
|
||||
if (this->store_.init)
|
||||
this->disable_loop();
|
||||
}
|
||||
|
||||
void Touchscreen::add_raw_touch_position_(uint8_t id, int16_t x_raw, int16_t y_raw, int16_t z_raw) {
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/components/display/display.h"
|
||||
#include "esphome/core/defines.h"
|
||||
|
||||
#include "esphome/core/automation.h"
|
||||
#include "esphome/core/hal.h"
|
||||
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <vector>
|
||||
|
||||
namespace esphome {
|
||||
namespace touchscreen {
|
||||
@@ -30,9 +30,12 @@ struct TouchPoint {
|
||||
using TouchPoints_t = std::vector<TouchPoint>;
|
||||
|
||||
struct TouchscreenInterrupt {
|
||||
ISRInternalGPIOPin isr_pin_;
|
||||
volatile bool touched{true};
|
||||
bool init{false};
|
||||
bool inverted{false};
|
||||
static void gpio_intr(TouchscreenInterrupt *store);
|
||||
Component *component_{nullptr};
|
||||
};
|
||||
|
||||
class TouchListener {
|
||||
|
||||
@@ -1325,7 +1325,7 @@ std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_conf
|
||||
root["max_temp"] = value_accuracy_to_string(traits.get_visual_max_temperature(), target_accuracy);
|
||||
root["min_temp"] = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
|
||||
root["step"] = traits.get_visual_target_temperature_step();
|
||||
if (traits.get_supports_action()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
|
||||
root["action"] = PSTR_LOCAL(climate_action_to_string(obj->action));
|
||||
root["state"] = root["action"];
|
||||
has_state = true;
|
||||
@@ -1345,14 +1345,15 @@ std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_conf
|
||||
if (traits.get_supports_swing_modes()) {
|
||||
root["swing_mode"] = PSTR_LOCAL(climate_swing_mode_to_string(obj->swing_mode));
|
||||
}
|
||||
if (traits.get_supports_current_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
|
||||
if (!std::isnan(obj->current_temperature)) {
|
||||
root["current_temperature"] = value_accuracy_to_string(obj->current_temperature, current_accuracy);
|
||||
} else {
|
||||
root["current_temperature"] = "NA";
|
||||
}
|
||||
}
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
root["target_temperature_low"] = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
|
||||
root["target_temperature_high"] = value_accuracy_to_string(obj->target_temperature_high, target_accuracy);
|
||||
if (!has_state) {
|
||||
|
||||
@@ -402,8 +402,8 @@ async def to_code(config):
|
||||
add_idf_sdkconfig_option("CONFIG_LWIP_DHCPS", False)
|
||||
|
||||
# Disable Enterprise WiFi support if no EAP is configured
|
||||
if CORE.is_esp32 and not has_eap:
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_WIFI_ENTERPRISE_SUPPORT", False)
|
||||
if CORE.is_esp32:
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_WIFI_ENTERPRISE_SUPPORT", has_eap)
|
||||
|
||||
cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
|
||||
cg.add(var.set_power_save_mode(config[CONF_POWER_SAVE_MODE]))
|
||||
|
||||
@@ -3,6 +3,7 @@ from esphome.components import i2c, sensor
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ID,
|
||||
CONF_OVERSAMPLING,
|
||||
CONF_PRESSURE,
|
||||
CONF_TEMPERATURE,
|
||||
DEVICE_CLASS_PRESSURE,
|
||||
@@ -18,6 +19,17 @@ CODEOWNERS = ["@gcormier"]
|
||||
CONF_K_VALUE = "k_value"
|
||||
|
||||
xgzp68xx_ns = cg.esphome_ns.namespace("xgzp68xx")
|
||||
XGZP68XXOversampling = xgzp68xx_ns.enum("XGZP68XXOversampling")
|
||||
OVERSAMPLING_OPTIONS = {
|
||||
"256X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_256X,
|
||||
"512X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_512X,
|
||||
"1024X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_1024X,
|
||||
"2048X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_2048X,
|
||||
"4096X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_4096X,
|
||||
"8192X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_8192X,
|
||||
"16384X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_16384X,
|
||||
"32768X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_32768X,
|
||||
}
|
||||
XGZP68XXComponent = xgzp68xx_ns.class_(
|
||||
"XGZP68XXComponent", cg.PollingComponent, i2c.I2CDevice
|
||||
)
|
||||
@@ -31,6 +43,12 @@ CONFIG_SCHEMA = (
|
||||
accuracy_decimals=1,
|
||||
device_class=DEVICE_CLASS_PRESSURE,
|
||||
state_class=STATE_CLASS_MEASUREMENT,
|
||||
).extend(
|
||||
{
|
||||
cv.Optional(CONF_OVERSAMPLING, default="4096X"): cv.enum(
|
||||
OVERSAMPLING_OPTIONS, upper=True
|
||||
),
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_TEMPERATURE): sensor.sensor_schema(
|
||||
unit_of_measurement=UNIT_CELSIUS,
|
||||
@@ -58,5 +76,6 @@ async def to_code(config):
|
||||
if pressure_config := config.get(CONF_PRESSURE):
|
||||
sens = await sensor.new_sensor(pressure_config)
|
||||
cg.add(var.set_pressure_sensor(sens))
|
||||
cg.add(var.set_pressure_oversampling(pressure_config[CONF_OVERSAMPLING]))
|
||||
|
||||
cg.add(var.set_k_value(config[CONF_K_VALUE]))
|
||||
|
||||
@@ -16,16 +16,49 @@ static const uint8_t SYSCONFIG_ADDRESS = 0xA5;
|
||||
static const uint8_t PCONFIG_ADDRESS = 0xA6;
|
||||
static const uint8_t READ_COMMAND = 0x0A;
|
||||
|
||||
[[maybe_unused]] static const char *oversampling_to_str(XGZP68XXOversampling oversampling) {
|
||||
switch (oversampling) {
|
||||
case XGZP68XX_OVERSAMPLING_256X:
|
||||
return "256x";
|
||||
case XGZP68XX_OVERSAMPLING_512X:
|
||||
return "512x";
|
||||
case XGZP68XX_OVERSAMPLING_1024X:
|
||||
return "1024x";
|
||||
case XGZP68XX_OVERSAMPLING_2048X:
|
||||
return "2048x";
|
||||
case XGZP68XX_OVERSAMPLING_4096X:
|
||||
return "4096x";
|
||||
case XGZP68XX_OVERSAMPLING_8192X:
|
||||
return "8192x";
|
||||
case XGZP68XX_OVERSAMPLING_16384X:
|
||||
return "16384x";
|
||||
case XGZP68XX_OVERSAMPLING_32768X:
|
||||
return "32768x";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
}
|
||||
|
||||
void XGZP68XXComponent::update() {
|
||||
// Do we need to change oversampling?
|
||||
if (this->last_pressure_oversampling_ != this->pressure_oversampling_) {
|
||||
uint8_t oldconfig = 0;
|
||||
this->read_register(PCONFIG_ADDRESS, &oldconfig, 1);
|
||||
uint8_t newconfig = (oldconfig & 0xf8) | (this->pressure_oversampling_ & 0x7);
|
||||
this->write_register(PCONFIG_ADDRESS, &newconfig, 1);
|
||||
ESP_LOGD(TAG, "oversampling to %s: oldconfig = 0x%x newconfig = 0x%x",
|
||||
oversampling_to_str(this->pressure_oversampling_), oldconfig, newconfig);
|
||||
this->last_pressure_oversampling_ = this->pressure_oversampling_;
|
||||
}
|
||||
|
||||
// Request temp + pressure acquisition
|
||||
this->write_register(0x30, &READ_COMMAND, 1);
|
||||
|
||||
// Wait 20 ms per datasheet
|
||||
this->set_timeout("measurement", 20, [this]() {
|
||||
uint8_t data[5];
|
||||
uint32_t pressure_raw;
|
||||
uint16_t temperature_raw;
|
||||
float pressure_in_pa, temperature;
|
||||
uint8_t data[5] = {};
|
||||
uint32_t pressure_raw = 0;
|
||||
uint16_t temperature_raw = 0;
|
||||
int success;
|
||||
|
||||
// Read the sensor data
|
||||
@@ -42,23 +75,11 @@ void XGZP68XXComponent::update() {
|
||||
ESP_LOGV(TAG, "Got raw pressure=%" PRIu32 ", raw temperature=%u", pressure_raw, temperature_raw);
|
||||
ESP_LOGV(TAG, "K value is %u", this->k_value_);
|
||||
|
||||
// The most significant bit of both pressure and temperature will be 1 to indicate a negative value.
|
||||
// This is directly from the datasheet, and the calculations below will handle this.
|
||||
if (pressure_raw > pow(2, 23)) {
|
||||
// Negative pressure
|
||||
pressure_in_pa = (pressure_raw - pow(2, 24)) / (float) (this->k_value_);
|
||||
} else {
|
||||
// Positive pressure
|
||||
pressure_in_pa = pressure_raw / (float) (this->k_value_);
|
||||
}
|
||||
// Sign extend the pressure
|
||||
float pressure_in_pa = (float) (((int32_t) pressure_raw << 8) >> 8);
|
||||
pressure_in_pa /= (float) (this->k_value_);
|
||||
|
||||
if (temperature_raw > pow(2, 15)) {
|
||||
// Negative temperature
|
||||
temperature = (float) (temperature_raw - pow(2, 16)) / 256.0f;
|
||||
} else {
|
||||
// Positive temperature
|
||||
temperature = (float) temperature_raw / 256.0f;
|
||||
}
|
||||
float temperature = ((float) (int16_t) temperature_raw) / 256.0f;
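The two casts above replace the earlier power-of-two branches: shifting the 24-bit pressure value into the top byte of an int32_t and back sign-extends it, and the int16_t cast does the same for the 16-bit temperature. A small Python sketch of the equivalent two's-complement decoding; the raw readings and K value below are invented purely for illustration:

```python
def sign_extend(raw: int, bits: int) -> int:
    """Interpret raw as a two's-complement value of the given bit width."""
    sign_bit = 1 << (bits - 1)
    return (raw & (sign_bit - 1)) - (raw & sign_bit)

pressure_raw = 0xFFFF38       # hypothetical negative 24-bit sample
temperature_raw = 0xFE80      # hypothetical negative 16-bit sample
k_value = 64                  # hypothetical K value, not from a datasheet

pressure_pa = sign_extend(pressure_raw, 24) / k_value      # -200 / 64 = -3.125 Pa
temperature_c = sign_extend(temperature_raw, 16) / 256.0   # -1.5 degC
```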
if (this->pressure_sensor_ != nullptr)
|
||||
this->pressure_sensor_->publish_state(pressure_in_pa);
|
||||
@@ -69,20 +90,27 @@ void XGZP68XXComponent::update() {
|
||||
}
|
||||
|
||||
void XGZP68XXComponent::setup() {
|
||||
uint8_t config;
|
||||
uint8_t config1 = 0, config2 = 0;
|
||||
|
||||
// Display some sample bits to confirm we are talking to the sensor
|
||||
this->read_register(SYSCONFIG_ADDRESS, &config, 1);
|
||||
ESP_LOGCONFIG(TAG,
|
||||
"Gain value is %d\n"
|
||||
"XGZP68xx started!",
|
||||
(config >> 3) & 0b111);
|
||||
if (i2c::ErrorCode::ERROR_OK != this->read_register(SYSCONFIG_ADDRESS, &config1, 1)) {
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
if (i2c::ErrorCode::ERROR_OK != this->read_register(PCONFIG_ADDRESS, &config2, 1)) {
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
ESP_LOGD(TAG, "sys_config 0x%x, p_config 0x%x", config1, config2);
|
||||
}
|
||||
|
||||
void XGZP68XXComponent::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, "XGZP68xx:");
|
||||
LOG_SENSOR(" ", "Temperature: ", this->temperature_sensor_);
|
||||
LOG_SENSOR(" ", "Pressure: ", this->pressure_sensor_);
|
||||
if (this->pressure_sensor_ != nullptr) {
|
||||
ESP_LOGCONFIG(TAG, " Oversampling: %s", oversampling_to_str(this->pressure_oversampling_));
|
||||
}
|
||||
LOG_I2C_DEVICE(this);
|
||||
if (this->is_failed()) {
|
||||
ESP_LOGE(TAG, " Connection failed");
|
||||
|
||||
@@ -7,11 +7,29 @@
|
||||
namespace esphome {
|
||||
namespace xgzp68xx {
|
||||
|
||||
/// Enum listing all oversampling options for the XGZP68XX.
|
||||
enum XGZP68XXOversampling : uint8_t {
|
||||
XGZP68XX_OVERSAMPLING_256X = 0b100,
|
||||
XGZP68XX_OVERSAMPLING_512X = 0b101,
|
||||
XGZP68XX_OVERSAMPLING_1024X = 0b000,
|
||||
XGZP68XX_OVERSAMPLING_2048X = 0b001,
|
||||
XGZP68XX_OVERSAMPLING_4096X = 0b010,
|
||||
XGZP68XX_OVERSAMPLING_8192X = 0b011,
|
||||
XGZP68XX_OVERSAMPLING_16384X = 0b110,
|
||||
XGZP68XX_OVERSAMPLING_32768X = 0b111,
|
||||
|
||||
XGZP68XX_OVERSAMPLING_UNKNOWN = (uint8_t) -1,
|
||||
};
|
||||
|
||||
class XGZP68XXComponent : public PollingComponent, public sensor::Sensor, public i2c::I2CDevice {
|
||||
public:
|
||||
SUB_SENSOR(temperature)
|
||||
SUB_SENSOR(pressure)
|
||||
void set_k_value(uint16_t k_value) { this->k_value_ = k_value; }
|
||||
/// Set the pressure oversampling value. Defaults to 4096X.
|
||||
void set_pressure_oversampling(XGZP68XXOversampling pressure_oversampling) {
|
||||
this->pressure_oversampling_ = pressure_oversampling;
|
||||
}
|
||||
|
||||
void update() override;
|
||||
void setup() override;
|
||||
@@ -21,6 +39,8 @@ class XGZP68XXComponent : public PollingComponent, public sensor::Sensor, public
|
||||
/// Internal method to read the pressure from the component after it has been scheduled.
|
||||
void read_pressure_();
|
||||
uint16_t k_value_;
|
||||
XGZP68XXOversampling pressure_oversampling_{XGZP68XX_OVERSAMPLING_4096X};
|
||||
XGZP68XXOversampling last_pressure_oversampling_{XGZP68XX_OVERSAMPLING_UNKNOWN};
|
||||
};
|
||||
|
||||
} // namespace xgzp68xx
|
||||
|
||||
@@ -222,18 +222,25 @@ def copy_files():
|
||||
] in ["xiao_ble"]:
|
||||
fake_board_manifest = """
|
||||
{
|
||||
"frameworks": [
|
||||
"zephyr"
|
||||
],
|
||||
"name": "esphome nrf52",
|
||||
"upload": {
|
||||
"maximum_ram_size": 248832,
|
||||
"maximum_size": 815104
|
||||
},
|
||||
"url": "https://esphome.io/",
|
||||
"vendor": "esphome"
|
||||
"frameworks": [
|
||||
"zephyr"
|
||||
],
|
||||
"name": "esphome nrf52",
|
||||
"upload": {
|
||||
"maximum_ram_size": 248832,
|
||||
"maximum_size": 815104,
|
||||
"speed": 115200
|
||||
},
|
||||
"url": "https://esphome.io/",
|
||||
"vendor": "esphome",
|
||||
"build": {
|
||||
"softdevice": {
|
||||
"sd_fwid": "0x00B6"
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
write_file_if_changed(
|
||||
CORE.relative_build_path(f"boards/{zephyr_data()[KEY_BOARD]}.json"),
|
||||
fake_board_manifest,
|
||||
|
||||
34
esphome/components/zephyr_ble_server/__init__.py
Normal file
@@ -0,0 +1,34 @@
|
||||
import esphome.codegen as cg
|
||||
from esphome.components.zephyr import zephyr_add_prj_conf
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ESPHOME, CONF_ID, CONF_NAME, Framework
|
||||
import esphome.final_validate as fv
|
||||
|
||||
zephyr_ble_server_ns = cg.esphome_ns.namespace("zephyr_ble_server")
|
||||
BLEServer = zephyr_ble_server_ns.class_("BLEServer", cg.Component)
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(BLEServer),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA),
|
||||
cv.only_with_framework(Framework.ZEPHYR),
|
||||
)
|
||||
|
||||
|
||||
def _final_validate(_):
|
||||
full_config = fv.full_config.get()
|
||||
zephyr_add_prj_conf("BT_DEVICE_NAME", full_config[CONF_ESPHOME][CONF_NAME])
|
||||
|
||||
|
||||
FINAL_VALIDATE_SCHEMA = _final_validate
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
zephyr_add_prj_conf("BT", True)
|
||||
zephyr_add_prj_conf("BT_PERIPHERAL", True)
|
||||
zephyr_add_prj_conf("BT_RX_STACK_SIZE", 1536)
|
||||
# zephyr_add_prj_conf("BT_LL_SW_SPLIT", True)
|
||||
await cg.register_component(var, config)
|
||||
100
esphome/components/zephyr_ble_server/ble_server.cpp
Normal file
@@ -0,0 +1,100 @@
|
||||
#ifdef USE_ZEPHYR
|
||||
#include "ble_server.h"
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include <zephyr/bluetooth/bluetooth.h>
|
||||
#include <zephyr/bluetooth/conn.h>
|
||||
|
||||
namespace esphome::zephyr_ble_server {
|
||||
|
||||
static const char *const TAG = "zephyr_ble_server";
|
||||
|
||||
static struct k_work advertise_work; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
|
||||
|
||||
#define DEVICE_NAME CONFIG_BT_DEVICE_NAME
|
||||
#define DEVICE_NAME_LEN (sizeof(DEVICE_NAME) - 1)
|
||||
|
||||
static const struct bt_data AD[] = {
|
||||
BT_DATA_BYTES(BT_DATA_FLAGS, (BT_LE_AD_GENERAL | BT_LE_AD_NO_BREDR)),
|
||||
BT_DATA(BT_DATA_NAME_COMPLETE, DEVICE_NAME, DEVICE_NAME_LEN),
|
||||
};
|
||||
|
||||
static const struct bt_data SD[] = {
|
||||
#ifdef USE_OTA
|
||||
BT_DATA_BYTES(BT_DATA_UUID128_ALL, 0x84, 0xaa, 0x60, 0x74, 0x52, 0x8a, 0x8b, 0x86, 0xd3, 0x4c, 0xb7, 0x1d, 0x1d,
|
||||
0xdc, 0x53, 0x8d),
|
||||
#endif
|
||||
};
|
||||
|
||||
const struct bt_le_adv_param *const ADV_PARAM = BT_LE_ADV_CONN;
|
||||
|
||||
static void advertise(struct k_work *work) {
|
||||
int rc = bt_le_adv_stop();
|
||||
if (rc) {
|
||||
ESP_LOGE(TAG, "Advertising failed to stop (rc %d)", rc);
|
||||
}
|
||||
|
||||
rc = bt_le_adv_start(ADV_PARAM, AD, ARRAY_SIZE(AD), SD, ARRAY_SIZE(SD));
|
||||
if (rc) {
|
||||
ESP_LOGE(TAG, "Advertising failed to start (rc %d)", rc);
|
||||
return;
|
||||
}
|
||||
ESP_LOGI(TAG, "Advertising successfully started");
|
||||
}
|
||||
|
||||
static void connected(struct bt_conn *conn, uint8_t err) {
|
||||
if (err) {
|
||||
ESP_LOGE(TAG, "Connection failed (err 0x%02x)", err);
|
||||
} else {
|
||||
ESP_LOGI(TAG, "Connected");
|
||||
}
|
||||
}
|
||||
|
||||
static void disconnected(struct bt_conn *conn, uint8_t reason) {
|
||||
ESP_LOGI(TAG, "Disconnected (reason 0x%02x)", reason);
|
||||
k_work_submit(&advertise_work);
|
||||
}
|
||||
|
||||
static void bt_ready(int err) {
|
||||
if (err != 0) {
|
||||
ESP_LOGE(TAG, "Bluetooth failed to initialise: %d", err);
|
||||
} else {
|
||||
k_work_submit(&advertise_work);
|
||||
}
|
||||
}
|
||||
|
||||
BT_CONN_CB_DEFINE(conn_callbacks) = {
|
||||
.connected = connected,
|
||||
.disconnected = disconnected,
|
||||
};
|
||||
|
||||
void BLEServer::setup() {
|
||||
k_work_init(&advertise_work, advertise);
|
||||
resume_();
|
||||
}
|
||||
|
||||
void BLEServer::loop() {
|
||||
if (this->suspended_) {
|
||||
resume_();
|
||||
this->suspended_ = false;
|
||||
}
|
||||
}
|
||||
|
||||
void BLEServer::resume_() {
|
||||
int rc = bt_enable(bt_ready);
|
||||
if (rc != 0) {
|
||||
ESP_LOGE(TAG, "Bluetooth enable failed: %d", rc);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
void BLEServer::on_shutdown() {
|
||||
struct k_work_sync sync;
|
||||
k_work_cancel_sync(&advertise_work, &sync);
|
||||
bt_disable();
|
||||
this->suspended_ = true;
|
||||
}
|
||||
|
||||
} // namespace esphome::zephyr_ble_server
|
||||
|
||||
#endif
|
||||
19
esphome/components/zephyr_ble_server/ble_server.h
Normal file
@@ -0,0 +1,19 @@
|
||||
#pragma once
|
||||
#ifdef USE_ZEPHYR
|
||||
#include "esphome/core/component.h"
|
||||
|
||||
namespace esphome::zephyr_ble_server {
|
||||
|
||||
class BLEServer : public Component {
|
||||
public:
|
||||
void setup() override;
|
||||
void loop() override;
|
||||
void on_shutdown() override;
|
||||
|
||||
protected:
|
||||
void resume_();
|
||||
bool suspended_ = false;
|
||||
};
|
||||
|
||||
} // namespace esphome::zephyr_ble_server
|
||||
#endif
|
||||
@@ -12,7 +12,7 @@ from typing import Any
|
||||
import voluptuous as vol
|
||||
|
||||
from esphome import core, loader, pins, yaml_util
|
||||
from esphome.config_helpers import Extend, Remove
|
||||
from esphome.config_helpers import Extend, Remove, merge_dicts_ordered
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ESPHOME,
|
||||
@@ -922,10 +922,9 @@ def validate_config(
|
||||
if CONF_SUBSTITUTIONS in config or command_line_substitutions:
|
||||
from esphome.components import substitutions
|
||||
|
||||
result[CONF_SUBSTITUTIONS] = {
|
||||
**(config.get(CONF_SUBSTITUTIONS) or {}),
|
||||
**command_line_substitutions,
|
||||
}
|
||||
result[CONF_SUBSTITUTIONS] = merge_dicts_ordered(
|
||||
config.get(CONF_SUBSTITUTIONS) or {}, command_line_substitutions
|
||||
)
|
||||
result.add_output_path([CONF_SUBSTITUTIONS], CONF_SUBSTITUTIONS)
|
||||
try:
|
||||
substitutions.do_substitution_pass(config, command_line_substitutions)
|
||||
|
||||
@@ -10,6 +10,7 @@ from esphome.const import (
|
||||
PlatformFramework,
|
||||
)
|
||||
from esphome.core import CORE
|
||||
from esphome.util import OrderedDict
|
||||
|
||||
# Pre-build lookup map from (platform, framework) tuples to PlatformFramework enum
|
||||
_PLATFORM_FRAMEWORK_LOOKUP = {
|
||||
@@ -17,6 +18,25 @@ _PLATFORM_FRAMEWORK_LOOKUP = {
|
||||
}
|
||||
|
||||
|
||||
def merge_dicts_ordered(*dicts: dict) -> OrderedDict:
|
||||
"""Merge multiple dicts into an OrderedDict, preserving key order.
|
||||
|
||||
This is a helper to ensure that dictionary merging preserves OrderedDict type,
|
||||
which is important for operations like move_to_end().
|
||||
|
||||
Args:
|
||||
*dicts: Variable number of dictionaries to merge (later dicts override earlier ones)
|
||||
|
||||
Returns:
|
||||
OrderedDict with merged contents
|
||||
"""
|
||||
result = OrderedDict()
|
||||
for d in dicts:
|
||||
if d:
|
||||
result.update(d)
|
||||
return result
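A quick usage sketch of this helper (the keys and values are invented): later dicts win on conflicts, and the result is always an OrderedDict, so callers can keep using move_to_end() as the docstring notes.

```python
defaults = {"name": "kitchen", "board": "esp32dev"}        # hypothetical values
overrides = {"board": "esp32-s3-devkitc-1"}

merged = merge_dicts_ordered(defaults, overrides)
assert merged == {"name": "kitchen", "board": "esp32-s3-devkitc-1"}
merged.move_to_end("name")  # safe: the helper always returns an OrderedDict
```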
class Extend:
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
@@ -60,7 +80,11 @@ def merge_config(full_old, full_new):
|
||||
if isinstance(new, dict):
|
||||
if not isinstance(old, dict):
|
||||
return new
|
||||
res = old.copy()
|
||||
# Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict
|
||||
if isinstance(old, OrderedDict) or isinstance(new, OrderedDict):
|
||||
res = OrderedDict(old)
|
||||
else:
|
||||
res = old.copy()
|
||||
for k, v in new.items():
|
||||
if isinstance(v, Remove) and k in old:
|
||||
del res[k]
|
||||
|
||||
@@ -244,6 +244,20 @@ RESERVED_IDS = [
|
||||
"uart0",
|
||||
"uart1",
|
||||
"uart2",
|
||||
# ESP32 ROM functions
|
||||
"crc16_be",
|
||||
"crc16_le",
|
||||
"crc32_be",
|
||||
"crc32_le",
|
||||
"crc8_be",
|
||||
"crc8_le",
|
||||
"dbg_state",
|
||||
"debug_timer",
|
||||
"one_bits",
|
||||
"recv_packet",
|
||||
"send_packet",
|
||||
"check_pos",
|
||||
"software_reset",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -696,6 +696,7 @@ CONF_OPEN_DRAIN = "open_drain"
|
||||
CONF_OPEN_DRAIN_INTERRUPT = "open_drain_interrupt"
|
||||
CONF_OPEN_DURATION = "open_duration"
|
||||
CONF_OPEN_ENDSTOP = "open_endstop"
|
||||
CONF_OPENTHREAD = "openthread"
|
||||
CONF_OPERATION = "operation"
|
||||
CONF_OPTIMISTIC = "optimistic"
|
||||
CONF_OPTION = "option"
|
||||
@@ -1299,6 +1300,7 @@ DEVICE_CLASS_SULPHUR_DIOXIDE = "sulphur_dioxide"
|
||||
DEVICE_CLASS_SWITCH = "switch"
|
||||
DEVICE_CLASS_TAMPER = "tamper"
|
||||
DEVICE_CLASS_TEMPERATURE = "temperature"
|
||||
DEVICE_CLASS_TEMPERATURE_DELTA = "temperature_delta"
|
||||
DEVICE_CLASS_TIMESTAMP = "timestamp"
|
||||
DEVICE_CLASS_UPDATE = "update"
|
||||
DEVICE_CLASS_VIBRATION = "vibration"
|
||||
|
||||
@@ -10,6 +10,10 @@ from esphome.helpers import get_bool_env
|
||||
|
||||
from .util.password import password_hash
|
||||
|
||||
# Sentinel file name used for CORE.config_path when dashboard initializes.
|
||||
# This ensures .parent returns the config directory instead of root.
|
||||
_DASHBOARD_SENTINEL_FILE = "___DASHBOARD_SENTINEL___.yaml"
|
||||
|
||||
|
||||
class DashboardSettings:
|
||||
"""Settings for the dashboard."""
|
||||
@@ -48,7 +52,12 @@ class DashboardSettings:
|
||||
self.config_dir = Path(args.configuration)
|
||||
self.absolute_config_dir = self.config_dir.resolve()
|
||||
self.verbose = args.verbose
|
||||
CORE.config_path = self.config_dir / "."
|
||||
# Set to a sentinel file so .parent gives us the config directory.
|
||||
# Previously this was `os.path.join(self.config_dir, ".")` which worked because
|
||||
# os.path.dirname("/config/.") returns "/config", but Path("/config/.").parent
|
||||
# normalizes to Path("/config") first, then .parent returns Path("/"), breaking
|
||||
# secret resolution. Using a sentinel file ensures .parent gives the correct directory.
|
||||
CORE.config_path = self.config_dir / _DASHBOARD_SENTINEL_FILE
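The pathlib behaviour described in the comment is easy to see directly (the directory name here is illustrative):

```python
from pathlib import Path

config_dir = Path("/config")                                   # hypothetical config dir
print(Path("/config/.").parent)                                # Path('/')       - '.' is normalized away first
print((config_dir / "___DASHBOARD_SENTINEL___.yaml").parent)   # Path('/config') - sentinel keeps the parent correct
```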
@property
|
||||
def relative_url(self) -> str:
|
||||
|
||||
@@ -1058,7 +1058,8 @@ class DownloadBinaryRequestHandler(BaseHandler):
|
||||
"download",
|
||||
f"{storage_json.name}-{file_name}",
|
||||
)
|
||||
path = storage_json.firmware_bin_path.with_name(file_name)
|
||||
|
||||
path = storage_json.firmware_bin_path.parent.joinpath(file_name)
|
||||
|
||||
if not path.is_file():
|
||||
args = ["esphome", "idedata", settings.rel_path(configuration)]
|
||||
|
||||
@@ -242,7 +242,7 @@ def send_check(
|
||||
|
||||
|
||||
def perform_ota(
|
||||
sock: socket.socket, password: str, file_handle: io.IOBase, filename: Path
|
||||
sock: socket.socket, password: str | None, file_handle: io.IOBase, filename: Path
|
||||
) -> None:
|
||||
file_contents = file_handle.read()
|
||||
file_size = len(file_contents)
|
||||
@@ -278,13 +278,13 @@ def perform_ota(
|
||||
|
||||
def perform_auth(
|
||||
sock: socket.socket,
|
||||
password: str,
|
||||
password: str | None,
|
||||
hash_func: Callable[..., Any],
|
||||
nonce_size: int,
|
||||
hash_name: str,
|
||||
) -> None:
|
||||
"""Perform challenge-response authentication using specified hash algorithm."""
|
||||
if not password:
|
||||
if password is None:
|
||||
raise OTAError("ESP requests password, but no password given!")
|
||||
|
||||
nonce_bytes = receive_exactly(
|
||||
@@ -385,7 +385,7 @@ def perform_ota(
|
||||
|
||||
|
||||
def run_ota_impl_(
|
||||
remote_host: str | list[str], remote_port: int, password: str, filename: Path
|
||||
remote_host: str | list[str], remote_port: int, password: str | None, filename: Path
|
||||
) -> tuple[int, str | None]:
|
||||
from esphome.core import CORE
|
||||
|
||||
@@ -436,7 +436,7 @@ def run_ota_impl_(
|
||||
|
||||
|
||||
def run_ota(
|
||||
remote_host: str | list[str], remote_port: int, password: str, filename: Path
|
||||
remote_host: str | list[str], remote_port: int, password: str | None, filename: Path
|
||||
) -> tuple[int, str | None]:
|
||||
try:
|
||||
return run_ota_impl_(remote_host, remote_port, password, filename)
|
||||
|
||||
150
esphome/git.py
@@ -5,6 +5,7 @@ import hashlib
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import urllib.parse
|
||||
|
||||
@@ -17,14 +18,60 @@ _LOGGER = logging.getLogger(__name__)
|
||||
NEVER_REFRESH = TimePeriodSeconds(seconds=-1)
|
||||
|
||||
|
||||
def run_git_command(cmd, cwd=None) -> str:
|
||||
_LOGGER.debug("Running git command: %s", " ".join(cmd))
|
||||
class GitException(cv.Invalid):
|
||||
"""Base exception for git-related errors."""
|
||||
|
||||
|
||||
class GitNotInstalledError(GitException):
|
||||
"""Exception raised when git is not installed on the system."""
|
||||
|
||||
|
||||
class GitCommandError(GitException):
|
||||
"""Exception raised when a git command fails."""
|
||||
|
||||
|
||||
class GitRepositoryError(GitException):
|
||||
"""Exception raised when a git repository is in an invalid state."""
|
||||
|
||||
|
||||
def run_git_command(cmd: list[str], git_dir: Path | None = None) -> str:
|
||||
if git_dir is not None:
|
||||
_LOGGER.debug(
|
||||
"Running git command with repository isolation: %s (git_dir=%s)",
|
||||
" ".join(cmd),
|
||||
git_dir,
|
||||
)
|
||||
else:
|
||||
_LOGGER.debug("Running git command: %s", " ".join(cmd))
|
||||
|
||||
# Set up environment for repository isolation if git_dir is provided
|
||||
# Force git to only operate on this specific repository by setting
|
||||
# GIT_DIR and GIT_WORK_TREE. This prevents git from walking up the
|
||||
# directory tree to find parent repositories when the target repo's
|
||||
# .git directory is corrupt. Without this, commands like 'git stash'
|
||||
# could accidentally operate on parent repositories (e.g., the main
|
||||
# ESPHome repo) instead of failing, causing data loss.
|
||||
env: dict[str, str] | None = None
|
||||
cwd: str | None = None
|
||||
if git_dir is not None:
|
||||
env = {
|
||||
**subprocess.os.environ,
|
||||
"GIT_DIR": str(Path(git_dir) / ".git"),
|
||||
"GIT_WORK_TREE": str(git_dir),
|
||||
}
|
||||
cwd = str(git_dir)
|
||||
|
||||
try:
|
||||
ret = subprocess.run(
|
||||
cmd, cwd=cwd, capture_output=True, check=False, close_fds=False
|
||||
cmd,
|
||||
cwd=cwd,
|
||||
capture_output=True,
|
||||
check=False,
|
||||
close_fds=False,
|
||||
env=env,
|
||||
)
|
||||
except FileNotFoundError as err:
|
||||
raise cv.Invalid(
|
||||
raise GitNotInstalledError(
|
||||
"git is not installed but required for external_components.\n"
|
||||
"Please see https://git-scm.com/book/en/v2/Getting-Started-Installing-Git for installing git"
|
||||
) from err
|
||||
@@ -33,8 +80,8 @@ def run_git_command(cmd, cwd=None) -> str:
|
||||
err_str = ret.stderr.decode("utf-8")
|
||||
lines = [x.strip() for x in err_str.splitlines()]
|
||||
if lines[-1].startswith("fatal:"):
|
||||
raise cv.Invalid(lines[-1][len("fatal: ") :])
|
||||
raise cv.Invalid(err_str)
|
||||
raise GitCommandError(lines[-1][len("fatal: ") :])
|
||||
raise GitCommandError(err_str)
|
||||
|
||||
return ret.stdout.decode("utf-8").strip()
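As a usage sketch (the checkout path is invented), passing git_dir pins GIT_DIR/GIT_WORK_TREE to the target checkout, so a command against a damaged repository should fail with GitCommandError rather than quietly acting on a parent repository:

```python
from pathlib import Path

repo = Path("/data/external_components/abc123")  # hypothetical managed checkout

try:
    sha = run_git_command(["git", "rev-parse", "HEAD"], git_dir=repo)
except GitCommandError as err:
    # The target checkout is the only repository git is allowed to see here.
    print(f"repository needs recovery: {err}")
```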
@@ -55,6 +102,7 @@ def clone_or_update(
|
||||
username: str = None,
|
||||
password: str = None,
|
||||
submodules: list[str] | None = None,
|
||||
_recover_broken: bool = True,
|
||||
) -> tuple[Path, Callable[[], None] | None]:
|
||||
key = f"{url}@{ref}"
|
||||
|
||||
@@ -75,15 +123,15 @@ def clone_or_update(
|
||||
# We need to fetch the PR branch first, otherwise git will complain
|
||||
# about missing objects
|
||||
_LOGGER.info("Fetching %s", ref)
|
||||
run_git_command(["git", "fetch", "--", "origin", ref], str(repo_dir))
|
||||
run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
|
||||
run_git_command(["git", "fetch", "--", "origin", ref], git_dir=repo_dir)
|
||||
run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], git_dir=repo_dir)
|
||||
|
||||
if submodules is not None:
|
||||
_LOGGER.info(
|
||||
"Initialising submodules (%s) for %s", ", ".join(submodules), key
|
||||
"Initializing submodules (%s) for %s", ", ".join(submodules), key
|
||||
)
|
||||
run_git_command(
|
||||
["git", "submodule", "update", "--init"] + submodules, str(repo_dir)
|
||||
["git", "submodule", "update", "--init"] + submodules, git_dir=repo_dir
|
||||
)
|
||||
|
||||
else:
|
||||
@@ -99,32 +147,82 @@ def clone_or_update(
|
||||
file_timestamp = Path(repo_dir / ".git" / "HEAD")
|
||||
age = datetime.now() - datetime.fromtimestamp(file_timestamp.stat().st_mtime)
|
||||
if refresh is None or age.total_seconds() > refresh.total_seconds:
|
||||
old_sha = run_git_command(["git", "rev-parse", "HEAD"], str(repo_dir))
|
||||
_LOGGER.info("Updating %s", key)
|
||||
_LOGGER.debug("Location: %s", repo_dir)
|
||||
# Stash local changes (if any)
|
||||
run_git_command(
|
||||
["git", "stash", "push", "--include-untracked"], str(repo_dir)
|
||||
)
|
||||
# Fetch remote ref
|
||||
cmd = ["git", "fetch", "--", "origin"]
|
||||
if ref is not None:
|
||||
cmd.append(ref)
|
||||
run_git_command(cmd, str(repo_dir))
|
||||
# Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch)
|
||||
run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
|
||||
# Try to update the repository, recovering from broken state if needed
|
||||
old_sha: str | None = None
|
||||
try:
|
||||
# First verify the repository is valid by checking HEAD
|
||||
# Use git_dir parameter to prevent git from walking up to parent repos
|
||||
old_sha = run_git_command(
|
||||
["git", "rev-parse", "HEAD"], git_dir=repo_dir
|
||||
)
|
||||
|
||||
_LOGGER.info("Updating %s", key)
|
||||
_LOGGER.debug("Location: %s", repo_dir)
|
||||
|
||||
# Stash local changes (if any)
|
||||
# Use git_dir to ensure this only affects the specific repo
|
||||
run_git_command(
|
||||
["git", "stash", "push", "--include-untracked"],
|
||||
git_dir=repo_dir,
|
||||
)
|
||||
|
||||
# Fetch remote ref
|
||||
cmd = ["git", "fetch", "--", "origin"]
|
||||
if ref is not None:
|
||||
cmd.append(ref)
|
||||
run_git_command(cmd, git_dir=repo_dir)
|
||||
|
||||
# Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch)
|
||||
run_git_command(
|
||||
["git", "reset", "--hard", "FETCH_HEAD"],
|
||||
git_dir=repo_dir,
|
||||
)
|
||||
except GitException as err:
|
||||
# Repository is in a broken state or update failed
|
||||
# Only attempt recovery once to prevent infinite recursion
|
||||
if not _recover_broken:
|
||||
_LOGGER.error(
|
||||
"Repository %s recovery failed, cannot retry (already attempted once)",
|
||||
key,
|
||||
)
|
||||
raise
|
||||
|
||||
_LOGGER.warning(
|
||||
"Repository %s has issues (%s), attempting recovery",
|
||||
key,
|
||||
err,
|
||||
)
|
||||
_LOGGER.info("Removing broken repository at %s", repo_dir)
|
||||
shutil.rmtree(repo_dir)
|
||||
_LOGGER.info("Successfully removed broken repository, re-cloning...")
|
||||
|
||||
# Recursively call clone_or_update to re-clone
|
||||
# Set _recover_broken=False to prevent infinite recursion
|
||||
result = clone_or_update(
|
||||
url=url,
|
||||
ref=ref,
|
||||
refresh=refresh,
|
||||
domain=domain,
|
||||
username=username,
|
||||
password=password,
|
||||
submodules=submodules,
|
||||
_recover_broken=False,
|
||||
)
|
||||
_LOGGER.info("Repository %s successfully recovered", key)
|
||||
return result
|
||||
|
||||
if submodules is not None:
|
||||
_LOGGER.info(
|
||||
"Updating submodules (%s) for %s", ", ".join(submodules), key
|
||||
)
|
||||
run_git_command(
|
||||
["git", "submodule", "update", "--init"] + submodules, str(repo_dir)
|
||||
["git", "submodule", "update", "--init"] + submodules,
|
||||
git_dir=repo_dir,
|
||||
)
|
||||
|
||||
def revert():
|
||||
_LOGGER.info("Reverting changes to %s -> %s", key, old_sha)
|
||||
run_git_command(["git", "reset", "--hard", old_sha], str(repo_dir))
|
||||
run_git_command(["git", "reset", "--hard", old_sha], git_dir=repo_dir)
|
||||
|
||||
return repo_dir, revert
|
||||
|
||||
|
||||
@@ -10,9 +10,9 @@ tzdata>=2021.1 # from time
|
||||
pyserial==3.5
|
||||
platformio==6.1.18 # When updating platformio, also update /docker/Dockerfile
|
||||
esptool==5.1.0
|
||||
click==8.3.0
|
||||
click==8.1.7
|
||||
esphome-dashboard==20251013.0
|
||||
aioesphomeapi==42.0.0
|
||||
aioesphomeapi==42.2.0
|
||||
zeroconf==0.148.0
|
||||
puremagic==1.30
|
||||
ruamel.yaml==0.18.15 # dashboard_import
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
pylint==4.0.1
|
||||
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
|
||||
ruff==0.14.0 # also change in .pre-commit-config.yaml when updating
|
||||
ruff==0.14.1 # also change in .pre-commit-config.yaml when updating
|
||||
pyupgrade==3.21.0 # also change in .pre-commit-config.yaml when updating
|
||||
pre-commit
|
||||
|
||||
|
||||
@@ -50,12 +50,23 @@ PACKAGE_DEPENDENCIES = {

# Bus types that can be defined directly in config files
# Components defining these directly cannot be grouped (they create unique bus IDs)
DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus")
DIRECT_BUS_TYPES = (
    "i2c",
    "spi",
    "uart",
    "modbus",
    "remote_transmitter",
    "remote_receiver",
)

# Signature for components with no bus requirements
# These components can be merged with any other group
NO_BUSES_SIGNATURE = "no_buses"

# Prefix for isolated component signatures
# Isolated components have unique signatures and cannot be merged with others
ISOLATED_SIGNATURE_PREFIX = "isolated_"

# Base bus components - these ARE the bus implementations and should not
# be flagged as needing migration since they are the platform/base components
BASE_BUS_COMPONENTS = {
@@ -64,6 +75,8 @@ BASE_BUS_COMPONENTS = {
    "uart",
    "modbus",
    "canbus",
    "remote_transmitter",
    "remote_receiver",
}

# Components that must be tested in isolation (not grouped or batched with others)
@@ -75,11 +88,10 @@ ISOLATED_COMPONENTS = {
    "ethernet": "Defines ethernet: which conflicts with wifi: used by most components",
    "ethernet_info": "Related to ethernet component which conflicts with wifi",
    "lvgl": "Defines multiple SDL displays on host platform that conflict when merged with other display configs",
    "mapping": "Uses dict format for image/display sections incompatible with standard list format - ESPHome merge_config cannot handle",
    "openthread": "Conflicts with wifi: used by most components",
    "openthread_info": "Conflicts with wifi: used by most components",
    "matrix_keypad": "Needs isolation due to keypad",
    "mcp4725": "no YAML config to specify i2c bus id",
    "mcp47a1": "no YAML config to specify i2c bus id",
    "modbus_controller": "Defines multiple modbus buses for testing client/server functionality - conflicts with package modbus bus",
    "neopixelbus": "RMT type conflict with ESP32 Arduino/ESP-IDF headers (enum vs struct rmt_channel_t)",
    "packages": "cannot merge packages",
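
# --- Illustrative sketch; not part of the diff above ---
# Roughly how these constants are consulted when deciding whether a component
# can share a merged build with others (simplified model of the grouping logic
# elsewhere in this PR; the helper name is made up):
def can_be_grouped_sketch(component: str, defines_bus_directly: bool) -> bool:
    if component in ISOLATED_COMPONENTS:
        return False  # documented reason to test it alone
    if component in BASE_BUS_COMPONENTS:
        return False  # it *is* the bus implementation
    if defines_bus_directly:
        return False  # inline i2c:/spi:/uart:/... blocks create unique bus IDs
    return True
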
@@ -368,6 +380,143 @@ def analyze_all_components(
    return components, non_groupable, direct_bus_components


@lru_cache(maxsize=256)
def _get_bus_configs(buses: tuple[str, ...]) -> frozenset[tuple[str, str]]:
    """Map bus type to set of configs for that type.

    Args:
        buses: Tuple of bus package names (e.g., ("uart_9600", "i2c"))

    Returns:
        Frozenset of (base_type, full_config) tuples
        Example: frozenset({("uart", "uart_9600"), ("i2c", "i2c")})
    """
    # Split on underscore to get base type: "uart_9600" -> "uart", "i2c" -> "i2c"
    return frozenset((bus.split("_", 1)[0], bus) for bus in buses)


@lru_cache(maxsize=1024)
def are_buses_compatible(buses1: tuple[str, ...], buses2: tuple[str, ...]) -> bool:
    """Check if two bus tuples are compatible for merging.

    Two bus lists are compatible if they don't have conflicting configurations
    for the same bus type. For example:
    - ("ble", "uart") and ("i2c",) are compatible (different buses)
    - ("uart_9600",) and ("uart_19200",) are NOT compatible (same bus, different configs)
    - ("uart_9600",) and ("uart_9600",) are compatible (same bus, same config)

    Args:
        buses1: First tuple of bus package names
        buses2: Second tuple of bus package names

    Returns:
        True if buses can be merged without conflicts
    """
    configs1 = _get_bus_configs(buses1)
    configs2 = _get_bus_configs(buses2)

    # Group configs by base type
    bus_types1: dict[str, set[str]] = {}
    for base_type, full_config in configs1:
        if base_type not in bus_types1:
            bus_types1[base_type] = set()
        bus_types1[base_type].add(full_config)

    bus_types2: dict[str, set[str]] = {}
    for base_type, full_config in configs2:
        if base_type not in bus_types2:
            bus_types2[base_type] = set()
        bus_types2[base_type].add(full_config)

    # Check for conflicts: same bus type with different configs
    for bus_type, configs in bus_types1.items():
        if bus_type not in bus_types2:
            continue  # No conflict - different bus types
        # Same bus type - check if configs match
        if configs != bus_types2[bus_type]:
            return False  # Conflict - same bus type, different configs

    return True  # No conflicts found


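# --- Illustrative usage; not part of the diff above ---
# are_buses_compatible() only rejects pairs that configure the *same* bus type
# differently; disjoint bus types always merge. Expected behaviour, assuming
# the bus package naming used above ("uart_9600", "i2c", ...):
assert are_buses_compatible(("ble", "uart"), ("i2c",)) is True
assert are_buses_compatible(("uart_9600",), ("uart_9600", "i2c")) is True
assert are_buses_compatible(("uart_9600",), ("uart_19200",)) is False

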
def merge_compatible_bus_groups(
    grouped_components: dict[tuple[str, str], list[str]],
) -> dict[tuple[str, str], list[str]]:
    """Merge groups with compatible (non-conflicting) buses.

    This function takes groups keyed by (platform, bus_signature) and merges
    groups that share the same platform and have compatible bus configurations.
    Two groups can be merged if their buses don't conflict - meaning they don't
    have different configurations for the same bus type.

    For example:
    - ["ble"] + ["uart"] = compatible (different buses)
    - ["uart_9600"] + ["uart_19200"] = incompatible (same bus, different configs)
    - ["uart_9600"] + ["uart_9600"] = compatible (same bus, same config)

    Args:
        grouped_components: Dictionary mapping (platform, signature) to list of component names

    Returns:
        Dictionary with same structure but with compatible groups merged
    """
    merged_groups: dict[tuple[str, str], list[str]] = {}
    processed_keys: set[tuple[str, str]] = set()

    for (platform1, sig1), comps1 in sorted(grouped_components.items()):
        if (platform1, sig1) in processed_keys:
            continue

        # Skip NO_BUSES_SIGNATURE - kept separate for flexible batch distribution
        # These components have no bus requirements and can be added to any batch
        # as "fillers" for load balancing across CI runners
        if sig1 == NO_BUSES_SIGNATURE:
            merged_groups[(platform1, sig1)] = comps1
            processed_keys.add((platform1, sig1))
            continue

        # Skip isolated components - they can't be merged with others
        if sig1.startswith(ISOLATED_SIGNATURE_PREFIX):
            merged_groups[(platform1, sig1)] = comps1
            processed_keys.add((platform1, sig1))
            continue

        # Start with this group's components
        merged_comps: list[str] = list(comps1)
        merged_sig: str = sig1
        processed_keys.add((platform1, sig1))

        # Get buses for this group as tuple for caching
        buses1: tuple[str, ...] = tuple(sorted(sig1.split("+")))

        # Try to merge with other groups on same platform
        for (platform2, sig2), comps2 in sorted(grouped_components.items()):
            if (platform2, sig2) in processed_keys:
                continue
            if platform2 != platform1:
                continue  # Different platforms can't be merged
            if sig2 == NO_BUSES_SIGNATURE:
                continue  # Keep separate for flexible batch distribution
            if sig2.startswith(ISOLATED_SIGNATURE_PREFIX):
                continue  # Isolated components can't be merged

            # Check if buses are compatible
            buses2: tuple[str, ...] = tuple(sorted(sig2.split("+")))
            if are_buses_compatible(buses1, buses2):
                # Compatible! Merge this group
                merged_comps.extend(comps2)
                processed_keys.add((platform2, sig2))
                # Update merged signature to include all unique buses
                all_buses: set[str] = set(buses1) | set(buses2)
                merged_sig = "+".join(sorted(all_buses))
                buses1 = tuple(sorted(all_buses))  # Update for next iteration

        # Store merged group
        merged_groups[(platform1, merged_sig)] = merged_comps

    return merged_groups


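# --- Illustrative usage; not part of the diff above ---
# Example of the cross-bus merge (component names are made up): the "ble"
# group merges with the first compatible group in sorted order (uart_19200),
# after which "uart_9600" conflicts on the uart bus and stays separate.
example_groups = {
    ("esp32-idf", "ble"): ["comp_a"],
    ("esp32-idf", "uart_9600"): ["comp_b"],
    ("esp32-idf", "uart_19200"): ["comp_c"],
}
# merge_compatible_bus_groups(example_groups) ->
#   {("esp32-idf", "ble+uart_19200"): ["comp_a", "comp_c"],
#    ("esp32-idf", "uart_9600"): ["comp_b"]}

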
def create_grouping_signature(
    platform_buses: dict[str, list[str]], platform: str
) -> str:

@@ -11,6 +11,7 @@ from typing import Any

import aioesphomeapi.api_options_pb2 as pb
import google.protobuf.descriptor_pb2 as descriptor
from google.protobuf.descriptor_pb2 import FieldDescriptorProto


class WireType(IntEnum):
@@ -148,7 +149,7 @@ class TypeInfo(ABC):
    @property
    def repeated(self) -> bool:
        """Check if the field is repeated."""
        return self._field.label == 3
        return self._field.label == FieldDescriptorProto.LABEL_REPEATED

    @property
    def wire_type(self) -> WireType:
@@ -337,7 +338,7 @@ def create_field_type_info(
    needs_encode: bool = True,
) -> TypeInfo:
    """Create the appropriate TypeInfo instance for a field, handling repeated fields and custom options."""
    if field.label == 3:  # repeated
    if field.label == FieldDescriptorProto.LABEL_REPEATED:
        # Check if this repeated field has fixed_array_with_length_define option
        if (
            fixed_size := get_field_opt(field, pb.fixed_array_with_length_define)
@@ -1879,6 +1880,9 @@ def build_message_type(
    )
    public_content.append("#endif")

    # Collect fixed_vector fields for custom decode generation
    fixed_vector_fields = []

    for field in desc.field:
        # Skip deprecated fields completely
        if field.options.deprecated:
@@ -1887,7 +1891,7 @@ def build_message_type(
        # Validate that fixed_array_size is only used in encode-only messages
        if (
            needs_decode
            and field.label == 3
            and field.label == FieldDescriptorProto.LABEL_REPEATED
            and get_field_opt(field, pb.fixed_array_size) is not None
        ):
            raise ValueError(
@@ -1900,7 +1904,7 @@ def build_message_type(
        # Validate that fixed_array_with_length_define is only used in encode-only messages
        if (
            needs_decode
            and field.label == 3
            and field.label == FieldDescriptorProto.LABEL_REPEATED
            and get_field_opt(field, pb.fixed_array_with_length_define) is not None
        ):
            raise ValueError(
@@ -1910,6 +1914,14 @@ def build_message_type(
                f"since we cannot trust or control the number of items received from clients."
            )

        # Collect fixed_vector repeated fields for custom decode generation
        if (
            needs_decode
            and field.label == FieldDescriptorProto.LABEL_REPEATED
            and get_field_opt(field, pb.fixed_vector, False)
        ):
            fixed_vector_fields.append((field.name, field.number))

        ti = create_field_type_info(field, needs_decode, needs_encode)

        # Skip field declarations for fields that are in the base class
@@ -2018,6 +2030,22 @@ def build_message_type(
        prot = "bool decode_64bit(uint32_t field_id, Proto64Bit value) override;"
        protected_content.insert(0, prot)

    # Generate custom decode() override for messages with FixedVector fields
    if fixed_vector_fields:
        # Generate the decode() implementation in cpp
        o = f"void {desc.name}::decode(const uint8_t *buffer, size_t length) {{\n"
        # Count and init each FixedVector field
        for field_name, field_number in fixed_vector_fields:
            o += f" uint32_t count_{field_name} = ProtoDecodableMessage::count_repeated_field(buffer, length, {field_number});\n"
            o += f" this->{field_name}.init(count_{field_name});\n"
        # Call parent decode to populate the fields
        o += " ProtoDecodableMessage::decode(buffer, length);\n"
        o += "}\n"
        cpp += o
        # Generate the decode() declaration in header (public method)
        prot = "void decode(const uint8_t *buffer, size_t length) override;"
        public_content.append(prot)

    # Only generate encode method if this message needs encoding and has fields
    if needs_encode and encode:
        o = f"void {desc.name}::encode(ProtoWriteBuffer buffer) const {{"

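# --- Illustrative note; not part of the diff above ---
# The magic number 3 replaced throughout this file is protobuf's repeated-field
# label; the named enum makes the intent explicit. Quick sanity check:
from google.protobuf.descriptor_pb2 import FieldDescriptorProto

assert FieldDescriptorProto.LABEL_REPEATED == 3
assert FieldDescriptorProto.LABEL_OPTIONAL == 1
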
@@ -25,6 +25,7 @@ int main() { return 0;}
    Path(zephyr_dir / "prj.conf").write_text(
        """
CONFIG_NEWLIB_LIBC=y
CONFIG_BT=y
CONFIG_ADC=y
""",
        encoding="utf-8",

@@ -185,17 +185,20 @@ def main():
        "-c",
        "--changed",
        action="store_true",
        help="List all components required for testing based on changes (includes dependencies)",
        help="List all components with dependencies (used by clang-tidy). "
        "When base test infrastructure changes, returns ALL components.",
    )
    parser.add_argument(
        "--changed-direct",
        action="store_true",
        help="List only directly changed components (without dependencies)",
        help="List only directly changed components, ignoring infrastructure changes "
        "(used by CI for isolation decisions)",
    )
    parser.add_argument(
        "--changed-with-deps",
        action="store_true",
        help="Output JSON with both directly changed and all changed components",
        help="Output JSON with both directly changed and all changed components "
        "(with dependencies), ignoring infrastructure changes (used by CI for test determination)",
    )
    parser.add_argument(
        "-b", "--branch", help="Branch to compare changed files against"
@@ -213,12 +216,34 @@ def main():
        # When --changed* is passed, only get the changed files
        changed = changed_files(args.branch)

        # If any base test file(s) changed, there's no need to filter out components
        if any("tests/test_build_components" in file for file in changed):
            # Need to get all component files
        # If any base test file(s) changed, we need to check all components
        # BUT only for --changed (used by clang-tidy for comprehensive checking)
        # NOT for --changed-direct or --changed-with-deps (used by CI for targeted testing)
        #
        # Flag usage:
        # - --changed: Used by clang-tidy (script/helpers.py get_changed_components)
        #   Returns: All components with dependencies when base test files change
        #   Reason: Test infrastructure changes may affect any component
        #
        # - --changed-direct: Used by CI isolation (script/determine-jobs.py)
        #   Returns: Only components with actual code changes (not infrastructure)
        #   Reason: Only directly changed components need isolated testing
        #
        # - --changed-with-deps: Used by CI test determination (script/determine-jobs.py)
        #   Returns: Components with code changes + their dependencies (not infrastructure)
        #   Reason: CI needs to test changed components and their dependents
        base_test_changed = any(
            "tests/test_build_components" in file for file in changed
        )

        if base_test_changed and not args.changed_direct and not args.changed_with_deps:
            # Base test infrastructure changed - load all component files
            # This is for --changed (clang-tidy) which needs comprehensive checking
            files = get_all_component_files()
        else:
            # Only look at changed component files
            # Only look at changed component files (ignore infrastructure changes)
            # For --changed-direct: only actual component code changes matter (for isolation)
            # For --changed-with-deps: only actual component code changes matter (for testing)
            files = [f for f in changed if filter_component_files(f)]
    else:
        # Get all component files

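# --- Illustrative usage; not part of the diff above ---
# How the three flags documented above are meant to be invoked; the script
# path is an assumption here, used only for illustration:
#   python3 script/list-components.py --changed -b dev            # clang-tidy: all affected, incl. dependencies
#   python3 script/list-components.py --changed-direct -b dev     # CI isolation: direct code changes only
#   python3 script/list-components.py --changed-with-deps -b dev  # CI tests: JSON with both lists
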
@@ -16,6 +16,7 @@ The merger handles:
from __future__ import annotations

import argparse
from functools import lru_cache
from pathlib import Path
import re
import sys
@@ -28,6 +29,10 @@ from esphome import yaml_util
from esphome.config_helpers import merge_config
from script.analyze_component_buses import PACKAGE_DEPENDENCIES, get_common_bus_packages

# Prefix for dependency markers in package tracking
# Used to mark packages that are included transitively (e.g., uart via modbus)
DEPENDENCY_MARKER_PREFIX = "_dep_"


def load_yaml_file(yaml_file: Path) -> dict:
    """Load YAML file using ESPHome's YAML loader.
@@ -44,6 +49,34 @@ def load_yaml_file(yaml_file: Path) -> dict:
    return yaml_util.load_yaml(yaml_file)


@lru_cache(maxsize=256)
def get_component_packages(
    component_name: str, platform: str, tests_dir_str: str
) -> dict:
    """Get packages dict from a component's test file with caching.

    This function is cached to avoid re-loading and re-parsing the same file
    multiple times when extracting packages during cross-bus merging.

    Args:
        component_name: Name of the component
        platform: Platform name (e.g., "esp32-idf")
        tests_dir_str: String path to tests/components directory (must be string for cache hashability)

    Returns:
        Dictionary with 'packages' key containing the raw packages dict from the YAML,
        or empty dict if no packages section exists
    """
    tests_dir = Path(tests_dir_str)
    test_file = tests_dir / component_name / f"test.{platform}.yaml"
    comp_data = load_yaml_file(test_file)

    if "packages" not in comp_data or not isinstance(comp_data["packages"], dict):
        return {}

    return comp_data["packages"]


def extract_packages_from_yaml(data: dict) -> dict[str, str]:
    """Extract COMMON BUS package includes from parsed YAML.

@@ -82,7 +115,7 @@ def extract_packages_from_yaml(data: dict) -> dict[str, str]:
            if dep not in common_bus_packages:
                continue
            # Mark as included via dependency
            packages[f"_dep_{dep}"] = f"(included via {name})"
            packages[f"{DEPENDENCY_MARKER_PREFIX}{dep}"] = f"(included via {name})"

    return packages

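# --- Illustrative sketch; not part of the diff above ---
# Simplified model of the dependency marking above: when a package (e.g.
# modbus) transitively includes another bus (uart), a _dep_-prefixed marker is
# recorded so the merger can skip re-adding uart separately later. The helper
# name and data below are made up:
def mark_dependencies_sketch(
    packages: dict[str, str], deps: dict[str, list[str]]
) -> dict[str, str]:
    out = dict(packages)
    for name in packages:
        for dep in deps.get(name, []):
            out[f"{DEPENDENCY_MARKER_PREFIX}{dep}"] = f"(included via {name})"
    return out

# mark_dependencies_sketch({"modbus": "<modbus pkg>"}, {"modbus": ["uart"]})
# -> {"modbus": "<modbus pkg>", "_dep_uart": "(included via modbus)"}
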
@@ -195,6 +228,9 @@ def merge_component_configs(
    # Start with empty config
    merged_config_data = {}

    # Convert tests_dir to string for caching
    tests_dir_str = str(tests_dir)

    # Process each component
    for comp_name in component_names:
        comp_dir = tests_dir / comp_name
@@ -206,26 +242,29 @@ def merge_component_configs(
        # Load the component's test file
        comp_data = load_yaml_file(test_file)

        # Validate packages are compatible
        # Components with no packages (no_buses) can merge with any group
        # Merge packages from all components (cross-bus merging)
        # Components can have different packages (e.g., one with ble, another with uart)
        # as long as they don't conflict (checked by are_buses_compatible before calling this)
        comp_packages = extract_packages_from_yaml(comp_data)

        if all_packages is None:
            # First component - set the baseline
            all_packages = comp_packages
        elif not comp_packages:
            # This component has no packages (no_buses) - it can merge with any group
            pass
        elif not all_packages:
            # Previous components had no packages, but this one does - adopt these packages
            all_packages = comp_packages
        elif comp_packages != all_packages:
            # Both have packages but they differ - this is an error
            raise ValueError(
                f"Component {comp_name} has different packages than previous components. "
                f"Expected: {all_packages}, Got: {comp_packages}. "
                f"All components must use the same common bus configs to be merged."
            )
            # First component - initialize package dict
            all_packages = comp_packages if comp_packages else {}
        elif comp_packages:
            # Merge packages - combine all unique package types
            # If both have the same package type, verify they're identical
            for pkg_name, pkg_config in comp_packages.items():
                if pkg_name in all_packages:
                    # Same package type - verify config matches
                    if all_packages[pkg_name] != pkg_config:
                        raise ValueError(
                            f"Component {comp_name} has conflicting config for package '{pkg_name}'. "
                            f"Expected: {all_packages[pkg_name]}, Got: {pkg_config}. "
                            f"Components with conflicting bus configs cannot be merged."
                        )
                else:
                    # New package type - add it
                    all_packages[pkg_name] = pkg_config

        # Handle $component_dir by replacing with absolute path
        # This allows components that use local file references to be grouped
@@ -287,26 +326,51 @@ def merge_component_configs(
        # merge_config handles list merging with ID-based deduplication automatically
        merged_config_data = merge_config(merged_config_data, comp_data)

    # Add packages back (only once, since they're identical)
    # IMPORTANT: Only re-add common bus packages (spi, i2c, uart, etc.)
    # Add merged packages back (union of all component packages)
    # IMPORTANT: Only include common bus packages (spi, i2c, uart, etc.)
    # Do NOT re-add component-specific packages as they contain unprefixed $component_dir refs
    if all_packages:
        first_comp_data = load_yaml_file(
            tests_dir / component_names[0] / f"test.{platform}.yaml"
        )
        if "packages" in first_comp_data and isinstance(
            first_comp_data["packages"], dict
        ):
            # Filter to only include common bus packages
            # Only dict format can contain common bus packages
            common_bus_packages = get_common_bus_packages()
            filtered_packages = {
                name: value
                for name, value in first_comp_data["packages"].items()
                if name in common_bus_packages
            }
            if filtered_packages:
                merged_config_data["packages"] = filtered_packages
        # Build packages dict from merged all_packages
        # all_packages is a dict mapping package_name -> str(package_value)
        # We need to reconstruct the actual package values by loading them from any component
        # Since packages with the same name must have identical configs (verified above),
        # we can load the package value from the first component that has each package
        common_bus_packages = get_common_bus_packages()
        merged_packages: dict[str, Any] = {}

        # Collect packages that are included as dependencies
        # If modbus is present, uart is included via modbus.packages.uart
        packages_to_skip: set[str] = set()
        for pkg_name in all_packages:
            if pkg_name.startswith(DEPENDENCY_MARKER_PREFIX):
                # Extract the actual package name (remove _dep_ prefix)
                dep_name = pkg_name[len(DEPENDENCY_MARKER_PREFIX) :]
                packages_to_skip.add(dep_name)

        for pkg_name in all_packages:
            # Skip dependency markers
            if pkg_name.startswith(DEPENDENCY_MARKER_PREFIX):
                continue
            # Skip non-common-bus packages
            if pkg_name not in common_bus_packages:
                continue
            # Skip packages that are included as dependencies of other packages
            # This prevents duplicate definitions (e.g., uart via modbus + uart separately)
            if pkg_name in packages_to_skip:
                continue

            # Find a component that has this package and extract its value
            # Uses cached lookup to avoid re-loading the same files
            for comp_name in component_names:
                comp_packages = get_component_packages(
                    comp_name, platform, tests_dir_str
                )
                if pkg_name in comp_packages:
                    merged_packages[pkg_name] = comp_packages[pkg_name]
                    break

        if merged_packages:
            merged_config_data["packages"] = merged_packages

    # Deduplicate items with same ID (keeps first occurrence)
    merged_config_data = deduplicate_by_id(merged_config_data)

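# --- Illustrative sketch; not part of the diff above ---
# Net effect of the package handling above, reduced to a tiny model: packages
# from all merged components are unioned, and a different value for the same
# package name is rejected. Helper name and data are made up:
def union_packages_sketch(package_sets: list[dict[str, str]]) -> dict[str, str]:
    merged: dict[str, str] = {}
    for pkgs in package_sets:
        for name, value in pkgs.items():
            if name in merged and merged[name] != value:
                raise ValueError(f"conflicting config for package '{name}'")
            merged[name] = value
    return merged

# union_packages_sketch([{"uart_9600": "u"}, {"i2c": "i"}, {"uart_9600": "u"}])
# -> {"uart_9600": "u", "i2c": "i"}
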
@@ -22,9 +22,11 @@ sys.path.insert(0, str(Path(__file__).parent.parent))

from script.analyze_component_buses import (
    ISOLATED_COMPONENTS,
    ISOLATED_SIGNATURE_PREFIX,
    NO_BUSES_SIGNATURE,
    analyze_all_components,
    create_grouping_signature,
    merge_compatible_bus_groups,
)

# Weighting for batch creation
@@ -33,6 +35,10 @@ from script.analyze_component_buses import (
ISOLATED_WEIGHT = 10
GROUPABLE_WEIGHT = 1

# Platform used for batching (platform-agnostic batching)
# Batches are split across CI runners and each runner tests all platforms
ALL_PLATFORMS = "all"


def has_test_files(component_name: str, tests_dir: Path) -> bool:
    """Check if a component has test files.
@@ -57,7 +63,7 @@ def create_intelligent_batches(
    tests_dir: Path,
    batch_size: int = 40,
    directly_changed: set[str] | None = None,
) -> list[list[str]]:
) -> tuple[list[list[str]], dict[tuple[str, str], list[str]]]:
    """Create batches optimized for component grouping.

    Args:
@@ -67,7 +73,9 @@ def create_intelligent_batches(
        directly_changed: Set of directly changed components (for logging only)

    Returns:
        List of component batches (lists of component names)
        Tuple of (batches, signature_groups) where:
        - batches: List of component batches (lists of component names)
        - signature_groups: Dict mapping (platform, signature) to component lists
    """
    # Filter out components without test files
    # Platform components like 'climate' and 'climate_ir' don't have test files
@@ -91,8 +99,9 @@ def create_intelligent_batches(

    # Group components by their bus signature ONLY (ignore platform)
    # All platforms will be tested by test_build_components.py for each batch
    # Key: signature, Value: list of components
    signature_groups: dict[str, list[str]] = defaultdict(list)
    # Key: (platform, signature), Value: list of components
    # We use ALL_PLATFORMS since batching is platform-agnostic
    signature_groups: dict[tuple[str, str], list[str]] = defaultdict(list)

    for component in components_with_tests:
        # Components that can't be grouped get unique signatures
@@ -107,7 +116,9 @@ def create_intelligent_batches(
            or (directly_changed and component in directly_changed)
        )
        if is_isolated:
            signature_groups[f"isolated_{component}"].append(component)
            signature_groups[
                (ALL_PLATFORMS, f"{ISOLATED_SIGNATURE_PREFIX}{component}")
            ].append(component)
            continue

        # Get signature from any platform (they should all have the same buses)
@@ -117,11 +128,17 @@ def create_intelligent_batches(
            if buses:
                signature = create_grouping_signature({platform: buses}, platform)
                # Group by signature only - platform doesn't matter for batching
                signature_groups[signature].append(component)
                # Use ALL_PLATFORMS since we're batching across all platforms
                signature_groups[(ALL_PLATFORMS, signature)].append(component)
                break  # Only use first platform for grouping
        else:
            # No buses found for any platform - can be grouped together
            signature_groups[NO_BUSES_SIGNATURE].append(component)
            signature_groups[(ALL_PLATFORMS, NO_BUSES_SIGNATURE)].append(component)

    # Merge compatible bus groups (cross-bus optimization)
    # This allows components with different buses (ble + uart) to be batched together
    # improving the efficiency of test_build_components.py grouping
    signature_groups = merge_compatible_bus_groups(signature_groups)

    # Create batches by keeping signature groups together
    # Components with the same signature stay in the same batches
@@ -132,8 +149,8 @@ def create_intelligent_batches(
    # 2. Sort groupable signatures by size (largest first)
    # 3. "no_buses" components CAN be grouped together
    def sort_key(item):
        signature, components = item
        is_isolated = signature.startswith("isolated_")
        (_platform, signature), components = item
        is_isolated = signature.startswith(ISOLATED_SIGNATURE_PREFIX)
        # Put "isolated_*" last (1), groupable first (0)
        # Within each category, sort by size (largest first)
        return (is_isolated, -len(components))
@@ -149,8 +166,8 @@ def create_intelligent_batches(
    current_batch = []
    current_weight = 0

    for signature, group_components in sorted_groups:
        is_isolated = signature.startswith("isolated_")
    for (_platform, signature), group_components in sorted_groups:
        is_isolated = signature.startswith(ISOLATED_SIGNATURE_PREFIX)
        weight_per_component = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT

        for component in group_components:
@@ -169,7 +186,7 @@ def create_intelligent_batches(
    if current_batch:
        batches.append(current_batch)

    return batches
    return batches, signature_groups


def main() -> int:
@@ -231,7 +248,7 @@ def main() -> int:
        return 1

    # Create intelligent batches
    batches = create_intelligent_batches(
    batches, signature_groups = create_intelligent_batches(
        components=components,
        tests_dir=args.tests_dir,
        batch_size=args.batch_size,
@@ -256,6 +273,58 @@ def main() -> int:
    # Re-analyze to get isolated component counts for summary
    _, non_groupable, _ = analyze_all_components(args.tests_dir)

    # Show grouping details
    print("\n=== Component Grouping Details ===", file=sys.stderr)
    # Sort groups by signature for readability
    groupable_groups = []
    isolated_groups = []
    for (platform, signature), group_comps in sorted(signature_groups.items()):
        if signature.startswith(ISOLATED_SIGNATURE_PREFIX):
            isolated_groups.append((signature, group_comps))
        else:
            groupable_groups.append((signature, group_comps))

    if groupable_groups:
        print(
            f"\nGroupable signatures ({len(groupable_groups)} merged groups after cross-bus optimization):",
            file=sys.stderr,
        )
        for signature, group_comps in sorted(
            groupable_groups, key=lambda x: (-len(x[1]), x[0])
        ):
            # Check if this is a merged signature (contains +)
            is_merged = "+" in signature and signature != NO_BUSES_SIGNATURE
            # Special handling for no_buses components
            if signature == NO_BUSES_SIGNATURE:
                print(
                    f" [{signature}]: {len(group_comps)} components (used as fillers across batches)",
                    file=sys.stderr,
                )
            else:
                merge_indicator = " [MERGED]" if is_merged else ""
                print(
                    f" [{signature}]{merge_indicator}: {len(group_comps)} components",
                    file=sys.stderr,
                )
            # Show first few components as examples
            examples = ", ".join(sorted(group_comps)[:8])
            if len(group_comps) > 8:
                examples += f", ... (+{len(group_comps) - 8} more)"
            print(f" → {examples}", file=sys.stderr)

    if isolated_groups:
        print(
            f"\nIsolated components ({len(isolated_groups)} components - tested individually):",
            file=sys.stderr,
        )
        isolated_names = sorted(
            [comp for _, comps in isolated_groups for comp in comps]
        )
        # Group isolated components for compact display
        for i in range(0, len(isolated_names), 10):
            chunk = isolated_names[i : i + 10]
            print(f" {', '.join(chunk)}", file=sys.stderr)

    # Count isolated vs groupable components
    all_batched_components = [comp for batch in batches for comp in batch]
    isolated_count = sum(

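# --- Illustrative sketch; not part of the diff above ---
# sort_key() above orders groups so groupable signatures come first (largest
# group first) and isolated_* signatures come last. Same ordering, inlined,
# on made-up data:
example_groups = {
    ("all", "isolated_lvgl"): ["lvgl"],
    ("all", "i2c"): ["comp_a", "comp_b", "comp_c"],
    ("all", "no_buses"): ["comp_d", "comp_e"],
}
ordered = sorted(
    example_groups.items(),
    key=lambda item: (item[0][1].startswith("isolated_"), -len(item[1])),
)
# -> i2c group (3 components), then no_buses (2), then isolated_lvgl last
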
@@ -17,11 +17,13 @@ from __future__ import annotations

import argparse
from collections import defaultdict
from dataclasses import dataclass
import hashlib
import os
from pathlib import Path
import subprocess
import sys
import time

# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))
@@ -34,32 +36,49 @@ from script.analyze_component_buses import (
    analyze_all_components,
    create_grouping_signature,
    is_platform_component,
    merge_compatible_bus_groups,
    uses_local_file_references,
)
from script.merge_component_configs import merge_component_configs

# Platform-specific maximum group sizes
# ESP8266 has limited IRAM and can't handle large component groups
PLATFORM_MAX_GROUP_SIZE = {
    "esp8266-ard": 10,  # ESP8266 Arduino has limited IRAM
    "esp8266-idf": 10,  # ESP8266 IDF also has limited IRAM
    # BK72xx now uses BK7252 board (1.62MB flash vs 1.03MB) - no limit needed
    # Other platforms can handle larger groups
}

@dataclass
class TestResult:
    """Store information about a single test run."""

    test_id: str
    components: list[str]
    platform: str
    success: bool
    duration: float
    command: str = ""
    test_type: str = "compile"  # "config" or "compile"


def show_disk_space_if_ci(esphome_command: str) -> None:
    """Show disk space usage if running in CI during compile.

    Only shows output during compilation (not config validation) since
    disk space is only relevant when actually building firmware.

    Args:
        esphome_command: The esphome command being run (config/compile/clean)
    """
    if os.environ.get("GITHUB_ACTIONS") and esphome_command == "compile":
        print("\n" + "=" * 80)
        print("Disk Space After Build:")
        print("=" * 80)
        subprocess.run(["df", "-h"], check=False)
        print("=" * 80 + "\n")
    # Only show disk space during compilation in CI
    # Config validation doesn't build anything so disk space isn't relevant
    if not os.environ.get("GITHUB_ACTIONS"):
        return
    if esphome_command != "compile":
        return

    print("\n" + "=" * 80)
    print("Disk Space After Build:")
    print("=" * 80)
    # Use sys.stdout.flush() to ensure output appears immediately
    sys.stdout.flush()
    subprocess.run(["df", "-h"], check=False, stdout=sys.stdout, stderr=sys.stderr)
    print("=" * 80 + "\n")
    sys.stdout.flush()


def find_component_tests(
@@ -80,7 +99,8 @@ def find_component_tests(
        if not comp_dir.is_dir():
            continue

        for test_file in comp_dir.glob("test.*.yaml"):
        # Find test files matching test.*.yaml or test-*.yaml patterns
        for test_file in comp_dir.glob("test[.-]*.yaml"):
            component_tests[comp_dir.name].append(test_file)

    return dict(component_tests)
@@ -128,6 +148,140 @@ def get_platform_base_files(base_dir: Path) -> dict[str, list[Path]]:
    return dict(platform_files)


def group_components_by_platform(
    failed_results: list[TestResult],
) -> dict[tuple[str, str], list[str]]:
    """Group failed components by platform and test type for simplified reproduction commands.

    Args:
        failed_results: List of failed test results

    Returns:
        Dictionary mapping (platform, test_type) to list of component names
    """
    platform_components: dict[tuple[str, str], list[str]] = {}
    for result in failed_results:
        key = (result.platform, result.test_type)
        if key not in platform_components:
            platform_components[key] = []
        platform_components[key].extend(result.components)

    # Remove duplicates and sort for each platform
    return {
        key: sorted(set(components)) for key, components in platform_components.items()
    }


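# --- Illustrative usage; not part of the diff above ---
# group_components_by_platform() collapses failed results into one entry per
# (platform, test_type), with component names deduplicated and sorted
# (example data, made-up component names):
failed = [
    TestResult("comp_a.test.esp32-idf", ["comp_a"], "esp32-idf", False, 1.0, test_type="compile"),
    TestResult("GROUPED[comp_b,comp_a].esp32-idf", ["comp_b", "comp_a"], "esp32-idf", False, 2.0, test_type="compile"),
]
assert group_components_by_platform(failed) == {
    ("esp32-idf", "compile"): ["comp_a", "comp_b"]
}
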
def format_github_summary(test_results: list[TestResult]) -> str:
    """Format test results as GitHub Actions job summary markdown.

    Args:
        test_results: List of all test results

    Returns:
        Markdown formatted summary string
    """
    # Separate results into passed and failed
    passed_results = [r for r in test_results if r.success]
    failed_results = [r for r in test_results if not r.success]

    lines = []

    # Header with emoji based on success/failure
    if failed_results:
        lines.append("## :x: Component Tests Failed\n")
    else:
        lines.append("## :white_check_mark: Component Tests Passed\n")

    # Summary statistics
    total_time = sum(r.duration for r in test_results)
    # Determine test type from results (all should be the same)
    test_type = test_results[0].test_type if test_results else "unknown"
    lines.append(
        f"**Results:** {len(passed_results)} passed, {len(failed_results)} failed\n"
    )
    lines.append(f"**Total time:** {total_time:.1f}s\n")
    lines.append(f"**Test type:** `{test_type}`\n")

    # Show failed tests if any
    if failed_results:
        lines.append("### Failed Tests\n")
        lines.append("| Test | Components | Platform | Duration |\n")
        lines.append("|------|-----------|----------|----------|\n")
        for result in failed_results:
            components_str = ", ".join(result.components)
            lines.append(
                f"| `{result.test_id}` | {components_str} | {result.platform} | {result.duration:.1f}s |\n"
            )
        lines.append("\n")

        # Show simplified commands to reproduce failures
        # Group all failed components by platform for a single command per platform
        lines.append("<details>\n")
        lines.append("<summary>Commands to reproduce failures</summary>\n\n")
        lines.append("```bash\n")

        # Generate one command per platform and test type
        platform_components = group_components_by_platform(failed_results)
        for platform, test_type in sorted(platform_components.keys()):
            components_csv = ",".join(platform_components[(platform, test_type)])
            lines.append(
                f"script/test_build_components.py -c {components_csv} -t {platform} -e {test_type}\n"
            )

        lines.append("```\n")
        lines.append("</details>\n")

    # Show passed tests
    if passed_results:
        lines.append("### Passed Tests\n\n")
        lines.append(f"{len(passed_results)} tests passed successfully\n")

        # Separate grouped and individual tests
        grouped_results = [r for r in passed_results if len(r.components) > 1]
        individual_results = [r for r in passed_results if len(r.components) == 1]

        if grouped_results:
            lines.append("#### Grouped Tests\n")
            lines.append("| Components | Platform | Count | Duration |\n")
            lines.append("|-----------|----------|-------|----------|\n")
            for result in grouped_results:
                components_str = ", ".join(result.components)
                lines.append(
                    f"| {components_str} | {result.platform} | {len(result.components)} | {result.duration:.1f}s |\n"
                )
            lines.append("\n")

        if individual_results:
            lines.append("#### Individual Tests\n")
            # Show first 10 individual tests with timing
            if len(individual_results) <= 10:
                lines.extend(
                    f"- `{result.test_id}` - {result.duration:.1f}s\n"
                    for result in individual_results
                )
            else:
                lines.extend(
                    f"- `{result.test_id}` - {result.duration:.1f}s\n"
                    for result in individual_results[:10]
                )
                lines.append(f"\n...and {len(individual_results) - 10} more\n")
            lines.append("\n")

    return "".join(lines)


def write_github_summary(test_results: list[TestResult]) -> None:
    """Write GitHub Actions job summary with test results and timing.

    Args:
        test_results: List of all test results
    """
    summary_content = format_github_summary(test_results)
    with open(os.environ["GITHUB_STEP_SUMMARY"], "a", encoding="utf-8") as f:
        f.write(summary_content)


def extract_platform_with_version(base_file: Path) -> str:
    """Extract platform with version from base filename.

@@ -151,7 +305,7 @@ def run_esphome_test(
    esphome_command: str,
    continue_on_fail: bool,
    use_testing_mode: bool = False,
) -> tuple[bool, str]:
) -> TestResult:
    """Run esphome test for a single component.

    Args:
@@ -166,7 +320,7 @@ def run_esphome_test(
        use_testing_mode: Whether to use --testing-mode flag

    Returns:
        Tuple of (success status, command string)
        TestResult object with test details and timing
    """
    test_name = test_file.stem.split(".")[0]

@@ -221,9 +375,13 @@ def run_esphome_test(
    if use_testing_mode:
        print(" (using --testing-mode)")

    start_time = time.time()
    test_id = f"{component}.{test_name}.{platform_with_version}"

    try:
        result = subprocess.run(cmd, check=False)
        success = result.returncode == 0
        duration = time.time() - start_time

        # Show disk space after build in CI during compile
        show_disk_space_if_ci(esphome_command)
@@ -236,12 +394,30 @@ def run_esphome_test(
            print(cmd_str)
            print()
            raise subprocess.CalledProcessError(result.returncode, cmd)
        return success, cmd_str

        return TestResult(
            test_id=test_id,
            components=[component],
            platform=platform_with_version,
            success=success,
            duration=duration,
            command=cmd_str,
            test_type=esphome_command,
        )
    except subprocess.CalledProcessError:
        duration = time.time() - start_time
        # Re-raise if we're not continuing on fail
        if not continue_on_fail:
            raise
        return False, cmd_str
        return TestResult(
            test_id=test_id,
            components=[component],
            platform=platform_with_version,
            success=False,
            duration=duration,
            command=cmd_str,
            test_type=esphome_command,
        )


def run_grouped_test(
@@ -253,7 +429,7 @@ def run_grouped_test(
    tests_dir: Path,
    esphome_command: str,
    continue_on_fail: bool,
) -> tuple[bool, str]:
) -> TestResult:
    """Run esphome test for a group of components with shared bus configs.

    Args:
@@ -267,7 +443,7 @@ def run_grouped_test(
        continue_on_fail: Whether to continue on failure

    Returns:
        Tuple of (success status, command string)
        TestResult object with test details and timing
    """
    # Create merged config
    group_name = "_".join(components[:3])  # Use first 3 components for name
@@ -294,8 +470,17 @@ def run_grouped_test(
        print(f"Error merging configs for {components}: {e}")
        if not continue_on_fail:
            raise
        # Return empty command string since we failed before building the command
        return False, f"# Failed during config merge: {e}"
        # Return TestResult for merge failure
        test_id = f"GROUPED[{','.join(components)}].{platform_with_version}"
        return TestResult(
            test_id=test_id,
            components=components,
            platform=platform_with_version,
            success=False,
            duration=0.0,
            command=f"# Failed during config merge: {e}",
            test_type=esphome_command,
        )

    # Create test file that includes merged config
    output_file = build_dir / f"test_{group_name}.{platform_with_version}.yaml"
@@ -334,9 +519,13 @@ def run_grouped_test(
    print(f"> [GROUPED: {components_str}] [{platform_with_version}]")
    print(" (using --testing-mode)")

    start_time = time.time()
    test_id = f"GROUPED[{','.join(components)}].{platform_with_version}"

    try:
        result = subprocess.run(cmd, check=False)
        success = result.returncode == 0
        duration = time.time() - start_time

        # Show disk space after build in CI during compile
        show_disk_space_if_ci(esphome_command)
@@ -349,12 +538,30 @@ def run_grouped_test(
            print(cmd_str)
            print()
            raise subprocess.CalledProcessError(result.returncode, cmd)
        return success, cmd_str

        return TestResult(
            test_id=test_id,
            components=components,
            platform=platform_with_version,
            success=success,
            duration=duration,
            command=cmd_str,
            test_type=esphome_command,
        )
    except subprocess.CalledProcessError:
        duration = time.time() - start_time
        # Re-raise if we're not continuing on fail
        if not continue_on_fail:
            raise
        return False, cmd_str
        return TestResult(
            test_id=test_id,
            components=components,
            platform=platform_with_version,
            success=False,
            duration=duration,
            command=cmd_str,
            test_type=esphome_command,
        )


def run_grouped_component_tests(
@@ -366,7 +573,7 @@ def run_grouped_component_tests(
    esphome_command: str,
    continue_on_fail: bool,
    additional_isolated: set[str] | None = None,
) -> tuple[set[tuple[str, str]], list[str], list[str], dict[str, str]]:
) -> tuple[set[tuple[str, str]], list[TestResult]]:
    """Run grouped component tests.

    Args:
@@ -380,12 +587,10 @@ def run_grouped_component_tests(
        additional_isolated: Additional components to treat as isolated (not grouped)

    Returns:
        Tuple of (tested_components, passed_tests, failed_tests, failed_commands)
        Tuple of (tested_components, test_results)
    """
    tested_components = set()
    passed_tests = []
    failed_tests = []
    failed_commands = {}  # Map test_id to command string
    test_results = []

    # Group components by platform and bus signature
    grouped_components: dict[tuple[str, str], list[str]] = defaultdict(list)
@@ -462,6 +667,11 @@ def run_grouped_component_tests(
        if signature:
            grouped_components[(platform, signature)].append(component)

    # Merge groups with compatible buses (cross-bus grouping optimization)
    # This allows mixing components with different buses (e.g., ble + uart)
    # as long as they don't have conflicting configurations for the same bus type
    grouped_components = merge_compatible_bus_groups(grouped_components)

    # Print detailed grouping plan
    print("\nGrouping Plan:")
    print("-" * 80)
@@ -560,28 +770,6 @@ def run_grouped_component_tests(
            # No other groups for this platform - keep no_buses components together
            grouped_components[(platform, NO_BUSES_SIGNATURE)] = no_buses_comps

    # Split groups that exceed platform-specific maximum sizes
    # ESP8266 has limited IRAM and can't handle large component groups
    split_groups = {}
    for (platform, signature), components in list(grouped_components.items()):
        max_size = PLATFORM_MAX_GROUP_SIZE.get(platform)
        if max_size and len(components) > max_size:
            # Split this group into smaller groups
            print(
                f"\n ℹ️ Splitting {platform} group (signature: {signature}) "
                f"from {len(components)} to max {max_size} components per group"
            )
            # Remove original group
            del grouped_components[(platform, signature)]
            # Create split groups
            for i in range(0, len(components), max_size):
                split_components = components[i : i + max_size]
                # Create unique signature for each split group
                split_signature = f"{signature}_split{i // max_size + 1}"
                split_groups[(platform, split_signature)] = split_components
    # Add split groups back
    grouped_components.update(split_groups)

    groups_to_test = []
    individual_tests = set()  # Use set to avoid duplicates

@@ -672,7 +860,7 @@ def run_grouped_component_tests(
            continue

        # Run grouped test
        success, cmd_str = run_grouped_test(
        test_result = run_grouped_test(
            components=components_to_group,
            platform=platform,
            platform_with_version=platform_with_version,
@@ -687,17 +875,10 @@ def run_grouped_component_tests(
        for comp in components_to_group:
            tested_components.add((comp, platform_with_version))

        # Record result for each component - show all components in grouped tests
        test_id = (
            f"GROUPED[{','.join(components_to_group)}].{platform_with_version}"
        )
        if success:
            passed_tests.append(test_id)
        else:
            failed_tests.append(test_id)
            failed_commands[test_id] = cmd_str
        # Store test result
        test_results.append(test_result)

    return tested_components, passed_tests, failed_tests, failed_commands
    return tested_components, test_results


def run_individual_component_test(
@@ -710,9 +891,7 @@ def run_individual_component_test(
    esphome_command: str,
    continue_on_fail: bool,
    tested_components: set[tuple[str, str]],
    passed_tests: list[str],
    failed_tests: list[str],
    failed_commands: dict[str, str],
    test_results: list[TestResult],
) -> None:
    """Run an individual component test if not already tested in a group.

@@ -726,16 +905,13 @@ def run_individual_component_test(
        esphome_command: ESPHome command
        continue_on_fail: Whether to continue on failure
        tested_components: Set of already tested components
        passed_tests: List to append passed test IDs
        failed_tests: List to append failed test IDs
        failed_commands: Dict to store failed test commands
        test_results: List to append test results
    """
    # Skip if already tested in a group
    if (component, platform_with_version) in tested_components:
        return

    test_name = test_file.stem.split(".")[0]
    success, cmd_str = run_esphome_test(
    test_result = run_esphome_test(
        component=component,
        test_file=test_file,
        platform=platform,
@@ -745,12 +921,7 @@ def run_individual_component_test(
        esphome_command=esphome_command,
        continue_on_fail=continue_on_fail,
    )
    test_id = f"{component}.{test_name}.{platform_with_version}"
    if success:
        passed_tests.append(test_id)
    else:
        failed_tests.append(test_id)
        failed_commands[test_id] = cmd_str
    test_results.append(test_result)


def test_components(
@@ -799,19 +970,12 @@ def test_components(
    print(f"Found {len(all_tests)} components to test")

    # Run tests
    failed_tests = []
    passed_tests = []
    test_results = []
    tested_components = set()  # Track which components were tested in groups
    failed_commands = {}  # Track commands for failed tests

    # First, run grouped tests if grouping is enabled
    if enable_grouping:
        (
            tested_components,
            passed_tests,
            failed_tests,
            failed_commands,
        ) = run_grouped_component_tests(
        tested_components, grouped_results = run_grouped_component_tests(
            all_tests=all_tests,
            platform_filter=platform_filter,
            platform_bases=platform_bases,
@@ -821,6 +985,7 @@ def test_components(
            continue_on_fail=continue_on_fail,
            additional_isolated=isolated_components,
        )
        test_results.extend(grouped_results)

    # Then run individual tests for components not in groups
    for component, test_files in sorted(all_tests.items()):
@@ -846,9 +1011,7 @@ def test_components(
                esphome_command=esphome_command,
                continue_on_fail=continue_on_fail,
                tested_components=tested_components,
                passed_tests=passed_tests,
                failed_tests=failed_tests,
                failed_commands=failed_commands,
                test_results=test_results,
            )
        else:
            # Platform-specific test
@@ -880,31 +1043,40 @@ def test_components(
                esphome_command=esphome_command,
                continue_on_fail=continue_on_fail,
                tested_components=tested_components,
                passed_tests=passed_tests,
                failed_tests=failed_tests,
                failed_commands=failed_commands,
                test_results=test_results,
            )

    # Separate results into passed and failed
    passed_results = [r for r in test_results if r.success]
    failed_results = [r for r in test_results if not r.success]

    # Print summary
    print("\n" + "=" * 80)
    print(f"Test Summary: {len(passed_tests)} passed, {len(failed_tests)} failed")
    print(f"Test Summary: {len(passed_results)} passed, {len(failed_results)} failed")
    print("=" * 80)

    if failed_tests:
    if failed_results:
        print("\nFailed tests:")
        for test in failed_tests:
            print(f" - {test}")
        for result in failed_results:
            print(f" - {result.test_id}")

        # Print failed commands at the end for easy copy-paste from CI logs
        # Print simplified commands grouped by platform and test type for easy copy-paste
        print("\n" + "=" * 80)
        print("Failed test commands (copy-paste to reproduce locally):")
        print("Commands to reproduce failures (copy-paste to reproduce locally):")
        print("=" * 80)
        for test in failed_tests:
            if test in failed_commands:
                print(f"\n# {test}")
                print(failed_commands[test])
        platform_components = group_components_by_platform(failed_results)
        for platform, test_type in sorted(platform_components.keys()):
            components_csv = ",".join(platform_components[(platform, test_type)])
            print(
                f"script/test_build_components.py -c {components_csv} -t {platform} -e {test_type}"
            )
        print()

    # Write GitHub Actions job summary if in CI
    if os.environ.get("GITHUB_STEP_SUMMARY"):
        write_github_summary(test_results)

    if failed_results:
        return 1

    return 0

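# --- Illustrative note; not part of the diff above ---
# With the grouping above, one reproduce line now covers every failed
# component for a given platform and test type, e.g. (hypothetical names):
#   script/test_build_components.py -c comp_a,comp_b -t esp32-idf -e compile
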
@@ -8,14 +8,12 @@ sensor:
    lambda: |-
      if (millis() > 10000) {
        return 0.6;
      } else {
        return 0.0;
      }
      return 0.0;
  - platform: template
    id: template_temperature
    lambda: |-
      if (millis() > 10000) {
        return 42.0;
      } else {
        return 0.0;
      }
      return 0.0;

@@ -1,5 +1,5 @@
substitutions:
  irq0_pin: GPIO13
  irq0_pin: GPIO0
  irq1_pin: GPIO15
  reset_pin: GPIO16


@@ -4,10 +4,13 @@ sensor:
    irq_pin: ${irq_pin}
    voltage:
      name: ADE7953 Voltage
      id: ade7953_i2c_voltage
    current_a:
      name: ADE7953 Current A
      id: ade7953_i2c_current_a
    current_b:
      name: ADE7953 Current B
      id: ade7953_i2c_current_b
    power_factor_a:
      name: ADE7953 Power Factor A
    power_factor_b:

@@ -4,13 +4,13 @@ sensor:
    irq_pin: ${irq_pin}
    voltage:
      name: ADE7953 Voltage
      id: ade7953_voltage
      id: ade7953_spi_voltage
    current_a:
      name: ADE7953 Current A
      id: ade7953_current_a
      id: ade7953_spi_current_a
    current_b:
      name: ADE7953 Current B
      id: ade7953_current_b
      id: ade7953_spi_current_b
    power_factor_a:
      name: ADE7953 Power Factor A
    power_factor_b:

@@ -5,9 +5,8 @@ sensor:
    lambda: |-
      if (millis() > 10000) {
        return 42.0;
      } else {
        return 0.0;
      }
      return 0.0;
    update_interval: 15s

binary_sensor:

@@ -1,13 +1,16 @@
as3935_i2c:
  id: as3935_i2c_id
  i2c_id: i2c_bus
  irq_pin: ${irq_pin}

binary_sensor:
  - platform: as3935
    as3935_id: as3935_i2c_id
    name: Storm Alert

sensor:
  - platform: as3935
    as3935_id: as3935_i2c_id
    lightning_energy:
      name: Lightning Energy
    distance:

@@ -1,13 +1,16 @@
as3935_spi:
  id: as3935_spi_id
  cs_pin: ${cs_pin}
  irq_pin: ${irq_pin}

binary_sensor:
  - platform: as3935
    as3935_id: as3935_spi_id
    name: Storm Alert

sensor:
  - platform: as3935
    as3935_id: as3935_spi_id
    lightning_energy:
      name: Lightning Energy
    distance:

@@ -1,7 +1,7 @@
display:
  - platform: ssd1306_i2c
    i2c_id: i2c_bus
    id: ssd1306_display
    id: ssd1306_i2c_display
    model: SSD1306_128X64
    reset_pin: 19
    pages:
@@ -13,6 +13,6 @@ touchscreen:
  - platform: axs15231
    i2c_id: i2c_bus
    id: axs15231_touchscreen
    display: ssd1306_display
    display: ssd1306_i2c_display
    interrupt_pin: 20
    reset_pin: 18

Some files were not shown because too many files have changed in this diff.