Mirror of https://github.com/esphome/esphome.git (synced 2025-11-04 09:01:49 +00:00)

Compare commits: 87 commits
2023.11.0b ...
jesserockz
Commits in this comparison (SHA1):

970680b1b2, f500bd5e6f, e2bb81e233, 26a1d14ee0, 97f07f8d13, 4e3170dc95, c795dbde26, 4ce627b4ee, 86b4fdc139, 20ea8bf06e,
642db6d92b, 4aac5a23cd, c536c976b7, 214b419db2, 0c18872888, 197b6b4275, 4e8bdc2155, f1e8622187, e0c7a02fbc, cdcb25be8e,
aecc6655db, 2754ddec1b, 2a20a5fc11, 7100d073f8, 1ac6cf2ff9, 2ee089c9d5, bd568eecf5, ae0e481cff, f198be39d7, 08fc96b890,
8c28bea5b1, 00eedeb8b3, 0a4853ba7b, 3e2b83acb0, c1eb5bd675, a9772ebf3f, 45276cc244, a9a17ee89d, f094702a16, 908f56ff46,
bd5905c59a, 91299f05f7, 30e5ff9fff, 163b38e153, 3b486084c8, 684cf10230, 63a277ba80, 53f3385c49, 51930a0243, 6a5cea171e,
3363c8f434, 3b891bc146, 0f19450ab4, 98ec798bfc, 01d28ce3fc, 9d453f0ba2, 799851a83a, bc7519f645, 7a9866f1b6, 3d30f1f733,
1e55764d52, 28513a0502, 3e3266fa74, ce020b1f9f, d394b957d1, cf22c55430, 511348974e, 972598a698, d81bec860b, fde7a04ee7,
ff9bffc363, 89b3af8be4, c9b2e54c1a, 6dd92053b5, 33346c0b6a, 161fbecfe1, fce2eafda0, c19f0cf6bc, b05e7bfe0a, 3e58ee2130,
bab9c7c70e, 0b60a1d9eb, f7455ad76a, 3190e86ba8, a34569d314, 6c1c200cf9, 3635179564
@@ -3,7 +3,7 @@
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/psf/black-pre-commit-mirror
    rev: 23.10.1
    rev: 23.11.0
    hooks:
      - id: black
        args:
@@ -100,6 +100,7 @@ esphome/components/esp32_can/* @Sympatron
esphome/components/esp32_improv/* @jesserockz
esphome/components/esp32_rmt_led_strip/* @jesserockz
esphome/components/esp8266/* @esphome/core
esphome/components/esp_adf/* @jesserockz
esphome/components/ethernet_info/* @gtjadsonsantos
esphome/components/exposure_notifications/* @OttoWinter
esphome/components/ezo/* @ssieb
@@ -246,6 +247,7 @@ esphome/components/radon_eye_rd200/* @jeffeb3
esphome/components/rc522/* @glmnet
esphome/components/rc522_i2c/* @glmnet
esphome/components/rc522_spi/* @glmnet
esphome/components/resistance_sampler/* @jesserockz
esphome/components/restart/* @esphome/core
esphome/components/rf_bridge/* @jesserockz
esphome/components/rgbct/* @jesserockz
@@ -5,6 +5,7 @@
# One of "docker", "hassio"
ARG BASEIMGTYPE=docker

# https://github.com/hassio-addons/addon-debian-base/releases
FROM ghcr.io/hassio-addons/debian-base:7.2.0 AS base-hassio
# https://hub.docker.com/_/debian?tab=tags&page=1&name=bookworm
@@ -12,9 +13,10 @@ FROM debian:12.2-slim AS base-docker

FROM base-${BASEIMGTYPE} AS base

ARG TARGETARCH
ARG TARGETVARIANT
ARG PIP_EXTRA_INDEX_URL

# Note that --break-system-packages is used below because
# https://peps.python.org/pep-0668/ added a safety check that prevents
@@ -59,8 +61,7 @@ ENV \
    # Fix click python3 lang warning https://click.palletsprojects.com/en/7.x/python3/
    LANG=C.UTF-8 LC_ALL=C.UTF-8 \
    # Store globally installed pio libs in /piolibs
    PLATFORMIO_GLOBALLIB_DIR=/piolibs \
    PIP_EXTRA_INDEX_URL=${PIP_EXTRA_INDEX_URL}
    PLATFORMIO_GLOBALLIB_DIR=/piolibs

# Support legacy binaries on Debian multiarch system. There is no "correct" way
# to do this, other than using properly built toolchains...
@@ -72,7 +73,11 @@ RUN \

RUN \
    # Ubuntu python3-pip is missing wheel
    pip3 install --break-system-packages --no-cache-dir \
    if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
    fi; \
    pip3 install \
    --break-system-packages --no-cache-dir \
        platformio==6.1.11 \
    # Change some platformio settings
    && platformio settings set enable_telemetry No \
@@ -84,8 +89,12 @@ RUN \
# tmpfs is for https://github.com/rust-lang/cargo/issues/8719

COPY requirements.txt requirements_optional.txt script/platformio_install_deps.py platformio.ini /
RUN --mount=type=tmpfs,target=/root/.cargo CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse CARGO_HOME=/root/.cargo \
    pip3 install --break-system-packages --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
RUN --mount=type=tmpfs,target=/root/.cargo if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
    fi; \
    CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse CARGO_HOME=/root/.cargo \
    pip3 install \
    --break-system-packages --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
    && /platformio_install_deps.py /platformio.ini --libraries

@@ -94,7 +103,11 @@ FROM base AS docker

# Copy esphome and install
COPY . /esphome
RUN pip3 install --break-system-packages --no-cache-dir --no-use-pep517 -e /esphome
RUN if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
    fi; \
    pip3 install \
    --break-system-packages --no-cache-dir --no-use-pep517 -e /esphome

# Settings for dashboard
ENV USERNAME="" PASSWORD=""
@@ -140,7 +153,11 @@ COPY docker/ha-addon-rootfs/ /

# Copy esphome and install
COPY . /esphome
RUN pip3 install --break-system-packages --no-cache-dir --no-use-pep517 -e /esphome
RUN if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
    fi; \
    pip3 install \
    --break-system-packages --no-cache-dir --no-use-pep517 -e /esphome

# Labels
LABEL \
@@ -176,7 +193,11 @@ RUN \
    /var/lib/apt/lists/*

COPY requirements_test.txt /
RUN pip3 install --break-system-packages --no-cache-dir -r /requirements_test.txt
RUN if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
    fi; \
    pip3 install \
    --break-system-packages --no-cache-dir -r /requirements_test.txt

VOLUME ["/esphome"]
WORKDIR /esphome
@@ -143,25 +143,15 @@ def main():
    imgs = [f"{params.build_to}:{tag}" for tag in tags_to_push]
    imgs += [f"ghcr.io/{params.build_to}:{tag}" for tag in tags_to_push]

    build_args = [
        "--build-arg",
        f"BASEIMGTYPE={params.baseimgtype}",
        "--build-arg",
        f"BUILD_VERSION={args.tag}",
    ]

    if args.arch == ARCH_ARMV7:
        build_args += [
            "--build-arg",
            "PIP_EXTRA_INDEX_URL=https://www.piwheels.org/simple",
        ]

    # 3. build
    cmd = [
        "docker",
        "buildx",
        "build",
        *build_args,
        "--build-arg",
        f"BASEIMGTYPE={params.baseimgtype}",
        "--build-arg",
        f"BUILD_VERSION={args.tag}",
        "--cache-from",
        f"type=registry,ref={cache_img}",
        "--file",
@@ -514,7 +514,7 @@ def command_clean(args, config):
def command_dashboard(args):
    from esphome.dashboard import dashboard

    return dashboard.start_web_server(args)
    return dashboard.start_dashboard(args)


def command_update_all(args):
@@ -1,71 +1,65 @@
from __future__ import annotations

import asyncio
import logging
from datetime import datetime
from typing import Optional
from typing import Any

from aioesphomeapi import APIClient, ReconnectLogic, APIConnectionError, LogLevel
import zeroconf
from aioesphomeapi import APIClient
from aioesphomeapi.api_pb2 import SubscribeLogsResponse
from aioesphomeapi.log_runner import async_run
from zeroconf.asyncio import AsyncZeroconf

from esphome.const import CONF_KEY, CONF_PASSWORD, CONF_PORT, __version__
from esphome.core import CORE

from esphome.const import CONF_KEY, CONF_PORT, CONF_PASSWORD, __version__
from esphome.util import safe_print
from . import CONF_ENCRYPTION

_LOGGER = logging.getLogger(__name__)


async def async_run_logs(config, address):
    """Run the logs command in the event loop."""
    conf = config["api"]
    port: int = int(conf[CONF_PORT])
    password: str = conf[CONF_PASSWORD]
    noise_psk: Optional[str] = None
    noise_psk: str | None = None
    if CONF_ENCRYPTION in conf:
        noise_psk = conf[CONF_ENCRYPTION][CONF_KEY]
    _LOGGER.info("Starting log output from %s using esphome API", address)
    aiozc = AsyncZeroconf()

    cli = APIClient(
        address,
        port,
        password,
        client_info=f"ESPHome Logs {__version__}",
        noise_psk=noise_psk,
        zeroconf_instance=aiozc.zeroconf,
    )
    first_connect = True
    dashboard = CORE.dashboard

    def on_log(msg):
        time_ = datetime.now().time().strftime("[%H:%M:%S]")
        text = msg.message.decode("utf8", "backslashreplace")
        safe_print(time_ + text)

    async def on_connect():
        nonlocal first_connect
        try:
            await cli.subscribe_logs(
                on_log,
                log_level=LogLevel.LOG_LEVEL_VERY_VERBOSE,
                dump_config=first_connect,
            )
            first_connect = False
        except APIConnectionError:
            cli.disconnect()

    async def on_disconnect(expected_disconnect: bool) -> None:
        _LOGGER.warning("Disconnected from API")

    zc = zeroconf.Zeroconf()
    reconnect = ReconnectLogic(
        client=cli,
        on_connect=on_connect,
        on_disconnect=on_disconnect,
        zeroconf_instance=zc,
    )
    await reconnect.start()

    def on_log(msg: SubscribeLogsResponse) -> None:
        """Handle a new log message."""
        time_ = datetime.now()
        message: bytes = msg.message
        text = message.decode("utf8", "backslashreplace")
        if dashboard:
            text = text.replace("\033", "\\033")
        print(f"[{time_.hour:02}:{time_.minute:02}:{time_.second:02}]{text}")

    stop = await async_run(cli, on_log, aio_zeroconf_instance=aiozc)
    try:
        while True:
            await asyncio.sleep(60)
    except KeyboardInterrupt:
        await reconnect.stop()
        zc.close()
    finally:
        await aiozc.async_close()
        await stop()


def run_logs(config, address):
def run_logs(config: dict[str, Any], address: str) -> None:
    """Run the logs command."""
    try:
        asyncio.run(async_run_logs(config, address))
    except KeyboardInterrupt:
        pass
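The rewritten logs command above delegates connection, subscription, and reconnect handling to aioesphomeapi's log_runner.async_run(), which returns a stop callable to be awaited on shutdown. Below is a rough, hypothetical standalone sketch of that same pattern outside ESPHome; the device address, port, and empty password are placeholders, and optional argument defaults may differ between aioesphomeapi versions.

```python
import asyncio
from datetime import datetime

from aioesphomeapi import APIClient
from aioesphomeapi.log_runner import async_run


async def main() -> None:
    # Placeholder connection details for an ESPHome device on the LAN
    cli = APIClient("192.168.1.50", 6053, "")

    def on_log(msg) -> None:
        # msg.message carries the raw log line bytes from the device
        text = msg.message.decode("utf8", "backslashreplace")
        now = datetime.now()
        print(f"[{now.hour:02}:{now.minute:02}:{now.second:02}]{text}")

    stop = await async_run(cli, on_log)  # connects, subscribes, reconnects
    try:
        while True:
            await asyncio.sleep(60)  # keep the loop alive until interrupted
    finally:
        await stop()


if __name__ == "__main__":
    asyncio.run(main())
```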
@@ -1,38 +1,37 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.components import (
|
||||
climate,
|
||||
remote_transmitter,
|
||||
remote_receiver,
|
||||
sensor,
|
||||
remote_base,
|
||||
)
|
||||
from esphome.components.remote_base import CONF_RECEIVER_ID, CONF_TRANSMITTER_ID
|
||||
from esphome.components import climate, sensor, remote_base
|
||||
from esphome.const import CONF_SUPPORTS_COOL, CONF_SUPPORTS_HEAT, CONF_SENSOR
|
||||
|
||||
DEPENDENCIES = ["remote_transmitter"]
|
||||
AUTO_LOAD = ["sensor", "remote_base"]
|
||||
CODEOWNERS = ["@glmnet"]
|
||||
|
||||
climate_ir_ns = cg.esphome_ns.namespace("climate_ir")
|
||||
ClimateIR = climate_ir_ns.class_(
|
||||
"ClimateIR", climate.Climate, cg.Component, remote_base.RemoteReceiverListener
|
||||
"ClimateIR",
|
||||
climate.Climate,
|
||||
cg.Component,
|
||||
remote_base.RemoteReceiverListener,
|
||||
remote_base.RemoteTransmittable,
|
||||
)
|
||||
|
||||
CLIMATE_IR_SCHEMA = climate.CLIMATE_SCHEMA.extend(
|
||||
CLIMATE_IR_SCHEMA = (
|
||||
climate.CLIMATE_SCHEMA.extend(
|
||||
{
|
||||
cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(
|
||||
remote_transmitter.RemoteTransmitterComponent
|
||||
),
|
||||
cv.Optional(CONF_SUPPORTS_COOL, default=True): cv.boolean,
|
||||
cv.Optional(CONF_SUPPORTS_HEAT, default=True): cv.boolean,
|
||||
cv.Optional(CONF_SENSOR): cv.use_id(sensor.Sensor),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA)
|
||||
)
|
||||
.extend(cv.COMPONENT_SCHEMA)
|
||||
.extend(remote_base.REMOTE_TRANSMITTABLE_SCHEMA)
|
||||
)
|
||||
|
||||
CLIMATE_IR_WITH_RECEIVER_SCHEMA = CLIMATE_IR_SCHEMA.extend(
|
||||
{
|
||||
cv.Optional(CONF_RECEIVER_ID): cv.use_id(
|
||||
remote_receiver.RemoteReceiverComponent
|
||||
cv.Optional(remote_base.CONF_RECEIVER_ID): cv.use_id(
|
||||
remote_base.RemoteReceiverBase
|
||||
),
|
||||
}
|
||||
)
|
||||
@@ -41,15 +40,11 @@ CLIMATE_IR_WITH_RECEIVER_SCHEMA = CLIMATE_IR_SCHEMA.extend(
|
||||
async def register_climate_ir(var, config):
|
||||
await cg.register_component(var, config)
|
||||
await climate.register_climate(var, config)
|
||||
|
||||
await remote_base.register_transmittable(var, config)
|
||||
cg.add(var.set_supports_cool(config[CONF_SUPPORTS_COOL]))
|
||||
cg.add(var.set_supports_heat(config[CONF_SUPPORTS_HEAT]))
|
||||
if remote_base.CONF_RECEIVER_ID in config:
|
||||
await remote_base.register_listener(var, config)
|
||||
if sensor_id := config.get(CONF_SENSOR):
|
||||
sens = await cg.get_variable(sensor_id)
|
||||
cg.add(var.set_sensor(sens))
|
||||
if receiver_id := config.get(CONF_RECEIVER_ID):
|
||||
receiver = await cg.get_variable(receiver_id)
|
||||
cg.add(receiver.register_listener(var))
|
||||
|
||||
transmitter = await cg.get_variable(config[CONF_TRANSMITTER_ID])
|
||||
cg.add(var.set_transmitter(transmitter))
|
||||
|
||||
@@ -18,7 +18,10 @@ namespace climate_ir {
  Likewise to decode a IR into the AC state, implement
  bool RemoteReceiverListener::on_receive(remote_base::RemoteReceiveData data) and return true
*/
class ClimateIR : public climate::Climate, public Component, public remote_base::RemoteReceiverListener {
class ClimateIR : public Component,
                  public climate::Climate,
                  public remote_base::RemoteReceiverListener,
                  public remote_base::RemoteTransmittable {
 public:
  ClimateIR(float minimum_temperature, float maximum_temperature, float temperature_step = 1.0f,
            bool supports_dry = false, bool supports_fan_only = false, std::set<climate::ClimateFanMode> fan_modes = {},
@@ -35,9 +38,6 @@ class ClimateIR : public climate::Climate, public Component, public remote_base:

  void setup() override;
  void dump_config() override;
  void set_transmitter(remote_transmitter::RemoteTransmitterComponent *transmitter) {
    this->transmitter_ = transmitter;
  }
  void set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; }
  void set_supports_heat(bool supports_heat) { this->supports_heat_ = supports_heat; }
  void set_sensor(sensor::Sensor *sensor) { this->sensor_ = sensor; }
@@ -64,7 +64,6 @@ class ClimateIR : public climate::Climate, public Component, public remote_base:
  std::set<climate::ClimateSwingMode> swing_modes_ = {};
  std::set<climate::ClimatePreset> presets_ = {};

  remote_transmitter::RemoteTransmitterComponent *transmitter_;
  sensor::Sensor *sensor_{nullptr};
};
@@ -102,11 +102,7 @@ void CoolixClimate::transmit_state() {
    }
  }
  ESP_LOGV(TAG, "Sending coolix code: 0x%06" PRIX32, remote_state);

  auto transmit = this->transmitter_->transmit();
  auto *data = transmit.get_data();
  remote_base::CoolixProtocol().encode(data, remote_state);
  transmit.perform();
  this->transmit_<remote_base::CoolixProtocol>(remote_state);
}

bool CoolixClimate::on_coolix(climate::Climate *parent, remote_base::RemoteReceiveData data) {
@@ -386,10 +386,21 @@ FRAMEWORK_SCHEMA = cv.typed_schema(
)


FLASH_SIZES = [
    "4MB",
    "8MB",
    "16MB",
    "32MB",
]

CONF_FLASH_SIZE = "flash_size"
CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.Required(CONF_BOARD): cv.string_strict,
            cv.Optional(CONF_FLASH_SIZE, default="4MB"): cv.one_of(
                *FLASH_SIZES, upper=True
            ),
            cv.Optional(CONF_VARIANT): cv.one_of(*VARIANTS, upper=True),
            cv.Optional(CONF_FRAMEWORK, default={}): FRAMEWORK_SCHEMA,
        }
@@ -401,6 +412,7 @@ CONFIG_SCHEMA = cv.All(

async def to_code(config):
    cg.add_platformio_option("board", config[CONF_BOARD])
    cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE])
    cg.add_build_flag("-DUSE_ESP32")
    cg.add_define("ESPHOME_BOARD", config[CONF_BOARD])
    cg.add_build_flag(f"-DUSE_ESP32_VARIANT_{config[CONF_VARIANT]}")
@@ -505,24 +517,46 @@ async def to_code(config):
    )


ARDUINO_PARTITIONS_CSV = """\
nvs,      data, nvs,     0x009000, 0x005000,
otadata,  data, ota,     0x00e000, 0x002000,
app0,     app,  ota_0,   0x010000, 0x1C0000,
app1,     app,  ota_1,   0x1D0000, 0x1C0000,
eeprom,   data, 0x99,    0x390000, 0x001000,
spiffs,   data, spiffs,  0x391000, 0x00F000
APP_PARTITION_SIZES = {
    "4MB": 0x1C0000,  # 1792 KB
    "8MB": 0x3C0000,  # 3840 KB
    "16MB": 0x7C0000,  # 7936 KB
    "32MB": 0xFC0000,  # 16128 KB
}


def get_arduino_partition_csv(flash_size):
    app_partition_size = APP_PARTITION_SIZES[flash_size]
    eeprom_partition_size = 0x1000  # 4 KB
    spiffs_partition_size = 0xF000  # 60 KB

    app0_partition_start = 0x010000  # 64 KB
    app1_partition_start = app0_partition_start + app_partition_size
    eeprom_partition_start = app1_partition_start + app_partition_size
    spiffs_partition_start = eeprom_partition_start + eeprom_partition_size

    partition_csv = f"""\
nvs,      data, nvs,     0x9000, 0x5000,
otadata,  data, ota,     0xE000, 0x2000,
app0,     app,  ota_0,   0x{app0_partition_start:X}, 0x{app_partition_size:X},
app1,     app,  ota_1,   0x{app1_partition_start:X}, 0x{app_partition_size:X},
eeprom,   data, 0x99,    0x{eeprom_partition_start:X}, 0x{eeprom_partition_size:X},
spiffs,   data, spiffs,  0x{spiffs_partition_start:X}, 0x{spiffs_partition_size:X}
"""
    return partition_csv


IDF_PARTITIONS_CSV = """\
# Name, Type, SubType, Offset, Size, Flags
def get_idf_partition_csv(flash_size):
    app_partition_size = APP_PARTITION_SIZES[flash_size]

    partition_csv = f"""\
otadata,  data, ota,     ,        0x2000,
phy_init, data, phy,     ,        0x1000,
app0,     app,  ota_0,   ,        0x1C0000,
app1,     app,  ota_1,   ,        0x1C0000,
nvs,      data, nvs,     ,        0x6d000,
app0,     app,  ota_0,   ,        0x{app_partition_size:X},
app1,     app,  ota_1,   ,        0x{app_partition_size:X},
nvs,      data, nvs,     ,        0x6D000,
"""
    return partition_csv


def _format_sdkconfig_val(value: SdkconfigValueType) -> str:
@@ -565,13 +599,17 @@ def copy_files():
    if CORE.using_arduino:
        write_file_if_changed(
            CORE.relative_build_path("partitions.csv"),
            ARDUINO_PARTITIONS_CSV,
            get_arduino_partition_csv(
                CORE.platformio_options.get("board_upload.flash_size")
            ),
        )
    if CORE.using_esp_idf:
        _write_sdkconfig()
        write_file_if_changed(
            CORE.relative_build_path("partitions.csv"),
            IDF_PARTITIONS_CSV,
            get_idf_partition_csv(
                CORE.platformio_options.get("board_upload.flash_size")
            ),
        )
    # IDF build scripts look for version string to put in the build.
    # However, if the build path does not have an initialized git repo,
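The new helpers above replace the fixed Arduino partition CSV with offsets computed from APP_PARTITION_SIZES. The small self-contained sketch below repeats that arithmetic with the same constants from the diff so the resulting layout is easy to check; for a 4MB flash it reproduces the old hard-coded offsets (app1 at 0x1D0000, eeprom at 0x390000, spiffs at 0x391000).

```python
# Assumed-standalone sketch of the partition arithmetic introduced above.
APP_PARTITION_SIZES = {
    "4MB": 0x1C0000,   # 1792 KB
    "8MB": 0x3C0000,   # 3840 KB
    "16MB": 0x7C0000,  # 7936 KB
    "32MB": 0xFC0000,  # 16128 KB
}


def arduino_partition_offsets(flash_size: str) -> dict:
    app = APP_PARTITION_SIZES[flash_size]
    app0_start = 0x10000                  # first OTA slot always starts at 64 KB
    app1_start = app0_start + app         # second OTA slot follows the first
    eeprom_start = app1_start + app       # 4 KB eeprom after both app slots
    spiffs_start = eeprom_start + 0x1000  # 60 KB spiffs after the eeprom area
    return {"app0": app0_start, "app1": app1_start,
            "eeprom": eeprom_start, "spiffs": spiffs_start}


for size in APP_PARTITION_SIZES:
    offsets = arduino_partition_offsets(size)
    print(size, {name: hex(start) for name, start in offsets.items()})
# 4MB -> app1 0x1d0000, eeprom 0x390000, spiffs 0x391000 (matches the old CSV)
# 8MB -> app1 0x3d0000, eeprom 0x790000, spiffs 0x791000
```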
esphome/components/esp_adf/__init__.py (new file, 102 lines)
@@ -0,0 +1,102 @@
import os

import esphome.config_validation as cv
import esphome.codegen as cg
import esphome.final_validate as fv

from esphome.components import esp32

from esphome.const import CONF_ID, CONF_BOARD

CODEOWNERS = ["@jesserockz"]
DEPENDENCIES = ["esp32"]

CONF_ESP_ADF_ID = "esp_adf_id"
CONF_ESP_ADF = "esp_adf"

esp_adf_ns = cg.esphome_ns.namespace("esp_adf")
ESPADF = esp_adf_ns.class_("ESPADF", cg.Component)
ESPADFPipeline = esp_adf_ns.class_("ESPADFPipeline", cg.Parented.template(ESPADF))

SUPPORTED_BOARDS = {
    "esp32s3box": "CONFIG_ESP32_S3_BOX_BOARD",
    "esp32s3boxlite": "CONFIG_ESP32_S3_BOX_LITE_BOARD",
    "esp32s3box3": "CONFIG_ESP32_S3_BOX_3_BOARD",
}


def _default_board(config):
    config = config.copy()
    if board := config.get(CONF_BOARD) is None:
        board = esp32.get_board()
        if board in SUPPORTED_BOARDS:
            config[CONF_BOARD] = board
    return config


def final_validate_usable_board(platform: str):
    def _validate(adf_config):
        board = adf_config.get(CONF_BOARD)
        if board not in SUPPORTED_BOARDS:
            raise cv.Invalid(f"Board {board} is not supported by esp-adf {platform}")
        return adf_config

    return cv.Schema(
        {cv.Required(CONF_ESP_ADF_ID): fv.id_declaration_match_schema(_validate)},
        extra=cv.ALLOW_EXTRA,
    )


CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(ESPADF),
            cv.Optional(CONF_BOARD): cv.string_strict,
        }
    ),
    _default_board,
    cv.only_with_esp_idf,
)


async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)

    cg.add_define("USE_ESP_ADF")

    cg.add_platformio_option("build_unflags", "-Wl,--end-group")

    esp32.add_idf_component(
        name="esp-adf",
        repo="https://github.com/espressif/esp-adf",
        path="components",
        ref="v2.5",
        components=["*"],
        submodules=["components/esp-sr", "components/esp-adf-libs"],
    )

    esp32.add_idf_component(
        name="esp-dsp",
        repo="https://github.com/espressif/esp-dsp",
        ref="v1.2.0",
    )

    cg.add_platformio_option(
        "board_build.embed_txtfiles", "components/dueros_service/duer_profile"
    )

    if board := config.get(CONF_BOARD):
        cg.add_define("USE_ESP_ADF_BOARD")

        esp32.add_idf_sdkconfig_option(SUPPORTED_BOARDS[board], True)

        esp32.add_extra_script(
            "pre",
            "apply_adf_patches.py",
            os.path.join(os.path.dirname(__file__), "apply_adf_patches.py.script"),
        )
        esp32.add_extra_build_file(
            "esp_adf_patches/idf_v4.4_freertos.patch",
            "https://github.com/espressif/esp-adf/raw/v2.5/idf_patches/idf_v4.4_freertos.patch",
        )
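In plain terms, the new esp_adf component resolves its board like this: an explicit board entry wins, otherwise the configured esp32 board is taken over when it is one of the supported S3-Box kits, and the dependent microphone/speaker platforms fail validation when no supported board is resolved. The sketch below is a simplified restatement of that behaviour in plain Python, without the config-validation machinery; the helper name resolve_board and its arguments are illustrative, only SUPPORTED_BOARDS is taken from the file above.

```python
# Simplified restatement of the board-selection logic (not the actual cv schema code).
SUPPORTED_BOARDS = {
    "esp32s3box": "CONFIG_ESP32_S3_BOX_BOARD",
    "esp32s3boxlite": "CONFIG_ESP32_S3_BOX_LITE_BOARD",
    "esp32s3box3": "CONFIG_ESP32_S3_BOX_3_BOARD",
}


def resolve_board(explicit_board: str | None, esp32_board: str) -> str | None:
    if explicit_board is not None:
        return explicit_board          # user picked a board explicitly
    if esp32_board in SUPPORTED_BOARDS:
        return esp32_board             # fall back to the esp32: board when supported
    return None                        # microphone/speaker validation then fails


print(resolve_board(None, "esp32s3box"))         # -> esp32s3box
print(resolve_board("esp32s3box3", "esp32dev"))  # -> esp32s3box3
print(resolve_board(None, "esp32dev"))           # -> None
```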
esphome/components/esp_adf/apply_adf_patches.py.script (new file, 23 lines)
@@ -0,0 +1,23 @@
from os.path import join, isfile

Import("env")

FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-espidf")
patchflag_path = join(FRAMEWORK_DIR, ".adf-patching-done")

PROJECT_DIR = env.get('PROJECT_DIR')

PATCH_FILE = join(PROJECT_DIR, "esp_adf_patches", "idf_v4.4_freertos.patch")

# patch file only if we didn't do it before
if not isfile(patchflag_path):
    print(PATCH_FILE)
    assert isfile(PATCH_FILE)

    env.Execute("patch -p1 -d %s -i %s" % (FRAMEWORK_DIR, PATCH_FILE))

    def _touch(path):
        with open(path, "w") as fp:
            fp.write("")

    env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
esphome/components/esp_adf/esp_adf.cpp (new file, 30 lines)
@@ -0,0 +1,30 @@
#include "esp_adf.h"
#include "esphome/core/defines.h"

#ifdef USE_ESP_IDF

#ifdef USE_ESP_ADF_BOARD
#include <board.h>
#endif

#include "esphome/core/log.h"

namespace esphome {
namespace esp_adf {

static const char *const TAG = "esp_adf";

void ESPADF::setup() {
#ifdef USE_ESP_ADF_BOARD
  ESP_LOGI(TAG, "Start codec chip");
  audio_board_handle_t board_handle = audio_board_init();
  audio_hal_ctrl_codec(board_handle->audio_hal, AUDIO_HAL_CODEC_MODE_BOTH, AUDIO_HAL_CTRL_START);
#endif
}

float ESPADF::get_setup_priority() const { return setup_priority::HARDWARE; }

}  // namespace esp_adf
}  // namespace esphome

#endif  // USE_ESP_IDF
esphome/components/esp_adf/esp_adf.h (new file, 58 lines)
@@ -0,0 +1,58 @@
#pragma once

#ifdef USE_ESP_IDF

#include "esphome/core/component.h"
#include "esphome/core/helpers.h"

namespace esphome {
namespace esp_adf {

static const size_t BUFFER_SIZE = 1024;

enum class TaskEventType : uint8_t {
  STARTING = 0,
  STARTED,
  RUNNING,
  STOPPING,
  STOPPED,
  WARNING = 255,
};

struct TaskEvent {
  TaskEventType type;
  esp_err_t err;
};

struct CommandEvent {
  bool stop;
};

struct DataEvent {
  bool stop;
  size_t len;
  uint8_t data[BUFFER_SIZE];
};

class ESPADF;

class ESPADFPipeline : public Parented<ESPADF> {};

class ESPADF : public Component {
 public:
  void setup() override;

  float get_setup_priority() const override;

  void lock() { this->lock_.lock(); }
  bool try_lock() { return this->lock_.try_lock(); }
  void unlock() { this->lock_.unlock(); }

 protected:
  Mutex lock_;
};

}  // namespace esp_adf
}  // namespace esphome

#endif  // USE_ESP_IDF
esphome/components/esp_adf/microphone/__init__.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import microphone
from esphome.const import CONF_ID

from .. import (
    CONF_ESP_ADF_ID,
    ESPADF,
    ESPADFPipeline,
    esp_adf_ns,
    final_validate_usable_board,
)

AUTO_LOAD = ["esp_adf"]
CONFLICTS_WITH = ["i2s_audio"]
DEPENDENCIES = ["esp32"]

ESPADFMicrophone = esp_adf_ns.class_(
    "ESPADFMicrophone", ESPADFPipeline, microphone.Microphone, cg.Component
)


CONFIG_SCHEMA = cv.All(
    microphone.MICROPHONE_SCHEMA.extend(
        {
            cv.GenerateID(): cv.declare_id(ESPADFMicrophone),
            cv.GenerateID(CONF_ESP_ADF_ID): cv.use_id(ESPADF),
        }
    ).extend(cv.COMPONENT_SCHEMA),
    cv.only_with_esp_idf,
)

FINAL_VALIDATE_SCHEMA = final_validate_usable_board("microphone")


async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)
    await cg.register_parented(var, config[CONF_ESP_ADF_ID])

    await microphone.register_microphone(var, config)
esphome/components/esp_adf/microphone/esp_adf_microphone.cpp (new file, 336 lines)
@@ -0,0 +1,336 @@
#include "esp_adf_microphone.h"

#ifdef USE_ESP_IDF

#include <driver/i2s.h>

#include "esphome/core/hal.h"
#include "esphome/core/log.h"

#include <algorithm_stream.h>
#include <audio_element.h>
#include <audio_hal.h>
#include <audio_pipeline.h>
#include <filter_resample.h>
#include <i2s_stream.h>
#include <raw_stream.h>
#include <recorder_sr.h>

#include <board.h>

namespace esphome {
namespace esp_adf {

static const char *const TAG = "esp_adf.microphone";

void ESPADFMicrophone::setup() {
  this->ring_buffer_ = rb_create(8000, sizeof(int16_t));
  if (this->ring_buffer_ == nullptr) {
    ESP_LOGW(TAG, "Could not allocate ring buffer.");
    this->mark_failed();
    return;
  }

  this->read_event_queue_ = xQueueCreate(20, sizeof(TaskEvent));
  this->read_command_queue_ = xQueueCreate(20, sizeof(CommandEvent));
}

void ESPADFMicrophone::start() {
  if (this->is_failed())
    return;
  if (this->state_ == microphone::STATE_STOPPING) {
    ESP_LOGW(TAG, "Microphone is stopping, cannot start.");
    return;
  }
  this->state_ = microphone::STATE_STARTING;
}
void ESPADFMicrophone::start_() {
  if (!this->parent_->try_lock()) {
    return;
  }

  xTaskCreate(ESPADFMicrophone::read_task, "read_task", 8192, (void *) this, 0, &this->read_task_handle_);
}

void ESPADFMicrophone::read_task(void *params) {
  ESPADFMicrophone *this_mic = (ESPADFMicrophone *) params;
  TaskEvent event;

  ExternalRAMAllocator<int16_t> allocator(ExternalRAMAllocator<int16_t>::ALLOW_FAILURE);
  int16_t *buffer = allocator.allocate(BUFFER_SIZE / sizeof(int16_t));
  if (buffer == nullptr) {
    event.type = TaskEventType::WARNING;
    event.err = ESP_ERR_NO_MEM;
    xQueueSend(this_mic->read_event_queue_, &event, portMAX_DELAY);

    event.type = TaskEventType::STOPPED;
    event.err = ESP_OK;
    xQueueSend(this_mic->read_event_queue_, &event, portMAX_DELAY);

    while (true) {
      delay(10);
    }
    return;
  }

  event.type = TaskEventType::STARTING;
  xQueueSend(this_mic->read_event_queue_, &event, portMAX_DELAY);

  audio_pipeline_cfg_t pipeline_cfg = {
      .rb_size = 8 * 1024,
  };
  audio_pipeline_handle_t pipeline = audio_pipeline_init(&pipeline_cfg);

  i2s_driver_config_t i2s_config = {
      .mode = (i2s_mode_t) (I2S_MODE_MASTER | I2S_MODE_RX),
      .sample_rate = 16000,
      .bits_per_sample = I2S_BITS_PER_SAMPLE_16BIT,
      .channel_format = I2S_CHANNEL_FMT_RIGHT_LEFT,
      .communication_format = I2S_COMM_FORMAT_STAND_I2S,
      .intr_alloc_flags = ESP_INTR_FLAG_LEVEL2 | ESP_INTR_FLAG_IRAM,
      .dma_buf_count = 8,
      .dma_buf_len = 128,
      .use_apll = false,
      .tx_desc_auto_clear = true,
      .fixed_mclk = 0,
      .mclk_multiple = I2S_MCLK_MULTIPLE_256,
      .bits_per_chan = I2S_BITS_PER_CHAN_DEFAULT,
  };

  i2s_stream_cfg_t i2s_cfg = {
      .type = AUDIO_STREAM_READER,
      .i2s_config = i2s_config,
      .i2s_port = static_cast<i2s_port_t>(CODEC_ADC_I2S_PORT),
      .use_alc = false,
      .volume = 0,
      .out_rb_size = I2S_STREAM_RINGBUFFER_SIZE,
      .task_stack = I2S_STREAM_TASK_STACK,
      .task_core = I2S_STREAM_TASK_CORE,
      .task_prio = I2S_STREAM_TASK_PRIO,
      .stack_in_ext = false,
      .multi_out_num = 0,
      .uninstall_drv = true,
      .need_expand = false,
      .expand_src_bits = I2S_BITS_PER_SAMPLE_16BIT,
  };
  audio_element_handle_t i2s_stream_reader = i2s_stream_init(&i2s_cfg);

  rsp_filter_cfg_t rsp_cfg = {
      .src_rate = 16000,
      .src_ch = 2,
      .dest_rate = 16000,
      .dest_bits = 16,
      .dest_ch = 1,
      .src_bits = I2S_BITS_PER_SAMPLE_16BIT,
      .mode = RESAMPLE_DECODE_MODE,
      .max_indata_bytes = RSP_FILTER_BUFFER_BYTE,
      .out_len_bytes = RSP_FILTER_BUFFER_BYTE,
      .type = ESP_RESAMPLE_TYPE_AUTO,
      .complexity = 2,
      .down_ch_idx = 0,
      .prefer_flag = ESP_RSP_PREFER_TYPE_SPEED,
      .out_rb_size = RSP_FILTER_RINGBUFFER_SIZE,
      .task_stack = RSP_FILTER_TASK_STACK,
      .task_core = RSP_FILTER_TASK_CORE,
      .task_prio = RSP_FILTER_TASK_PRIO,
      .stack_in_ext = true,
  };
  audio_element_handle_t filter = rsp_filter_init(&rsp_cfg);

  algorithm_stream_cfg_t algo_cfg = {
      .input_type = ALGORITHM_STREAM_INPUT_TYPE1,
      .task_stack = 10 * 1024,
      .task_prio = ALGORITHM_STREAM_TASK_PERIOD,
      .task_core = ALGORITHM_STREAM_PINNED_TO_CORE,
      .out_rb_size = ALGORITHM_STREAM_RINGBUFFER_SIZE,
      .stack_in_ext = true,
      .rec_linear_factor = 1,
      .ref_linear_factor = 1,
      .debug_input = false,
      .swap_ch = false,
      // .algo_mask = ALGORITHM_STREAM_USE_AGC,
      // .algo_mask = (ALGORITHM_STREAM_USE_AEC | ALGORITHM_STREAM_USE_AGC | ALGORITHM_STREAM_USE_NS),
      // .algo_mask = (ALGORITHM_STREAM_USE_AGC | ALGORITHM_STREAM_USE_NS),
      .algo_mask = (ALGORITHM_STREAM_USE_AEC | ALGORITHM_STREAM_USE_NS),
      // .algo_mask = (ALGORITHM_STREAM_USE_NS),
      .sample_rate = 16000,
      .mic_ch = 1,
      .agc_gain = 10,
      .aec_low_cost = false,
  };

  // audio_element_handle_t algo_stream = algo_stream_init(&algo_cfg);

  raw_stream_cfg_t raw_cfg = {
      .type = AUDIO_STREAM_READER,
      .out_rb_size = 8 * 1024,
  };
  audio_element_handle_t raw_read = raw_stream_init(&raw_cfg);

  audio_pipeline_register(pipeline, i2s_stream_reader, "i2s");
  audio_pipeline_register(pipeline, filter, "filter");
  // audio_pipeline_register(pipeline, algo_stream, "algo");
  audio_pipeline_register(pipeline, raw_read, "raw");

  const char *link_tag[4] = {
      "i2s",
      "filter",
      // "algo",
      "raw",
  };
  audio_pipeline_link(pipeline, &link_tag[0], 3);

  audio_pipeline_run(pipeline);

  event.type = TaskEventType::STARTED;
  xQueueSend(this_mic->read_event_queue_, &event, portMAX_DELAY);

  CommandEvent command_event;

  while (true) {
    if (xQueueReceive(this_mic->read_command_queue_, &command_event, 0) == pdTRUE) {
      if (command_event.stop) {
        // Stop signal from main thread
        break;
      }
    }

    int bytes_read = raw_stream_read(raw_read, (char *) buffer, BUFFER_SIZE);

    if (bytes_read == -2 || bytes_read == 0) {
      // No data in buffers to read.
      continue;
    } else if (bytes_read < 0) {
      event.type = TaskEventType::WARNING;
      event.err = bytes_read;
      xQueueSend(this_mic->read_event_queue_, &event, 0);
      continue;
    }

    event.type = TaskEventType::RUNNING;
    event.err = bytes_read;
    xQueueSend(this_mic->read_event_queue_, &event, 0);

    int available = rb_bytes_available(this_mic->ring_buffer_);
    if (available < bytes_read) {
      rb_read(this_mic->ring_buffer_, nullptr, bytes_read - available, 0);
    }
    rb_write(this_mic->ring_buffer_, (char *) buffer, bytes_read, 0);
  }

  allocator.deallocate(buffer, BUFFER_SIZE / sizeof(int16_t));

  audio_pipeline_stop(pipeline);
  audio_pipeline_wait_for_stop(pipeline);
  audio_pipeline_terminate(pipeline);

  event.type = TaskEventType::STOPPING;
  xQueueSend(this_mic->read_event_queue_, &event, portMAX_DELAY);

  audio_pipeline_unregister(pipeline, i2s_stream_reader);
  audio_pipeline_unregister(pipeline, filter);
  // audio_pipeline_unregister(pipeline, algo_stream);
  audio_pipeline_unregister(pipeline, raw_read);

  audio_pipeline_deinit(pipeline);
  audio_element_deinit(i2s_stream_reader);
  audio_element_deinit(filter);
  // audio_element_deinit(algo_stream);
  audio_element_deinit(raw_read);

  event.type = TaskEventType::STOPPED;
  xQueueSend(this_mic->read_event_queue_, &event, portMAX_DELAY);

  while (true) {
    delay(10);
  }
}

void ESPADFMicrophone::stop() {
  if (this->state_ == microphone::STATE_STOPPED || this->state_ == microphone::STATE_STOPPING || this->is_failed())
    return;
  this->state_ = microphone::STATE_STOPPING;
  CommandEvent command_event;
  command_event.stop = true;
  xQueueSendToFront(this->read_command_queue_, &command_event, portMAX_DELAY);
  ESP_LOGD(TAG, "Stopping microphone");
}

size_t ESPADFMicrophone::read(int16_t *buf, size_t len) {
  if (rb_bytes_available(this->ring_buffer_) == 0) {
    return 0;  // No data
  }
  int bytes_read = rb_read(this->ring_buffer_, (char *) buf, len, 0);

  if (bytes_read == -4 || bytes_read == -2 || bytes_read == 0) {
    // No data in buffers to read.
    return 0;
  } else if (bytes_read < 0) {
    ESP_LOGW(TAG, "Error reading from I2S microphone %s (%d)", esp_err_to_name(bytes_read), bytes_read);
    this->status_set_warning();
    return 0;
  }
  this->status_clear_warning();

  return bytes_read;
}

void ESPADFMicrophone::read_() {
  std::vector<int16_t> samples;
  samples.resize(BUFFER_SIZE);
  this->read(samples.data(), samples.size());

  this->data_callbacks_.call(samples);
}

void ESPADFMicrophone::watch_() {
  TaskEvent event;
  if (xQueueReceive(this->read_event_queue_, &event, 0) == pdTRUE) {
    switch (event.type) {
      case TaskEventType::STARTING:
      case TaskEventType::STOPPING:
        break;
      case TaskEventType::STARTED:
        ESP_LOGD(TAG, "Microphone started");
        this->state_ = microphone::STATE_RUNNING;
        break;
      case TaskEventType::RUNNING:
        this->status_clear_warning();
        // ESP_LOGD(TAG, "Putting %d bytes into ring buffer", event.err);
        break;
      case TaskEventType::STOPPED:
        this->parent_->unlock();
        this->state_ = microphone::STATE_STOPPED;
        vTaskDelete(this->read_task_handle_);
        this->read_task_handle_ = nullptr;
        ESP_LOGD(TAG, "Microphone stopped");
        break;
      case TaskEventType::WARNING:
        ESP_LOGW(TAG, "Error writing to pipeline: %s", esp_err_to_name(event.err));
        this->status_set_warning();
        break;
    }
  }
}

void ESPADFMicrophone::loop() {
  this->watch_();
  switch (this->state_) {
    case microphone::STATE_STOPPED:
    case microphone::STATE_STOPPING:
      break;
    case microphone::STATE_STARTING:
      this->start_();
      break;
    case microphone::STATE_RUNNING:
      if (this->data_callbacks_.size() > 0) {
        this->read_();
      }
      break;
  }
}

}  // namespace esp_adf
}  // namespace esphome

#endif  // USE_ESP_IDF
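In the read task above, audio is copied from the ADF raw stream into a ring buffer with a drop-oldest policy: when there is not enough free space, rb_read() with a null destination discards just enough of the oldest samples before rb_write() stores the new block, so a stalled reader never blocks capture. The sketch below illustrates that buffering strategy in plain Python; it is not the esp-adf ringbuf API, and the class name is invented for the example.

```python
from collections import deque


class DropOldestBuffer:
    """Bounded byte buffer: writes evict the oldest bytes when space runs out."""

    def __init__(self, capacity: int) -> None:
        self.capacity = capacity
        self.buf: deque[int] = deque()

    def write(self, data: bytes) -> None:
        overflow = len(self.buf) + len(data) - self.capacity
        for _ in range(max(0, overflow)):
            self.buf.popleft()  # discard oldest samples, like rb_read(..., nullptr, ...)
        self.buf.extend(data)

    def read(self, n: int) -> bytes:
        n = min(n, len(self.buf))
        return bytes(self.buf.popleft() for _ in range(n))


rb = DropOldestBuffer(capacity=8)
rb.write(b"12345678")
rb.write(b"AB")    # evicts b"12", keeps b"345678AB"
print(rb.read(8))  # b'345678AB'
```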
esphome/components/esp_adf/microphone/esp_adf_microphone.h (new file, 42 lines)
@@ -0,0 +1,42 @@
#pragma once

#ifdef USE_ESP_IDF

#include "../esp_adf.h"

#include "esphome/components/microphone/microphone.h"
#include "esphome/core/component.h"

#include <ringbuf.h>

namespace esphome {
namespace esp_adf {

class ESPADFMicrophone : public ESPADFPipeline, public microphone::Microphone, public Component {
 public:
  void setup() override;
  void start() override;
  void stop() override;

  void loop() override;

  size_t read(int16_t *buf, size_t len) override;

 protected:
  void start_();
  void read_();
  void watch_();

  static void read_task(void *params);

  ringbuf_handle_t ring_buffer_;

  TaskHandle_t read_task_handle_{nullptr};
  QueueHandle_t read_event_queue_;
  QueueHandle_t read_command_queue_;
};

}  // namespace esp_adf
}  // namespace esphome

#endif  // USE_ESP_IDF
esphome/components/esp_adf/speaker/__init__.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import speaker
from esphome.const import CONF_ID

from .. import (
    CONF_ESP_ADF_ID,
    ESPADF,
    ESPADFPipeline,
    esp_adf_ns,
    final_validate_usable_board,
)

AUTO_LOAD = ["esp_adf"]
CONFLICTS_WITH = ["i2s_audio"]
DEPENDENCIES = ["esp32"]

ESPADFSpeaker = esp_adf_ns.class_(
    "ESPADFSpeaker", ESPADFPipeline, speaker.Speaker, cg.Component
)


CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(ESPADFSpeaker),
            cv.GenerateID(CONF_ESP_ADF_ID): cv.use_id(ESPADF),
        }
    ).extend(cv.COMPONENT_SCHEMA),
    cv.only_with_esp_idf,
)

FINAL_VALIDATE_SCHEMA = final_validate_usable_board("speaker")


async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)
    await cg.register_parented(var, config[CONF_ESP_ADF_ID])

    await speaker.register_speaker(var, config)
esphome/components/esp_adf/speaker/esp_adf_speaker.cpp (new file, 274 lines)
@@ -0,0 +1,274 @@
#include "esp_adf_speaker.h"

#ifdef USE_ESP_IDF

#include <driver/i2s.h>

#include "esphome/core/application.h"
#include "esphome/core/hal.h"
#include "esphome/core/log.h"

#include <audio_hal.h>
#include <filter_resample.h>
#include <i2s_stream.h>
#include <raw_stream.h>

namespace esphome {
namespace esp_adf {

static const size_t BUFFER_COUNT = 50;

static const char *const TAG = "esp_adf.speaker";

void ESPADFSpeaker::setup() {
  ESP_LOGCONFIG(TAG, "Setting up ESP ADF Speaker...");

  this->buffer_queue_ = xQueueCreate(BUFFER_COUNT, sizeof(DataEvent));
  this->event_queue_ = xQueueCreate(20, sizeof(TaskEvent));
}

void ESPADFSpeaker::start() { this->state_ = speaker::STATE_STARTING; }
void ESPADFSpeaker::start_() {
  if (!this->parent_->try_lock()) {
    return;  // Waiting for another i2s component to return lock
  }

  xTaskCreate(ESPADFSpeaker::player_task, "speaker_task", 8192, (void *) this, 0, &this->player_task_handle_);
}

void ESPADFSpeaker::player_task(void *params) {
  ESPADFSpeaker *this_speaker = (ESPADFSpeaker *) params;

  TaskEvent event;
  event.type = TaskEventType::STARTING;
  xQueueSend(this_speaker->event_queue_, &event, portMAX_DELAY);

  i2s_driver_config_t i2s_config = {
      .mode = (i2s_mode_t) (I2S_MODE_MASTER | I2S_MODE_TX),
      .sample_rate = 16000,
      .bits_per_sample = I2S_BITS_PER_SAMPLE_16BIT,
      .channel_format = I2S_CHANNEL_FMT_ONLY_RIGHT,
      .communication_format = I2S_COMM_FORMAT_STAND_I2S,
      .intr_alloc_flags = ESP_INTR_FLAG_LEVEL2 | ESP_INTR_FLAG_IRAM,
      .dma_buf_count = 8,
      .dma_buf_len = 1024,
      .use_apll = false,
      .tx_desc_auto_clear = true,
      .fixed_mclk = 0,
      .mclk_multiple = I2S_MCLK_MULTIPLE_256,
      .bits_per_chan = I2S_BITS_PER_CHAN_DEFAULT,
  };

  audio_pipeline_cfg_t pipeline_cfg = {
      .rb_size = 8 * 1024,
  };
  audio_pipeline_handle_t pipeline = audio_pipeline_init(&pipeline_cfg);

  i2s_stream_cfg_t i2s_cfg = {
      .type = AUDIO_STREAM_WRITER,
      .i2s_config = i2s_config,
      .i2s_port = I2S_NUM_0,
      .use_alc = false,
      .volume = 0,
      .out_rb_size = I2S_STREAM_RINGBUFFER_SIZE,
      .task_stack = I2S_STREAM_TASK_STACK,
      .task_core = I2S_STREAM_TASK_CORE,
      .task_prio = I2S_STREAM_TASK_PRIO,
      .stack_in_ext = false,
      .multi_out_num = 0,
      .uninstall_drv = true,
      .need_expand = false,
      .expand_src_bits = I2S_BITS_PER_SAMPLE_16BIT,
  };
  audio_element_handle_t i2s_stream_writer = i2s_stream_init(&i2s_cfg);

  rsp_filter_cfg_t rsp_cfg = {
      .src_rate = 16000,
      .src_ch = 1,
      .dest_rate = 16000,
      .dest_bits = 16,
      .dest_ch = 2,
      .src_bits = 16,
      .mode = RESAMPLE_DECODE_MODE,
      .max_indata_bytes = RSP_FILTER_BUFFER_BYTE,
      .out_len_bytes = RSP_FILTER_BUFFER_BYTE,
      .type = ESP_RESAMPLE_TYPE_AUTO,
      .complexity = 2,
      .down_ch_idx = 0,
      .prefer_flag = ESP_RSP_PREFER_TYPE_SPEED,
      .out_rb_size = RSP_FILTER_RINGBUFFER_SIZE,
      .task_stack = RSP_FILTER_TASK_STACK,
      .task_core = RSP_FILTER_TASK_CORE,
      .task_prio = RSP_FILTER_TASK_PRIO,
      .stack_in_ext = true,
  };
  audio_element_handle_t filter = rsp_filter_init(&rsp_cfg);

  raw_stream_cfg_t raw_cfg = {
      .type = AUDIO_STREAM_WRITER,
      .out_rb_size = 8 * 1024,
  };
  audio_element_handle_t raw_write = raw_stream_init(&raw_cfg);

  audio_pipeline_register(pipeline, raw_write, "raw");
  audio_pipeline_register(pipeline, filter, "filter");
  audio_pipeline_register(pipeline, i2s_stream_writer, "i2s");

  const char *link_tag[3] = {
      "raw",
      // "filter",
      "i2s",
  };
  audio_pipeline_link(pipeline, &link_tag[0], 2);

  audio_pipeline_run(pipeline);

  DataEvent data_event;

  event.type = TaskEventType::STARTED;
  xQueueSend(this_speaker->event_queue_, &event, 0);

  uint32_t last_received = millis();

  while (true) {
    if (xQueueReceive(this_speaker->buffer_queue_, &data_event, 0) != pdTRUE) {
      if (millis() - last_received > 500) {
        // No audio for 500ms, stop
        break;
      } else {
        continue;
      }
    }
    if (data_event.stop) {
      // Stop signal from main thread
      while (xQueueReceive(this_speaker->buffer_queue_, &data_event, 0) == pdTRUE) {
        // Flush queue
      }
      break;
    }

    size_t remaining = data_event.len;
    size_t current = 0;
    if (remaining > 0)
      last_received = millis();

    while (remaining > 0) {
      int bytes_written = raw_stream_write(raw_write, (char *) data_event.data + current, remaining);
      if (bytes_written == ESP_FAIL) {
        event = {.type = TaskEventType::WARNING, .err = ESP_FAIL};
        xQueueSend(this_speaker->event_queue_, &event, 0);
        continue;
      }

      remaining -= bytes_written;
      current += bytes_written;
    }

    event.type = TaskEventType::RUNNING;
    xQueueSend(this_speaker->event_queue_, &event, 0);
  }

  audio_pipeline_stop(pipeline);
  audio_pipeline_wait_for_stop(pipeline);
  audio_pipeline_terminate(pipeline);

  event.type = TaskEventType::STOPPING;
  xQueueSend(this_speaker->event_queue_, &event, portMAX_DELAY);

  audio_pipeline_unregister(pipeline, i2s_stream_writer);
  audio_pipeline_unregister(pipeline, filter);
  audio_pipeline_unregister(pipeline, raw_write);

  audio_pipeline_deinit(pipeline);
  audio_element_deinit(i2s_stream_writer);
  audio_element_deinit(filter);
  audio_element_deinit(raw_write);

  event.type = TaskEventType::STOPPED;
  xQueueSend(this_speaker->event_queue_, &event, portMAX_DELAY);

  while (true) {
    delay(10);
  }
}

void ESPADFSpeaker::stop() {
  if (this->state_ == speaker::STATE_STOPPED)
    return;
  if (this->state_ == speaker::STATE_STARTING) {
    this->state_ = speaker::STATE_STOPPED;
    return;
  }
  this->state_ = speaker::STATE_STOPPING;
  DataEvent data;
  data.stop = true;
  xQueueSendToFront(this->buffer_queue_, &data, portMAX_DELAY);
}

void ESPADFSpeaker::watch_() {
  TaskEvent event;
  if (xQueueReceive(this->event_queue_, &event, 0) == pdTRUE) {
    switch (event.type) {
      case TaskEventType::STARTING:
      case TaskEventType::STOPPING:
        break;
      case TaskEventType::STARTED:
        this->state_ = speaker::STATE_RUNNING;
        break;
      case TaskEventType::RUNNING:
        this->status_clear_warning();
        break;
      case TaskEventType::STOPPED:
        this->parent_->unlock();
        this->state_ = speaker::STATE_STOPPED;
        vTaskDelete(this->player_task_handle_);
        this->player_task_handle_ = nullptr;
        break;
      case TaskEventType::WARNING:
        ESP_LOGW(TAG, "Error writing to pipeline: %s", esp_err_to_name(event.err));
        this->status_set_warning();
        break;
    }
  }
}

void ESPADFSpeaker::loop() {
  this->watch_();
  switch (this->state_) {
    case speaker::STATE_STARTING:
      this->start_();
      break;
    case speaker::STATE_RUNNING:
    case speaker::STATE_STOPPING:
    case speaker::STATE_STOPPED:
      break;
  }
}

size_t ESPADFSpeaker::play(const uint8_t *data, size_t length) {
  if (this->state_ != speaker::STATE_RUNNING && this->state_ != speaker::STATE_STARTING) {
    this->start();
  }
  size_t remaining = length;
  size_t index = 0;
  while (remaining > 0) {
    DataEvent event;
    event.stop = false;
    size_t to_send_length = std::min(remaining, BUFFER_SIZE);
    event.len = to_send_length;
    memcpy(event.data, data + index, to_send_length);
    if (xQueueSend(this->buffer_queue_, &event, 0) != pdTRUE) {
      return index;  // Queue full
    }
    remaining -= to_send_length;
    index += to_send_length;
  }
  return index;
}

bool ESPADFSpeaker::has_buffered_data() const { return uxQueueMessagesWaiting(this->buffer_queue_) > 0; }

}  // namespace esp_adf
}  // namespace esphome

#endif  // USE_ESP_IDF
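ESPADFSpeaker::play() above chops incoming audio into BUFFER_SIZE chunks, queues each one as a DataEvent for the player task, and returns early with the number of bytes accepted when the queue fills, leaving the caller to retry the remainder later. A hypothetical Python sketch of that chunk-and-queue behaviour (constants reuse the values from the files above; the function and queue names are illustrative):

```python
import queue
from dataclasses import dataclass

BUFFER_SIZE = 1024  # same constant as esp_adf.h
BUFFER_COUNT = 50   # same queue depth as the speaker task


@dataclass
class DataEvent:
    stop: bool
    data: bytes


buffer_queue: "queue.Queue[DataEvent]" = queue.Queue(maxsize=BUFFER_COUNT)


def play(data: bytes) -> int:
    """Chunk audio into BUFFER_SIZE events; return how many bytes were queued."""
    index = 0
    while index < len(data):
        chunk = data[index : index + BUFFER_SIZE]
        try:
            buffer_queue.put_nowait(DataEvent(stop=False, data=chunk))
        except queue.Full:
            return index  # caller retries the remaining bytes later
        index += len(chunk)
    return index


print(play(b"\x00" * 4000))  # 4000: four chunks fit easily in an empty queue
```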
esphome/components/esp_adf/speaker/esp_adf_speaker.h (new file, 48 lines)
@@ -0,0 +1,48 @@
#pragma once

#ifdef USE_ESP_IDF

#include "../esp_adf.h"

#include <freertos/FreeRTOS.h>
#include <freertos/queue.h>

#include "esphome/components/speaker/speaker.h"
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"

#include <audio_element.h>
#include <audio_pipeline.h>

namespace esphome {
namespace esp_adf {

class ESPADFSpeaker : public ESPADFPipeline, public speaker::Speaker, public Component {
 public:
  float get_setup_priority() const override { return esphome::setup_priority::LATE; }

  void setup() override;
  void loop() override;

  void start() override;
  void stop() override;

  size_t play(const uint8_t *data, size_t length) override;

  bool has_buffered_data() const override;

 protected:
  void start_();
  void watch_();

  static void player_task(void *params);

  TaskHandle_t player_task_handle_{nullptr};
  QueueHandle_t buffer_queue_;
  QueueHandle_t event_queue_;
};

}  // namespace esp_adf
}  // namespace esphome

#endif  // USE_ESP_IDF
@@ -3,7 +3,6 @@
#ifdef USE_ARDUINO

#include "esphome/components/remote_base/remote_base.h"
#include "esphome/components/remote_transmitter/remote_transmitter.h"
#include <IRSender.h>  // arduino-heatpump library

namespace esphome {
@@ -11,14 +10,13 @@ namespace heatpumpir {

class IRSenderESPHome : public IRSender {
 public:
  IRSenderESPHome(remote_transmitter::RemoteTransmitterComponent *transmitter)
      : IRSender(0), transmit_(transmitter->transmit()){};
  IRSenderESPHome(remote_base::RemoteTransmitterBase *transmitter) : IRSender(0), transmit_(transmitter->transmit()){};
  void setFrequency(int frequency) override;  // NOLINT(readability-identifier-naming)
  void space(int space_length) override;
  void mark(int mark_length) override;

 protected:
  remote_transmitter::RemoteTransmitterComponent::TransmitCall transmit_;
  remote_base::RemoteTransmitterBase::TransmitCall transmit_;
};

}  // namespace heatpumpir
@@ -68,6 +68,7 @@ void LD2420Component::dump_config() {
  ESP_LOGCONFIG(TAG, "LD2420:");
  ESP_LOGCONFIG(TAG, "  Firmware Version : %7s", this->ld2420_firmware_ver_);
  ESP_LOGCONFIG(TAG, "LD2420 Number:");
#ifdef USE_NUMBER
  LOG_NUMBER(TAG, "  Gate Timeout:", this->gate_timeout_number_);
  LOG_NUMBER(TAG, "  Gate Max Distance:", this->max_gate_distance_number_);
  LOG_NUMBER(TAG, "  Gate Min Distance:", this->min_gate_distance_number_);
@@ -76,10 +77,13 @@ void LD2420Component::dump_config() {
    LOG_NUMBER(TAG, "  Gate Move Threshold:", this->gate_move_threshold_numbers_[gate]);
    LOG_NUMBER(TAG, "  Gate Still Threshold::", this->gate_still_threshold_numbers_[gate]);
  }
#endif
#ifdef USE_BUTTON
  LOG_BUTTON(TAG, "  Apply Config:", this->apply_config_button_);
  LOG_BUTTON(TAG, "  Revert Edits:", this->revert_config_button_);
  LOG_BUTTON(TAG, "  Factory Reset:", this->factory_reset_button_);
  LOG_BUTTON(TAG, "  Restart Module:", this->restart_module_button_);
#endif
  ESP_LOGCONFIG(TAG, "LD2420 Select:");
  LOG_SELECT(TAG, "  Operating Mode", this->operating_selector_);
  if (this->get_firmware_int_(ld2420_firmware_ver_) < CALIBRATE_VERSION_MIN) {
@@ -183,9 +187,11 @@ void LD2420Component::factory_reset_action() {
    return;
  }
  this->set_min_max_distances_timeout(FACTORY_MAX_GATE, FACTORY_MIN_GATE, FACTORY_TIMEOUT);
#ifdef USE_NUMBER
  this->gate_timeout_number_->state = FACTORY_TIMEOUT;
  this->min_gate_distance_number_->state = FACTORY_MIN_GATE;
  this->max_gate_distance_number_->state = FACTORY_MAX_GATE;
#endif
  for (uint8_t gate = 0; gate < LD2420_TOTAL_GATES; gate++) {
    this->new_config.move_thresh[gate] = FACTORY_MOVE_THRESH[gate];
    this->new_config.still_thresh[gate] = FACTORY_STILL_THRESH[gate];
@@ -147,7 +147,7 @@ void MQTTClientComponent::dump_config() {
    ESP_LOGCONFIG(TAG, "  Availability: '%s'", this->availability_.topic.c_str());
  }
}
bool MQTTClientComponent::can_proceed() { return this->is_connected(); }
bool MQTTClientComponent::can_proceed() { return network::is_disabled() || this->is_connected(); }

void MQTTClientComponent::start_dnslookup_() {
  for (auto &subscription : this->subscriptions_) {
@@ -29,6 +29,14 @@ bool is_connected() {
  return false;
}

bool is_disabled() {
#ifdef USE_WIFI
  if (wifi::global_wifi_component != nullptr)
    return wifi::global_wifi_component->is_disabled();
#endif
  return false;
}

network::IPAddress get_ip_address() {
#ifdef USE_ETHERNET
  if (ethernet::global_eth_component != nullptr)
@@ -8,6 +8,8 @@ namespace network {
|
||||
|
||||
/// Return whether the node is connected to the network (through wifi, eth, ...)
|
||||
bool is_connected();
|
||||
/// Return whether the network is disabled (only wifi for now)
|
||||
bool is_disabled();
|
||||
/// Get the active network hostname
|
||||
std::string get_use_address();
|
||||
IPAddress get_ip_address();
|
||||
|
||||
@@ -36,7 +36,7 @@ CONFIG_SCHEMA = (
|
||||
display.BASIC_DISPLAY_SCHEMA.extend(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(Nextion),
|
||||
cv.Optional(CONF_TFT_URL): cv.All(cv.string, cv.only_with_arduino),
|
||||
cv.Optional(CONF_TFT_URL): cv.url,
|
||||
cv.Optional(CONF_BRIGHTNESS, default=1.0): cv.percentage,
|
||||
cv.Optional(CONF_ON_SETUP): automation.validate_automation(
|
||||
{
|
||||
@@ -85,10 +85,10 @@ async def to_code(config):
|
||||
if CONF_TFT_URL in config:
|
||||
cg.add_define("USE_NEXTION_TFT_UPLOAD")
|
||||
cg.add(var.set_tft_url(config[CONF_TFT_URL]))
|
||||
if CORE.is_esp32:
|
||||
if CORE.is_esp32 and CORE.using_arduino:
|
||||
cg.add_library("WiFiClientSecure", None)
|
||||
cg.add_library("HTTPClient", None)
|
||||
if CORE.is_esp8266:
|
||||
elif CORE.is_esp8266 and CORE.using_arduino:
|
||||
cg.add_library("ESP8266HTTPClient", None)
|
||||
|
||||
if CONF_TOUCH_SLEEP_TIMEOUT in config:
|
||||
|
||||
@@ -128,7 +128,7 @@ void Nextion::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, " Wake On Touch: %s", this->auto_wake_on_touch_ ? "True" : "False");
|
||||
|
||||
if (this->touch_sleep_timeout_ != 0) {
|
||||
ESP_LOGCONFIG(TAG, " Touch Timeout: %d", this->touch_sleep_timeout_);
|
||||
ESP_LOGCONFIG(TAG, " Touch Timeout: %" PRIu32, this->touch_sleep_timeout_);
|
||||
}
|
||||
|
||||
if (this->wake_up_page_ != -1) {
|
||||
@@ -868,6 +868,12 @@ uint16_t Nextion::recv_ret_string_(std::string &response, uint32_t timeout, bool
|
||||
start = millis();
|
||||
|
||||
while ((timeout == 0 && this->available()) || millis() - start <= timeout) {
|
||||
if (!this->available()) {
|
||||
App.feed_wdt();
|
||||
delay(1);
|
||||
continue;
|
||||
}
|
||||
|
||||
this->read_byte(&c);
|
||||
if (c == 0xFF) {
|
||||
nr_of_ff_bytes++;
|
||||
@@ -886,7 +892,7 @@ uint16_t Nextion::recv_ret_string_(std::string &response, uint32_t timeout, bool
|
||||
}
|
||||
}
|
||||
App.feed_wdt();
|
||||
delay(1);
|
||||
delay(2);
|
||||
|
||||
if (exit_flag || ff_flag) {
|
||||
break;
|
||||
|
||||
@@ -12,14 +12,18 @@
|
||||
#include "esphome/components/display/display_color_utils.h"
|
||||
|
||||
#ifdef USE_NEXTION_TFT_UPLOAD
|
||||
#ifdef ARDUINO
|
||||
#ifdef USE_ESP32
|
||||
#include <HTTPClient.h>
|
||||
#endif
|
||||
#endif // USE_ESP32
|
||||
#ifdef USE_ESP8266
|
||||
#include <ESP8266HTTPClient.h>
|
||||
#include <WiFiClientSecure.h>
|
||||
#endif
|
||||
#endif
|
||||
#endif // USE_ESP8266
|
||||
#elif defined(USE_ESP_IDF)
|
||||
#include <esp_http_client.h>
|
||||
#endif // ARDUINO vs ESP-IDF
|
||||
#endif // USE_NEXTION_TFT_UPLOAD
|
||||
|
||||
namespace esphome {
|
||||
namespace nextion {
|
||||
@@ -685,16 +689,18 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
|
||||
|
||||
#ifdef USE_NEXTION_TFT_UPLOAD
|
||||
/**
|
||||
* Set the tft file URL. https seems problamtic with arduino..
|
||||
* Set the tft file URL. https seems problematic with arduino..
|
||||
*/
|
||||
void set_tft_url(const std::string &tft_url) { this->tft_url_ = tft_url; }
|
||||
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Upload the tft file and softreset the Nextion
|
||||
* Upload the tft file and soft reset Nextion
|
||||
* @return bool True: Transfer completed successfully, False: Transfer failed.
|
||||
*/
|
||||
void upload_tft();
|
||||
bool upload_tft();
|
||||
|
||||
void dump_config() override;
|
||||
|
||||
/**
|
||||
@@ -817,16 +823,16 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
|
||||
BearSSL::WiFiClientSecure *wifi_client_secure_{nullptr};
|
||||
WiFiClient *get_wifi_client_();
|
||||
#endif
|
||||
|
||||
int content_length_ = 0;
|
||||
int tft_size_ = 0;
|
||||
#ifdef ARDUINO
|
||||
/**
|
||||
* will request chunk_size chunks from the web server
|
||||
* and send each to the nextion
|
||||
* @param int contentLength Total size of the file
|
||||
* @param uint32_t chunk_size
|
||||
* @return true if success, false for failure.
|
||||
* @param HTTPClient http HTTP client handler.
|
||||
* @param int range_start Position of next byte to transfer.
|
||||
* @return position of last byte transferred, -1 for failure.
|
||||
*/
|
||||
int content_length_ = 0;
|
||||
int tft_size_ = 0;
|
||||
int upload_by_chunks_(HTTPClient *http, int range_start);
|
||||
|
||||
bool upload_with_range_(uint32_t range_start, uint32_t range_end);
|
||||
@@ -839,7 +845,30 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
|
||||
* @return true if success, false for failure.
|
||||
*/
|
||||
bool upload_from_buffer_(const uint8_t *file_buf, size_t buf_size);
|
||||
void upload_end_();
|
||||
/**
|
||||
* Ends the upload process, restarts the Nextion and, if successful,
* restarts the ESP.
* @param successful True: Transfer completed successfully, False: Transfer failed.
* @return bool True: Transfer completed successfully, False: Transfer failed.
|
||||
*/
|
||||
bool upload_end_(bool successful);
|
||||
#elif defined(USE_ESP_IDF)
|
||||
/**
|
||||
* will request 4096 bytes chunks from the web server
|
||||
* and send each to Nextion
|
||||
* @param std::string url Full url for download.
|
||||
* @param int range_start Position of next byte to transfer.
|
||||
* @return position of last byte transferred, -1 for failure.
|
||||
*/
|
||||
int upload_range(const std::string &url, int range_start);
|
||||
/**
|
||||
* Ends the upload process, restarts the Nextion and, if successful,
* restarts the ESP.
* @param successful True: Transfer completed successfully, False: Transfer failed.
* @return bool True: Transfer completed successfully, False: Transfer failed.
|
||||
*/
|
||||
bool upload_end(bool successful);
|
||||
#endif // ARDUINO vs ESP-IDF
|
||||
|
||||
#endif // USE_NEXTION_TFT_UPLOAD
|
||||
|
||||
|
||||
@@ -55,7 +55,7 @@ void Nextion::set_protocol_reparse_mode(bool active_mode) {
|
||||
|
||||
// Set Colors
|
||||
void Nextion::set_component_background_color(const char *component, uint32_t color) {
|
||||
this->add_no_result_to_queue_with_printf_("set_component_background_color", "%s.bco=%d", component, color);
|
||||
this->add_no_result_to_queue_with_printf_("set_component_background_color", "%s.bco=%" PRIu32, component, color);
|
||||
}
|
||||
|
||||
void Nextion::set_component_background_color(const char *component, const char *color) {
|
||||
@@ -68,7 +68,8 @@ void Nextion::set_component_background_color(const char *component, Color color)
|
||||
}
|
||||
|
||||
void Nextion::set_component_pressed_background_color(const char *component, uint32_t color) {
|
||||
this->add_no_result_to_queue_with_printf_("set_component_pressed_background_color", "%s.bco2=%d", component, color);
|
||||
this->add_no_result_to_queue_with_printf_("set_component_pressed_background_color", "%s.bco2=%" PRIu32, component,
|
||||
color);
|
||||
}
|
||||
|
||||
void Nextion::set_component_pressed_background_color(const char *component, const char *color) {
|
||||
@@ -89,7 +90,7 @@ void Nextion::set_component_picc(const char *component, uint8_t pic_id) {
|
||||
}
|
||||
|
||||
void Nextion::set_component_font_color(const char *component, uint32_t color) {
|
||||
this->add_no_result_to_queue_with_printf_("set_component_font_color", "%s.pco=%d", component, color);
|
||||
this->add_no_result_to_queue_with_printf_("set_component_font_color", "%s.pco=%" PRIu32, component, color);
|
||||
}
|
||||
|
||||
void Nextion::set_component_font_color(const char *component, const char *color) {
|
||||
@@ -102,7 +103,7 @@ void Nextion::set_component_font_color(const char *component, Color color) {
|
||||
}
|
||||
|
||||
void Nextion::set_component_pressed_font_color(const char *component, uint32_t color) {
|
||||
this->add_no_result_to_queue_with_printf_("set_component_pressed_font_color", "%s.pco2=%d", component, color);
|
||||
this->add_no_result_to_queue_with_printf_("set_component_pressed_font_color", "%s.pco2=%" PRIu32, component, color);
|
||||
}
|
||||
|
||||
void Nextion::set_component_pressed_font_color(const char *component, const char *color) {
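As an aside on the %" PRIu32 " substitutions in this file, a small standalone sketch of the fixed-width format-macro idiom (illustrative only, not taken from the diff):

#include <cinttypes>
#include <cstdio>

int main() {
  // uint32_t maps to different underlying types across toolchains, so a
  // hard-coded "%d" or "%u" can mismatch and trigger -Wformat warnings;
  // PRIu32 always expands to the right conversion specifier for uint32_t.
  uint32_t color = 65535;
  char buf[64];
  std::snprintf(buf, sizeof(buf), "comp.bco=%" PRIu32, color);
  std::printf("%s\n", buf);  // prints: comp.bco=65535
  return 0;
}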
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
#include "nextion.h"
|
||||
|
||||
#ifdef ARDUINO
|
||||
#ifdef USE_NEXTION_TFT_UPLOAD
|
||||
|
||||
#include "esphome/core/application.h"
|
||||
@@ -128,15 +129,15 @@ int Nextion::upload_by_chunks_(HTTPClient *http, int range_start) {
|
||||
return range_end + 1;
|
||||
}
|
||||
|
||||
void Nextion::upload_tft() {
|
||||
bool Nextion::upload_tft() {
|
||||
if (this->is_updating_) {
|
||||
ESP_LOGD(TAG, "Currently updating");
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!network::is_connected()) {
|
||||
ESP_LOGD(TAG, "network is not connected");
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
|
||||
this->is_updating_ = true;
|
||||
@@ -164,7 +165,7 @@ void Nextion::upload_tft() {
|
||||
ESP_LOGD(TAG, "connection failed");
|
||||
ExternalRAMAllocator<uint8_t> allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
|
||||
allocator.deallocate(this->transfer_buffer_, this->transfer_buffer_size_);
|
||||
return;
|
||||
return false;
|
||||
} else {
|
||||
ESP_LOGD(TAG, "Connected");
|
||||
}
|
||||
@@ -192,7 +193,7 @@ void Nextion::upload_tft() {
|
||||
}
|
||||
|
||||
if ((code != 200 && code != 206) || tries > 5) {
|
||||
this->upload_end_();
|
||||
return this->upload_end_(false);
|
||||
}
|
||||
|
||||
String content_range_string = http.header("Content-Range");
|
||||
@@ -203,7 +204,7 @@ void Nextion::upload_tft() {
|
||||
|
||||
if (this->content_length_ < 4096) {
|
||||
ESP_LOGE(TAG, "Failed to get file size");
|
||||
this->upload_end_();
|
||||
return this->upload_end_(false);
|
||||
}
|
||||
|
||||
ESP_LOGD(TAG, "Updating Nextion %s...", this->device_model_.c_str());
|
||||
@@ -246,7 +247,7 @@ void Nextion::upload_tft() {
|
||||
ESP_LOGD(TAG, "preparation for tft update done");
|
||||
} else {
|
||||
ESP_LOGD(TAG, "preparation for tft update failed %d \"%s\"", response[0], response.c_str());
|
||||
this->upload_end_();
|
||||
return this->upload_end_(false);
|
||||
}
|
||||
|
||||
// Nextion wants 4096 bytes at a time. Make chunk_size a multiple of 4096
|
||||
@@ -280,7 +281,7 @@ void Nextion::upload_tft() {
|
||||
this->transfer_buffer_ = allocator.allocate(chunk_size);
|
||||
|
||||
if (!this->transfer_buffer_)
|
||||
this->upload_end_();
|
||||
return this->upload_end_(false);
|
||||
}
|
||||
|
||||
this->transfer_buffer_size_ = chunk_size;
|
||||
@@ -295,7 +296,7 @@ void Nextion::upload_tft() {
|
||||
result = this->upload_by_chunks_(&http, result);
|
||||
if (result < 0) {
|
||||
ESP_LOGD(TAG, "Error updating Nextion!");
|
||||
this->upload_end_();
|
||||
return this->upload_end_(false);
|
||||
}
|
||||
App.feed_wdt();
|
||||
// NOLINTNEXTLINE(readability-static-accessed-through-instance)
|
||||
@@ -303,16 +304,20 @@ void Nextion::upload_tft() {
|
||||
}
|
||||
ESP_LOGD(TAG, "Successfully updated Nextion!");
|
||||
|
||||
this->upload_end_();
|
||||
return this->upload_end_(true);
|
||||
}
|
||||
|
||||
void Nextion::upload_end_() {
|
||||
bool Nextion::upload_end_(bool successful) {
|
||||
this->is_updating_ = false;
|
||||
ESP_LOGD(TAG, "Restarting Nextion");
|
||||
this->soft_reset();
|
||||
if (successful) {
|
||||
delay(1500); // NOLINT
|
||||
ESP_LOGD(TAG, "Restarting esphome");
|
||||
ESP.restart(); // NOLINT(readability-static-accessed-through-instance)
|
||||
}
|
||||
return successful;
|
||||
}
|
||||
|
||||
#ifdef USE_ESP8266
|
||||
WiFiClient *Nextion::get_wifi_client_() {
|
||||
@@ -337,3 +342,4 @@ WiFiClient *Nextion::get_wifi_client_() {
|
||||
} // namespace esphome
|
||||
|
||||
#endif // USE_NEXTION_TFT_UPLOAD
|
||||
#endif // ARDUINO
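Since upload_tft() now returns bool on both frameworks, a caller can detect a failed transfer. A minimal sketch, assuming an ESPHome lambda and a display with the id my_nextion (neither is part of this diff):

// Sketch only: my_nextion is an assumed display id inside an ESPHome lambda.
// On success upload_end_(true) restarts the ESP, so control normally only
// returns here when the transfer failed and the Nextion was soft reset.
if (!id(my_nextion).upload_tft()) {
  ESP_LOGW("nextion_tft", "TFT upload failed");
}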
|
||||
esphome/components/nextion/nextion_upload_idf.cpp (new file, 268 lines)
@@ -0,0 +1,268 @@
|
||||
#include "nextion.h"
|
||||
|
||||
#ifdef USE_ESP_IDF
|
||||
#ifdef USE_NEXTION_TFT_UPLOAD
|
||||
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/core/util.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/components/network/util.h"
|
||||
|
||||
#include <esp_heap_caps.h>
|
||||
#include <esp_http_client.h>
|
||||
|
||||
namespace esphome {
|
||||
namespace nextion {
|
||||
static const char *const TAG = "nextion_upload";
|
||||
|
||||
// Followed guide
|
||||
// https://unofficialnextion.com/t/nextion-upload-protocol-v1-2-the-fast-one/1044/2
|
||||
|
||||
int Nextion::upload_range(const std::string &url, int range_start) {
|
||||
ESP_LOGVV(TAG, "url: %s", url.c_str());
|
||||
uint range_size = this->tft_size_ - range_start;
|
||||
ESP_LOGVV(TAG, "tft_size_: %i", this->tft_size_);
|
||||
ESP_LOGV(TAG, "Available heap: %u", esp_get_free_heap_size());
|
||||
int range_end = (range_start == 0) ? std::min(this->tft_size_, 16383) : this->tft_size_;
|
||||
if (range_size <= 0 or range_end <= range_start) {
|
||||
ESP_LOGE(TAG, "Invalid range");
|
||||
ESP_LOGD(TAG, "Range start: %i", range_start);
|
||||
ESP_LOGD(TAG, "Range end: %i", range_end);
|
||||
ESP_LOGD(TAG, "Range size: %i", range_size);
|
||||
return -1;
|
||||
}
|
||||
|
||||
esp_http_client_config_t config = {
|
||||
.url = url.c_str(),
|
||||
.cert_pem = nullptr,
|
||||
};
|
||||
esp_http_client_handle_t client = esp_http_client_init(&config);
|
||||
|
||||
char range_header[64];
|
||||
sprintf(range_header, "bytes=%d-%d", range_start, range_end);
|
||||
ESP_LOGV(TAG, "Requesting range: %s", range_header);
|
||||
esp_http_client_set_header(client, "Range", range_header);
|
||||
ESP_LOGVV(TAG, "Available heap: %u", esp_get_free_heap_size());
|
||||
|
||||
ESP_LOGV(TAG, "Opening http connetion");
|
||||
esp_err_t err;
|
||||
if ((err = esp_http_client_open(client, 0)) != ESP_OK) {
|
||||
ESP_LOGE(TAG, "Failed to open HTTP connection: %s", esp_err_to_name(err));
|
||||
esp_http_client_cleanup(client);
|
||||
return -1;
|
||||
}
|
||||
|
||||
ESP_LOGV(TAG, "Fetch content length");
|
||||
int content_length = esp_http_client_fetch_headers(client);
|
||||
ESP_LOGV(TAG, "content_length = %d", content_length);
|
||||
if (content_length <= 0) {
|
||||
ESP_LOGE(TAG, "Failed to get content length: %d", content_length);
|
||||
esp_http_client_cleanup(client);
|
||||
return -1;
|
||||
}
|
||||
|
||||
int total_read_len = 0, read_len;
|
||||
|
||||
ESP_LOGV(TAG, "Allocate buffer");
|
||||
uint8_t *buffer = new uint8_t[4096];
|
||||
std::string recv_string;
|
||||
if (buffer == nullptr) {
|
||||
ESP_LOGE(TAG, "Failed to allocate memory for buffer");
|
||||
ESP_LOGV(TAG, "Available heap: %u", esp_get_free_heap_size());
|
||||
} else {
|
||||
ESP_LOGV(TAG, "Memory for buffer allocated successfully");
|
||||
|
||||
while (true) {
|
||||
App.feed_wdt();
|
||||
ESP_LOGVV(TAG, "Available heap: %u", esp_get_free_heap_size());
|
||||
int read_len = esp_http_client_read(client, reinterpret_cast<char *>(buffer), 4096);
|
||||
ESP_LOGVV(TAG, "Read %d bytes from HTTP client, writing to UART", read_len);
|
||||
if (read_len > 0) {
|
||||
this->write_array(buffer, read_len);
|
||||
ESP_LOGVV(TAG, "Write to UART successful");
|
||||
this->recv_ret_string_(recv_string, 5000, true);
|
||||
this->content_length_ -= read_len;
|
||||
ESP_LOGD(TAG, "Uploaded %0.2f %%, remaining %d bytes",
|
||||
100.0 * (this->tft_size_ - this->content_length_) / this->tft_size_, this->content_length_);
|
||||
if (recv_string[0] != 0x05) { // 0x05 == "ok"
|
||||
ESP_LOGD(
|
||||
TAG, "recv_string [%s]",
|
||||
format_hex_pretty(reinterpret_cast<const uint8_t *>(recv_string.data()), recv_string.size()).c_str());
|
||||
}
|
||||
// handle partial upload request
|
||||
if (recv_string[0] == 0x08 && recv_string.size() == 5) {
|
||||
uint32_t result = 0;
|
||||
for (int j = 0; j < 4; ++j) {
|
||||
result += static_cast<uint8_t>(recv_string[j + 1]) << (8 * j);
|
||||
}
|
||||
if (result > 0) {
|
||||
ESP_LOGI(TAG, "Nextion reported new range %" PRIu32, result);
|
||||
this->content_length_ = this->tft_size_ - result;
|
||||
// Deallocate the buffer when done
|
||||
delete[] buffer;
|
||||
ESP_LOGVV(TAG, "Memory for buffer deallocated");
|
||||
esp_http_client_cleanup(client);
|
||||
esp_http_client_close(client);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
recv_string.clear();
|
||||
} else if (read_len == 0) {
|
||||
ESP_LOGV(TAG, "End of HTTP response reached");
|
||||
break; // Exit the loop if there is no more data to read
|
||||
} else {
|
||||
ESP_LOGE(TAG, "Failed to read from HTTP client, error code: %d", read_len);
|
||||
break; // Exit the loop on error
|
||||
}
|
||||
}
|
||||
|
||||
// Deallocate the buffer when done
|
||||
delete[] buffer;
|
||||
ESP_LOGVV(TAG, "Memory for buffer deallocated");
|
||||
}
|
||||
esp_http_client_cleanup(client);
|
||||
esp_http_client_close(client);
|
||||
return range_end + 1;
|
||||
}
|
||||
|
||||
bool Nextion::upload_tft() {
|
||||
ESP_LOGD(TAG, "Nextion TFT upload requested");
|
||||
ESP_LOGD(TAG, "url: %s", this->tft_url_.c_str());
|
||||
|
||||
if (this->is_updating_) {
|
||||
ESP_LOGW(TAG, "Currently updating");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!network::is_connected()) {
|
||||
ESP_LOGE(TAG, "Network is not connected");
|
||||
return false;
|
||||
}
|
||||
|
||||
this->is_updating_ = true;
|
||||
|
||||
// Define the configuration for the HTTP client
|
||||
ESP_LOGV(TAG, "Establishing connection to HTTP server");
|
||||
ESP_LOGVV(TAG, "Available heap: %u", esp_get_free_heap_size());
|
||||
esp_http_client_config_t config = {
|
||||
.url = this->tft_url_.c_str(),
|
||||
.cert_pem = nullptr,
|
||||
.method = HTTP_METHOD_HEAD,
|
||||
.timeout_ms = 15000,
|
||||
};
|
||||
|
||||
// Initialize the HTTP client with the configuration
|
||||
ESP_LOGV(TAG, "Initializing HTTP client");
|
||||
ESP_LOGV(TAG, "Available heap: %u", esp_get_free_heap_size());
|
||||
esp_http_client_handle_t http = esp_http_client_init(&config);
|
||||
if (!http) {
|
||||
ESP_LOGE(TAG, "Failed to initialize HTTP client.");
|
||||
return this->upload_end(false);
|
||||
}
|
||||
|
||||
// Perform the HTTP request
|
||||
ESP_LOGV(TAG, "Check if the client could connect");
|
||||
ESP_LOGV(TAG, "Available heap: %u", esp_get_free_heap_size());
|
||||
esp_err_t err = esp_http_client_perform(http);
|
||||
if (err != ESP_OK) {
|
||||
ESP_LOGE(TAG, "HTTP request failed: %s", esp_err_to_name(err));
|
||||
esp_http_client_cleanup(http);
|
||||
return this->upload_end(false);
|
||||
}
|
||||
|
||||
// Check the HTTP Status Code
|
||||
int status_code = esp_http_client_get_status_code(http);
|
||||
ESP_LOGV(TAG, "HTTP Status Code: %d", status_code);
|
||||
size_t tft_file_size = esp_http_client_get_content_length(http);
|
||||
ESP_LOGD(TAG, "TFT file size: %zu", tft_file_size);
|
||||
|
||||
if (tft_file_size < 4096) {
|
||||
ESP_LOGE(TAG, "File size check failed. Size: %zu", tft_file_size);
|
||||
esp_http_client_cleanup(http);
|
||||
return this->upload_end(false);
|
||||
} else {
|
||||
ESP_LOGV(TAG, "File size check passed. Proceeding...");
|
||||
}
|
||||
this->content_length_ = tft_file_size;
|
||||
this->tft_size_ = tft_file_size;
|
||||
|
||||
ESP_LOGD(TAG, "Updating Nextion");
|
||||
// The Nextion will ignore the update command if it is sleeping
|
||||
|
||||
this->send_command_("sleep=0");
|
||||
this->set_backlight_brightness(1.0);
|
||||
vTaskDelay(pdMS_TO_TICKS(250)); // NOLINT
|
||||
|
||||
App.feed_wdt();
|
||||
char command[128];
|
||||
// Tells the Nextion the content length of the tft file and baud rate it will be sent at
|
||||
// Once the Nextion accepts the command it will wait until the file is successfully uploaded
|
||||
// If it fails for any reason a power cycle of the display will be needed
|
||||
sprintf(command, "whmi-wris %d,%" PRIu32 ",1", this->content_length_, this->parent_->get_baud_rate());
|
||||
|
||||
// Clear serial receive buffer
|
||||
uint8_t d;
|
||||
while (this->available()) {
|
||||
this->read_byte(&d);
|
||||
};
|
||||
|
||||
this->send_command_(command);
|
||||
|
||||
std::string response;
|
||||
ESP_LOGV(TAG, "Waiting for upgrade response");
|
||||
this->recv_ret_string_(response, 2048, true); // This can take some time to return
|
||||
|
||||
// The Nextion display will, if it's ready to accept data, send a 0x05 byte.
|
||||
ESP_LOGD(TAG, "Upgrade response is [%s]",
|
||||
format_hex_pretty(reinterpret_cast<const uint8_t *>(response.data()), response.size()).c_str());
|
||||
|
||||
if (response.find(0x05) != std::string::npos) {
|
||||
ESP_LOGV(TAG, "Preparation for tft update done");
|
||||
} else {
|
||||
ESP_LOGE(TAG, "Preparation for tft update failed %d \"%s\"", response[0], response.c_str());
|
||||
esp_http_client_cleanup(http);
|
||||
return this->upload_end(false);
|
||||
}
|
||||
|
||||
ESP_LOGD(TAG, "Updating tft from \"%s\" with a file size of %d, Heap Size %" PRIu32, this->tft_url_.c_str(),
|
||||
content_length_, esp_get_free_heap_size());
|
||||
|
||||
ESP_LOGV(TAG, "Starting transfer by chunks loop");
|
||||
int result = 0;
|
||||
while (content_length_ > 0) {
|
||||
result = upload_range(this->tft_url_.c_str(), result);
|
||||
if (result < 0) {
|
||||
ESP_LOGE(TAG, "Error updating Nextion!");
|
||||
esp_http_client_cleanup(http);
|
||||
return this->upload_end(false);
|
||||
}
|
||||
App.feed_wdt();
|
||||
ESP_LOGV(TAG, "Heap Size %" PRIu32 ", Bytes left %d", esp_get_free_heap_size(), content_length_);
|
||||
}
|
||||
|
||||
ESP_LOGD(TAG, "Successfully updated Nextion!");
|
||||
|
||||
ESP_LOGD(TAG, "Close HTTP connection");
|
||||
esp_http_client_close(http);
|
||||
esp_http_client_cleanup(http);
|
||||
return upload_end(true);
|
||||
}
|
||||
|
||||
bool Nextion::upload_end(bool successful) {
|
||||
this->is_updating_ = false;
|
||||
ESP_LOGD(TAG, "Restarting Nextion");
|
||||
this->soft_reset();
|
||||
vTaskDelay(pdMS_TO_TICKS(1500)); // NOLINT
|
||||
if (successful) {
|
||||
ESP_LOGD(TAG, "Restarting esphome");
|
||||
esp_restart(); // NOLINT(readability-static-accessed-through-instance)
|
||||
}
|
||||
return successful;
|
||||
}
|
||||
|
||||
} // namespace nextion
|
||||
} // namespace esphome
|
||||
|
||||
#endif // USE_NEXTION_TFT_UPLOAD
|
||||
#endif // USE_ESP_IDF
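To make the protocol steps of the new ESP-IDF uploader concrete, a small standalone sketch with assumed example values (not part of the diff) of the upgrade command and the first Range header the code above builds:

#include <algorithm>
#include <cinttypes>
#include <cstdio>

int main() {
  int content_length = 1024000;  // assumed .tft size in bytes
  uint32_t baud_rate = 115200;   // assumed UART baud rate

  // Same format as the uploader: announce file size and transfer baud rate.
  char command[128];
  std::sprintf(command, "whmi-wris %d,%" PRIu32 ",1", content_length, baud_rate);
  std::printf("%s\n", command);  // whmi-wris 1024000,115200,1

  // The first HTTP range is capped at 16383 bytes; later ranges run to the end.
  int range_start = 0;
  int range_end = (range_start == 0) ? std::min(content_length, 16383) : content_length;
  char range_header[64];
  std::sprintf(range_header, "bytes=%d-%d", range_start, range_end);
  std::printf("%s\n", range_header);  // bytes=0-16383
  return 0;
}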
|
||||
@@ -2,7 +2,7 @@ from math import log
|
||||
|
||||
import esphome.config_validation as cv
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import sensor
|
||||
from esphome.components import sensor, resistance_sampler
|
||||
from esphome.const import (
|
||||
CONF_CALIBRATION,
|
||||
CONF_REFERENCE_RESISTANCE,
|
||||
@@ -15,6 +15,8 @@ from esphome.const import (
|
||||
UNIT_CELSIUS,
|
||||
)
|
||||
|
||||
AUTO_LOAD = ["resistance_sampler"]
|
||||
|
||||
ntc_ns = cg.esphome_ns.namespace("ntc")
|
||||
NTC = ntc_ns.class_("NTC", cg.Component, sensor.Sensor)
|
||||
|
||||
@@ -124,7 +126,7 @@ CONFIG_SCHEMA = (
|
||||
)
|
||||
.extend(
|
||||
{
|
||||
cv.Required(CONF_SENSOR): cv.use_id(sensor.Sensor),
|
||||
cv.Required(CONF_SENSOR): cv.use_id(resistance_sampler.ResistanceSampler),
|
||||
cv.Required(CONF_CALIBRATION): process_calibration,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -52,8 +52,9 @@ RemoteReceiverTrigger = ns.class_(
|
||||
"RemoteReceiverTrigger", automation.Trigger, RemoteReceiverListener
|
||||
)
|
||||
RemoteTransmitterDumper = ns.class_("RemoteTransmitterDumper")
|
||||
RemoteTransmittable = ns.class_("RemoteTransmittable")
|
||||
RemoteTransmitterActionBase = ns.class_(
|
||||
"RemoteTransmitterActionBase", automation.Action
|
||||
"RemoteTransmitterActionBase", RemoteTransmittable, automation.Action
|
||||
)
|
||||
RemoteReceiverBase = ns.class_("RemoteReceiverBase")
|
||||
RemoteTransmitterBase = ns.class_("RemoteTransmitterBase")
|
||||
@@ -68,11 +69,30 @@ def templatize(value):
|
||||
return cv.Schema(ret)
|
||||
|
||||
|
||||
REMOTE_LISTENER_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.GenerateID(CONF_RECEIVER_ID): cv.use_id(RemoteReceiverBase),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
REMOTE_TRANSMITTABLE_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(RemoteTransmitterBase),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def register_listener(var, config):
|
||||
receiver = await cg.get_variable(config[CONF_RECEIVER_ID])
|
||||
cg.add(receiver.register_listener(var))
|
||||
|
||||
|
||||
async def register_transmittable(var, config):
|
||||
transmitter_ = await cg.get_variable(config[CONF_TRANSMITTER_ID])
|
||||
cg.add(var.set_transmitter(transmitter_))
|
||||
|
||||
|
||||
def register_binary_sensor(name, type, schema):
|
||||
return BINARY_SENSOR_REGISTRY.register(name, type, schema)
|
||||
|
||||
@@ -129,10 +149,9 @@ def validate_repeat(value):
|
||||
|
||||
BASE_REMOTE_TRANSMITTER_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(RemoteTransmitterBase),
|
||||
cv.Optional(CONF_REPEAT): validate_repeat,
|
||||
}
|
||||
)
|
||||
).extend(REMOTE_TRANSMITTABLE_SCHEMA)
|
||||
|
||||
|
||||
def register_action(name, type_, schema):
|
||||
@@ -143,9 +162,8 @@ def register_action(name, type_, schema):
|
||||
|
||||
def decorator(func):
|
||||
async def new_func(config, action_id, template_arg, args):
|
||||
transmitter = await cg.get_variable(config[CONF_TRANSMITTER_ID])
|
||||
var = cg.new_Pvariable(action_id, template_arg)
|
||||
cg.add(var.set_parent(transmitter))
|
||||
await register_transmittable(var, config)
|
||||
if CONF_REPEAT in config:
|
||||
conf = config[CONF_REPEAT]
|
||||
template_ = await cg.templatable(conf[CONF_TIMES], args, cg.uint32)
|
||||
@@ -1539,7 +1557,7 @@ MIDEA_SCHEMA = cv.Schema(
|
||||
|
||||
@register_binary_sensor("midea", MideaBinarySensor, MIDEA_SCHEMA)
|
||||
def midea_binary_sensor(var, config):
|
||||
cg.add(var.set_code(config[CONF_CODE]))
|
||||
cg.add(var.set_data(config[CONF_CODE]))
|
||||
|
||||
|
||||
@register_trigger("midea", MideaTrigger, MideaData)
|
||||
|
||||
@@ -67,20 +67,7 @@ class MideaProtocol : public RemoteProtocol<MideaData> {
|
||||
void dump(const MideaData &data) override;
|
||||
};
|
||||
|
||||
class MideaBinarySensor : public RemoteReceiverBinarySensorBase {
|
||||
public:
|
||||
bool matches(RemoteReceiveData src) override {
|
||||
auto data = MideaProtocol().decode(src);
|
||||
return data.has_value() && data.value() == this->data_;
|
||||
}
|
||||
void set_code(const std::vector<uint8_t> &code) { this->data_ = code; }
|
||||
|
||||
protected:
|
||||
MideaData data_;
|
||||
};
|
||||
|
||||
using MideaTrigger = RemoteReceiverTrigger<MideaProtocol, MideaData>;
|
||||
using MideaDumper = RemoteReceiverDumper<MideaProtocol, MideaData>;
|
||||
DECLARE_REMOTE_PROTOCOL(Midea)
|
||||
|
||||
template<typename... Ts> class MideaAction : public RemoteTransmitterActionBase<Ts...> {
|
||||
TEMPLATABLE_VALUE(std::vector<uint8_t>, code)
|
||||
|
||||
@@ -15,6 +15,8 @@ struct RCSwitchData {
|
||||
|
||||
class RCSwitchBase {
|
||||
public:
|
||||
using ProtocolData = RCSwitchData;
|
||||
|
||||
RCSwitchBase() = default;
|
||||
RCSwitchBase(uint32_t sync_high, uint32_t sync_low, uint32_t zero_high, uint32_t zero_low, uint32_t one_high,
|
||||
uint32_t one_low, bool inverted);
|
||||
@@ -213,7 +215,7 @@ class RCSwitchDumper : public RemoteReceiverDumperBase {
|
||||
bool dump(RemoteReceiveData src) override;
|
||||
};
|
||||
|
||||
using RCSwitchTrigger = RemoteReceiverTrigger<RCSwitchBase, RCSwitchData>;
|
||||
using RCSwitchTrigger = RemoteReceiverTrigger<RCSwitchBase>;
|
||||
|
||||
} // namespace remote_base
|
||||
} // namespace esphome
|
||||
|
||||
@@ -127,6 +127,14 @@ class RemoteTransmitterBase : public RemoteComponentBase {
|
||||
this->temp_.reset();
|
||||
return TransmitCall(this);
|
||||
}
|
||||
template<typename Protocol>
|
||||
void transmit(const typename Protocol::ProtocolData &data, uint32_t send_times = 1, uint32_t send_wait = 0) {
|
||||
auto call = this->transmit();
|
||||
Protocol().encode(call.get_data(), data);
|
||||
call.set_send_times(send_times);
|
||||
call.set_send_wait(send_wait);
|
||||
call.perform();
|
||||
}
|
||||
|
||||
protected:
|
||||
void send_(uint32_t send_times, uint32_t send_wait);
|
||||
@@ -184,12 +192,13 @@ class RemoteReceiverBinarySensorBase : public binary_sensor::BinarySensorInitial
|
||||
|
||||
template<typename T> class RemoteProtocol {
|
||||
public:
|
||||
virtual void encode(RemoteTransmitData *dst, const T &data) = 0;
|
||||
virtual optional<T> decode(RemoteReceiveData src) = 0;
|
||||
virtual void dump(const T &data) = 0;
|
||||
using ProtocolData = T;
|
||||
virtual void encode(RemoteTransmitData *dst, const ProtocolData &data) = 0;
|
||||
virtual optional<ProtocolData> decode(RemoteReceiveData src) = 0;
|
||||
virtual void dump(const ProtocolData &data) = 0;
|
||||
};
|
||||
|
||||
template<typename T, typename D> class RemoteReceiverBinarySensor : public RemoteReceiverBinarySensorBase {
|
||||
template<typename T> class RemoteReceiverBinarySensor : public RemoteReceiverBinarySensorBase {
|
||||
public:
|
||||
RemoteReceiverBinarySensor() : RemoteReceiverBinarySensorBase() {}
|
||||
|
||||
@@ -201,13 +210,14 @@ template<typename T, typename D> class RemoteReceiverBinarySensor : public Remot
|
||||
}
|
||||
|
||||
public:
|
||||
void set_data(D data) { data_ = data; }
|
||||
void set_data(typename T::ProtocolData data) { data_ = data; }
|
||||
|
||||
protected:
|
||||
D data_;
|
||||
typename T::ProtocolData data_;
|
||||
};
|
||||
|
||||
template<typename T, typename D> class RemoteReceiverTrigger : public Trigger<D>, public RemoteReceiverListener {
|
||||
template<typename T>
|
||||
class RemoteReceiverTrigger : public Trigger<typename T::ProtocolData>, public RemoteReceiverListener {
|
||||
protected:
|
||||
bool on_receive(RemoteReceiveData src) override {
|
||||
auto proto = T();
|
||||
@@ -220,28 +230,36 @@ template<typename T, typename D> class RemoteReceiverTrigger : public Trigger<D>
|
||||
}
|
||||
};
|
||||
|
||||
template<typename... Ts> class RemoteTransmitterActionBase : public Action<Ts...> {
|
||||
class RemoteTransmittable {
|
||||
public:
|
||||
void set_parent(RemoteTransmitterBase *parent) { this->parent_ = parent; }
|
||||
RemoteTransmittable() {}
|
||||
RemoteTransmittable(RemoteTransmitterBase *transmitter) : transmitter_(transmitter) {}
|
||||
void set_transmitter(RemoteTransmitterBase *transmitter) { this->transmitter_ = transmitter; }
|
||||
|
||||
TEMPLATABLE_VALUE(uint32_t, send_times);
|
||||
TEMPLATABLE_VALUE(uint32_t, send_wait);
|
||||
protected:
|
||||
template<typename Protocol>
|
||||
void transmit_(const typename Protocol::ProtocolData &data, uint32_t send_times = 1, uint32_t send_wait = 0) {
|
||||
this->transmitter_->transmit<Protocol>(data, send_times, send_wait);
|
||||
}
|
||||
RemoteTransmitterBase *transmitter_;
|
||||
};
|
||||
|
||||
template<typename... Ts> class RemoteTransmitterActionBase : public RemoteTransmittable, public Action<Ts...> {
|
||||
TEMPLATABLE_VALUE(uint32_t, send_times)
|
||||
TEMPLATABLE_VALUE(uint32_t, send_wait)
|
||||
|
||||
protected:
|
||||
void play(Ts... x) override {
|
||||
auto call = this->parent_->transmit();
|
||||
auto call = this->transmitter_->transmit();
|
||||
this->encode(call.get_data(), x...);
|
||||
call.set_send_times(this->send_times_.value_or(x..., 1));
|
||||
call.set_send_wait(this->send_wait_.value_or(x..., 0));
|
||||
call.perform();
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual void encode(RemoteTransmitData *dst, Ts... x) = 0;
|
||||
|
||||
RemoteTransmitterBase *parent_{};
|
||||
};
|
||||
|
||||
template<typename T, typename D> class RemoteReceiverDumper : public RemoteReceiverDumperBase {
|
||||
template<typename T> class RemoteReceiverDumper : public RemoteReceiverDumperBase {
|
||||
public:
|
||||
bool dump(RemoteReceiveData src) override {
|
||||
auto proto = T();
|
||||
@@ -254,9 +272,9 @@ template<typename T, typename D> class RemoteReceiverDumper : public RemoteRecei
|
||||
};
|
||||
|
||||
#define DECLARE_REMOTE_PROTOCOL_(prefix) \
|
||||
using prefix##BinarySensor = RemoteReceiverBinarySensor<prefix##Protocol, prefix##Data>; \
|
||||
using prefix##Trigger = RemoteReceiverTrigger<prefix##Protocol, prefix##Data>; \
|
||||
using prefix##Dumper = RemoteReceiverDumper<prefix##Protocol, prefix##Data>;
|
||||
using prefix##BinarySensor = RemoteReceiverBinarySensor<prefix##Protocol>; \
|
||||
using prefix##Trigger = RemoteReceiverTrigger<prefix##Protocol>; \
|
||||
using prefix##Dumper = RemoteReceiverDumper<prefix##Protocol>;
|
||||
#define DECLARE_REMOTE_PROTOCOL(prefix) DECLARE_REMOTE_PROTOCOL_(prefix)
|
||||
|
||||
} // namespace remote_base
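The ProtocolData typedef, the RemoteTransmittable mixin and the single-parameter DECLARE_REMOTE_PROTOCOL aliases above let code other than automations transmit protocol frames through one helper. A hedged sketch of a hypothetical consumer, assuming the existing NECProtocol/NECData pair from remote_base:

#include "esphome/components/remote_base/nec_protocol.h"
#include "esphome/components/remote_base/remote_base.h"

namespace esphome {
namespace my_ir_component {  // hypothetical component namespace

class MyIrBlaster : public remote_base::RemoteTransmittable {
 public:
  // Reuse the base constructor that stores the transmitter pointer.
  using remote_base::RemoteTransmittable::RemoteTransmittable;

  void send_power_toggle() {
    remote_base::NECData data{};
    data.address = 0x10EF;  // assumed example codes
    data.command = 0x8877;
    // Encodes the frame via NECProtocol and performs the TransmitCall,
    // repeating it three times with a 20 ms gap between sends.
    this->transmit_<remote_base::NECProtocol>(data, 3, 20000);
  }
};

}  // namespace my_ir_component
}  // namespace esphome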
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/components/resistance_sampler/resistance_sampler.h"
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
#include "esphome/core/component.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace resistance {
|
||||
@@ -11,7 +12,7 @@ enum ResistanceConfiguration {
|
||||
DOWNSTREAM,
|
||||
};
|
||||
|
||||
class ResistanceSensor : public Component, public sensor::Sensor {
|
||||
class ResistanceSensor : public Component, public sensor::Sensor, resistance_sampler::ResistanceSampler {
|
||||
public:
|
||||
void set_sensor(Sensor *sensor) { sensor_ = sensor; }
|
||||
void set_configuration(ResistanceConfiguration configuration) { configuration_ = configuration; }
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.components import sensor
|
||||
from esphome.components import sensor, resistance_sampler
|
||||
from esphome.const import (
|
||||
CONF_SENSOR,
|
||||
STATE_CLASS_MEASUREMENT,
|
||||
@@ -8,8 +8,15 @@ from esphome.const import (
|
||||
ICON_FLASH,
|
||||
)
|
||||
|
||||
AUTO_LOAD = ["resistance_sampler"]
|
||||
|
||||
resistance_ns = cg.esphome_ns.namespace("resistance")
|
||||
ResistanceSensor = resistance_ns.class_("ResistanceSensor", cg.Component, sensor.Sensor)
|
||||
ResistanceSensor = resistance_ns.class_(
|
||||
"ResistanceSensor",
|
||||
cg.Component,
|
||||
sensor.Sensor,
|
||||
resistance_sampler.ResistanceSampler,
|
||||
)
|
||||
|
||||
CONF_REFERENCE_VOLTAGE = "reference_voltage"
|
||||
CONF_CONFIGURATION = "configuration"
|
||||
|
||||
esphome/components/resistance_sampler/__init__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
import esphome.codegen as cg
|
||||
|
||||
resistance_sampler_ns = cg.esphome_ns.namespace("resistance_sampler")
|
||||
ResistanceSampler = resistance_sampler_ns.class_("ResistanceSampler")
|
||||
|
||||
CODEOWNERS = ["@jesserockz"]
|
||||
esphome/components/resistance_sampler/resistance_sampler.h (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
#pragma once
|
||||
|
||||
namespace esphome {
|
||||
namespace resistance_sampler {
|
||||
|
||||
/// Abstract interface to mark components that provide resistance values.
|
||||
class ResistanceSampler {};
|
||||
|
||||
} // namespace resistance_sampler
|
||||
} // namespace esphome
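For reference, a minimal sketch of a hypothetical sensor opting in to the new marker interface so it can be passed where the ntc platform now validates against resistance_sampler.ResistanceSampler (names below are assumptions, not part of the diff):

#include "esphome/components/resistance_sampler/resistance_sampler.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/core/component.h"

namespace esphome {
namespace my_resistance_source {  // hypothetical component

// Inheriting the empty marker class is all that is needed on the C++ side;
// the matching Python class_() declaration would also list
// resistance_sampler.ResistanceSampler so cv.use_id() validation accepts it.
class MyResistanceSource : public Component,
                           public sensor::Sensor,
                           public resistance_sampler::ResistanceSampler {
 public:
  void update_reading(float ohms) { this->publish_state(ohms); }  // resistance in ohms
};

}  // namespace my_resistance_source
}  // namespace esphome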
|
||||
@@ -74,12 +74,12 @@ def _format_framework_arduino_version(ver: cv.Version) -> str:
|
||||
# The default/recommended arduino framework version
|
||||
# - https://github.com/earlephilhower/arduino-pico/releases
|
||||
# - https://api.registry.platformio.org/v3/packages/earlephilhower/tool/framework-arduinopico
|
||||
RECOMMENDED_ARDUINO_FRAMEWORK_VERSION = cv.Version(3, 4, 0)
|
||||
RECOMMENDED_ARDUINO_FRAMEWORK_VERSION = cv.Version(3, 6, 0)
|
||||
|
||||
# The platformio/raspberrypi version to use for arduino frameworks
|
||||
# - https://github.com/platformio/platform-raspberrypi/releases
|
||||
# - https://api.registry.platformio.org/v3/packages/platformio/platform/raspberrypi
|
||||
ARDUINO_PLATFORM_VERSION = cv.Version(1, 9, 0)
|
||||
ARDUINO_PLATFORM_VERSION = cv.Version(1, 10, 0)
|
||||
|
||||
|
||||
def _arduino_check_versions(value):
|
||||
|
||||
@@ -610,6 +610,11 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
|
||||
if (code == "wake-word-timeout" || code == "wake_word_detection_aborted") {
|
||||
// Don't change state here since either the "tts-end" or "run-end" events will do it.
|
||||
return;
|
||||
} else if (code == "wake-provider-missing" || code == "wake-engine-missing") {
|
||||
// Wake word is not set up or not ready on Home Assistant so stop and do not retry until user starts again.
|
||||
this->request_stop();
|
||||
this->error_trigger_->trigger(code, message);
|
||||
return;
|
||||
}
|
||||
ESP_LOGE(TAG, "Error: %s - %s", code.c_str(), message.c_str());
|
||||
if (this->state_ != State::IDLE) {
|
||||
|
||||
@@ -389,6 +389,10 @@ void WiFiComponent::print_connect_params_() {
|
||||
bssid_t bssid = wifi_bssid();
|
||||
|
||||
ESP_LOGCONFIG(TAG, " Local MAC: %s", get_mac_address_pretty().c_str());
|
||||
if (this->is_disabled()) {
|
||||
ESP_LOGCONFIG(TAG, " WiFi is disabled!");
|
||||
return;
|
||||
}
|
||||
ESP_LOGCONFIG(TAG, " SSID: " LOG_SECRET("'%s'"), wifi_ssid().c_str());
|
||||
ESP_LOGCONFIG(TAG, " IP Address: %s", wifi_sta_ip().str().c_str());
|
||||
ESP_LOGCONFIG(TAG, " BSSID: " LOG_SECRET("%02X:%02X:%02X:%02X:%02X:%02X"), bssid[0], bssid[1], bssid[2], bssid[3],
|
||||
|
||||
@@ -43,11 +43,17 @@ def validate_mode(mode):
|
||||
return mode
|
||||
|
||||
|
||||
def validate_pin(pin):
|
||||
if pin in (8, 9):
|
||||
raise cv.Invalid(f"pin {pin} doesn't exist")
|
||||
return pin
|
||||
|
||||
|
||||
XL9535_PIN_SCHEMA = cv.All(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(XL9535GPIOPin),
|
||||
cv.Required(CONF_XL9535): cv.use_id(XL9535Component),
|
||||
cv.Required(CONF_NUMBER): cv.int_range(min=0, max=15),
|
||||
cv.Required(CONF_NUMBER): cv.All(cv.int_range(min=0, max=17), validate_pin),
|
||||
cv.Optional(CONF_MODE, default={}): cv.All(
|
||||
{
|
||||
cv.Optional(CONF_INPUT, default=False): cv.boolean,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Constants used by esphome."""
|
||||
|
||||
__version__ = "2023.11.0b2"
|
||||
__version__ = "2023.12.0-dev"
|
||||
|
||||
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
|
||||
VALID_SUBSTITUTIONS_CHARACTERS = (
|
||||
|
||||
@@ -64,6 +64,7 @@
|
||||
// IDF-specific feature flags
|
||||
#ifdef USE_ESP_IDF
|
||||
#define USE_MQTT_IDF_ENQUEUE
|
||||
#define USE_ESP_ADF
|
||||
#endif
|
||||
|
||||
// ESP32-specific feature flags
|
||||
|
||||
esphome/dashboard/core.py (new file, 95 lines)
@@ -0,0 +1,95 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import threading
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ..zeroconf import DiscoveredImport
|
||||
from .entries import DashboardEntry
|
||||
from .settings import DashboardSettings
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .status.mdns import MDNSStatus
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def list_dashboard_entries() -> list[DashboardEntry]:
|
||||
"""List all dashboard entries."""
|
||||
return DASHBOARD.settings.entries()
|
||||
|
||||
|
||||
class ESPHomeDashboard:
|
||||
"""Class that represents the dashboard."""
|
||||
|
||||
__slots__ = (
|
||||
"loop",
|
||||
"ping_result",
|
||||
"import_result",
|
||||
"stop_event",
|
||||
"ping_request",
|
||||
"mqtt_ping_request",
|
||||
"mdns_status",
|
||||
"settings",
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the ESPHomeDashboard."""
|
||||
self.loop: asyncio.AbstractEventLoop | None = None
|
||||
self.ping_result: dict[str, bool | None] = {}
|
||||
self.import_result: dict[str, DiscoveredImport] = {}
|
||||
self.stop_event = threading.Event()
|
||||
self.ping_request: asyncio.Event | None = None
|
||||
self.mqtt_ping_request = threading.Event()
|
||||
self.mdns_status: MDNSStatus | None = None
|
||||
self.settings: DashboardSettings = DashboardSettings()
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
"""Setup the dashboard."""
|
||||
self.loop = asyncio.get_running_loop()
|
||||
self.ping_request = asyncio.Event()
|
||||
|
||||
async def async_run(self) -> None:
|
||||
"""Run the dashboard."""
|
||||
settings = self.settings
|
||||
mdns_task: asyncio.Task | None = None
|
||||
ping_status_task: asyncio.Task | None = None
|
||||
|
||||
if settings.status_use_ping:
|
||||
from .status.ping import PingStatus
|
||||
|
||||
ping_status = PingStatus()
|
||||
ping_status_task = asyncio.create_task(ping_status.async_run())
|
||||
else:
|
||||
from .status.mdns import MDNSStatus
|
||||
|
||||
mdns_status = MDNSStatus()
|
||||
await mdns_status.async_refresh_hosts()
|
||||
self.mdns_status = mdns_status
|
||||
mdns_task = asyncio.create_task(mdns_status.async_run())
|
||||
|
||||
if settings.status_use_mqtt:
|
||||
from .status.mqtt import MqttStatusThread
|
||||
|
||||
status_thread_mqtt = MqttStatusThread()
|
||||
status_thread_mqtt.start()
|
||||
|
||||
shutdown_event = asyncio.Event()
|
||||
try:
|
||||
await shutdown_event.wait()
|
||||
finally:
|
||||
_LOGGER.info("Shutting down...")
|
||||
self.stop_event.set()
|
||||
self.ping_request.set()
|
||||
if ping_status_task:
|
||||
ping_status_task.cancel()
|
||||
if mdns_task:
|
||||
mdns_task.cancel()
|
||||
if settings.status_use_mqtt:
|
||||
status_thread_mqtt.join()
|
||||
self.mqtt_ping_request.set()
|
||||
await asyncio.sleep(0)
|
||||
|
||||
|
||||
DASHBOARD = ESPHomeDashboard()
|
||||
(Diff for one file suppressed because it is too large.)
esphome/dashboard/entries.py (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from esphome import const
|
||||
from esphome.storage_json import StorageJSON, ext_storage_path
|
||||
|
||||
|
||||
class DashboardEntry:
|
||||
"""Represents a single dashboard entry.
|
||||
|
||||
This class is thread-safe and read-only.
|
||||
"""
|
||||
|
||||
__slots__ = ("path", "_storage", "_loaded_storage")
|
||||
|
||||
def __init__(self, path: str) -> None:
|
||||
"""Initialize the DashboardEntry."""
|
||||
self.path = path
|
||||
self._storage = None
|
||||
self._loaded_storage = False
|
||||
|
||||
def __repr__(self):
|
||||
"""Return the representation of this entry."""
|
||||
return (
|
||||
f"DashboardEntry({self.path} "
|
||||
f"address={self.address} "
|
||||
f"web_port={self.web_port} "
|
||||
f"name={self.name} "
|
||||
f"no_mdns={self.no_mdns})"
|
||||
)
|
||||
|
||||
@property
|
||||
def filename(self):
|
||||
"""Return the filename of this entry."""
|
||||
return os.path.basename(self.path)
|
||||
|
||||
@property
|
||||
def storage(self) -> StorageJSON | None:
|
||||
"""Return the StorageJSON object for this entry."""
|
||||
if not self._loaded_storage:
|
||||
self._storage = StorageJSON.load(ext_storage_path(self.filename))
|
||||
self._loaded_storage = True
|
||||
return self._storage
|
||||
|
||||
@property
|
||||
def address(self):
|
||||
"""Return the address of this entry."""
|
||||
if self.storage is None:
|
||||
return None
|
||||
return self.storage.address
|
||||
|
||||
@property
|
||||
def no_mdns(self):
|
||||
"""Return the no_mdns of this entry."""
|
||||
if self.storage is None:
|
||||
return None
|
||||
return self.storage.no_mdns
|
||||
|
||||
@property
|
||||
def web_port(self):
|
||||
"""Return the web port of this entry."""
|
||||
if self.storage is None:
|
||||
return None
|
||||
return self.storage.web_port
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of this entry."""
|
||||
if self.storage is None:
|
||||
return self.filename.replace(".yml", "").replace(".yaml", "")
|
||||
return self.storage.name
|
||||
|
||||
@property
|
||||
def friendly_name(self):
|
||||
"""Return the friendly name of this entry."""
|
||||
if self.storage is None:
|
||||
return self.name
|
||||
return self.storage.friendly_name
|
||||
|
||||
@property
|
||||
def comment(self):
|
||||
"""Return the comment of this entry."""
|
||||
if self.storage is None:
|
||||
return None
|
||||
return self.storage.comment
|
||||
|
||||
@property
|
||||
def target_platform(self):
|
||||
"""Return the target platform of this entry."""
|
||||
if self.storage is None:
|
||||
return None
|
||||
return self.storage.target_platform
|
||||
|
||||
@property
|
||||
def update_available(self):
|
||||
"""Return if an update is available for this entry."""
|
||||
if self.storage is None:
|
||||
return True
|
||||
return self.update_old != self.update_new
|
||||
|
||||
@property
|
||||
def update_old(self):
|
||||
if self.storage is None:
|
||||
return ""
|
||||
return self.storage.esphome_version or ""
|
||||
|
||||
@property
|
||||
def update_new(self):
|
||||
return const.__version__
|
||||
|
||||
@property
|
||||
def loaded_integrations(self):
|
||||
if self.storage is None:
|
||||
return []
|
||||
return self.storage.loaded_integrations
|
||||
esphome/dashboard/settings.py (new file, 146 lines)
@@ -0,0 +1,146 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hmac
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from esphome import util
|
||||
from esphome.core import CORE
|
||||
from esphome.helpers import get_bool_env
|
||||
from esphome.storage_json import ext_storage_path
|
||||
|
||||
from .entries import DashboardEntry
|
||||
from .util import password_hash
|
||||
|
||||
|
||||
class DashboardSettings:
|
||||
"""Settings for the dashboard."""
|
||||
|
||||
def __init__(self):
|
||||
self.config_dir = ""
|
||||
self.password_hash = ""
|
||||
self.username = ""
|
||||
self.using_password = False
|
||||
self.on_ha_addon = False
|
||||
self.cookie_secret = None
|
||||
self.absolute_config_dir = None
|
||||
self._entry_cache: dict[
|
||||
str, tuple[tuple[int, int, float, int], DashboardEntry]
|
||||
] = {}
|
||||
|
||||
def parse_args(self, args):
|
||||
self.on_ha_addon: bool = args.ha_addon
|
||||
password: str = args.password or os.getenv("PASSWORD", "")
|
||||
if not self.on_ha_addon:
|
||||
self.username: str = args.username or os.getenv("USERNAME", "")
|
||||
self.using_password = bool(password)
|
||||
if self.using_password:
|
||||
self.password_hash = password_hash(password)
|
||||
self.config_dir: str = args.configuration
|
||||
self.absolute_config_dir: Path = Path(self.config_dir).resolve()
|
||||
CORE.config_path = os.path.join(self.config_dir, ".")
|
||||
|
||||
@property
|
||||
def relative_url(self):
|
||||
return os.getenv("ESPHOME_DASHBOARD_RELATIVE_URL", "/")
|
||||
|
||||
@property
|
||||
def status_use_ping(self):
|
||||
return get_bool_env("ESPHOME_DASHBOARD_USE_PING")
|
||||
|
||||
@property
|
||||
def status_use_mqtt(self):
|
||||
return get_bool_env("ESPHOME_DASHBOARD_USE_MQTT")
|
||||
|
||||
@property
|
||||
def using_ha_addon_auth(self):
|
||||
if not self.on_ha_addon:
|
||||
return False
|
||||
return not get_bool_env("DISABLE_HA_AUTHENTICATION")
|
||||
|
||||
@property
|
||||
def using_auth(self):
|
||||
return self.using_password or self.using_ha_addon_auth
|
||||
|
||||
@property
|
||||
def streamer_mode(self):
|
||||
return get_bool_env("ESPHOME_STREAMER_MODE")
|
||||
|
||||
def check_password(self, username, password):
|
||||
if not self.using_auth:
|
||||
return True
|
||||
if username != self.username:
|
||||
return False
|
||||
|
||||
# Compare password in constant running time (to prevent timing attacks)
|
||||
return hmac.compare_digest(self.password_hash, password_hash(password))
|
||||
|
||||
def rel_path(self, *args):
|
||||
joined_path = os.path.join(self.config_dir, *args)
|
||||
# Raises ValueError if not relative to ESPHome config folder
|
||||
Path(joined_path).resolve().relative_to(self.absolute_config_dir)
|
||||
return joined_path
|
||||
|
||||
def list_yaml_files(self) -> list[str]:
|
||||
return util.list_yaml_files([self.config_dir])
|
||||
|
||||
def entries(self) -> list[DashboardEntry]:
|
||||
"""Fetch all dashboard entries, thread-safe."""
|
||||
path_to_cache_key: dict[str, tuple[int, int, float, int]] = {}
|
||||
#
|
||||
# The cache key is (inode, device, mtime, size)
|
||||
# which allows us to avoid locking since it ensures
|
||||
# every iteration of this call will always return the newest
|
||||
# items from disk at the cost of a stat() call on each
|
||||
# file which is much faster than reading the file
|
||||
# for the cache hit case which is the common case.
|
||||
#
|
||||
# Because there is no lock the cache may
|
||||
# get built more than once but that's fine as its still
|
||||
# thread-safe and results in orders of magnitude less
|
||||
# reads from disk than if we did not cache at all and
|
||||
# does not have a lock contention issue.
|
||||
#
|
||||
for file in self.list_yaml_files():
|
||||
try:
|
||||
# Prefer the json storage path if it exists
|
||||
stat = os.stat(ext_storage_path(os.path.basename(file)))
|
||||
except OSError:
|
||||
try:
|
||||
# Fallback to the yaml file if the storage
|
||||
# file does not exist or could not be generated
|
||||
stat = os.stat(file)
|
||||
except OSError:
|
||||
# File was deleted, ignore
|
||||
continue
|
||||
path_to_cache_key[file] = (
|
||||
stat.st_ino,
|
||||
stat.st_dev,
|
||||
stat.st_mtime,
|
||||
stat.st_size,
|
||||
)
|
||||
|
||||
entry_cache = self._entry_cache
|
||||
|
||||
# Remove entries that no longer exist
|
||||
removed: list[str] = []
|
||||
for file in entry_cache:
|
||||
if file not in path_to_cache_key:
|
||||
removed.append(file)
|
||||
|
||||
for file in removed:
|
||||
entry_cache.pop(file)
|
||||
|
||||
dashboard_entries: list[DashboardEntry] = []
|
||||
for file, cache_key in path_to_cache_key.items():
|
||||
if cached_entry := entry_cache.get(file):
|
||||
entry_key, dashboard_entry = cached_entry
|
||||
if entry_key == cache_key:
|
||||
dashboard_entries.append(dashboard_entry)
|
||||
continue
|
||||
|
||||
dashboard_entry = DashboardEntry(file)
|
||||
dashboard_entries.append(dashboard_entry)
|
||||
entry_cache[file] = (cache_key, dashboard_entry)
|
||||
|
||||
return dashboard_entries
|
||||
esphome/dashboard/status/__init__.py (new empty file)
esphome/dashboard/status/mdns.py (new file, 109 lines)
@@ -0,0 +1,109 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from esphome.zeroconf import (
|
||||
ESPHOME_SERVICE_TYPE,
|
||||
AsyncEsphomeZeroconf,
|
||||
DashboardBrowser,
|
||||
DashboardImportDiscovery,
|
||||
DashboardStatus,
|
||||
)
|
||||
|
||||
from ..core import DASHBOARD, list_dashboard_entries
|
||||
|
||||
|
||||
class MDNSStatus:
|
||||
"""Class that updates the mdns status."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the MDNSStatus class."""
|
||||
super().__init__()
|
||||
self.aiozc: AsyncEsphomeZeroconf | None = None
|
||||
# This is the current mdns state for each host (True, False, None)
|
||||
self.host_mdns_state: dict[str, bool | None] = {}
|
||||
# This is the hostnames to filenames mapping
|
||||
self.host_name_to_filename: dict[str, str] = {}
|
||||
self.filename_to_host_name: dict[str, str] = {}
|
||||
# This is a set of host names to track (i.e. no_mdns = false)
self.host_name_with_mdns_enabled: set[str] = set()
|
||||
self._loop = asyncio.get_running_loop()
|
||||
|
||||
def filename_to_host_name_thread_safe(self, filename: str) -> str | None:
|
||||
"""Resolve a filename to an address in a thread-safe manner."""
|
||||
return self.filename_to_host_name.get(filename)
|
||||
|
||||
async def async_resolve_host(self, host_name: str) -> str | None:
|
||||
"""Resolve a host name to an address in a thread-safe manner."""
|
||||
if aiozc := self.aiozc:
|
||||
return await aiozc.async_resolve_host(host_name)
|
||||
return None
|
||||
|
||||
async def async_refresh_hosts(self):
|
||||
"""Refresh the hosts to track."""
|
||||
entries = await self._loop.run_in_executor(None, list_dashboard_entries)
|
||||
host_name_with_mdns_enabled = self.host_name_with_mdns_enabled
|
||||
host_mdns_state = self.host_mdns_state
|
||||
host_name_to_filename = self.host_name_to_filename
|
||||
filename_to_host_name = self.filename_to_host_name
|
||||
ping_result = DASHBOARD.ping_result
|
||||
|
||||
for entry in entries:
|
||||
name = entry.name
|
||||
# If no_mdns is set, remove it from the set
|
||||
if entry.no_mdns:
|
||||
host_name_with_mdns_enabled.discard(name)
|
||||
continue
|
||||
|
||||
# We are tracking this host
|
||||
host_name_with_mdns_enabled.add(name)
|
||||
filename = entry.filename
|
||||
|
||||
# If we just adopted/imported this host, we likely
|
||||
# already have a state for it, so we should make sure
|
||||
# to set it so the dashboard shows it as online
|
||||
if name in host_mdns_state:
|
||||
ping_result[filename] = host_mdns_state[name]
|
||||
|
||||
# Make sure the mapping is up to date
|
||||
# so when we get an mdns update we can map it back
|
||||
# to the filename
|
||||
host_name_to_filename[name] = filename
|
||||
filename_to_host_name[filename] = name
|
||||
|
||||
async def async_run(self) -> None:
|
||||
dashboard = DASHBOARD
|
||||
|
||||
aiozc = AsyncEsphomeZeroconf()
|
||||
self.aiozc = aiozc
|
||||
host_mdns_state = self.host_mdns_state
|
||||
host_name_to_filename = self.host_name_to_filename
|
||||
host_name_with_mdns_enabled = self.host_name_with_mdns_enabled
|
||||
ping_result = dashboard.ping_result
|
||||
|
||||
def on_update(dat: dict[str, bool | None]) -> None:
|
||||
"""Update the global PING_RESULT dict."""
|
||||
for name, result in dat.items():
|
||||
host_mdns_state[name] = result
|
||||
if name in host_name_with_mdns_enabled:
|
||||
filename = host_name_to_filename[name]
|
||||
ping_result[filename] = result
|
||||
|
||||
stat = DashboardStatus(on_update)
|
||||
imports = DashboardImportDiscovery()
|
||||
dashboard.import_result = imports.import_state
|
||||
|
||||
browser = DashboardBrowser(
|
||||
aiozc.zeroconf,
|
||||
ESPHOME_SERVICE_TYPE,
|
||||
[stat.browser_callback, imports.browser_callback],
|
||||
)
|
||||
|
||||
while not dashboard.stop_event.is_set():
|
||||
await self.async_refresh_hosts()
|
||||
await dashboard.ping_request.wait()
|
||||
dashboard.ping_request.clear()
|
||||
|
||||
await browser.async_cancel()
|
||||
await aiozc.async_close()
|
||||
self.aiozc = None
|
||||
esphome/dashboard/status/mqtt.py (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import binascii
|
||||
import json
|
||||
import os
|
||||
import threading
|
||||
|
||||
from esphome import mqtt
|
||||
|
||||
from ..core import DASHBOARD, list_dashboard_entries
|
||||
|
||||
|
||||
class MqttStatusThread(threading.Thread):
|
||||
"""Status thread to get the status of the devices via MQTT."""
|
||||
|
||||
def run(self) -> None:
|
||||
"""Run the status thread."""
|
||||
dashboard = DASHBOARD
|
||||
entries = list_dashboard_entries()
|
||||
|
||||
config = mqtt.config_from_env()
|
||||
topic = "esphome/discover/#"
|
||||
|
||||
def on_message(client, userdata, msg):
|
||||
nonlocal entries
|
||||
|
||||
payload = msg.payload.decode(errors="backslashreplace")
|
||||
if len(payload) > 0:
|
||||
data = json.loads(payload)
|
||||
if "name" not in data:
|
||||
return
|
||||
for entry in entries:
|
||||
if entry.name == data["name"]:
|
||||
dashboard.ping_result[entry.filename] = True
|
||||
return
|
||||
|
||||
def on_connect(client, userdata, flags, return_code):
|
||||
client.publish("esphome/discover", None, retain=False)
|
||||
|
||||
mqttid = str(binascii.hexlify(os.urandom(6)).decode())
|
||||
|
||||
client = mqtt.prepare(
|
||||
config,
|
||||
[topic],
|
||||
on_message,
|
||||
on_connect,
|
||||
None,
|
||||
None,
|
||||
f"esphome-dashboard-{mqttid}",
|
||||
)
|
||||
client.loop_start()
|
||||
|
||||
while not dashboard.stop_event.wait(2):
|
||||
# update entries
|
||||
entries = list_dashboard_entries()
|
||||
|
||||
# will be set to true on on_message
|
||||
for entry in entries:
|
||||
if entry.no_mdns:
|
||||
dashboard.ping_result[entry.filename] = False
|
||||
|
||||
client.publish("esphome/discover", None, retain=False)
|
||||
dashboard.mqtt_ping_request.wait()
|
||||
dashboard.mqtt_ping_request.clear()
|
||||
|
||||
client.disconnect()
|
||||
client.loop_stop()
|
||||
esphome/dashboard/status/ping.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from __future__ import annotations

import asyncio
import os
from typing import cast

from ..core import DASHBOARD
from ..entries import DashboardEntry
from ..core import list_dashboard_entries
from ..util import chunked


async def _async_ping_host(host: str) -> bool:
    """Ping a host."""
    ping_command = ["ping", "-n" if os.name == "nt" else "-c", "1"]
    process = await asyncio.create_subprocess_exec(
        *ping_command,
        host,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.DEVNULL,
        stderr=asyncio.subprocess.DEVNULL,
        close_fds=False,
    )
    await process.wait()
    return process.returncode == 0


class PingStatus:
    def __init__(self) -> None:
        """Initialize the PingStatus class."""
        super().__init__()
        self._loop = asyncio.get_running_loop()

    async def async_run(self) -> None:
        """Run the ping status."""
        dashboard = DASHBOARD

        while not dashboard.stop_event.is_set():
            # Only ping if the dashboard is open
            await dashboard.ping_request.wait()
            dashboard.ping_result.clear()
            entries = await self._loop.run_in_executor(None, list_dashboard_entries)
            to_ping: list[DashboardEntry] = [
                entry for entry in entries if entry.address is not None
            ]
            for ping_group in chunked(to_ping, 16):
                ping_group = cast(list[DashboardEntry], ping_group)
                results = await asyncio.gather(
                    *(_async_ping_host(entry.address) for entry in ping_group),
                    return_exceptions=True,
                )
                for entry, result in zip(ping_group, results):
                    if isinstance(result, Exception):
                        result = False
                    elif isinstance(result, BaseException):
                        raise result
                    dashboard.ping_result[entry.filename] = result
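
The loop above leans on two idioms: chunked() caps concurrency at 16 pings at a time, and return_exceptions=True turns a failed ping into an "offline" result instead of aborting the whole batch. A small runnable sketch of that pattern, with fake_ping standing in for _async_ping_host and made-up host names:

import asyncio

async def fake_ping(host: str) -> bool:
    # Stand-in for _async_ping_host(); raises for one host to show error handling.
    if host == "unreachable.local":
        raise OSError("no route to host")
    return True

async def main() -> None:
    hosts = ["a.local", "unreachable.local", "b.local"]
    results = await asyncio.gather(
        *(fake_ping(h) for h in hosts), return_exceptions=True
    )
    cleaned = {h: r is True for h, r in zip(hosts, results)}
    print(cleaned)  # {'a.local': True, 'unreachable.local': False, 'b.local': True}

asyncio.run(main())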

@@ -1,5 +1,9 @@
import hashlib
import unicodedata
from collections.abc import Iterable
from functools import partial
from itertools import islice
from typing import Any

from esphome.const import ALLOWED_NAME_CHARS

@@ -30,3 +34,19 @@ def friendly_name_slugify(value):
        .strip("-")
    )
    return "".join(c for c in value if c in ALLOWED_NAME_CHARS)


def take(take_num: int, iterable: Iterable) -> list[Any]:
    """Return first n items of the iterable as a list.

    From itertools recipes
    """
    return list(islice(iterable, take_num))


def chunked(iterable: Iterable, chunked_num: int) -> Iterable[Any]:
    """Break *iterable* into lists of length *n*.

    From more-itertools
    """
    return iter(partial(take, chunked_num, iter(iterable)), [])
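
Usage note (illustrative only): chunked() lazily yields fixed-size lists with a shorter final list, which is exactly how the ping status loop limits how many hosts it pings at once.

assert list(chunked(range(5), 2)) == [[0, 1], [2, 3], [4]]
assert take(3, "abcdef") == ["a", "b", "c"]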

1069  esphome/dashboard/web_server.py  Normal file
File diff suppressed because it is too large

@@ -1,10 +1,13 @@
from __future__ import annotations

import gzip
import hashlib
import io
import logging
import random
import socket
import sys
import time
import gzip

from esphome.core import EsphomeError
from esphome.helpers import is_ip_address, resolve_ip_address

@@ -40,6 +43,10 @@ MAGIC_BYTES = [0x6C, 0x26, 0xF7, 0x5C, 0x45]

FEATURE_SUPPORTS_COMPRESSION = 0x01


UPLOAD_BLOCK_SIZE = 8192
UPLOAD_BUFFER_SIZE = UPLOAD_BLOCK_SIZE * 8

_LOGGER = logging.getLogger(__name__)

@@ -184,7 +191,9 @@ def send_check(sock, data, msg):
        raise OTAError(f"Error sending {msg}: {err}") from err


def perform_ota(sock, password, file_handle, filename):
def perform_ota(
    sock: socket.socket, password: str, file_handle: io.IOBase, filename: str
) -> None:
    file_contents = file_handle.read()
    file_size = len(file_contents)
    _LOGGER.info("Uploading %s (%s bytes)", filename, file_size)

@@ -254,14 +263,16 @@ def perform_ota(sock, password, file_handle, filename):
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 0)
    # Limit send buffer (usually around 100kB) in order to have progress bar
    # show the actual progress
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 8192)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, UPLOAD_BUFFER_SIZE)
    # Set higher timeout during upload
    sock.settimeout(20.0)
    sock.settimeout(30.0)
    start_time = time.perf_counter()

    offset = 0
    progress = ProgressBar()
    while True:
        chunk = upload_contents[offset : offset + 1024]
        chunk = upload_contents[offset : offset + UPLOAD_BLOCK_SIZE]
        if not chunk:
            break
        offset += len(chunk)

@@ -277,8 +288,9 @@ def perform_ota(sock, password, file_handle, filename):

    # Enable nodelay for last checks
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    duration = time.perf_counter() - start_time

    _LOGGER.info("Waiting for result...")
    _LOGGER.info("Upload took %.2f seconds, waiting for result...", duration)

    receive_exactly(sock, 1, "receive OK", RESPONSE_RECEIVE_OK)
    receive_exactly(sock, 1, "Update end", RESPONSE_UPDATE_END_OK)
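
As a rough aside on the new constants (arithmetic illustration only, not part of the diff): each write now sends an 8 KiB block and the kernel send buffer is capped at eight blocks, i.e. 64 KiB, which keeps the progress bar close to what has actually left the machine.

UPLOAD_BLOCK_SIZE = 8192
UPLOAD_BUFFER_SIZE = UPLOAD_BLOCK_SIZE * 8  # 65536 bytes = 64 KiB
payload = bytes(20000)  # hypothetical firmware blob
chunks = [
    payload[o : o + UPLOAD_BLOCK_SIZE]
    for o in range(0, len(payload), UPLOAD_BLOCK_SIZE)
]
assert [len(c) for c in chunks] == [8192, 8192, 3616]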

@@ -23,6 +23,14 @@ from esphome.core import (
from esphome.helpers import add_class_to_obj
from esphome.util import OrderedDict, filter_yaml_files

try:
    from yaml import CSafeLoader as FastestAvailableSafeLoader
except ImportError:
    from yaml import ( # type: ignore[assignment]
        SafeLoader as FastestAvailableSafeLoader,
    )


_LOGGER = logging.getLogger(__name__)

# Mostly copied from Home Assistant because that code works fine and

@@ -89,7 +97,7 @@ def _add_data_ref(fn):
    return wrapped


class ESPHomeLoader(yaml.SafeLoader):
class ESPHomeLoader(FastestAvailableSafeLoader):
    """Loader class that keeps track of line numbers."""

    @_add_data_ref
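
The try/except above prefers CSafeLoader, PyYAML's libyaml-backed C loader, and falls back to the pure-Python SafeLoader when libyaml is not available. A quick way to check which one an installation actually gets (illustrative, not part of the diff):

import yaml

# True when PyYAML was built against libyaml, so CSafeLoader exists.
print(getattr(yaml, "__with_libyaml__", False))
print(hasattr(yaml, "CSafeLoader"))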

@@ -1,22 +1,21 @@
from __future__ import annotations

import asyncio
import logging
from dataclasses import dataclass
from typing import Callable

from zeroconf import (
    IPVersion,
    ServiceBrowser,
    ServiceInfo,
    ServiceStateChange,
    Zeroconf,
)
from zeroconf import IPVersion, ServiceInfo, ServiceStateChange, Zeroconf
from zeroconf.asyncio import AsyncServiceBrowser, AsyncServiceInfo, AsyncZeroconf

from esphome.storage_json import StorageJSON, ext_storage_path

_LOGGER = logging.getLogger(__name__)


_BACKGROUND_TASKS: set[asyncio.Task] = set()


class HostResolver(ServiceInfo):
    """Resolve a host name to an IP address."""

@@ -65,7 +64,7 @@ class DiscoveredImport:
    network: str


class DashboardBrowser(ServiceBrowser):
class DashboardBrowser(AsyncServiceBrowser):
    """A class to browse for ESPHome nodes."""

@@ -94,7 +93,28 @@ class DashboardImportDiscovery:
            # Ignore updates for devices that are not in the import state
            return

        info = zeroconf.get_service_info(service_type, name)
        info = AsyncServiceInfo(
            service_type,
            name,
        )
        if info.load_from_cache(zeroconf):
            self._process_service_info(name, info)
            return
        task = asyncio.create_task(
            self._async_process_service_info(zeroconf, info, service_type, name)
        )
        _BACKGROUND_TASKS.add(task)
        task.add_done_callback(_BACKGROUND_TASKS.discard)

    async def _async_process_service_info(
        self, zeroconf: Zeroconf, info: AsyncServiceInfo, service_type: str, name: str
    ) -> None:
        """Process a service info."""
        if await info.async_request(zeroconf):
            self._process_service_info(name, info)

    def _process_service_info(self, name: str, info: ServiceInfo) -> None:
        """Process a service info."""
        _LOGGER.debug("-> resolved info: %s", info)
        if info is None:
            return

@@ -146,13 +166,32 @@ class DashboardImportDiscovery:
        )


class EsphomeZeroconf(Zeroconf):
    def resolve_host(self, host: str, timeout=3.0):
        """Resolve a host name to an IP address."""
def _make_host_resolver(host: str) -> HostResolver:
    """Create a new HostResolver for the given host name."""
    name = host.partition(".")[0]
    info = HostResolver(f"{name}.{ESPHOME_SERVICE_TYPE}", ESPHOME_SERVICE_TYPE)
    if (info.load_from_cache(self) or info.request(self, timeout * 1000)) and (
        addresses := info.ip_addresses_by_version(IPVersion.V4Only)
    ):
    info = HostResolver(ESPHOME_SERVICE_TYPE, f"{name}.{ESPHOME_SERVICE_TYPE}")
    return info


class EsphomeZeroconf(Zeroconf):
    def resolve_host(self, host: str, timeout: float = 3.0) -> str | None:
        """Resolve a host name to an IP address."""
        info = _make_host_resolver(host)
        if (
            info.load_from_cache(self)
            or (timeout and info.request(self, timeout * 1000))
        ) and (addresses := info.ip_addresses_by_version(IPVersion.V4Only)):
            return str(addresses[0])
        return None


class AsyncEsphomeZeroconf(AsyncZeroconf):
    async def async_resolve_host(self, host: str, timeout: float = 3.0) -> str | None:
        """Resolve a host name to an IP address."""
        info = _make_host_resolver(host)
        if (
            info.load_from_cache(self.zeroconf)
            or (timeout and await info.async_request(self.zeroconf, timeout * 1000))
        ) and (addresses := info.ip_addresses_by_version(IPVersion.V4Only)):
            return str(addresses[0])
        return None
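
A hedged usage sketch of the new async resolver, assuming the class is importable from esphome.zeroconf as on this branch; the node name is made up, and the call returns the first IPv4 address from cache or a live mDNS query, or None:

import asyncio

from esphome.zeroconf import AsyncEsphomeZeroconf

async def main() -> None:
    aiozc = AsyncEsphomeZeroconf()
    try:
        address = await aiozc.async_resolve_host("livingroom")  # hypothetical node
        print(address)  # e.g. "192.168.1.23" or None
    finally:
        await aiozc.async_close()

asyncio.run(main())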

@@ -159,7 +159,7 @@ board_build.filesystem_size = 0.5m
platform = https://github.com/maxgerhardt/platform-raspberrypi.git
platform_packages =
    ; earlephilhower/framework-arduinopico@~1.20602.0 ; Cannot use the platformio package until old releases stop getting deleted
    earlephilhower/framework-arduinopico@https://github.com/earlephilhower/arduino-pico/releases/download/3.4.0/rp2040-3.4.0.zip
    earlephilhower/framework-arduinopico@https://github.com/earlephilhower/arduino-pico/releases/download/3.6.0/rp2040-3.6.0.zip

framework = arduino
lib_deps =

@@ -10,8 +10,8 @@ platformio==6.1.11 # When updating platformio, also update Dockerfile
esptool==4.6.2
click==8.1.7
esphome-dashboard==20231107.0
aioesphomeapi==18.2.4
zeroconf==0.120.0
aioesphomeapi==18.4.1
zeroconf==0.127.0

# esp-idf requires this, but doesn't bundle it by default
# https://github.com/espressif/esp-idf/blob/220590d599e134d7a5e7f1e683cc4550349ffbf8/requirements.txt#L24

@@ -1,6 +1,6 @@
pylint==2.17.6
flake8==6.1.0 # also change in .pre-commit-config.yaml when updating
black==23.10.1 # also change in .pre-commit-config.yaml when updating
black==23.11.0 # also change in .pre-commit-config.yaml when updating
pyupgrade==3.15.0 # also change in .pre-commit-config.yaml when updating
pre-commit

@@ -45,7 +45,7 @@ def sub(path, pattern, repl, expected_count=1):
    content, count = re.subn(pattern, repl, content, flags=re.MULTILINE)
    if expected_count is not None:
        assert count == expected_count, f"Pattern {pattern} replacement failed!"
    with open(path, "wt") as fh:
    with open(path, "w") as fh:
        fh.write(content)

@@ -3050,6 +3050,9 @@ remote_receiver:
  on_coolix:
    then:
      delay: !lambda "return x.first + x.second;"
  on_rc_switch:
    then:
      delay: !lambda "return uint32_t(x.code) + x.protocol;"

status_led:
  pin: GPIO2

@@ -425,6 +425,15 @@ binary_sensor:
        input: true
      inverted: false

  - platform: gpio
    name: XL9535 Pin 17
    pin:
      xl9535: xl9535_hub
      number: 17
      mode:
        input: true
      inverted: false

climate:
  - platform: tuya
    id: tuya_climate

@@ -1,4 +1,4 @@
from typing import Iterator
from collections.abc import Iterator

import math

@@ -1,5 +1,5 @@
import pytest
from mock import Mock
from unittest.mock import Mock

from esphome import cpp_helpers as ch
from esphome import const