1
0
mirror of https://github.com/esphome/esphome.git synced 2025-11-03 16:41:50 +00:00

Compare commits

..

1 Commits

Author SHA1 Message Date
Jesse Hills
f07ea6dcda Add raw bus voltage log 2023-11-10 16:30:15 +13:00
61 changed files with 1827 additions and 2830 deletions

View File

@@ -1,97 +0,0 @@
name: Build Image
inputs:
platform:
description: "Platform to build for"
required: true
example: "linux/amd64"
target:
description: "Target to build"
required: true
example: "docker"
baseimg:
description: "Base image type"
required: true
example: "docker"
suffix:
description: "Suffix to add to tags"
required: true
version:
description: "Version to build"
required: true
example: "2023.12.0"
runs:
using: "composite"
steps:
- name: Generate short tags
id: tags
shell: bash
run: |
output=$(docker/generate_tags.py \
--tag "${{ inputs.version }}" \
--suffix "${{ inputs.suffix }}")
echo $output
for l in $output; do
echo $l >> $GITHUB_OUTPUT
done
- name: Build and push to ghcr by digest
id: build-ghcr
uses: docker/build-push-action@v5.0.0
with:
context: .
file: ./docker/Dockerfile
platforms: ${{ inputs.platform }}
target: ${{ inputs.target }}
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
BASEIMGTYPE=${{ inputs.baseimg }}
BUILD_VERSION=${{ inputs.version }}
outputs: |
type=image,name=ghcr.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true
- name: Export ghcr digests
shell: bash
run: |
mkdir -p /tmp/digests/${{ inputs.target }}/ghcr
digest="${{ steps.build-ghcr.outputs.digest }}"
touch "/tmp/digests/${{ inputs.target }}/ghcr/${digest#sha256:}"
- name: Upload ghcr digest
uses: actions/upload-artifact@v3.1.3
with:
name: digests-${{ inputs.target }}-ghcr
path: /tmp/digests/${{ inputs.target }}/ghcr/*
if-no-files-found: error
retention-days: 1
- name: Build and push to dockerhub by digest
id: build-dockerhub
uses: docker/build-push-action@v5.0.0
with:
context: .
file: ./docker/Dockerfile
platforms: ${{ inputs.platform }}
target: ${{ inputs.target }}
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
BASEIMGTYPE=${{ inputs.baseimg }}
BUILD_VERSION=${{ inputs.version }}
outputs: |
type=image,name=docker.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true
- name: Export dockerhub digests
shell: bash
run: |
mkdir -p /tmp/digests/${{ inputs.target }}/dockerhub
digest="${{ steps.build-dockerhub.outputs.digest }}"
touch "/tmp/digests/${{ inputs.target }}/dockerhub/${digest#sha256:}"
- name: Upload dockerhub digest
uses: actions/upload-artifact@v3.1.3
with:
name: digests-${{ inputs.target }}-dockerhub
path: /tmp/digests/${{ inputs.target }}/dockerhub/*
if-no-files-found: error
retention-days: 1

View File

@@ -63,20 +63,30 @@ jobs:
run: twine upload dist/*
deploy-docker:
name: Build ESPHome ${{ matrix.platform }}
name: Build and publish ESPHome ${{ matrix.image.title}}
if: github.repository == 'esphome/esphome'
permissions:
contents: read
packages: write
runs-on: ubuntu-latest
continue-on-error: ${{ matrix.image.title == 'lint' }}
needs: [init]
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm/v7
- linux/arm64
image:
- title: "ha-addon"
suffix: "hassio"
target: "hassio"
baseimg: "hassio"
- title: "docker"
suffix: ""
target: "docker"
baseimg: "docker"
- title: "lint"
suffix: "lint"
target: "lint"
baseimg: "docker"
steps:
- uses: actions/checkout@v4.1.1
- name: Set up Python
@@ -87,7 +97,6 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0
- name: Set up QEMU
if: matrix.platform != 'linux/amd64'
uses: docker/setup-qemu-action@v3.0.0
- name: Log in to docker hub
@@ -102,105 +111,34 @@ jobs:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build docker
uses: ./.github/actions/build-image
with:
platform: ${{ matrix.platform }}
target: docker
baseimg: docker
suffix: ""
version: ${{ needs.init.outputs.tag }}
- name: Build ha-addon
uses: ./.github/actions/build-image
with:
platform: ${{ matrix.platform }}
target: hassio
baseimg: hassio
suffix: "hassio"
version: ${{ needs.init.outputs.tag }}
- name: Build lint
uses: ./.github/actions/build-image
with:
platform: ${{ matrix.platform }}
target: lint
baseimg: docker
suffix: lint
version: ${{ needs.init.outputs.tag }}
deploy-manifest:
name: Publish ESPHome ${{ matrix.image.title }} to ${{ matrix.registry }}
runs-on: ubuntu-latest
needs:
- init
- deploy-docker
if: github.repository == 'esphome/esphome'
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
image:
- title: "ha-addon"
target: "hassio"
suffix: "hassio"
- title: "docker"
target: "docker"
suffix: ""
- title: "lint"
target: "lint"
suffix: "lint"
registry:
- ghcr
- dockerhub
steps:
- uses: actions/checkout@v4.1.1
- name: Download digests
uses: actions/download-artifact@v3.0.2
with:
name: digests-${{ matrix.image.target }}-${{ matrix.registry }}
path: /tmp/digests
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0
- name: Log in to docker hub
if: matrix.registry == 'dockerhub'
uses: docker/login-action@v3.0.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
if: matrix.registry == 'ghcr'
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate short tags
id: tags
run: |
output=$(docker/generate_tags.py \
docker/generate_tags.py \
--tag "${{ needs.init.outputs.tag }}" \
--suffix "${{ matrix.image.suffix }}" \
--registry "${{ matrix.registry }}")
echo $output
for l in $output; do
echo $l >> $GITHUB_OUTPUT
done
--suffix "${{ matrix.image.suffix }}"
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -Rcnr 'inputs | . / "," | map("-t " + .) | join(" ")' <<< "${{ steps.tags.outputs.tags}}") \
$(printf '${{ steps.tags.outputs.image }}@sha256:%s ' *)
- name: Build and push
uses: docker/build-push-action@v5.0.0
with:
context: .
file: ./docker/Dockerfile
platforms: linux/amd64,linux/arm/v7,linux/arm64
target: ${{ matrix.image.target }}
push: true
# yamllint disable rule:line-length
cache-from: type=registry,ref=ghcr.io/${{ steps.tags.outputs.image }}:cache-${{ steps.tags.outputs.channel }}
cache-to: type=registry,ref=ghcr.io/${{ steps.tags.outputs.image }}:cache-${{ steps.tags.outputs.channel }},mode=max
# yamllint enable rule:line-length
tags: ${{ steps.tags.outputs.tags }}
build-args: |
BASEIMGTYPE=${{ matrix.image.baseimg }}
BUILD_VERSION=${{ needs.init.outputs.tag }}
deploy-ha-addon-repo:
if: github.repository == 'esphome/esphome' && github.event_name == 'release'
runs-on: ubuntu-latest
needs: [deploy-manifest]
needs: [deploy-docker]
steps:
- name: Trigger Workflow
uses: actions/github-script@v6.4.1

View File

@@ -3,7 +3,7 @@
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.11.0
rev: 23.10.1
hooks:
- id: black
args:

View File

@@ -68,7 +68,7 @@ ENV \
# See: https://unix.stackexchange.com/questions/553743/correct-way-to-add-lib-ld-linux-so-3-in-debian
RUN \
if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
ln -s /lib/arm-linux-gnueabihf/ld-linux-armhf.so.3 /lib/ld-linux.so.3; \
ln -s /lib/arm-linux-gnueabihf/ld-linux.so.3 /lib/ld-linux.so.3; \
fi
RUN \

View File

@@ -1,14 +1,13 @@
#!/usr/bin/env python3
import re
import os
import argparse
import json
CHANNEL_DEV = "dev"
CHANNEL_BETA = "beta"
CHANNEL_RELEASE = "release"
GHCR = "ghcr"
DOCKERHUB = "dockerhub"
parser = argparse.ArgumentParser()
parser.add_argument(
"--tag",
@@ -22,31 +21,21 @@ parser.add_argument(
required=True,
help="The suffix of the tag.",
)
parser.add_argument(
"--registry",
type=str,
choices=[GHCR, DOCKERHUB],
required=False,
action="append",
help="The registry to build tags for.",
)
def main():
args = parser.parse_args()
# detect channel from tag
match = re.match(r"^(\d+\.\d+)(?:\.\d+)(?:(b\d+)|(-dev\d+))?$", args.tag)
match = re.match(r"^(\d+\.\d+)(?:\.\d+)?(b\d+)?$", args.tag)
major_minor_version = None
if match is None: # eg 2023.12.0-dev20231109-testbranch
channel = None # Ran with custom tag for a branch etc
elif match.group(3) is not None: # eg 2023.12.0-dev20231109
if match is None:
channel = CHANNEL_DEV
elif match.group(2) is not None: # eg 2023.12.0b1
channel = CHANNEL_BETA
else: # eg 2023.12.0
elif match.group(2) is None:
major_minor_version = match.group(1)
channel = CHANNEL_RELEASE
else:
channel = CHANNEL_BETA
tags_to_push = [args.tag]
if channel == CHANNEL_DEV:
@@ -64,28 +53,15 @@ def main():
suffix = f"-{args.suffix}" if args.suffix else ""
image_name = f"esphome/esphome{suffix}"
with open(os.environ["GITHUB_OUTPUT"], "w") as f:
print(f"channel={channel}", file=f)
print(f"image=esphome/esphome{suffix}", file=f)
full_tags = []
print(f"channel={channel}")
if args.registry is None:
args.registry = [GHCR, DOCKERHUB]
elif len(args.registry) == 1:
if GHCR in args.registry:
print(f"image=ghcr.io/{image_name}")
if DOCKERHUB in args.registry:
print(f"image=docker.io/{image_name}")
print(f"image_name={image_name}")
full_tags = []
for tag in tags_to_push:
if GHCR in args.registry:
full_tags += [f"ghcr.io/{image_name}:{tag}"]
if DOCKERHUB in args.registry:
full_tags += [f"docker.io/{image_name}:{tag}"]
print(f"tags={','.join(full_tags)}")
for tag in tags_to_push:
full_tags += [f"ghcr.io/esphome/esphome{suffix}:{tag}"]
full_tags += [f"esphome/esphome{suffix}:{tag}"]
print(f"tags={','.join(full_tags)}", file=f)
if __name__ == "__main__":

View File

@@ -514,7 +514,7 @@ def command_clean(args, config):
def command_dashboard(args):
from esphome.dashboard import dashboard
return dashboard.start_dashboard(args)
return dashboard.start_web_server(args)
def command_update_all(args):

View File

@@ -1,29 +1,23 @@
from __future__ import annotations
import asyncio
import logging
from datetime import datetime
from typing import Any
from typing import Optional
from aioesphomeapi import APIClient
from aioesphomeapi.api_pb2 import SubscribeLogsResponse
from aioesphomeapi.log_runner import async_run
from esphome.const import CONF_KEY, CONF_PASSWORD, CONF_PORT, __version__
from esphome.core import CORE
from aioesphomeapi import APIClient, ReconnectLogic, APIConnectionError, LogLevel
import zeroconf
from esphome.const import CONF_KEY, CONF_PORT, CONF_PASSWORD, __version__
from esphome.util import safe_print
from . import CONF_ENCRYPTION
_LOGGER = logging.getLogger(__name__)
async def async_run_logs(config: dict[str, Any], address: str) -> None:
"""Run the logs command in the event loop."""
async def async_run_logs(config, address):
conf = config["api"]
name = config["esphome"]["name"]
port: int = int(conf[CONF_PORT])
password: str = conf[CONF_PASSWORD]
noise_psk: str | None = None
noise_psk: Optional[str] = None
if CONF_ENCRYPTION in conf:
noise_psk = conf[CONF_ENCRYPTION][CONF_KEY]
_LOGGER.info("Starting log output from %s using esphome API", address)
@@ -34,27 +28,44 @@ async def async_run_logs(config: dict[str, Any], address: str) -> None:
client_info=f"ESPHome Logs {__version__}",
noise_psk=noise_psk,
)
dashboard = CORE.dashboard
first_connect = True
def on_log(msg: SubscribeLogsResponse) -> None:
"""Handle a new log message."""
time_ = datetime.now()
message: bytes = msg.message
text = message.decode("utf8", "backslashreplace")
if dashboard:
text = text.replace("\033", "\\033")
print(f"[{time_.hour:02}:{time_.minute:02}:{time_.second:02}]{text}")
def on_log(msg):
time_ = datetime.now().time().strftime("[%H:%M:%S]")
text = msg.message.decode("utf8", "backslashreplace")
safe_print(time_ + text)
async def on_connect():
nonlocal first_connect
try:
await cli.subscribe_logs(
on_log,
log_level=LogLevel.LOG_LEVEL_VERY_VERBOSE,
dump_config=first_connect,
)
first_connect = False
except APIConnectionError:
cli.disconnect()
async def on_disconnect(expected_disconnect: bool) -> None:
_LOGGER.warning("Disconnected from API")
zc = zeroconf.Zeroconf()
reconnect = ReconnectLogic(
client=cli,
on_connect=on_connect,
on_disconnect=on_disconnect,
zeroconf_instance=zc,
)
await reconnect.start()
stop = await async_run(cli, on_log, name=name)
try:
await asyncio.Event().wait()
finally:
await stop()
def run_logs(config: dict[str, Any], address: str) -> None:
"""Run the logs command."""
try:
asyncio.run(async_run_logs(config, address))
while True:
await asyncio.sleep(60)
except KeyboardInterrupt:
pass
await reconnect.stop()
zc.close()
def run_logs(config, address):
asyncio.run(async_run_logs(config, address))

View File

@@ -1,37 +1,38 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import climate, sensor, remote_base
from esphome.components import (
climate,
remote_transmitter,
remote_receiver,
sensor,
remote_base,
)
from esphome.components.remote_base import CONF_RECEIVER_ID, CONF_TRANSMITTER_ID
from esphome.const import CONF_SUPPORTS_COOL, CONF_SUPPORTS_HEAT, CONF_SENSOR
DEPENDENCIES = ["remote_transmitter"]
AUTO_LOAD = ["sensor", "remote_base"]
CODEOWNERS = ["@glmnet"]
climate_ir_ns = cg.esphome_ns.namespace("climate_ir")
ClimateIR = climate_ir_ns.class_(
"ClimateIR",
climate.Climate,
cg.Component,
remote_base.RemoteReceiverListener,
remote_base.RemoteTransmittable,
"ClimateIR", climate.Climate, cg.Component, remote_base.RemoteReceiverListener
)
CLIMATE_IR_SCHEMA = (
climate.CLIMATE_SCHEMA.extend(
{
cv.Optional(CONF_SUPPORTS_COOL, default=True): cv.boolean,
cv.Optional(CONF_SUPPORTS_HEAT, default=True): cv.boolean,
cv.Optional(CONF_SENSOR): cv.use_id(sensor.Sensor),
}
)
.extend(cv.COMPONENT_SCHEMA)
.extend(remote_base.REMOTE_TRANSMITTABLE_SCHEMA)
)
CLIMATE_IR_SCHEMA = climate.CLIMATE_SCHEMA.extend(
{
cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(
remote_transmitter.RemoteTransmitterComponent
),
cv.Optional(CONF_SUPPORTS_COOL, default=True): cv.boolean,
cv.Optional(CONF_SUPPORTS_HEAT, default=True): cv.boolean,
cv.Optional(CONF_SENSOR): cv.use_id(sensor.Sensor),
}
).extend(cv.COMPONENT_SCHEMA)
CLIMATE_IR_WITH_RECEIVER_SCHEMA = CLIMATE_IR_SCHEMA.extend(
{
cv.Optional(remote_base.CONF_RECEIVER_ID): cv.use_id(
remote_base.RemoteReceiverBase
cv.Optional(CONF_RECEIVER_ID): cv.use_id(
remote_receiver.RemoteReceiverComponent
),
}
)
@@ -40,11 +41,15 @@ CLIMATE_IR_WITH_RECEIVER_SCHEMA = CLIMATE_IR_SCHEMA.extend(
async def register_climate_ir(var, config):
await cg.register_component(var, config)
await climate.register_climate(var, config)
await remote_base.register_transmittable(var, config)
cg.add(var.set_supports_cool(config[CONF_SUPPORTS_COOL]))
cg.add(var.set_supports_heat(config[CONF_SUPPORTS_HEAT]))
if remote_base.CONF_RECEIVER_ID in config:
await remote_base.register_listener(var, config)
if sensor_id := config.get(CONF_SENSOR):
sens = await cg.get_variable(sensor_id)
cg.add(var.set_sensor(sens))
if receiver_id := config.get(CONF_RECEIVER_ID):
receiver = await cg.get_variable(receiver_id)
cg.add(receiver.register_listener(var))
transmitter = await cg.get_variable(config[CONF_TRANSMITTER_ID])
cg.add(var.set_transmitter(transmitter))

View File

@@ -18,10 +18,7 @@ namespace climate_ir {
Likewise to decode a IR into the AC state, implement
bool RemoteReceiverListener::on_receive(remote_base::RemoteReceiveData data) and return true
*/
class ClimateIR : public Component,
public climate::Climate,
public remote_base::RemoteReceiverListener,
public remote_base::RemoteTransmittable {
class ClimateIR : public climate::Climate, public Component, public remote_base::RemoteReceiverListener {
public:
ClimateIR(float minimum_temperature, float maximum_temperature, float temperature_step = 1.0f,
bool supports_dry = false, bool supports_fan_only = false, std::set<climate::ClimateFanMode> fan_modes = {},
@@ -38,6 +35,9 @@ class ClimateIR : public Component,
void setup() override;
void dump_config() override;
void set_transmitter(remote_transmitter::RemoteTransmitterComponent *transmitter) {
this->transmitter_ = transmitter;
}
void set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; }
void set_supports_heat(bool supports_heat) { this->supports_heat_ = supports_heat; }
void set_sensor(sensor::Sensor *sensor) { this->sensor_ = sensor; }
@@ -64,6 +64,7 @@ class ClimateIR : public Component,
std::set<climate::ClimateSwingMode> swing_modes_ = {};
std::set<climate::ClimatePreset> presets_ = {};
remote_transmitter::RemoteTransmitterComponent *transmitter_;
sensor::Sensor *sensor_{nullptr};
};

View File

@@ -102,7 +102,11 @@ void CoolixClimate::transmit_state() {
}
}
ESP_LOGV(TAG, "Sending coolix code: 0x%06" PRIX32, remote_state);
this->transmit_<remote_base::CoolixProtocol>(remote_state);
auto transmit = this->transmitter_->transmit();
auto *data = transmit.get_data();
remote_base::CoolixProtocol().encode(data, remote_state);
transmit.perform();
}
bool CoolixClimate::on_coolix(climate::Climate *parent, remote_base::RemoteReceiveData data) {

View File

@@ -3,26 +3,23 @@ from typing import Union, Optional
from pathlib import Path
import logging
import os
import esphome.final_validate as fv
from esphome.helpers import copy_file_if_changed, write_file_if_changed, mkdir_p
from esphome.const import (
CONF_ADVANCED,
CONF_BOARD,
CONF_COMPONENTS,
CONF_ESPHOME,
CONF_FRAMEWORK,
CONF_IGNORE_EFUSE_MAC_CRC,
CONF_NAME,
CONF_PATH,
CONF_PLATFORMIO_OPTIONS,
CONF_REF,
CONF_REFRESH,
CONF_SOURCE,
CONF_TYPE,
CONF_URL,
CONF_VARIANT,
CONF_VERSION,
CONF_ADVANCED,
CONF_REFRESH,
CONF_PATH,
CONF_URL,
CONF_REF,
CONF_IGNORE_EFUSE_MAC_CRC,
KEY_CORE,
KEY_FRAMEWORK_VERSION,
KEY_NAME,
@@ -330,32 +327,6 @@ def _detect_variant(value):
return value
def final_validate(config):
if CONF_PLATFORMIO_OPTIONS not in fv.full_config.get()[CONF_ESPHOME]:
return config
pio_flash_size_key = "board_upload.flash_size"
pio_partitions_key = "board_build.partitions"
if (
CONF_PARTITIONS in config
and pio_partitions_key
in fv.full_config.get()[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS]
):
raise cv.Invalid(
f"Do not specify '{pio_partitions_key}' in '{CONF_PLATFORMIO_OPTIONS}' with '{CONF_PARTITIONS}' in esp32"
)
if (
pio_flash_size_key
in fv.full_config.get()[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS]
):
raise cv.Invalid(
f"Please specify {CONF_FLASH_SIZE} within esp32 configuration only"
)
return config
CONF_PLATFORM_VERSION = "platform_version"
ARDUINO_FRAMEWORK_SCHEMA = cv.All(
@@ -369,13 +340,6 @@ ARDUINO_FRAMEWORK_SCHEMA = cv.All(
_arduino_check_versions,
)
def _check_component_type(config):
if config[CONF_SOURCE][CONF_TYPE] == TYPE_LOCAL:
raise cv.Invalid("Local components are not implemented yet.")
return config
CONF_SDKCONFIG_OPTIONS = "sdkconfig_options"
ESP_IDF_FRAMEWORK_SCHEMA = cv.All(
cv.Schema(
@@ -392,18 +356,15 @@ ESP_IDF_FRAMEWORK_SCHEMA = cv.All(
}
),
cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
cv.All(
cv.Schema(
{
cv.Required(CONF_NAME): cv.string_strict,
cv.Required(CONF_SOURCE): cv.SOURCE_SCHEMA,
cv.Optional(CONF_PATH): cv.string,
cv.Optional(CONF_REFRESH, default="1d"): cv.All(
cv.string, cv.source_refresh
),
}
),
_check_component_type,
cv.Schema(
{
cv.Required(CONF_NAME): cv.string_strict,
cv.Required(CONF_SOURCE): cv.SOURCE_SCHEMA,
cv.Optional(CONF_PATH): cv.string,
cv.Optional(CONF_REFRESH, default="1d"): cv.All(
cv.string, cv.source_refresh
),
}
)
),
}
@@ -425,24 +386,10 @@ FRAMEWORK_SCHEMA = cv.typed_schema(
)
FLASH_SIZES = [
"2MB",
"4MB",
"8MB",
"16MB",
"32MB",
]
CONF_FLASH_SIZE = "flash_size"
CONF_PARTITIONS = "partitions"
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.Required(CONF_BOARD): cv.string_strict,
cv.Optional(CONF_FLASH_SIZE, default="4MB"): cv.one_of(
*FLASH_SIZES, upper=True
),
cv.Optional(CONF_PARTITIONS): cv.file_,
cv.Optional(CONF_VARIANT): cv.one_of(*VARIANTS, upper=True),
cv.Optional(CONF_FRAMEWORK, default={}): FRAMEWORK_SCHEMA,
}
@@ -452,12 +399,8 @@ CONFIG_SCHEMA = cv.All(
)
FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate)
async def to_code(config):
cg.add_platformio_option("board", config[CONF_BOARD])
cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE])
cg.add_build_flag("-DUSE_ESP32")
cg.add_define("ESPHOME_BOARD", config[CONF_BOARD])
cg.add_build_flag(f"-DUSE_ESP32_VARIANT_{config[CONF_VARIANT]}")
@@ -507,10 +450,7 @@ async def to_code(config):
add_idf_sdkconfig_option("CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU0", False)
add_idf_sdkconfig_option("CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU1", False)
if CONF_PARTITIONS in config:
cg.add_platformio_option("board_build.partitions", config[CONF_PARTITIONS])
else:
cg.add_platformio_option("board_build.partitions", "partitions.csv")
cg.add_platformio_option("board_build.partitions", "partitions.csv")
for name, value in conf[CONF_SDKCONFIG_OPTIONS].items():
add_idf_sdkconfig_option(name, RawSdkconfigValue(value))
@@ -555,10 +495,7 @@ async def to_code(config):
[f"platformio/framework-arduinoespressif32@{conf[CONF_SOURCE]}"],
)
if CONF_PARTITIONS in config:
cg.add_platformio_option("board_build.partitions", config[CONF_PARTITIONS])
else:
cg.add_platformio_option("board_build.partitions", "partitions.csv")
cg.add_platformio_option("board_build.partitions", "partitions.csv")
cg.add_define(
"USE_ARDUINO_VERSION_CODE",
@@ -568,47 +505,24 @@ async def to_code(config):
)
APP_PARTITION_SIZES = {
"2MB": 0x0C0000, # 768 KB
"4MB": 0x1C0000, # 1792 KB
"8MB": 0x3C0000, # 3840 KB
"16MB": 0x7C0000, # 7936 KB
"32MB": 0xFC0000, # 16128 KB
}
def get_arduino_partition_csv(flash_size):
app_partition_size = APP_PARTITION_SIZES[flash_size]
eeprom_partition_size = 0x1000 # 4 KB
spiffs_partition_size = 0xF000 # 60 KB
app0_partition_start = 0x010000 # 64 KB
app1_partition_start = app0_partition_start + app_partition_size
eeprom_partition_start = app1_partition_start + app_partition_size
spiffs_partition_start = eeprom_partition_start + eeprom_partition_size
partition_csv = f"""\
nvs, data, nvs, 0x9000, 0x5000,
otadata, data, ota, 0xE000, 0x2000,
app0, app, ota_0, 0x{app0_partition_start:X}, 0x{app_partition_size:X},
app1, app, ota_1, 0x{app1_partition_start:X}, 0x{app_partition_size:X},
eeprom, data, 0x99, 0x{eeprom_partition_start:X}, 0x{eeprom_partition_size:X},
spiffs, data, spiffs, 0x{spiffs_partition_start:X}, 0x{spiffs_partition_size:X}
ARDUINO_PARTITIONS_CSV = """\
nvs, data, nvs, 0x009000, 0x005000,
otadata, data, ota, 0x00e000, 0x002000,
app0, app, ota_0, 0x010000, 0x1C0000,
app1, app, ota_1, 0x1D0000, 0x1C0000,
eeprom, data, 0x99, 0x390000, 0x001000,
spiffs, data, spiffs, 0x391000, 0x00F000
"""
return partition_csv
def get_idf_partition_csv(flash_size):
app_partition_size = APP_PARTITION_SIZES[flash_size]
partition_csv = f"""\
IDF_PARTITIONS_CSV = """\
# Name, Type, SubType, Offset, Size, Flags
otadata, data, ota, , 0x2000,
phy_init, data, phy, , 0x1000,
app0, app, ota_0, , 0x{app_partition_size:X},
app1, app, ota_1, , 0x{app_partition_size:X},
nvs, data, nvs, , 0x6D000,
app0, app, ota_0, , 0x1C0000,
app1, app, ota_1, , 0x1C0000,
nvs, data, nvs, , 0x6d000,
"""
return partition_csv
def _format_sdkconfig_val(value: SdkconfigValueType) -> str:
@@ -651,17 +565,13 @@ def copy_files():
if CORE.using_arduino:
write_file_if_changed(
CORE.relative_build_path("partitions.csv"),
get_arduino_partition_csv(
CORE.platformio_options.get("board_upload.flash_size")
),
ARDUINO_PARTITIONS_CSV,
)
if CORE.using_esp_idf:
_write_sdkconfig()
write_file_if_changed(
CORE.relative_build_path("partitions.csv"),
get_idf_partition_csv(
CORE.platformio_options.get("board_upload.flash_size")
),
IDF_PARTITIONS_CSV,
)
# IDF build scripts look for version string to put in the build.
# However, if the build path does not have an initialized git repo,

View File

@@ -3,6 +3,7 @@
#ifdef USE_ARDUINO
#include "esphome/components/remote_base/remote_base.h"
#include "esphome/components/remote_transmitter/remote_transmitter.h"
#include <IRSender.h> // arduino-heatpump library
namespace esphome {
@@ -10,13 +11,14 @@ namespace heatpumpir {
class IRSenderESPHome : public IRSender {
public:
IRSenderESPHome(remote_base::RemoteTransmitterBase *transmitter) : IRSender(0), transmit_(transmitter->transmit()){};
IRSenderESPHome(remote_transmitter::RemoteTransmitterComponent *transmitter)
: IRSender(0), transmit_(transmitter->transmit()){};
void setFrequency(int frequency) override; // NOLINT(readability-identifier-naming)
void space(int space_length) override;
void mark(int mark_length) override;
protected:
remote_base::RemoteTransmitterBase::TransmitCall transmit_;
remote_transmitter::RemoteTransmitterComponent::TransmitCall transmit_;
};
} // namespace heatpumpir

View File

@@ -1,7 +1,7 @@
#include "ina226.h"
#include "esphome/core/log.h"
#include "esphome/core/hal.h"
#include <cinttypes>
#include "esphome/core/hal.h"
#include "esphome/core/log.h"
namespace esphome {
namespace ina226 {
@@ -102,6 +102,7 @@ void INA226Component::update() {
this->status_set_warning();
return;
}
ESP_LOGD(TAG, "Got raw bus voltage: %d", raw_bus_voltage);
float bus_voltage_v = int16_t(raw_bus_voltage) * 0.00125f;
this->bus_voltage_sensor_->publish_state(bus_voltage_v);
}

View File

@@ -68,7 +68,6 @@ void LD2420Component::dump_config() {
ESP_LOGCONFIG(TAG, "LD2420:");
ESP_LOGCONFIG(TAG, " Firmware Version : %7s", this->ld2420_firmware_ver_);
ESP_LOGCONFIG(TAG, "LD2420 Number:");
#ifdef USE_NUMBER
LOG_NUMBER(TAG, " Gate Timeout:", this->gate_timeout_number_);
LOG_NUMBER(TAG, " Gate Max Distance:", this->max_gate_distance_number_);
LOG_NUMBER(TAG, " Gate Min Distance:", this->min_gate_distance_number_);
@@ -77,13 +76,10 @@ void LD2420Component::dump_config() {
LOG_NUMBER(TAG, " Gate Move Threshold:", this->gate_move_threshold_numbers_[gate]);
LOG_NUMBER(TAG, " Gate Still Threshold::", this->gate_still_threshold_numbers_[gate]);
}
#endif
#ifdef USE_BUTTON
LOG_BUTTON(TAG, " Apply Config:", this->apply_config_button_);
LOG_BUTTON(TAG, " Revert Edits:", this->revert_config_button_);
LOG_BUTTON(TAG, " Factory Reset:", this->factory_reset_button_);
LOG_BUTTON(TAG, " Restart Module:", this->restart_module_button_);
#endif
ESP_LOGCONFIG(TAG, "LD2420 Select:");
LOG_SELECT(TAG, " Operating Mode", this->operating_selector_);
if (this->get_firmware_int_(ld2420_firmware_ver_) < CALIBRATE_VERSION_MIN) {
@@ -187,11 +183,9 @@ void LD2420Component::factory_reset_action() {
return;
}
this->set_min_max_distances_timeout(FACTORY_MAX_GATE, FACTORY_MIN_GATE, FACTORY_TIMEOUT);
#ifdef USE_NUMBER
this->gate_timeout_number_->state = FACTORY_TIMEOUT;
this->min_gate_distance_number_->state = FACTORY_MIN_GATE;
this->max_gate_distance_number_->state = FACTORY_MAX_GATE;
#endif
for (uint8_t gate = 0; gate < LD2420_TOTAL_GATES; gate++) {
this->new_config.move_thresh[gate] = FACTORY_MOVE_THRESH[gate];
this->new_config.still_thresh[gate] = FACTORY_STILL_THRESH[gate];

View File

@@ -147,7 +147,7 @@ void MQTTClientComponent::dump_config() {
ESP_LOGCONFIG(TAG, " Availability: '%s'", this->availability_.topic.c_str());
}
}
bool MQTTClientComponent::can_proceed() { return network::is_disabled() || this->is_connected(); }
bool MQTTClientComponent::can_proceed() { return this->is_connected(); }
void MQTTClientComponent::start_dnslookup_() {
for (auto &subscription : this->subscriptions_) {

View File

@@ -1,6 +1,5 @@
#include "my9231.h"
#include "esphome/core/log.h"
#include "esphome/core/helpers.h"
namespace esphome {
namespace my9231 {
@@ -52,11 +51,7 @@ void MY9231OutputComponent::setup() {
MY9231_CMD_SCATTER_APDM | MY9231_CMD_FREQUENCY_DIVIDE_1 | MY9231_CMD_REACTION_FAST | MY9231_CMD_ONE_SHOT_DISABLE;
ESP_LOGV(TAG, " Command: 0x%02X", command);
{
InterruptLock lock;
this->send_dcki_pulses_(32 * this->num_chips_);
this->init_chips_(command);
}
this->init_chips_(command);
ESP_LOGV(TAG, " Chips initialized.");
}
void MY9231OutputComponent::dump_config() {
@@ -71,14 +66,11 @@ void MY9231OutputComponent::loop() {
if (!this->update_)
return;
{
InterruptLock lock;
for (auto pwm_amount : this->pwm_amounts_) {
this->write_word_(pwm_amount, this->bit_depth_);
}
// Send 8 DI pulses. After 8 falling edges, the duty data are store.
this->send_di_pulses_(8);
for (auto pwm_amount : this->pwm_amounts_) {
this->write_word_(pwm_amount, this->bit_depth_);
}
// Send 8 DI pulses. After 8 falling edges, the duty data are store.
this->send_di_pulses_(8);
this->update_ = false;
}
void MY9231OutputComponent::set_channel_value_(uint8_t channel, uint16_t value) {
@@ -100,7 +92,6 @@ void MY9231OutputComponent::init_chips_(uint8_t command) {
// Send 16 DI pulse. After 14 falling edges, the command data are
// stored and after 16 falling edges the duty mode is activated.
this->send_di_pulses_(16);
delayMicroseconds(12);
}
void MY9231OutputComponent::write_word_(uint16_t value, uint8_t bits) {
for (uint8_t i = bits; i > 0; i--) {
@@ -115,13 +106,6 @@ void MY9231OutputComponent::send_di_pulses_(uint8_t count) {
this->pin_di_->digital_write(false);
}
}
void MY9231OutputComponent::send_dcki_pulses_(uint8_t count) {
delayMicroseconds(12);
for (uint8_t i = 0; i < count; i++) {
this->pin_dcki_->digital_write(true);
this->pin_dcki_->digital_write(false);
}
}
} // namespace my9231
} // namespace esphome

View File

@@ -49,7 +49,6 @@ class MY9231OutputComponent : public Component {
void init_chips_(uint8_t command);
void write_word_(uint16_t value, uint8_t bits);
void send_di_pulses_(uint8_t count);
void send_dcki_pulses_(uint8_t count);
GPIOPin *pin_di_;
GPIOPin *pin_dcki_;

View File

@@ -29,14 +29,6 @@ bool is_connected() {
return false;
}
bool is_disabled() {
#ifdef USE_WIFI
if (wifi::global_wifi_component != nullptr)
return wifi::global_wifi_component->is_disabled();
#endif
return false;
}
network::IPAddress get_ip_address() {
#ifdef USE_ETHERNET
if (ethernet::global_eth_component != nullptr)

View File

@@ -8,8 +8,6 @@ namespace network {
/// Return whether the node is connected to the network (through wifi, eth, ...)
bool is_connected();
/// Return whether the network is disabled (only wifi for now)
bool is_disabled();
/// Get the active network hostname
std::string get_use_address();
IPAddress get_ip_address();

View File

@@ -52,9 +52,8 @@ RemoteReceiverTrigger = ns.class_(
"RemoteReceiverTrigger", automation.Trigger, RemoteReceiverListener
)
RemoteTransmitterDumper = ns.class_("RemoteTransmitterDumper")
RemoteTransmittable = ns.class_("RemoteTransmittable")
RemoteTransmitterActionBase = ns.class_(
"RemoteTransmitterActionBase", RemoteTransmittable, automation.Action
"RemoteTransmitterActionBase", automation.Action
)
RemoteReceiverBase = ns.class_("RemoteReceiverBase")
RemoteTransmitterBase = ns.class_("RemoteTransmitterBase")
@@ -69,30 +68,11 @@ def templatize(value):
return cv.Schema(ret)
REMOTE_LISTENER_SCHEMA = cv.Schema(
{
cv.GenerateID(CONF_RECEIVER_ID): cv.use_id(RemoteReceiverBase),
}
)
REMOTE_TRANSMITTABLE_SCHEMA = cv.Schema(
{
cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(RemoteTransmitterBase),
}
)
async def register_listener(var, config):
receiver = await cg.get_variable(config[CONF_RECEIVER_ID])
cg.add(receiver.register_listener(var))
async def register_transmittable(var, config):
transmitter_ = await cg.get_variable(config[CONF_TRANSMITTER_ID])
cg.add(var.set_transmitter(transmitter_))
def register_binary_sensor(name, type, schema):
return BINARY_SENSOR_REGISTRY.register(name, type, schema)
@@ -149,9 +129,10 @@ def validate_repeat(value):
BASE_REMOTE_TRANSMITTER_SCHEMA = cv.Schema(
{
cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(RemoteTransmitterBase),
cv.Optional(CONF_REPEAT): validate_repeat,
}
).extend(REMOTE_TRANSMITTABLE_SCHEMA)
)
def register_action(name, type_, schema):
@@ -162,8 +143,9 @@ def register_action(name, type_, schema):
def decorator(func):
async def new_func(config, action_id, template_arg, args):
transmitter = await cg.get_variable(config[CONF_TRANSMITTER_ID])
var = cg.new_Pvariable(action_id, template_arg)
await register_transmittable(var, config)
cg.add(var.set_parent(transmitter))
if CONF_REPEAT in config:
conf = config[CONF_REPEAT]
template_ = await cg.templatable(conf[CONF_TIMES], args, cg.uint32)
@@ -1557,7 +1539,7 @@ MIDEA_SCHEMA = cv.Schema(
@register_binary_sensor("midea", MideaBinarySensor, MIDEA_SCHEMA)
def midea_binary_sensor(var, config):
cg.add(var.set_data(config[CONF_CODE]))
cg.add(var.set_code(config[CONF_CODE]))
@register_trigger("midea", MideaTrigger, MideaData)

View File

@@ -67,7 +67,20 @@ class MideaProtocol : public RemoteProtocol<MideaData> {
void dump(const MideaData &data) override;
};
DECLARE_REMOTE_PROTOCOL(Midea)
class MideaBinarySensor : public RemoteReceiverBinarySensorBase {
public:
bool matches(RemoteReceiveData src) override {
auto data = MideaProtocol().decode(src);
return data.has_value() && data.value() == this->data_;
}
void set_code(const std::vector<uint8_t> &code) { this->data_ = code; }
protected:
MideaData data_;
};
using MideaTrigger = RemoteReceiverTrigger<MideaProtocol, MideaData>;
using MideaDumper = RemoteReceiverDumper<MideaProtocol, MideaData>;
template<typename... Ts> class MideaAction : public RemoteTransmitterActionBase<Ts...> {
TEMPLATABLE_VALUE(std::vector<uint8_t>, code)

View File

@@ -15,8 +15,6 @@ struct RCSwitchData {
class RCSwitchBase {
public:
using ProtocolData = RCSwitchData;
RCSwitchBase() = default;
RCSwitchBase(uint32_t sync_high, uint32_t sync_low, uint32_t zero_high, uint32_t zero_low, uint32_t one_high,
uint32_t one_low, bool inverted);
@@ -215,7 +213,7 @@ class RCSwitchDumper : public RemoteReceiverDumperBase {
bool dump(RemoteReceiveData src) override;
};
using RCSwitchTrigger = RemoteReceiverTrigger<RCSwitchBase>;
using RCSwitchTrigger = RemoteReceiverTrigger<RCSwitchBase, RCSwitchData>;
} // namespace remote_base
} // namespace esphome

View File

@@ -127,14 +127,6 @@ class RemoteTransmitterBase : public RemoteComponentBase {
this->temp_.reset();
return TransmitCall(this);
}
template<typename Protocol>
void transmit(const typename Protocol::ProtocolData &data, uint32_t send_times = 1, uint32_t send_wait = 0) {
auto call = this->transmit();
Protocol().encode(call.get_data(), data);
call.set_send_times(send_times);
call.set_send_wait(send_wait);
call.perform();
}
protected:
void send_(uint32_t send_times, uint32_t send_wait);
@@ -192,13 +184,12 @@ class RemoteReceiverBinarySensorBase : public binary_sensor::BinarySensorInitial
template<typename T> class RemoteProtocol {
public:
using ProtocolData = T;
virtual void encode(RemoteTransmitData *dst, const ProtocolData &data) = 0;
virtual optional<ProtocolData> decode(RemoteReceiveData src) = 0;
virtual void dump(const ProtocolData &data) = 0;
virtual void encode(RemoteTransmitData *dst, const T &data) = 0;
virtual optional<T> decode(RemoteReceiveData src) = 0;
virtual void dump(const T &data) = 0;
};
template<typename T> class RemoteReceiverBinarySensor : public RemoteReceiverBinarySensorBase {
template<typename T, typename D> class RemoteReceiverBinarySensor : public RemoteReceiverBinarySensorBase {
public:
RemoteReceiverBinarySensor() : RemoteReceiverBinarySensorBase() {}
@@ -210,14 +201,13 @@ template<typename T> class RemoteReceiverBinarySensor : public RemoteReceiverBin
}
public:
void set_data(typename T::ProtocolData data) { data_ = data; }
void set_data(D data) { data_ = data; }
protected:
typename T::ProtocolData data_;
D data_;
};
template<typename T>
class RemoteReceiverTrigger : public Trigger<typename T::ProtocolData>, public RemoteReceiverListener {
template<typename T, typename D> class RemoteReceiverTrigger : public Trigger<D>, public RemoteReceiverListener {
protected:
bool on_receive(RemoteReceiveData src) override {
auto proto = T();
@@ -230,36 +220,28 @@ class RemoteReceiverTrigger : public Trigger<typename T::ProtocolData>, public R
}
};
class RemoteTransmittable {
template<typename... Ts> class RemoteTransmitterActionBase : public Action<Ts...> {
public:
RemoteTransmittable() {}
RemoteTransmittable(RemoteTransmitterBase *transmitter) : transmitter_(transmitter) {}
void set_transmitter(RemoteTransmitterBase *transmitter) { this->transmitter_ = transmitter; }
void set_parent(RemoteTransmitterBase *parent) { this->parent_ = parent; }
protected:
template<typename Protocol>
void transmit_(const typename Protocol::ProtocolData &data, uint32_t send_times = 1, uint32_t send_wait = 0) {
this->transmitter_->transmit<Protocol>(data, send_times, send_wait);
}
RemoteTransmitterBase *transmitter_;
};
TEMPLATABLE_VALUE(uint32_t, send_times);
TEMPLATABLE_VALUE(uint32_t, send_wait);
template<typename... Ts> class RemoteTransmitterActionBase : public RemoteTransmittable, public Action<Ts...> {
TEMPLATABLE_VALUE(uint32_t, send_times)
TEMPLATABLE_VALUE(uint32_t, send_wait)
protected:
void play(Ts... x) override {
auto call = this->transmitter_->transmit();
auto call = this->parent_->transmit();
this->encode(call.get_data(), x...);
call.set_send_times(this->send_times_.value_or(x..., 1));
call.set_send_wait(this->send_wait_.value_or(x..., 0));
call.perform();
}
protected:
virtual void encode(RemoteTransmitData *dst, Ts... x) = 0;
RemoteTransmitterBase *parent_{};
};
template<typename T> class RemoteReceiverDumper : public RemoteReceiverDumperBase {
template<typename T, typename D> class RemoteReceiverDumper : public RemoteReceiverDumperBase {
public:
bool dump(RemoteReceiveData src) override {
auto proto = T();
@@ -272,9 +254,9 @@ template<typename T> class RemoteReceiverDumper : public RemoteReceiverDumperBas
};
#define DECLARE_REMOTE_PROTOCOL_(prefix) \
using prefix##BinarySensor = RemoteReceiverBinarySensor<prefix##Protocol>; \
using prefix##Trigger = RemoteReceiverTrigger<prefix##Protocol>; \
using prefix##Dumper = RemoteReceiverDumper<prefix##Protocol>;
using prefix##BinarySensor = RemoteReceiverBinarySensor<prefix##Protocol, prefix##Data>; \
using prefix##Trigger = RemoteReceiverTrigger<prefix##Protocol, prefix##Data>; \
using prefix##Dumper = RemoteReceiverDumper<prefix##Protocol, prefix##Data>;
#define DECLARE_REMOTE_PROTOCOL(prefix) DECLARE_REMOTE_PROTOCOL_(prefix)
} // namespace remote_base

View File

@@ -74,12 +74,12 @@ def _format_framework_arduino_version(ver: cv.Version) -> str:
# The default/recommended arduino framework version
# - https://github.com/earlephilhower/arduino-pico/releases
# - https://api.registry.platformio.org/v3/packages/earlephilhower/tool/framework-arduinopico
RECOMMENDED_ARDUINO_FRAMEWORK_VERSION = cv.Version(3, 6, 0)
RECOMMENDED_ARDUINO_FRAMEWORK_VERSION = cv.Version(3, 4, 0)
# The platformio/raspberrypi version to use for arduino frameworks
# - https://github.com/platformio/platform-raspberrypi/releases
# - https://api.registry.platformio.org/v3/packages/platformio/platform/raspberrypi
ARDUINO_PLATFORM_VERSION = cv.Version(1, 10, 0)
ARDUINO_PLATFORM_VERSION = cv.Version(1, 9, 0)
def _arduino_check_versions(value):

View File

@@ -33,7 +33,6 @@ MODELS = {
"SH1106_96X16": SSD1306Model.SH1106_MODEL_96_16,
"SH1106_64X48": SSD1306Model.SH1106_MODEL_64_48,
"SH1107_128X64": SSD1306Model.SH1107_MODEL_128_64,
"SH1107_128X128": SSD1306Model.SH1107_MODEL_128_128,
"SSD1305_128X32": SSD1306Model.SSD1305_MODEL_128_32,
"SSD1305_128X64": SSD1306Model.SSD1305_MODEL_128_64,
}
@@ -64,10 +63,8 @@ SSD1306_SCHEMA = display.FULL_DISPLAY_SCHEMA.extend(
cv.Optional(CONF_EXTERNAL_VCC): cv.boolean,
cv.Optional(CONF_FLIP_X, default=True): cv.boolean,
cv.Optional(CONF_FLIP_Y, default=True): cv.boolean,
# Offsets determine shifts of memory location to LCD rows/columns,
# and this family of controllers supports up to 128x128 screens
cv.Optional(CONF_OFFSET_X, default=0): cv.int_range(min=0, max=128),
cv.Optional(CONF_OFFSET_Y, default=0): cv.int_range(min=0, max=128),
cv.Optional(CONF_OFFSET_X, default=0): cv.int_range(min=-32, max=32),
cv.Optional(CONF_OFFSET_Y, default=0): cv.int_range(min=-32, max=32),
cv.Optional(CONF_INVERT, default=False): cv.boolean,
}
).extend(cv.polling_component_schema("1s"))

View File

@@ -35,31 +35,16 @@ static const uint8_t SSD1306_COMMAND_INVERSE_DISPLAY = 0xA7;
static const uint8_t SSD1305_COMMAND_SET_BRIGHTNESS = 0x82;
static const uint8_t SSD1305_COMMAND_SET_AREA_COLOR = 0xD8;
static const uint8_t SH1107_COMMAND_SET_START_LINE = 0xDC;
static const uint8_t SH1107_COMMAND_CHARGE_PUMP = 0xAD;
void SSD1306::setup() {
this->init_internal_(this->get_buffer_length_());
// SH1107 resources
//
// Datasheet v2.3:
// www.displayfuture.com/Display/datasheet/controller/SH1107.pdf
// Adafruit C++ driver:
// github.com/adafruit/Adafruit_SH110x
// Adafruit CircuitPython driver:
// github.com/adafruit/Adafruit_CircuitPython_DisplayIO_SH1107
// Turn off display during initialization (0xAE)
this->command(SSD1306_COMMAND_DISPLAY_OFF);
// If SH1107, use POR defaults (0x50) = divider 1, frequency +0%
if (!this->is_sh1107_()) {
// Set oscillator frequency to 4'b1000 with no clock division (0xD5)
this->command(SSD1306_COMMAND_SET_DISPLAY_CLOCK_DIV);
// Oscillator frequency <= 4'b1000, no clock division
this->command(0x80);
}
// Set oscillator frequency to 4'b1000 with no clock division (0xD5)
this->command(SSD1306_COMMAND_SET_DISPLAY_CLOCK_DIV);
// Oscillator frequency <= 4'b1000, no clock division
this->command(0x80);
// Enable low power display mode for SSD1305 (0xD8)
if (this->is_ssd1305_()) {
@@ -75,26 +60,11 @@ void SSD1306::setup() {
this->command(SSD1306_COMMAND_SET_DISPLAY_OFFSET_Y);
this->command(0x00 + this->offset_y_);
if (this->is_sh1107_()) {
// Set start line at line 0 (0xDC)
this->command(SH1107_COMMAND_SET_START_LINE);
this->command(0x00);
} else {
// Set start line at line 0 (0x40)
this->command(SSD1306_COMMAND_SET_START_LINE | 0x00);
}
// Set start line at line 0 (0x40)
this->command(SSD1306_COMMAND_SET_START_LINE | 0x00);
if (this->is_ssd1305_()) {
// SSD1305 does not have charge pump
} else if (this->is_sh1107_()) {
// Enable charge pump (0xAD)
this->command(SH1107_COMMAND_CHARGE_PUMP);
if (this->external_vcc_) {
this->command(0x8A);
} else {
this->command(0x8B);
}
} else {
// SSD1305 does not have charge pump
if (!this->is_ssd1305_()) {
// Enable charge pump (0x8D)
this->command(SSD1306_COMMAND_CHARGE_PUMP);
if (this->external_vcc_) {
@@ -106,41 +76,34 @@ void SSD1306::setup() {
// Set addressing mode to horizontal (0x20)
this->command(SSD1306_COMMAND_MEMORY_MODE);
if (!this->is_sh1107_()) {
// SH1107 memory mode is a 1 byte command
this->command(0x00);
}
this->command(0x00);
// X flip mode (0xA0, 0xA1)
this->command(SSD1306_COMMAND_SEGRE_MAP | this->flip_x_);
// Y flip mode (0xC0, 0xC8)
this->command(SSD1306_COMMAND_COM_SCAN_INC | (this->flip_y_ << 3));
if (!this->is_sh1107_()) {
// Set pin configuration (0xDA)
this->command(SSD1306_COMMAND_SET_COM_PINS);
switch (this->model_) {
case SSD1306_MODEL_128_32:
case SH1106_MODEL_128_32:
case SSD1306_MODEL_96_16:
case SH1106_MODEL_96_16:
this->command(0x02);
break;
case SSD1306_MODEL_128_64:
case SH1106_MODEL_128_64:
case SSD1306_MODEL_64_48:
case SSD1306_MODEL_64_32:
case SH1106_MODEL_64_48:
case SSD1305_MODEL_128_32:
case SSD1305_MODEL_128_64:
case SSD1306_MODEL_72_40:
this->command(0x12);
break;
case SH1107_MODEL_128_64:
case SH1107_MODEL_128_128:
// Not used, but prevents build warning
break;
}
// Set pin configuration (0xDA)
this->command(SSD1306_COMMAND_SET_COM_PINS);
switch (this->model_) {
case SSD1306_MODEL_128_32:
case SH1106_MODEL_128_32:
case SSD1306_MODEL_96_16:
case SH1106_MODEL_96_16:
this->command(0x02);
break;
case SSD1306_MODEL_128_64:
case SH1106_MODEL_128_64:
case SSD1306_MODEL_64_48:
case SSD1306_MODEL_64_32:
case SH1106_MODEL_64_48:
case SH1107_MODEL_128_64:
case SSD1305_MODEL_128_32:
case SSD1305_MODEL_128_64:
case SSD1306_MODEL_72_40:
this->command(0x12);
break;
}
// Pre-charge period (0xD9)
@@ -155,7 +118,6 @@ void SSD1306::setup() {
this->command(SSD1306_COMMAND_SET_VCOM_DETECT);
switch (this->model_) {
case SH1107_MODEL_128_64:
case SH1107_MODEL_128_128:
this->command(0x35);
break;
case SSD1306_MODEL_72_40:
@@ -187,7 +149,7 @@ void SSD1306::setup() {
this->turn_on();
}
void SSD1306::display() {
if (this->is_sh1106_() || this->is_sh1107_()) {
if (this->is_sh1106_()) {
this->write_display_data();
return;
}
@@ -221,7 +183,6 @@ bool SSD1306::is_sh1106_() const {
return this->model_ == SH1106_MODEL_96_16 || this->model_ == SH1106_MODEL_128_32 ||
this->model_ == SH1106_MODEL_128_64;
}
bool SSD1306::is_sh1107_() const { return this->model_ == SH1107_MODEL_128_64 || this->model_ == SH1107_MODEL_128_128; }
bool SSD1306::is_ssd1305_() const {
return this->model_ == SSD1305_MODEL_128_64 || this->model_ == SSD1305_MODEL_128_64;
}
@@ -263,7 +224,6 @@ void SSD1306::turn_off() {
int SSD1306::get_height_internal() {
switch (this->model_) {
case SH1107_MODEL_128_64:
case SH1107_MODEL_128_128:
return 128;
case SSD1306_MODEL_128_32:
case SSD1306_MODEL_64_32:
@@ -294,7 +254,6 @@ int SSD1306::get_width_internal() {
case SH1106_MODEL_128_64:
case SSD1305_MODEL_128_32:
case SSD1305_MODEL_128_64:
case SH1107_MODEL_128_128:
return 128;
case SSD1306_MODEL_96_16:
case SH1106_MODEL_96_16:

View File

@@ -19,7 +19,6 @@ enum SSD1306Model {
SH1106_MODEL_96_16,
SH1106_MODEL_64_48,
SH1107_MODEL_128_64,
SH1107_MODEL_128_128,
SSD1305_MODEL_128_32,
SSD1305_MODEL_128_64,
};
@@ -59,7 +58,6 @@ class SSD1306 : public PollingComponent, public display::DisplayBuffer {
void init_reset_();
bool is_sh1106_() const;
bool is_sh1107_() const;
bool is_ssd1305_() const;
void draw_absolute_pixel_internal(int x, int y, Color color) override;

View File

@@ -38,19 +38,13 @@ void I2CSSD1306::dump_config() {
}
void I2CSSD1306::command(uint8_t value) { this->write_byte(0x00, value); }
void HOT I2CSSD1306::write_display_data() {
if (this->is_sh1106_() || this->is_sh1107_()) {
if (this->is_sh1106_()) {
uint32_t i = 0;
for (uint8_t page = 0; page < (uint8_t) this->get_height_internal() / 8; page++) {
this->command(0xB0 + page); // row
if (this->is_sh1106_()) {
this->command(0x02); // lower column - 0x02 is historical SH1106 value
} else {
// Other SH1107 drivers use 0x00
// Column values dont change and it seems they can be set only once,
// but we follow SH1106 implementation and resend them
this->command(0x00);
}
this->command(0x10); // higher column
this->command(0x02); // lower column
this->command(0x10); // higher column
for (uint8_t x = 0; x < (uint8_t) this->get_width_internal() / 16; x++) {
uint8_t data[16];
for (uint8_t &j : data)

View File

@@ -36,14 +36,10 @@ void SPISSD1306::command(uint8_t value) {
this->disable();
}
void HOT SPISSD1306::write_display_data() {
if (this->is_sh1106_() || this->is_sh1107_()) {
if (this->is_sh1106_()) {
for (uint8_t y = 0; y < (uint8_t) this->get_height_internal() / 8; y++) {
this->command(0xB0 + y);
if (this->is_sh1106_()) {
this->command(0x02);
} else {
this->command(0x00);
}
this->command(0x02);
this->command(0x10);
this->dc_pin_->digital_write(true);
for (uint8_t x = 0; x < (uint8_t) this->get_width_internal(); x++) {

View File

@@ -18,25 +18,20 @@ DEPENDENCIES = ["api", "microphone"]
CODEOWNERS = ["@jesserockz"]
CONF_ON_END = "on_end"
CONF_ON_ERROR = "on_error"
CONF_ON_INTENT_END = "on_intent_end"
CONF_ON_INTENT_START = "on_intent_start"
CONF_SILENCE_DETECTION = "silence_detection"
CONF_ON_LISTENING = "on_listening"
CONF_ON_START = "on_start"
CONF_ON_STT_END = "on_stt_end"
CONF_ON_STT_VAD_END = "on_stt_vad_end"
CONF_ON_STT_VAD_START = "on_stt_vad_start"
CONF_ON_TTS_END = "on_tts_end"
CONF_ON_TTS_START = "on_tts_start"
CONF_ON_WAKE_WORD_DETECTED = "on_wake_word_detected"
CONF_SILENCE_DETECTION = "silence_detection"
CONF_ON_STT_END = "on_stt_end"
CONF_ON_TTS_START = "on_tts_start"
CONF_ON_TTS_END = "on_tts_end"
CONF_ON_END = "on_end"
CONF_ON_ERROR = "on_error"
CONF_USE_WAKE_WORD = "use_wake_word"
CONF_VAD_THRESHOLD = "vad_threshold"
CONF_AUTO_GAIN = "auto_gain"
CONF_NOISE_SUPPRESSION_LEVEL = "noise_suppression_level"
CONF_AUTO_GAIN = "auto_gain"
CONF_VOLUME_MULTIPLIER = "volume_multiplier"
@@ -93,18 +88,6 @@ CONFIG_SCHEMA = cv.All(
cv.Optional(CONF_ON_CLIENT_DISCONNECTED): automation.validate_automation(
single=True
),
cv.Optional(CONF_ON_INTENT_START): automation.validate_automation(
single=True
),
cv.Optional(CONF_ON_INTENT_END): automation.validate_automation(
single=True
),
cv.Optional(CONF_ON_STT_VAD_START): automation.validate_automation(
single=True
),
cv.Optional(CONF_ON_STT_VAD_END): automation.validate_automation(
single=True
),
}
).extend(cv.COMPONENT_SCHEMA),
)
@@ -194,34 +177,6 @@ async def to_code(config):
config[CONF_ON_CLIENT_DISCONNECTED],
)
if CONF_ON_INTENT_START in config:
await automation.build_automation(
var.get_intent_start_trigger(),
[],
config[CONF_ON_INTENT_START],
)
if CONF_ON_INTENT_END in config:
await automation.build_automation(
var.get_intent_end_trigger(),
[],
config[CONF_ON_INTENT_END],
)
if CONF_ON_STT_VAD_START in config:
await automation.build_automation(
var.get_stt_vad_start_trigger(),
[],
config[CONF_ON_STT_VAD_START],
)
if CONF_ON_STT_VAD_END in config:
await automation.build_automation(
var.get_stt_vad_end_trigger(),
[],
config[CONF_ON_STT_VAD_END],
)
cg.add_define("USE_VOICE_ASSISTANT")

View File

@@ -31,7 +31,7 @@ void VoiceAssistant::setup() {
this->socket_ = socket::socket(AF_INET, SOCK_DGRAM, IPPROTO_IP);
if (socket_ == nullptr) {
ESP_LOGW(TAG, "Could not create socket");
ESP_LOGW(TAG, "Could not create socket.");
this->mark_failed();
return;
}
@@ -69,7 +69,7 @@ void VoiceAssistant::setup() {
ExternalRAMAllocator<uint8_t> speaker_allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
this->speaker_buffer_ = speaker_allocator.allocate(SPEAKER_BUFFER_SIZE);
if (this->speaker_buffer_ == nullptr) {
ESP_LOGW(TAG, "Could not allocate speaker buffer");
ESP_LOGW(TAG, "Could not allocate speaker buffer.");
this->mark_failed();
return;
}
@@ -79,7 +79,7 @@ void VoiceAssistant::setup() {
ExternalRAMAllocator<int16_t> allocator(ExternalRAMAllocator<int16_t>::ALLOW_FAILURE);
this->input_buffer_ = allocator.allocate(INPUT_BUFFER_SIZE);
if (this->input_buffer_ == nullptr) {
ESP_LOGW(TAG, "Could not allocate input buffer");
ESP_LOGW(TAG, "Could not allocate input buffer.");
this->mark_failed();
return;
}
@@ -89,7 +89,7 @@ void VoiceAssistant::setup() {
this->ring_buffer_ = rb_create(BUFFER_SIZE, sizeof(int16_t));
if (this->ring_buffer_ == nullptr) {
ESP_LOGW(TAG, "Could not allocate ring buffer");
ESP_LOGW(TAG, "Could not allocate ring buffer.");
this->mark_failed();
return;
}
@@ -98,7 +98,7 @@ void VoiceAssistant::setup() {
ExternalRAMAllocator<uint8_t> send_allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
this->send_buffer_ = send_allocator.allocate(SEND_BUFFER_SIZE);
if (send_buffer_ == nullptr) {
ESP_LOGW(TAG, "Could not allocate send buffer");
ESP_LOGW(TAG, "Could not allocate send buffer.");
this->mark_failed();
return;
}
@@ -221,8 +221,8 @@ void VoiceAssistant::loop() {
msg.audio_settings = audio_settings;
if (this->api_client_ == nullptr || !this->api_client_->send_voice_assistant_request(msg)) {
ESP_LOGW(TAG, "Could not request start");
this->error_trigger_->trigger("not-connected", "Could not request start");
ESP_LOGW(TAG, "Could not request start.");
this->error_trigger_->trigger("not-connected", "Could not request start.");
this->continuous_ = false;
this->set_state_(State::IDLE, State::IDLE);
break;
@@ -280,7 +280,7 @@ void VoiceAssistant::loop() {
this->speaker_buffer_size_ += len;
}
} else {
ESP_LOGW(TAG, "Receive buffer full");
ESP_LOGW(TAG, "Receive buffer full.");
}
if (this->speaker_buffer_size_ > 0) {
size_t written = this->speaker_->play(this->speaker_buffer_, this->speaker_buffer_size_);
@@ -290,7 +290,7 @@ void VoiceAssistant::loop() {
this->speaker_buffer_index_ -= written;
this->set_timeout("speaker-timeout", 2000, [this]() { this->speaker_->stop(); });
} else {
ESP_LOGW(TAG, "Speaker buffer full");
ESP_LOGW(TAG, "Speaker buffer full.");
}
}
if (this->wait_for_stream_end_) {
@@ -513,7 +513,7 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
break;
}
case api::enums::VOICE_ASSISTANT_STT_START:
ESP_LOGD(TAG, "STT started");
ESP_LOGD(TAG, "STT Started");
this->listening_trigger_->trigger();
break;
case api::enums::VOICE_ASSISTANT_STT_END: {
@@ -525,24 +525,19 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
}
}
if (text.empty()) {
ESP_LOGW(TAG, "No text in STT_END event");
ESP_LOGW(TAG, "No text in STT_END event.");
return;
}
ESP_LOGD(TAG, "Speech recognised as: \"%s\"", text.c_str());
this->stt_end_trigger_->trigger(text);
break;
}
case api::enums::VOICE_ASSISTANT_INTENT_START:
ESP_LOGD(TAG, "Intent started");
this->intent_start_trigger_->trigger();
break;
case api::enums::VOICE_ASSISTANT_INTENT_END: {
for (auto arg : msg.data) {
if (arg.name == "conversation_id") {
this->conversation_id_ = std::move(arg.value);
}
}
this->intent_end_trigger_->trigger();
break;
}
case api::enums::VOICE_ASSISTANT_TTS_START: {
@@ -553,7 +548,7 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
}
}
if (text.empty()) {
ESP_LOGW(TAG, "No text in TTS_START event");
ESP_LOGW(TAG, "No text in TTS_START event.");
return;
}
ESP_LOGD(TAG, "Response: \"%s\"", text.c_str());
@@ -571,7 +566,7 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
}
}
if (url.empty()) {
ESP_LOGW(TAG, "No url in TTS_END event");
ESP_LOGW(TAG, "No url in TTS_END event.");
return;
}
ESP_LOGD(TAG, "Response URL: \"%s\"", url.c_str());
@@ -615,11 +610,6 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
if (code == "wake-word-timeout" || code == "wake_word_detection_aborted") {
// Don't change state here since either the "tts-end" or "run-end" events will do it.
return;
} else if (code == "wake-provider-missing" || code == "wake-engine-missing") {
// Wake word is not set up or not ready on Home Assistant so stop and do not retry until user starts again.
this->request_stop();
this->error_trigger_->trigger(code, message);
return;
}
ESP_LOGE(TAG, "Error: %s - %s", code.c_str(), message.c_str());
if (this->state_ != State::IDLE) {
@@ -639,14 +629,6 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
this->set_state_(State::RESPONSE_FINISHED, State::IDLE);
break;
}
case api::enums::VOICE_ASSISTANT_STT_VAD_START:
ESP_LOGD(TAG, "Starting STT by VAD");
this->stt_vad_start_trigger_->trigger();
break;
case api::enums::VOICE_ASSISTANT_STT_VAD_END:
ESP_LOGD(TAG, "STT by VAD end");
this->stt_vad_end_trigger_->trigger();
break;
default:
ESP_LOGD(TAG, "Unhandled event type: %d", msg.event_type);
break;

View File

@@ -100,17 +100,13 @@ class VoiceAssistant : public Component {
void set_auto_gain(uint8_t auto_gain) { this->auto_gain_ = auto_gain; }
void set_volume_multiplier(float volume_multiplier) { this->volume_multiplier_ = volume_multiplier; }
Trigger<> *get_intent_end_trigger() const { return this->intent_end_trigger_; }
Trigger<> *get_intent_start_trigger() const { return this->intent_start_trigger_; }
Trigger<> *get_listening_trigger() const { return this->listening_trigger_; }
Trigger<> *get_end_trigger() const { return this->end_trigger_; }
Trigger<> *get_start_trigger() const { return this->start_trigger_; }
Trigger<> *get_stt_vad_end_trigger() const { return this->stt_vad_end_trigger_; }
Trigger<> *get_stt_vad_start_trigger() const { return this->stt_vad_start_trigger_; }
Trigger<> *get_wake_word_detected_trigger() const { return this->wake_word_detected_trigger_; }
Trigger<std::string> *get_stt_end_trigger() const { return this->stt_end_trigger_; }
Trigger<std::string> *get_tts_end_trigger() const { return this->tts_end_trigger_; }
Trigger<std::string> *get_tts_start_trigger() const { return this->tts_start_trigger_; }
Trigger<std::string> *get_tts_end_trigger() const { return this->tts_end_trigger_; }
Trigger<> *get_end_trigger() const { return this->end_trigger_; }
Trigger<std::string, std::string> *get_error_trigger() const { return this->error_trigger_; }
Trigger<> *get_client_connected_trigger() const { return this->client_connected_trigger_; }
@@ -128,17 +124,13 @@ class VoiceAssistant : public Component {
std::unique_ptr<socket::Socket> socket_ = nullptr;
struct sockaddr_storage dest_addr_;
Trigger<> *intent_end_trigger_ = new Trigger<>();
Trigger<> *intent_start_trigger_ = new Trigger<>();
Trigger<> *listening_trigger_ = new Trigger<>();
Trigger<> *end_trigger_ = new Trigger<>();
Trigger<> *start_trigger_ = new Trigger<>();
Trigger<> *stt_vad_start_trigger_ = new Trigger<>();
Trigger<> *stt_vad_end_trigger_ = new Trigger<>();
Trigger<> *wake_word_detected_trigger_ = new Trigger<>();
Trigger<std::string> *stt_end_trigger_ = new Trigger<std::string>();
Trigger<std::string> *tts_end_trigger_ = new Trigger<std::string>();
Trigger<std::string> *tts_start_trigger_ = new Trigger<std::string>();
Trigger<std::string> *tts_end_trigger_ = new Trigger<std::string>();
Trigger<> *end_trigger_ = new Trigger<>();
Trigger<std::string, std::string> *error_trigger_ = new Trigger<std::string, std::string>();
Trigger<> *client_connected_trigger_ = new Trigger<>();

View File

@@ -389,10 +389,6 @@ void WiFiComponent::print_connect_params_() {
bssid_t bssid = wifi_bssid();
ESP_LOGCONFIG(TAG, " Local MAC: %s", get_mac_address_pretty().c_str());
if (this->is_disabled()) {
ESP_LOGCONFIG(TAG, " WiFi is disabled!");
return;
}
ESP_LOGCONFIG(TAG, " SSID: " LOG_SECRET("'%s'"), wifi_ssid().c_str());
ESP_LOGCONFIG(TAG, " IP Address: %s", wifi_sta_ip().str().c_str());
ESP_LOGCONFIG(TAG, " BSSID: " LOG_SECRET("%02X:%02X:%02X:%02X:%02X:%02X"), bssid[0], bssid[1], bssid[2], bssid[3],

View File

@@ -43,17 +43,11 @@ def validate_mode(mode):
return mode
def validate_pin(pin):
if pin in (8, 9):
raise cv.Invalid(f"pin {pin} doesn't exist")
return pin
XL9535_PIN_SCHEMA = cv.All(
{
cv.GenerateID(): cv.declare_id(XL9535GPIOPin),
cv.Required(CONF_XL9535): cv.use_id(XL9535Component),
cv.Required(CONF_NUMBER): cv.All(cv.int_range(min=0, max=17), validate_pin),
cv.Required(CONF_NUMBER): cv.int_range(min=0, max=15),
cv.Optional(CONF_MODE, default={}): cv.All(
{
cv.Optional(CONF_INPUT, default=False): cv.boolean,

View File

@@ -1,8 +0,0 @@
from __future__ import annotations
EVENT_ENTRY_ADDED = "entry_added"
EVENT_ENTRY_REMOVED = "entry_removed"
EVENT_ENTRY_UPDATED = "entry_updated"
EVENT_ENTRY_STATE_CHANGED = "entry_state_changed"
SENTINEL = object()

View File

@@ -1,135 +0,0 @@
from __future__ import annotations
import asyncio
import logging
import threading
from dataclasses import dataclass
from functools import partial
from typing import TYPE_CHECKING, Any, Callable
from ..zeroconf import DiscoveredImport
from .entries import DashboardEntries
from .settings import DashboardSettings
if TYPE_CHECKING:
from .status.mdns import MDNSStatus
_LOGGER = logging.getLogger(__name__)
@dataclass
class Event:
"""Dashboard Event."""
event_type: str
data: dict[str, Any]
class EventBus:
"""Dashboard event bus."""
def __init__(self) -> None:
"""Initialize the Dashboard event bus."""
self._listeners: dict[str, set[Callable[[Event], None]]] = {}
def async_add_listener(
self, event_type: str, listener: Callable[[Event], None]
) -> Callable[[], None]:
"""Add a listener to the event bus."""
self._listeners.setdefault(event_type, set()).add(listener)
return partial(self._async_remove_listener, event_type, listener)
def _async_remove_listener(
self, event_type: str, listener: Callable[[Event], None]
) -> None:
"""Remove a listener from the event bus."""
self._listeners[event_type].discard(listener)
def async_fire(self, event_type: str, event_data: dict[str, Any]) -> None:
"""Fire an event."""
event = Event(event_type, event_data)
_LOGGER.debug("Firing event: %s", event)
for listener in self._listeners.get(event_type, set()):
listener(event)
class ESPHomeDashboard:
"""Class that represents the dashboard."""
__slots__ = (
"bus",
"entries",
"loop",
"import_result",
"stop_event",
"ping_request",
"mqtt_ping_request",
"mdns_status",
"settings",
)
def __init__(self) -> None:
"""Initialize the ESPHomeDashboard."""
self.bus = EventBus()
self.entries: DashboardEntries | None = None
self.loop: asyncio.AbstractEventLoop | None = None
self.import_result: dict[str, DiscoveredImport] = {}
self.stop_event = threading.Event()
self.ping_request: asyncio.Event | None = None
self.mqtt_ping_request = threading.Event()
self.mdns_status: MDNSStatus | None = None
self.settings: DashboardSettings = DashboardSettings()
async def async_setup(self) -> None:
"""Setup the dashboard."""
self.loop = asyncio.get_running_loop()
self.ping_request = asyncio.Event()
self.entries = DashboardEntries(self)
async def async_run(self) -> None:
"""Run the dashboard."""
settings = self.settings
mdns_task: asyncio.Task | None = None
ping_status_task: asyncio.Task | None = None
await self.entries.async_update_entries()
if settings.status_use_ping:
from .status.ping import PingStatus
ping_status = PingStatus()
ping_status_task = asyncio.create_task(ping_status.async_run())
else:
from .status.mdns import MDNSStatus
mdns_status = MDNSStatus()
await mdns_status.async_refresh_hosts()
self.mdns_status = mdns_status
mdns_task = asyncio.create_task(mdns_status.async_run())
if settings.status_use_mqtt:
from .status.mqtt import MqttStatusThread
status_thread_mqtt = MqttStatusThread()
status_thread_mqtt.start()
shutdown_event = asyncio.Event()
try:
await shutdown_event.wait()
finally:
_LOGGER.info("Shutting down...")
self.stop_event.set()
self.ping_request.set()
if ping_status_task:
ping_status_task.cancel()
if mdns_task:
mdns_task.cancel()
if settings.status_use_mqtt:
status_thread_mqtt.join()
self.mqtt_ping_request.set()
await asyncio.sleep(0)
DASHBOARD = ESPHomeDashboard()

File diff suppressed because it is too large Load Diff

View File

@@ -1,371 +0,0 @@
from __future__ import annotations
import asyncio
import logging
import os
from typing import TYPE_CHECKING, Any
from esphome import const, util
from esphome.storage_json import StorageJSON, ext_storage_path
from .const import (
EVENT_ENTRY_ADDED,
EVENT_ENTRY_REMOVED,
EVENT_ENTRY_STATE_CHANGED,
EVENT_ENTRY_UPDATED,
)
from .enum import StrEnum
if TYPE_CHECKING:
from .core import ESPHomeDashboard
_LOGGER = logging.getLogger(__name__)
DashboardCacheKeyType = tuple[int, int, float, int]
# Currently EntryState is a simple
# online/offline/unknown enum, but in the future
# it may be expanded to include more states
class EntryState(StrEnum):
ONLINE = "online"
OFFLINE = "offline"
UNKNOWN = "unknown"
_BOOL_TO_ENTRY_STATE = {
True: EntryState.ONLINE,
False: EntryState.OFFLINE,
None: EntryState.UNKNOWN,
}
_ENTRY_STATE_TO_BOOL = {
EntryState.ONLINE: True,
EntryState.OFFLINE: False,
EntryState.UNKNOWN: None,
}
def bool_to_entry_state(value: bool) -> EntryState:
"""Convert a bool to an entry state."""
return _BOOL_TO_ENTRY_STATE[value]
def entry_state_to_bool(value: EntryState) -> bool | None:
"""Convert an entry state to a bool."""
return _ENTRY_STATE_TO_BOOL[value]
class DashboardEntries:
    """Represents all dashboard entries.

    Maintains an in-memory mapping of config-file path -> DashboardEntry
    that is periodically synced from disk, and fires bus events when
    entries are added, removed, updated, or change connectivity state.
    All mutation happens on the event loop; thread-safe wrappers are
    provided for the sync (threaded) callers.
    """

    __slots__ = (
        "_dashboard",
        "_loop",
        "_config_dir",
        "_entries",
        # NOTE(review): _entry_states is reserved in __slots__ but never
        # assigned in this class — confirm before removing.
        "_entry_states",
        "_loaded_entries",
        "_update_lock",
    )

    def __init__(self, dashboard: ESPHomeDashboard) -> None:
        """Initialize the DashboardEntries.

        Must be called from within a running event loop (captures it
        via asyncio.get_running_loop()).
        """
        self._dashboard = dashboard
        self._loop = asyncio.get_running_loop()
        self._config_dir = dashboard.settings.config_dir
        # Entries are stored as
        # {
        #   "path/to/file.yaml": DashboardEntry,
        #   ...
        # }
        self._entries: dict[str, DashboardEntry] = {}
        self._loaded_entries = False
        self._update_lock = asyncio.Lock()

    def get(self, path: str) -> DashboardEntry | None:
        """Get an entry by path, or None if it does not exist."""
        return self._entries.get(path)

    async def _async_all(self) -> list[DashboardEntry]:
        """Return all entries (event-loop only)."""
        return list(self._entries.values())

    def all(self) -> list[DashboardEntry]:
        """Return all entries; thread-safe, blocks on the event loop.

        Fix: asyncio.run_coroutine_threadsafe() requires a coroutine
        *object*; the previous code passed the coroutine function
        ``self._async_all`` without calling it, which raises TypeError.
        """
        return asyncio.run_coroutine_threadsafe(self._async_all(), self._loop).result()

    def async_all(self) -> list[DashboardEntry]:
        """Return all entries (event-loop only; no locking needed)."""
        return list(self._entries.values())

    def set_state(self, entry: DashboardEntry, state: EntryState) -> None:
        """Set the state for an entry; thread-safe, blocks on the event loop."""
        asyncio.run_coroutine_threadsafe(
            self._async_set_state(entry, state), self._loop
        ).result()

    async def _async_set_state(self, entry: DashboardEntry, state: EntryState) -> None:
        """Set the state for an entry (coroutine wrapper for set_state)."""
        self.async_set_state(entry, state)

    def async_set_state(self, entry: DashboardEntry, state: EntryState) -> None:
        """Set the state for an entry, firing a state-changed event on change."""
        if entry.state == state:
            return
        entry.state = state
        self._dashboard.bus.async_fire(
            EVENT_ENTRY_STATE_CHANGED, {"entry": entry, "state": state}
        )

    async def async_request_update_entries(self) -> None:
        """Request an update of the dashboard entries from disk.

        If an update is already in progress, this will do nothing.
        """
        if self._update_lock.locked():
            _LOGGER.debug("Dashboard entries are already being updated")
            return
        await self.async_update_entries()

    async def async_update_entries(self) -> None:
        """Update the dashboard entries from disk, serialized by a lock."""
        async with self._update_lock:
            await self._async_update_entries()

    def _load_entries(
        self, entries: dict[DashboardEntry, DashboardCacheKeyType]
    ) -> None:
        """Load the given entries from disk (runs in an executor thread)."""
        for entry, cache_key in entries.items():
            _LOGGER.debug(
                "Loading dashboard entry %s because cache key changed: %s",
                entry.path,
                cache_key,
            )
            entry.load_from_disk(cache_key)

    async def _async_update_entries(self) -> None:
        """Sync the dashboard entries from disk and fire add/remove/update events.

        Fix: annotated ``-> None`` — the previous ``-> list[DashboardEntry]``
        annotation was wrong; nothing is returned.
        """
        _LOGGER.debug("Updating dashboard entries")
        # At some point it would be nice to use watchdog to avoid polling
        path_to_cache_key = await self._loop.run_in_executor(
            None, self._get_path_to_cache_key
        )
        entries = self._entries
        added: dict[DashboardEntry, DashboardCacheKeyType] = {}
        updated: dict[DashboardEntry, DashboardCacheKeyType] = {}
        # Entries whose backing file disappeared from disk.
        removed: set[DashboardEntry] = {
            entry
            for filename, entry in entries.items()
            if filename not in path_to_cache_key
        }
        for path, cache_key in path_to_cache_key.items():
            if entry := entries.get(path):
                if entry.cache_key != cache_key:
                    updated[entry] = cache_key
            else:
                entry = DashboardEntry(path, cache_key)
                added[entry] = cache_key
        if added or updated:
            # Disk I/O for (re)loading storage happens off the event loop.
            await self._loop.run_in_executor(
                None, self._load_entries, {**added, **updated}
            )
        bus = self._dashboard.bus
        for entry in added:
            entries[entry.path] = entry
            bus.async_fire(EVENT_ENTRY_ADDED, {"entry": entry})
        for entry in removed:
            del entries[entry.path]
            bus.async_fire(EVENT_ENTRY_REMOVED, {"entry": entry})
        for entry in updated:
            bus.async_fire(EVENT_ENTRY_UPDATED, {"entry": entry})

    def _get_path_to_cache_key(self) -> dict[str, DashboardCacheKeyType]:
        """Return a dict of path to cache key (runs in an executor thread)."""
        path_to_cache_key: dict[str, DashboardCacheKeyType] = {}
        #
        # The cache key is (inode, device, mtime, size)
        # which allows us to avoid locking since it ensures
        # every iteration of this call will always return the newest
        # items from disk at the cost of a stat() call on each
        # file which is much faster than reading the file
        # for the cache hit case which is the common case.
        #
        for file in util.list_yaml_files([self._config_dir]):
            try:
                # Prefer the json storage path if it exists
                stat = os.stat(ext_storage_path(os.path.basename(file)))
            except OSError:
                try:
                    # Fallback to the yaml file if the storage
                    # file does not exist or could not be generated
                    stat = os.stat(file)
                except OSError:
                    # File was deleted, ignore
                    continue
            path_to_cache_key[file] = (
                stat.st_ino,
                stat.st_dev,
                stat.st_mtime,
                stat.st_size,
            )
        return path_to_cache_key
class DashboardEntry:
    """Represents a single dashboard entry.
    This class is thread-safe and read-only.
    """
    __slots__ = (
        "path",
        "filename",
        "_storage_path",
        "cache_key",
        "storage",
        "state",
        "_to_dict",
    )
    def __init__(self, path: str, cache_key: DashboardCacheKeyType) -> None:
        """Initialize the DashboardEntry.

        :param path: Path to the entry's YAML configuration file.
        :param cache_key: (inode, device, mtime, size) tuple used to
            detect on-disk changes without re-reading the file.
        """
        self.path = path
        self.filename: str = os.path.basename(path)
        self._storage_path = ext_storage_path(self.filename)
        self.cache_key = cache_key
        # Parsed storage JSON; stays None until load_from_disk() finds it.
        self.storage: StorageJSON | None = None
        # Connectivity state; mutated via DashboardEntries.async_set_state().
        self.state = EntryState.UNKNOWN
        # Cached result of to_dict(); invalidated by load_from_disk().
        self._to_dict: dict[str, Any] | None = None
    def __repr__(self):
        """Return the representation of this entry."""
        return (
            f"DashboardEntry(path={self.path} "
            f"address={self.address} "
            f"web_port={self.web_port} "
            f"name={self.name} "
            f"no_mdns={self.no_mdns} "
            f"state={self.state} "
            ")"
        )
    def to_dict(self) -> dict[str, Any]:
        """Return a dict representation of this entry.
        The dict includes the loaded configuration but not
        the current state of the entry.
        """
        # Built lazily and cached; load_from_disk() resets the cache.
        if self._to_dict is None:
            self._to_dict = {
                "name": self.name,
                "friendly_name": self.friendly_name,
                "configuration": self.filename,
                "loaded_integrations": self.loaded_integrations,
                "deployed_version": self.update_old,
                "current_version": self.update_new,
                "path": self.path,
                "comment": self.comment,
                "address": self.address,
                "web_port": self.web_port,
                "target_platform": self.target_platform,
            }
        return self._to_dict
    def load_from_disk(self, cache_key: DashboardCacheKeyType | None = None) -> None:
        """Load this entry from disk."""
        self.storage = StorageJSON.load(self._storage_path)
        self._to_dict = None
        #
        # Currently StorageJSON.load() will return None if the file does not exist
        #
        # StorageJSON currently does not provide an updated cache key so we use the
        # one that is passed in.
        #
        # The cache key was read from the disk moments ago and may be stale but
        # it does not matter since we are polling anyways, and the next call to
        # async_update_entries() will load it again in the extremely rare case that
        # it changed between the two calls.
        #
        if cache_key:
            self.cache_key = cache_key
    @property
    def address(self) -> str | None:
        """Return the address of this entry (None until storage is loaded)."""
        if self.storage is None:
            return None
        return self.storage.address
    @property
    def no_mdns(self) -> bool | None:
        """Return the no_mdns flag of this entry (None until storage is loaded)."""
        if self.storage is None:
            return None
        return self.storage.no_mdns
    @property
    def web_port(self) -> int | None:
        """Return the web port of this entry (None until storage is loaded)."""
        if self.storage is None:
            return None
        return self.storage.web_port
    @property
    def name(self) -> str:
        """Return the name of this entry.

        Falls back to the config filename (without extension) when
        storage has not been loaded yet.
        """
        if self.storage is None:
            return self.filename.replace(".yml", "").replace(".yaml", "")
        return self.storage.name
    @property
    def friendly_name(self) -> str:
        """Return the friendly name of this entry (falls back to name)."""
        if self.storage is None:
            return self.name
        return self.storage.friendly_name
    @property
    def comment(self) -> str | None:
        """Return the comment of this entry (None until storage is loaded)."""
        if self.storage is None:
            return None
        return self.storage.comment
    @property
    def target_platform(self) -> str | None:
        """Return the target platform of this entry (None until storage is loaded)."""
        if self.storage is None:
            return None
        return self.storage.target_platform
    @property
    def update_available(self) -> bool:
        """Return if an update is available for this entry.

        Treated as True when storage is not loaded (version unknown).
        """
        if self.storage is None:
            return True
        return self.update_old != self.update_new
    @property
    def update_old(self) -> str:
        """Return the ESPHome version recorded in storage ("" when unknown)."""
        if self.storage is None:
            return ""
        return self.storage.esphome_version or ""
    @property
    def update_new(self) -> str:
        """Return the currently running ESPHome version."""
        return const.__version__
    @property
    def loaded_integrations(self) -> list[str]:
        """Return the integrations recorded in storage ([] when not loaded)."""
        if self.storage is None:
            return []
        return self.storage.loaded_integrations

View File

@@ -1,19 +0,0 @@
"""Enum backports from standard lib."""
from __future__ import annotations
from enum import Enum
from typing import Any
class StrEnum(str, Enum):
    """Partial backport of Python 3.11's StrEnum for our basic use cases."""

    def __new__(cls, value: str, *args: Any, **kwargs: Any) -> StrEnum:
        """Create a new StrEnum member, rejecting non-string values."""
        if isinstance(value, str):
            return super().__new__(cls, value, *args, **kwargs)
        raise TypeError(f"{value!r} is not a string")

    def __str__(self) -> str:
        """Return self.value."""
        return str(self.value)

View File

@@ -1,76 +0,0 @@
from __future__ import annotations
import hmac
import os
from pathlib import Path
from esphome.core import CORE
from esphome.helpers import get_bool_env
from .util.password import password_hash
class DashboardSettings:
    """Settings for the dashboard."""
    def __init__(self) -> None:
        # Directory containing the ESPHome YAML configuration files.
        self.config_dir: str = ""
        # Hashed password set in parse_args(). NOTE(review): the
        # password_hash() helper returns bytes, so after parse_args()
        # this holds bytes despite the str annotation/default — confirm.
        self.password_hash: str = ""
        self.username: str = ""
        self.using_password: bool = False
        # True when running as a Home Assistant add-on.
        self.on_ha_addon: bool = False
        # Cookie signing secret; presumably assigned elsewhere — it is
        # never set in this class.
        self.cookie_secret: str | None = None
        self.absolute_config_dir: Path | None = None
    def parse_args(self, args):
        """Populate settings from parsed command-line arguments and env vars."""
        self.on_ha_addon: bool = args.ha_addon
        password: str = args.password or os.getenv("PASSWORD", "")
        if not self.on_ha_addon:
            self.username: str = args.username or os.getenv("USERNAME", "")
        self.using_password = bool(password)
        if self.using_password:
            self.password_hash = password_hash(password)
        self.config_dir: str = args.configuration
        self.absolute_config_dir: Path = Path(self.config_dir).resolve()
        CORE.config_path = os.path.join(self.config_dir, ".")
    @property
    def relative_url(self):
        """Return the base URL path the dashboard is served under (default "/")."""
        return os.getenv("ESPHOME_DASHBOARD_RELATIVE_URL", "/")
    @property
    def status_use_ping(self):
        """Return True if device status should be checked via ping."""
        return get_bool_env("ESPHOME_DASHBOARD_USE_PING")
    @property
    def status_use_mqtt(self):
        """Return True if device status should be checked via MQTT."""
        return get_bool_env("ESPHOME_DASHBOARD_USE_MQTT")
    @property
    def using_ha_addon_auth(self):
        """Return True when Home Assistant add-on authentication is active."""
        if not self.on_ha_addon:
            return False
        return not get_bool_env("DISABLE_HA_AUTHENTICATION")
    @property
    def using_auth(self):
        """Return True when any form of authentication is enabled."""
        return self.using_password or self.using_ha_addon_auth
    @property
    def streamer_mode(self):
        """Return True when streamer mode is enabled (hides sensitive info)."""
        return get_bool_env("ESPHOME_STREAMER_MODE")
    def check_password(self, username, password):
        """Return True if the given credentials are valid (or auth is disabled)."""
        if not self.using_auth:
            return True
        if username != self.username:
            return False
        # Compare password in constant running time (to prevent timing attacks)
        return hmac.compare_digest(self.password_hash, password_hash(password))
    def rel_path(self, *args):
        """Join *args* onto the config dir, rejecting paths that escape it."""
        joined_path = os.path.join(self.config_dir, *args)
        # Raises ValueError if not relative to ESPHome config folder
        Path(joined_path).resolve().relative_to(self.absolute_config_dir)
        return joined_path

View File

@@ -1,112 +0,0 @@
from __future__ import annotations
import asyncio
from esphome.zeroconf import (
ESPHOME_SERVICE_TYPE,
AsyncEsphomeZeroconf,
DashboardBrowser,
DashboardImportDiscovery,
DashboardStatus,
)
from ..const import SENTINEL
from ..core import DASHBOARD
from ..entries import bool_to_entry_state
class MDNSStatus:
    """Class that updates the mdns status."""
    def __init__(self) -> None:
        """Initialize the MDNSStatus class.

        Must be called from within a running event loop.
        """
        super().__init__()
        # Zeroconf instance; created in async_run(), None when stopped.
        self.aiozc: AsyncEsphomeZeroconf | None = None
        # This is the current mdns state for each host (True, False, None)
        self.host_mdns_state: dict[str, bool | None] = {}
        # This is the hostnames to path mapping
        self.host_name_to_path: dict[str, str] = {}
        self.path_to_host_name: dict[str, str] = {}
        # This is a set of host names to track (i.e no_mdns = false)
        # Annotation corrected: members are entry-name strings, not sets.
        self.host_name_with_mdns_enabled: set[str] = set()
        self._loop = asyncio.get_running_loop()
    def get_path_to_host_name(self, path: str) -> str | None:
        """Resolve a path to an address in a thread-safe manner."""
        return self.path_to_host_name.get(path)
    async def async_resolve_host(self, host_name: str) -> str | None:
        """Resolve a host name to an address in a thread-safe manner."""
        if aiozc := self.aiozc:
            return await aiozc.async_resolve_host(host_name)
        return None
    async def async_refresh_hosts(self):
        """Refresh the hosts to track."""
        dashboard = DASHBOARD
        current_entries = dashboard.entries.async_all()
        # Local aliases to avoid repeated attribute lookups in the loop.
        host_name_with_mdns_enabled = self.host_name_with_mdns_enabled
        host_mdns_state = self.host_mdns_state
        host_name_to_path = self.host_name_to_path
        path_to_host_name = self.path_to_host_name
        entries = dashboard.entries
        for entry in current_entries:
            name = entry.name
            # If no_mdns is set, remove it from the set
            if entry.no_mdns:
                host_name_with_mdns_enabled.discard(name)
                continue
            # We are tracking this host
            host_name_with_mdns_enabled.add(name)
            path = entry.path
            # If we just adopted/imported this host, we likely
            # already have a state for it, so we should make sure
            # to set it so the dashboard shows it as online
            if (online := host_mdns_state.get(name, SENTINEL)) != SENTINEL:
                entries.async_set_state(entry, bool_to_entry_state(online))
            # Make sure the mapping is up to date
            # so when we get an mdns update we can map it back
            # to the filename
            host_name_to_path[name] = path
            path_to_host_name[path] = name
    async def async_run(self) -> None:
        """Run the mdns status loop until the dashboard stop event is set."""
        dashboard = DASHBOARD
        entries = dashboard.entries
        aiozc = AsyncEsphomeZeroconf()
        self.aiozc = aiozc
        host_mdns_state = self.host_mdns_state
        host_name_to_path = self.host_name_to_path
        host_name_with_mdns_enabled = self.host_name_with_mdns_enabled
        def on_update(dat: dict[str, bool | None]) -> None:
            """Update the entry state."""
            for name, result in dat.items():
                host_mdns_state[name] = result
                # Skip hosts that opted out of mdns tracking.
                if name not in host_name_with_mdns_enabled:
                    continue
                if entry := entries.get(host_name_to_path[name]):
                    entries.async_set_state(entry, bool_to_entry_state(result))
        stat = DashboardStatus(on_update)
        imports = DashboardImportDiscovery()
        dashboard.import_result = imports.import_state
        browser = DashboardBrowser(
            aiozc.zeroconf,
            ESPHOME_SERVICE_TYPE,
            [stat.browser_callback, imports.browser_callback],
        )
        # Refresh tracked hosts each time the dashboard requests a ping.
        while not dashboard.stop_event.is_set():
            await self.async_refresh_hosts()
            await dashboard.ping_request.wait()
            dashboard.ping_request.clear()
        await browser.async_cancel()
        await aiozc.async_close()
        self.aiozc = None

View File

@@ -1,67 +0,0 @@
from __future__ import annotations
import binascii
import json
import os
import threading
from esphome import mqtt
from ..core import DASHBOARD
from ..entries import EntryState
class MqttStatusThread(threading.Thread):
    """Status thread to get the status of the devices via MQTT."""
    def run(self) -> None:
        """Run the status thread."""
        dashboard = DASHBOARD
        entries = dashboard.entries
        current_entries = entries.all()
        config = mqtt.config_from_env()
        topic = "esphome/discover/#"
        def on_message(client, userdata, msg):
            """Mark an entry ONLINE when it answers a discovery request."""
            nonlocal current_entries
            payload = msg.payload.decode(errors="backslashreplace")
            if len(payload) > 0:
                data = json.loads(payload)
                if "name" not in data:
                    return
                for entry in current_entries:
                    if entry.name == data["name"]:
                        entries.set_state(entry, EntryState.ONLINE)
                        return
        def on_connect(client, userdata, flags, return_code):
            """Trigger a discovery broadcast once connected to the broker."""
            client.publish("esphome/discover", None, retain=False)
        # Random client id so multiple dashboards do not clash on the broker.
        mqttid = str(binascii.hexlify(os.urandom(6)).decode())
        client = mqtt.prepare(
            config,
            [topic],
            on_message,
            on_connect,
            None,
            None,
            f"esphome-dashboard-{mqttid}",
        )
        client.loop_start()
        # Poll every 2 seconds until the dashboard is stopped.
        while not dashboard.stop_event.wait(2):
            current_entries = entries.all()
            # will be set to true on on_message
            # Reset non-mdns entries to OFFLINE; on_message flips the
            # responsive ones back to ONLINE after the discovery publish.
            for entry in current_entries:
                if entry.no_mdns:
                    entries.set_state(entry, EntryState.OFFLINE)
            client.publish("esphome/discover", None, retain=False)
            dashboard.mqtt_ping_request.wait()
            dashboard.mqtt_ping_request.clear()
        client.disconnect()
        client.loop_stop()

View File

@@ -1,49 +0,0 @@
from __future__ import annotations
import asyncio
import os
from typing import cast
from ..core import DASHBOARD
from ..entries import DashboardEntry, bool_to_entry_state
from ..util.itertools import chunked
from ..util.subprocess import async_system_command_status
async def _async_ping_host(host: str) -> bool:
    """Send a single ping to a host and return True on success."""
    count_flag = "-n" if os.name == "nt" else "-c"
    command = ["ping", count_flag, "1", host]
    return await async_system_command_status(command)
class PingStatus:
    """Updates entry connectivity state by pinging device addresses."""
    def __init__(self) -> None:
        """Initialize the PingStatus class.

        Must be called from within a running event loop.
        """
        super().__init__()
        self._loop = asyncio.get_running_loop()
    async def async_run(self) -> None:
        """Run the ping status."""
        dashboard = DASHBOARD
        entries = dashboard.entries
        while not dashboard.stop_event.is_set():
            # Only ping if the dashboard is open
            await dashboard.ping_request.wait()
            current_entries = dashboard.entries.async_all()
            # Only entries with a known address can be pinged.
            to_ping: list[DashboardEntry] = [
                entry for entry in current_entries if entry.address is not None
            ]
            # Ping in groups of 16 to bound concurrent subprocesses.
            for ping_group in chunked(to_ping, 16):
                ping_group = cast(list[DashboardEntry], ping_group)
                results = await asyncio.gather(
                    *(_async_ping_host(entry.address) for entry in ping_group),
                    return_exceptions=True,
                )
                for entry, result in zip(ping_group, results):
                    if isinstance(result, Exception):
                        # A failed ping subprocess counts as offline.
                        result = False
                    elif isinstance(result, BaseException):
                        # Re-raise cancellation and other non-Exception errors.
                        raise result
                    entries.async_set_state(entry, bool_to_entry_state(result))

View File

@@ -1,10 +1,17 @@
from __future__ import annotations
import hashlib
import unicodedata
from esphome.const import ALLOWED_NAME_CHARS
def password_hash(password: str) -> bytes:
    """Create a hash of a password to transform it to a fixed-length digest.

    Note this is not meant for secure storage, but for securely comparing passwords.
    """
    digest = hashlib.sha256()
    digest.update(password.encode())
    return digest.digest()
def strip_accents(value):
return "".join(
c

View File

@@ -1,22 +0,0 @@
from __future__ import annotations
from collections.abc import Iterable
from functools import partial
from itertools import islice
from typing import Any
def take(take_num: int, iterable: Iterable) -> list[Any]:
    """Return the first *take_num* items of *iterable* as a list.

    From itertools recipes
    """
    first_items = islice(iterable, take_num)
    return list(first_items)
def chunked(iterable: Iterable, chunked_num: int) -> Iterable[Any]:
    """Break *iterable* into lists of length *chunked_num* (last may be shorter).

    From more-itertools
    """
    source = iter(iterable)
    next_chunk = partial(take, chunked_num, source)
    return iter(next_chunk, [])

View File

@@ -1,11 +0,0 @@
from __future__ import annotations
import hashlib
def password_hash(password: str) -> bytes:
    """Create a hash of a password to transform it to a fixed-length digest.

    Note this is not meant for secure storage, but for securely comparing passwords.
    """
    encoded = password.encode()
    return hashlib.sha256(encoded).digest()

View File

@@ -1,31 +0,0 @@
from __future__ import annotations
import asyncio
from collections.abc import Iterable
async def async_system_command_status(command: Iterable[str]) -> bool:
    """Run a system command checking only the status."""
    proc = await asyncio.create_subprocess_exec(
        *command,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.DEVNULL,
        stderr=asyncio.subprocess.DEVNULL,
        close_fds=False,
    )
    # wait() returns the exit code; zero means success.
    return await proc.wait() == 0
async def async_run_system_command(command: Iterable[str]) -> tuple[int, bytes, bytes]:
    """Run a system command and return a tuple of returncode, stdout, stderr.

    Fix: the return annotation previously claimed ``tuple[bool, ...]`` but
    the first element is the integer process returncode.

    :param command: The argv list to execute.
    :return: (returncode, stdout bytes, stderr bytes).
    """
    process = await asyncio.create_subprocess_exec(
        *command,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
        close_fds=False,
    )
    # communicate() already waits for process termination; the extra
    # process.wait() the original performed was redundant.
    stdout, stderr = await process.communicate()
    return process.returncode, stdout, stderr

File diff suppressed because it is too large Load Diff

View File

@@ -1,13 +1,10 @@
from __future__ import annotations
import gzip
import hashlib
import io
import logging
import random
import socket
import sys
import time
import gzip
from esphome.core import EsphomeError
from esphome.helpers import is_ip_address, resolve_ip_address
@@ -43,10 +40,6 @@ MAGIC_BYTES = [0x6C, 0x26, 0xF7, 0x5C, 0x45]
FEATURE_SUPPORTS_COMPRESSION = 0x01
UPLOAD_BLOCK_SIZE = 8192
UPLOAD_BUFFER_SIZE = UPLOAD_BLOCK_SIZE * 8
_LOGGER = logging.getLogger(__name__)
@@ -191,9 +184,7 @@ def send_check(sock, data, msg):
raise OTAError(f"Error sending {msg}: {err}") from err
def perform_ota(
sock: socket.socket, password: str, file_handle: io.IOBase, filename: str
) -> None:
def perform_ota(sock, password, file_handle, filename):
file_contents = file_handle.read()
file_size = len(file_contents)
_LOGGER.info("Uploading %s (%s bytes)", filename, file_size)
@@ -263,16 +254,14 @@ def perform_ota(
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 0)
# Limit send buffer (usually around 100kB) in order to have progress bar
# show the actual progress
sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, UPLOAD_BUFFER_SIZE)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 8192)
# Set higher timeout during upload
sock.settimeout(30.0)
start_time = time.perf_counter()
sock.settimeout(20.0)
offset = 0
progress = ProgressBar()
while True:
chunk = upload_contents[offset : offset + UPLOAD_BLOCK_SIZE]
chunk = upload_contents[offset : offset + 1024]
if not chunk:
break
offset += len(chunk)
@@ -288,9 +277,8 @@ def perform_ota(
# Enable nodelay for last checks
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
duration = time.perf_counter() - start_time
_LOGGER.info("Upload took %.2f seconds, waiting for result...", duration)
_LOGGER.info("Waiting for result...")
receive_exactly(sock, 1, "receive OK", RESPONSE_RECEIVE_OK)
receive_exactly(sock, 1, "Update end", RESPONSE_UPDATE_END_OK)

View File

@@ -23,14 +23,6 @@ from esphome.core import (
from esphome.helpers import add_class_to_obj
from esphome.util import OrderedDict, filter_yaml_files
try:
from yaml import CSafeLoader as FastestAvailableSafeLoader
except ImportError:
from yaml import ( # type: ignore[assignment]
SafeLoader as FastestAvailableSafeLoader,
)
_LOGGER = logging.getLogger(__name__)
# Mostly copied from Home Assistant because that code works fine and
@@ -97,7 +89,7 @@ def _add_data_ref(fn):
return wrapped
class ESPHomeLoader(FastestAvailableSafeLoader):
class ESPHomeLoader(yaml.SafeLoader):
"""Loader class that keeps track of line numbers."""
@_add_data_ref

View File

@@ -1,21 +1,22 @@
from __future__ import annotations
import asyncio
import logging
from dataclasses import dataclass
from typing import Callable
from zeroconf import IPVersion, ServiceInfo, ServiceStateChange, Zeroconf
from zeroconf.asyncio import AsyncServiceBrowser, AsyncServiceInfo, AsyncZeroconf
from zeroconf import (
IPVersion,
ServiceBrowser,
ServiceInfo,
ServiceStateChange,
Zeroconf,
)
from esphome.storage_json import StorageJSON, ext_storage_path
_LOGGER = logging.getLogger(__name__)
_BACKGROUND_TASKS: set[asyncio.Task] = set()
class HostResolver(ServiceInfo):
"""Resolve a host name to an IP address."""
@@ -64,7 +65,7 @@ class DiscoveredImport:
network: str
class DashboardBrowser(AsyncServiceBrowser):
class DashboardBrowser(ServiceBrowser):
"""A class to browse for ESPHome nodes."""
@@ -93,28 +94,7 @@ class DashboardImportDiscovery:
# Ignore updates for devices that are not in the import state
return
info = AsyncServiceInfo(
service_type,
name,
)
if info.load_from_cache(zeroconf):
self._process_service_info(name, info)
return
task = asyncio.create_task(
self._async_process_service_info(zeroconf, info, service_type, name)
)
_BACKGROUND_TASKS.add(task)
task.add_done_callback(_BACKGROUND_TASKS.discard)
async def _async_process_service_info(
self, zeroconf: Zeroconf, info: AsyncServiceInfo, service_type: str, name: str
) -> None:
"""Process a service info."""
if await info.async_request(zeroconf):
self._process_service_info(name, info)
def _process_service_info(self, name: str, info: ServiceInfo) -> None:
"""Process a service info."""
info = zeroconf.get_service_info(service_type, name)
_LOGGER.debug("-> resolved info: %s", info)
if info is None:
return
@@ -166,32 +146,13 @@ class DashboardImportDiscovery:
)
def _make_host_resolver(host: str) -> HostResolver:
"""Create a new HostResolver for the given host name."""
name = host.partition(".")[0]
info = HostResolver(ESPHOME_SERVICE_TYPE, f"{name}.{ESPHOME_SERVICE_TYPE}")
return info
class EsphomeZeroconf(Zeroconf):
def resolve_host(self, host: str, timeout: float = 3.0) -> str | None:
def resolve_host(self, host: str, timeout=3.0):
"""Resolve a host name to an IP address."""
info = _make_host_resolver(host)
if (
info.load_from_cache(self)
or (timeout and info.request(self, timeout * 1000))
) and (addresses := info.ip_addresses_by_version(IPVersion.V4Only)):
return str(addresses[0])
return None
class AsyncEsphomeZeroconf(AsyncZeroconf):
async def async_resolve_host(self, host: str, timeout: float = 3.0) -> str | None:
"""Resolve a host name to an IP address."""
info = _make_host_resolver(host)
if (
info.load_from_cache(self.zeroconf)
or (timeout and await info.async_request(self.zeroconf, timeout * 1000))
) and (addresses := info.ip_addresses_by_version(IPVersion.V4Only)):
name = host.partition(".")[0]
info = HostResolver(f"{name}.{ESPHOME_SERVICE_TYPE}", ESPHOME_SERVICE_TYPE)
if (info.load_from_cache(self) or info.request(self, timeout * 1000)) and (
addresses := info.ip_addresses_by_version(IPVersion.V4Only)
):
return str(addresses[0])
return None

View File

@@ -159,7 +159,7 @@ board_build.filesystem_size = 0.5m
platform = https://github.com/maxgerhardt/platform-raspberrypi.git
platform_packages =
; earlephilhower/framework-arduinopico@~1.20602.0 ; Cannot use the platformio package until old releases stop getting deleted
earlephilhower/framework-arduinopico@https://github.com/earlephilhower/arduino-pico/releases/download/3.6.0/rp2040-3.6.0.zip
earlephilhower/framework-arduinopico@https://github.com/earlephilhower/arduino-pico/releases/download/3.4.0/rp2040-3.4.0.zip
framework = arduino
lib_deps =

View File

@@ -10,8 +10,8 @@ platformio==6.1.11 # When updating platformio, also update Dockerfile
esptool==4.6.2
click==8.1.7
esphome-dashboard==20231107.0
aioesphomeapi==18.5.3
zeroconf==0.127.0
aioesphomeapi==18.2.7
zeroconf==0.122.3
# esp-idf requires this, but doesn't bundle it by default
# https://github.com/espressif/esp-idf/blob/220590d599e134d7a5e7f1e683cc4550349ffbf8/requirements.txt#L24

View File

@@ -1,6 +1,6 @@
pylint==2.17.6
flake8==6.1.0 # also change in .pre-commit-config.yaml when updating
black==23.11.0 # also change in .pre-commit-config.yaml when updating
black==23.10.1 # also change in .pre-commit-config.yaml when updating
pyupgrade==3.15.0 # also change in .pre-commit-config.yaml when updating
pre-commit

View File

@@ -45,7 +45,7 @@ def sub(path, pattern, repl, expected_count=1):
content, count = re.subn(pattern, repl, content, flags=re.MULTILINE)
if expected_count is not None:
assert count == expected_count, f"Pattern {pattern} replacement failed!"
with open(path, "w") as fh:
with open(path, "wt") as fh:
fh.write(content)

View File

@@ -3050,9 +3050,6 @@ remote_receiver:
on_coolix:
then:
delay: !lambda "return x.first + x.second;"
on_rc_switch:
then:
delay: !lambda "return uint32_t(x.code) + x.protocol;"
status_led:
pin: GPIO2

View File

@@ -425,15 +425,6 @@ binary_sensor:
input: true
inverted: false
- platform: gpio
name: XL9535 Pin 17
pin:
xl9535: xl9535_hub
number: 17
mode:
input: true
inverted: false
climate:
- platform: tuya
id: tuya_climate

View File

@@ -1,4 +1,4 @@
from collections.abc import Iterator
from typing import Iterator
import math

View File

@@ -1,5 +1,5 @@
import pytest
from unittest.mock import Mock
from mock import Mock
from esphome import cpp_helpers as ch
from esphome import const