mirror of https://github.com/esphome/esphome.git
synced 2025-11-03 16:41:50 +00:00
Compare commits
15 Commits
jesserockz ... jesserockz

| SHA1 |
|---|
| df6ac61148 |
| 8fbb4e27d1 |
| 3c243e663f |
| 288af1f4d2 |
| 6f8d7c6acd |
| 32e3f26239 |
| 5464368c08 |
| 208edf89dc |
| fefdb80fdc |
| 754bd5b7be |
| 10a9129b7b |
| ef945d298c |
| 149d814fab |
| 5f1d8dfa5b |
| 3644853d38 |
97
.github/actions/build-image/action.yaml
vendored
Normal file
@@ -0,0 +1,97 @@
name: Build Image
inputs:
  platform:
    description: "Platform to build for"
    required: true
    example: "linux/amd64"
  target:
    description: "Target to build"
    required: true
    example: "docker"
  baseimg:
    description: "Base image type"
    required: true
    example: "docker"
  suffix:
    description: "Suffix to add to tags"
    required: true
  version:
    description: "Version to build"
    required: true
    example: "2023.12.0"
runs:
  using: "composite"
  steps:
    - name: Generate short tags
      id: tags
      shell: bash
      run: |
        output=$(docker/generate_tags.py \
          --tag "${{ inputs.version }}" \
          --suffix "${{ inputs.suffix }}")
        echo $output
        for l in $output; do
          echo $l >> $GITHUB_OUTPUT
        done

    - name: Build and push to ghcr by digest
      id: build-ghcr
      uses: docker/build-push-action@v5.0.0
      with:
        context: .
        file: ./docker/Dockerfile
        platforms: ${{ inputs.platform }}
        target: ${{ inputs.target }}
        cache-from: type=gha
        cache-to: type=gha,mode=max
        build-args: |
          BASEIMGTYPE=${{ inputs.baseimg }}
          BUILD_VERSION=${{ inputs.version }}
        outputs: |
          type=image,name=ghcr.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true

    - name: Export ghcr digests
      shell: bash
      run: |
        mkdir -p /tmp/digests/${{ inputs.target }}/ghcr
        digest="${{ steps.build-ghcr.outputs.digest }}"
        touch "/tmp/digests/${{ inputs.target }}/ghcr/${digest#sha256:}"

    - name: Upload ghcr digest
      uses: actions/upload-artifact@v3.1.3
      with:
        name: digests-${{ inputs.target }}-ghcr
        path: /tmp/digests/${{ inputs.target }}/ghcr/*
        if-no-files-found: error
        retention-days: 1

    - name: Build and push to dockerhub by digest
      id: build-dockerhub
      uses: docker/build-push-action@v5.0.0
      with:
        context: .
        file: ./docker/Dockerfile
        platforms: ${{ inputs.platform }}
        target: ${{ inputs.target }}
        cache-from: type=gha
        cache-to: type=gha,mode=max
        build-args: |
          BASEIMGTYPE=${{ inputs.baseimg }}
          BUILD_VERSION=${{ inputs.version }}
        outputs: |
          type=image,name=docker.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true

    - name: Export dockerhub digests
      shell: bash
      run: |
        mkdir -p /tmp/digests/${{ inputs.target }}/dockerhub
        digest="${{ steps.build-dockerhub.outputs.digest }}"
        touch "/tmp/digests/${{ inputs.target }}/dockerhub/${digest#sha256:}"

    - name: Upload dockerhub digest
      uses: actions/upload-artifact@v3.1.3
      with:
        name: digests-${{ inputs.target }}-dockerhub
        path: /tmp/digests/${{ inputs.target }}/dockerhub/*
        if-no-files-found: error
        retention-days: 1
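The export steps above record each per-architecture build only by its content digest: the `sha256:` prefix is stripped and the bare digest becomes the name of an empty marker file under `/tmp/digests/<target>/<registry>`, which the manifest job later expands back into `<image>@sha256:<digest>` references. A minimal Python sketch of that naming round-trip (not part of the diff; the helper names and sample digests are illustrative):

```python
from pathlib import Path


def export_digest(digest: str, target: str, registry: str, root: str = "/tmp/digests") -> Path:
    """Mirror the 'Export ... digests' steps: one empty file named after the bare digest."""
    out_dir = Path(root) / target / registry
    out_dir.mkdir(parents=True, exist_ok=True)
    marker = out_dir / digest.removeprefix("sha256:")
    marker.touch()
    return marker


def collect_image_refs(image: str, target: str, registry: str, root: str = "/tmp/digests") -> list[str]:
    """Rebuild '<image>@sha256:<digest>' references from the marker files, as the manifest job does."""
    out_dir = Path(root) / target / registry
    return [f"{image}@sha256:{p.name}" for p in sorted(out_dir.iterdir())]


if __name__ == "__main__":
    export_digest("sha256:1111aa", "docker", "ghcr")   # sample digests, not real builds
    export_digest("sha256:2222bb", "docker", "ghcr")
    print(collect_image_refs("ghcr.io/esphome/esphome", "docker", "ghcr"))
```

Uploading only empty marker files keeps the artifact tiny while still carrying everything the manifest job needs to reference the already-pushed layers.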
130
.github/workflows/release.yml
vendored
@@ -63,30 +63,20 @@ jobs:
|
||||
run: twine upload dist/*
|
||||
|
||||
deploy-docker:
|
||||
name: Build and publish ESPHome ${{ matrix.image.title}}
|
||||
name: Build ESPHome ${{ matrix.platform }}
|
||||
if: github.repository == 'esphome/esphome'
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: ${{ matrix.image.title == 'lint' }}
|
||||
needs: [init]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
image:
|
||||
- title: "ha-addon"
|
||||
suffix: "hassio"
|
||||
target: "hassio"
|
||||
baseimg: "hassio"
|
||||
- title: "docker"
|
||||
suffix: ""
|
||||
target: "docker"
|
||||
baseimg: "docker"
|
||||
- title: "lint"
|
||||
suffix: "lint"
|
||||
target: "lint"
|
||||
baseimg: "docker"
|
||||
platform:
|
||||
- linux/amd64
|
||||
- linux/arm/v7
|
||||
- linux/arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v4.1.1
|
||||
- name: Set up Python
|
||||
@@ -97,6 +87,7 @@ jobs:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
- name: Set up QEMU
|
||||
if: matrix.platform != 'linux/amd64'
|
||||
uses: docker/setup-qemu-action@v3.0.0
|
||||
|
||||
- name: Log in to docker hub
|
||||
@@ -111,34 +102,105 @@ jobs:
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build docker
|
||||
uses: ./.github/actions/build-image
|
||||
with:
|
||||
platform: ${{ matrix.platform }}
|
||||
target: docker
|
||||
baseimg: docker
|
||||
suffix: ""
|
||||
version: ${{ needs.init.outputs.tag }}
|
||||
|
||||
- name: Build ha-addon
|
||||
uses: ./.github/actions/build-image
|
||||
with:
|
||||
platform: ${{ matrix.platform }}
|
||||
target: hassio
|
||||
baseimg: hassio
|
||||
suffix: "hassio"
|
||||
version: ${{ needs.init.outputs.tag }}
|
||||
|
||||
- name: Build lint
|
||||
uses: ./.github/actions/build-image
|
||||
with:
|
||||
platform: ${{ matrix.platform }}
|
||||
target: lint
|
||||
baseimg: docker
|
||||
suffix: lint
|
||||
version: ${{ needs.init.outputs.tag }}
|
||||
|
||||
deploy-manifest:
|
||||
name: Publish ESPHome ${{ matrix.image.title }} to ${{ matrix.registry }}
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- init
|
||||
- deploy-docker
|
||||
if: github.repository == 'esphome/esphome'
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
image:
|
||||
- title: "ha-addon"
|
||||
target: "hassio"
|
||||
suffix: "hassio"
|
||||
- title: "docker"
|
||||
target: "docker"
|
||||
suffix: ""
|
||||
- title: "lint"
|
||||
target: "lint"
|
||||
suffix: "lint"
|
||||
registry:
|
||||
- ghcr
|
||||
- dockerhub
|
||||
steps:
|
||||
- uses: actions/checkout@v4.1.1
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v3.0.2
|
||||
with:
|
||||
name: digests-${{ matrix.image.target }}-${{ matrix.registry }}
|
||||
path: /tmp/digests
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
|
||||
- name: Log in to docker hub
|
||||
if: matrix.registry == 'dockerhub'
|
||||
uses: docker/login-action@v3.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Log in to the GitHub container registry
|
||||
if: matrix.registry == 'ghcr'
|
||||
uses: docker/login-action@v3.0.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate short tags
|
||||
id: tags
|
||||
run: |
|
||||
docker/generate_tags.py \
|
||||
output=$(docker/generate_tags.py \
|
||||
--tag "${{ needs.init.outputs.tag }}" \
|
||||
--suffix "${{ matrix.image.suffix }}"
|
||||
--suffix "${{ matrix.image.suffix }}" \
|
||||
--registry "${{ matrix.registry }}")
|
||||
echo $output
|
||||
for l in $output; do
|
||||
echo $l >> $GITHUB_OUTPUT
|
||||
done
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5.0.0
|
||||
with:
|
||||
context: .
|
||||
file: ./docker/Dockerfile
|
||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||
target: ${{ matrix.image.target }}
|
||||
push: true
|
||||
# yamllint disable rule:line-length
|
||||
cache-from: type=registry,ref=ghcr.io/${{ steps.tags.outputs.image }}:cache-${{ steps.tags.outputs.channel }}
|
||||
cache-to: type=registry,ref=ghcr.io/${{ steps.tags.outputs.image }}:cache-${{ steps.tags.outputs.channel }},mode=max
|
||||
# yamllint enable rule:line-length
|
||||
tags: ${{ steps.tags.outputs.tags }}
|
||||
build-args: |
|
||||
BASEIMGTYPE=${{ matrix.image.baseimg }}
|
||||
BUILD_VERSION=${{ needs.init.outputs.tag }}
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -Rcnr 'inputs | . / "," | map("-t " + .) | join(" ")' <<< "${{ steps.tags.outputs.tags}}") \
|
||||
$(printf '${{ steps.tags.outputs.image }}@sha256:%s ' *)
|
||||
|
||||
deploy-ha-addon-repo:
|
||||
if: github.repository == 'esphome/esphome' && github.event_name == 'release'
|
||||
runs-on: ubuntu-latest
|
||||
needs: [deploy-docker]
|
||||
needs: [deploy-manifest]
|
||||
steps:
|
||||
- name: Trigger Workflow
|
||||
uses: actions/github-script@v6.4.1
|
||||
|
||||
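The "Create manifest list and push" step above is dense shell: `jq` splits the comma-separated `tags` output into repeated `-t` flags and `printf` expands the downloaded digest marker files into `<image>@sha256:<digest>` arguments for `docker buildx imagetools create`. A Python sketch of the same argument assembly (not part of the diff; tag and digest values are placeholders):

```python
def imagetools_create_args(tags_csv: str, image: str, digest_names: list[str]) -> list[str]:
    """Assemble the 'docker buildx imagetools create' argument list."""
    args = ["docker", "buildx", "imagetools", "create"]
    for tag in tags_csv.split(","):            # jq: inputs / "," | map("-t " + .) | join(" ")
        args += ["-t", tag]
    for name in digest_names:                  # printf '<image>@sha256:%s ' *
        args.append(f"{image}@sha256:{name}")
    return args


print(imagetools_create_args(
    "ghcr.io/esphome/esphome:2023.12.0,ghcr.io/esphome/esphome:2023.12",  # placeholder tags output
    "ghcr.io/esphome/esphome",
    ["1111aa", "2222bb"],                                                 # placeholder digest file names
))
```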
@@ -68,7 +68,7 @@ ENV \
|
||||
# See: https://unix.stackexchange.com/questions/553743/correct-way-to-add-lib-ld-linux-so-3-in-debian
|
||||
RUN \
|
||||
if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
|
||||
ln -s /lib/arm-linux-gnueabihf/ld-linux.so.3 /lib/ld-linux.so.3; \
|
||||
ln -s /lib/arm-linux-gnueabihf/ld-linux-armhf.so.3 /lib/ld-linux.so.3; \
|
||||
fi
|
||||
|
||||
RUN \
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
#!/usr/bin/env python3
|
||||
import re
|
||||
import os
|
||||
import argparse
|
||||
import json
|
||||
|
||||
CHANNEL_DEV = "dev"
|
||||
CHANNEL_BETA = "beta"
|
||||
CHANNEL_RELEASE = "release"
|
||||
|
||||
GHCR = "ghcr"
|
||||
DOCKERHUB = "dockerhub"
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
"--tag",
|
||||
@@ -21,21 +22,31 @@ parser.add_argument(
|
||||
required=True,
|
||||
help="The suffix of the tag.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--registry",
|
||||
type=str,
|
||||
choices=[GHCR, DOCKERHUB],
|
||||
required=False,
|
||||
action="append",
|
||||
help="The registry to build tags for.",
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
args = parser.parse_args()
|
||||
|
||||
# detect channel from tag
|
||||
match = re.match(r"^(\d+\.\d+)(?:\.\d+)?(b\d+)?$", args.tag)
|
||||
match = re.match(r"^(\d+\.\d+)(?:\.\d+)(?:(b\d+)|(-dev\d+))?$", args.tag)
|
||||
major_minor_version = None
|
||||
if match is None:
|
||||
if match is None: # eg 2023.12.0-dev20231109-testbranch
|
||||
channel = None # Ran with custom tag for a branch etc
|
||||
elif match.group(3) is not None: # eg 2023.12.0-dev20231109
|
||||
channel = CHANNEL_DEV
|
||||
elif match.group(2) is None:
|
||||
elif match.group(2) is not None: # eg 2023.12.0b1
|
||||
channel = CHANNEL_BETA
|
||||
else: # eg 2023.12.0
|
||||
major_minor_version = match.group(1)
|
||||
channel = CHANNEL_RELEASE
|
||||
else:
|
||||
channel = CHANNEL_BETA
|
||||
|
||||
tags_to_push = [args.tag]
|
||||
if channel == CHANNEL_DEV:
|
||||
@@ -53,15 +64,28 @@ def main():
|
||||
|
||||
suffix = f"-{args.suffix}" if args.suffix else ""
|
||||
|
||||
with open(os.environ["GITHUB_OUTPUT"], "w") as f:
|
||||
print(f"channel={channel}", file=f)
|
||||
print(f"image=esphome/esphome{suffix}", file=f)
|
||||
full_tags = []
|
||||
image_name = f"esphome/esphome{suffix}"
|
||||
|
||||
for tag in tags_to_push:
|
||||
full_tags += [f"ghcr.io/esphome/esphome{suffix}:{tag}"]
|
||||
full_tags += [f"esphome/esphome{suffix}:{tag}"]
|
||||
print(f"tags={','.join(full_tags)}", file=f)
|
||||
print(f"channel={channel}")
|
||||
|
||||
if args.registry is None:
|
||||
args.registry = [GHCR, DOCKERHUB]
|
||||
elif len(args.registry) == 1:
|
||||
if GHCR in args.registry:
|
||||
print(f"image=ghcr.io/{image_name}")
|
||||
if DOCKERHUB in args.registry:
|
||||
print(f"image=docker.io/{image_name}")
|
||||
|
||||
print(f"image_name={image_name}")
|
||||
|
||||
full_tags = []
|
||||
|
||||
for tag in tags_to_push:
|
||||
if GHCR in args.registry:
|
||||
full_tags += [f"ghcr.io/{image_name}:{tag}"]
|
||||
if DOCKERHUB in args.registry:
|
||||
full_tags += [f"docker.io/{image_name}:{tag}"]
|
||||
print(f"tags={','.join(full_tags)}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
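The new regex above, `^(\d+\.\d+)(?:\.\d+)(?:(b\d+)|(-dev\d+))?$`, is what picks the channel: group 3 marks a dev build, group 2 a beta, a bare `X.Y.Z` is a release (and also gets the short `X.Y` tag), and anything that fails to match is treated as a custom branch tag with no channel. A standalone sketch of that classification using the same pattern (not part of the diff):

```python
from __future__ import annotations

import re

TAG_RE = re.compile(r"^(\d+\.\d+)(?:\.\d+)(?:(b\d+)|(-dev\d+))?$")  # same pattern as the diff


def detect_channel(tag: str) -> str | None:
    match = TAG_RE.match(tag)
    if match is None:        # e.g. 2023.12.0-dev20231109-testbranch: custom branch tag, no channel
        return None
    if match.group(3):       # e.g. 2023.12.0-dev20231109
        return "dev"
    if match.group(2):       # e.g. 2023.12.0b1
        return "beta"
    return "release"         # e.g. 2023.12.0; also gets the short 2023.12 tag


for tag in ("2023.12.0", "2023.12.0b1", "2023.12.0-dev20231109", "my-branch"):
    print(f"{tag} -> {detect_channel(tag)}")
```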
@@ -8,7 +8,6 @@ from typing import Any
|
||||
from aioesphomeapi import APIClient
|
||||
from aioesphomeapi.api_pb2 import SubscribeLogsResponse
|
||||
from aioesphomeapi.log_runner import async_run
|
||||
from zeroconf.asyncio import AsyncZeroconf
|
||||
|
||||
from esphome.const import CONF_KEY, CONF_PASSWORD, CONF_PORT, __version__
|
||||
from esphome.core import CORE
|
||||
@@ -18,24 +17,22 @@ from . import CONF_ENCRYPTION
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_run_logs(config, address):
|
||||
async def async_run_logs(config: dict[str, Any], address: str) -> None:
|
||||
"""Run the logs command in the event loop."""
|
||||
conf = config["api"]
|
||||
name = config["esphome"]["name"]
|
||||
port: int = int(conf[CONF_PORT])
|
||||
password: str = conf[CONF_PASSWORD]
|
||||
noise_psk: str | None = None
|
||||
if CONF_ENCRYPTION in conf:
|
||||
noise_psk = conf[CONF_ENCRYPTION][CONF_KEY]
|
||||
_LOGGER.info("Starting log output from %s using esphome API", address)
|
||||
aiozc = AsyncZeroconf()
|
||||
|
||||
cli = APIClient(
|
||||
address,
|
||||
port,
|
||||
password,
|
||||
client_info=f"ESPHome Logs {__version__}",
|
||||
noise_psk=noise_psk,
|
||||
zeroconf_instance=aiozc.zeroconf,
|
||||
)
|
||||
dashboard = CORE.dashboard
|
||||
|
||||
@@ -48,12 +45,10 @@ async def async_run_logs(config, address):
|
||||
text = text.replace("\033", "\\033")
|
||||
print(f"[{time_.hour:02}:{time_.minute:02}:{time_.second:02}]{text}")
|
||||
|
||||
stop = await async_run(cli, on_log, aio_zeroconf_instance=aiozc)
|
||||
stop = await async_run(cli, on_log, name=name)
|
||||
try:
|
||||
while True:
|
||||
await asyncio.sleep(60)
|
||||
await asyncio.Event().wait()
|
||||
finally:
|
||||
await aiozc.async_close()
|
||||
await stop()
|
||||
|
||||
|
||||
|
||||
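The logs-runner change above swaps the old `while True: await asyncio.sleep(60)` loop for `await asyncio.Event().wait()`: waiting on an event that nobody sets parks the coroutine until the task is cancelled, with no periodic wakeups, and cleanup still runs in the `finally` block. A tiny self-contained sketch of that pattern (not part of the diff; names are illustrative):

```python
import asyncio


async def run_until_cancelled() -> None:
    try:
        # An Event that is never set: wait() only returns when the task is cancelled.
        await asyncio.Event().wait()
    finally:
        print("cleanup runs here, cancelled or not")


async def main() -> None:
    task = asyncio.create_task(run_until_cancelled())
    await asyncio.sleep(0.1)
    task.cancel()
    try:
        await task
    except asyncio.CancelledError:
        pass


asyncio.run(main())
```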
@@ -3,23 +3,26 @@ from typing import Union, Optional
|
||||
from pathlib import Path
|
||||
import logging
|
||||
import os
|
||||
import esphome.final_validate as fv
|
||||
|
||||
from esphome.helpers import copy_file_if_changed, write_file_if_changed, mkdir_p
|
||||
from esphome.const import (
|
||||
CONF_ADVANCED,
|
||||
CONF_BOARD,
|
||||
CONF_COMPONENTS,
|
||||
CONF_ESPHOME,
|
||||
CONF_FRAMEWORK,
|
||||
CONF_IGNORE_EFUSE_MAC_CRC,
|
||||
CONF_NAME,
|
||||
CONF_PATH,
|
||||
CONF_PLATFORMIO_OPTIONS,
|
||||
CONF_REF,
|
||||
CONF_REFRESH,
|
||||
CONF_SOURCE,
|
||||
CONF_TYPE,
|
||||
CONF_URL,
|
||||
CONF_VARIANT,
|
||||
CONF_VERSION,
|
||||
CONF_ADVANCED,
|
||||
CONF_REFRESH,
|
||||
CONF_PATH,
|
||||
CONF_URL,
|
||||
CONF_REF,
|
||||
CONF_IGNORE_EFUSE_MAC_CRC,
|
||||
KEY_CORE,
|
||||
KEY_FRAMEWORK_VERSION,
|
||||
KEY_NAME,
|
||||
@@ -327,6 +330,32 @@ def _detect_variant(value):
|
||||
return value
|
||||
|
||||
|
||||
def final_validate(config):
|
||||
if CONF_PLATFORMIO_OPTIONS not in fv.full_config.get()[CONF_ESPHOME]:
|
||||
return config
|
||||
|
||||
pio_flash_size_key = "board_upload.flash_size"
|
||||
pio_partitions_key = "board_build.partitions"
|
||||
if (
|
||||
CONF_PARTITIONS in config
|
||||
and pio_partitions_key
|
||||
in fv.full_config.get()[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS]
|
||||
):
|
||||
raise cv.Invalid(
|
||||
f"Do not specify '{pio_partitions_key}' in '{CONF_PLATFORMIO_OPTIONS}' with '{CONF_PARTITIONS}' in esp32"
|
||||
)
|
||||
|
||||
if (
|
||||
pio_flash_size_key
|
||||
in fv.full_config.get()[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS]
|
||||
):
|
||||
raise cv.Invalid(
|
||||
f"Please specify {CONF_FLASH_SIZE} within esp32 configuration only"
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
CONF_PLATFORM_VERSION = "platform_version"
|
||||
|
||||
ARDUINO_FRAMEWORK_SCHEMA = cv.All(
|
||||
@@ -340,6 +369,13 @@ ARDUINO_FRAMEWORK_SCHEMA = cv.All(
|
||||
_arduino_check_versions,
|
||||
)
|
||||
|
||||
|
||||
def _check_component_type(config):
|
||||
if config[CONF_SOURCE][CONF_TYPE] == TYPE_LOCAL:
|
||||
raise cv.Invalid("Local components are not implemented yet.")
|
||||
return config
|
||||
|
||||
|
||||
CONF_SDKCONFIG_OPTIONS = "sdkconfig_options"
|
||||
ESP_IDF_FRAMEWORK_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
@@ -356,15 +392,18 @@ ESP_IDF_FRAMEWORK_SCHEMA = cv.All(
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.Required(CONF_NAME): cv.string_strict,
|
||||
cv.Required(CONF_SOURCE): cv.SOURCE_SCHEMA,
|
||||
cv.Optional(CONF_PATH): cv.string,
|
||||
cv.Optional(CONF_REFRESH, default="1d"): cv.All(
|
||||
cv.string, cv.source_refresh
|
||||
),
|
||||
}
|
||||
cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.Required(CONF_NAME): cv.string_strict,
|
||||
cv.Required(CONF_SOURCE): cv.SOURCE_SCHEMA,
|
||||
cv.Optional(CONF_PATH): cv.string,
|
||||
cv.Optional(CONF_REFRESH, default="1d"): cv.All(
|
||||
cv.string, cv.source_refresh
|
||||
),
|
||||
}
|
||||
),
|
||||
_check_component_type,
|
||||
)
|
||||
),
|
||||
}
|
||||
@@ -387,6 +426,7 @@ FRAMEWORK_SCHEMA = cv.typed_schema(
|
||||
|
||||
|
||||
FLASH_SIZES = [
|
||||
"2MB",
|
||||
"4MB",
|
||||
"8MB",
|
||||
"16MB",
|
||||
@@ -394,6 +434,7 @@ FLASH_SIZES = [
|
||||
]
|
||||
|
||||
CONF_FLASH_SIZE = "flash_size"
|
||||
CONF_PARTITIONS = "partitions"
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
@@ -401,6 +442,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Optional(CONF_FLASH_SIZE, default="4MB"): cv.one_of(
|
||||
*FLASH_SIZES, upper=True
|
||||
),
|
||||
cv.Optional(CONF_PARTITIONS): cv.file_,
|
||||
cv.Optional(CONF_VARIANT): cv.one_of(*VARIANTS, upper=True),
|
||||
cv.Optional(CONF_FRAMEWORK, default={}): FRAMEWORK_SCHEMA,
|
||||
}
|
||||
@@ -410,6 +452,9 @@ CONFIG_SCHEMA = cv.All(
|
||||
)
|
||||
|
||||
|
||||
FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate)
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
cg.add_platformio_option("board", config[CONF_BOARD])
|
||||
cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE])
|
||||
@@ -462,7 +507,10 @@ async def to_code(config):
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU0", False)
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU1", False)
|
||||
|
||||
cg.add_platformio_option("board_build.partitions", "partitions.csv")
|
||||
if CONF_PARTITIONS in config:
|
||||
cg.add_platformio_option("board_build.partitions", config[CONF_PARTITIONS])
|
||||
else:
|
||||
cg.add_platformio_option("board_build.partitions", "partitions.csv")
|
||||
|
||||
for name, value in conf[CONF_SDKCONFIG_OPTIONS].items():
|
||||
add_idf_sdkconfig_option(name, RawSdkconfigValue(value))
|
||||
@@ -507,7 +555,10 @@ async def to_code(config):
|
||||
[f"platformio/framework-arduinoespressif32@{conf[CONF_SOURCE]}"],
|
||||
)
|
||||
|
||||
cg.add_platformio_option("board_build.partitions", "partitions.csv")
|
||||
if CONF_PARTITIONS in config:
|
||||
cg.add_platformio_option("board_build.partitions", config[CONF_PARTITIONS])
|
||||
else:
|
||||
cg.add_platformio_option("board_build.partitions", "partitions.csv")
|
||||
|
||||
cg.add_define(
|
||||
"USE_ARDUINO_VERSION_CODE",
|
||||
@@ -518,6 +569,7 @@ async def to_code(config):
|
||||
|
||||
|
||||
APP_PARTITION_SIZES = {
|
||||
"2MB": 0x0C0000, # 768 KB
|
||||
"4MB": 0x1C0000, # 1792 KB
|
||||
"8MB": 0x3C0000, # 3840 KB
|
||||
"16MB": 0x7C0000, # 7936 KB
|
||||
|
||||
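The new `final_validate` above rejects configurations that set flash size or a partitions file through `esphome: platformio_options:` now that `flash_size` and `partitions` are options of the `esp32:` block itself. A plain-dict sketch of the same two checks, outside esphome's validation framework (not part of the diff; keys and messages follow the code above, the helper itself is illustrative):

```python
def validate_esp32_flash_options(full_config: dict) -> None:
    """Reject platformio_options that duplicate the new esp32 flash_size/partitions options."""
    esp32 = full_config.get("esp32", {})
    pio = full_config.get("esphome", {}).get("platformio_options", {})
    if "partitions" in esp32 and "board_build.partitions" in pio:
        raise ValueError(
            "Do not specify 'board_build.partitions' in 'platformio_options' with 'partitions' in esp32"
        )
    if "board_upload.flash_size" in pio:
        raise ValueError("Please specify flash_size within esp32 configuration only")


# Passes: flash size and partitions file live in the esp32 block only.
validate_esp32_flash_options(
    {
        "esphome": {"platformio_options": {}},
        "esp32": {"flash_size": "8MB", "partitions": "custom_partitions.csv"},
    }
)
```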
@@ -1,5 +1,6 @@
|
||||
#include "my9231.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace my9231 {
|
||||
@@ -51,7 +52,11 @@ void MY9231OutputComponent::setup() {
|
||||
MY9231_CMD_SCATTER_APDM | MY9231_CMD_FREQUENCY_DIVIDE_1 | MY9231_CMD_REACTION_FAST | MY9231_CMD_ONE_SHOT_DISABLE;
|
||||
ESP_LOGV(TAG, " Command: 0x%02X", command);
|
||||
|
||||
this->init_chips_(command);
|
||||
{
|
||||
InterruptLock lock;
|
||||
this->send_dcki_pulses_(32 * this->num_chips_);
|
||||
this->init_chips_(command);
|
||||
}
|
||||
ESP_LOGV(TAG, " Chips initialized.");
|
||||
}
|
||||
void MY9231OutputComponent::dump_config() {
|
||||
@@ -66,11 +71,14 @@ void MY9231OutputComponent::loop() {
|
||||
if (!this->update_)
|
||||
return;
|
||||
|
||||
for (auto pwm_amount : this->pwm_amounts_) {
|
||||
this->write_word_(pwm_amount, this->bit_depth_);
|
||||
{
|
||||
InterruptLock lock;
|
||||
for (auto pwm_amount : this->pwm_amounts_) {
|
||||
this->write_word_(pwm_amount, this->bit_depth_);
|
||||
}
|
||||
// Send 8 DI pulses. After 8 falling edges, the duty data are store.
|
||||
this->send_di_pulses_(8);
|
||||
}
|
||||
// Send 8 DI pulses. After 8 falling edges, the duty data are store.
|
||||
this->send_di_pulses_(8);
|
||||
this->update_ = false;
|
||||
}
|
||||
void MY9231OutputComponent::set_channel_value_(uint8_t channel, uint16_t value) {
|
||||
@@ -92,6 +100,7 @@ void MY9231OutputComponent::init_chips_(uint8_t command) {
|
||||
// Send 16 DI pulse. After 14 falling edges, the command data are
|
||||
// stored and after 16 falling edges the duty mode is activated.
|
||||
this->send_di_pulses_(16);
|
||||
delayMicroseconds(12);
|
||||
}
|
||||
void MY9231OutputComponent::write_word_(uint16_t value, uint8_t bits) {
|
||||
for (uint8_t i = bits; i > 0; i--) {
|
||||
@@ -106,6 +115,13 @@ void MY9231OutputComponent::send_di_pulses_(uint8_t count) {
|
||||
this->pin_di_->digital_write(false);
|
||||
}
|
||||
}
|
||||
void MY9231OutputComponent::send_dcki_pulses_(uint8_t count) {
|
||||
delayMicroseconds(12);
|
||||
for (uint8_t i = 0; i < count; i++) {
|
||||
this->pin_dcki_->digital_write(true);
|
||||
this->pin_dcki_->digital_write(false);
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace my9231
|
||||
} // namespace esphome
|
||||
|
||||
@@ -49,6 +49,7 @@ class MY9231OutputComponent : public Component {
|
||||
void init_chips_(uint8_t command);
|
||||
void write_word_(uint16_t value, uint8_t bits);
|
||||
void send_di_pulses_(uint8_t count);
|
||||
void send_dcki_pulses_(uint8_t count);
|
||||
|
||||
GPIOPin *pin_di_;
|
||||
GPIOPin *pin_dcki_;
|
||||
|
||||
@@ -33,6 +33,7 @@ MODELS = {
|
||||
"SH1106_96X16": SSD1306Model.SH1106_MODEL_96_16,
|
||||
"SH1106_64X48": SSD1306Model.SH1106_MODEL_64_48,
|
||||
"SH1107_128X64": SSD1306Model.SH1107_MODEL_128_64,
|
||||
"SH1107_128X128": SSD1306Model.SH1107_MODEL_128_128,
|
||||
"SSD1305_128X32": SSD1306Model.SSD1305_MODEL_128_32,
|
||||
"SSD1305_128X64": SSD1306Model.SSD1305_MODEL_128_64,
|
||||
}
|
||||
@@ -63,8 +64,10 @@ SSD1306_SCHEMA = display.FULL_DISPLAY_SCHEMA.extend(
|
||||
cv.Optional(CONF_EXTERNAL_VCC): cv.boolean,
|
||||
cv.Optional(CONF_FLIP_X, default=True): cv.boolean,
|
||||
cv.Optional(CONF_FLIP_Y, default=True): cv.boolean,
|
||||
cv.Optional(CONF_OFFSET_X, default=0): cv.int_range(min=-32, max=32),
|
||||
cv.Optional(CONF_OFFSET_Y, default=0): cv.int_range(min=-32, max=32),
|
||||
# Offsets determine shifts of memory location to LCD rows/columns,
|
||||
# and this family of controllers supports up to 128x128 screens
|
||||
cv.Optional(CONF_OFFSET_X, default=0): cv.int_range(min=0, max=128),
|
||||
cv.Optional(CONF_OFFSET_Y, default=0): cv.int_range(min=0, max=128),
|
||||
cv.Optional(CONF_INVERT, default=False): cv.boolean,
|
||||
}
|
||||
).extend(cv.polling_component_schema("1s"))
|
||||
|
||||
@@ -35,16 +35,31 @@ static const uint8_t SSD1306_COMMAND_INVERSE_DISPLAY = 0xA7;
|
||||
static const uint8_t SSD1305_COMMAND_SET_BRIGHTNESS = 0x82;
|
||||
static const uint8_t SSD1305_COMMAND_SET_AREA_COLOR = 0xD8;
|
||||
|
||||
static const uint8_t SH1107_COMMAND_SET_START_LINE = 0xDC;
|
||||
static const uint8_t SH1107_COMMAND_CHARGE_PUMP = 0xAD;
|
||||
|
||||
void SSD1306::setup() {
|
||||
this->init_internal_(this->get_buffer_length_());
|
||||
|
||||
// SH1107 resources
|
||||
//
|
||||
// Datasheet v2.3:
|
||||
// www.displayfuture.com/Display/datasheet/controller/SH1107.pdf
|
||||
// Adafruit C++ driver:
|
||||
// github.com/adafruit/Adafruit_SH110x
|
||||
// Adafruit CircuitPython driver:
|
||||
// github.com/adafruit/Adafruit_CircuitPython_DisplayIO_SH1107
|
||||
|
||||
// Turn off display during initialization (0xAE)
|
||||
this->command(SSD1306_COMMAND_DISPLAY_OFF);
|
||||
|
||||
// Set oscillator frequency to 4'b1000 with no clock division (0xD5)
|
||||
this->command(SSD1306_COMMAND_SET_DISPLAY_CLOCK_DIV);
|
||||
// Oscillator frequency <= 4'b1000, no clock division
|
||||
this->command(0x80);
|
||||
// If SH1107, use POR defaults (0x50) = divider 1, frequency +0%
|
||||
if (!this->is_sh1107_()) {
|
||||
// Set oscillator frequency to 4'b1000 with no clock division (0xD5)
|
||||
this->command(SSD1306_COMMAND_SET_DISPLAY_CLOCK_DIV);
|
||||
// Oscillator frequency <= 4'b1000, no clock division
|
||||
this->command(0x80);
|
||||
}
|
||||
|
||||
// Enable low power display mode for SSD1305 (0xD8)
|
||||
if (this->is_ssd1305_()) {
|
||||
@@ -60,11 +75,26 @@ void SSD1306::setup() {
|
||||
this->command(SSD1306_COMMAND_SET_DISPLAY_OFFSET_Y);
|
||||
this->command(0x00 + this->offset_y_);
|
||||
|
||||
// Set start line at line 0 (0x40)
|
||||
this->command(SSD1306_COMMAND_SET_START_LINE | 0x00);
|
||||
if (this->is_sh1107_()) {
|
||||
// Set start line at line 0 (0xDC)
|
||||
this->command(SH1107_COMMAND_SET_START_LINE);
|
||||
this->command(0x00);
|
||||
} else {
|
||||
// Set start line at line 0 (0x40)
|
||||
this->command(SSD1306_COMMAND_SET_START_LINE | 0x00);
|
||||
}
|
||||
|
||||
// SSD1305 does not have charge pump
|
||||
if (!this->is_ssd1305_()) {
|
||||
if (this->is_ssd1305_()) {
|
||||
// SSD1305 does not have charge pump
|
||||
} else if (this->is_sh1107_()) {
|
||||
// Enable charge pump (0xAD)
|
||||
this->command(SH1107_COMMAND_CHARGE_PUMP);
|
||||
if (this->external_vcc_) {
|
||||
this->command(0x8A);
|
||||
} else {
|
||||
this->command(0x8B);
|
||||
}
|
||||
} else {
|
||||
// Enable charge pump (0x8D)
|
||||
this->command(SSD1306_COMMAND_CHARGE_PUMP);
|
||||
if (this->external_vcc_) {
|
||||
@@ -76,34 +106,41 @@ void SSD1306::setup() {
|
||||
|
||||
// Set addressing mode to horizontal (0x20)
|
||||
this->command(SSD1306_COMMAND_MEMORY_MODE);
|
||||
this->command(0x00);
|
||||
|
||||
if (!this->is_sh1107_()) {
|
||||
// SH1107 memory mode is a 1 byte command
|
||||
this->command(0x00);
|
||||
}
|
||||
// X flip mode (0xA0, 0xA1)
|
||||
this->command(SSD1306_COMMAND_SEGRE_MAP | this->flip_x_);
|
||||
|
||||
// Y flip mode (0xC0, 0xC8)
|
||||
this->command(SSD1306_COMMAND_COM_SCAN_INC | (this->flip_y_ << 3));
|
||||
|
||||
// Set pin configuration (0xDA)
|
||||
this->command(SSD1306_COMMAND_SET_COM_PINS);
|
||||
switch (this->model_) {
|
||||
case SSD1306_MODEL_128_32:
|
||||
case SH1106_MODEL_128_32:
|
||||
case SSD1306_MODEL_96_16:
|
||||
case SH1106_MODEL_96_16:
|
||||
this->command(0x02);
|
||||
break;
|
||||
case SSD1306_MODEL_128_64:
|
||||
case SH1106_MODEL_128_64:
|
||||
case SSD1306_MODEL_64_48:
|
||||
case SSD1306_MODEL_64_32:
|
||||
case SH1106_MODEL_64_48:
|
||||
case SH1107_MODEL_128_64:
|
||||
case SSD1305_MODEL_128_32:
|
||||
case SSD1305_MODEL_128_64:
|
||||
case SSD1306_MODEL_72_40:
|
||||
this->command(0x12);
|
||||
break;
|
||||
if (!this->is_sh1107_()) {
|
||||
// Set pin configuration (0xDA)
|
||||
this->command(SSD1306_COMMAND_SET_COM_PINS);
|
||||
switch (this->model_) {
|
||||
case SSD1306_MODEL_128_32:
|
||||
case SH1106_MODEL_128_32:
|
||||
case SSD1306_MODEL_96_16:
|
||||
case SH1106_MODEL_96_16:
|
||||
this->command(0x02);
|
||||
break;
|
||||
case SSD1306_MODEL_128_64:
|
||||
case SH1106_MODEL_128_64:
|
||||
case SSD1306_MODEL_64_48:
|
||||
case SSD1306_MODEL_64_32:
|
||||
case SH1106_MODEL_64_48:
|
||||
case SSD1305_MODEL_128_32:
|
||||
case SSD1305_MODEL_128_64:
|
||||
case SSD1306_MODEL_72_40:
|
||||
this->command(0x12);
|
||||
break;
|
||||
case SH1107_MODEL_128_64:
|
||||
case SH1107_MODEL_128_128:
|
||||
// Not used, but prevents build warning
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Pre-charge period (0xD9)
|
||||
@@ -118,6 +155,7 @@ void SSD1306::setup() {
|
||||
this->command(SSD1306_COMMAND_SET_VCOM_DETECT);
|
||||
switch (this->model_) {
|
||||
case SH1107_MODEL_128_64:
|
||||
case SH1107_MODEL_128_128:
|
||||
this->command(0x35);
|
||||
break;
|
||||
case SSD1306_MODEL_72_40:
|
||||
@@ -149,7 +187,7 @@ void SSD1306::setup() {
|
||||
this->turn_on();
|
||||
}
|
||||
void SSD1306::display() {
|
||||
if (this->is_sh1106_()) {
|
||||
if (this->is_sh1106_() || this->is_sh1107_()) {
|
||||
this->write_display_data();
|
||||
return;
|
||||
}
|
||||
@@ -183,6 +221,7 @@ bool SSD1306::is_sh1106_() const {
|
||||
return this->model_ == SH1106_MODEL_96_16 || this->model_ == SH1106_MODEL_128_32 ||
|
||||
this->model_ == SH1106_MODEL_128_64;
|
||||
}
|
||||
bool SSD1306::is_sh1107_() const { return this->model_ == SH1107_MODEL_128_64 || this->model_ == SH1107_MODEL_128_128; }
|
||||
bool SSD1306::is_ssd1305_() const {
|
||||
return this->model_ == SSD1305_MODEL_128_64 || this->model_ == SSD1305_MODEL_128_64;
|
||||
}
|
||||
@@ -224,6 +263,7 @@ void SSD1306::turn_off() {
|
||||
int SSD1306::get_height_internal() {
|
||||
switch (this->model_) {
|
||||
case SH1107_MODEL_128_64:
|
||||
case SH1107_MODEL_128_128:
|
||||
return 128;
|
||||
case SSD1306_MODEL_128_32:
|
||||
case SSD1306_MODEL_64_32:
|
||||
@@ -254,6 +294,7 @@ int SSD1306::get_width_internal() {
|
||||
case SH1106_MODEL_128_64:
|
||||
case SSD1305_MODEL_128_32:
|
||||
case SSD1305_MODEL_128_64:
|
||||
case SH1107_MODEL_128_128:
|
||||
return 128;
|
||||
case SSD1306_MODEL_96_16:
|
||||
case SH1106_MODEL_96_16:
|
||||
|
||||
@@ -19,6 +19,7 @@ enum SSD1306Model {
|
||||
SH1106_MODEL_96_16,
|
||||
SH1106_MODEL_64_48,
|
||||
SH1107_MODEL_128_64,
|
||||
SH1107_MODEL_128_128,
|
||||
SSD1305_MODEL_128_32,
|
||||
SSD1305_MODEL_128_64,
|
||||
};
|
||||
@@ -58,6 +59,7 @@ class SSD1306 : public PollingComponent, public display::DisplayBuffer {
|
||||
void init_reset_();
|
||||
|
||||
bool is_sh1106_() const;
|
||||
bool is_sh1107_() const;
|
||||
bool is_ssd1305_() const;
|
||||
|
||||
void draw_absolute_pixel_internal(int x, int y, Color color) override;
|
||||
|
||||
@@ -38,13 +38,19 @@ void I2CSSD1306::dump_config() {
|
||||
}
|
||||
void I2CSSD1306::command(uint8_t value) { this->write_byte(0x00, value); }
|
||||
void HOT I2CSSD1306::write_display_data() {
|
||||
if (this->is_sh1106_()) {
|
||||
if (this->is_sh1106_() || this->is_sh1107_()) {
|
||||
uint32_t i = 0;
|
||||
for (uint8_t page = 0; page < (uint8_t) this->get_height_internal() / 8; page++) {
|
||||
this->command(0xB0 + page); // row
|
||||
this->command(0x02); // lower column
|
||||
this->command(0x10); // higher column
|
||||
|
||||
if (this->is_sh1106_()) {
|
||||
this->command(0x02); // lower column - 0x02 is historical SH1106 value
|
||||
} else {
|
||||
// Other SH1107 drivers use 0x00
|
||||
// Column values dont change and it seems they can be set only once,
|
||||
// but we follow SH1106 implementation and resend them
|
||||
this->command(0x00);
|
||||
}
|
||||
this->command(0x10); // higher column
|
||||
for (uint8_t x = 0; x < (uint8_t) this->get_width_internal() / 16; x++) {
|
||||
uint8_t data[16];
|
||||
for (uint8_t &j : data)
|
||||
|
||||
@@ -36,10 +36,14 @@ void SPISSD1306::command(uint8_t value) {
|
||||
this->disable();
|
||||
}
|
||||
void HOT SPISSD1306::write_display_data() {
|
||||
if (this->is_sh1106_()) {
|
||||
if (this->is_sh1106_() || this->is_sh1107_()) {
|
||||
for (uint8_t y = 0; y < (uint8_t) this->get_height_internal() / 8; y++) {
|
||||
this->command(0xB0 + y);
|
||||
this->command(0x02);
|
||||
if (this->is_sh1106_()) {
|
||||
this->command(0x02);
|
||||
} else {
|
||||
this->command(0x00);
|
||||
}
|
||||
this->command(0x10);
|
||||
this->dc_pin_->digital_write(true);
|
||||
for (uint8_t x = 0; x < (uint8_t) this->get_width_internal(); x++) {
|
||||
|
||||
@@ -18,20 +18,25 @@ DEPENDENCIES = ["api", "microphone"]
|
||||
|
||||
CODEOWNERS = ["@jesserockz"]
|
||||
|
||||
CONF_SILENCE_DETECTION = "silence_detection"
|
||||
CONF_ON_LISTENING = "on_listening"
|
||||
CONF_ON_START = "on_start"
|
||||
CONF_ON_WAKE_WORD_DETECTED = "on_wake_word_detected"
|
||||
CONF_ON_STT_END = "on_stt_end"
|
||||
CONF_ON_TTS_START = "on_tts_start"
|
||||
CONF_ON_TTS_END = "on_tts_end"
|
||||
CONF_ON_END = "on_end"
|
||||
CONF_ON_ERROR = "on_error"
|
||||
CONF_ON_INTENT_END = "on_intent_end"
|
||||
CONF_ON_INTENT_START = "on_intent_start"
|
||||
CONF_ON_LISTENING = "on_listening"
|
||||
CONF_ON_START = "on_start"
|
||||
CONF_ON_STT_END = "on_stt_end"
|
||||
CONF_ON_STT_VAD_END = "on_stt_vad_end"
|
||||
CONF_ON_STT_VAD_START = "on_stt_vad_start"
|
||||
CONF_ON_TTS_END = "on_tts_end"
|
||||
CONF_ON_TTS_START = "on_tts_start"
|
||||
CONF_ON_WAKE_WORD_DETECTED = "on_wake_word_detected"
|
||||
|
||||
CONF_SILENCE_DETECTION = "silence_detection"
|
||||
CONF_USE_WAKE_WORD = "use_wake_word"
|
||||
CONF_VAD_THRESHOLD = "vad_threshold"
|
||||
|
||||
CONF_NOISE_SUPPRESSION_LEVEL = "noise_suppression_level"
|
||||
CONF_AUTO_GAIN = "auto_gain"
|
||||
CONF_NOISE_SUPPRESSION_LEVEL = "noise_suppression_level"
|
||||
CONF_VOLUME_MULTIPLIER = "volume_multiplier"
|
||||
|
||||
|
||||
@@ -88,6 +93,18 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Optional(CONF_ON_CLIENT_DISCONNECTED): automation.validate_automation(
|
||||
single=True
|
||||
),
|
||||
cv.Optional(CONF_ON_INTENT_START): automation.validate_automation(
|
||||
single=True
|
||||
),
|
||||
cv.Optional(CONF_ON_INTENT_END): automation.validate_automation(
|
||||
single=True
|
||||
),
|
||||
cv.Optional(CONF_ON_STT_VAD_START): automation.validate_automation(
|
||||
single=True
|
||||
),
|
||||
cv.Optional(CONF_ON_STT_VAD_END): automation.validate_automation(
|
||||
single=True
|
||||
),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA),
|
||||
)
|
||||
@@ -177,6 +194,34 @@ async def to_code(config):
|
||||
config[CONF_ON_CLIENT_DISCONNECTED],
|
||||
)
|
||||
|
||||
if CONF_ON_INTENT_START in config:
|
||||
await automation.build_automation(
|
||||
var.get_intent_start_trigger(),
|
||||
[],
|
||||
config[CONF_ON_INTENT_START],
|
||||
)
|
||||
|
||||
if CONF_ON_INTENT_END in config:
|
||||
await automation.build_automation(
|
||||
var.get_intent_end_trigger(),
|
||||
[],
|
||||
config[CONF_ON_INTENT_END],
|
||||
)
|
||||
|
||||
if CONF_ON_STT_VAD_START in config:
|
||||
await automation.build_automation(
|
||||
var.get_stt_vad_start_trigger(),
|
||||
[],
|
||||
config[CONF_ON_STT_VAD_START],
|
||||
)
|
||||
|
||||
if CONF_ON_STT_VAD_END in config:
|
||||
await automation.build_automation(
|
||||
var.get_stt_vad_end_trigger(),
|
||||
[],
|
||||
config[CONF_ON_STT_VAD_END],
|
||||
)
|
||||
|
||||
cg.add_define("USE_VOICE_ASSISTANT")
|
||||
|
||||
|
||||
|
||||
@@ -31,7 +31,7 @@ void VoiceAssistant::setup() {
|
||||
|
||||
this->socket_ = socket::socket(AF_INET, SOCK_DGRAM, IPPROTO_IP);
|
||||
if (socket_ == nullptr) {
|
||||
ESP_LOGW(TAG, "Could not create socket.");
|
||||
ESP_LOGW(TAG, "Could not create socket");
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
@@ -69,7 +69,7 @@ void VoiceAssistant::setup() {
|
||||
ExternalRAMAllocator<uint8_t> speaker_allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
|
||||
this->speaker_buffer_ = speaker_allocator.allocate(SPEAKER_BUFFER_SIZE);
|
||||
if (this->speaker_buffer_ == nullptr) {
|
||||
ESP_LOGW(TAG, "Could not allocate speaker buffer.");
|
||||
ESP_LOGW(TAG, "Could not allocate speaker buffer");
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
@@ -79,7 +79,7 @@ void VoiceAssistant::setup() {
|
||||
ExternalRAMAllocator<int16_t> allocator(ExternalRAMAllocator<int16_t>::ALLOW_FAILURE);
|
||||
this->input_buffer_ = allocator.allocate(INPUT_BUFFER_SIZE);
|
||||
if (this->input_buffer_ == nullptr) {
|
||||
ESP_LOGW(TAG, "Could not allocate input buffer.");
|
||||
ESP_LOGW(TAG, "Could not allocate input buffer");
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
@@ -89,7 +89,7 @@ void VoiceAssistant::setup() {
|
||||
|
||||
this->ring_buffer_ = rb_create(BUFFER_SIZE, sizeof(int16_t));
|
||||
if (this->ring_buffer_ == nullptr) {
|
||||
ESP_LOGW(TAG, "Could not allocate ring buffer.");
|
||||
ESP_LOGW(TAG, "Could not allocate ring buffer");
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
@@ -98,7 +98,7 @@ void VoiceAssistant::setup() {
|
||||
ExternalRAMAllocator<uint8_t> send_allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
|
||||
this->send_buffer_ = send_allocator.allocate(SEND_BUFFER_SIZE);
|
||||
if (send_buffer_ == nullptr) {
|
||||
ESP_LOGW(TAG, "Could not allocate send buffer.");
|
||||
ESP_LOGW(TAG, "Could not allocate send buffer");
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
@@ -221,8 +221,8 @@ void VoiceAssistant::loop() {
|
||||
msg.audio_settings = audio_settings;
|
||||
|
||||
if (this->api_client_ == nullptr || !this->api_client_->send_voice_assistant_request(msg)) {
|
||||
ESP_LOGW(TAG, "Could not request start.");
|
||||
this->error_trigger_->trigger("not-connected", "Could not request start.");
|
||||
ESP_LOGW(TAG, "Could not request start");
|
||||
this->error_trigger_->trigger("not-connected", "Could not request start");
|
||||
this->continuous_ = false;
|
||||
this->set_state_(State::IDLE, State::IDLE);
|
||||
break;
|
||||
@@ -280,7 +280,7 @@ void VoiceAssistant::loop() {
|
||||
this->speaker_buffer_size_ += len;
|
||||
}
|
||||
} else {
|
||||
ESP_LOGW(TAG, "Receive buffer full.");
|
||||
ESP_LOGW(TAG, "Receive buffer full");
|
||||
}
|
||||
if (this->speaker_buffer_size_ > 0) {
|
||||
size_t written = this->speaker_->play(this->speaker_buffer_, this->speaker_buffer_size_);
|
||||
@@ -290,7 +290,7 @@ void VoiceAssistant::loop() {
|
||||
this->speaker_buffer_index_ -= written;
|
||||
this->set_timeout("speaker-timeout", 2000, [this]() { this->speaker_->stop(); });
|
||||
} else {
|
||||
ESP_LOGW(TAG, "Speaker buffer full.");
|
||||
ESP_LOGW(TAG, "Speaker buffer full");
|
||||
}
|
||||
}
|
||||
if (this->wait_for_stream_end_) {
|
||||
@@ -513,7 +513,7 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
|
||||
break;
|
||||
}
|
||||
case api::enums::VOICE_ASSISTANT_STT_START:
|
||||
ESP_LOGD(TAG, "STT Started");
|
||||
ESP_LOGD(TAG, "STT started");
|
||||
this->listening_trigger_->trigger();
|
||||
break;
|
||||
case api::enums::VOICE_ASSISTANT_STT_END: {
|
||||
@@ -525,19 +525,24 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
|
||||
}
|
||||
}
|
||||
if (text.empty()) {
|
||||
ESP_LOGW(TAG, "No text in STT_END event.");
|
||||
ESP_LOGW(TAG, "No text in STT_END event");
|
||||
return;
|
||||
}
|
||||
ESP_LOGD(TAG, "Speech recognised as: \"%s\"", text.c_str());
|
||||
this->stt_end_trigger_->trigger(text);
|
||||
break;
|
||||
}
|
||||
case api::enums::VOICE_ASSISTANT_INTENT_START:
|
||||
ESP_LOGD(TAG, "Intent started");
|
||||
this->intent_start_trigger_->trigger();
|
||||
break;
|
||||
case api::enums::VOICE_ASSISTANT_INTENT_END: {
|
||||
for (auto arg : msg.data) {
|
||||
if (arg.name == "conversation_id") {
|
||||
this->conversation_id_ = std::move(arg.value);
|
||||
}
|
||||
}
|
||||
this->intent_end_trigger_->trigger();
|
||||
break;
|
||||
}
|
||||
case api::enums::VOICE_ASSISTANT_TTS_START: {
|
||||
@@ -548,7 +553,7 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
|
||||
}
|
||||
}
|
||||
if (text.empty()) {
|
||||
ESP_LOGW(TAG, "No text in TTS_START event.");
|
||||
ESP_LOGW(TAG, "No text in TTS_START event");
|
||||
return;
|
||||
}
|
||||
ESP_LOGD(TAG, "Response: \"%s\"", text.c_str());
|
||||
@@ -566,7 +571,7 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
|
||||
}
|
||||
}
|
||||
if (url.empty()) {
|
||||
ESP_LOGW(TAG, "No url in TTS_END event.");
|
||||
ESP_LOGW(TAG, "No url in TTS_END event");
|
||||
return;
|
||||
}
|
||||
ESP_LOGD(TAG, "Response URL: \"%s\"", url.c_str());
|
||||
@@ -634,6 +639,14 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
|
||||
this->set_state_(State::RESPONSE_FINISHED, State::IDLE);
|
||||
break;
|
||||
}
|
||||
case api::enums::VOICE_ASSISTANT_STT_VAD_START:
|
||||
ESP_LOGD(TAG, "Starting STT by VAD");
|
||||
this->stt_vad_start_trigger_->trigger();
|
||||
break;
|
||||
case api::enums::VOICE_ASSISTANT_STT_VAD_END:
|
||||
ESP_LOGD(TAG, "STT by VAD end");
|
||||
this->stt_vad_end_trigger_->trigger();
|
||||
break;
|
||||
default:
|
||||
ESP_LOGD(TAG, "Unhandled event type: %d", msg.event_type);
|
||||
break;
|
||||
|
||||
@@ -100,13 +100,17 @@ class VoiceAssistant : public Component {
|
||||
void set_auto_gain(uint8_t auto_gain) { this->auto_gain_ = auto_gain; }
|
||||
void set_volume_multiplier(float volume_multiplier) { this->volume_multiplier_ = volume_multiplier; }
|
||||
|
||||
Trigger<> *get_intent_end_trigger() const { return this->intent_end_trigger_; }
|
||||
Trigger<> *get_intent_start_trigger() const { return this->intent_start_trigger_; }
|
||||
Trigger<> *get_listening_trigger() const { return this->listening_trigger_; }
|
||||
Trigger<> *get_end_trigger() const { return this->end_trigger_; }
|
||||
Trigger<> *get_start_trigger() const { return this->start_trigger_; }
|
||||
Trigger<> *get_stt_vad_end_trigger() const { return this->stt_vad_end_trigger_; }
|
||||
Trigger<> *get_stt_vad_start_trigger() const { return this->stt_vad_start_trigger_; }
|
||||
Trigger<> *get_wake_word_detected_trigger() const { return this->wake_word_detected_trigger_; }
|
||||
Trigger<std::string> *get_stt_end_trigger() const { return this->stt_end_trigger_; }
|
||||
Trigger<std::string> *get_tts_start_trigger() const { return this->tts_start_trigger_; }
|
||||
Trigger<std::string> *get_tts_end_trigger() const { return this->tts_end_trigger_; }
|
||||
Trigger<> *get_end_trigger() const { return this->end_trigger_; }
|
||||
Trigger<std::string> *get_tts_start_trigger() const { return this->tts_start_trigger_; }
|
||||
Trigger<std::string, std::string> *get_error_trigger() const { return this->error_trigger_; }
|
||||
|
||||
Trigger<> *get_client_connected_trigger() const { return this->client_connected_trigger_; }
|
||||
@@ -124,13 +128,17 @@ class VoiceAssistant : public Component {
|
||||
std::unique_ptr<socket::Socket> socket_ = nullptr;
|
||||
struct sockaddr_storage dest_addr_;
|
||||
|
||||
Trigger<> *intent_end_trigger_ = new Trigger<>();
|
||||
Trigger<> *intent_start_trigger_ = new Trigger<>();
|
||||
Trigger<> *listening_trigger_ = new Trigger<>();
|
||||
Trigger<> *end_trigger_ = new Trigger<>();
|
||||
Trigger<> *start_trigger_ = new Trigger<>();
|
||||
Trigger<> *stt_vad_start_trigger_ = new Trigger<>();
|
||||
Trigger<> *stt_vad_end_trigger_ = new Trigger<>();
|
||||
Trigger<> *wake_word_detected_trigger_ = new Trigger<>();
|
||||
Trigger<std::string> *stt_end_trigger_ = new Trigger<std::string>();
|
||||
Trigger<std::string> *tts_start_trigger_ = new Trigger<std::string>();
|
||||
Trigger<std::string> *tts_end_trigger_ = new Trigger<std::string>();
|
||||
Trigger<> *end_trigger_ = new Trigger<>();
|
||||
Trigger<std::string> *tts_start_trigger_ = new Trigger<std::string>();
|
||||
Trigger<std::string, std::string> *error_trigger_ = new Trigger<std::string, std::string>();
|
||||
|
||||
Trigger<> *client_connected_trigger_ = new Trigger<>();
|
||||
|
||||
8
esphome/dashboard/const.py
Normal file
@@ -0,0 +1,8 @@
from __future__ import annotations

EVENT_ENTRY_ADDED = "entry_added"
EVENT_ENTRY_REMOVED = "entry_removed"
EVENT_ENTRY_UPDATED = "entry_updated"
EVENT_ENTRY_STATE_CHANGED = "entry_state_changed"

SENTINEL = object()
@@ -3,29 +3,66 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
import logging
|
||||
import threading
|
||||
from typing import TYPE_CHECKING
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
from typing import TYPE_CHECKING, Any, Callable
|
||||
|
||||
from ..zeroconf import DiscoveredImport
|
||||
from .entries import DashboardEntry
|
||||
from .entries import DashboardEntries
|
||||
from .settings import DashboardSettings
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .status.mdns import MDNSStatus
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def list_dashboard_entries() -> list[DashboardEntry]:
|
||||
"""List all dashboard entries."""
|
||||
return DASHBOARD.settings.entries()
|
||||
@dataclass
|
||||
class Event:
|
||||
"""Dashboard Event."""
|
||||
|
||||
event_type: str
|
||||
data: dict[str, Any]
|
||||
|
||||
|
||||
class EventBus:
|
||||
"""Dashboard event bus."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the Dashboard event bus."""
|
||||
self._listeners: dict[str, set[Callable[[Event], None]]] = {}
|
||||
|
||||
def async_add_listener(
|
||||
self, event_type: str, listener: Callable[[Event], None]
|
||||
) -> Callable[[], None]:
|
||||
"""Add a listener to the event bus."""
|
||||
self._listeners.setdefault(event_type, set()).add(listener)
|
||||
return partial(self._async_remove_listener, event_type, listener)
|
||||
|
||||
def _async_remove_listener(
|
||||
self, event_type: str, listener: Callable[[Event], None]
|
||||
) -> None:
|
||||
"""Remove a listener from the event bus."""
|
||||
self._listeners[event_type].discard(listener)
|
||||
|
||||
def async_fire(self, event_type: str, event_data: dict[str, Any]) -> None:
|
||||
"""Fire an event."""
|
||||
event = Event(event_type, event_data)
|
||||
|
||||
_LOGGER.debug("Firing event: %s", event)
|
||||
|
||||
for listener in self._listeners.get(event_type, set()):
|
||||
listener(event)
|
||||
|
||||
|
||||
class ESPHomeDashboard:
|
||||
"""Class that represents the dashboard."""
|
||||
|
||||
__slots__ = (
|
||||
"bus",
|
||||
"entries",
|
||||
"loop",
|
||||
"ping_result",
|
||||
"import_result",
|
||||
"stop_event",
|
||||
"ping_request",
|
||||
@@ -36,8 +73,9 @@ class ESPHomeDashboard:
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the ESPHomeDashboard."""
|
||||
self.bus = EventBus()
|
||||
self.entries: DashboardEntries | None = None
|
||||
self.loop: asyncio.AbstractEventLoop | None = None
|
||||
self.ping_result: dict[str, bool | None] = {}
|
||||
self.import_result: dict[str, DiscoveredImport] = {}
|
||||
self.stop_event = threading.Event()
|
||||
self.ping_request: asyncio.Event | None = None
|
||||
@@ -49,12 +87,14 @@ class ESPHomeDashboard:
|
||||
"""Setup the dashboard."""
|
||||
self.loop = asyncio.get_running_loop()
|
||||
self.ping_request = asyncio.Event()
|
||||
self.entries = DashboardEntries(self)
|
||||
|
||||
async def async_run(self) -> None:
|
||||
"""Run the dashboard."""
|
||||
settings = self.settings
|
||||
mdns_task: asyncio.Task | None = None
|
||||
ping_status_task: asyncio.Task | None = None
|
||||
await self.entries.async_update_entries()
|
||||
|
||||
if settings.status_use_ping:
|
||||
from .status.ping import PingStatus
|
||||
|
||||
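The `EventBus` added above is a small synchronous dispatcher: `async_add_listener` registers a callback for one event type and returns a `functools.partial` that detaches it again, and `async_fire` wraps the payload in an `Event` dataclass and calls every registered listener. A short usage sketch (not part of the diff; it assumes the class lives in `esphome.dashboard.core` as the surrounding imports suggest, and the event data is made up):

```python
from esphome.dashboard.const import EVENT_ENTRY_STATE_CHANGED
from esphome.dashboard.core import Event, EventBus  # assumed module path


def on_state_changed(event: Event) -> None:
    print(f"{event.event_type}: {event.data['entry']} is now {event.data['state']}")


bus = EventBus()
remove = bus.async_add_listener(EVENT_ENTRY_STATE_CHANGED, on_state_changed)

# Fired from the event loop thread, e.g. by DashboardEntries.async_set_state():
bus.async_fire(EVENT_ENTRY_STATE_CHANGED, {"entry": "livingroom.yaml", "state": "online"})

remove()  # the returned partial detaches the listener again
```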
@@ -1,10 +1,224 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from esphome import const
|
||||
from esphome import const, util
|
||||
from esphome.storage_json import StorageJSON, ext_storage_path
|
||||
|
||||
from .const import (
|
||||
EVENT_ENTRY_ADDED,
|
||||
EVENT_ENTRY_REMOVED,
|
||||
EVENT_ENTRY_STATE_CHANGED,
|
||||
EVENT_ENTRY_UPDATED,
|
||||
)
|
||||
from .enum import StrEnum
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .core import ESPHomeDashboard
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
DashboardCacheKeyType = tuple[int, int, float, int]
|
||||
|
||||
# Currently EntryState is a simple
|
||||
# online/offline/unknown enum, but in the future
|
||||
# it may be expanded to include more states
|
||||
|
||||
|
||||
class EntryState(StrEnum):
|
||||
ONLINE = "online"
|
||||
OFFLINE = "offline"
|
||||
UNKNOWN = "unknown"
|
||||
|
||||
|
||||
_BOOL_TO_ENTRY_STATE = {
|
||||
True: EntryState.ONLINE,
|
||||
False: EntryState.OFFLINE,
|
||||
None: EntryState.UNKNOWN,
|
||||
}
|
||||
_ENTRY_STATE_TO_BOOL = {
|
||||
EntryState.ONLINE: True,
|
||||
EntryState.OFFLINE: False,
|
||||
EntryState.UNKNOWN: None,
|
||||
}
|
||||
|
||||
|
||||
def bool_to_entry_state(value: bool) -> EntryState:
|
||||
"""Convert a bool to an entry state."""
|
||||
return _BOOL_TO_ENTRY_STATE[value]
|
||||
|
||||
|
||||
def entry_state_to_bool(value: EntryState) -> bool | None:
|
||||
"""Convert an entry state to a bool."""
|
||||
return _ENTRY_STATE_TO_BOOL[value]
|
||||
|
||||
|
||||
class DashboardEntries:
|
||||
"""Represents all dashboard entries."""
|
||||
|
||||
__slots__ = (
|
||||
"_dashboard",
|
||||
"_loop",
|
||||
"_config_dir",
|
||||
"_entries",
|
||||
"_entry_states",
|
||||
"_loaded_entries",
|
||||
"_update_lock",
|
||||
)
|
||||
|
||||
def __init__(self, dashboard: ESPHomeDashboard) -> None:
|
||||
"""Initialize the DashboardEntries."""
|
||||
self._dashboard = dashboard
|
||||
self._loop = asyncio.get_running_loop()
|
||||
self._config_dir = dashboard.settings.config_dir
|
||||
# Entries are stored as
|
||||
# {
|
||||
# "path/to/file.yaml": DashboardEntry,
|
||||
# ...
|
||||
# }
|
||||
self._entries: dict[str, DashboardEntry] = {}
|
||||
self._loaded_entries = False
|
||||
self._update_lock = asyncio.Lock()
|
||||
|
||||
def get(self, path: str) -> DashboardEntry | None:
|
||||
"""Get an entry by path."""
|
||||
return self._entries.get(path)
|
||||
|
||||
async def _async_all(self) -> list[DashboardEntry]:
|
||||
"""Return all entries."""
|
||||
return list(self._entries.values())
|
||||
|
||||
def all(self) -> list[DashboardEntry]:
|
||||
"""Return all entries."""
|
||||
return asyncio.run_coroutine_threadsafe(self._async_all, self._loop).result()
|
||||
|
||||
def async_all(self) -> list[DashboardEntry]:
|
||||
"""Return all entries."""
|
||||
return list(self._entries.values())
|
||||
|
||||
def set_state(self, entry: DashboardEntry, state: EntryState) -> None:
|
||||
"""Set the state for an entry."""
|
||||
asyncio.run_coroutine_threadsafe(
|
||||
self._async_set_state(entry, state), self._loop
|
||||
).result()
|
||||
|
||||
async def _async_set_state(self, entry: DashboardEntry, state: EntryState) -> None:
|
||||
"""Set the state for an entry."""
|
||||
self.async_set_state(entry, state)
|
||||
|
||||
def async_set_state(self, entry: DashboardEntry, state: EntryState) -> None:
|
||||
"""Set the state for an entry."""
|
||||
if entry.state == state:
|
||||
return
|
||||
entry.state = state
|
||||
self._dashboard.bus.async_fire(
|
||||
EVENT_ENTRY_STATE_CHANGED, {"entry": entry, "state": state}
|
||||
)
|
||||
|
||||
async def async_request_update_entries(self) -> None:
|
||||
"""Request an update of the dashboard entries from disk.
|
||||
|
||||
If an update is already in progress, this will do nothing.
|
||||
"""
|
||||
if self._update_lock.locked():
|
||||
_LOGGER.debug("Dashboard entries are already being updated")
|
||||
return
|
||||
await self.async_update_entries()
|
||||
|
||||
async def async_update_entries(self) -> None:
|
||||
"""Update the dashboard entries from disk."""
|
||||
async with self._update_lock:
|
||||
await self._async_update_entries()
|
||||
|
||||
def _load_entries(
|
||||
self, entries: dict[DashboardEntry, DashboardCacheKeyType]
|
||||
) -> None:
|
||||
"""Load all entries from disk."""
|
||||
for entry, cache_key in entries.items():
|
||||
_LOGGER.debug(
|
||||
"Loading dashboard entry %s because cache key changed: %s",
|
||||
entry.path,
|
||||
cache_key,
|
||||
)
|
||||
entry.load_from_disk(cache_key)
|
||||
|
||||
async def _async_update_entries(self) -> list[DashboardEntry]:
|
||||
"""Sync the dashboard entries from disk."""
|
||||
_LOGGER.debug("Updating dashboard entries")
|
||||
# At some point it would be nice to use watchdog to avoid polling
|
||||
|
||||
path_to_cache_key = await self._loop.run_in_executor(
|
||||
None, self._get_path_to_cache_key
|
||||
)
|
||||
entries = self._entries
|
||||
added: dict[DashboardEntry, DashboardCacheKeyType] = {}
|
||||
updated: dict[DashboardEntry, DashboardCacheKeyType] = {}
|
||||
removed: set[DashboardEntry] = {
|
||||
entry
|
||||
for filename, entry in entries.items()
|
||||
if filename not in path_to_cache_key
|
||||
}
|
||||
|
||||
for path, cache_key in path_to_cache_key.items():
|
||||
if entry := entries.get(path):
|
||||
if entry.cache_key != cache_key:
|
||||
updated[entry] = cache_key
|
||||
else:
|
||||
entry = DashboardEntry(path, cache_key)
|
||||
added[entry] = cache_key
|
||||
|
||||
if added or updated:
|
||||
await self._loop.run_in_executor(
|
||||
None, self._load_entries, {**added, **updated}
|
||||
)
|
||||
|
||||
bus = self._dashboard.bus
|
||||
for entry in added:
|
||||
entries[entry.path] = entry
|
||||
bus.async_fire(EVENT_ENTRY_ADDED, {"entry": entry})
|
||||
|
||||
for entry in removed:
|
||||
del entries[entry.path]
|
||||
bus.async_fire(EVENT_ENTRY_REMOVED, {"entry": entry})
|
||||
|
||||
for entry in updated:
|
||||
bus.async_fire(EVENT_ENTRY_UPDATED, {"entry": entry})
|
||||
|
||||
def _get_path_to_cache_key(self) -> dict[str, DashboardCacheKeyType]:
|
||||
"""Return a dict of path to cache key."""
|
||||
path_to_cache_key: dict[str, DashboardCacheKeyType] = {}
|
||||
#
|
||||
# The cache key is (inode, device, mtime, size)
|
||||
# which allows us to avoid locking since it ensures
|
||||
# every iteration of this call will always return the newest
|
||||
# items from disk at the cost of a stat() call on each
|
||||
# file which is much faster than reading the file
|
||||
# for the cache hit case which is the common case.
|
||||
#
|
||||
for file in util.list_yaml_files([self._config_dir]):
|
||||
try:
|
||||
# Prefer the json storage path if it exists
|
||||
stat = os.stat(ext_storage_path(os.path.basename(file)))
|
||||
except OSError:
|
||||
try:
|
||||
# Fallback to the yaml file if the storage
|
||||
# file does not exist or could not be generated
|
||||
stat = os.stat(file)
|
||||
except OSError:
|
||||
# File was deleted, ignore
|
||||
continue
|
||||
path_to_cache_key[file] = (
|
||||
stat.st_ino,
|
||||
stat.st_dev,
|
||||
stat.st_mtime,
|
||||
stat.st_size,
|
||||
)
|
||||
return path_to_cache_key
|
||||
|
||||
|
||||
class DashboardEntry:
    """Represents a single dashboard entry.
@@ -12,105 +226,146 @@ class DashboardEntry:
    This class is thread-safe and read-only.
    """

    __slots__ = ("path", "_storage", "_loaded_storage")
    __slots__ = (
        "path",
        "filename",
        "_storage_path",
        "cache_key",
        "storage",
        "state",
        "_to_dict",
    )

    def __init__(self, path: str) -> None:
    def __init__(self, path: str, cache_key: DashboardCacheKeyType) -> None:
        """Initialize the DashboardEntry."""
        self.path = path
        self._storage = None
        self._loaded_storage = False
        self.filename: str = os.path.basename(path)
        self._storage_path = ext_storage_path(self.filename)
        self.cache_key = cache_key
        self.storage: StorageJSON | None = None
        self.state = EntryState.UNKNOWN
        self._to_dict: dict[str, Any] | None = None

    def __repr__(self):
        """Return the representation of this entry."""
        return (
            f"DashboardEntry({self.path} "
            f"DashboardEntry(path={self.path} "
            f"address={self.address} "
            f"web_port={self.web_port} "
            f"name={self.name} "
            f"no_mdns={self.no_mdns})"
            f"no_mdns={self.no_mdns} "
            f"state={self.state} "
            ")"
        )

    @property
    def filename(self):
        """Return the filename of this entry."""
        return os.path.basename(self.path)
    def to_dict(self) -> dict[str, Any]:
        """Return a dict representation of this entry.

        The dict includes the loaded configuration but not
        the current state of the entry.
        """
        if self._to_dict is None:
            self._to_dict = {
                "name": self.name,
                "friendly_name": self.friendly_name,
                "configuration": self.filename,
                "loaded_integrations": self.loaded_integrations,
                "deployed_version": self.update_old,
                "current_version": self.update_new,
                "path": self.path,
                "comment": self.comment,
                "address": self.address,
                "web_port": self.web_port,
                "target_platform": self.target_platform,
            }
        return self._to_dict

    def load_from_disk(self, cache_key: DashboardCacheKeyType | None = None) -> None:
        """Load this entry from disk."""
        self.storage = StorageJSON.load(self._storage_path)
        self._to_dict = None
        #
        # Currently StorageJSON.load() will return None if the file does not exist
        #
        # StorageJSON currently does not provide an updated cache key so we use the
        # one that is passed in.
        #
        # The cache key was read from the disk moments ago and may be stale but
        # it does not matter since we are polling anyways, and the next call to
        # async_update_entries() will load it again in the extremely rare case that
        # it changed between the two calls.
        #
        if cache_key:
            self.cache_key = cache_key

    @property
    def storage(self) -> StorageJSON | None:
        """Return the StorageJSON object for this entry."""
        if not self._loaded_storage:
            self._storage = StorageJSON.load(ext_storage_path(self.filename))
            self._loaded_storage = True
        return self._storage

    @property
    def address(self):
    def address(self) -> str | None:
        """Return the address of this entry."""
        if self.storage is None:
            return None
        return self.storage.address

    @property
    def no_mdns(self):
    def no_mdns(self) -> bool | None:
        """Return the no_mdns of this entry."""
        if self.storage is None:
            return None
        return self.storage.no_mdns

    @property
    def web_port(self):
    def web_port(self) -> int | None:
        """Return the web port of this entry."""
        if self.storage is None:
            return None
        return self.storage.web_port

    @property
    def name(self):
    def name(self) -> str:
        """Return the name of this entry."""
        if self.storage is None:
            return self.filename.replace(".yml", "").replace(".yaml", "")
        return self.storage.name

    @property
    def friendly_name(self):
    def friendly_name(self) -> str:
        """Return the friendly name of this entry."""
        if self.storage is None:
            return self.name
        return self.storage.friendly_name

    @property
    def comment(self):
    def comment(self) -> str | None:
        """Return the comment of this entry."""
        if self.storage is None:
            return None
        return self.storage.comment

    @property
    def target_platform(self):
    def target_platform(self) -> str | None:
        """Return the target platform of this entry."""
        if self.storage is None:
            return None
        return self.storage.target_platform

    @property
    def update_available(self):
    def update_available(self) -> bool:
        """Return if an update is available for this entry."""
        if self.storage is None:
            return True
        return self.update_old != self.update_new

    @property
    def update_old(self):
    def update_old(self) -> str:
        if self.storage is None:
            return ""
        return self.storage.esphome_version or ""

    @property
    def update_new(self):
    def update_new(self) -> str:
        return const.__version__

    @property
    def loaded_integrations(self):
    def loaded_integrations(self) -> list[str]:
        if self.storage is None:
            return []
        return self.storage.loaded_integrations
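For illustration only, a sketch of how the memoized to_dict() interacts with load_from_disk(); the path and cache key shown here are made up and would normally come from the entries manager above:

# Illustration (assumes a real YAML config exists at the given path).
entry = DashboardEntry("/config/livingroom.yaml", cache_key=(0, 0, 0.0, 0))
first = entry.to_dict()
entry.load_from_disk()            # re-reads the storage JSON and clears the memo
assert entry.to_dict() is not first  # a fresh dict is built on the next call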
esphome/dashboard/enum.py (new file, 19 lines)
@@ -0,0 +1,19 @@
"""Enum backports from standard lib."""
from __future__ import annotations

from enum import Enum
from typing import Any


class StrEnum(str, Enum):
    """Partial backport of Python 3.11's StrEnum for our basic use cases."""

    def __new__(cls, value: str, *args: Any, **kwargs: Any) -> StrEnum:
        """Create a new StrEnum instance."""
        if not isinstance(value, str):
            raise TypeError(f"{value!r} is not a string")
        return super().__new__(cls, value, *args, **kwargs)

    def __str__(self) -> str:
        """Return self.value."""
        return str(self.value)
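A small usage sketch of this backport. EntryState is imported elsewhere in this diff but its definition is not shown here, so the members below are an educated guess for illustration:

# Assuming the StrEnum class defined above is importable, e.g.:
# from esphome.dashboard.enum import StrEnum

class EntryState(StrEnum):
    ONLINE = "online"
    OFFLINE = "offline"
    UNKNOWN = "unknown"

assert EntryState.ONLINE == "online"         # str mixin compares equal to plain strings
assert str(EntryState.OFFLINE) == "offline"  # __str__ returns the raw value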
@@ -4,29 +4,23 @@ import hmac
import os
from pathlib import Path

from esphome import util
from esphome.core import CORE
from esphome.helpers import get_bool_env
from esphome.storage_json import ext_storage_path

from .entries import DashboardEntry
from .util import password_hash
from .util.password import password_hash


class DashboardSettings:
    """Settings for the dashboard."""

    def __init__(self):
        self.config_dir = ""
        self.password_hash = ""
        self.username = ""
        self.using_password = False
        self.on_ha_addon = False
        self.cookie_secret = None
        self.absolute_config_dir = None
        self._entry_cache: dict[
            str, tuple[tuple[int, int, float, int], DashboardEntry]
        ] = {}
    def __init__(self) -> None:
        self.config_dir: str = ""
        self.password_hash: str = ""
        self.username: str = ""
        self.using_password: bool = False
        self.on_ha_addon: bool = False
        self.cookie_secret: str | None = None
        self.absolute_config_dir: Path | None = None

    def parse_args(self, args):
        self.on_ha_addon: bool = args.ha_addon
@@ -80,67 +74,3 @@ class DashboardSettings:
        # Raises ValueError if not relative to ESPHome config folder
        Path(joined_path).resolve().relative_to(self.absolute_config_dir)
        return joined_path

    def list_yaml_files(self) -> list[str]:
        return util.list_yaml_files([self.config_dir])

    def entries(self) -> list[DashboardEntry]:
        """Fetch all dashboard entries, thread-safe."""
        path_to_cache_key: dict[str, tuple[int, int, float, int]] = {}
        #
        # The cache key is (inode, device, mtime, size)
        # which allows us to avoid locking since it ensures
        # every iteration of this call will always return the newest
        # items from disk at the cost of a stat() call on each
        # file which is much faster than reading the file
        # for the cache hit case which is the common case.
        #
        # Because there is no lock the cache may
        # get built more than once but that's fine as it's still
        # thread-safe and results in orders of magnitude less
        # reads from disk than if we did not cache at all and
        # does not have a lock contention issue.
        #
        for file in self.list_yaml_files():
            try:
                # Prefer the json storage path if it exists
                stat = os.stat(ext_storage_path(os.path.basename(file)))
            except OSError:
                try:
                    # Fallback to the yaml file if the storage
                    # file does not exist or could not be generated
                    stat = os.stat(file)
                except OSError:
                    # File was deleted, ignore
                    continue
            path_to_cache_key[file] = (
                stat.st_ino,
                stat.st_dev,
                stat.st_mtime,
                stat.st_size,
            )

        entry_cache = self._entry_cache

        # Remove entries that no longer exist
        removed: list[str] = []
        for file in entry_cache:
            if file not in path_to_cache_key:
                removed.append(file)

        for file in removed:
            entry_cache.pop(file)

        dashboard_entries: list[DashboardEntry] = []
        for file, cache_key in path_to_cache_key.items():
            if cached_entry := entry_cache.get(file):
                entry_key, dashboard_entry = cached_entry
                if entry_key == cache_key:
                    dashboard_entries.append(dashboard_entry)
                    continue

            dashboard_entry = DashboardEntry(file)
            dashboard_entries.append(dashboard_entry)
            entry_cache[file] = (cache_key, dashboard_entry)

        return dashboard_entries
@@ -10,7 +10,9 @@ from esphome.zeroconf import (
    DashboardStatus,
)

from ..core import DASHBOARD, list_dashboard_entries
from ..const import SENTINEL
from ..core import DASHBOARD
from ..entries import bool_to_entry_state


class MDNSStatus:
@@ -22,16 +24,16 @@ class MDNSStatus:
        self.aiozc: AsyncEsphomeZeroconf | None = None
        # This is the current mdns state for each host (True, False, None)
        self.host_mdns_state: dict[str, bool | None] = {}
        # This is the hostnames to filenames mapping
        self.host_name_to_filename: dict[str, str] = {}
        self.filename_to_host_name: dict[str, str] = {}
        # This is the hostnames to path mapping
        self.host_name_to_path: dict[str, str] = {}
        self.path_to_host_name: dict[str, str] = {}
        # This is a set of host names to track (i.e. no_mdns = false)
        self.host_name_with_mdns_enabled: set[str] = set()
        self._loop = asyncio.get_running_loop()

    def filename_to_host_name_thread_safe(self, filename: str) -> str | None:
        """Resolve a filename to an address in a thread-safe manner."""
        return self.filename_to_host_name.get(filename)
    def get_path_to_host_name(self, path: str) -> str | None:
        """Resolve a path to an address in a thread-safe manner."""
        return self.path_to_host_name.get(path)

    async def async_resolve_host(self, host_name: str) -> str | None:
        """Resolve a host name to an address in a thread-safe manner."""
@@ -41,14 +43,15 @@ class MDNSStatus:

    async def async_refresh_hosts(self):
        """Refresh the hosts to track."""
        entries = await self._loop.run_in_executor(None, list_dashboard_entries)
        dashboard = DASHBOARD
        current_entries = dashboard.entries.async_all()
        host_name_with_mdns_enabled = self.host_name_with_mdns_enabled
        host_mdns_state = self.host_mdns_state
        host_name_to_filename = self.host_name_to_filename
        filename_to_host_name = self.filename_to_host_name
        ping_result = DASHBOARD.ping_result
        host_name_to_path = self.host_name_to_path
        path_to_host_name = self.path_to_host_name
        entries = dashboard.entries

        for entry in entries:
        for entry in current_entries:
            name = entry.name
            # If no_mdns is set, remove it from the set
            if entry.no_mdns:
@@ -57,37 +60,37 @@ class MDNSStatus:

            # We are tracking this host
            host_name_with_mdns_enabled.add(name)
            filename = entry.filename
            path = entry.path

            # If we just adopted/imported this host, we likely
            # already have a state for it, so we should make sure
            # to set it so the dashboard shows it as online
            if name in host_mdns_state:
                ping_result[filename] = host_mdns_state[name]
            if (online := host_mdns_state.get(name, SENTINEL)) != SENTINEL:
                entries.async_set_state(entry, bool_to_entry_state(online))

            # Make sure the mapping is up to date
            # so when we get an mdns update we can map it back
            # to the filename
            host_name_to_filename[name] = filename
            filename_to_host_name[filename] = name
            host_name_to_path[name] = path
            path_to_host_name[path] = name

    async def async_run(self) -> None:
        dashboard = DASHBOARD

        entries = dashboard.entries
        aiozc = AsyncEsphomeZeroconf()
        self.aiozc = aiozc
        host_mdns_state = self.host_mdns_state
        host_name_to_filename = self.host_name_to_filename
        host_name_to_path = self.host_name_to_path
        host_name_with_mdns_enabled = self.host_name_with_mdns_enabled
        ping_result = dashboard.ping_result

        def on_update(dat: dict[str, bool | None]) -> None:
            """Update the global PING_RESULT dict."""
            """Update the entry state."""
            for name, result in dat.items():
                host_mdns_state[name] = result
                if name in host_name_with_mdns_enabled:
                    filename = host_name_to_filename[name]
                    ping_result[filename] = result
                if name not in host_name_with_mdns_enabled:
                    continue
                if entry := entries.get(host_name_to_path[name]):
                    entries.async_set_state(entry, bool_to_entry_state(result))

        stat = DashboardStatus(on_update)
        imports = DashboardImportDiscovery()
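bool_to_entry_state and entry_state_to_bool are imported above but their definitions are not part of this compare view; a plausible sketch, assuming EntryState carries ONLINE/OFFLINE/UNKNOWN members as used elsewhere in the diff:

def bool_to_entry_state(value: bool | None) -> EntryState:
    """Map an mDNS/ping result (True, False, None) to an EntryState."""
    if value is None:
        return EntryState.UNKNOWN
    return EntryState.ONLINE if value else EntryState.OFFLINE


def entry_state_to_bool(state: EntryState) -> bool | None:
    """Map an EntryState back to the legacy ping_result representation."""
    if state == EntryState.UNKNOWN:
        return None
    return state == EntryState.ONLINE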
@@ -7,7 +7,8 @@ import threading

from esphome import mqtt

from ..core import DASHBOARD, list_dashboard_entries
from ..core import DASHBOARD
from ..entries import EntryState


class MqttStatusThread(threading.Thread):
@@ -16,22 +17,23 @@ class MqttStatusThread(threading.Thread):
    def run(self) -> None:
        """Run the status thread."""
        dashboard = DASHBOARD
        entries = list_dashboard_entries()
        entries = dashboard.entries
        current_entries = entries.all()

        config = mqtt.config_from_env()
        topic = "esphome/discover/#"

        def on_message(client, userdata, msg):
            nonlocal entries
            nonlocal current_entries

            payload = msg.payload.decode(errors="backslashreplace")
            if len(payload) > 0:
                data = json.loads(payload)
                if "name" not in data:
                    return
                for entry in entries:
                for entry in current_entries:
                    if entry.name == data["name"]:
                        dashboard.ping_result[entry.filename] = True
                        entries.set_state(entry, EntryState.ONLINE)
                        return

        def on_connect(client, userdata, flags, return_code):
@@ -51,13 +53,11 @@ class MqttStatusThread(threading.Thread):
        client.loop_start()

        while not dashboard.stop_event.wait(2):
            # update entries
            entries = list_dashboard_entries()

            current_entries = entries.all()
            # will be set to true on on_message
            for entry in entries:
            for entry in current_entries:
                if entry.no_mdns:
                    dashboard.ping_result[entry.filename] = False
                    entries.set_state(entry, EntryState.OFFLINE)

            client.publish("esphome/discover", None, retain=False)
            dashboard.mqtt_ping_request.wait()
@@ -5,24 +5,16 @@ import os
from typing import cast

from ..core import DASHBOARD
from ..entries import DashboardEntry
from ..core import list_dashboard_entries
from ..util import chunked
from ..entries import DashboardEntry, bool_to_entry_state
from ..util.itertools import chunked
from ..util.subprocess import async_system_command_status


async def _async_ping_host(host: str) -> bool:
    """Ping a host."""
    ping_command = ["ping", "-n" if os.name == "nt" else "-c", "1"]
    process = await asyncio.create_subprocess_exec(
        *ping_command,
        host,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.DEVNULL,
        stderr=asyncio.subprocess.DEVNULL,
        close_fds=False,
    return await async_system_command_status(
        ["ping", "-n" if os.name == "nt" else "-c", "1", host]
    )
    await process.wait()
    return process.returncode == 0


class PingStatus:
@@ -34,14 +26,14 @@ class PingStatus:
    async def async_run(self) -> None:
        """Run the ping status."""
        dashboard = DASHBOARD
        entries = dashboard.entries

        while not dashboard.stop_event.is_set():
            # Only ping if the dashboard is open
            await dashboard.ping_request.wait()
            dashboard.ping_result.clear()
            entries = await self._loop.run_in_executor(None, list_dashboard_entries)
            current_entries = dashboard.entries.async_all()
            to_ping: list[DashboardEntry] = [
                entry for entry in entries if entry.address is not None
                entry for entry in current_entries if entry.address is not None
            ]
            for ping_group in chunked(to_ping, 16):
                ping_group = cast(list[DashboardEntry], ping_group)
@@ -54,4 +46,4 @@ class PingStatus:
                    result = False
                elif isinstance(result, BaseException):
                    raise result
                dashboard.ping_result[entry.filename] = result
                entries.async_set_state(entry, bool_to_entry_state(result))
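The loop body hidden by the hunk above presumably gathers the results for each chunk; a simplified sketch of that pattern, assuming _async_ping_host as defined above (the helper name below is illustrative and exceptions are simply treated as offline):

async def _ping_chunk(hosts: list[str]) -> dict[str, bool]:
    """Ping one chunk of hosts concurrently; a captured exception counts as offline."""
    results = await asyncio.gather(
        *(_async_ping_host(host) for host in hosts), return_exceptions=True
    )
    return {host: result is True for host, result in zip(hosts, results)}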
@@ -1,52 +0,0 @@
import hashlib
import unicodedata
from collections.abc import Iterable
from functools import partial
from itertools import islice
from typing import Any

from esphome.const import ALLOWED_NAME_CHARS


def password_hash(password: str) -> bytes:
    """Create a hash of a password to transform it to a fixed-length digest.

    Note this is not meant for secure storage, but for securely comparing passwords.
    """
    return hashlib.sha256(password.encode()).digest()


def strip_accents(value):
    return "".join(
        c
        for c in unicodedata.normalize("NFD", str(value))
        if unicodedata.category(c) != "Mn"
    )


def friendly_name_slugify(value):
    value = (
        strip_accents(value)
        .lower()
        .replace(" ", "-")
        .replace("_", "-")
        .replace("--", "-")
        .strip("-")
    )
    return "".join(c for c in value if c in ALLOWED_NAME_CHARS)


def take(take_num: int, iterable: Iterable) -> list[Any]:
    """Return first n items of the iterable as a list.

    From itertools recipes
    """
    return list(islice(iterable, take_num))


def chunked(iterable: Iterable, chunked_num: int) -> Iterable[Any]:
    """Break *iterable* into lists of length *n*.

    From more-itertools
    """
    return iter(partial(take, chunked_num, iter(iterable)), [])
esphome/dashboard/util/__init__.py (new file, 0 lines)
esphome/dashboard/util/itertools.py (new file, 22 lines)
@@ -0,0 +1,22 @@
from __future__ import annotations

from collections.abc import Iterable
from functools import partial
from itertools import islice
from typing import Any


def take(take_num: int, iterable: Iterable) -> list[Any]:
    """Return first n items of the iterable as a list.

    From itertools recipes
    """
    return list(islice(iterable, take_num))


def chunked(iterable: Iterable, chunked_num: int) -> Iterable[Any]:
    """Break *iterable* into lists of length *n*.

    From more-itertools
    """
    return iter(partial(take, chunked_num, iter(iterable)), [])
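A quick usage sketch of the two helpers above, matching how PingStatus batches entries into groups of 16:

assert take(3, range(10)) == [0, 1, 2]
assert list(chunked(range(7), 3)) == [[0, 1, 2], [3, 4, 5], [6]]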
esphome/dashboard/util/password.py (new file, 11 lines)
@@ -0,0 +1,11 @@
from __future__ import annotations

import hashlib


def password_hash(password: str) -> bytes:
    """Create a hash of a password to transform it to a fixed-length digest.

    Note this is not meant for secure storage, but for securely comparing passwords.
    """
    return hashlib.sha256(password.encode()).digest()
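The settings module imports hmac alongside this helper; a plausible usage sketch for comparing a submitted password against the stored digest in constant time (the function name here is illustrative, not taken from the diff):

import hmac

def check_password(submitted: str, expected_hash: bytes) -> bool:
    # Compare fixed-length digests rather than raw strings to avoid timing leaks.
    return hmac.compare_digest(password_hash(submitted), expected_hash)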
esphome/dashboard/util/subprocess.py (new file, 31 lines)
@@ -0,0 +1,31 @@
from __future__ import annotations

import asyncio
from collections.abc import Iterable


async def async_system_command_status(command: Iterable[str]) -> bool:
    """Run a system command checking only the status."""
    process = await asyncio.create_subprocess_exec(
        *command,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.DEVNULL,
        stderr=asyncio.subprocess.DEVNULL,
        close_fds=False,
    )
    await process.wait()
    return process.returncode == 0


async def async_run_system_command(command: Iterable[str]) -> tuple[int, bytes, bytes]:
    """Run a system command and return a tuple of returncode, stdout, stderr."""
    process = await asyncio.create_subprocess_exec(
        *command,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
        close_fds=False,
    )
    stdout, stderr = await process.communicate()
    await process.wait()
    return process.returncode, stdout, stderr
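A minimal usage sketch of these helpers; the commands shown are only examples and assume a POSIX-style ping:

import asyncio

async def main() -> None:
    ok = await async_system_command_status(["ping", "-c", "1", "127.0.0.1"])
    rc, stdout, stderr = await async_run_system_command(["echo", "hello"])
    print(ok, rc, stdout.decode().strip())

asyncio.run(main())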
esphome/dashboard/util/text.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from __future__ import annotations

import unicodedata

from esphome.const import ALLOWED_NAME_CHARS


def strip_accents(value):
    return "".join(
        c
        for c in unicodedata.normalize("NFD", str(value))
        if unicodedata.category(c) != "Mn"
    )


def friendly_name_slugify(value):
    value = (
        strip_accents(value)
        .lower()
        .replace(" ", "-")
        .replace("_", "-")
        .replace("--", "-")
        .strip("-")
    )
    return "".join(c for c in value if c in ALLOWED_NAME_CHARS)
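For illustration, how the slugify helper turns a friendly name into a device-safe slug; the expected output assumes ALLOWED_NAME_CHARS permits lowercase letters, digits, "-" and "_":

# Accents are stripped and spaces/underscores become hyphens.
print(friendly_name_slugify("Café Münster Sensor"))  # expected: cafe-munster-sensor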
@@ -31,13 +31,15 @@ import yaml
from tornado.log import access_log

from esphome import const, platformio_api, yaml_util
from esphome.helpers import get_bool_env, mkdir_p, run_system_command
from esphome.helpers import get_bool_env, mkdir_p
from esphome.storage_json import StorageJSON, ext_storage_path, trash_storage_path
from esphome.util import get_serial_ports, shlex_quote
from esphome.yaml_util import FastestAvailableSafeLoader

from .core import DASHBOARD, list_dashboard_entries
from .entries import DashboardEntry
from .util import friendly_name_slugify
from .core import DASHBOARD
from .entries import EntryState, entry_state_to_bool
from .util.subprocess import async_run_system_command
from .util.text import friendly_name_slugify

_LOGGER = logging.getLogger(__name__)

@@ -274,7 +276,7 @@ class EsphomePortCommandWebSocket(EsphomeCommandWebSocket):
        if (
            port == "OTA"
            and (mdns := dashboard.mdns_status)
            and (host_name := mdns.filename_to_host_name_thread_safe(configuration))
            and (host_name := mdns.get_path_to_host_name(config_file))
            and (address := await mdns.async_resolve_host(host_name))
        ):
            port = address
@@ -314,7 +316,9 @@ class EsphomeRenameHandler(EsphomeCommandWebSocket):
            return

        # Remove the old ping result from the cache
        DASHBOARD.ping_result.pop(self.old_name, None)
        entries = DASHBOARD.entries
        if entry := entries.get(self.old_name):
            entries.async_set_state(entry, EntryState.UNKNOWN)


class EsphomeUploadHandler(EsphomePortCommandWebSocket):
@@ -522,7 +526,7 @@ class DownloadListRequestHandler(BaseHandler):
class DownloadBinaryRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def get(self, configuration=None):
    async def get(self, configuration=None):
        compressed = self.get_argument("compressed", "0") == "1"

        storage_path = ext_storage_path(configuration)
@@ -548,7 +552,7 @@ class DownloadBinaryRequestHandler(BaseHandler):

        if not Path(path).is_file():
            args = ["esphome", "idedata", settings.rel_path(configuration)]
            rc, stdout, _ = run_system_command(*args)
            rc, stdout, _ = await async_run_system_command(args)

            if rc != 0:
                self.send_error(404 if rc == 2 else 500)
@@ -599,31 +603,16 @@ class EsphomeVersionHandler(BaseHandler):
class ListDevicesHandler(BaseHandler):
    @authenticated
    async def get(self):
        loop = asyncio.get_running_loop()
        entries = await loop.run_in_executor(None, list_dashboard_entries)
        dashboard = DASHBOARD
        await dashboard.entries.async_request_update_entries()
        entries = dashboard.entries.async_all()
        self.set_header("content-type", "application/json")
        configured = {entry.name for entry in entries}
        dashboard = DASHBOARD

        self.write(
            json.dumps(
                {
                    "configured": [
                        {
                            "name": entry.name,
                            "friendly_name": entry.friendly_name,
                            "configuration": entry.filename,
                            "loaded_integrations": entry.loaded_integrations,
                            "deployed_version": entry.update_old,
                            "current_version": entry.update_new,
                            "path": entry.path,
                            "comment": entry.comment,
                            "address": entry.address,
                            "web_port": entry.web_port,
                            "target_platform": entry.target_platform,
                        }
                        for entry in entries
                    ],
                    "configured": [entry.to_dict() for entry in entries],
                    "importable": [
                        {
                            "name": res.device_name,
@@ -656,8 +645,10 @@ class MainRequestHandler(BaseHandler):

class PrometheusServiceDiscoveryHandler(BaseHandler):
    @authenticated
    def get(self):
        entries = list_dashboard_entries()
    async def get(self):
        dashboard = DASHBOARD
        await dashboard.entries.async_request_update_entries()
        entries = dashboard.entries.async_all()
        self.set_header("content-type", "application/json")
        sd = []
        for entry in entries:
@@ -725,22 +716,31 @@ class PingRequestHandler(BaseHandler):
        if settings.status_use_mqtt:
            dashboard.mqtt_ping_request.set()
        self.set_header("content-type", "application/json")
        self.write(json.dumps(dashboard.ping_result))

        self.write(
            json.dumps(
                {
                    entry.filename: entry_state_to_bool(entry.state)
                    for entry in dashboard.entries.async_all()
                }
            )
        )


class InfoRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def get(self, configuration=None):
    async def get(self, configuration=None):
        yaml_path = settings.rel_path(configuration)
        all_yaml_files = settings.list_yaml_files()
        dashboard = DASHBOARD
        entry = dashboard.entries.get(yaml_path)

        if yaml_path not in all_yaml_files:
        if not entry:
            self.set_status(404)
            return

        self.set_header("content-type", "application/json")
        self.write(DashboardEntry(yaml_path).storage.to_json())
        self.write(entry.storage.to_json())


class EditRequestHandler(BaseHandler):
@@ -781,9 +781,6 @@ class DeleteRequestHandler(BaseHandler):
        if build_folder is not None:
            shutil.rmtree(build_folder, os.path.join(trash_path, name))

        # Remove the old ping result from the cache
        DASHBOARD.ping_result.pop(configuration, None)


class UndoDeleteRequestHandler(BaseHandler):
    @authenticated
@@ -885,7 +882,7 @@ class SecretKeysRequestHandler(BaseHandler):
        self.write(json.dumps(secret_keys))


class SafeLoaderIgnoreUnknown(yaml.SafeLoader):
class SafeLoaderIgnoreUnknown(FastestAvailableSafeLoader):
    def ignore_unknown(self, node):
        return f"{node.tag} {node.value}"

@@ -902,7 +899,7 @@ SafeLoaderIgnoreUnknown.add_constructor(
class JsonConfigRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def get(self, configuration=None):
    async def get(self, configuration=None):
        filename = settings.rel_path(configuration)
        if not os.path.isfile(filename):
            self.send_error(404)
@@ -910,7 +907,7 @@ class JsonConfigRequestHandler(BaseHandler):

        args = ["esphome", "config", filename, "--show-secrets"]

        rc, stdout, _ = run_system_command(*args)
        rc, stdout, _ = await async_run_system_command(args)

        if rc != 0:
            self.send_error(422)
@@ -10,7 +10,7 @@ platformio==6.1.11 # When updating platformio, also update Dockerfile
esptool==4.6.2
click==8.1.7
esphome-dashboard==20231107.0
aioesphomeapi==18.4.1
aioesphomeapi==18.5.3
zeroconf==0.127.0

# esp-idf requires this, but doesn't bundle it by default