from __future__ import annotations

import base64
import binascii
import collections
import datetime
import functools
import gzip
import hashlib
import hmac
import json
import logging
import multiprocessing
import os
import secrets
import shutil
import subprocess
import threading
from pathlib import Path

import tornado
import tornado.concurrent
import tornado.gen
import tornado.httpserver
import tornado.ioloop
import tornado.iostream
import tornado.netutil
import tornado.process
import tornado.queues
import tornado.web
import tornado.websocket
import yaml
from tornado.log import access_log

from esphome import const, platformio_api, util, yaml_util
from esphome.core import CORE
from esphome.helpers import get_bool_env, mkdir_p, run_system_command
from esphome.storage_json import (
    EsphomeStorageJSON,
    StorageJSON,
    esphome_storage_path,
    ext_storage_path,
    trash_storage_path,
)
from esphome.util import get_serial_ports, shlex_quote
from esphome.zeroconf import (
    ESPHOME_SERVICE_TYPE,
    DashboardBrowser,
    DashboardImportDiscovery,
    DashboardStatus,
    EsphomeZeroconf,
)

from .util import friendly_name_slugify, password_hash

_LOGGER = logging.getLogger(__name__)

ENV_DEV = "ESPHOME_DASHBOARD_DEV"


class DashboardSettings:
    def __init__(self):
        self.config_dir = ""
        self.password_hash = ""
        self.username = ""
        self.using_password = False
        self.on_ha_addon = False
        self.cookie_secret = None
        self.absolute_config_dir = None
        self._entry_cache: dict[
            str, tuple[tuple[int, int, float, int], DashboardEntry]
        ] = {}

    def parse_args(self, args):
        self.on_ha_addon = args.ha_addon
        password = args.password or os.getenv("PASSWORD", "")
        if not self.on_ha_addon:
            self.username = args.username or os.getenv("USERNAME", "")
            self.using_password = bool(password)
        if self.using_password:
            self.password_hash = password_hash(password)
        self.config_dir = args.configuration
        self.absolute_config_dir = Path(self.config_dir).resolve()
        CORE.config_path = os.path.join(self.config_dir, ".")

    @property
    def relative_url(self):
        return os.getenv("ESPHOME_DASHBOARD_RELATIVE_URL", "/")

    @property
    def status_use_ping(self):
        return get_bool_env("ESPHOME_DASHBOARD_USE_PING")

    @property
    def status_use_mqtt(self):
        return get_bool_env("ESPHOME_DASHBOARD_USE_MQTT")

    @property
    def using_ha_addon_auth(self):
        if not self.on_ha_addon:
            return False
        return not get_bool_env("DISABLE_HA_AUTHENTICATION")

    @property
    def using_auth(self):
        return self.using_password or self.using_ha_addon_auth

    @property
    def streamer_mode(self):
        return get_bool_env("ESPHOME_STREAMER_MODE")

    def check_password(self, username, password):
        if not self.using_auth:
            return True
        if username != self.username:
            return False

        # Compare hashes in constant time (to prevent timing attacks)
        return hmac.compare_digest(self.password_hash, password_hash(password))

    def rel_path(self, *args):
        joined_path = os.path.join(self.config_dir, *args)
        # Raises ValueError if not relative to the ESPHome config folder
        Path(joined_path).resolve().relative_to(self.absolute_config_dir)
        return joined_path

    def list_yaml_files(self) -> list[str]:
        return util.list_yaml_files([self.config_dir])

    def entries(self) -> list[DashboardEntry]:
        """Fetch all dashboard entries, thread-safe."""
        path_to_cache_key: dict[str, tuple[int, int, float, int]] = {}
        #
        # The cache key is (inode, device, mtime, size),
        # which allows us to avoid locking since it ensures
        # every iteration of this call will always return the newest
        # items from disk at the cost of a stat() call on each
        # file, which is much faster than reading the file
        # for the cache hit case, which is the common case.
        #
        # Because there is no lock, the cache may
        # get built more than once, but that's fine as it's still
        # thread-safe and results in orders of magnitude fewer
        # reads from disk than if we did not cache at all, and it
        # does not have a lock contention issue.
        #
        for file in self.list_yaml_files():
            try:
                # Prefer the json storage path if it exists
                stat = os.stat(ext_storage_path(os.path.basename(file)))
            except OSError:
                try:
                    # Fall back to the yaml file if the storage
                    # file does not exist or could not be generated
                    stat = os.stat(file)
                except OSError:
                    # File was deleted, ignore
                    continue
            path_to_cache_key[file] = (
                stat.st_ino,
                stat.st_dev,
                stat.st_mtime,
                stat.st_size,
            )

        entry_cache = self._entry_cache

        # Remove entries that no longer exist
        removed: list[str] = []
        for file in entry_cache:
            if file not in path_to_cache_key:
                removed.append(file)

        for file in removed:
            entry_cache.pop(file)

        dashboard_entries: list[DashboardEntry] = []
        for file, cache_key in path_to_cache_key.items():
            if cached_entry := entry_cache.get(file):
                entry_key, dashboard_entry = cached_entry
                if entry_key == cache_key:
                    dashboard_entries.append(dashboard_entry)
                    continue

            dashboard_entry = DashboardEntry(file)
            dashboard_entries.append(dashboard_entry)
            entry_cache[file] = (cache_key, dashboard_entry)

        return dashboard_entries


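# Illustrative note (not part of the module): `rel_path` above doubles as the
# dashboard's path-traversal guard. A sketch of the expected behavior,
# assuming a config dir of "/config" (paths made up):
#
#     settings.rel_path("living_room.yaml")  # -> "/config/living_room.yaml"
#     settings.rel_path("../../etc/passwd")  # raises ValueError
#
# The ValueError comes from Path.relative_to() failing for anything that
# resolves outside absolute_config_dir.

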
settings = DashboardSettings()

cookie_authenticated_yes = b"yes"


def template_args():
    version = const.__version__
    if "b" in version:
        docs_link = "https://beta.esphome.io/"
    elif "dev" in version:
        docs_link = "https://next.esphome.io/"
    else:
        docs_link = "https://www.esphome.io/"

    return {
        "version": version,
        "docs_link": docs_link,
        "get_static_file_url": get_static_file_url,
        "relative_url": settings.relative_url,
        "streamer_mode": settings.streamer_mode,
        "config_dir": settings.config_dir,
    }


def authenticated(func):
    @functools.wraps(func)
    def decorator(self, *args, **kwargs):
        if not is_authenticated(self):
            self.redirect("./login")
            return None
        return func(self, *args, **kwargs)

    return decorator


def is_authenticated(request_handler):
    if settings.on_ha_addon:
        # Handle ingress - disable auth on ingress port
        # X-HA-Ingress is automatically stripped on the non-ingress server in nginx
        header = request_handler.request.headers.get("X-HA-Ingress", "NO")
        if str(header) == "YES":
            return True
    if settings.using_auth:
        return (
            request_handler.get_secure_cookie("authenticated")
            == cookie_authenticated_yes
        )
    return True


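# A minimal sketch of how `authenticated` is meant to be applied to a request
# handler; `ExampleHandler` is hypothetical and not part of the dashboard:
#
#     class ExampleHandler(BaseHandler):
#         @authenticated
#         def get(self):
#             self.write("only reachable when logged in")
#
# Unauthenticated requests are redirected to ./login by the decorator.

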
def bind_config(func):
    def decorator(self, *args, **kwargs):
        configuration = self.get_argument("configuration")
        kwargs = kwargs.copy()
        kwargs["configuration"] = configuration
        return func(self, *args, **kwargs)

    return decorator


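# `bind_config` reads the ?configuration= query argument and passes it to the
# wrapped method as a keyword, so handlers can declare
# `def get(self, configuration=None)`. A hedged usage sketch (hypothetical
# handler, not part of the module):
#
#     class ShowConfigHandler(BaseHandler):
#         @authenticated
#         @bind_config
#         def get(self, configuration=None):
#             self.write(configuration)

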
# pylint: disable=abstract-method
class BaseHandler(tornado.web.RequestHandler):
    pass


def websocket_class(cls):
    # pylint: disable=protected-access
    if not hasattr(cls, "_message_handlers"):
        cls._message_handlers = {}

    for _, method in cls.__dict__.items():
        if hasattr(method, "_message_handler"):
            cls._message_handlers[method._message_handler] = method

    return cls


def websocket_method(name):
    def wrap(fn):
        # pylint: disable=protected-access
        fn._message_handler = name
        return fn

    return wrap


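# The two helpers above form a small message-dispatch registry:
# `websocket_method("x")` tags a method, and `websocket_class` collects every
# tagged method into `cls._message_handlers` so `on_message` can route
# incoming JSON by its "type" field. A sketch under those assumptions (the
# class and method names here are hypothetical):
#
#     @websocket_class
#     class EchoWebSocket(tornado.websocket.WebSocketHandler):
#         @websocket_method("echo")
#         def handle_echo(self, json_message):
#             self.write_message(json_message)

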
@websocket_class
class EsphomeCommandWebSocket(tornado.websocket.WebSocketHandler):
    def __init__(self, application, request, **kwargs):
        super().__init__(application, request, **kwargs)
        self._proc = None
        self._queue = None
        self._is_closed = False
        # Windows doesn't support non-blocking pipes,
        # use Popen() with a reading thread instead
        self._use_popen = os.name == "nt"

    @authenticated
    def on_message(self, message):
        # Messages are always JSON, 500 when not
        json_message = json.loads(message)
        type_ = json_message["type"]
        # pylint: disable=no-member
        handlers = type(self)._message_handlers
        if type_ not in handlers:
            _LOGGER.warning("Requested unknown message type %s", type_)
            return

        handlers[type_](self, json_message)

    @websocket_method("spawn")
    def handle_spawn(self, json_message):
        if self._proc is not None:
            # spawn can only be called once
            return
        command = self.build_command(json_message)
        _LOGGER.info("Running command '%s'", " ".join(shlex_quote(x) for x in command))

        if self._use_popen:
            self._queue = tornado.queues.Queue()
            # pylint: disable=consider-using-with
            self._proc = subprocess.Popen(
                command,
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
            )
            stdout_thread = threading.Thread(target=self._stdout_thread)
            stdout_thread.daemon = True
            stdout_thread.start()
        else:
            self._proc = tornado.process.Subprocess(
                command,
                stdout=tornado.process.Subprocess.STREAM,
                stderr=subprocess.STDOUT,
                stdin=tornado.process.Subprocess.STREAM,
            )
            self._proc.set_exit_callback(self._proc_on_exit)

        tornado.ioloop.IOLoop.current().spawn_callback(self._redirect_stdout)

    @property
    def is_process_active(self):
        return self._proc is not None and self._proc.returncode is None

    @websocket_method("stdin")
    def handle_stdin(self, json_message):
        if not self.is_process_active:
            return
        text: str = json_message["data"]
        data = text.encode("utf-8", "replace")
        _LOGGER.debug("< stdin: %s", data)
        self._proc.stdin.write(data)

    @tornado.gen.coroutine
    def _redirect_stdout(self):
        reg = b"[\n\r]"

        while True:
            try:
                if self._use_popen:
                    data: bytes = yield self._queue.get()
                    if data is None:
                        self._proc_on_exit(self._proc.poll())
                        break
                else:
                    data: bytes = yield self._proc.stdout.read_until_regex(reg)
            except tornado.iostream.StreamClosedError:
                break

            text = data.decode("utf-8", "replace")
            _LOGGER.debug("> stdout: %s", text)
            self.write_message({"event": "line", "data": text})

    def _stdout_thread(self):
        if not self._use_popen:
            return
        while True:
            data = self._proc.stdout.readline()
            if data:
                data = data.replace(b"\r", b"")
                self._queue.put_nowait(data)
            if self._proc.poll() is not None:
                break
        self._proc.wait(1.0)
        self._queue.put_nowait(None)

    def _proc_on_exit(self, returncode):
        if not self._is_closed:
            # Check if the proc was not forcibly closed
            _LOGGER.info("Process exited with return code %s", returncode)
            self.write_message({"event": "exit", "code": returncode})

    def on_close(self):
        # Check if proc exists (if 'start' has been run)
        if self.is_process_active:
            _LOGGER.debug("Terminating process")
            if self._use_popen:
                self._proc.terminate()
            else:
                self._proc.proc.terminate()
        # Shutdown proc on WS close
        self._is_closed = True

    def build_command(self, json_message):
        raise NotImplementedError


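# Wire protocol of EsphomeCommandWebSocket, as implemented above: the client
# sends JSON messages such as {"type": "spawn", ...} and
# {"type": "stdin", "data": "..."}; the server replies with
# {"event": "line", "data": "..."} for each line of subprocess output and
# {"event": "exit", "code": <returncode>} when the process finishes. Each
# subclass below only has to provide build_command().

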
class EsphomeLogsHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        config_file = settings.rel_path(json_message["configuration"])
        return [
            "esphome",
            "--dashboard",
            "logs",
            config_file,
            "--device",
            json_message["port"],
        ]


class EsphomeRenameHandler(EsphomeCommandWebSocket):
    old_name: str

    def build_command(self, json_message):
        config_file = settings.rel_path(json_message["configuration"])
        self.old_name = json_message["configuration"]
        return [
            "esphome",
            "--dashboard",
            "rename",
            config_file,
            json_message["newName"],
        ]

    def _proc_on_exit(self, returncode):
        super()._proc_on_exit(returncode)

        if returncode != 0:
            return

        # Remove the old ping result from the cache
        PING_RESULT.pop(self.old_name, None)


class EsphomeUploadHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        config_file = settings.rel_path(json_message["configuration"])
        return [
            "esphome",
            "--dashboard",
            "upload",
            config_file,
            "--device",
            json_message["port"],
        ]


class EsphomeRunHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        config_file = settings.rel_path(json_message["configuration"])
        return [
            "esphome",
            "--dashboard",
            "run",
            config_file,
            "--device",
            json_message["port"],
        ]


class EsphomeCompileHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        config_file = settings.rel_path(json_message["configuration"])
        command = ["esphome", "--dashboard", "compile"]
        if json_message.get("only_generate", False):
            command.append("--only-generate")
        command.append(config_file)
        return command


class EsphomeValidateHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        config_file = settings.rel_path(json_message["configuration"])
        command = ["esphome", "--dashboard", "config", config_file]
        if not settings.streamer_mode:
            command.append("--show-secrets")
        return command


class EsphomeCleanMqttHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        config_file = settings.rel_path(json_message["configuration"])
        return ["esphome", "--dashboard", "clean-mqtt", config_file]


class EsphomeCleanHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        config_file = settings.rel_path(json_message["configuration"])
        return ["esphome", "--dashboard", "clean", config_file]


class EsphomeVscodeHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        return ["esphome", "--dashboard", "-q", "vscode", "dummy"]


class EsphomeAceEditorHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        return ["esphome", "--dashboard", "-q", "vscode", "--ace", settings.config_dir]


class EsphomeUpdateAllHandler(EsphomeCommandWebSocket):
    def build_command(self, json_message):
        return ["esphome", "--dashboard", "update-all", settings.config_dir]


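# Adding another dashboard command only requires one more subclass; a hedged
# sketch (the handler name and the use of the "version" subcommand here are
# purely illustrative, not part of the dashboard):
#
#     class EsphomeVersionCommandHandler(EsphomeCommandWebSocket):
#         def build_command(self, json_message):
#             return ["esphome", "--dashboard", "version"]

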
class SerialPortRequestHandler(BaseHandler):
    @authenticated
    def get(self):
        ports = get_serial_ports()
        data = []
        for port in ports:
            desc = port.description
            if port.path == "/dev/ttyAMA0":
                desc = "UART pins on GPIO header"
            split_desc = desc.split(" - ")
            if len(split_desc) == 2 and split_desc[0] == split_desc[1]:
                # Some serial ports repeat their values
                desc = split_desc[0]
            data.append({"port": port.path, "desc": desc})
        data.append({"port": "OTA", "desc": "Over-The-Air"})
        data.sort(key=lambda x: x["port"], reverse=True)
        self.set_header("content-type", "application/json")
        self.write(json.dumps(data))


class WizardRequestHandler(BaseHandler):
    @authenticated
    def post(self):
        from esphome import wizard

        kwargs = {
            k: v
            for k, v in json.loads(self.request.body.decode()).items()
            if k in ("name", "platform", "board", "ssid", "psk", "password")
        }
        if not kwargs["name"]:
            self.set_status(422)
            self.set_header("content-type", "application/json")
            self.write(json.dumps({"error": "Name is required"}))
            return

        kwargs["friendly_name"] = kwargs["name"]
        kwargs["name"] = friendly_name_slugify(kwargs["friendly_name"])

        kwargs["ota_password"] = secrets.token_hex(16)
        noise_psk = secrets.token_bytes(32)
        kwargs["api_encryption_key"] = base64.b64encode(noise_psk).decode()
        filename = f"{kwargs['name']}.yaml"
        destination = settings.rel_path(filename)
        wizard.wizard_write(path=destination, **kwargs)
        self.set_status(200)
        self.set_header("content-type", "application/json")
        self.write(json.dumps({"configuration": filename}))
        self.finish()


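# Example request body accepted by WizardRequestHandler.post() above; only
# the keys it whitelists are used and everything else is dropped (the values
# here are made up):
#
#     {"name": "Living Room", "platform": "ESP32", "board": "esp32dev",
#      "ssid": "MyWifi", "psk": "hunter2", "password": ""}
#
# The handler slugifies "name" into the YAML filename and generates the OTA
# password and API encryption key itself.

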
class ImportRequestHandler(BaseHandler):
    @authenticated
    def post(self):
        from esphome.components.dashboard_import import import_config

        args = json.loads(self.request.body.decode())
        try:
            name = args["name"]
            friendly_name = args.get("friendly_name")
            encryption = args.get("encryption", False)

            imported_device = next(
                (res for res in IMPORT_RESULT.values() if res.device_name == name), None
            )

            if imported_device is not None:
                network = imported_device.network
                if friendly_name is None:
                    friendly_name = imported_device.friendly_name
            else:
                network = const.CONF_WIFI

            import_config(
                settings.rel_path(f"{name}.yaml"),
                name,
                friendly_name,
                args["project_name"],
                args["package_import_url"],
                network,
                encryption,
            )
            # Make sure the device gets marked online right away
            PING_REQUEST.set()
        except FileExistsError:
            self.set_status(500)
            self.write("File already exists")
            return
        except ValueError:
            self.set_status(422)
            self.write("Invalid package url")
            return

        self.set_status(200)
        self.set_header("content-type", "application/json")
        self.write(json.dumps({"configuration": f"{name}.yaml"}))
        self.finish()


class DownloadListRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def get(self, configuration=None):
        storage_path = ext_storage_path(configuration)
        storage_json = StorageJSON.load(storage_path)
        if storage_json is None:
            self.send_error(404)
            return

        from esphome.components.esp32 import VARIANTS as ESP32_VARIANTS
        from esphome.components.esp32 import get_download_types as esp32_types
        from esphome.components.esp8266 import get_download_types as esp8266_types
        from esphome.components.libretiny import get_download_types as libretiny_types
        from esphome.components.rp2040 import get_download_types as rp2040_types

        downloads = []
        platform = storage_json.target_platform.lower()
        if platform == const.PLATFORM_RP2040:
            downloads = rp2040_types(storage_json)
        elif platform == const.PLATFORM_ESP8266:
            downloads = esp8266_types(storage_json)
        elif platform.upper() in ESP32_VARIANTS:
            downloads = esp32_types(storage_json)
        elif platform == const.PLATFORM_BK72XX:
            downloads = libretiny_types(storage_json)
        elif platform == const.PLATFORM_RTL87XX:
            downloads = libretiny_types(storage_json)
        else:
            self.send_error(418)
            return

        self.set_status(200)
        self.set_header("content-type", "application/json")
        self.write(json.dumps(downloads))
        self.finish()


class DownloadBinaryRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def get(self, configuration=None):
        compressed = self.get_argument("compressed", "0") == "1"

        storage_path = ext_storage_path(configuration)
        storage_json = StorageJSON.load(storage_path)
        if storage_json is None:
            self.send_error(404)
            return

        # fall back to type=, but prioritize file=
        file_name = self.get_argument("type", None)
        file_name = self.get_argument("file", file_name)
        if file_name is None:
            self.send_error(400)
            return
        file_name = file_name.replace("..", "").lstrip("/")
        # get requested download name, or build it based on the filename
        download_name = self.get_argument(
            "download",
            f"{storage_json.name}-{file_name}",
        )
        path = os.path.dirname(storage_json.firmware_bin_path)
        path = os.path.join(path, file_name)

        if not Path(path).is_file():
            args = ["esphome", "idedata", settings.rel_path(configuration)]
            rc, stdout, _ = run_system_command(*args)

            if rc != 0:
                self.send_error(404 if rc == 2 else 500)
                return

            idedata = platformio_api.IDEData(json.loads(stdout))

            found = False
            for image in idedata.extra_flash_images:
                if image.path.endswith(file_name):
                    path = image.path
                    download_name = file_name
                    found = True
                    break

            if not found:
                self.send_error(404)
                return

        download_name = download_name + ".gz" if compressed else download_name

        self.set_header("Content-Type", "application/octet-stream")
        self.set_header(
            "Content-Disposition", f'attachment; filename="{download_name}"'
        )
        self.set_header("Cache-Control", "no-cache")
        if not Path(path).is_file():
            self.send_error(404)
            return

        with open(path, "rb") as f:
            data = f.read()
            if compressed:
                data = gzip.compress(data, 9)
            self.write(data)

        self.finish()


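# Note on the lookup order in DownloadBinaryRequestHandler.get() above: the
# requested file is first resolved next to storage_json.firmware_bin_path;
# only if it is missing does the handler shell out to `esphome idedata` and
# search the extra flash images reported by PlatformIO. The sanitization of
# file_name (stripping ".." and leading "/") keeps the download inside the
# build directory.

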
class EsphomeVersionHandler(BaseHandler):
    @authenticated
    def get(self):
        self.set_header("Content-Type", "application/json")
        self.write(json.dumps({"version": const.__version__}))
        self.finish()


def _list_dashboard_entries() -> list[DashboardEntry]:
    return settings.entries()


class DashboardEntry:
    """Represents a single dashboard entry.

    This class is thread-safe and read-only.
    """

    __slots__ = ("path", "_storage", "_loaded_storage")

    def __init__(self, path: str) -> None:
        """Initialize the DashboardEntry."""
        self.path = path
        self._storage = None
        self._loaded_storage = False

    def __repr__(self):
        """Return the representation of this entry."""
        return (
            f"DashboardEntry({self.path} "
            f"address={self.address} "
            f"web_port={self.web_port} "
            f"name={self.name} "
            f"no_mdns={self.no_mdns})"
        )

    @property
    def filename(self):
        """Return the filename of this entry."""
        return os.path.basename(self.path)

    @property
    def storage(self) -> StorageJSON | None:
        """Return the StorageJSON object for this entry."""
        if not self._loaded_storage:
            self._storage = StorageJSON.load(ext_storage_path(self.filename))
            self._loaded_storage = True
        return self._storage

    @property
    def address(self):
        """Return the address of this entry."""
        if self.storage is None:
            return None
        return self.storage.address

    @property
    def no_mdns(self):
        """Return the no_mdns setting of this entry."""
        if self.storage is None:
            return None
        return self.storage.no_mdns

    @property
    def web_port(self):
        """Return the web port of this entry."""
        if self.storage is None:
            return None
        return self.storage.web_port

    @property
    def name(self):
        """Return the name of this entry."""
        if self.storage is None:
            return self.filename.replace(".yml", "").replace(".yaml", "")
        return self.storage.name

    @property
    def friendly_name(self):
        """Return the friendly name of this entry."""
        if self.storage is None:
            return self.name
        return self.storage.friendly_name

    @property
    def comment(self):
        """Return the comment of this entry."""
        if self.storage is None:
            return None
        return self.storage.comment

    @property
    def target_platform(self):
        """Return the target platform of this entry."""
        if self.storage is None:
            return None
        return self.storage.target_platform

    @property
    def update_available(self):
        """Return if an update is available for this entry."""
        if self.storage is None:
            return True
        return self.update_old != self.update_new

    @property
    def update_old(self):
        if self.storage is None:
            return ""
        return self.storage.esphome_version or ""

    @property
    def update_new(self):
        return const.__version__

    @property
    def loaded_integrations(self):
        if self.storage is None:
            return []
        return self.storage.loaded_integrations


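# DashboardEntry defers reading its storage JSON until the first property
# access, then memoizes the result (including a None result) via
# _loaded_storage, so a directory scan stays cheap. A sketch of the resulting
# behavior, with a hypothetical filename:
#
#     entry = DashboardEntry("/config/living_room.yaml")
#     entry.name      # triggers one StorageJSON.load(), then caches it
#     entry.web_port  # reuses the cached storage, no extra disk I/O

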
class ListDevicesHandler(BaseHandler):
    @authenticated
    def get(self):
        entries = _list_dashboard_entries()
        self.set_header("content-type", "application/json")
        configured = {entry.name for entry in entries}
        self.write(
            json.dumps(
                {
                    "configured": [
                        {
                            "name": entry.name,
                            "friendly_name": entry.friendly_name,
                            "configuration": entry.filename,
                            "loaded_integrations": entry.loaded_integrations,
                            "deployed_version": entry.update_old,
                            "current_version": entry.update_new,
                            "path": entry.path,
                            "comment": entry.comment,
                            "address": entry.address,
                            "web_port": entry.web_port,
                            "target_platform": entry.target_platform,
                        }
                        for entry in entries
                    ],
                    "importable": [
                        {
                            "name": res.device_name,
                            "friendly_name": res.friendly_name,
                            "package_import_url": res.package_import_url,
                            "project_name": res.project_name,
                            "project_version": res.project_version,
                            "network": res.network,
                        }
                        for res in IMPORT_RESULT.values()
                        if res.device_name not in configured
                    ],
                }
            )
        )


class MainRequestHandler(BaseHandler):
    @authenticated
    def get(self):
        begin = bool(self.get_argument("begin", False))

        self.render(
            "index.template.html",
            begin=begin,
            **template_args(),
            login_enabled=settings.using_password,
        )


def _ping_func(filename, address):
    if os.name == "nt":
        command = ["ping", "-n", "1", address]
    else:
        command = ["ping", "-c", "1", address]
    rc, _, _ = run_system_command(*command)
    return filename, rc == 0


class PrometheusServiceDiscoveryHandler(BaseHandler):
    @authenticated
    def get(self):
        entries = _list_dashboard_entries()
        self.set_header("content-type", "application/json")
        sd = []
        for entry in entries:
            if entry.web_port is None:
                continue
            labels = {
                "__meta_name": entry.name,
                "__meta_esp_platform": entry.target_platform,
                "__meta_esphome_version": entry.storage.esphome_version,
            }
            for integration in entry.storage.loaded_integrations:
                labels[f"__meta_integration_{integration}"] = "true"
            sd.append(
                {
                    "targets": [
                        f"{entry.address}:{entry.web_port}",
                    ],
                    "labels": labels,
                }
            )
        self.write(json.dumps(sd))


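# The handler above emits the JSON shape Prometheus HTTP service discovery
# expects, one element per device that exposes a web server; roughly (all
# values made up for illustration):
#
#     [{"targets": ["10.0.0.12:80"],
#       "labels": {"__meta_name": "living_room",
#                  "__meta_esp_platform": "ESP32",
#                  "__meta_esphome_version": "2023.11.0",
#                  "__meta_integration_wifi": "true"}}]

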
class BoardsRequestHandler(BaseHandler):
    @authenticated
    def get(self, platform: str):
        from esphome.components.bk72xx.boards import BOARDS as BK72XX_BOARDS
        from esphome.components.esp32.boards import BOARDS as ESP32_BOARDS
        from esphome.components.esp8266.boards import BOARDS as ESP8266_BOARDS
        from esphome.components.rp2040.boards import BOARDS as RP2040_BOARDS
        from esphome.components.rtl87xx.boards import BOARDS as RTL87XX_BOARDS

        platform_to_boards = {
            const.PLATFORM_ESP32: ESP32_BOARDS,
            const.PLATFORM_ESP8266: ESP8266_BOARDS,
            const.PLATFORM_RP2040: RP2040_BOARDS,
            const.PLATFORM_BK72XX: BK72XX_BOARDS,
            const.PLATFORM_RTL87XX: RTL87XX_BOARDS,
        }
        # filter all ESP32 variants by the requested platform
        if platform.startswith("esp32"):
            boards = {
                k: v
                for k, v in platform_to_boards[const.PLATFORM_ESP32].items()
                if v[const.KEY_VARIANT] == platform.upper()
            }
        else:
            boards = platform_to_boards[platform]

        # map to a {board_name: board_title} dict
        platform_boards = {key: val[const.KEY_NAME] for key, val in boards.items()}
        # sort by board title
        boards_items = sorted(platform_boards.items(), key=lambda item: item[1])
        output = [{"items": dict(boards_items)}]

        self.set_header("content-type", "application/json")
        self.write(json.dumps(output))


class MDNSStatusThread(threading.Thread):
    def __init__(self):
        """Initialize the MDNSStatusThread."""
        super().__init__()
        # This is the current mdns state for each host (True, False, None)
        self.host_mdns_state: dict[str, bool | None] = {}
        # This is the hostnames to filenames mapping
        self.host_name_to_filename: dict[str, str] = {}
        # This is a set of host names to track (i.e. no_mdns = false)
        self.host_name_with_mdns_enabled: set[str] = set()
        self._refresh_hosts()

    def _refresh_hosts(self):
        """Refresh the hosts to track."""
        entries = _list_dashboard_entries()
        host_name_with_mdns_enabled = self.host_name_with_mdns_enabled
        host_mdns_state = self.host_mdns_state
        host_name_to_filename = self.host_name_to_filename

        for entry in entries:
            name = entry.name
            # If no_mdns is set, remove it from the set
            if entry.no_mdns:
                host_name_with_mdns_enabled.discard(name)
                continue

            # We are tracking this host
            host_name_with_mdns_enabled.add(name)
            filename = entry.filename

            # If we just adopted/imported this host, we likely
            # already have a state for it, so we should make sure
            # to set it so the dashboard shows it as online
            if name in host_mdns_state:
                PING_RESULT[filename] = host_mdns_state[name]

            # Make sure the mapping is up to date
            # so when we get an mdns update we can map it back
            # to the filename
            host_name_to_filename[name] = filename

    def run(self):
        global IMPORT_RESULT

        zc = EsphomeZeroconf()
        host_mdns_state = self.host_mdns_state
        host_name_to_filename = self.host_name_to_filename
        host_name_with_mdns_enabled = self.host_name_with_mdns_enabled

        def on_update(dat: dict[str, bool | None]) -> None:
            """Update the global PING_RESULT dict."""
            for name, result in dat.items():
                host_mdns_state[name] = result
                if name in host_name_with_mdns_enabled:
                    filename = host_name_to_filename[name]
                    PING_RESULT[filename] = result

        self._refresh_hosts()
        stat = DashboardStatus(on_update)
        imports = DashboardImportDiscovery()
        browser = DashboardBrowser(
            zc, ESPHOME_SERVICE_TYPE, [stat.browser_callback, imports.browser_callback]
        )

        while not STOP_EVENT.is_set():
            self._refresh_hosts()
            IMPORT_RESULT = imports.import_state
            PING_REQUEST.wait()
            PING_REQUEST.clear()

        browser.cancel()
        zc.close()


class PingStatusThread(threading.Thread):
    def run(self):
        with multiprocessing.Pool(processes=8) as pool:
            while not STOP_EVENT.wait(2):
                # Only do pings if somebody has the dashboard open

                def callback(ret):
                    PING_RESULT[ret[0]] = ret[1]

                entries = _list_dashboard_entries()
                queue = collections.deque()
                for entry in entries:
                    if entry.address is None:
                        PING_RESULT[entry.filename] = None
                        continue

                    result = pool.apply_async(
                        _ping_func, (entry.filename, entry.address), callback=callback
                    )
                    queue.append(result)

                while queue:
                    item = queue[0]
                    if item.ready():
                        queue.popleft()
                        continue

                    try:
                        item.get(0.1)
                    except OSError:
                        # ping not installed
                        pass
                    except multiprocessing.TimeoutError:
                        pass

                    if STOP_EVENT.is_set():
                        pool.terminate()
                        return

                PING_REQUEST.wait()
                PING_REQUEST.clear()


class MqttStatusThread(threading.Thread):
    def run(self):
        from esphome import mqtt

        entries = _list_dashboard_entries()

        config = mqtt.config_from_env()
        topic = "esphome/discover/#"

        def on_message(client, userdata, msg):
            nonlocal entries

            payload = msg.payload.decode(errors="backslashreplace")
            if len(payload) > 0:
                data = json.loads(payload)
                if "name" not in data:
                    return
                for entry in entries:
                    if entry.name == data["name"]:
                        PING_RESULT[entry.filename] = True
                        return

        def on_connect(client, userdata, flags, return_code):
            client.publish("esphome/discover", None, retain=False)

        mqttid = str(binascii.hexlify(os.urandom(6)).decode())

        client = mqtt.prepare(
            config,
            [topic],
            on_message,
            on_connect,
            None,
            None,
            f"esphome-dashboard-{mqttid}",
        )
        client.loop_start()

        while not STOP_EVENT.wait(2):
            # update entries
            entries = _list_dashboard_entries()

            # will be set to true in on_message
            for entry in entries:
                if entry.no_mdns:
                    PING_RESULT[entry.filename] = False

            client.publish("esphome/discover", None, retain=False)
            MQTT_PING_REQUEST.wait()
            MQTT_PING_REQUEST.clear()

        client.disconnect()
        client.loop_stop()


class PingRequestHandler(BaseHandler):
    @authenticated
    def get(self):
        PING_REQUEST.set()
        if settings.status_use_mqtt:
            MQTT_PING_REQUEST.set()
        self.set_header("content-type", "application/json")
        self.write(json.dumps(PING_RESULT))


class InfoRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def get(self, configuration=None):
        yaml_path = settings.rel_path(configuration)
        all_yaml_files = settings.list_yaml_files()

        if yaml_path not in all_yaml_files:
            self.set_status(404)
            return

        self.set_header("content-type", "application/json")
        self.write(DashboardEntry(yaml_path).storage.to_json())


class EditRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def get(self, configuration=None):
        filename = settings.rel_path(configuration)
        content = ""
        if os.path.isfile(filename):
            with open(file=filename, encoding="utf-8") as f:
                content = f.read()
        self.write(content)

    @authenticated
    @bind_config
    def post(self, configuration=None):
        with open(file=settings.rel_path(configuration), mode="wb") as f:
            f.write(self.request.body)
        self.set_status(200)


class DeleteRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def post(self, configuration=None):
        config_file = settings.rel_path(configuration)
        storage_path = ext_storage_path(configuration)

        trash_path = trash_storage_path()
        mkdir_p(trash_path)
        shutil.move(config_file, os.path.join(trash_path, configuration))

        storage_json = StorageJSON.load(storage_path)
        if storage_json is not None:
            # Delete the build folder (if it exists); rmtree's second
            # positional parameter is ignore_errors, so spell it out
            # explicitly instead of passing a stray path there
            name = storage_json.name
            build_folder = os.path.join(settings.config_dir, name)
            shutil.rmtree(build_folder, ignore_errors=True)

        # Remove the old ping result from the cache
        PING_RESULT.pop(configuration, None)


class UndoDeleteRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def post(self, configuration=None):
        config_file = settings.rel_path(configuration)
        trash_path = trash_storage_path()
        shutil.move(os.path.join(trash_path, configuration), config_file)


PING_RESULT: dict = {}
IMPORT_RESULT = {}
STOP_EVENT = threading.Event()
PING_REQUEST = threading.Event()
MQTT_PING_REQUEST = threading.Event()


class LoginHandler(BaseHandler):
    def get(self):
        if is_authenticated(self):
            self.redirect("./")
        else:
            self.render_login_page()

    def render_login_page(self, error=None):
        self.render(
            "login.template.html",
            error=error,
            ha_addon=settings.using_ha_addon_auth,
            has_username=bool(settings.username),
            **template_args(),
        )

    def post_ha_addon_login(self):
        import requests

        headers = {
            "X-Supervisor-Token": os.getenv("SUPERVISOR_TOKEN"),
        }

        data = {
            "username": self.get_argument("username", ""),
            "password": self.get_argument("password", ""),
        }
        try:
            req = requests.post(
                "http://supervisor/auth", headers=headers, json=data, timeout=30
            )
            if req.status_code == 200:
                self.set_secure_cookie("authenticated", cookie_authenticated_yes)
                self.redirect("/")
                return
        except Exception as err:  # pylint: disable=broad-except
            _LOGGER.warning("Error during Hass.io auth request: %s", err)
            self.set_status(500)
            self.render_login_page(error="Internal server error")
            return
        self.set_status(401)
        self.render_login_page(error="Invalid username or password")

    def post_native_login(self):
        username = self.get_argument("username", "")
        password = self.get_argument("password", "")
        if settings.check_password(username, password):
            self.set_secure_cookie("authenticated", cookie_authenticated_yes)
            self.redirect("./")
            return
        error_str = (
            "Invalid username or password" if settings.username else "Invalid password"
        )
        self.set_status(401)
        self.render_login_page(error=error_str)

    def post(self):
        if settings.using_ha_addon_auth:
            self.post_ha_addon_login()
        else:
            self.post_native_login()


class LogoutHandler(BaseHandler):
    @authenticated
    def get(self):
        self.clear_cookie("authenticated")
        self.redirect("./login")


class SecretKeysRequestHandler(BaseHandler):
    @authenticated
    def get(self):
        filename = None

        for secret_filename in const.SECRETS_FILES:
            relative_filename = settings.rel_path(secret_filename)
            if os.path.isfile(relative_filename):
                filename = relative_filename
                break

        if filename is None:
            self.send_error(404)
            return

        secret_keys = list(yaml_util.load_yaml(filename, clear_secrets=False))

        self.set_header("content-type", "application/json")
        self.write(json.dumps(secret_keys))


class SafeLoaderIgnoreUnknown(yaml.SafeLoader):
    def ignore_unknown(self, node):
        return f"{node.tag} {node.value}"

    def construct_yaml_binary(self, node) -> str:
        return super().construct_yaml_binary(node).decode("ascii")


SafeLoaderIgnoreUnknown.add_constructor(None, SafeLoaderIgnoreUnknown.ignore_unknown)
SafeLoaderIgnoreUnknown.add_constructor(
    "tag:yaml.org,2002:binary", SafeLoaderIgnoreUnknown.construct_yaml_binary
)


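# Renders a configuration as JSON by shelling out to
# `esphome config <file> --show-secrets` and re-parsing the YAML output with
# the tolerant loader above. Illustrative exchange (names and values here
# are hypothetical):
#   GET /json-config?configuration=livingroom.yaml
#   -> {"esphome": {"name": "livingroom", ...}, ...}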
class JsonConfigRequestHandler(BaseHandler):
    @authenticated
    @bind_config
    def get(self, configuration=None):
        filename = settings.rel_path(configuration)
        if not os.path.isfile(filename):
            self.send_error(404)
            return

        args = ["esphome", "config", filename, "--show-secrets"]

        rc, stdout, _ = run_system_command(*args)

        if rc != 0:
            self.send_error(422)
            return

        data = yaml.load(stdout, Loader=SafeLoaderIgnoreUnknown)
        self.set_header("content-type", "application/json")
        self.write(json.dumps(data))
        self.finish()


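# Frontend asset resolution: by default the files ship inside the packaged
# esphome_dashboard module; setting the ENV_DEV environment variable points
# the server at a local frontend checkout instead.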
def get_base_frontend_path():
    if ENV_DEV not in os.environ:
        import esphome_dashboard

        return esphome_dashboard.where()

    static_path = os.environ[ENV_DEV]
    if not static_path.endswith("/"):
        static_path += "/"

    # This path can be relative, so resolve against the root or else templates don't work
    return os.path.abspath(os.path.join(os.getcwd(), static_path, "esphome_dashboard"))


def get_static_path(*args):
    return os.path.join(get_base_frontend_path(), "static", *args)


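# Builds a cache-busted URL for a static asset; memoized with functools.cache
# since the files do not change while the dashboard runs. Example shape (the
# hash value here is illustrative):
#   get_static_file_url("css/esphome.css") -> "./static/css/esphome.css?hash=1a2b3c4d"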
@functools.cache
def get_static_file_url(name):
    base = f"./static/{name}"

    if ENV_DEV in os.environ:
        return base

    # Module imports can't deduplicate if stuff added to url
    if name == "js/esphome/index.js":
        import esphome_dashboard

        return base.replace("index.js", esphome_dashboard.entrypoint())

    path = get_static_path(name)
    with open(path, "rb") as f_handle:
        hash_ = hashlib.md5(f_handle.read()).hexdigest()[:8]
    return f"{base}?hash={hash_}"


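# Builds the Tornado application. The custom access-log function suppresses
# the high-frequency polling endpoints (serial-port and ping requests)
# outside of debug mode so they do not flood the log.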
def make_app(debug=get_bool_env(ENV_DEV)):
    def log_function(handler):
        if handler.get_status() < 400:
            log_method = access_log.info

            if isinstance(handler, SerialPortRequestHandler) and not debug:
                return
            if isinstance(handler, PingRequestHandler) and not debug:
                return
        elif handler.get_status() < 500:
            log_method = access_log.warning
        else:
            log_method = access_log.error

        request_time = 1000.0 * handler.request.request_time()
        # pylint: disable=protected-access
        log_method(
            "%d %s %.2fms",
            handler.get_status(),
            handler._request_summary(),
            request_time,
        )

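    # Content-hashed assets (and all javascript, whose filenames embed a
    # hash) can be cached for the maximum age; everything else falls back to
    # Tornado's default, and caching is disabled entirely in debug mode.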
    class StaticFileHandler(tornado.web.StaticFileHandler):
        def get_cache_time(
            self, path: str, modified: datetime.datetime | None, mime_type: str
        ) -> int:
            """Override to customize cache control behavior."""
            if debug:
                return 0
            # Assets that are hashed have ?hash= in the URL, all javascript
            # filenames hashed so we can cache them for a long time
            if "hash" in self.request.arguments or "/javascript" in mime_type:
                return self.CACHE_MAX_AGE
            return super().get_cache_time(path, modified, mime_type)

    app_settings = {
        "debug": debug,
        "cookie_secret": settings.cookie_secret,
        "log_function": log_function,
        "websocket_ping_interval": 30.0,
        "template_path": get_base_frontend_path(),
    }
    rel = settings.relative_url
    app = tornado.web.Application(
        [
            (f"{rel}", MainRequestHandler),
            (f"{rel}login", LoginHandler),
            (f"{rel}logout", LogoutHandler),
            (f"{rel}logs", EsphomeLogsHandler),
            (f"{rel}upload", EsphomeUploadHandler),
            (f"{rel}run", EsphomeRunHandler),
            (f"{rel}compile", EsphomeCompileHandler),
            (f"{rel}validate", EsphomeValidateHandler),
            (f"{rel}clean-mqtt", EsphomeCleanMqttHandler),
            (f"{rel}clean", EsphomeCleanHandler),
            (f"{rel}vscode", EsphomeVscodeHandler),
            (f"{rel}ace", EsphomeAceEditorHandler),
            (f"{rel}update-all", EsphomeUpdateAllHandler),
            (f"{rel}info", InfoRequestHandler),
            (f"{rel}edit", EditRequestHandler),
            (f"{rel}downloads", DownloadListRequestHandler),
            (f"{rel}download.bin", DownloadBinaryRequestHandler),
            (f"{rel}serial-ports", SerialPortRequestHandler),
            (f"{rel}ping", PingRequestHandler),
            (f"{rel}delete", DeleteRequestHandler),
            (f"{rel}undo-delete", UndoDeleteRequestHandler),
            (f"{rel}wizard", WizardRequestHandler),
            (f"{rel}static/(.*)", StaticFileHandler, {"path": get_static_path()}),
            (f"{rel}devices", ListDevicesHandler),
            (f"{rel}import", ImportRequestHandler),
            (f"{rel}secret_keys", SecretKeysRequestHandler),
            (f"{rel}json-config", JsonConfigRequestHandler),
            (f"{rel}rename", EsphomeRenameHandler),
            (f"{rel}prometheus-sd", PrometheusServiceDiscoveryHandler),
            (f"{rel}boards/([a-z0-9]+)", BoardsRequestHandler),
            (f"{rel}version", EsphomeVersionHandler),
        ],
        **app_settings,
    )

    return app


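# Entry point for the dashboard command: applies the CLI arguments, builds
# the app, starts the device-status threads, and runs the IO loop until
# interrupted.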
def start_web_server(args):
    settings.parse_args(args)

    if settings.using_auth:
        path = esphome_storage_path()
        storage = EsphomeStorageJSON.load(path)
        if storage is None:
            storage = EsphomeStorageJSON.get_default()
            storage.save(path)
        settings.cookie_secret = storage.cookie_secret

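    # args.verbose doubles as Tornado's debug flag for the application.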
    app = make_app(args.verbose)
    if args.socket is not None:
        _LOGGER.info(
            "Starting dashboard web server on unix socket %s and configuration dir %s...",
            args.socket,
            settings.config_dir,
        )
        server = tornado.httpserver.HTTPServer(app)
        socket = tornado.netutil.bind_unix_socket(args.socket, mode=0o666)
        server.add_socket(socket)
    else:
        _LOGGER.info(
            "Starting dashboard web server on http://%s:%s and configuration dir %s...",
            args.address,
            args.port,
            settings.config_dir,
        )
        app.listen(args.port, args.address)

    if args.open_ui:
        import webbrowser

        webbrowser.open(f"http://{args.address}:{args.port}")

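    # Track device online/offline state either by actively pinging devices
    # or by listening for their mDNS announcements, plus MQTT when enabled.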
    if settings.status_use_ping:
        status_thread = PingStatusThread()
    else:
        status_thread = MDNSStatusThread()
    status_thread.start()

    if settings.status_use_mqtt:
        status_thread_mqtt = MqttStatusThread()
        status_thread_mqtt.start()

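    # Run until interrupted; on Ctrl+C, signal the status threads to stop
    # (setting the request events wakes any thread blocked waiting on them)
    # and remove the unix socket if one was bound.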
    try:
        tornado.ioloop.IOLoop.current().start()
    except KeyboardInterrupt:
        _LOGGER.info("Shutting down...")
        STOP_EVENT.set()
        PING_REQUEST.set()
        status_thread.join()
        if settings.status_use_mqtt:
            MQTT_PING_REQUEST.set()
            status_thread_mqtt.join()
        if args.socket is not None:
            os.remove(args.socket)