Mirror of https://github.com/esphome/esphome.git (synced 2025-11-10 11:55:52 +00:00)

Compare commits: 62 commits
| Author | SHA1 | Date |
|---|---|---|
| | 596c334fcb | |
| | ca4450858f | |
| | f3ec83fe31 | |
| | c4ada8c9f0 | |
| | 23df5d8af7 | |
| | 289acade1e | |
| | a99f99779a | |
| | 5c14ca030a | |
| | 0fc6a027a7 | |
| | 3c0d97ef69 | |
| | 5b4f98d414 | |
| | 7ebfcd3807 | |
| | 3ef0634dd2 | |
| | 0928c9739f | |
| | e009f21a72 | |
| | 606c412616 | |
| | 975b5127d6 | |
| | 60c9ffef30 | |
| | 99861259d7 | |
| | 067ec30c56 | |
| | 5a102c2ab7 | |
| | 4b017e2096 | |
| | 8495ce96a3 | |
| | 55caf4f648 | |
| | c123f0091d | |
| | 7c65d44976 | |
| | 5b8d12a80c | |
| | 3951a2b22a | |
| | 69a74a30e8 | |
| | f3ee5b55e9 | |
| | f6cc9f7caa | |
| | a5d0ecdb13 | |
| | 71cbc9cfb0 | |
| | 88625c656d | |
| | 971b15ac67 | |
| | a4edcc48ca | |
| | 1778dd4df9 | |
| | 311e837196 | |
| | 3b00cfd6c4 | |
| | 1c7ca4bc6f | |
| | 38e7b597d6 | |
| | 808ee19180 | |
| | c2a0c22bd9 | |
| | e785ad5401 | |
| | bacddc3673 | |
| | 7dd0dabaf5 | |
| | 92f8b043ce | |
| | 2e5fd7e90d | |
| | 407c46cb03 | |
| | 12ce448f2d | |
| | e7ce8f7a13 | |
| | 3d1cce2a29 | |
| | af0c213024 | |
| | 88e036ddb2 | |
| | 8012de3ba4 | |
| | f0c0131ed1 | |
| | 52750d933b | |
| | 7e7a85abfd | |
| | c1b8107aaf | |
| | 0bdcce609f | |
| | 6b702f8014 | |
| | 078ab26fd2 | |
.github/issue-close-app.yml (vendored, new file, 7 lines)

@@ -0,0 +1,7 @@
+comment: >-
+  https://github.com/esphome/esphome/issues/430
+issueConfigs:
+- content:
+  - "OTHERWISE THE ISSUE WILL BE CLOSED AUTOMATICALLY"
+
+caseInsensitive: false
@@ -41,11 +41,11 @@ stages:
   - |
     if [[ "${IS_HASSIO}" == "YES" ]]; then
-      BUILD_FROM=esphome/esphome-hassio-base-${BUILD_ARCH}:1.2.1
+      BUILD_FROM=esphome/esphome-hassio-base-${BUILD_ARCH}:1.3.0
       BUILD_TO=esphome/esphome-hassio-${BUILD_ARCH}
       DOCKERFILE=docker/Dockerfile.hassio
     else
-      BUILD_FROM=esphome/esphome-base-${BUILD_ARCH}:1.2.1
+      BUILD_FROM=esphome/esphome-base-${BUILD_ARCH}:1.3.0
       if [[ "${BUILD_ARCH}" == "amd64" ]]; then
         BUILD_TO=esphome/esphome
       else
@@ -177,48 +177,48 @@ deploy-beta:pypi:
 .dev-vars: &dev-vars
   DEV: YES
 
-aarch64-beta-docker:
-  <<: *beta
-  variables:
-    BETA: "YES"
-    BUILD_ARCH: aarch64
-    IS_HASSIO: "NO"
-    RELEASE: "YES"
-aarch64-beta-hassio:
-  <<: *beta
-  variables:
-    BETA: "YES"
-    BUILD_ARCH: aarch64
-    IS_HASSIO: "YES"
-    RELEASE: "YES"
-aarch64-dev-docker:
-  <<: *dev
-  variables:
-    BUILD_ARCH: aarch64
-    DEV: "YES"
-    IS_HASSIO: "NO"
-aarch64-dev-hassio:
-  <<: *dev
-  variables:
-    BUILD_ARCH: aarch64
-    DEV: "YES"
-    IS_HASSIO: "YES"
-aarch64-latest-docker:
-  <<: *latest
-  variables:
-    BETA: "YES"
-    BUILD_ARCH: aarch64
-    IS_HASSIO: "NO"
-    LATEST: "YES"
-    RELEASE: "YES"
-aarch64-latest-hassio:
-  <<: *latest
-  variables:
-    BETA: "YES"
-    BUILD_ARCH: aarch64
-    IS_HASSIO: "YES"
-    LATEST: "YES"
-    RELEASE: "YES"
+#aarch64-beta-docker:
+#  <<: *beta
+#  variables:
+#    BETA: "YES"
+#    BUILD_ARCH: aarch64
+#    IS_HASSIO: "NO"
+#    RELEASE: "YES"
+#aarch64-beta-hassio:
+#  <<: *beta
+#  variables:
+#    BETA: "YES"
+#    BUILD_ARCH: aarch64
+#    IS_HASSIO: "YES"
+#    RELEASE: "YES"
+#aarch64-dev-docker:
+#  <<: *dev
+#  variables:
+#    BUILD_ARCH: aarch64
+#    DEV: "YES"
+#    IS_HASSIO: "NO"
+#aarch64-dev-hassio:
+#  <<: *dev
+#  variables:
+#    BUILD_ARCH: aarch64
+#    DEV: "YES"
+#    IS_HASSIO: "YES"
+#aarch64-latest-docker:
+#  <<: *latest
+#  variables:
+#    BETA: "YES"
+#    BUILD_ARCH: aarch64
+#    IS_HASSIO: "NO"
+#    LATEST: "YES"
+#    RELEASE: "YES"
+#aarch64-latest-hassio:
+#  <<: *latest
+#  variables:
+#    BETA: "YES"
+#    BUILD_ARCH: aarch64
+#    IS_HASSIO: "YES"
+#    LATEST: "YES"
+#    RELEASE: "YES"
 amd64-beta-docker:
   <<: *beta
   variables:
.travis.yml (11 changed lines)

@@ -1,6 +1,13 @@
 sudo: false
 language: python
 
+cache:
+  directories:
+    - "~/.platformio"
+    - "$TRAVIS_BUILD_DIR/tests/build/test1/.piolibdeps"
+    - "$TRAVIS_BUILD_DIR/tests/build/test2/.piolibdeps"
+    - "$TRAVIS_BUILD_DIR/tests/build/test3/.piolibdeps"
+
 matrix:
   fast_finish: true
   include:
@@ -12,7 +19,7 @@ matrix:
       - pylint esphome
   - python: "3.5.3"
     env: TARGET=Lint3.5
-    install: pip install -U https://github.com/platformio/platformio-core/archive/develop.zip && pip install -e . && pip install flake8==3.6.0 pylint==2.2.2 pillow
+    install: pip install -U https://github.com/platformio/platformio-core/archive/develop.zip && pip install -e . && pip install flake8==3.6.0 pylint==2.3.0 pillow
     script:
       - flake8 esphome
      - pylint esphome
@@ -25,7 +32,7 @@ matrix:
       - esphome tests/test3.yaml compile
   #- python: "3.5.3"
   #  env: TARGET=Test3.5
-  #  install: pip install -U https://github.com/platformio/platformio-core/archive/develop.zip && pip install -e . && pip install flake8==3.6.0 pylint==2.2.2 pillow
+  #  install: pip install -U https://github.com/platformio/platformio-core/archive/develop.zip && pip install -e . && pip install flake8==3.6.0 pylint==2.3.0 pillow
   #  script:
   #    - esphome tests/test1.yaml compile
   #    - esphome tests/test2.yaml compile
@@ -1,4 +1,4 @@
-ARG BUILD_FROM=esphome/esphome-base-amd64:1.2.1
+ARG BUILD_FROM=esphome/esphome-base-amd64:1.3.0
 FROM ${BUILD_FROM}
 
 COPY . .
@@ -1,4 +1,4 @@
-ARG BUILD_FROM=esphome/esphome-hassio-base-amd64:1.2.1
+ARG BUILD_FROM=esphome/esphome-hassio-base-amd64:1.3.0
 FROM ${BUILD_FROM}
 
 # Copy root filesystem
@@ -16,11 +16,11 @@ echo "PWD: $PWD"
 
 if [[ ${IS_HASSIO} = "YES" ]]; then
   docker build \
-    --build-arg "BUILD_FROM=esphome/esphome-hassio-base-${BUILD_ARCH}:1.2.1" \
+    --build-arg "BUILD_FROM=esphome/esphome-hassio-base-${BUILD_ARCH}:1.3.0" \
     --build-arg "BUILD_VERSION=${CACHE_TAG}" \
     -t "${IMAGE_NAME}" -f ../docker/Dockerfile.hassio ..
 else
   docker build \
-    --build-arg "BUILD_FROM=esphome/esphome-base-${BUILD_ARCH}:1.2.1" \
+    --build-arg "BUILD_FROM=esphome/esphome-base-${BUILD_ARCH}:1.3.0" \
     -t "${IMAGE_NAME}" -f ../docker/Dockerfile ..
 fi
@@ -6,9 +6,23 @@
 # shellcheck disable=SC1091
 source /usr/lib/hassio-addons/base.sh
 
+export ESPHOME_IS_HASSIO=true
+
 if hass.config.true 'leave_front_door_open'; then
   export DISABLE_HA_AUTHENTICATION=true
 fi
 
+if hass.config.true 'streamer_mode'; then
+  export ESPHOME_STREAMER_MODE=true
+fi
+
+if hass.config.true 'status_use_ping'; then
+  export ESPHOME_DASHBOARD_USE_PING=true
+fi
+
+if hass.config.has_value 'relative_url'; then
+  export ESPHOME_DASHBOARD_RELATIVE_URL=$(hass.config.get 'relative_url')
+fi
+
 hass.log.info "Starting ESPHome dashboard..."
 exec esphome /config/esphome dashboard --socket /var/run/esphome.sock --hassio
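The options read by this run script are introduced in this change set. A minimal sketch of the corresponding Hass.io add-on options, assuming the option names match the hass.config lookups above exactly (values are illustrative only):

```yaml
# Illustrative add-on options; each maps to an environment variable in the run script.
leave_front_door_open: false   # true -> DISABLE_HA_AUTHENTICATION=true
streamer_mode: true            # true -> ESPHOME_STREAMER_MODE=true
status_use_ping: true          # true -> ESPHOME_DASHBOARD_USE_PING=true
relative_url: /esphome/        # exported as ESPHOME_DASHBOARD_RELATIVE_URL
```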
@@ -17,9 +17,9 @@ from esphome.core import CORE, EsphomeError
 from esphome.cpp_generator import Expression, RawStatement, add, statement
 from esphome.helpers import color, indent
 from esphome.py_compat import IS_PY2, safe_input, text_type
-from esphome.storage_json import StorageJSON, esphome_storage_path, \
-    start_update_check_thread, storage_path
-from esphome.util import run_external_command, run_external_process, safe_print
+from esphome.storage_json import StorageJSON, storage_path
+from esphome.util import run_external_command, run_external_process, safe_print, \
+    is_dev_esphome_version
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -157,17 +157,21 @@ def write_cpp(config):
 
 def compile_program(args, config):
     _LOGGER.info("Compiling app...")
-    update_check = not os.getenv('ESPHOME_NO_UPDATE_CHECK', '')
-    if update_check:
-        thread = start_update_check_thread(esphome_storage_path(CORE.config_dir))
     rc = platformio_api.run_compile(config, args.verbose)
-    if update_check:
-        thread.join()
+    if rc != 0 and CORE.is_dev_esphome_core_version and not is_dev_esphome_version():
+        _LOGGER.warning("You're using 'esphome_core_version: dev' but not using the "
+                        "dev version of the ESPHome tool.")
+        _LOGGER.warning("Expect compile errors if these versions are out of sync.")
+        _LOGGER.warning("Please install the dev version of ESPHome too when using "
+                        "'esphome_core_version: dev'.")
+        _LOGGER.warning(" - Hass.io: Install 'ESPHome (dev)' addon")
+        _LOGGER.warning(" - Docker: docker run [...] esphome/esphome:dev [...]")
+        _LOGGER.warning(" - PIP: pip install -U https://github.com/esphome/esphome/archive/dev.zip")
     return rc
 
 
 def upload_using_esptool(config, port):
-    path = os.path.join(CORE.build_path, '.pioenvs', CORE.name, 'firmware.bin')
+    path = CORE.firmware_bin
     cmd = ['esptool.py', '--before', 'default_reset', '--after', 'hard_reset',
            '--chip', 'esp8266', '--port', port, 'write_flash', '0x0', path]
 
@@ -3,9 +3,9 @@ import copy
 import voluptuous as vol
 
 import esphome.config_validation as cv
-from esphome.const import CONF_ABOVE, CONF_ACTION_ID, CONF_AND, CONF_AUTOMATION_ID, \
-    CONF_BELOW, CONF_CONDITION, CONF_CONDITION_ID, CONF_DELAY, CONF_ELSE, CONF_ID, CONF_IF, \
-    CONF_LAMBDA, CONF_OR, CONF_RANGE, CONF_THEN, CONF_TRIGGER_ID, CONF_WHILE, CONF_WAIT_UNTIL
+from esphome.const import CONF_ABOVE, CONF_ACTION_ID, CONF_AND, CONF_AUTOMATION_ID, CONF_BELOW, \
+    CONF_CONDITION, CONF_CONDITION_ID, CONF_DELAY, CONF_ELSE, CONF_ID, CONF_IF, CONF_LAMBDA, \
+    CONF_OR, CONF_RANGE, CONF_THEN, CONF_TRIGGER_ID, CONF_WAIT_UNTIL, CONF_WHILE
 from esphome.core import CORE
 from esphome.cpp_generator import Pvariable, TemplateArguments, add, get_variable, \
     process_lambda, templatable
@@ -48,7 +48,7 @@ def validate_recursive_condition(value):
                               u"".format(key, key2), path)
         validator = CONDITION_REGISTRY[key][0]
         try:
-            condition = validator(item[key])
+            condition = validator(item[key] or {})
         except vol.Invalid as err:
             err.prepend(path)
             raise err
@@ -83,7 +83,7 @@ def validate_recursive_action(value):
                               u"".format(key, key2), path)
         validator = ACTION_REGISTRY[key][0]
         try:
-            action = validator(item[key])
+            action = validator(item[key] or {})
         except vol.Invalid as err:
             err.prepend(path)
             raise err
@@ -131,7 +131,7 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
 
     # Next try as a sequence of automations
     try:
-        return vol.Schema([schema])(value)
+        return cv.Schema([schema])(value)
     except vol.Invalid as err2:
         if 'Unable to find action' in str(err):
             raise err2
@@ -146,7 +146,7 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
     def validator(value):
         value = validator_(value)
         if extra_validators is not None:
-            value = vol.Schema([extra_validators])(value)
+            value = cv.Schema([extra_validators])(value)
         if single:
             if len(value) != 1:
                 raise vol.Invalid("Cannot have more than 1 automation for templates")
@@ -156,10 +156,9 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
     return validator
 
 
-AUTOMATION_SCHEMA = vol.Schema({
+AUTOMATION_SCHEMA = cv.Schema({
     cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(Trigger),
     cv.GenerateID(CONF_AUTOMATION_ID): cv.declare_variable_id(Automation),
     vol.Optional(CONF_IF): validate_recursive_condition,
     vol.Required(CONF_THEN): validate_recursive_action,
 })
 
@@ -167,8 +166,8 @@ AND_CONDITION_SCHEMA = validate_recursive_condition
 
 
 @CONDITION_REGISTRY.register(CONF_AND, AND_CONDITION_SCHEMA)
-def and_condition_to_code(config, condition_id, arg_type, template_arg):
-    for conditions in build_conditions(config, arg_type):
+def and_condition_to_code(config, condition_id, template_arg, args):
+    for conditions in build_conditions(config, template_arg, args):
         yield
     rhs = AndCondition.new(template_arg, conditions)
     type = AndCondition.template(template_arg)
@@ -179,33 +178,33 @@ OR_CONDITION_SCHEMA = validate_recursive_condition
 
 
 @CONDITION_REGISTRY.register(CONF_OR, OR_CONDITION_SCHEMA)
-def or_condition_to_code(config, condition_id, arg_type, template_arg):
-    for conditions in build_conditions(config, arg_type):
+def or_condition_to_code(config, condition_id, template_arg, args):
+    for conditions in build_conditions(config, template_arg, args):
         yield
     rhs = OrCondition.new(template_arg, conditions)
     type = OrCondition.template(template_arg)
     yield Pvariable(condition_id, rhs, type=type)
 
 
-RANGE_CONDITION_SCHEMA = vol.All(vol.Schema({
+RANGE_CONDITION_SCHEMA = vol.All(cv.Schema({
     vol.Optional(CONF_ABOVE): cv.templatable(cv.float_),
     vol.Optional(CONF_BELOW): cv.templatable(cv.float_),
 }), cv.has_at_least_one_key(CONF_ABOVE, CONF_BELOW))
 
 
 @CONDITION_REGISTRY.register(CONF_RANGE, RANGE_CONDITION_SCHEMA)
-def range_condition_to_code(config, condition_id, arg_type, template_arg):
-    for conditions in build_conditions(config, arg_type):
+def range_condition_to_code(config, condition_id, template_arg, args):
+    for conditions in build_conditions(config, template_arg, args):
         yield
     rhs = RangeCondition.new(template_arg, conditions)
     type = RangeCondition.template(template_arg)
     condition = Pvariable(condition_id, rhs, type=type)
     if CONF_ABOVE in config:
-        for template_ in templatable(config[CONF_ABOVE], arg_type, float_):
+        for template_ in templatable(config[CONF_ABOVE], args, float_):
             yield
         condition.set_min(template_)
     if CONF_BELOW in config:
-        for template_ in templatable(config[CONF_BELOW], arg_type, float_):
+        for template_ in templatable(config[CONF_BELOW], args, float_):
            yield
         condition.set_max(template_)
     yield condition
@@ -215,11 +214,11 @@ DELAY_ACTION_SCHEMA = cv.templatable(cv.positive_time_period_milliseconds)
 
 
 @ACTION_REGISTRY.register(CONF_DELAY, DELAY_ACTION_SCHEMA)
-def delay_action_to_code(config, action_id, arg_type, template_arg):
+def delay_action_to_code(config, action_id, template_arg, args):
     rhs = App.register_component(DelayAction.new(template_arg))
     type = DelayAction.template(template_arg)
     action = Pvariable(action_id, rhs, type=type)
-    for template_ in templatable(config, arg_type, uint32):
+    for template_ in templatable(config, args, uint32):
         yield
     add(action.set_delay(template_))
     yield action
@@ -233,44 +232,44 @@ IF_ACTION_SCHEMA = vol.All({
 
 
 @ACTION_REGISTRY.register(CONF_IF, IF_ACTION_SCHEMA)
-def if_action_to_code(config, action_id, arg_type, template_arg):
-    for conditions in build_conditions(config[CONF_CONDITION], arg_type):
+def if_action_to_code(config, action_id, template_arg, args):
+    for conditions in build_conditions(config[CONF_CONDITION], template_arg, args):
         yield None
     rhs = IfAction.new(template_arg, conditions)
     type = IfAction.template(template_arg)
     action = Pvariable(action_id, rhs, type=type)
     if CONF_THEN in config:
-        for actions in build_actions(config[CONF_THEN], arg_type):
+        for actions in build_actions(config[CONF_THEN], template_arg, args):
             yield None
         add(action.add_then(actions))
     if CONF_ELSE in config:
-        for actions in build_actions(config[CONF_ELSE], arg_type):
+        for actions in build_actions(config[CONF_ELSE], template_arg, args):
             yield None
         add(action.add_else(actions))
     yield action
 
 
-WHILE_ACTION_SCHEMA = vol.Schema({
+WHILE_ACTION_SCHEMA = cv.Schema({
     vol.Required(CONF_CONDITION): validate_recursive_condition,
     vol.Required(CONF_THEN): validate_recursive_action,
 })
 
 
 @ACTION_REGISTRY.register(CONF_WHILE, WHILE_ACTION_SCHEMA)
-def while_action_to_code(config, action_id, arg_type, template_arg):
-    for conditions in build_conditions(config[CONF_CONDITION], arg_type):
+def while_action_to_code(config, action_id, template_arg, args):
+    for conditions in build_conditions(config[CONF_CONDITION], template_arg, args):
         yield None
     rhs = WhileAction.new(template_arg, conditions)
     type = WhileAction.template(template_arg)
     action = Pvariable(action_id, rhs, type=type)
-    for actions in build_actions(config[CONF_THEN], arg_type):
+    for actions in build_actions(config[CONF_THEN], template_arg, args):
         yield None
     add(action.add_then(actions))
     yield action
 
 
 def validate_wait_until(value):
-    schema = vol.Schema({
+    schema = cv.Schema({
         vol.Required(CONF_CONDITION): validate_recursive_condition
     })
     if isinstance(value, dict) and CONF_CONDITION in value:
@@ -282,8 +281,8 @@ WAIT_UNTIL_ACTION_SCHEMA = validate_wait_until
 
 
 @ACTION_REGISTRY.register(CONF_WAIT_UNTIL, WAIT_UNTIL_ACTION_SCHEMA)
-def wait_until_action_to_code(config, action_id, arg_type, template_arg):
-    for conditions in build_conditions(config[CONF_CONDITION], arg_type):
+def wait_until_action_to_code(config, action_id, template_arg, args):
+    for conditions in build_conditions(config[CONF_CONDITION], template_arg, args):
         yield None
     rhs = WaitUntilAction.new(template_arg, conditions)
     type = WaitUntilAction.template(template_arg)
@@ -296,8 +295,8 @@ LAMBDA_ACTION_SCHEMA = cv.lambda_
 
 
 @ACTION_REGISTRY.register(CONF_LAMBDA, LAMBDA_ACTION_SCHEMA)
-def lambda_action_to_code(config, action_id, arg_type, template_arg):
-    for lambda_ in process_lambda(config, [(arg_type, 'x')], return_type=void):
+def lambda_action_to_code(config, action_id, template_arg, args):
+    for lambda_ in process_lambda(config, args, return_type=void):
         yield None
     rhs = LambdaAction.new(template_arg, lambda_)
     type = LambdaAction.template(template_arg)
@@ -308,8 +307,8 @@ LAMBDA_CONDITION_SCHEMA = cv.lambda_
 
 
 @CONDITION_REGISTRY.register(CONF_LAMBDA, LAMBDA_CONDITION_SCHEMA)
-def lambda_condition_to_code(config, condition_id, arg_type, template_arg):
-    for lambda_ in process_lambda(config, [(arg_type, 'x')], return_type=bool_):
+def lambda_condition_to_code(config, condition_id, template_arg, args):
+    for lambda_ in process_lambda(config, args, return_type=bool_):
         yield
     rhs = LambdaCondition.new(template_arg, lambda_)
     type = LambdaCondition.template(template_arg)
@@ -323,7 +322,7 @@ COMPONENT_UPDATE_ACTION_SCHEMA = maybe_simple_id({
 
 
 @ACTION_REGISTRY.register(CONF_COMPONENT_UPDATE, COMPONENT_UPDATE_ACTION_SCHEMA)
-def component_update_action_to_code(config, action_id, arg_type, template_arg):
+def component_update_action_to_code(config, action_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
     rhs = UpdateComponentAction.new(template_arg, var)
@@ -331,61 +330,55 @@ def component_update_action_to_code(config, action_id, arg_type, template_arg):
     yield Pvariable(action_id, rhs, type=type)
 
 
-def build_action(full_config, arg_type):
+def build_action(full_config, template_arg, args):
     action_id = full_config[CONF_ACTION_ID]
     key, config = next((k, v) for k, v in full_config.items() if k in ACTION_REGISTRY)
 
     builder = ACTION_REGISTRY[key][1]
-    template_arg = TemplateArguments(arg_type)
-    for result in builder(config, action_id, arg_type, template_arg):
+    for result in builder(config, action_id, template_arg, args):
         yield None
     yield result
 
 
-def build_actions(config, arg_type):
+def build_actions(config, templ, arg_type):
     actions = []
     for conf in config:
-        for action in build_action(conf, arg_type):
+        for action in build_action(conf, templ, arg_type):
             yield None
         actions.append(action)
     yield actions
 
 
-def build_condition(full_config, arg_type):
+def build_condition(full_config, template_arg, args):
     action_id = full_config[CONF_CONDITION_ID]
     key, config = next((k, v) for k, v in full_config.items() if k in CONDITION_REGISTRY)
 
     builder = CONDITION_REGISTRY[key][1]
-    template_arg = TemplateArguments(arg_type)
-    for result in builder(config, action_id, arg_type, template_arg):
+    for result in builder(config, action_id, template_arg, args):
         yield None
     yield result
 
 
-def build_conditions(config, arg_type):
+def build_conditions(config, templ, args):
     conditions = []
     for conf in config:
-        for condition in build_condition(conf, arg_type):
+        for condition in build_condition(conf, templ, args):
             yield None
         conditions.append(condition)
     yield conditions
 
 
-def build_automation_(trigger, arg_type, config):
-    rhs = App.make_automation(TemplateArguments(arg_type), trigger)
-    type = Automation.template(arg_type)
+def build_automation_(trigger, args, config):
+    arg_types = [arg[0] for arg in args]
+    templ = TemplateArguments(*arg_types)
+    rhs = App.make_automation(templ, trigger)
+    type = Automation.template(templ)
     obj = Pvariable(config[CONF_AUTOMATION_ID], rhs, type=type)
     if CONF_IF in config:
         conditions = None
         for conditions in build_conditions(config[CONF_IF], arg_type):
             yield None
         add(obj.add_conditions(conditions))
     actions = None
-    for actions in build_actions(config[CONF_THEN], arg_type):
+    for actions in build_actions(config[CONF_THEN], templ, args):
         yield None
     add(obj.add_actions(actions))
     yield obj
 
 
-def build_automation(trigger, arg_type, config):
-    CORE.add_job(build_automation_, trigger, arg_type, config)
+def build_automations(trigger, args, config):
+    CORE.add_job(build_automation_, trigger, args, config)
@@ -12,7 +12,7 @@ MULTI_CONF = True
 
 ADS1115Component = sensor.sensor_ns.class_('ADS1115Component', Component, i2c.I2CDevice)
 
-CONFIG_SCHEMA = vol.Schema({
+CONFIG_SCHEMA = cv.Schema({
     cv.GenerateID(): cv.declare_variable_id(ADS1115Component),
     vol.Required(CONF_ADDRESS): cv.i2c_address,
 }).extend(cv.COMPONENT_SCHEMA.schema)

@@ -13,7 +13,7 @@ MULTI_CONF = True
 CONF_APDS9960_ID = 'apds9960_id'
 APDS9960 = sensor.sensor_ns.class_('APDS9960', PollingComponent, i2c.I2CDevice)
 
-CONFIG_SCHEMA = vol.Schema({
+CONFIG_SCHEMA = cv.Schema({
     cv.GenerateID(): cv.declare_variable_id(APDS9960),
     vol.Optional(CONF_ADDRESS): cv.i2c_address,
     vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
@@ -1,25 +1,52 @@
 import voluptuous as vol
 
-from esphome.automation import ACTION_REGISTRY
+from esphome import automation
+from esphome.automation import ACTION_REGISTRY, CONDITION_REGISTRY, Condition
 import esphome.config_validation as cv
 from esphome.const import CONF_DATA, CONF_DATA_TEMPLATE, CONF_ID, CONF_PASSWORD, CONF_PORT, \
-    CONF_REBOOT_TIMEOUT, CONF_SERVICE, CONF_VARIABLES
+    CONF_REBOOT_TIMEOUT, CONF_SERVICE, CONF_VARIABLES, CONF_SERVICES, CONF_TRIGGER_ID
 from esphome.core import CORE
 from esphome.cpp_generator import Pvariable, add, get_variable, process_lambda
 from esphome.cpp_helpers import setup_component
-from esphome.cpp_types import Action, App, Component, StoringController, esphome_ns
+from esphome.cpp_types import Action, App, Component, StoringController, esphome_ns, Trigger, \
+    bool_, int32, float_, std_string
 
 api_ns = esphome_ns.namespace('api')
 APIServer = api_ns.class_('APIServer', Component, StoringController)
 HomeAssistantServiceCallAction = api_ns.class_('HomeAssistantServiceCallAction', Action)
 KeyValuePair = api_ns.class_('KeyValuePair')
 TemplatableKeyValuePair = api_ns.class_('TemplatableKeyValuePair')
+APIConnectedCondition = api_ns.class_('APIConnectedCondition', Condition)
 
-CONFIG_SCHEMA = vol.Schema({
+UserService = api_ns.class_('UserService', Trigger)
+ServiceTypeArgument = api_ns.class_('ServiceTypeArgument')
+ServiceArgType = api_ns.enum('ServiceArgType')
+SERVICE_ARG_TYPES = {
+    'bool': ServiceArgType.SERVICE_ARG_TYPE_BOOL,
+    'int': ServiceArgType.SERVICE_ARG_TYPE_INT,
+    'float': ServiceArgType.SERVICE_ARG_TYPE_FLOAT,
+    'string': ServiceArgType.SERVICE_ARG_TYPE_STRING,
+}
+SERVICE_ARG_NATIVE_TYPES = {
+    'bool': bool_,
+    'int': int32,
+    'float': float_,
+    'string': std_string,
+}
+
+
+CONFIG_SCHEMA = cv.Schema({
     cv.GenerateID(): cv.declare_variable_id(APIServer),
     vol.Optional(CONF_PORT, default=6053): cv.port,
     vol.Optional(CONF_PASSWORD, default=''): cv.string_strict,
     vol.Optional(CONF_REBOOT_TIMEOUT): cv.positive_time_period_milliseconds,
+    vol.Optional(CONF_SERVICES): automation.validate_automation({
+        cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(UserService),
+        vol.Required(CONF_SERVICE): cv.valid_name,
+        vol.Optional(CONF_VARIABLES, default={}): cv.Schema({
+            cv.validate_id_name: cv.one_of(*SERVICE_ARG_TYPES, lower=True),
+        }),
+    }),
 }).extend(cv.COMPONENT_SCHEMA.schema)
 
@@ -34,6 +61,21 @@ def to_code(config):
     if CONF_REBOOT_TIMEOUT in config:
         add(api.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
 
+    for conf in config.get(CONF_SERVICES, []):
+        template_args = []
+        func_args = []
+        service_type_args = []
+        for name, var_ in conf[CONF_VARIABLES].items():
+            native = SERVICE_ARG_NATIVE_TYPES[var_]
+            template_args.append(native)
+            func_args.append((native, name))
+            service_type_args.append(ServiceTypeArgument(name, SERVICE_ARG_TYPES[var_]))
+        func = api.make_user_service_trigger.template(*template_args)
+        rhs = func(conf[CONF_SERVICE], service_type_args)
+        type_ = UserService.template(*template_args)
+        trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs, type=type_)
+        automation.build_automations(trigger, func_args, conf)
+
     setup_component(api, config)
 
@@ -42,34 +84,34 @@ BUILD_FLAGS = '-DUSE_API'
 
 def lib_deps(config):
     if CORE.is_esp32:
-        return 'AsyncTCP@1.0.1'
+        return 'AsyncTCP@1.0.3'
     if CORE.is_esp8266:
         return 'ESPAsyncTCP@1.1.3'
     raise NotImplementedError
 
 
 CONF_HOMEASSISTANT_SERVICE = 'homeassistant.service'
-HOMEASSISTANT_SERVIC_ACTION_SCHEMA = vol.Schema({
+HOMEASSISTANT_SERVIC_ACTION_SCHEMA = cv.Schema({
     cv.GenerateID(): cv.use_variable_id(APIServer),
     vol.Required(CONF_SERVICE): cv.string,
-    vol.Optional(CONF_DATA): vol.Schema({
+    vol.Optional(CONF_DATA): cv.Schema({
         cv.string: cv.string,
     }),
-    vol.Optional(CONF_DATA_TEMPLATE): vol.Schema({
+    vol.Optional(CONF_DATA_TEMPLATE): cv.Schema({
         cv.string: cv.string,
     }),
-    vol.Optional(CONF_VARIABLES): vol.Schema({
+    vol.Optional(CONF_VARIABLES): cv.Schema({
         cv.string: cv.lambda_,
     }),
 })
 
 
 @ACTION_REGISTRY.register(CONF_HOMEASSISTANT_SERVICE, HOMEASSISTANT_SERVIC_ACTION_SCHEMA)
-def homeassistant_service_to_code(config, action_id, arg_type, template_arg):
+def homeassistant_service_to_code(config, action_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
     rhs = var.make_home_assistant_service_call_action(template_arg)
-    type = HomeAssistantServiceCallAction.template(arg_type)
+    type = HomeAssistantServiceCallAction.template(template_arg)
     act = Pvariable(action_id, rhs, type=type)
     add(act.set_service(config[CONF_SERVICE]))
     if CONF_DATA in config:
@@ -86,3 +128,14 @@ def homeassistant_service_to_code(config, action_id, arg_type, template_arg):
             datas.append(TemplatableKeyValuePair(key, value_))
         add(act.set_variables(datas))
     yield act
+
+
+CONF_API_CONNECTED = 'api.connected'
+API_CONNECTED_CONDITION_SCHEMA = cv.Schema({})
+
+
+@CONDITION_REGISTRY.register(CONF_API_CONNECTED, API_CONNECTED_CONDITION_SCHEMA)
+def api_connected_to_code(config, condition_id, template_arg, args):
+    rhs = APIConnectedCondition.new(template_arg)
+    type = APIConnectedCondition.template(template_arg)
+    yield Pvariable(condition_id, rhs, type=type)
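The new services schema above lets the native API declare user-defined services whose typed variables are passed to the attached automation as lambda arguments. A hedged sketch of a configuration this schema should accept; the service name, variable names, and the log line are illustrative and not taken from the repository:

```yaml
api:
  services:
    - service: start_effect        # hypothetical service name
      variables:
        brightness: int            # allowed types: bool, int, float, string
        effect: string
      then:
        - lambda: |-
            // brightness and effect arrive here as typed lambda arguments
            ESP_LOGD("api", "service called with brightness=%d", brightness);
```

Each variable's type string selects both the ServiceArgType sent to Home Assistant and the native C++ type used for the generated trigger, per the two lookup tables above.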
@@ -9,10 +9,10 @@ from esphome.const import CONF_DELAYED_OFF, CONF_DELAYED_ON, CONF_DEVICE_CLASS,
     CONF_HEARTBEAT, CONF_ID, CONF_INTERNAL, CONF_INVALID_COOLDOWN, CONF_INVERT, CONF_INVERTED, \
     CONF_LAMBDA, CONF_MAX_LENGTH, CONF_MIN_LENGTH, CONF_MQTT_ID, CONF_ON_CLICK, \
     CONF_ON_DOUBLE_CLICK, CONF_ON_MULTI_CLICK, CONF_ON_PRESS, CONF_ON_RELEASE, CONF_ON_STATE, \
-    CONF_STATE, CONF_TIMING, CONF_TRIGGER_ID
+    CONF_STATE, CONF_TIMING, CONF_TRIGGER_ID, CONF_FOR
 from esphome.core import CORE
 from esphome.cpp_generator import Pvariable, StructInitializer, add, get_variable, process_lambda
-from esphome.cpp_types import App, Component, Nameable, NoArg, Trigger, bool_, esphome_ns, optional
+from esphome.cpp_types import App, Component, Nameable, Trigger, bool_, esphome_ns, optional
 from esphome.py_compat import string_types
 
 DEVICE_CLASSES = [
@@ -32,11 +32,11 @@ BinarySensorPtr = BinarySensor.operator('ptr')
 MQTTBinarySensorComponent = binary_sensor_ns.class_('MQTTBinarySensorComponent', mqtt.MQTTComponent)
 
 # Triggers
-PressTrigger = binary_sensor_ns.class_('PressTrigger', Trigger.template(NoArg))
-ReleaseTrigger = binary_sensor_ns.class_('ReleaseTrigger', Trigger.template(NoArg))
-ClickTrigger = binary_sensor_ns.class_('ClickTrigger', Trigger.template(NoArg))
-DoubleClickTrigger = binary_sensor_ns.class_('DoubleClickTrigger', Trigger.template(NoArg))
-MultiClickTrigger = binary_sensor_ns.class_('MultiClickTrigger', Trigger.template(NoArg), Component)
+PressTrigger = binary_sensor_ns.class_('PressTrigger', Trigger.template())
+ReleaseTrigger = binary_sensor_ns.class_('ReleaseTrigger', Trigger.template())
+ClickTrigger = binary_sensor_ns.class_('ClickTrigger', Trigger.template())
+DoubleClickTrigger = binary_sensor_ns.class_('DoubleClickTrigger', Trigger.template())
+MultiClickTrigger = binary_sensor_ns.class_('MultiClickTrigger', Trigger.template(), Component)
 MultiClickTriggerEvent = binary_sensor_ns.struct('MultiClickTriggerEvent')
 StateTrigger = binary_sensor_ns.class_('StateTrigger', Trigger.template(bool_))
 
@@ -62,7 +62,7 @@ FILTERS_SCHEMA = cv.ensure_list({
     vol.Optional(CONF_HEARTBEAT): cv.invalid("The heartbeat filter has been removed in 1.11.0"),
 }, cv.has_exactly_one_key(*FILTER_KEYS))
 
-MULTI_CLICK_TIMING_SCHEMA = vol.Schema({
+MULTI_CLICK_TIMING_SCHEMA = cv.Schema({
     vol.Optional(CONF_STATE): cv.boolean,
     vol.Optional(CONF_MIN_LENGTH): cv.positive_time_period_milliseconds,
     vol.Optional(CONF_MAX_LENGTH): cv.positive_time_period_milliseconds,
@@ -234,23 +234,23 @@ def setup_binary_sensor_core_(binary_sensor_var, config):
     for conf in config.get(CONF_ON_PRESS, []):
         rhs = binary_sensor_var.make_press_trigger()
         trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
-        automation.build_automation(trigger, NoArg, conf)
+        automation.build_automations(trigger, [], conf)
 
     for conf in config.get(CONF_ON_RELEASE, []):
         rhs = binary_sensor_var.make_release_trigger()
         trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
-        automation.build_automation(trigger, NoArg, conf)
+        automation.build_automations(trigger, [], conf)
 
     for conf in config.get(CONF_ON_CLICK, []):
         rhs = binary_sensor_var.make_click_trigger(conf[CONF_MIN_LENGTH], conf[CONF_MAX_LENGTH])
         trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
-        automation.build_automation(trigger, NoArg, conf)
+        automation.build_automations(trigger, [], conf)
 
     for conf in config.get(CONF_ON_DOUBLE_CLICK, []):
         rhs = binary_sensor_var.make_double_click_trigger(conf[CONF_MIN_LENGTH],
                                                           conf[CONF_MAX_LENGTH])
         trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
-        automation.build_automation(trigger, NoArg, conf)
+        automation.build_automations(trigger, [], conf)
 
     for conf in config.get(CONF_ON_MULTI_CLICK, []):
         timings = []
@@ -265,12 +265,12 @@ def setup_binary_sensor_core_(binary_sensor_var, config):
         trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
         if CONF_INVALID_COOLDOWN in conf:
             add(trigger.set_invalid_cooldown(conf[CONF_INVALID_COOLDOWN]))
-        automation.build_automation(trigger, NoArg, conf)
+        automation.build_automations(trigger, [], conf)
 
     for conf in config.get(CONF_ON_STATE, []):
         rhs = binary_sensor_var.make_state_trigger()
         trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
-        automation.build_automation(trigger, bool_, conf)
+        automation.build_automations(trigger, [(bool_, 'x')], conf)
 
     setup_mqtt_component(binary_sensor_var.Pget_mqtt(), config)
 
@@ -302,28 +302,30 @@ BUILD_FLAGS = '-DUSE_BINARY_SENSOR'
 CONF_BINARY_SENSOR_IS_ON = 'binary_sensor.is_on'
 BINARY_SENSOR_IS_ON_CONDITION_SCHEMA = maybe_simple_id({
     vol.Required(CONF_ID): cv.use_variable_id(BinarySensor),
+    vol.Optional(CONF_FOR): cv.positive_time_period_milliseconds,
 })
 
 
 @CONDITION_REGISTRY.register(CONF_BINARY_SENSOR_IS_ON, BINARY_SENSOR_IS_ON_CONDITION_SCHEMA)
-def binary_sensor_is_on_to_code(config, condition_id, arg_type, template_arg):
+def binary_sensor_is_on_to_code(config, condition_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
-    rhs = var.make_binary_sensor_is_on_condition(template_arg)
-    type = BinarySensorCondition.template(arg_type)
+    rhs = var.make_binary_sensor_is_on_condition(template_arg, config.get(CONF_FOR))
+    type = BinarySensorCondition.template(template_arg)
     yield Pvariable(condition_id, rhs, type=type)
 
 
 CONF_BINARY_SENSOR_IS_OFF = 'binary_sensor.is_off'
 BINARY_SENSOR_IS_OFF_CONDITION_SCHEMA = maybe_simple_id({
     vol.Required(CONF_ID): cv.use_variable_id(BinarySensor),
+    vol.Optional(CONF_FOR): cv.positive_time_period_milliseconds,
 })
 
 
 @CONDITION_REGISTRY.register(CONF_BINARY_SENSOR_IS_OFF, BINARY_SENSOR_IS_OFF_CONDITION_SCHEMA)
-def binary_sensor_is_off_to_code(config, condition_id, arg_type, template_arg):
+def binary_sensor_is_off_to_code(config, condition_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
-    rhs = var.make_binary_sensor_is_off_condition(template_arg)
-    type = BinarySensorCondition.template(arg_type)
+    rhs = var.make_binary_sensor_is_off_condition(template_arg, config.get(CONF_FOR))
+    type = BinarySensorCondition.template(template_arg)
     yield Pvariable(condition_id, rhs, type=type)
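The new optional for field on the binary_sensor.is_on / binary_sensor.is_off conditions requires the state to have been held for the given duration before the condition passes. A hedged usage sketch; the component choice and IDs are illustrative:

```yaml
# Illustrative only: motion_sensor is a made-up binary_sensor ID.
interval:
  - interval: 1min
    then:
      - if:
          condition:
            binary_sensor.is_on:
              id: motion_sensor
              for: 5s          # new: state must have been on for at least this long
          then:
            - logger.log: "Motion held for 5 seconds"
```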
esphome/components/binary_sensor/mpr121.py (new file, 28 lines)

@@ -0,0 +1,28 @@
+import voluptuous as vol
+
+from esphome.components import binary_sensor
+from esphome.components.mpr121 import MPR121Component, CONF_MPR121_ID
+import esphome.config_validation as cv
+from esphome.const import CONF_CHANNEL, CONF_NAME
+from esphome.cpp_generator import get_variable
+
+DEPENDENCIES = ['mpr121']
+MPR121Channel = binary_sensor.binary_sensor_ns.class_(
+    'MPR121Channel', binary_sensor.BinarySensor)
+
+PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
+    cv.GenerateID(): cv.declare_variable_id(MPR121Channel),
+    cv.GenerateID(CONF_MPR121_ID): cv.use_variable_id(MPR121Component),
+    vol.Required(CONF_CHANNEL): vol.All(vol.Coerce(int), vol.Range(min=0, max=11))
+}))
+
+
+def to_code(config):
+    for hub in get_variable(config[CONF_MPR121_ID]):
+        yield
+    rhs = MPR121Channel.new(config[CONF_NAME], config[CONF_CHANNEL])
+    binary_sensor.register_binary_sensor(hub.add_channel(rhs), config)
+
+
+def to_hass_config(data, config):
+    return binary_sensor.core_to_hass_config(data, config)
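The new mpr121 binary sensor platform above exposes a single MPR121 touch channel. A hedged configuration sketch; the mpr121 hub block, its ID, and the I2C address are assumptions that are not part of this diff:

```yaml
mpr121:
  id: touch_hub        # hub component configured elsewhere; id is illustrative
  address: 0x5A        # typical MPR121 default address, illustrative

binary_sensor:
  - platform: mpr121
    mpr121_id: touch_hub
    name: "Touch Pad 3"
    channel: 3         # 0-11, per the schema above
```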
@@ -41,7 +41,7 @@ RCSwitchTypeDReceiver = remote_ns.class_('RCSwitchTypeDReceiver', RCSwitchRawRec
 
 def validate_raw(value):
     if isinstance(value, dict):
-        return vol.Schema({
+        return cv.Schema({
             cv.GenerateID(): cv.declare_variable_id(int32),
             vol.Required(CONF_DATA): [vol.Any(vol.Coerce(int), cv.time_period_microseconds)],
         })(value)
@@ -52,29 +52,29 @@ def validate_raw(value):
 
 PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
     cv.GenerateID(): cv.declare_variable_id(RemoteReceiver),
-    vol.Optional(CONF_JVC): vol.Schema({
+    vol.Optional(CONF_JVC): cv.Schema({
         vol.Required(CONF_DATA): cv.hex_uint32_t,
     }),
-    vol.Optional(CONF_LG): vol.Schema({
+    vol.Optional(CONF_LG): cv.Schema({
         vol.Required(CONF_DATA): cv.hex_uint32_t,
         vol.Optional(CONF_NBITS, default=28): cv.one_of(28, 32, int=True),
     }),
-    vol.Optional(CONF_NEC): vol.Schema({
+    vol.Optional(CONF_NEC): cv.Schema({
         vol.Required(CONF_ADDRESS): cv.hex_uint16_t,
         vol.Required(CONF_COMMAND): cv.hex_uint16_t,
     }),
-    vol.Optional(CONF_SAMSUNG): vol.Schema({
+    vol.Optional(CONF_SAMSUNG): cv.Schema({
         vol.Required(CONF_DATA): cv.hex_uint32_t,
     }),
-    vol.Optional(CONF_SONY): vol.Schema({
+    vol.Optional(CONF_SONY): cv.Schema({
         vol.Required(CONF_DATA): cv.hex_uint32_t,
         vol.Optional(CONF_NBITS, default=12): cv.one_of(12, 15, 20, int=True),
     }),
-    vol.Optional(CONF_PANASONIC): vol.Schema({
+    vol.Optional(CONF_PANASONIC): cv.Schema({
         vol.Required(CONF_ADDRESS): cv.hex_uint16_t,
         vol.Required(CONF_COMMAND): cv.hex_uint32_t,
     }),
-    vol.Optional(CONF_RC5): vol.Schema({
+    vol.Optional(CONF_RC5): cv.Schema({
         vol.Required(CONF_ADDRESS): vol.All(cv.hex_int, vol.Range(min=0, max=0x1F)),
         vol.Required(CONF_COMMAND): vol.All(cv.hex_int, vol.Range(min=0, max=0x3F)),
     }),
@@ -36,7 +36,7 @@ def to_code(config):
 BUILD_FLAGS = '-DUSE_TEMPLATE_BINARY_SENSOR'
 
 CONF_BINARY_SENSOR_TEMPLATE_PUBLISH = 'binary_sensor.template.publish'
-BINARY_SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA = vol.Schema({
+BINARY_SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA = cv.Schema({
     vol.Required(CONF_ID): cv.use_variable_id(binary_sensor.BinarySensor),
     vol.Required(CONF_STATE): cv.templatable(cv.boolean),
 })
@@ -44,13 +44,13 @@ BINARY_SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA = vol.Schema({
 
 @ACTION_REGISTRY.register(CONF_BINARY_SENSOR_TEMPLATE_PUBLISH,
                           BINARY_SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA)
-def binary_sensor_template_publish_to_code(config, action_id, arg_type, template_arg):
+def binary_sensor_template_publish_to_code(config, action_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
     rhs = var.make_binary_sensor_publish_action(template_arg)
-    type = BinarySensorPublishAction.template(arg_type)
+    type = BinarySensorPublishAction.template(template_arg)
     action = Pvariable(action_id, rhs, type=type)
-    for template_ in templatable(config[CONF_STATE], arg_type, bool_):
+    for template_ in templatable(config[CONF_STATE], args, bool_):
         yield None
     add(action.set_state(template_))
     yield action
|
||||
@@ -60,11 +60,11 @@ COVER_OPEN_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_COVER_OPEN, COVER_OPEN_ACTION_SCHEMA)
|
||||
def cover_open_to_code(config, action_id, arg_type, template_arg):
|
||||
def cover_open_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_open_action(template_arg)
|
||||
type = OpenAction.template(arg_type)
|
||||
type = OpenAction.template(template_arg)
|
||||
yield Pvariable(action_id, rhs, type=type)
|
||||
|
||||
|
||||
@@ -75,11 +75,11 @@ COVER_CLOSE_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_COVER_CLOSE, COVER_CLOSE_ACTION_SCHEMA)
|
||||
def cover_close_to_code(config, action_id, arg_type, template_arg):
|
||||
def cover_close_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_close_action(template_arg)
|
||||
type = CloseAction.template(arg_type)
|
||||
type = CloseAction.template(template_arg)
|
||||
yield Pvariable(action_id, rhs, type=type)
|
||||
|
||||
|
||||
@@ -90,11 +90,11 @@ COVER_STOP_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_COVER_STOP, COVER_STOP_ACTION_SCHEMA)
|
||||
def cover_stop_to_code(config, action_id, arg_type, template_arg):
|
||||
def cover_stop_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_stop_action(template_arg)
|
||||
type = StopAction.template(arg_type)
|
||||
type = StopAction.template(template_arg)
|
||||
yield Pvariable(action_id, rhs, type=type)
|
||||
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ from esphome.const import CONF_ASSUMED_STATE, CONF_CLOSE_ACTION, CONF_ID, CONF_L
     CONF_OPEN_ACTION, CONF_OPTIMISTIC, CONF_STATE, CONF_STOP_ACTION
 from esphome.cpp_generator import Pvariable, add, get_variable, process_lambda, templatable
 from esphome.cpp_helpers import setup_component
-from esphome.cpp_types import Action, App, NoArg, optional
+from esphome.cpp_types import Action, App, optional
 from esphome.py_compat import string_types
 
 TemplateCover = cover.cover_ns.class_('TemplateCover', cover.Cover)
@@ -38,14 +38,14 @@ def to_code(config):
         yield
     add(var.set_state_lambda(template_))
     if CONF_OPEN_ACTION in config:
-        automation.build_automation(var.get_open_trigger(), NoArg,
-                                    config[CONF_OPEN_ACTION])
+        automation.build_automations(var.get_open_trigger(), [],
+                                     config[CONF_OPEN_ACTION])
     if CONF_CLOSE_ACTION in config:
-        automation.build_automation(var.get_close_trigger(), NoArg,
-                                    config[CONF_CLOSE_ACTION])
+        automation.build_automations(var.get_close_trigger(), [],
+                                     config[CONF_CLOSE_ACTION])
     if CONF_STOP_ACTION in config:
-        automation.build_automation(var.get_stop_trigger(), NoArg,
-                                    config[CONF_STOP_ACTION])
+        automation.build_automations(var.get_stop_trigger(), [],
+                                     config[CONF_STOP_ACTION])
     if CONF_OPTIMISTIC in config:
         add(var.set_optimistic(config[CONF_OPTIMISTIC]))
     if CONF_ASSUMED_STATE in config:
@@ -55,7 +55,7 @@ def to_code(config):
 BUILD_FLAGS = '-DUSE_TEMPLATE_COVER'
 
 CONF_COVER_TEMPLATE_PUBLISH = 'cover.template.publish'
-COVER_TEMPLATE_PUBLISH_ACTION_SCHEMA = vol.Schema({
+COVER_TEMPLATE_PUBLISH_ACTION_SCHEMA = cv.Schema({
     vol.Required(CONF_ID): cv.use_variable_id(cover.Cover),
     vol.Required(CONF_STATE): cv.templatable(cover.validate_cover_state),
 })
@@ -63,17 +63,17 @@ COVER_TEMPLATE_PUBLISH_ACTION_SCHEMA = vol.Schema({
 
 @ACTION_REGISTRY.register(CONF_COVER_TEMPLATE_PUBLISH,
                           COVER_TEMPLATE_PUBLISH_ACTION_SCHEMA)
-def cover_template_publish_to_code(config, action_id, arg_type, template_arg):
+def cover_template_publish_to_code(config, action_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
     rhs = var.make_cover_publish_action(template_arg)
-    type = CoverPublishAction.template(arg_type)
+    type = CoverPublishAction.template(template_arg)
     action = Pvariable(action_id, rhs, type=type)
     state = config[CONF_STATE]
     if isinstance(state, string_types):
         template_ = cover.COVER_STATES[state]
     else:
-        for template_ in templatable(state, arg_type, cover.CoverState):
+        for template_ in templatable(state, args, cover.CoverState):
             yield None
     add(action.set_state(template_))
     yield action
@@ -9,10 +9,10 @@ from esphome.cpp_types import Component, ComponentPtr, esphome_ns, std_vector
 CustomComponentConstructor = esphome_ns.class_('CustomComponentConstructor')
 MULTI_CONF = True
 
-CONFIG_SCHEMA = vol.Schema({
+CONFIG_SCHEMA = cv.Schema({
     cv.GenerateID(): cv.declare_variable_id(CustomComponentConstructor),
     vol.Required(CONF_LAMBDA): cv.lambda_,
-    vol.Optional(CONF_COMPONENTS): cv.ensure_list(vol.Schema({
+    vol.Optional(CONF_COMPONENTS): cv.ensure_list(cv.Schema({
         cv.GenerateID(): cv.declare_variable_id(Component)
     }).extend(cv.COMPONENT_SCHEMA.schema)),
 })

@@ -11,9 +11,9 @@ from esphome.cpp_types import App, PollingComponent
 DallasComponent = sensor.sensor_ns.class_('DallasComponent', PollingComponent)
 MULTI_CONF = True
 
-CONFIG_SCHEMA = vol.Schema({
+CONFIG_SCHEMA = cv.Schema({
     cv.GenerateID(): cv.declare_variable_id(DallasComponent),
-    vol.Required(CONF_PIN): pins.input_pullup_pin,
+    vol.Required(CONF_PIN): pins.input_pin,
     vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
 }).extend(cv.COMPONENT_SCHEMA.schema)
@@ -1,11 +1,10 @@
-import voluptuous as vol
 
 import esphome.config_validation as cv
 from esphome.cpp_generator import add
 from esphome.cpp_types import App
 
 DEPENDENCIES = ['logger']
 
-CONFIG_SCHEMA = vol.Schema({})
+CONFIG_SCHEMA = cv.Schema({})
 
 
 def to_code(config):

@@ -38,14 +38,14 @@ EXT1_WAKEUP_MODES = {
 CONF_WAKEUP_PIN_MODE = 'wakeup_pin_mode'
 CONF_ESP32_EXT1_WAKEUP = 'esp32_ext1_wakeup'
 
-CONFIG_SCHEMA = vol.Schema({
+CONFIG_SCHEMA = cv.Schema({
     cv.GenerateID(): cv.declare_variable_id(DeepSleepComponent),
     vol.Optional(CONF_SLEEP_DURATION): cv.positive_time_period_milliseconds,
     vol.Optional(CONF_WAKEUP_PIN): vol.All(cv.only_on_esp32, pins.internal_gpio_input_pin_schema,
                                            validate_pin_number),
     vol.Optional(CONF_WAKEUP_PIN_MODE): vol.All(cv.only_on_esp32,
                                                 cv.one_of(*WAKEUP_PIN_MODES), upper=True),
-    vol.Optional(CONF_ESP32_EXT1_WAKEUP): vol.All(cv.only_on_esp32, vol.Schema({
+    vol.Optional(CONF_ESP32_EXT1_WAKEUP): vol.All(cv.only_on_esp32, cv.Schema({
        vol.Required(CONF_PINS): cv.ensure_list(pins.shorthand_input_pin, validate_pin_number),
        vol.Required(CONF_MODE): cv.one_of(*EXT1_WAKEUP_MODES, upper=True),
     })),
@@ -95,11 +95,11 @@ DEEP_SLEEP_ENTER_ACTION_SCHEMA = maybe_simple_id({
 
 
 @ACTION_REGISTRY.register(CONF_DEEP_SLEEP_ENTER, DEEP_SLEEP_ENTER_ACTION_SCHEMA)
-def deep_sleep_enter_to_code(config, action_id, arg_type, template_arg):
+def deep_sleep_enter_to_code(config, action_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
     rhs = var.make_enter_deep_sleep_action(template_arg)
-    type = EnterDeepSleepAction.template(arg_type)
+    type = EnterDeepSleepAction.template(template_arg)
     yield Pvariable(action_id, rhs, type=type)
 
 
@@ -110,9 +110,9 @@ DEEP_SLEEP_PREVENT_ACTION_SCHEMA = maybe_simple_id({
 
 
 @ACTION_REGISTRY.register(CONF_DEEP_SLEEP_PREVENT, DEEP_SLEEP_PREVENT_ACTION_SCHEMA)
-def deep_sleep_prevent_to_code(config, action_id, arg_type, template_arg):
+def deep_sleep_prevent_to_code(config, action_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
     rhs = var.make_prevent_deep_sleep_action(template_arg)
-    type = PreventDeepSleepAction.template(arg_type)
+    type = PreventDeepSleepAction.template(template_arg)
     yield Pvariable(action_id, rhs, type=type)
@@ -78,11 +78,11 @@ DISPLAY_PAGE_SHOW_ACTION_SCHEMA = maybe_simple_id({
 
 
 @ACTION_REGISTRY.register(CONF_DISPLAY_PAGE_SHOW, DISPLAY_PAGE_SHOW_ACTION_SCHEMA)
-def display_page_show_to_code(config, action_id, arg_type, template_arg):
-    type = DisplayPageShowAction.template(arg_type)
+def display_page_show_to_code(config, action_id, template_arg, args):
+    type = DisplayPageShowAction.template(template_arg)
     action = Pvariable(action_id, type.new(), type=type)
     if isinstance(config[CONF_ID], core.Lambda):
-        for template_ in templatable(config[CONF_ID], arg_type, DisplayPagePtr):
+        for template_ in templatable(config[CONF_ID], args, DisplayPagePtr):
             yield None
         add(action.set_page(template_))
     else:
@@ -99,10 +99,10 @@ DISPLAY_PAGE_SHOW_NEXT_ACTION_SCHEMA = maybe_simple_id({
 
 
 @ACTION_REGISTRY.register(CONF_DISPLAY_PAGE_SHOW_NEXT, DISPLAY_PAGE_SHOW_NEXT_ACTION_SCHEMA)
-def display_page_show_next_to_code(config, action_id, arg_type, template_arg):
+def display_page_show_next_to_code(config, action_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
-    type = DisplayPageShowNextAction.template(arg_type)
+    type = DisplayPageShowNextAction.template(template_arg)
     yield Pvariable(action_id, type.new(var), type=type)
 
 
@@ -113,10 +113,10 @@ DISPLAY_PAGE_SHOW_PREVIOUS_ACTION_SCHEMA = maybe_simple_id({
 
 
 @ACTION_REGISTRY.register(CONF_DISPLAY_PAGE_SHOW_PREVIOUS, DISPLAY_PAGE_SHOW_PREVIOUS_ACTION_SCHEMA)
-def display_page_show_previous_to_code(config, action_id, arg_type, template_arg):
+def display_page_show_previous_to_code(config, action_id, template_arg, args):
     for var in get_variable(config[CONF_ID]):
         yield None
-    type = DisplayPageShowPrevAction.template(arg_type)
+    type = DisplayPageShowPrevAction.template(template_arg)
     yield Pvariable(action_id, type.new(var), type=type)
@@ -13,7 +13,7 @@ ESP32BLEBeacon = esphome_ns.class_('ESP32BLEBeacon', Component)
 CONF_MAJOR = 'major'
 CONF_MINOR = 'minor'
 
-CONFIG_SCHEMA = vol.Schema({
+CONFIG_SCHEMA = cv.Schema({
     cv.GenerateID(): cv.declare_variable_id(ESP32BLEBeacon),
     vol.Required(CONF_TYPE): cv.one_of('IBEACON', upper=True),
     vol.Required(CONF_UUID): cv.uuid,

@@ -18,7 +18,7 @@ XIAOMI_SENSOR_SCHEMA = sensor.SENSOR_SCHEMA.extend({
     cv.GenerateID(): cv.declare_variable_id(XiaomiSensor)
 })
 
-CONFIG_SCHEMA = vol.Schema({
+CONFIG_SCHEMA = cv.Schema({
     cv.GenerateID(): cv.declare_variable_id(ESP32BLETracker),
     vol.Optional(CONF_SCAN_INTERVAL): cv.positive_time_period_seconds,
 }).extend(cv.COMPONENT_SCHEMA.schema)
130
esphome/components/esp32_camera.py
Normal file
130
esphome/components/esp32_camera.py
Normal file
@@ -0,0 +1,130 @@
|
||||
import voluptuous as vol
|
||||
|
||||
from esphome import config_validation as cv, pins
|
||||
from esphome.const import CONF_FREQUENCY, CONF_ID, CONF_NAME, CONF_PIN, CONF_SCL, CONF_SDA, \
|
||||
ESP_PLATFORM_ESP32
|
||||
from esphome.cpp_generator import Pvariable, add
|
||||
from esphome.cpp_types import App, Nameable, PollingComponent, esphome_ns
|
||||
|
||||
ESP_PLATFORMS = [ESP_PLATFORM_ESP32]
|
||||
|
||||
ESP32Camera = esphome_ns.class_('ESP32Camera', PollingComponent, Nameable)
|
||||
ESP32CameraFrameSize = esphome_ns.enum('ESP32CameraFrameSize')
|
||||
FRAME_SIZES = {
|
||||
'160X120': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_160X120,
|
||||
'QQVGA': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_160X120,
|
||||
'128x160': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_128X160,
|
||||
'QQVGA2': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_128X160,
|
||||
'176X144': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_176X144,
|
||||
'QCIF': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_176X144,
|
||||
'240X176': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_240X176,
|
||||
'HQVGA': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_240X176,
|
||||
'320X240': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_320X240,
|
||||
'QVGA': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_320X240,
|
||||
'400X296': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_400X296,
|
||||
'CIF': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_400X296,
|
||||
'640X480': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_640X480,
|
||||
'VGA': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_640X480,
|
||||
'800X600': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_800X600,
|
||||
'SVGA': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_800X600,
|
||||
'1024X768': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_1024X768,
|
||||
'XGA': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_1024X768,
|
||||
'1280x1024': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_1280X1024,
|
||||
'SXGA': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_1280X1024,
|
||||
'1600X1200': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_1600X1200,
|
||||
'UXGA': ESP32CameraFrameSize.ESP32_CAMERA_SIZE_1600X1200,
|
||||
}
|
||||
|
||||
CONF_DATA_PINS = 'data_pins'
|
||||
CONF_VSYNC_PIN = 'vsync_pin'
|
||||
CONF_HREF_PIN = 'href_pin'
|
||||
CONF_PIXEL_CLOCK_PIN = 'pixel_clock_pin'
|
||||
CONF_EXTERNAL_CLOCK = 'external_clock'
|
||||
CONF_I2C_PINS = 'i2c_pins'
|
||||
CONF_RESET_PIN = 'reset_pin'
|
||||
CONF_POWER_DOWN_PIN = 'power_down_pin'
|
||||
|
||||
CONF_MAX_FRAMERATE = 'max_framerate'
|
||||
CONF_IDLE_FRAMERATE = 'idle_framerate'
|
||||
CONF_RESOLUTION = 'resolution'
|
||||
CONF_JPEG_QUALITY = 'jpeg_quality'
|
||||
CONF_VERTICAL_FLIP = 'vertical_flip'
|
||||
CONF_HORIZONTAL_MIRROR = 'horizontal_mirror'
|
||||
CONF_CONTRAST = 'contrast'
|
||||
CONF_BRIGHTNESS = 'brightness'
|
||||
CONF_SATURATION = 'saturation'
|
||||
CONF_TEST_PATTERN = 'test_pattern'
|
||||
|
||||
camera_range_param = vol.All(cv.int_, vol.Range(min=-2, max=2))
|
||||
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(ESP32Camera),
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Required(CONF_DATA_PINS): vol.All([pins.input_pin], vol.Length(min=8, max=8)),
|
||||
vol.Required(CONF_VSYNC_PIN): pins.input_pin,
|
||||
vol.Required(CONF_HREF_PIN): pins.input_pin,
|
||||
vol.Required(CONF_PIXEL_CLOCK_PIN): pins.input_pin,
|
||||
vol.Required(CONF_EXTERNAL_CLOCK): vol.Schema({
|
||||
vol.Required(CONF_PIN): pins.output_pin,
|
||||
vol.Optional(CONF_FREQUENCY, default='20MHz'): vol.All(cv.frequency, vol.In([20e6, 10e6])),
|
||||
}),
|
||||
vol.Required(CONF_I2C_PINS): vol.Schema({
|
||||
vol.Required(CONF_SDA): pins.output_pin,
|
||||
vol.Required(CONF_SCL): pins.output_pin,
|
||||
}),
|
||||
vol.Optional(CONF_RESET_PIN): pins.output_pin,
|
||||
vol.Optional(CONF_POWER_DOWN_PIN): pins.output_pin,
|
||||
|
||||
vol.Optional(CONF_MAX_FRAMERATE, default='10 fps'): vol.All(cv.framerate,
|
||||
vol.Range(min=0, min_included=False,
|
||||
max=60)),
|
||||
vol.Optional(CONF_IDLE_FRAMERATE, default='0.1 fps'): vol.All(cv.framerate,
|
||||
vol.Range(min=0, max=1)),
|
||||
vol.Optional(CONF_RESOLUTION, default='640X480'): cv.one_of(*FRAME_SIZES, upper=True),
|
||||
vol.Optional(CONF_JPEG_QUALITY, default=10): vol.All(cv.int_, vol.Range(min=10, max=63)),
|
||||
vol.Optional(CONF_CONTRAST, default=0): camera_range_param,
|
||||
vol.Optional(CONF_BRIGHTNESS, default=0): camera_range_param,
|
||||
vol.Optional(CONF_SATURATION, default=0): camera_range_param,
|
||||
vol.Optional(CONF_VERTICAL_FLIP, default=True): cv.boolean,
|
||||
vol.Optional(CONF_HORIZONTAL_MIRROR, default=True): cv.boolean,
|
||||
vol.Optional(CONF_TEST_PATTERN, default=False): cv.boolean,
|
||||
}).extend(cv.COMPONENT_SCHEMA.schema)
|
||||
|
||||
SETTERS = {
|
||||
CONF_DATA_PINS: 'set_data_pins',
|
||||
CONF_VSYNC_PIN: 'set_vsync_pin',
|
||||
CONF_HREF_PIN: 'set_href_pin',
|
||||
CONF_PIXEL_CLOCK_PIN: 'set_pixel_clock_pin',
|
||||
CONF_RESET_PIN: 'set_reset_pin',
|
||||
CONF_POWER_DOWN_PIN: 'set_power_down_pin',
|
||||
CONF_JPEG_QUALITY: 'set_jpeg_quality',
|
||||
CONF_VERTICAL_FLIP: 'set_vertical_flip',
|
||||
CONF_HORIZONTAL_MIRROR: 'set_horizontal_mirror',
|
||||
CONF_CONTRAST: 'set_contrast',
|
||||
CONF_BRIGHTNESS: 'set_brightness',
|
||||
CONF_SATURATION: 'set_saturation',
|
||||
CONF_TEST_PATTERN: 'set_test_pattern',
|
||||
}
|
||||
|
||||
|
||||
def to_code(config):
|
||||
rhs = App.register_component(ESP32Camera.new(config[CONF_NAME]))
|
||||
cam = Pvariable(config[CONF_ID], rhs)
|
||||
|
||||
for key, setter in SETTERS.items():
|
||||
if key in config:
|
||||
add(getattr(cam, setter)(config[key]))
|
||||
|
||||
extclk = config[CONF_EXTERNAL_CLOCK]
|
||||
add(cam.set_external_clock(extclk[CONF_PIN], extclk[CONF_FREQUENCY]))
|
||||
i2c_pins = config[CONF_I2C_PINS]
|
||||
add(cam.set_i2c_pins(i2c_pins[CONF_SDA], i2c_pins[CONF_SCL]))
|
||||
add(cam.set_max_update_interval(1000 / config[CONF_MAX_FRAMERATE]))
|
||||
if config[CONF_IDLE_FRAMERATE] == 0:
|
||||
add(cam.set_idle_update_interval(0))
|
||||
else:
|
||||
add(cam.set_idle_update_interval(1000 / config[CONF_IDLE_FRAMERATE]))
|
||||
add(cam.set_frame_size(FRAME_SIZES[config[CONF_RESOLUTION]]))
|
||||
|
||||
|
||||
BUILD_FLAGS = '-DUSE_ESP32_CAMERA'
|
||||
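
A note on the framerate handling in to_code() above: cv.framerate yields a frames-per-second float, and the camera's polling interval is 1000 / fps milliseconds, with idle_framerate: 0 handled as a literal 0 interval. A minimal standalone sketch of that conversion; the helper name is illustrative only and not part of the component:

# Standalone sketch (not part of esphome) of the framerate-to-interval
# conversion used in to_code() above.
def framerate_to_interval_ms(framerate_fps):
    if framerate_fps == 0:
        return 0  # mirrors the idle_framerate == 0 special case above
    return 1000 / framerate_fps


print(framerate_to_interval_ms(10))   # max_framerate: 10 fps   -> 100.0 ms
print(framerate_to_interval_ms(0.1))  # idle_framerate: 0.1 fps -> 10000.0 ms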
@@ -46,7 +46,7 @@ VOLTAGE_ATTENUATION = {
|
||||
|
||||
ESP32TouchComponent = binary_sensor.binary_sensor_ns.class_('ESP32TouchComponent', Component)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(ESP32TouchComponent),
|
||||
vol.Optional(CONF_SETUP_MODE): cv.boolean,
|
||||
vol.Optional(CONF_IIR_FILTER): cv.positive_time_period_milliseconds,
|
||||
|
||||
@@ -46,7 +46,7 @@ def validate(config):
|
||||
return config
|
||||
|
||||
|
||||
CONFIG_SCHEMA = vol.All(vol.Schema({
|
||||
CONFIG_SCHEMA = vol.All(cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(EthernetComponent),
|
||||
vol.Required(CONF_TYPE): cv.one_of(*ETHERNET_TYPES, upper=True),
|
||||
vol.Required(CONF_MDC_PIN): pins.output_pin,
|
||||
|
||||
@@ -81,11 +81,11 @@ FAN_TOGGLE_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_FAN_TOGGLE, FAN_TOGGLE_ACTION_SCHEMA)
|
||||
def fan_toggle_to_code(config, action_id, arg_type, template_arg):
|
||||
def fan_toggle_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_toggle_action(template_arg)
|
||||
type = ToggleAction.template(arg_type)
|
||||
type = ToggleAction.template(template_arg)
|
||||
yield Pvariable(action_id, rhs, type=type)
|
||||
|
||||
|
||||
@@ -96,11 +96,11 @@ FAN_TURN_OFF_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_FAN_TURN_OFF, FAN_TURN_OFF_ACTION_SCHEMA)
|
||||
def fan_turn_off_to_code(config, action_id, arg_type, template_arg):
|
||||
def fan_turn_off_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_turn_off_action(template_arg)
|
||||
type = TurnOffAction.template(arg_type)
|
||||
type = TurnOffAction.template(template_arg)
|
||||
yield Pvariable(action_id, rhs, type=type)
|
||||
|
||||
|
||||
@@ -113,18 +113,18 @@ FAN_TURN_ON_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_FAN_TURN_ON, FAN_TURN_ON_ACTION_SCHEMA)
|
||||
def fan_turn_on_to_code(config, action_id, arg_type, template_arg):
|
||||
def fan_turn_on_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_turn_on_action(template_arg)
|
||||
type = TurnOnAction.template(arg_type)
|
||||
type = TurnOnAction.template(template_arg)
|
||||
action = Pvariable(action_id, rhs, type=type)
|
||||
if CONF_OSCILLATING in config:
|
||||
for template_ in templatable(config[CONF_OSCILLATING], arg_type, bool_):
|
||||
for template_ in templatable(config[CONF_OSCILLATING], args, bool_):
|
||||
yield None
|
||||
add(action.set_oscillating(template_))
|
||||
if CONF_SPEED in config:
|
||||
for template_ in templatable(config[CONF_SPEED], arg_type, FanSpeed):
|
||||
for template_ in templatable(config[CONF_SPEED], args, FanSpeed):
|
||||
yield None
|
||||
if isinstance(template_, string_types):
|
||||
template_ = FAN_SPEEDS[template_]
|
||||
|
||||
@@ -14,7 +14,7 @@ PLATFORM_SCHEMA = cv.nameable(fan.FAN_PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_SPEED_STATE_TOPIC): cv.publish_topic,
|
||||
vol.Optional(CONF_SPEED_COMMAND_TOPIC): cv.subscribe_topic,
|
||||
vol.Optional(CONF_OSCILLATION_OUTPUT): cv.use_variable_id(output.BinaryOutput),
|
||||
vol.Optional(CONF_SPEED): vol.Schema({
|
||||
vol.Optional(CONF_SPEED): cv.Schema({
|
||||
vol.Required(CONF_LOW): cv.percentage,
|
||||
vol.Required(CONF_MEDIUM): cv.percentage,
|
||||
vol.Required(CONF_HIGH): cv.percentage,
|
||||
|
||||
@@ -19,8 +19,8 @@ Glyph = display.display_ns.class_('Glyph')
|
||||
|
||||
def validate_glyphs(value):
|
||||
if isinstance(value, list):
|
||||
value = vol.Schema([cv.string])(value)
|
||||
value = vol.Schema([cv.string])(list(value))
|
||||
value = cv.Schema([cv.string])(value)
|
||||
value = cv.Schema([cv.string])(list(value))
|
||||
|
||||
def comparator(x, y):
|
||||
x_ = x.encode('utf-8')
|
||||
@@ -69,7 +69,7 @@ def validate_truetype_file(value):
|
||||
DEFAULT_GLYPHS = u' !"%()+,-.:0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz°'
|
||||
CONF_RAW_DATA_ID = 'raw_data_id'
|
||||
|
||||
FONT_SCHEMA = vol.Schema({
|
||||
FONT_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_ID): cv.declare_variable_id(Font),
|
||||
vol.Required(CONF_FILE): validate_truetype_file,
|
||||
vol.Optional(CONF_GLYPHS, default=DEFAULT_GLYPHS): validate_glyphs,
|
||||
|
||||
@@ -10,7 +10,7 @@ GlobalVariableComponent = esphome_ns.class_('GlobalVariableComponent', Component
|
||||
|
||||
MULTI_CONF = True
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_ID): cv.declare_variable_id(GlobalVariableComponent),
|
||||
vol.Required(CONF_TYPE): cv.string_strict,
|
||||
vol.Optional(CONF_INITIAL_VALUE): cv.string_strict,
|
||||
|
||||
@@ -11,12 +11,12 @@ from esphome.cpp_types import App, Component, esphome_ns
|
||||
I2CComponent = esphome_ns.class_('I2CComponent', Component)
|
||||
I2CDevice = pins.I2CDevice
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(I2CComponent),
|
||||
vol.Optional(CONF_SDA, default='SDA'): pins.input_pin,
|
||||
vol.Optional(CONF_SCL, default='SCL'): pins.input_pin,
|
||||
vol.Optional(CONF_FREQUENCY): vol.All(cv.frequency, vol.Range(min=0, min_included=False)),
|
||||
vol.Optional(CONF_SCAN): cv.boolean,
|
||||
vol.Optional(CONF_SCAN, default=True): cv.boolean,
|
||||
|
||||
vol.Optional(CONF_RECEIVE_TIMEOUT): cv.invalid("The receive_timeout option has been removed "
|
||||
"because timeouts are already handled by the "
|
||||
|
||||
@@ -20,7 +20,7 @@ Image_ = display.display_ns.class_('Image')
|
||||
|
||||
CONF_RAW_DATA_ID = 'raw_data_id'
|
||||
|
||||
IMAGE_SCHEMA = vol.Schema({
|
||||
IMAGE_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_ID): cv.declare_variable_id(Image_),
|
||||
vol.Required(CONF_FILE): cv.file_,
|
||||
vol.Optional(CONF_RESIZE): cv.dimensions,
|
||||
|
||||
@@ -5,11 +5,11 @@ import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ID, CONF_INTERVAL
|
||||
from esphome.cpp_generator import Pvariable
|
||||
from esphome.cpp_helpers import setup_component
|
||||
from esphome.cpp_types import App, NoArg, PollingComponent, Trigger, esphome_ns
|
||||
from esphome.cpp_types import App, PollingComponent, Trigger, esphome_ns
|
||||
|
||||
IntervalTrigger = esphome_ns.class_('IntervalTrigger', Trigger.template(NoArg), PollingComponent)
|
||||
IntervalTrigger = esphome_ns.class_('IntervalTrigger', Trigger.template(), PollingComponent)
|
||||
|
||||
CONFIG_SCHEMA = automation.validate_automation(vol.Schema({
|
||||
CONFIG_SCHEMA = automation.validate_automation(cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(IntervalTrigger),
|
||||
vol.Required(CONF_INTERVAL): cv.positive_time_period_milliseconds,
|
||||
}).extend(cv.COMPONENT_SCHEMA.schema))
|
||||
@@ -21,4 +21,4 @@ def to_code(config):
|
||||
trigger = Pvariable(conf[CONF_ID], rhs)
|
||||
setup_component(trigger, conf)
|
||||
|
||||
automation.build_automation(trigger, NoArg, conf)
|
||||
automation.build_automations(trigger, [], conf)
|
||||
|
||||
@@ -88,22 +88,22 @@ ADDRESSABLE_EFFECTS = RGB_EFFECTS + [CONF_ADDRESSABLE_LAMBDA, CONF_ADDRESSABLE_R
|
||||
CONF_ADDRESSABLE_TWINKLE, CONF_ADDRESSABLE_RANDOM_TWINKLE,
|
||||
CONF_ADDRESSABLE_FIREWORKS, CONF_ADDRESSABLE_FLICKER]
|
||||
|
||||
EFFECTS_SCHEMA = vol.Schema({
|
||||
vol.Optional(CONF_LAMBDA): vol.Schema({
|
||||
EFFECTS_SCHEMA = cv.Schema({
|
||||
vol.Optional(CONF_LAMBDA): cv.Schema({
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Required(CONF_LAMBDA): cv.lambda_,
|
||||
vol.Optional(CONF_UPDATE_INTERVAL, default='0ms'): cv.positive_time_period_milliseconds,
|
||||
}),
|
||||
vol.Optional(CONF_RANDOM): vol.Schema({
|
||||
vol.Optional(CONF_RANDOM): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(RandomLightEffect),
|
||||
vol.Optional(CONF_NAME, default="Random"): cv.string,
|
||||
vol.Optional(CONF_TRANSITION_LENGTH): cv.positive_time_period_milliseconds,
|
||||
vol.Optional(CONF_UPDATE_INTERVAL): cv.positive_time_period_milliseconds,
|
||||
}),
|
||||
vol.Optional(CONF_STROBE): vol.Schema({
|
||||
vol.Optional(CONF_STROBE): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(StrobeLightEffect),
|
||||
vol.Optional(CONF_NAME, default="Strobe"): cv.string,
|
||||
vol.Optional(CONF_COLORS): vol.All(cv.ensure_list(vol.Schema({
|
||||
vol.Optional(CONF_COLORS): vol.All(cv.ensure_list(cv.Schema({
|
||||
vol.Optional(CONF_STATE, default=True): cv.boolean,
|
||||
vol.Optional(CONF_BRIGHTNESS, default=1.0): cv.percentage,
|
||||
vol.Optional(CONF_RED, default=1.0): cv.percentage,
|
||||
@@ -114,24 +114,24 @@ EFFECTS_SCHEMA = vol.Schema({
|
||||
}), cv.has_at_least_one_key(CONF_STATE, CONF_BRIGHTNESS, CONF_RED, CONF_GREEN, CONF_BLUE,
|
||||
CONF_WHITE)), vol.Length(min=2)),
|
||||
}),
|
||||
vol.Optional(CONF_FLICKER): vol.Schema({
|
||||
vol.Optional(CONF_FLICKER): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(FlickerLightEffect),
|
||||
vol.Optional(CONF_NAME, default="Flicker"): cv.string,
|
||||
vol.Optional(CONF_ALPHA): cv.percentage,
|
||||
vol.Optional(CONF_INTENSITY): cv.percentage,
|
||||
}),
|
||||
vol.Optional(CONF_ADDRESSABLE_LAMBDA): vol.Schema({
|
||||
vol.Optional(CONF_ADDRESSABLE_LAMBDA): cv.Schema({
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Required(CONF_LAMBDA): cv.lambda_,
|
||||
vol.Optional(CONF_UPDATE_INTERVAL, default='0ms'): cv.positive_time_period_milliseconds,
|
||||
}),
|
||||
vol.Optional(CONF_ADDRESSABLE_RAINBOW): vol.Schema({
|
||||
vol.Optional(CONF_ADDRESSABLE_RAINBOW): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(AddressableRainbowLightEffect),
|
||||
vol.Optional(CONF_NAME, default="Rainbow"): cv.string,
|
||||
vol.Optional(CONF_SPEED): cv.uint32_t,
|
||||
vol.Optional(CONF_WIDTH): cv.uint32_t,
|
||||
}),
|
||||
vol.Optional(CONF_ADDRESSABLE_COLOR_WIPE): vol.Schema({
|
||||
vol.Optional(CONF_ADDRESSABLE_COLOR_WIPE): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(AddressableColorWipeEffect),
|
||||
vol.Optional(CONF_NAME, default="Color Wipe"): cv.string,
|
||||
vol.Optional(CONF_COLORS): cv.ensure_list({
|
||||
@@ -145,24 +145,24 @@ EFFECTS_SCHEMA = vol.Schema({
|
||||
vol.Optional(CONF_ADD_LED_INTERVAL): cv.positive_time_period_milliseconds,
|
||||
vol.Optional(CONF_REVERSE): cv.boolean,
|
||||
}),
|
||||
vol.Optional(CONF_ADDRESSABLE_SCAN): vol.Schema({
|
||||
vol.Optional(CONF_ADDRESSABLE_SCAN): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(AddressableScanEffect),
|
||||
vol.Optional(CONF_NAME, default="Scan"): cv.string,
|
||||
vol.Optional(CONF_MOVE_INTERVAL): cv.positive_time_period_milliseconds,
|
||||
}),
|
||||
vol.Optional(CONF_ADDRESSABLE_TWINKLE): vol.Schema({
|
||||
vol.Optional(CONF_ADDRESSABLE_TWINKLE): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(AddressableTwinkleEffect),
|
||||
vol.Optional(CONF_NAME, default="Twinkle"): cv.string,
|
||||
vol.Optional(CONF_TWINKLE_PROBABILITY): cv.percentage,
|
||||
vol.Optional(CONF_PROGRESS_INTERVAL): cv.positive_time_period_milliseconds,
|
||||
}),
|
||||
vol.Optional(CONF_ADDRESSABLE_RANDOM_TWINKLE): vol.Schema({
|
||||
vol.Optional(CONF_ADDRESSABLE_RANDOM_TWINKLE): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(AddressableRandomTwinkleEffect),
|
||||
vol.Optional(CONF_NAME, default="Random Twinkle"): cv.string,
|
||||
vol.Optional(CONF_TWINKLE_PROBABILITY): cv.percentage,
|
||||
vol.Optional(CONF_PROGRESS_INTERVAL): cv.positive_time_period_milliseconds,
|
||||
}),
|
||||
vol.Optional(CONF_ADDRESSABLE_FIREWORKS): vol.Schema({
|
||||
vol.Optional(CONF_ADDRESSABLE_FIREWORKS): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(AddressableFireworksEffect),
|
||||
vol.Optional(CONF_NAME, default="Fireworks"): cv.string,
|
||||
vol.Optional(CONF_UPDATE_INTERVAL): cv.positive_time_period_milliseconds,
|
||||
@@ -170,7 +170,7 @@ EFFECTS_SCHEMA = vol.Schema({
|
||||
vol.Optional(CONF_USE_RANDOM_COLOR): cv.boolean,
|
||||
vol.Optional(CONF_FADE_OUT_RATE): cv.uint8_t,
|
||||
}),
|
||||
vol.Optional(CONF_ADDRESSABLE_FLICKER): vol.Schema({
|
||||
vol.Optional(CONF_ADDRESSABLE_FLICKER): cv.Schema({
|
||||
cv.GenerateID(CONF_EFFECT_ID): cv.declare_variable_id(AddressableFlickerEffect),
|
||||
vol.Optional(CONF_NAME, default="Addressable Flicker"): cv.string,
|
||||
vol.Optional(CONF_UPDATE_INTERVAL): cv.positive_time_period_milliseconds,
|
||||
@@ -404,14 +404,14 @@ LIGHT_TOGGLE_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_LIGHT_TOGGLE, LIGHT_TOGGLE_ACTION_SCHEMA)
|
||||
def light_toggle_to_code(config, action_id, arg_type, template_arg):
|
||||
def light_toggle_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_toggle_action(template_arg)
|
||||
type = ToggleAction.template(template_arg)
|
||||
action = Pvariable(action_id, rhs, type=type)
|
||||
if CONF_TRANSITION_LENGTH in config:
|
||||
for template_ in templatable(config[CONF_TRANSITION_LENGTH], arg_type, uint32):
|
||||
for template_ in templatable(config[CONF_TRANSITION_LENGTH], args, uint32):
|
||||
yield None
|
||||
add(action.set_transition_length(template_))
|
||||
yield action
|
||||
@@ -425,14 +425,14 @@ LIGHT_TURN_OFF_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_LIGHT_TURN_OFF, LIGHT_TURN_OFF_ACTION_SCHEMA)
|
||||
def light_turn_off_to_code(config, action_id, arg_type, template_arg):
|
||||
def light_turn_off_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_turn_off_action(template_arg)
|
||||
type = TurnOffAction.template(template_arg)
|
||||
action = Pvariable(action_id, rhs, type=type)
|
||||
if CONF_TRANSITION_LENGTH in config:
|
||||
for template_ in templatable(config[CONF_TRANSITION_LENGTH], arg_type, uint32):
|
||||
for template_ in templatable(config[CONF_TRANSITION_LENGTH], args, uint32):
|
||||
yield None
|
||||
add(action.set_transition_length(template_))
|
||||
yield action
|
||||
@@ -456,46 +456,46 @@ LIGHT_TURN_ON_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_LIGHT_TURN_ON, LIGHT_TURN_ON_ACTION_SCHEMA)
|
||||
def light_turn_on_to_code(config, action_id, arg_type, template_arg):
|
||||
def light_turn_on_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_turn_on_action(template_arg)
|
||||
type = TurnOnAction.template(template_arg)
|
||||
action = Pvariable(action_id, rhs, type=type)
|
||||
if CONF_TRANSITION_LENGTH in config:
|
||||
for template_ in templatable(config[CONF_TRANSITION_LENGTH], arg_type, uint32):
|
||||
for template_ in templatable(config[CONF_TRANSITION_LENGTH], args, uint32):
|
||||
yield None
|
||||
add(action.set_transition_length(template_))
|
||||
if CONF_FLASH_LENGTH in config:
|
||||
for template_ in templatable(config[CONF_FLASH_LENGTH], arg_type, uint32):
|
||||
for template_ in templatable(config[CONF_FLASH_LENGTH], args, uint32):
|
||||
yield None
|
||||
add(action.set_flash_length(template_))
|
||||
if CONF_BRIGHTNESS in config:
|
||||
for template_ in templatable(config[CONF_BRIGHTNESS], arg_type, float_):
|
||||
for template_ in templatable(config[CONF_BRIGHTNESS], args, float_):
|
||||
yield None
|
||||
add(action.set_brightness(template_))
|
||||
if CONF_RED in config:
|
||||
for template_ in templatable(config[CONF_RED], arg_type, float_):
|
||||
for template_ in templatable(config[CONF_RED], args, float_):
|
||||
yield None
|
||||
add(action.set_red(template_))
|
||||
if CONF_GREEN in config:
|
||||
for template_ in templatable(config[CONF_GREEN], arg_type, float_):
|
||||
for template_ in templatable(config[CONF_GREEN], args, float_):
|
||||
yield None
|
||||
add(action.set_green(template_))
|
||||
if CONF_BLUE in config:
|
||||
for template_ in templatable(config[CONF_BLUE], arg_type, float_):
|
||||
for template_ in templatable(config[CONF_BLUE], args, float_):
|
||||
yield None
|
||||
add(action.set_blue(template_))
|
||||
if CONF_WHITE in config:
|
||||
for template_ in templatable(config[CONF_WHITE], arg_type, float_):
|
||||
for template_ in templatable(config[CONF_WHITE], args, float_):
|
||||
yield None
|
||||
add(action.set_white(template_))
|
||||
if CONF_COLOR_TEMPERATURE in config:
|
||||
for template_ in templatable(config[CONF_COLOR_TEMPERATURE], arg_type, float_):
|
||||
for template_ in templatable(config[CONF_COLOR_TEMPERATURE], args, float_):
|
||||
yield None
|
||||
add(action.set_color_temperature(template_))
|
||||
if CONF_EFFECT in config:
|
||||
for template_ in templatable(config[CONF_EFFECT], arg_type, std_string):
|
||||
for template_ in templatable(config[CONF_EFFECT], args, std_string):
|
||||
yield None
|
||||
add(action.set_effect(template_))
|
||||
yield action
|
||||
|
||||
@@ -73,13 +73,13 @@ def validate_local_no_higher_than_global(value):
|
||||
|
||||
LogComponent = esphome_ns.class_('LogComponent', Component)
|
||||
|
||||
CONFIG_SCHEMA = vol.All(vol.Schema({
|
||||
CONFIG_SCHEMA = vol.All(cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(LogComponent),
|
||||
vol.Optional(CONF_BAUD_RATE, default=115200): cv.positive_int,
|
||||
vol.Optional(CONF_TX_BUFFER_SIZE, default=512): cv.validate_bytes,
|
||||
vol.Optional(CONF_HARDWARE_UART, default='UART0'): uart_selection,
|
||||
vol.Optional(CONF_LEVEL): is_log_level,
|
||||
vol.Optional(CONF_LOGS): vol.Schema({
|
||||
vol.Optional(CONF_LOGS): cv.Schema({
|
||||
cv.string: is_log_level,
|
||||
})
|
||||
}), validate_local_no_higher_than_global)
|
||||
@@ -102,7 +102,9 @@ def required_build_flags(config):
|
||||
flags.append(u'-DESPHOME_LOG_LEVEL={}'.format(str(LOG_LEVELS[config[CONF_LEVEL]])))
|
||||
this_severity = LOG_LEVEL_SEVERITY.index(config[CONF_LEVEL])
|
||||
verbose_severity = LOG_LEVEL_SEVERITY.index('VERBOSE')
|
||||
very_verbose_severity = LOG_LEVEL_SEVERITY.index('VERY_VERBOSE')
|
||||
is_at_least_verbose = this_severity >= verbose_severity
|
||||
is_at_least_very_verbose = this_severity >= very_verbose_severity
|
||||
has_serial_logging = config.get(CONF_BAUD_RATE) != 0
|
||||
if CORE.is_esp8266 and has_serial_logging and is_at_least_verbose:
|
||||
debug_serial_port = HARDWARE_UART_TO_SERIAL[config.get(CONF_HARDWARE_UART)]
|
||||
@@ -122,6 +124,8 @@ def required_build_flags(config):
|
||||
flags.append(u"-DDEBUG_ESP_{}".format(comp))
|
||||
if CORE.is_esp32 and is_at_least_verbose:
|
||||
flags.append('-DCORE_DEBUG_LEVEL=5')
|
||||
if CORE.is_esp32 and is_at_least_very_verbose:
|
||||
flags.append('-DENABLE_I2C_DEBUG_BUFFER')
|
||||
|
||||
return flags
|
||||
|
||||
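
The logger hunks above derive extra compiler defines from the configured log level by comparing positions in the severity-ordered LOG_LEVEL_SEVERITY list. A standalone sketch of that comparison; the ordering below is an assumption for illustration, since the real list is defined elsewhere in esphome/components/logger.py:

# Minimal sketch of the severity comparison in required_build_flags().
# LOG_LEVEL_SEVERITY ordering here is assumed, not copied from the source.
LOG_LEVEL_SEVERITY = ['NONE', 'ERROR', 'WARN', 'INFO', 'DEBUG', 'VERBOSE', 'VERY_VERBOSE']


def extra_debug_flags(level, is_esp32=True):
    this_severity = LOG_LEVEL_SEVERITY.index(level)
    is_at_least_verbose = this_severity >= LOG_LEVEL_SEVERITY.index('VERBOSE')
    is_at_least_very_verbose = this_severity >= LOG_LEVEL_SEVERITY.index('VERY_VERBOSE')
    flags = []
    if is_esp32 and is_at_least_verbose:
        flags.append('-DCORE_DEBUG_LEVEL=5')
    if is_esp32 and is_at_least_very_verbose:
        flags.append('-DENABLE_I2C_DEBUG_BUFFER')
    return flags


print(extra_debug_flags('VERY_VERBOSE'))  # ['-DCORE_DEBUG_LEVEL=5', '-DENABLE_I2C_DEBUG_BUFFER']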
@@ -129,8 +133,8 @@ def required_build_flags(config):
|
||||
def maybe_simple_message(schema):
|
||||
def validator(value):
|
||||
if isinstance(value, dict):
|
||||
return vol.Schema(schema)(value)
|
||||
return vol.Schema(schema)({CONF_FORMAT: value})
|
||||
return cv.Schema(schema)(value)
|
||||
return cv.Schema(schema)({CONF_FORMAT: value})
|
||||
|
||||
return validator
|
||||
|
||||
@@ -167,13 +171,13 @@ LOGGER_LOG_ACTION_SCHEMA = vol.All(maybe_simple_message({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_LOGGER_LOG, LOGGER_LOG_ACTION_SCHEMA)
|
||||
def logger_log_action_to_code(config, action_id, arg_type, template_arg):
|
||||
def logger_log_action_to_code(config, action_id, template_arg, args):
|
||||
esp_log = LOG_LEVEL_TO_ESP_LOG[config[CONF_LEVEL]]
|
||||
args = [RawExpression(text_type(x)) for x in config[CONF_ARGS]]
|
||||
args_ = [RawExpression(text_type(x)) for x in config[CONF_ARGS]]
|
||||
|
||||
text = text_type(statement(esp_log(config[CONF_TAG], config[CONF_FORMAT], *args)))
|
||||
text = text_type(statement(esp_log(config[CONF_TAG], config[CONF_FORMAT], *args_)))
|
||||
|
||||
for lambda_ in process_lambda(Lambda(text), [(arg_type, 'x')], return_type=void):
|
||||
for lambda_ in process_lambda(Lambda(text), args, return_type=void):
|
||||
yield None
|
||||
rhs = LambdaAction.new(template_arg, lambda_)
|
||||
type = LambdaAction.template(template_arg)

35 esphome/components/mcp23017.py Normal file
@@ -0,0 +1,35 @@
import voluptuous as vol

from esphome import pins
import esphome.config_validation as cv
from esphome.const import CONF_ADDRESS, CONF_ID
from esphome.cpp_generator import Pvariable
from esphome.cpp_helpers import setup_component
from esphome.cpp_types import App, GPIOInputPin, GPIOOutputPin, io_ns, esphome_ns

DEPENDENCIES = ['i2c']
MULTI_CONF = True

MCP23017GPIOMode = esphome_ns.enum('MCP23017GPIOMode')
MCP23017_GPIO_MODES = {
    'INPUT': MCP23017GPIOMode.MCP23017_INPUT,
    'INPUT_PULLUP': MCP23017GPIOMode.MCP23017_INPUT_PULLUP,
    'OUTPUT': MCP23017GPIOMode.MCP23017_OUTPUT,
}

MCP23017GPIOInputPin = io_ns.class_('MCP23017GPIOInputPin', GPIOInputPin)
MCP23017GPIOOutputPin = io_ns.class_('MCP23017GPIOOutputPin', GPIOOutputPin)

CONFIG_SCHEMA = cv.Schema({
    vol.Required(CONF_ID): cv.declare_variable_id(pins.MCP23017),
    vol.Optional(CONF_ADDRESS, default=0x20): cv.i2c_address,
}).extend(cv.COMPONENT_SCHEMA.schema)


def to_code(config):
    rhs = App.make_mcp23017_component(config[CONF_ADDRESS])
    var = Pvariable(config[CONF_ID], rhs)
    setup_component(var, config)


BUILD_FLAGS = '-DUSE_MCP23017'

29 esphome/components/mpr121.py Normal file
@@ -0,0 +1,29 @@
import voluptuous as vol

from esphome.components import i2c, binary_sensor
import esphome.config_validation as cv
from esphome.const import CONF_ADDRESS, CONF_ID
from esphome.cpp_generator import Pvariable
from esphome.cpp_helpers import setup_component
from esphome.cpp_types import App, Component

DEPENDENCIES = ['i2c']
MULTI_CONF = True

CONF_MPR121_ID = 'mpr121_id'
MPR121Component = binary_sensor.binary_sensor_ns.class_('MPR121Component', Component, i2c.I2CDevice)

CONFIG_SCHEMA = cv.Schema({
    cv.GenerateID(): cv.declare_variable_id(MPR121Component),
    vol.Optional(CONF_ADDRESS): cv.i2c_address
}).extend(cv.COMPONENT_SCHEMA.schema)


def to_code(config):
    rhs = App.make_mpr121(config.get(CONF_ADDRESS))
    var = Pvariable(config[CONF_ID], rhs)

    setup_component(var, config)


BUILD_FLAGS = '-DUSE_MPR121'
@@ -4,7 +4,7 @@ import re
|
||||
import voluptuous as vol
|
||||
|
||||
from esphome import automation
|
||||
from esphome.automation import ACTION_REGISTRY
|
||||
from esphome.automation import ACTION_REGISTRY, CONDITION_REGISTRY, Condition
|
||||
from esphome.components import logger
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_AVAILABILITY, CONF_BIRTH_MESSAGE, CONF_BROKER, CONF_CLIENT_ID, \
|
||||
@@ -26,7 +26,7 @@ def validate_message_just_topic(value):
|
||||
return MQTT_MESSAGE_BASE({CONF_TOPIC: value})
|
||||
|
||||
|
||||
MQTT_MESSAGE_BASE = vol.Schema({
|
||||
MQTT_MESSAGE_BASE = cv.Schema({
|
||||
vol.Required(CONF_TOPIC): cv.publish_topic,
|
||||
vol.Optional(CONF_QOS, default=0): cv.mqtt_qos,
|
||||
vol.Optional(CONF_RETAIN, default=True): cv.boolean,
|
||||
@@ -47,6 +47,7 @@ MQTTMessageTrigger = mqtt_ns.class_('MQTTMessageTrigger', Trigger.template(std_s
|
||||
MQTTJsonMessageTrigger = mqtt_ns.class_('MQTTJsonMessageTrigger',
|
||||
Trigger.template(JsonObjectConstRef))
|
||||
MQTTComponent = mqtt_ns.class_('MQTTComponent', Component)
|
||||
MQTTConnectedCondition = mqtt_ns.class_('MQTTConnectedCondition', Condition)
|
||||
|
||||
|
||||
def validate_config(value):
|
||||
@@ -67,7 +68,7 @@ def validate_fingerprint(value):
|
||||
return value
|
||||
|
||||
|
||||
CONFIG_SCHEMA = vol.All(vol.Schema({
|
||||
CONFIG_SCHEMA = vol.All(cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(MQTTClientComponent),
|
||||
vol.Required(CONF_BROKER): cv.string_strict,
|
||||
vol.Optional(CONF_PORT): cv.port,
|
||||
@@ -184,16 +185,16 @@ def to_code(config):
|
||||
add(trigger.set_qos(conf[CONF_QOS]))
|
||||
if CONF_PAYLOAD in conf:
|
||||
add(trigger.set_payload(conf[CONF_PAYLOAD]))
|
||||
automation.build_automation(trigger, std_string, conf)
|
||||
automation.build_automations(trigger, [(std_string, 'x')], conf)
|
||||
|
||||
for conf in config.get(CONF_ON_JSON_MESSAGE, []):
|
||||
rhs = mqtt.make_json_message_trigger(conf[CONF_TOPIC], conf[CONF_QOS])
|
||||
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
|
||||
automation.build_automation(trigger, JsonObjectConstRef, conf)
|
||||
automation.build_automations(trigger, [(JsonObjectConstRef, 'x')], conf)
|
||||
|
||||
|
||||
CONF_MQTT_PUBLISH = 'mqtt.publish'
|
||||
MQTT_PUBLISH_ACTION_SCHEMA = vol.Schema({
|
||||
MQTT_PUBLISH_ACTION_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.use_variable_id(MQTTClientComponent),
|
||||
vol.Required(CONF_TOPIC): cv.templatable(cv.publish_topic),
|
||||
vol.Required(CONF_PAYLOAD): cv.templatable(cv.mqtt_payload),
|
||||
@@ -203,32 +204,32 @@ MQTT_PUBLISH_ACTION_SCHEMA = vol.Schema({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_MQTT_PUBLISH, MQTT_PUBLISH_ACTION_SCHEMA)
|
||||
def mqtt_publish_action_to_code(config, action_id, arg_type, template_arg):
|
||||
def mqtt_publish_action_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_publish_action(template_arg)
|
||||
type = MQTTPublishAction.template(template_arg)
|
||||
action = Pvariable(action_id, rhs, type=type)
|
||||
for template_ in templatable(config[CONF_TOPIC], arg_type, std_string):
|
||||
for template_ in templatable(config[CONF_TOPIC], args, std_string):
|
||||
yield None
|
||||
add(action.set_topic(template_))
|
||||
|
||||
for template_ in templatable(config[CONF_PAYLOAD], arg_type, std_string):
|
||||
for template_ in templatable(config[CONF_PAYLOAD], args, std_string):
|
||||
yield None
|
||||
add(action.set_payload(template_))
|
||||
if CONF_QOS in config:
|
||||
for template_ in templatable(config[CONF_QOS], arg_type, uint8):
|
||||
for template_ in templatable(config[CONF_QOS], args, uint8):
|
||||
yield
|
||||
add(action.set_qos(template_))
|
||||
if CONF_RETAIN in config:
|
||||
for template_ in templatable(config[CONF_RETAIN], arg_type, bool_):
|
||||
for template_ in templatable(config[CONF_RETAIN], args, bool_):
|
||||
yield None
|
||||
add(action.set_retain(template_))
|
||||
yield action
|
||||
|
||||
|
||||
CONF_MQTT_PUBLISH_JSON = 'mqtt.publish_json'
|
||||
MQTT_PUBLISH_JSON_ACTION_SCHEMA = vol.Schema({
|
||||
MQTT_PUBLISH_JSON_ACTION_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.use_variable_id(MQTTClientComponent),
|
||||
vol.Required(CONF_TOPIC): cv.templatable(cv.publish_topic),
|
||||
vol.Required(CONF_PAYLOAD): cv.lambda_,
|
||||
@@ -238,18 +239,18 @@ MQTT_PUBLISH_JSON_ACTION_SCHEMA = vol.Schema({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_MQTT_PUBLISH_JSON, MQTT_PUBLISH_JSON_ACTION_SCHEMA)
|
||||
def mqtt_publish_json_action_to_code(config, action_id, arg_type, template_arg):
|
||||
def mqtt_publish_json_action_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_publish_json_action(template_arg)
|
||||
type = MQTTPublishJsonAction.template(template_arg)
|
||||
action = Pvariable(action_id, rhs, type=type)
|
||||
for template_ in templatable(config[CONF_TOPIC], arg_type, std_string):
|
||||
for template_ in templatable(config[CONF_TOPIC], args, std_string):
|
||||
yield None
|
||||
add(action.set_topic(template_))
|
||||
|
||||
for lambda_ in process_lambda(config[CONF_PAYLOAD], [(arg_type, 'x'), (JsonObjectRef, 'root')],
|
||||
return_type=void):
|
||||
args_ = args + [(JsonObjectRef, 'root')]
|
||||
for lambda_ in process_lambda(config[CONF_PAYLOAD], args_, return_type=void):
|
||||
yield None
|
||||
add(action.set_payload(lambda_))
|
||||
if CONF_QOS in config:
|
||||
@@ -347,3 +348,13 @@ def setup_mqtt_component(obj, config):
|
||||
|
||||
LIB_DEPS = 'AsyncMqttClient@0.8.2'
|
||||
BUILD_FLAGS = '-DUSE_MQTT'
|
||||
|
||||
CONF_MQTT_CONNECTED = 'mqtt.connected'
|
||||
MQTT_CONNECTED_CONDITION_SCHEMA = cv.Schema({})
|
||||
|
||||
|
||||
@CONDITION_REGISTRY.register(CONF_MQTT_CONNECTED, MQTT_CONNECTED_CONDITION_SCHEMA)
|
||||
def mqtt_connected_to_code(config, condition_id, template_arg, args):
|
||||
rhs = MQTTConnectedCondition.new(template_arg)
|
||||
type = MQTTConnectedCondition.template(template_arg)
|
||||
yield Pvariable(condition_id, rhs, type=type)
|
||||
|
||||
@@ -12,7 +12,7 @@ from esphome.cpp_types import App, Component
|
||||
MY9231OutputComponent = output.output_ns.class_('MY9231OutputComponent', Component)
|
||||
MULTI_CONF = True
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(MY9231OutputComponent),
|
||||
vol.Required(CONF_DATA_PIN): pins.gpio_output_pin_schema,
|
||||
vol.Required(CONF_CLOCK_PIN): pins.gpio_output_pin_schema,
|
||||
|
||||
@@ -12,7 +12,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
OTAComponent = esphome_ns.class_('OTAComponent', Component)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(OTAComponent),
|
||||
vol.Optional(CONF_SAFE_MODE, default=True): cv.boolean,
|
||||
vol.Optional(CONF_PORT): cv.port,
|
||||
@@ -51,7 +51,7 @@ REQUIRED_BUILD_FLAGS = '-DUSE_NEW_OTA'
|
||||
|
||||
def lib_deps(config):
|
||||
if CORE.is_esp32:
|
||||
return ['Update', 'ESPmDNS']
|
||||
return ['Update']
|
||||
if CORE.is_esp8266:
|
||||
return ['Hash', 'ESP8266mDNS']
|
||||
return ['Hash']
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -3,7 +3,8 @@ import voluptuous as vol
|
||||
from esphome.automation import ACTION_REGISTRY, maybe_simple_id
|
||||
from esphome.components.power_supply import PowerSupplyComponent
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ID, CONF_INVERTED, CONF_LEVEL, CONF_MAX_POWER, CONF_POWER_SUPPLY
|
||||
from esphome.const import CONF_ID, CONF_INVERTED, CONF_LEVEL, CONF_MAX_POWER, \
|
||||
CONF_MIN_POWER, CONF_POWER_SUPPLY
|
||||
from esphome.core import CORE
|
||||
from esphome.cpp_generator import Pvariable, add, get_variable, templatable
|
||||
from esphome.cpp_types import Action, esphome_ns, float_
|
||||
@@ -12,7 +13,7 @@ PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({
|
||||
|
||||
})
|
||||
|
||||
BINARY_OUTPUT_SCHEMA = vol.Schema({
|
||||
BINARY_OUTPUT_SCHEMA = cv.Schema({
|
||||
vol.Optional(CONF_POWER_SUPPLY): cv.use_variable_id(PowerSupplyComponent),
|
||||
vol.Optional(CONF_INVERTED): cv.boolean,
|
||||
})
|
||||
@@ -21,6 +22,7 @@ BINARY_OUTPUT_PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(BINARY_OUTPUT_SCHEMA.sche
|
||||
|
||||
FLOAT_OUTPUT_SCHEMA = BINARY_OUTPUT_SCHEMA.extend({
|
||||
vol.Optional(CONF_MAX_POWER): cv.percentage,
|
||||
vol.Optional(CONF_MIN_POWER): cv.percentage,
|
||||
})
|
||||
|
||||
FLOAT_OUTPUT_PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(FLOAT_OUTPUT_SCHEMA.schema)
|
||||
@@ -47,6 +49,8 @@ def setup_output_platform_(obj, config, skip_power_supply=False):
|
||||
add(obj.set_power_supply(power_supply))
|
||||
if CONF_MAX_POWER in config:
|
||||
add(obj.set_max_power(config[CONF_MAX_POWER]))
|
||||
if CONF_MIN_POWER in config:
|
||||
add(obj.set_min_power(config[CONF_MIN_POWER]))
|
||||
|
||||
|
||||
def setup_output_platform(obj, config, skip_power_supply=False):
|
||||
@@ -67,11 +71,11 @@ OUTPUT_TURN_ON_ACTION = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_OUTPUT_TURN_ON, OUTPUT_TURN_ON_ACTION)
|
||||
def output_turn_on_to_code(config, action_id, arg_type, template_arg):
|
||||
def output_turn_on_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_turn_on_action(template_arg)
|
||||
type = TurnOnAction.template(arg_type)
|
||||
type = TurnOnAction.template(template_arg)
|
||||
yield Pvariable(action_id, rhs, type=type)
|
||||
|
||||
|
||||
@@ -82,29 +86,29 @@ OUTPUT_TURN_OFF_ACTION = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_OUTPUT_TURN_OFF, OUTPUT_TURN_OFF_ACTION)
|
||||
def output_turn_off_to_code(config, action_id, arg_type, template_arg):
|
||||
def output_turn_off_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_turn_off_action(template_arg)
|
||||
type = TurnOffAction.template(arg_type)
|
||||
type = TurnOffAction.template(template_arg)
|
||||
yield Pvariable(action_id, rhs, type=type)
|
||||
|
||||
|
||||
CONF_OUTPUT_SET_LEVEL = 'output.set_level'
|
||||
OUTPUT_SET_LEVEL_ACTION = vol.Schema({
|
||||
OUTPUT_SET_LEVEL_ACTION = cv.Schema({
|
||||
vol.Required(CONF_ID): cv.use_variable_id(FloatOutput),
|
||||
vol.Required(CONF_LEVEL): cv.templatable(cv.percentage),
|
||||
})
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_OUTPUT_SET_LEVEL, OUTPUT_SET_LEVEL_ACTION)
|
||||
def output_set_level_to_code(config, action_id, arg_type, template_arg):
|
||||
def output_set_level_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_set_level_action(template_arg)
|
||||
type = SetLevelAction.template(arg_type)
|
||||
type = SetLevelAction.template(template_arg)
|
||||
action = Pvariable(action_id, rhs, type=type)
|
||||
for template_ in templatable(config[CONF_LEVEL], arg_type, float_):
|
||||
for template_ in templatable(config[CONF_LEVEL], args, float_):
|
||||
yield None
|
||||
add(action.set_level(template_))
|
||||
yield action

58 esphome/components/output/copy.py Normal file
@@ -0,0 +1,58 @@
import voluptuous as vol

from esphome.components import output
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_OUTPUTS, CONF_TYPE
from esphome.cpp_generator import Pvariable, get_variable
from esphome.cpp_helpers import setup_component

BinaryCopyOutput = output.output_ns.class_('BinaryCopyOutput', output.BinaryOutput)
FloatCopyOutput = output.output_ns.class_('FloatCopyOutput', output.FloatOutput)

BINARY_SCHEMA = output.PLATFORM_SCHEMA.extend({
    vol.Required(CONF_ID): cv.declare_variable_id(BinaryCopyOutput),
    vol.Required(CONF_TYPE): 'binary',
    vol.Required(CONF_OUTPUTS): cv.ensure_list(cv.use_variable_id(output.BinaryOutput)),
})

FLOAT_SCHEMA = output.PLATFORM_SCHEMA.extend({
    vol.Required(CONF_ID): cv.declare_variable_id(FloatCopyOutput),
    vol.Required(CONF_TYPE): 'float',
    vol.Required(CONF_OUTPUTS): cv.ensure_list(cv.use_variable_id(output.FloatOutput)),
})


def validate_copy_output(value):
    if not isinstance(value, dict):
        raise vol.Invalid("Value must be dict")
    type = cv.string_strict(value.get(CONF_TYPE, 'float')).lower()
    value[CONF_TYPE] = type
    if type == 'binary':
        return BINARY_SCHEMA(value)
    if type == 'float':
        return FLOAT_SCHEMA(value)
    raise vol.Invalid("type must either be binary or float, not {}!".format(type))


PLATFORM_SCHEMA = validate_copy_output


def to_code(config):
    outputs = []
    for out in config[CONF_OUTPUTS]:
        for var in get_variable(out):
            yield
        outputs.append(var)

    klass = {
        'binary': BinaryCopyOutput,
        'float': FloatCopyOutput,
    }[config[CONF_TYPE]]
    rhs = klass.new(outputs)
    gpio = Pvariable(config[CONF_ID], rhs)

    output.setup_output_platform(gpio, config)
    setup_component(gpio, config)


BUILD_FLAGS = '-DUSE_COPY_OUTPUT'
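
validate_copy_output() above dispatches between the binary and float schemas based on a case-insensitive type key that defaults to 'float'. A runnable sketch of the same dispatch pattern with stand-in schemas and key names (not the esphome ones), so it can be tried outside the codebase:

# Stand-in re-implementation of the type-dispatch validator pattern above.
# Schema contents and key names here are illustrative, not esphome's.
import voluptuous as vol

BINARY_SCHEMA = vol.Schema({vol.Required('type'): 'binary', vol.Required('outputs'): [str]})
FLOAT_SCHEMA = vol.Schema({vol.Required('type'): 'float', vol.Required('outputs'): [str]})


def validate_copy_output(value):
    if not isinstance(value, dict):
        raise vol.Invalid("Value must be dict")
    type_ = str(value.get('type', 'float')).lower()
    value['type'] = type_
    if type_ == 'binary':
        return BINARY_SCHEMA(value)
    if type_ == 'float':
        return FLOAT_SCHEMA(value)
    raise vol.Invalid("type must either be binary or float, not {}!".format(type_))


print(validate_copy_output({'type': 'Binary', 'outputs': ['out_a', 'out_b']}))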
@@ -13,7 +13,7 @@ MULTI_CONF = True
|
||||
PCA9685OutputComponent = output.output_ns.class_('PCA9685OutputComponent',
|
||||
Component, i2c.I2CDevice)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(PCA9685OutputComponent),
|
||||
vol.Required(CONF_FREQUENCY): vol.All(cv.frequency,
|
||||
vol.Range(min=23.84, max=1525.88)),
|
||||
|
||||
@@ -20,7 +20,7 @@ PCF8675_GPIO_MODES = {
|
||||
PCF8574GPIOInputPin = io_ns.class_('PCF8574GPIOInputPin', GPIOInputPin)
|
||||
PCF8574GPIOOutputPin = io_ns.class_('PCF8574GPIOOutputPin', GPIOOutputPin)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_ID): cv.declare_variable_id(pins.PCF8574Component),
|
||||
vol.Optional(CONF_ADDRESS, default=0x21): cv.i2c_address,
|
||||
vol.Optional(CONF_PCF8575, default=False): cv.boolean,
|
||||
|
||||
@@ -17,7 +17,7 @@ PN532Component = binary_sensor.binary_sensor_ns.class_('PN532Component', Polling
|
||||
spi.SPIDevice)
|
||||
PN532Trigger = binary_sensor.binary_sensor_ns.class_('PN532Trigger', Trigger.template(std_string))
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(PN532Component),
|
||||
cv.GenerateID(CONF_SPI_ID): cv.use_variable_id(SPIComponent),
|
||||
vol.Required(CONF_CS_PIN): pins.gpio_output_pin_schema,
|
||||
@@ -38,7 +38,7 @@ def to_code(config):
|
||||
|
||||
for conf_ in config.get(CONF_ON_TAG, []):
|
||||
trigger = Pvariable(conf_[CONF_TRIGGER_ID], pn532.make_trigger())
|
||||
automation.build_automation(trigger, std_string, conf_)
|
||||
automation.build_automations(trigger, [(std_string, 'x')], conf_)
|
||||
|
||||
setup_component(pn532, config)
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ PowerSupplyComponent = esphome_ns.class_('PowerSupplyComponent', Component)
|
||||
|
||||
MULTI_CONF = True
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_ID): cv.declare_variable_id(PowerSupplyComponent),
|
||||
vol.Required(CONF_PIN): pins.gpio_output_pin_schema,
|
||||
vol.Optional(CONF_ENABLE_TIME): cv.positive_time_period_milliseconds,
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import voluptuous as vol
|
||||
|
||||
from esphome.components import binary_sensor, uart
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ID, CONF_UART_ID
|
||||
@@ -12,7 +10,7 @@ DEPENDENCIES = ['uart']
|
||||
RDM6300Component = binary_sensor.binary_sensor_ns.class_('RDM6300Component', Component,
|
||||
uart.UARTDevice)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(RDM6300Component),
|
||||
cv.GenerateID(CONF_UART_ID): cv.use_variable_id(uart.UARTComponent),
|
||||
}).extend(cv.COMPONENT_SCHEMA.schema)
|
||||
|
||||
@@ -40,9 +40,10 @@ def validate_dumpers_all(value):
|
||||
raise vol.Invalid("Not valid dumpers")
|
||||
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(RemoteReceiverComponent),
|
||||
vol.Required(CONF_PIN): pins.gpio_input_pin_schema,
|
||||
vol.Required(CONF_PIN): vol.All(pins.internal_gpio_input_pin_schema,
|
||||
pins.validate_has_interrupt),
|
||||
vol.Optional(CONF_DUMP, default=[]):
|
||||
vol.Any(validate_dumpers_all, cv.ensure_list(cv.one_of(*DUMPERS, lower=True))),
|
||||
vol.Optional(CONF_TOLERANCE): vol.All(cv.percentage_int, vol.Range(min=0)),
|
||||
|
||||
@@ -39,7 +39,7 @@ RC_SWITCH_TIMING_SCHEMA = vol.All([cv.uint8_t], vol.Length(min=2, max=2))
|
||||
|
||||
RC_SWITCH_PROTOCOL_SCHEMA = vol.Any(
|
||||
vol.All(vol.Coerce(int), vol.Range(min=1, max=7)),
|
||||
vol.Schema({
|
||||
cv.Schema({
|
||||
vol.Required(CONF_PULSE_LENGTH): cv.uint32_t,
|
||||
vol.Optional(CONF_SYNC, default=[1, 31]): RC_SWITCH_TIMING_SCHEMA,
|
||||
vol.Optional(CONF_ZERO, default=[1, 3]): RC_SWITCH_TIMING_SCHEMA,
|
||||
@@ -48,23 +48,23 @@ RC_SWITCH_PROTOCOL_SCHEMA = vol.Any(
|
||||
})
|
||||
)
|
||||
|
||||
RC_SWITCH_RAW_SCHEMA = vol.Schema({
|
||||
RC_SWITCH_RAW_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_CODE): validate_rc_switch_code,
|
||||
vol.Optional(CONF_PROTOCOL, default=1): RC_SWITCH_PROTOCOL_SCHEMA,
|
||||
})
|
||||
RC_SWITCH_TYPE_A_SCHEMA = vol.Schema({
|
||||
RC_SWITCH_TYPE_A_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_GROUP): vol.All(validate_rc_switch_code, vol.Length(min=5, max=5)),
|
||||
vol.Required(CONF_DEVICE): vol.All(validate_rc_switch_code, vol.Length(min=5, max=5)),
|
||||
vol.Required(CONF_STATE): cv.boolean,
|
||||
vol.Optional(CONF_PROTOCOL, default=1): RC_SWITCH_PROTOCOL_SCHEMA,
|
||||
})
|
||||
RC_SWITCH_TYPE_B_SCHEMA = vol.Schema({
|
||||
RC_SWITCH_TYPE_B_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_ADDRESS): vol.All(cv.uint8_t, vol.Range(min=1, max=4)),
|
||||
vol.Required(CONF_CHANNEL): vol.All(cv.uint8_t, vol.Range(min=1, max=4)),
|
||||
vol.Required(CONF_STATE): cv.boolean,
|
||||
vol.Optional(CONF_PROTOCOL, default=1): RC_SWITCH_PROTOCOL_SCHEMA,
|
||||
})
|
||||
RC_SWITCH_TYPE_C_SCHEMA = vol.Schema({
|
||||
RC_SWITCH_TYPE_C_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_FAMILY): cv.one_of('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k',
|
||||
'l', 'm', 'n', 'o', 'p', lower=True),
|
||||
vol.Required(CONF_GROUP): vol.All(cv.uint8_t, vol.Range(min=1, max=4)),
|
||||
@@ -72,14 +72,14 @@ RC_SWITCH_TYPE_C_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_STATE): cv.boolean,
|
||||
vol.Optional(CONF_PROTOCOL, default=1): RC_SWITCH_PROTOCOL_SCHEMA,
|
||||
})
|
||||
RC_SWITCH_TYPE_D_SCHEMA = vol.Schema({
|
||||
RC_SWITCH_TYPE_D_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_GROUP): cv.one_of('a', 'b', 'c', 'd', lower=True),
|
||||
vol.Required(CONF_DEVICE): vol.All(cv.uint8_t, vol.Range(min=1, max=3)),
|
||||
vol.Required(CONF_STATE): cv.boolean,
|
||||
vol.Optional(CONF_PROTOCOL, default=1): RC_SWITCH_PROTOCOL_SCHEMA,
|
||||
})
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_variable_id(RemoteTransmitterComponent),
|
||||
vol.Required(CONF_PIN): pins.gpio_output_pin_schema,
|
||||
vol.Optional(CONF_CARRIER_DUTY_PERCENT): vol.All(cv.percentage_int,
|
||||
|
||||
@@ -5,9 +5,9 @@ from esphome.automation import ACTION_REGISTRY, maybe_simple_id
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ID
|
||||
from esphome.cpp_generator import Pvariable, get_variable
|
||||
from esphome.cpp_types import Action, NoArg, Trigger, esphome_ns
|
||||
from esphome.cpp_types import Action, Trigger, esphome_ns
|
||||
|
||||
Script = esphome_ns.class_('Script', Trigger.template(NoArg))
|
||||
Script = esphome_ns.class_('Script', Trigger.template())
|
||||
ScriptExecuteAction = esphome_ns.class_('ScriptExecuteAction', Action)
|
||||
ScriptStopAction = esphome_ns.class_('ScriptStopAction', Action)
|
||||
|
||||
@@ -19,7 +19,7 @@ CONFIG_SCHEMA = automation.validate_automation({
|
||||
def to_code(config):
|
||||
for conf in config:
|
||||
trigger = Pvariable(conf[CONF_ID], Script.new())
|
||||
automation.build_automation(trigger, NoArg, conf)
|
||||
automation.build_automations(trigger, [], conf)
|
||||
|
||||
|
||||
CONF_SCRIPT_EXECUTE = 'script.execute'
|
||||
@@ -29,11 +29,11 @@ SCRIPT_EXECUTE_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_SCRIPT_EXECUTE, SCRIPT_EXECUTE_ACTION_SCHEMA)
|
||||
def script_execute_action_to_code(config, action_id, arg_type, template_arg):
|
||||
def script_execute_action_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_execute_action(template_arg)
|
||||
type = ScriptExecuteAction.template(arg_type)
|
||||
type = ScriptExecuteAction.template(template_arg)
|
||||
yield Pvariable(action_id, rhs, type=type)
|
||||
|
||||
|
||||
@@ -44,9 +44,9 @@ SCRIPT_STOP_ACTION_SCHEMA = maybe_simple_id({
|
||||
|
||||
|
||||
@ACTION_REGISTRY.register(CONF_SCRIPT_STOP, SCRIPT_STOP_ACTION_SCHEMA)
|
||||
def script_stop_action_to_code(config, action_id, arg_type, template_arg):
|
||||
def script_stop_action_to_code(config, action_id, template_arg, args):
|
||||
for var in get_variable(config[CONF_ID]):
|
||||
yield None
|
||||
rhs = var.make_stop_action(template_arg)
|
||||
type = ScriptStopAction.template(arg_type)
|
||||
type = ScriptStopAction.template(template_arg)
|
||||
yield Pvariable(action_id, rhs, type=type)
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import math
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from esphome import automation
|
||||
@@ -6,12 +8,13 @@ from esphome.components import mqtt
|
||||
from esphome.components.mqtt import setup_mqtt_component
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ABOVE, CONF_ACCURACY_DECIMALS, CONF_ALPHA, CONF_BELOW, \
|
||||
CONF_DEBOUNCE, CONF_DELTA, CONF_EXPIRE_AFTER, CONF_EXPONENTIAL_MOVING_AVERAGE, CONF_FILTERS, \
|
||||
CONF_FILTER_NAN, CONF_FILTER_OUT, CONF_HEARTBEAT, CONF_ICON, CONF_ID, CONF_INTERNAL, \
|
||||
CONF_LAMBDA, CONF_MQTT_ID, CONF_MULTIPLY, CONF_OFFSET, CONF_ON_RAW_VALUE, CONF_ON_VALUE, \
|
||||
CONF_ON_VALUE_RANGE, CONF_OR, CONF_SEND_EVERY, CONF_SEND_FIRST_AT, \
|
||||
CONF_SLIDING_WINDOW_MOVING_AVERAGE, CONF_THROTTLE, CONF_TRIGGER_ID, CONF_UNIQUE, \
|
||||
CONF_UNIT_OF_MEASUREMENT, CONF_WINDOW_SIZE
|
||||
CONF_CALIBRATE_LINEAR, CONF_DEBOUNCE, CONF_DELTA, CONF_EXPIRE_AFTER, \
|
||||
CONF_EXPONENTIAL_MOVING_AVERAGE, CONF_FILTERS, CONF_FILTER_OUT, CONF_FROM, \
|
||||
CONF_HEARTBEAT, CONF_ICON, CONF_ID, CONF_INTERNAL, CONF_LAMBDA, CONF_MQTT_ID, \
|
||||
CONF_MULTIPLY, CONF_OFFSET, CONF_ON_RAW_VALUE, CONF_ON_VALUE, CONF_ON_VALUE_RANGE, CONF_OR, \
|
||||
CONF_SEND_EVERY, CONF_SEND_FIRST_AT, CONF_SLIDING_WINDOW_MOVING_AVERAGE, \
|
||||
CONF_THROTTLE, CONF_TO, CONF_TRIGGER_ID, CONF_UNIQUE, CONF_UNIT_OF_MEASUREMENT, \
|
||||
CONF_WINDOW_SIZE
|
||||
from esphome.core import CORE
|
||||
from esphome.cpp_generator import Pvariable, add, get_variable, process_lambda, templatable
|
||||
from esphome.cpp_types import App, Component, Nameable, PollingComponent, Trigger, \
|
||||
@@ -35,28 +38,51 @@ def validate_send_first_at(value):
|
||||
return value
|
||||
|
||||
|
||||
FILTER_KEYS = [CONF_OFFSET, CONF_MULTIPLY, CONF_FILTER_OUT, CONF_FILTER_NAN,
|
||||
FILTER_KEYS = [CONF_OFFSET, CONF_MULTIPLY, CONF_FILTER_OUT,
|
||||
CONF_SLIDING_WINDOW_MOVING_AVERAGE, CONF_EXPONENTIAL_MOVING_AVERAGE, CONF_LAMBDA,
|
||||
CONF_THROTTLE, CONF_DELTA, CONF_UNIQUE, CONF_HEARTBEAT, CONF_DEBOUNCE, CONF_OR]
|
||||
CONF_THROTTLE, CONF_DELTA, CONF_HEARTBEAT, CONF_DEBOUNCE, CONF_OR,
|
||||
CONF_CALIBRATE_LINEAR]
|
||||
|
||||
|
||||
def validate_datapoint(value):
|
||||
if isinstance(value, dict):
|
||||
return cv.Schema({
|
||||
vol.Required(CONF_FROM): cv.float_,
|
||||
vol.Required(CONF_TO): cv.float_,
|
||||
})(value)
|
||||
value = cv.string(value)
|
||||
if '->' not in value:
|
||||
raise vol.Invalid("Datapoint mapping must contain '->'")
|
||||
a, b = value.split('->', 1)
|
||||
a, b = a.strip(), b.strip()
|
||||
return validate_datapoint({
|
||||
CONF_FROM: cv.float_(a),
|
||||
CONF_TO: cv.float_(b)
|
||||
})
|
||||
|
||||
|
||||
FILTERS_SCHEMA = cv.ensure_list({
|
||||
vol.Optional(CONF_OFFSET): cv.float_,
|
||||
vol.Optional(CONF_MULTIPLY): cv.float_,
|
||||
vol.Optional(CONF_FILTER_OUT): cv.float_,
|
||||
vol.Optional(CONF_FILTER_NAN): None,
|
||||
vol.Optional(CONF_SLIDING_WINDOW_MOVING_AVERAGE): vol.All(vol.Schema({
|
||||
vol.Optional('filter_nan'): cv.invalid("The filter_nan filter has been removed. Please use "
|
||||
"'filter_out: nan' instead"),
|
||||
vol.Optional(CONF_SLIDING_WINDOW_MOVING_AVERAGE): vol.All(cv.Schema({
|
||||
vol.Optional(CONF_WINDOW_SIZE, default=15): cv.positive_not_null_int,
|
||||
vol.Optional(CONF_SEND_EVERY, default=15): cv.positive_not_null_int,
|
||||
vol.Optional(CONF_SEND_FIRST_AT): cv.positive_not_null_int,
|
||||
}), validate_send_first_at),
|
||||
vol.Optional(CONF_EXPONENTIAL_MOVING_AVERAGE): vol.Schema({
|
||||
vol.Optional(CONF_EXPONENTIAL_MOVING_AVERAGE): cv.Schema({
|
||||
vol.Optional(CONF_ALPHA, default=0.1): cv.positive_float,
|
||||
vol.Optional(CONF_SEND_EVERY, default=15): cv.positive_not_null_int,
|
||||
}),
|
||||
vol.Optional(CONF_CALIBRATE_LINEAR): vol.All(
|
||||
cv.ensure_list(validate_datapoint), vol.Length(min=2)),
|
||||
vol.Optional(CONF_LAMBDA): cv.lambda_,
|
||||
vol.Optional(CONF_THROTTLE): cv.positive_time_period_milliseconds,
|
||||
vol.Optional(CONF_DELTA): cv.float_,
|
||||
vol.Optional(CONF_UNIQUE): None,
|
||||
vol.Optional(CONF_UNIQUE): cv.invalid("The unique filter has been removed in 1.12, please "
|
||||
"replace with a delta filter with small value."),
|
||||
vol.Optional(CONF_HEARTBEAT): cv.positive_time_period_milliseconds,
|
||||
vol.Optional(CONF_DEBOUNCE): cv.positive_time_period_milliseconds,
|
||||
vol.Optional(CONF_OR): validate_recursive_filter,
|
||||
@@ -85,13 +111,12 @@ LambdaFilter = sensor_ns.class_('LambdaFilter', Filter)
|
||||
OffsetFilter = sensor_ns.class_('OffsetFilter', Filter)
|
||||
MultiplyFilter = sensor_ns.class_('MultiplyFilter', Filter)
|
||||
FilterOutValueFilter = sensor_ns.class_('FilterOutValueFilter', Filter)
|
||||
FilterOutNANFilter = sensor_ns.class_('FilterOutNANFilter', Filter)
|
||||
ThrottleFilter = sensor_ns.class_('ThrottleFilter', Filter)
|
||||
DebounceFilter = sensor_ns.class_('DebounceFilter', Filter, Component)
|
||||
HeartbeatFilter = sensor_ns.class_('HeartbeatFilter', Filter, Component)
|
||||
DeltaFilter = sensor_ns.class_('DeltaFilter', Filter)
|
||||
OrFilter = sensor_ns.class_('OrFilter', Filter)
|
||||
UniqueFilter = sensor_ns.class_('UniqueFilter', Filter)
|
||||
CalibrateLinearFilter = sensor_ns.class_('CalibrateLinearFilter', Filter)
|
||||
SensorInRangeCondition = sensor_ns.class_('SensorInRangeCondition', Filter)
SENSOR_SCHEMA = cv.MQTT_COMPONENT_SCHEMA.extend({

@@ -125,8 +150,6 @@ def setup_filter(config):
yield MultiplyFilter.new(config[CONF_MULTIPLY])
elif CONF_FILTER_OUT in config:
yield FilterOutValueFilter.new(config[CONF_FILTER_OUT])
elif CONF_FILTER_NAN in config:
yield FilterOutNANFilter.new()
elif CONF_SLIDING_WINDOW_MOVING_AVERAGE in config:
conf = config[CONF_SLIDING_WINDOW_MOVING_AVERAGE]
yield SlidingWindowMovingAverageFilter.new(conf[CONF_WINDOW_SIZE], conf[CONF_SEND_EVERY],

@@ -151,8 +174,11 @@ def setup_filter(config):
yield App.register_component(HeartbeatFilter.new(config[CONF_HEARTBEAT]))
elif CONF_DEBOUNCE in config:
yield App.register_component(DebounceFilter.new(config[CONF_DEBOUNCE]))
elif CONF_UNIQUE in config:
yield UniqueFilter.new()
elif CONF_CALIBRATE_LINEAR in config:
x = [conf[CONF_FROM] for conf in config[CONF_CALIBRATE_LINEAR]]
y = [conf[CONF_TO] for conf in config[CONF_CALIBRATE_LINEAR]]
k, b = fit_linear(x, y)
yield CalibrateLinearFilter.new(k, b)


def setup_filters(config):

@@ -181,11 +207,11 @@ def setup_sensor_core_(sensor_var, config):
for conf in config.get(CONF_ON_VALUE, []):
rhs = sensor_var.make_state_trigger()
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
automation.build_automation(trigger, float_, conf)
automation.build_automations(trigger, [(float_, 'x')], conf)
for conf in config.get(CONF_ON_RAW_VALUE, []):
rhs = sensor_var.make_raw_state_trigger()
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
automation.build_automation(trigger, float_, conf)
automation.build_automations(trigger, [(float_, 'x')], conf)
for conf in config.get(CONF_ON_VALUE_RANGE, []):
rhs = sensor_var.make_value_range_trigger()
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)

@@ -198,7 +224,7 @@ def setup_sensor_core_(sensor_var, config):
for template_ in templatable(conf[CONF_BELOW], float_, float_):
yield
add(trigger.set_max(template_))
automation.build_automation(trigger, float_, conf)
automation.build_automations(trigger, [(float_, 'x')], conf)

mqtt_ = sensor_var.Pget_mqtt()
if CONF_EXPIRE_AFTER in config:

@@ -232,11 +258,11 @@ SENSOR_IN_RANGE_CONDITION_SCHEMA = vol.All({

@CONDITION_REGISTRY.register(CONF_SENSOR_IN_RANGE, SENSOR_IN_RANGE_CONDITION_SCHEMA)
def sensor_in_range_to_code(config, condition_id, arg_type, template_arg):
def sensor_in_range_to_code(config, condition_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_sensor_in_range_condition(template_arg)
type = SensorInRangeCondition.template(arg_type)
type = SensorInRangeCondition.template(template_arg)
cond = Pvariable(condition_id, rhs, type=type)

if CONF_ABOVE in config:

@@ -260,3 +286,28 @@ def core_to_hass_config(data, config):
if CONF_ICON in config:
ret['icon'] = config[CONF_ICON]
return ret


def _mean(xs):
return sum(xs) / len(xs)


def _std(x):
return math.sqrt(sum((x_ - _mean(x))**2 for x_ in x) / (len(x) - 1))


def _correlation_coeff(x, y):
m_x, m_y = _mean(x), _mean(y)
s_xy = sum((x_ - m_x) * (y_ - m_y) for x_, y_ in zip(x, y))
s_sq_x = sum((x_ - m_x)**2 for x_ in x)
s_sq_y = sum((y_ - m_y)**2 for y_ in y)
return s_xy / math.sqrt(s_sq_x * s_sq_y)


def fit_linear(x, y):
assert len(x) == len(y)
m_x, m_y = _mean(x), _mean(y)
r = _correlation_coeff(x, y)
k = r * (_std(y) / _std(x))
b = m_y - k * m_x
return k, b
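The new calibrate_linear filter fits a least-squares line through the configured (from, to) pairs; the r * (std_y / std_x) slope used above is algebraically the same as Sxy / Sxx. A minimal standalone sketch of the same maths, with invented reference points, shows what the generated filter ends up computing:

# Standalone check of the least-squares fit behind fit_linear (example values only).
x = [0.0, 50.0, 100.0]    # raw readings (the "from" values)
y = [0.0, 500.0, 1000.0]  # calibrated readings (the "to" values)

m_x, m_y = sum(x) / len(x), sum(y) / len(y)
k = sum((xi - m_x) * (yi - m_y) for xi, yi in zip(x, y)) / sum((xi - m_x) ** 2 for xi in x)
b = m_y - k * m_x
print(k, b)  # -> 10.0 0.0: the generated CalibrateLinearFilter maps value -> value * 10.0 + 0.0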
@@ -25,7 +25,7 @@ MUX = {
ADS1115Gain = sensor.sensor_ns.enum('ADS1115Gain')
GAIN = {
'6.144': ADS1115Gain.ADS1115_GAIN_6P144,
'4.096': ADS1115Gain.ADS1115_GAIN_6P096,
'4.096': ADS1115Gain.ADS1115_GAIN_4P096,
'2.048': ADS1115Gain.ADS1115_GAIN_2P048,
'1.024': ADS1115Gain.ADS1115_GAIN_1P024,
'0.512': ADS1115Gain.ADS1115_GAIN_0P512,

@@ -64,7 +64,7 @@ PLATFORM_SCHEMA = sensor.PLATFORM_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(BME680GasResistanceSensor),
})),
vol.Optional(CONF_IIR_FILTER): cv.one_of(*IIR_FILTER_OPTIONS, upper=True),
vol.Optional(CONF_HEATER): vol.Any(None, vol.All(vol.Schema({
vol.Optional(CONF_HEATER): vol.Any(None, vol.All(cv.Schema({
vol.Optional(CONF_TEMPERATURE, default=320): vol.All(vol.Coerce(int), vol.Range(200, 400)),
vol.Optional(CONF_DURATION, default='150ms'): vol.All(
cv.positive_time_period_milliseconds, vol.Range(max=core.TimePeriod(milliseconds=4032)))

@@ -12,7 +12,8 @@ DutyCycleSensor = sensor.sensor_ns.class_('DutyCycleSensor', sensor.PollingSenso

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(DutyCycleSensor),
vol.Required(CONF_PIN): pins.internal_gpio_input_pin_schema,
vol.Required(CONF_PIN): vol.All(pins.internal_gpio_input_pin_schema,
pins.validate_has_interrupt),
vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
}).extend(cv.COMPONENT_SCHEMA.schema))

@@ -24,7 +24,7 @@ INA3221PowerSensor = sensor.sensor_ns.class_('INA3221PowerSensor', sensor.EmptyP

SENSOR_KEYS = [CONF_BUS_VOLTAGE, CONF_SHUNT_VOLTAGE, CONF_CURRENT, CONF_POWER]

INA3221_CHANNEL_SCHEMA = vol.All(vol.Schema({
INA3221_CHANNEL_SCHEMA = vol.All(cv.Schema({
vol.Optional(CONF_BUS_VOLTAGE): cv.nameable(sensor.SENSOR_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(INA3221VoltageSensor),
})),

@@ -4,7 +4,7 @@ from esphome import pins
from esphome.components import sensor
import esphome.config_validation as cv
from esphome.const import CONF_COUNT_MODE, CONF_FALLING_EDGE, CONF_ID, CONF_INTERNAL_FILTER, \
CONF_NAME, CONF_PIN, CONF_RISING_EDGE, CONF_UPDATE_INTERVAL
CONF_NAME, CONF_PIN, CONF_RISING_EDGE, CONF_UPDATE_INTERVAL, CONF_NUMBER
from esphome.core import CORE
from esphome.cpp_generator import Pvariable, add
from esphome.cpp_helpers import gpio_input_pin_expression, setup_component

@@ -38,10 +38,17 @@ def validate_internal_filter(value):
return cv.positive_time_period_microseconds(value)


def validate_pulse_counter_pin(value):
value = pins.internal_gpio_input_pin_schema(value)
if CORE.is_esp8266 and value[CONF_NUMBER] >= 16:
raise vol.Invalid("Pins GPIO16 and GPIO17 cannot be used as pulse counters on ESP8266.")
return value


PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(PulseCounterSensorComponent),
vol.Required(CONF_PIN): pins.internal_gpio_input_pin_schema,
vol.Optional(CONF_COUNT_MODE): vol.Schema({
vol.Required(CONF_PIN): validate_pulse_counter_pin,
vol.Optional(CONF_COUNT_MODE): cv.Schema({
vol.Required(CONF_RISING_EDGE): COUNT_MODE_SCHEMA,
vol.Required(CONF_FALLING_EDGE): COUNT_MODE_SCHEMA,
}),

@@ -3,16 +3,16 @@ import voluptuous as vol
from esphome import pins
from esphome.components import sensor
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_NAME, CONF_RESOLUTION
from esphome.const import CONF_ID, CONF_NAME, CONF_RESOLUTION, CONF_MIN_VALUE, CONF_MAX_VALUE
from esphome.cpp_generator import Pvariable, add
from esphome.cpp_helpers import gpio_input_pin_expression, setup_component
from esphome.cpp_types import App, Component

RotaryEncoderResolution = sensor.sensor_ns.enum('RotaryEncoderResolution')
RESOLUTIONS = {
'1': RotaryEncoderResolution.ROTARY_ENCODER_1_PULSE_PER_CYCLE,
'2': RotaryEncoderResolution.ROTARY_ENCODER_2_PULSES_PER_CYCLE,
'4': RotaryEncoderResolution.ROTARY_ENCODER_4_PULSES_PER_CYCLE,
1: RotaryEncoderResolution.ROTARY_ENCODER_1_PULSE_PER_CYCLE,
2: RotaryEncoderResolution.ROTARY_ENCODER_2_PULSES_PER_CYCLE,
4: RotaryEncoderResolution.ROTARY_ENCODER_4_PULSES_PER_CYCLE,
}

CONF_PIN_A = 'pin_a'

@@ -21,13 +21,28 @@ CONF_PIN_RESET = 'pin_reset'

RotaryEncoderSensor = sensor.sensor_ns.class_('RotaryEncoderSensor', sensor.Sensor, Component)


def validate_min_max_value(config):
if CONF_MIN_VALUE in config and CONF_MAX_VALUE in config:
min_val = config[CONF_MIN_VALUE]
max_val = config[CONF_MAX_VALUE]
if min_val >= max_val:
raise vol.Invalid("Max value {} must be larger than min value {}"
"".format(max_val, min_val))
return config


PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(RotaryEncoderSensor),
vol.Required(CONF_PIN_A): pins.internal_gpio_input_pin_schema,
vol.Required(CONF_PIN_B): pins.internal_gpio_input_pin_schema,
vol.Required(CONF_PIN_A): vol.All(pins.internal_gpio_input_pin_schema,
pins.validate_has_interrupt),
vol.Required(CONF_PIN_B): vol.All(pins.internal_gpio_input_pin_schema,
pins.validate_has_interrupt),
vol.Optional(CONF_PIN_RESET): pins.internal_gpio_input_pin_schema,
vol.Optional(CONF_RESOLUTION): cv.one_of(*RESOLUTIONS, string=True),
}).extend(cv.COMPONENT_SCHEMA.schema))
vol.Optional(CONF_RESOLUTION): cv.one_of(*RESOLUTIONS, int=True),
vol.Optional(CONF_MIN_VALUE): cv.int_,
vol.Optional(CONF_MAX_VALUE): cv.int_,
}).extend(cv.COMPONENT_SCHEMA.schema), validate_min_max_value)


def to_code(config):
@@ -45,6 +60,10 @@ def to_code(config):
if CONF_RESOLUTION in config:
resolution = RESOLUTIONS[config[CONF_RESOLUTION]]
add(encoder.set_resolution(resolution))
if CONF_MIN_VALUE in config:
add(encoder.set_min_value(config[CONF_MIN_VALUE]))
if CONF_MAX_VALUE in config:
add(encoder.set_max_value(config[CONF_MAX_VALUE]))

sensor.setup_sensor(encoder, config)
setup_component(encoder, config)
esphome/components/sensor/sds011.py (new file, 78 lines)
@@ -0,0 +1,78 @@
import voluptuous as vol

from esphome.components import sensor, uart
from esphome.components.uart import UARTComponent
import esphome.config_validation as cv
from esphome.const import (CONF_ID, CONF_NAME, CONF_PM_10_0, CONF_PM_2_5, CONF_RX_ONLY,
CONF_UART_ID, CONF_UPDATE_INTERVAL)
from esphome.cpp_generator import Pvariable, add, get_variable
from esphome.cpp_helpers import setup_component
from esphome.cpp_types import App, Component

DEPENDENCIES = ['uart']

SDS011Component = sensor.sensor_ns.class_('SDS011Component', uart.UARTDevice, Component)
SDS011Sensor = sensor.sensor_ns.class_('SDS011Sensor', sensor.EmptySensor)


def validate_sds011_rx_mode(value):
if CONF_UPDATE_INTERVAL in value and not value.get(CONF_RX_ONLY):
update_interval = value[CONF_UPDATE_INTERVAL]
if update_interval.total_minutes > 30:
raise vol.Invalid("Maximum update interval is 30min")
elif value.get(CONF_RX_ONLY) and CONF_UPDATE_INTERVAL in value:
# update_interval does not affect anything in rx-only mode, let's warn user about
# that
raise vol.Invalid("update_interval has no effect in rx_only mode. Please remove it.",
path=['update_interval'])
return value


SDS011_SENSOR_SCHEMA = sensor.SENSOR_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(SDS011Sensor),
})

PLATFORM_SCHEMA = vol.All(sensor.PLATFORM_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(SDS011Component),
cv.GenerateID(CONF_UART_ID): cv.use_variable_id(UARTComponent),

vol.Optional(CONF_RX_ONLY): cv.boolean,

vol.Optional(CONF_PM_2_5): cv.nameable(SDS011_SENSOR_SCHEMA),
vol.Optional(CONF_PM_10_0): cv.nameable(SDS011_SENSOR_SCHEMA),
vol.Optional(CONF_UPDATE_INTERVAL): cv.positive_time_period_minutes,
}).extend(cv.COMPONENT_SCHEMA.schema), cv.has_at_least_one_key(CONF_PM_2_5, CONF_PM_10_0),
validate_sds011_rx_mode)


def to_code(config):
for uart_ in get_variable(config[CONF_UART_ID]):
yield

rhs = App.make_sds011(uart_)
sds011 = Pvariable(config[CONF_ID], rhs)

if CONF_UPDATE_INTERVAL in config:
add(sds011.set_update_interval_min(config.get(CONF_UPDATE_INTERVAL)))
if CONF_RX_ONLY in config:
add(sds011.set_rx_mode_only(config[CONF_RX_ONLY]))

if CONF_PM_2_5 in config:
conf = config[CONF_PM_2_5]
sensor.register_sensor(sds011.make_pm_2_5_sensor(conf[CONF_NAME]), conf)
if CONF_PM_10_0 in config:
conf = config[CONF_PM_10_0]
sensor.register_sensor(sds011.make_pm_10_0_sensor(conf[CONF_NAME]), conf)

setup_component(sds011, config)


BUILD_FLAGS = '-DUSE_SDS011'


def to_hass_config(data, config):
ret = []
for key in (CONF_PM_2_5, CONF_PM_10_0):
if key in config:
ret.append(sensor.core_to_hass_config(data, config[key]))
return ret
@@ -35,20 +35,20 @@ def to_code(config):
BUILD_FLAGS = '-DUSE_TEMPLATE_SENSOR'

CONF_SENSOR_TEMPLATE_PUBLISH = 'sensor.template.publish'
SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA = vol.Schema({
SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA = cv.Schema({
vol.Required(CONF_ID): cv.use_variable_id(sensor.Sensor),
vol.Required(CONF_STATE): cv.templatable(cv.float_),
})


@ACTION_REGISTRY.register(CONF_SENSOR_TEMPLATE_PUBLISH, SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA)
def sensor_template_publish_to_code(config, action_id, arg_type, template_arg):
def sensor_template_publish_to_code(config, action_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_sensor_publish_action(template_arg)
type = SensorPublishAction.template(arg_type)
type = SensorPublishAction.template(template_arg)
action = Pvariable(action_id, rhs, type=type)
for template_ in templatable(config[CONF_STATE], arg_type, float_):
for template_ in templatable(config[CONF_STATE], args, float_):
yield None
add(action.set_state(template_))
yield action

@@ -3,23 +3,32 @@ import voluptuous as vol
from esphome import pins
from esphome.components import sensor
import esphome.config_validation as cv
from esphome.const import CONF_ECHO_PIN, CONF_ID, CONF_NAME, CONF_TIMEOUT_METER, \
CONF_TIMEOUT_TIME, CONF_TRIGGER_PIN, CONF_UPDATE_INTERVAL
from esphome.const import CONF_ECHO_PIN, CONF_ID, CONF_NAME, CONF_TRIGGER_PIN, \
CONF_UPDATE_INTERVAL, CONF_TIMEOUT
from esphome.cpp_generator import Pvariable, add
from esphome.cpp_helpers import gpio_input_pin_expression, gpio_output_pin_expression, \
setup_component
from esphome.cpp_types import App

CONF_PULSE_TIME = 'pulse_time'

UltrasonicSensorComponent = sensor.sensor_ns.class_('UltrasonicSensorComponent',
sensor.PollingSensorComponent)


PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(UltrasonicSensorComponent),
vol.Required(CONF_TRIGGER_PIN): pins.gpio_output_pin_schema,
vol.Required(CONF_ECHO_PIN): pins.internal_gpio_input_pin_schema,
vol.Exclusive(CONF_TIMEOUT_METER, 'timeout'): cv.positive_float,
vol.Exclusive(CONF_TIMEOUT_TIME, 'timeout'): cv.positive_time_period_microseconds,

vol.Optional(CONF_TIMEOUT): cv.distance,
vol.Optional(CONF_PULSE_TIME): cv.positive_time_period_microseconds,
vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,

vol.Optional('timeout_meter'): cv.invalid("The timeout_meter option has been renamed "
"to 'timeout'."),
vol.Optional('timeout_time'): cv.invalid("The timeout_time option has been removed. Please "
"use 'timeout'."),
}))


@@ -32,10 +41,11 @@ def to_code(config):
config.get(CONF_UPDATE_INTERVAL))
ultrasonic = Pvariable(config[CONF_ID], rhs)

if CONF_TIMEOUT_TIME in config:
add(ultrasonic.set_timeout_us(config[CONF_TIMEOUT_TIME]))
elif CONF_TIMEOUT_METER in config:
add(ultrasonic.set_timeout_m(config[CONF_TIMEOUT_METER]))
if CONF_TIMEOUT in config:
add(ultrasonic.set_timeout_us(config[CONF_TIMEOUT] / (0.000343 / 2)))

if CONF_PULSE_TIME in config:
add(ultrasonic.set_pulse_time_us(config[CONF_PULSE_TIME]))

sensor.setup_sensor(ultrasonic, config)
setup_component(ultrasonic, config)
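The new timeout conversion divides the configured distance by half the speed of sound in air (about 0.000343 m/µs), since the echo has to travel out and back. A quick worked example with an illustrative 2 m timeout:

# Worked example of the timeout conversion above (the 2 m figure is only illustrative).
timeout_m = 2.0
timeout_us = timeout_m / (0.000343 / 2)  # sound ~0.000343 m/us; the round trip halves the rate
print(round(timeout_us))  # -> 11662, i.e. a 2 m timeout waits roughly 11.7 ms for the echo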
esphome/components/servo.py (new file, 59 lines)
@@ -0,0 +1,59 @@
import voluptuous as vol

from esphome.automation import ACTION_REGISTRY
from esphome.components.output import FloatOutput
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_IDLE_LEVEL, CONF_MAX_LEVEL, CONF_MIN_LEVEL, CONF_OUTPUT, \
CONF_LEVEL
from esphome.cpp_generator import Pvariable, add, get_variable, templatable
from esphome.cpp_helpers import setup_component
from esphome.cpp_types import App, Component, esphome_ns, Action, float_

Servo = esphome_ns.class_('Servo', Component)
ServoWriteAction = esphome_ns.class_('ServoWriteAction', Action)

MULTI_CONF = True

CONFIG_SCHEMA = cv.Schema({
vol.Required(CONF_ID): cv.declare_variable_id(Servo),
vol.Required(CONF_OUTPUT): cv.use_variable_id(FloatOutput),
vol.Optional(CONF_MIN_LEVEL, default='3%'): cv.percentage,
vol.Optional(CONF_IDLE_LEVEL, default='7.5%'): cv.percentage,
vol.Optional(CONF_MAX_LEVEL, default='12%'): cv.percentage,
}).extend(cv.COMPONENT_SCHEMA.schema)


def to_code(config):
for out in get_variable(config[CONF_OUTPUT]):
yield

rhs = App.register_component(Servo.new(out))
servo = Pvariable(config[CONF_ID], rhs)

add(servo.set_min_level(config[CONF_MIN_LEVEL]))
add(servo.set_idle_level(config[CONF_IDLE_LEVEL]))
add(servo.set_max_level(config[CONF_MAX_LEVEL]))

setup_component(servo, config)


BUILD_FLAGS = '-DUSE_SERVO'

CONF_SERVO_WRITE = 'servo.write'
SERVO_WRITE_ACTION_SCHEMA = cv.Schema({
vol.Required(CONF_ID): cv.use_variable_id(Servo),
vol.Required(CONF_LEVEL): cv.templatable(cv.possibly_negative_percentage),
})


@ACTION_REGISTRY.register(CONF_SERVO_WRITE, SERVO_WRITE_ACTION_SCHEMA)
def servo_write_to_code(config, action_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = ServoWriteAction.new(template_arg, var)
type = ServoWriteAction.template(template_arg)
action = Pvariable(action_id, rhs, type=type)
for template_ in templatable(config[CONF_LEVEL], args, float_):
yield None
add(action.set_value(template_))
yield action
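For context, the three percentage defaults correspond to the usual servo pulse range on a 50 Hz PWM output: with a 20 ms period, 3% is about 0.6 ms, 7.5% is 1.5 ms (centre) and 12% is about 2.4 ms. Below is a hedged sketch of the level-to-duty mapping this implies; the actual mapping lives in esphome-core, not in this file, so treat it as an assumption for illustration only:

# Assumed mapping from a servo.write level (-1.0..1.0) to an output duty cycle.
# Illustration only; the authoritative implementation is in esphome-core.
MIN_LEVEL, IDLE_LEVEL, MAX_LEVEL = 0.03, 0.075, 0.12

def level_to_duty(level):
    # negative levels interpolate towards min_level, positive towards max_level
    if level < 0:
        return IDLE_LEVEL + level * (IDLE_LEVEL - MIN_LEVEL)
    return IDLE_LEVEL + level * (MAX_LEVEL - IDLE_LEVEL)

print(level_to_duty(-1.0), level_to_duty(0.0), level_to_duty(1.0))  # -> 0.03 0.075 0.12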
@@ -12,7 +12,7 @@ SPIComponent = esphome_ns.class_('SPIComponent', Component)
SPIDevice = esphome_ns.class_('SPIDevice')
MULTI_CONF = True

CONFIG_SCHEMA = vol.All(vol.Schema({
CONFIG_SCHEMA = vol.All(cv.Schema({
cv.GenerateID(): cv.declare_variable_id(SPIComponent),
vol.Required(CONF_CLK_PIN): pins.gpio_output_pin_schema,
vol.Optional(CONF_MISO_PIN): pins.gpio_input_pin_schema,

@@ -8,7 +8,7 @@ from esphome.cpp_types import App, Component, esphome_ns

StatusLEDComponent = esphome_ns.class_('StatusLEDComponent', Component)

CONFIG_SCHEMA = vol.Schema({
CONFIG_SCHEMA = cv.Schema({
cv.GenerateID(): cv.declare_variable_id(StatusLEDComponent),
vol.Optional(CONF_PIN): pins.gpio_output_pin_schema,
}).extend(cv.COMPONENT_SCHEMA.schema)

@@ -60,7 +60,7 @@ def validate_speed(value):
return value


STEPPER_SCHEMA = vol.Schema({
STEPPER_SCHEMA = cv.Schema({
vol.Required(CONF_MAX_SPEED): validate_speed,
vol.Optional(CONF_ACCELERATION): validate_acceleration,
vol.Optional(CONF_DECELERATION): validate_acceleration,

@@ -85,40 +85,40 @@ def setup_stepper(stepper_var, config):
BUILD_FLAGS = '-DUSE_STEPPER'

CONF_STEPPER_SET_TARGET = 'stepper.set_target'
STEPPER_SET_TARGET_ACTION_SCHEMA = vol.Schema({
STEPPER_SET_TARGET_ACTION_SCHEMA = cv.Schema({
vol.Required(CONF_ID): cv.use_variable_id(Stepper),
vol.Required(CONF_TARGET): cv.templatable(cv.int_),
})


@ACTION_REGISTRY.register(CONF_STEPPER_SET_TARGET, STEPPER_SET_TARGET_ACTION_SCHEMA)
def stepper_set_target_to_code(config, action_id, arg_type, template_arg):
def stepper_set_target_to_code(config, action_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_set_target_action(template_arg)
type = SetTargetAction.template(arg_type)
type = SetTargetAction.template(template_arg)
action = Pvariable(action_id, rhs, type=type)
for template_ in templatable(config[CONF_TARGET], arg_type, int32):
for template_ in templatable(config[CONF_TARGET], args, int32):
yield None
add(action.set_target(template_))
yield action


CONF_STEPPER_REPORT_POSITION = 'stepper.report_position'
STEPPER_REPORT_POSITION_ACTION_SCHEMA = vol.Schema({
STEPPER_REPORT_POSITION_ACTION_SCHEMA = cv.Schema({
vol.Required(CONF_ID): cv.use_variable_id(Stepper),
vol.Required(CONF_POSITION): cv.templatable(cv.int_),
})


@ACTION_REGISTRY.register(CONF_STEPPER_REPORT_POSITION, STEPPER_REPORT_POSITION_ACTION_SCHEMA)
def stepper_report_position_to_code(config, action_id, arg_type, template_arg):
def stepper_report_position_to_code(config, action_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_report_position_action(template_arg)
type = ReportPositionAction.template(arg_type)
type = ReportPositionAction.template(template_arg)
action = Pvariable(action_id, rhs, type=type)
for template_ in templatable(config[CONF_POSITION], arg_type, int32):
for template_ in templatable(config[CONF_POSITION], args, int32):
yield None
add(action.set_position(template_))
yield action
@@ -6,6 +6,7 @@ import voluptuous as vol
from esphome import core
import esphome.config_validation as cv
from esphome.core import EsphomeError
from esphome.py_compat import string_types

_LOGGER = logging.getLogger(__name__)

@@ -31,7 +32,7 @@ def validate_substitution_key(value):
return value


CONFIG_SCHEMA = vol.Schema({
CONFIG_SCHEMA = cv.Schema({
validate_substitution_key: cv.string_strict,
})

@@ -93,7 +94,7 @@ def _substitute_item(substitutions, item, path):
for old, new in replace_keys:
item[new] = item[old]
del item[old]
elif isinstance(item, str):
elif isinstance(item, string_types):
sub = _expand_substitutions(substitutions, item, path)
if sub != item:
return sub

@@ -9,7 +9,7 @@ from esphome.const import CONF_ICON, CONF_ID, CONF_INTERNAL, CONF_INVERTED, CONF
CONF_ON_TURN_OFF, CONF_ON_TURN_ON, CONF_OPTIMISTIC, CONF_TRIGGER_ID
from esphome.core import CORE
from esphome.cpp_generator import Pvariable, add, get_variable
from esphome.cpp_types import Action, App, Nameable, NoArg, Trigger, esphome_ns
from esphome.cpp_types import Action, App, Nameable, Trigger, esphome_ns

PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({

@@ -25,8 +25,8 @@ TurnOffAction = switch_ns.class_('TurnOffAction', Action)
TurnOnAction = switch_ns.class_('TurnOnAction', Action)

SwitchCondition = switch_ns.class_('SwitchCondition', Condition)
SwitchTurnOnTrigger = switch_ns.class_('SwitchTurnOnTrigger', Trigger.template(NoArg))
SwitchTurnOffTrigger = switch_ns.class_('SwitchTurnOffTrigger', Trigger.template(NoArg))
SwitchTurnOnTrigger = switch_ns.class_('SwitchTurnOnTrigger', Trigger.template())
SwitchTurnOffTrigger = switch_ns.class_('SwitchTurnOffTrigger', Trigger.template())

SWITCH_SCHEMA = cv.MQTT_COMMAND_COMPONENT_SCHEMA.extend({
cv.GenerateID(CONF_MQTT_ID): cv.declare_variable_id(MQTTSwitchComponent),

@@ -53,11 +53,11 @@ def setup_switch_core_(switch_var, config):
for conf in config.get(CONF_ON_TURN_ON, []):
rhs = switch_var.make_switch_turn_on_trigger()
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
automation.build_automation(trigger, NoArg, conf)
automation.build_automations(trigger, [], conf)
for conf in config.get(CONF_ON_TURN_OFF, []):
rhs = switch_var.make_switch_turn_off_trigger()
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
automation.build_automation(trigger, NoArg, conf)
automation.build_automations(trigger, [], conf)

setup_mqtt_component(switch_var.Pget_mqtt(), config)

@@ -83,11 +83,11 @@ SWITCH_TOGGLE_ACTION_SCHEMA = maybe_simple_id({


@ACTION_REGISTRY.register(CONF_SWITCH_TOGGLE, SWITCH_TOGGLE_ACTION_SCHEMA)
def switch_toggle_to_code(config, action_id, arg_type, template_arg):
def switch_toggle_to_code(config, action_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_toggle_action(template_arg)
type = ToggleAction.template(arg_type)
type = ToggleAction.template(template_arg)
yield Pvariable(action_id, rhs, type=type)


@@ -98,11 +98,11 @@ SWITCH_TURN_OFF_ACTION_SCHEMA = maybe_simple_id({


@ACTION_REGISTRY.register(CONF_SWITCH_TURN_OFF, SWITCH_TURN_OFF_ACTION_SCHEMA)
def switch_turn_off_to_code(config, action_id, arg_type, template_arg):
def switch_turn_off_to_code(config, action_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_turn_off_action(template_arg)
type = TurnOffAction.template(arg_type)
type = TurnOffAction.template(template_arg)
yield Pvariable(action_id, rhs, type=type)


@@ -113,11 +113,11 @@ SWITCH_TURN_ON_ACTION_SCHEMA = maybe_simple_id({


@ACTION_REGISTRY.register(CONF_SWITCH_TURN_ON, SWITCH_TURN_ON_ACTION_SCHEMA)
def switch_turn_on_to_code(config, action_id, arg_type, template_arg):
def switch_turn_on_to_code(config, action_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_turn_on_action(template_arg)
type = TurnOnAction.template(arg_type)
type = TurnOnAction.template(template_arg)
yield Pvariable(action_id, rhs, type=type)


@@ -128,11 +128,11 @@ SWITCH_IS_ON_CONDITION_SCHEMA = maybe_simple_id({


@CONDITION_REGISTRY.register(CONF_SWITCH_IS_ON, SWITCH_IS_ON_CONDITION_SCHEMA)
def switch_is_on_to_code(config, condition_id, arg_type, template_arg):
def switch_is_on_to_code(config, condition_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_switch_is_on_condition(template_arg)
type = SwitchCondition.template(arg_type)
type = SwitchCondition.template(template_arg)
yield Pvariable(condition_id, rhs, type=type)


@@ -143,11 +143,11 @@ SWITCH_IS_OFF_CONDITION_SCHEMA = maybe_simple_id({


@CONDITION_REGISTRY.register(CONF_SWITCH_IS_OFF, SWITCH_IS_OFF_CONDITION_SCHEMA)
def switch_is_off_to_code(config, condition_id, arg_type, template_arg):
def switch_is_off_to_code(config, condition_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_switch_is_off_condition(template_arg)
type = SwitchCondition.template(arg_type)
type = SwitchCondition.template(template_arg)
yield Pvariable(condition_id, rhs, type=type)
@@ -43,7 +43,7 @@ RCSwitchTypeDTransmitter = remote_ns.class_('RCSwitchTypeDTransmitter', RCSwitch

def validate_raw(value):
if isinstance(value, dict):
return vol.Schema({
return cv.Schema({
cv.GenerateID(): cv.declare_variable_id(int32),
vol.Required(CONF_DATA): [vol.Any(vol.Coerce(int), cv.time_period_microseconds)],
vol.Optional(CONF_CARRIER_FREQUENCY): vol.All(cv.frequency, vol.Coerce(int)),

@@ -55,29 +55,29 @@ def validate_raw(value):

PLATFORM_SCHEMA = cv.nameable(switch.SWITCH_PLATFORM_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(RemoteTransmitter),
vol.Optional(CONF_JVC): vol.Schema({
vol.Optional(CONF_JVC): cv.Schema({
vol.Required(CONF_DATA): cv.hex_uint32_t,
}),
vol.Optional(CONF_LG): vol.Schema({
vol.Optional(CONF_LG): cv.Schema({
vol.Required(CONF_DATA): cv.hex_uint32_t,
vol.Optional(CONF_NBITS, default=28): cv.one_of(28, 32, int=True),
}),
vol.Optional(CONF_NEC): vol.Schema({
vol.Optional(CONF_NEC): cv.Schema({
vol.Required(CONF_ADDRESS): cv.hex_uint16_t,
vol.Required(CONF_COMMAND): cv.hex_uint16_t,
}),
vol.Optional(CONF_SAMSUNG): vol.Schema({
vol.Optional(CONF_SAMSUNG): cv.Schema({
vol.Required(CONF_DATA): cv.hex_uint32_t,
}),
vol.Optional(CONF_SONY): vol.Schema({
vol.Optional(CONF_SONY): cv.Schema({
vol.Required(CONF_DATA): cv.hex_uint32_t,
vol.Optional(CONF_NBITS, default=12): cv.one_of(12, 15, 20, int=True),
}),
vol.Optional(CONF_PANASONIC): vol.Schema({
vol.Optional(CONF_PANASONIC): cv.Schema({
vol.Required(CONF_ADDRESS): cv.hex_uint16_t,
vol.Required(CONF_COMMAND): cv.hex_uint32_t,
}),
vol.Optional(CONF_RC5): vol.Schema({
vol.Optional(CONF_RC5): cv.Schema({
vol.Required(CONF_ADDRESS): vol.All(cv.hex_int, vol.Range(min=0, max=0x1F)),
vol.Required(CONF_COMMAND): vol.All(cv.hex_int, vol.Range(min=0, max=0x3F)),
}),

@@ -88,7 +88,7 @@ PLATFORM_SCHEMA = cv.nameable(switch.SWITCH_PLATFORM_SCHEMA.extend({
vol.Optional(CONF_RC_SWITCH_TYPE_C): RC_SWITCH_TYPE_C_SCHEMA,
vol.Optional(CONF_RC_SWITCH_TYPE_D): RC_SWITCH_TYPE_D_SCHEMA,

vol.Optional(CONF_REPEAT): vol.Any(cv.positive_not_null_int, vol.Schema({
vol.Optional(CONF_REPEAT): vol.Any(cv.positive_not_null_int, cv.Schema({
vol.Required(CONF_TIMES): cv.positive_not_null_int,
vol.Required(CONF_WAIT_TIME): cv.positive_time_period_microseconds,
})),
@@ -8,7 +8,7 @@ from esphome.const import CONF_ASSUMED_STATE, CONF_ID, CONF_LAMBDA, CONF_NAME, C
CONF_RESTORE_STATE, CONF_STATE, CONF_TURN_OFF_ACTION, CONF_TURN_ON_ACTION
from esphome.cpp_generator import Pvariable, add, get_variable, process_lambda, templatable
from esphome.cpp_helpers import setup_component
from esphome.cpp_types import Action, App, Component, NoArg, bool_, optional
from esphome.cpp_types import Action, App, Component, bool_, optional

TemplateSwitch = switch.switch_ns.class_('TemplateSwitch', switch.Switch, Component)
SwitchPublishAction = switch.switch_ns.class_('SwitchPublishAction', Action)

@@ -36,11 +36,11 @@ def to_code(config):
yield
add(template.set_state_lambda(template_))
if CONF_TURN_OFF_ACTION in config:
automation.build_automation(template.get_turn_off_trigger(), NoArg,
config[CONF_TURN_OFF_ACTION])
automation.build_automations(template.get_turn_off_trigger(), [],
config[CONF_TURN_OFF_ACTION])
if CONF_TURN_ON_ACTION in config:
automation.build_automation(template.get_turn_on_trigger(), NoArg,
config[CONF_TURN_ON_ACTION])
automation.build_automations(template.get_turn_on_trigger(), [],
config[CONF_TURN_ON_ACTION])
if CONF_OPTIMISTIC in config:
add(template.set_optimistic(config[CONF_OPTIMISTIC]))
if CONF_ASSUMED_STATE in config:

@@ -55,20 +55,20 @@ def to_code(config):
BUILD_FLAGS = '-DUSE_TEMPLATE_SWITCH'

CONF_SWITCH_TEMPLATE_PUBLISH = 'switch.template.publish'
SWITCH_TEMPLATE_PUBLISH_ACTION_SCHEMA = vol.Schema({
SWITCH_TEMPLATE_PUBLISH_ACTION_SCHEMA = cv.Schema({
vol.Required(CONF_ID): cv.use_variable_id(switch.Switch),
vol.Required(CONF_STATE): cv.templatable(cv.boolean),
})


@ACTION_REGISTRY.register(CONF_SWITCH_TEMPLATE_PUBLISH, SWITCH_TEMPLATE_PUBLISH_ACTION_SCHEMA)
def switch_template_publish_to_code(config, action_id, arg_type, template_arg):
def switch_template_publish_to_code(config, action_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_switch_publish_action(template_arg)
type = SwitchPublishAction.template(arg_type)
type = SwitchPublishAction.template(template_arg)
action = Pvariable(action_id, rhs, type=type)
for template_ in templatable(config[CONF_STATE], arg_type, bool_):
for template_ in templatable(config[CONF_STATE], args, bool_):
yield None
add(action.set_state(template_))
yield action
@@ -20,7 +20,7 @@ def validate_data(value):
if isinstance(value, str):
return value
if isinstance(value, list):
return vol.Schema([cv.hex_uint8_t])(value)
return cv.Schema([cv.hex_uint8_t])(value)
raise vol.Invalid("data must either be a string wrapped in quotes or a list of bytes")


@@ -44,7 +44,7 @@ def setup_text_sensor_core_(text_sensor_var, config):
for conf in config.get(CONF_ON_VALUE, []):
rhs = text_sensor_var.make_state_trigger()
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
automation.build_automation(trigger, std_string, conf)
automation.build_automations(trigger, [(std_string, 'x')], conf)

setup_mqtt_component(text_sensor_var.get_mqtt(), config)

@@ -56,9 +56,10 @@ def setup_text_sensor(text_sensor_obj, config):


def register_text_sensor(var, config):
text_sensor_var = Pvariable(config[CONF_ID], var, has_side_effects=True)
add(App.register_text_sensor(text_sensor_var))
CORE.add_job(setup_text_sensor_core_, text_sensor_var, config)
if not CORE.has_id(config[CONF_ID]):
var = Pvariable(config[CONF_ID], var, has_side_effects=True)
add(App.register_text_sensor(var))
CORE.add_job(setup_text_sensor_core_, var, config)


BUILD_FLAGS = '-DUSE_TEXT_SENSOR'

@@ -35,7 +35,7 @@ def to_code(config):
BUILD_FLAGS = '-DUSE_TEMPLATE_TEXT_SENSOR'

CONF_TEXT_SENSOR_TEMPLATE_PUBLISH = 'text_sensor.template.publish'
TEXT_SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA = vol.Schema({
TEXT_SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA = cv.Schema({
vol.Required(CONF_ID): cv.use_variable_id(text_sensor.TextSensor),
vol.Required(CONF_STATE): cv.templatable(cv.string_strict),
})

@@ -43,13 +43,13 @@ TEXT_SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA = vol.Schema({

@ACTION_REGISTRY.register(CONF_TEXT_SENSOR_TEMPLATE_PUBLISH,
TEXT_SENSOR_TEMPLATE_PUBLISH_ACTION_SCHEMA)
def text_sensor_template_publish_to_code(config, action_id, arg_type, template_arg):
def text_sensor_template_publish_to_code(config, action_id, template_arg, args):
for var in get_variable(config[CONF_ID]):
yield None
rhs = var.make_text_sensor_publish_action(template_arg)
type = TextSensorPublishAction.template(arg_type)
type = TextSensorPublishAction.template(template_arg)
action = Pvariable(action_id, rhs, type=type)
for template_ in templatable(config[CONF_STATE], arg_type, std_string):
for template_ in templatable(config[CONF_STATE], args, std_string):
yield None
add(action.set_state(template_))
yield action
esphome/components/text_sensor/wifi_info.py (new file, 45 lines)
@@ -0,0 +1,45 @@
import voluptuous as vol

from esphome.components import text_sensor
import esphome.config_validation as cv
from esphome.const import CONF_BSSID, CONF_ID, CONF_IP_ADDRESS, CONF_NAME, CONF_SSID
from esphome.cpp_generator import Pvariable
from esphome.cpp_types import App, Component

DEPENDENCIES = ['wifi']

IPAddressWiFiInfo = text_sensor.text_sensor_ns.class_('IPAddressWiFiInfo',
text_sensor.TextSensor, Component)
SSIDWiFiInfo = text_sensor.text_sensor_ns.class_('SSIDWiFiInfo',
text_sensor.TextSensor, Component)
BSSIDWiFiInfo = text_sensor.text_sensor_ns.class_('BSSIDWiFiInfo',
text_sensor.TextSensor, Component)

PLATFORM_SCHEMA = text_sensor.PLATFORM_SCHEMA.extend({
vol.Optional(CONF_IP_ADDRESS): cv.nameable(text_sensor.TEXT_SENSOR_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(IPAddressWiFiInfo),
})),
vol.Optional(CONF_SSID): cv.nameable(text_sensor.TEXT_SENSOR_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(SSIDWiFiInfo),
})),
vol.Optional(CONF_BSSID): cv.nameable(text_sensor.TEXT_SENSOR_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(BSSIDWiFiInfo),
})),
})


def setup_conf(config, key, klass):
if key in config:
conf = config[key]
rhs = App.register_component(klass.new(conf[CONF_NAME]))
sensor_ = Pvariable(conf[CONF_ID], rhs)
text_sensor.register_text_sensor(sensor_, conf)


def to_code(config):
setup_conf(config, CONF_IP_ADDRESS, IPAddressWiFiInfo)
setup_conf(config, CONF_SSID, SSIDWiFiInfo)
setup_conf(config, CONF_BSSID, BSSIDWiFiInfo)


BUILD_FLAGS = '-DUSE_WIFI_INFO_TEXT_SENSOR'
@@ -10,7 +10,7 @@ from esphome.const import CONF_CRON, CONF_DAYS_OF_MONTH, CONF_DAYS_OF_WEEK, CONF
CONF_MINUTES, CONF_MONTHS, CONF_ON_TIME, CONF_SECONDS, CONF_TIMEZONE, CONF_TRIGGER_ID
from esphome.core import CORE
from esphome.cpp_generator import Pvariable, add
from esphome.cpp_types import App, Component, NoArg, Trigger, esphome_ns
from esphome.cpp_types import App, Component, Trigger, esphome_ns
from esphome.py_compat import string_types

_LOGGER = logging.getLogger(__name__)

@@ -21,7 +21,7 @@ PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({

time_ns = esphome_ns.namespace('time')
RealTimeClockComponent = time_ns.class_('RealTimeClockComponent', Component)
CronTrigger = time_ns.class_('CronTrigger', Trigger.template(NoArg), Component)
CronTrigger = time_ns.class_('CronTrigger', Trigger.template(), Component)
ESPTime = time_ns.struct('ESPTime')


@@ -295,7 +295,7 @@ def setup_time_core_(time_var, config):
days_of_week = conf.get(CONF_DAYS_OF_WEEK, [x for x in range(1, 8)])
add(trigger.add_days_of_week(days_of_week))

automation.build_automation(trigger, NoArg, conf)
automation.build_automations(trigger, [], conf)


def setup_time(time_var, config):
@@ -3,6 +3,7 @@ import voluptuous as vol
from esphome import pins
import esphome.config_validation as cv
from esphome.const import CONF_BAUD_RATE, CONF_ID, CONF_RX_PIN, CONF_TX_PIN
from esphome.core import CORE
from esphome.cpp_generator import Pvariable, add
from esphome.cpp_helpers import setup_component
from esphome.cpp_types import App, Component, esphome_ns

@@ -11,10 +12,18 @@ UARTComponent = esphome_ns.class_('UARTComponent', Component)
UARTDevice = esphome_ns.class_('UARTDevice')
MULTI_CONF = True

CONFIG_SCHEMA = vol.All(vol.Schema({

def validate_rx_pin(value):
value = pins.input_pin(value)
if CORE.is_esp8266 and value >= 16:
raise vol.Invalid("Pins GPIO16 and GPIO17 cannot be used as RX pins on ESP8266.")
return value


CONFIG_SCHEMA = vol.All(cv.Schema({
cv.GenerateID(): cv.declare_variable_id(UARTComponent),
vol.Optional(CONF_TX_PIN): pins.output_pin,
vol.Optional(CONF_RX_PIN): pins.input_pin,
vol.Optional(CONF_RX_PIN): validate_rx_pin,
vol.Required(CONF_BAUD_RATE): cv.positive_int,
}).extend(cv.COMPONENT_SCHEMA.schema), cv.has_at_least_one_key(CONF_TX_PIN, CONF_RX_PIN))
@@ -9,7 +9,7 @@ from esphome.cpp_types import App, Component, StoringController, esphome_ns

WebServer = esphome_ns.class_('WebServer', Component, StoringController)

CONFIG_SCHEMA = vol.Schema({
CONFIG_SCHEMA = cv.Schema({
cv.GenerateID(): cv.declare_variable_id(WebServer),
vol.Optional(CONF_PORT): cv.port,
vol.Optional(CONF_CSS_URL): cv.string,

@@ -1,10 +1,11 @@
import voluptuous as vol

from esphome.automation import CONDITION_REGISTRY, Condition
import esphome.config_validation as cv
from esphome.const import CONF_AP, CONF_BSSID, CONF_CHANNEL, CONF_DNS1, CONF_DNS2, \
CONF_DOMAIN, CONF_FAST_CONNECT, CONF_GATEWAY, CONF_ID, CONF_MANUAL_IP, CONF_NETWORKS, \
from esphome.const import CONF_AP, CONF_BSSID, CONF_CHANNEL, CONF_DNS1, CONF_DNS2, CONF_DOMAIN, \
CONF_FAST_CONNECT, CONF_GATEWAY, CONF_HIDDEN, CONF_ID, CONF_MANUAL_IP, CONF_NETWORKS, \
CONF_PASSWORD, CONF_POWER_SAVE_MODE, CONF_REBOOT_TIMEOUT, CONF_SSID, CONF_STATIC_IP, \
CONF_SUBNET, CONF_USE_ADDRESS, CONF_HIDDEN
CONF_SUBNET, CONF_USE_ADDRESS
from esphome.core import CORE, HexInt
from esphome.cpp_generator import Pvariable, StructInitializer, add, variable
from esphome.cpp_types import App, Component, esphome_ns, global_ns

@@ -20,6 +21,7 @@ WIFI_POWER_SAVE_MODES = {
'LIGHT': WiFiPowerSaveMode.WIFI_POWER_SAVE_LIGHT,
'HIGH': WiFiPowerSaveMode.WIFI_POWER_SAVE_HIGH,
}
WiFiConnectedCondition = esphome_ns.class_('WiFiConnectedCondition', Condition)


def validate_password(value):

@@ -42,7 +44,7 @@ def validate_channel(value):
return value


AP_MANUAL_IP_SCHEMA = vol.Schema({
AP_MANUAL_IP_SCHEMA = cv.Schema({
vol.Required(CONF_STATIC_IP): cv.ipv4,
vol.Required(CONF_GATEWAY): cv.ipv4,
vol.Required(CONF_SUBNET): cv.ipv4,

@@ -53,7 +55,7 @@ STA_MANUAL_IP_SCHEMA = AP_MANUAL_IP_SCHEMA.extend({
vol.Optional(CONF_DNS2, default="1.0.0.1"): cv.ipv4,
})

WIFI_NETWORK_BASE = vol.Schema({
WIFI_NETWORK_BASE = cv.Schema({
cv.GenerateID(): cv.declare_variable_id(WiFiAP),
vol.Optional(CONF_SSID): cv.ssid,
vol.Optional(CONF_PASSWORD): validate_password,

@@ -105,7 +107,7 @@ def validate(config):
return config


CONFIG_SCHEMA = vol.All(vol.Schema({
CONFIG_SCHEMA = vol.All(cv.Schema({
cv.GenerateID(): cv.declare_variable_id(WiFiComponent),
vol.Optional(CONF_NETWORKS): cv.ensure_list(WIFI_NETWORK_STA),

@@ -188,3 +190,14 @@ def lib_deps(config):
if CORE.is_esp32:
return None
raise NotImplementedError


CONF_WIFI_CONNECTED = 'wifi.connected'
WIFI_CONNECTED_CONDITION_SCHEMA = cv.Schema({})


@CONDITION_REGISTRY.register(CONF_WIFI_CONNECTED, WIFI_CONNECTED_CONDITION_SCHEMA)
def wifi_connected_to_code(config, condition_id, template_arg, args):
rhs = WiFiConnectedCondition.new(template_arg)
type = WiFiConnectedCondition.template(template_arg)
yield Pvariable(condition_id, rhs, type=type)
@@ -2,7 +2,6 @@ from __future__ import print_function

from collections import OrderedDict
import importlib
import json
import logging
import re

@@ -19,6 +18,8 @@ from esphome.util import safe_print
# pylint: disable=unused-import, wrong-import-order
from typing import List, Optional, Tuple, Union # noqa
from esphome.core import ConfigType # noqa
from esphome.yaml_util import is_secret
from esphome.voluptuous_schema import ExtraKeysInvalid

_LOGGER = logging.getLogger(__name__)

@@ -397,10 +398,7 @@ def _nested_getitem(data, path):
def humanize_error(config, validation_error):
offending_item_summary = _nested_getitem(config, validation_error.path)
if isinstance(offending_item_summary, dict):
try:
offending_item_summary = json.dumps(offending_item_summary)
except (TypeError, ValueError):
pass
offending_item_summary = None
validation_error = text_type(validation_error)
m = re.match(r'^(.*?)\s*(?:for dictionary value )?@ data\[.*$', validation_error)
if m is not None:

@@ -408,25 +406,30 @@ def humanize_error(config, validation_error):
validation_error = validation_error.strip()
if not validation_error.endswith(u'.'):
validation_error += u'.'
if offending_item_summary is None:
if offending_item_summary is None or is_secret(offending_item_summary):
return validation_error

return u"{} Got '{}'".format(validation_error, offending_item_summary)


def _format_vol_invalid(ex, config, path, domain):
# type: (vol.Invalid, ConfigType, ConfigPath, basestring) -> unicode
message = u''
if u'extra keys not allowed' in ex.error_message:
try:
paren = ex.path[-2]
except IndexError:
paren = domain
try:
paren = ex.path[-2]
except IndexError:
paren = domain

if isinstance(ex, ExtraKeysInvalid):
if ex.candidates:
message += u'[{}] is an invalid option for [{}]. Did you mean {}?'.format(
ex.path[-1], paren, u', '.join(u'[{}]'.format(x) for x in ex.candidates))
else:
message += u'[{}] is an invalid option for [{}]. Please check the indentation.'.format(
ex.path[-1], paren)
elif u'extra keys not allowed' in ex.error_message:
message += u'[{}] is an invalid option for [{}].'.format(ex.path[-1], paren)
elif u'required key not provided' in ex.error_message:
try:
paren = ex.path[-2]
except IndexError:
paren = domain
message += u"'{}' is a required option for [{}].".format(ex.path[-1], paren)
else:
message += humanize_error(_nested_getitem(config, path), ex)

@@ -438,7 +441,8 @@ def load_config():
try:
config = yaml_util.load_yaml(CORE.config_path)
except OSError:
raise EsphomeError(u"Invalid YAML at {}".format(CORE.config_path))
raise EsphomeError(u"Invalid YAML at {}. Please see YAML syntax reference or use an online "
u"YAML syntax validator".format(CORE.config_path))
CORE.raw_config = config
config = substitutions.do_substitution_pass(config)
core_config.preload_core_config(config)

@@ -536,6 +540,8 @@ def dump_dict(config, path, at_root=True):
msg = msg + u' ' + inf
ret += st + msg + u'\n'
elif isinstance(conf, str):
if is_secret(conf):
conf = u'!secret {}'.format(is_secret(conf))
if not conf:
conf += u"''"

@@ -545,6 +551,9 @@ def dump_dict(config, path, at_root=True):
col = 'bold_red' if error else 'white'
ret += color(col, text_type(conf))
elif isinstance(conf, core.Lambda):
if is_secret(conf):
conf = u'!secret {}'.format(is_secret(conf))

conf = u'!lambda |-\n' + indent(text_type(conf.value))
error = config.get_error_for_path(path)
col = 'bold_red' if error else 'white'
@@ -15,17 +15,20 @@ from esphome.const import CONF_AVAILABILITY, CONF_COMMAND_TOPIC, CONF_DISCOVERY,
CONF_RETAIN, CONF_SETUP_PRIORITY, CONF_STATE_TOPIC, CONF_TOPIC, ESP_PLATFORM_ESP32, \
ESP_PLATFORM_ESP8266
from esphome.core import CORE, HexInt, IPAddress, Lambda, TimePeriod, TimePeriodMicroseconds, \
TimePeriodMilliseconds, TimePeriodSeconds
TimePeriodMilliseconds, TimePeriodSeconds, TimePeriodMinutes
from esphome.py_compat import integer_types, string_types, text_type
from esphome.voluptuous_schema import _Schema

_LOGGER = logging.getLogger(__name__)

# pylint: disable=invalid-name

Schema = _Schema
port = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
float_ = vol.Coerce(float)
positive_float = vol.All(float_, vol.Range(min=0))
zero_to_one_float = vol.All(float_, vol.Range(min=0, max=1))
negative_one_to_one_float = vol.All(float_, vol.Range(min=-1, max=1))
positive_int = vol.All(vol.Coerce(int), vol.Range(min=0))
positive_not_null_int = vol.All(vol.Coerce(int), vol.Range(min=0, min_included=False))

@@ -64,7 +67,7 @@ def valid_name(value):
for c in value:
if c not in ALLOWED_NAME_CHARS:
raise vol.Invalid(u"'{}' is an invalid character for names. Valid characters are: {}"
u"".format(c, ALLOWED_NAME_CHARS))
u" (lowercase, no spaces)".format(c, ALLOWED_NAME_CHARS))
return value


@@ -160,7 +163,7 @@ def int_(value):
hex_int = vol.Coerce(hex_int_)


def variable_id_str_(value):
def validate_id_name(value):
value = string(value)
if not value:
raise vol.Invalid("ID must not be empty")

@@ -183,7 +186,7 @@ def use_variable_id(type):
if value is None:
return core.ID(None, is_declaration=False, type=type)

return core.ID(variable_id_str_(value), is_declaration=False, type=type)
return core.ID(validate_id_name(value), is_declaration=False, type=type)

return validator

@@ -193,7 +196,7 @@ def declare_variable_id(type):
if value is None:
return core.ID(None, is_declaration=True, type=type)

return core.ID(variable_id_str_(value), is_declaration=True, type=type)
return core.ID(validate_id_name(value), is_declaration=True, type=type)

return validator

@@ -203,7 +206,7 @@ def templatable(other_validators):
if isinstance(value, Lambda):
return value
if isinstance(other_validators, dict):
return vol.Schema(other_validators)(value)
return Schema(other_validators)(value)
return other_validators(value)

return validator

@@ -273,7 +276,7 @@ def has_at_most_one_key(*keys):
TIME_PERIOD_ERROR = "Time period {} should be format number + unit, for example 5ms, 5s, 5min, 5h"

time_period_dict = vol.All(
dict, vol.Schema({
dict, Schema({
'days': float_,
'hours': float_,
'minutes': float_,

@@ -290,7 +293,7 @@ def time_period_str_colon(value):
"""Validate and transform time offset with format HH:MM[:SS]."""
if isinstance(value, int):
raise vol.Invalid('Make sure you wrap time values in quotes')
elif not isinstance(value, str):
if not isinstance(value, str):
raise vol.Invalid(TIME_PERIOD_ERROR.format(value))

try:

@@ -314,7 +317,7 @@ def time_period_str_unit(value):
if isinstance(value, int):
raise vol.Invalid("Don't know what '{0}' means as it has no time *unit*! Did you mean "
"'{0}s'?".format(value))
elif not isinstance(value, string_types):
if not isinstance(value, string_types):
raise vol.Invalid("Expected string for time period with unit.")

unit_to_kwarg = {

@@ -361,6 +364,16 @@ def time_period_in_seconds_(value):
return TimePeriodSeconds(**value.as_dict())


def time_period_in_minutes_(value):
if value.microseconds is not None and value.microseconds != 0:
raise vol.Invalid("Maximum precision is minutes")
if value.milliseconds is not None and value.milliseconds != 0:
raise vol.Invalid("Maximum precision is minutes")
if value.seconds is not None and value.seconds != 0:
raise vol.Invalid("Maximum precision is minutes")
return TimePeriodMinutes(**value.as_dict())


def update_interval(value):
if value == 'never':
return 4294967295 # uint32_t max

@@ -371,6 +384,7 @@ time_period = vol.Any(time_period_str_unit, time_period_str_colon, time_period_d
positive_time_period = vol.All(time_period, vol.Range(min=TimePeriod()))
positive_time_period_milliseconds = vol.All(positive_time_period, time_period_in_milliseconds_)
positive_time_period_seconds = vol.All(positive_time_period, time_period_in_seconds_)
positive_time_period_minutes = vol.All(positive_time_period, time_period_in_minutes_)
time_period_microseconds = vol.All(time_period, time_period_in_microseconds_)
positive_time_period_microseconds = vol.All(positive_time_period, time_period_in_microseconds_)
positive_not_null_time_period = vol.All(time_period,

@@ -428,6 +442,8 @@ frequency = float_with_unit("frequency", r"(Hz|HZ|hz)?")
resistance = float_with_unit("resistance", r"(Ω|Ω|ohm|Ohm|OHM)?")
current = float_with_unit("current", r"(a|A|amp|Amp|amps|Amps|ampere|Ampere)?")
voltage = float_with_unit("voltage", r"(v|V|volt|Volts)?")
distance = float_with_unit("distance", r"(m)")
framerate = float_with_unit("framerate", r"(FPS|fps|Fps|FpS|Hz)")


def validate_bytes(value):

@@ -592,6 +608,11 @@ i2c_address = hex_uint8_t


def percentage(value):
value = possibly_negative_percentage(value)
return zero_to_one_float(value)


def possibly_negative_percentage(value):
has_percent_sign = isinstance(value, string_types) and value.endswith('%')
if has_percent_sign:
value = float(value[:-1].rstrip()) / 100.0

@@ -600,7 +621,12 @@ def percentage(value):
if not has_percent_sign:
msg += " Please put a percent sign after the number!"
raise vol.Invalid(msg)
return zero_to_one_float(value)
if value < -1:
msg = "Percentage must not be smaller than -100%."
if not has_percent_sign:
msg += " Please put a percent sign after the number!"
raise vol.Invalid(msg)
return negative_one_to_one_float(value)
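The split above lets percentage keep accepting 0%..100% while the new possibly_negative_percentage widens the range to -100%..100% (servo.write uses it for its level). A small self-contained illustration of the same parsing rules, not the esphome code itself:

# Standalone illustration of the percentage parsing above (assumed equivalent rules).
def parse_percentage(value, allow_negative=True):
    if isinstance(value, str) and value.endswith('%'):
        value = float(value[:-1].rstrip()) / 100.0
    value = float(value)
    low = -1.0 if allow_negative else 0.0
    if not low <= value <= 1.0:
        raise ValueError("percentage out of range")
    return value

print(parse_percentage('7.5%'))   # 0.075 - the servo idle_level default
print(parse_percentage('-25 %'))  # -0.25 - accepted by possibly_negative_percentage
print(parse_percentage(0.5))      # 0.5   - bare floats already in range pass through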
def percentage_int(value):

@@ -727,17 +753,17 @@ def nameable(*schemas):
return validator


PLATFORM_SCHEMA = vol.Schema({
PLATFORM_SCHEMA = Schema({
vol.Required(CONF_PLATFORM): valid,
})

MQTT_COMPONENT_AVAILABILITY_SCHEMA = vol.Schema({
MQTT_COMPONENT_AVAILABILITY_SCHEMA = Schema({
vol.Required(CONF_TOPIC): subscribe_topic,
vol.Optional(CONF_PAYLOAD_AVAILABLE, default='online'): mqtt_payload,
vol.Optional(CONF_PAYLOAD_NOT_AVAILABLE, default='offline'): mqtt_payload,
})

MQTT_COMPONENT_SCHEMA = vol.Schema({
MQTT_COMPONENT_SCHEMA = Schema({
vol.Optional(CONF_NAME): string,
vol.Optional(CONF_RETAIN): vol.All(requires_component('mqtt'), boolean),
vol.Optional(CONF_DISCOVERY): vol.All(requires_component('mqtt'), boolean),

@@ -751,6 +777,6 @@ MQTT_COMMAND_COMPONENT_SCHEMA = MQTT_COMPONENT_SCHEMA.extend({
vol.Optional(CONF_COMMAND_TOPIC): vol.All(requires_component('mqtt'), subscribe_topic),
})

COMPONENT_SCHEMA = vol.Schema({
COMPONENT_SCHEMA = Schema({
vol.Optional(CONF_SETUP_PRIORITY): float_
})
@@ -1,11 +1,11 @@
"""Constants used by esphome."""

MAJOR_VERSION = 1
MINOR_VERSION = 11
PATCH_VERSION = '0'
MINOR_VERSION = 12
PATCH_VERSION = '0b1'
__short_version__ = '{}.{}'.format(MAJOR_VERSION, MINOR_VERSION)
__version__ = '{}.{}'.format(__short_version__, PATCH_VERSION)
ESPHOME_CORE_VERSION = '1.11.0'
ESPHOME_CORE_VERSION = '1.12.0b1'

ESP_PLATFORM_ESP32 = 'ESP32'
ESP_PLATFORM_ESP8266 = 'ESP8266'

@@ -23,11 +23,13 @@ CONF_ARDUINO_VERSION = 'arduino_version'
CONF_LOCAL = 'local'
CONF_REPOSITORY = 'repository'
CONF_COMMIT = 'commit'
CONF_SERVICES = 'services'
CONF_TAG = 'tag'
CONF_BRANCH = 'branch'
CONF_LOGGER = 'logger'
CONF_WIFI = 'wifi'
CONF_SSID = 'ssid'
CONF_IP_ADDRESS = 'ip_address'
CONF_BSSID = 'bssid'
CONF_PASSWORD = 'password'
CONF_MANUAL_IP = 'manual_ip'

@@ -38,6 +40,9 @@ CONF_OTA = 'ota'
CONF_MQTT = 'mqtt'
CONF_BROKER = 'broker'
CONF_USERNAME = 'username'
CONF_MIN_LEVEL = 'min_level'
CONF_IDLE_LEVEL = 'idle_level'
CONF_MAX_LEVEL = 'max_level'
CONF_POWER_SUPPLY = 'power_supply'
CONF_ID = 'id'
CONF_MQTT_ID = 'mqtt_id'

@@ -59,6 +64,7 @@ CONF_PCA9685 = 'pca9685'
CONF_PCA9685_ID = 'pca9685_id'
CONF_OUTPUT = 'output'
CONF_CHANNEL = 'channel'
CONF_CHANNELS = 'channels'
CONF_LIGHT = 'light'
CONF_RED = 'red'
CONF_GREEN = 'green'

@@ -127,7 +133,6 @@ CONF_FILTERS = 'filters'
CONF_OFFSET = 'offset'
CONF_MULTIPLY = 'multiply'
CONF_FILTER_OUT = 'filter_out'
CONF_FILTER_NAN = 'filter_nan'
CONF_SLIDING_WINDOW_MOVING_AVERAGE = 'sliding_window_moving_average'
CONF_EXPONENTIAL_MOVING_AVERAGE = 'exponential_moving_average'
CONF_WINDOW_SIZE = 'window_size'

@@ -137,6 +142,7 @@ CONF_LAMBDA = 'lambda'
CONF_THROTTLE = 'throttle'
CONF_DELTA = 'delta'
CONF_OR = 'or'
CONF_CALIBRATE_LINEAR = 'calibrate_linear'
CONF_AND = 'and'
CONF_RANGE = 'range'
CONF_UNIQUE = 'unique'

@@ -155,8 +161,7 @@ CONF_ATTENUATION = 'attenuation'
CONF_PRESSURE = 'pressure'
CONF_TRIGGER_PIN = 'trigger_pin'
CONF_ECHO_PIN = 'echo_pin'
CONF_TIMEOUT_METER = 'timeout_meter'
CONF_TIMEOUT_TIME = 'timeout_time'
CONF_TIMEOUT = 'timeout'
CONF_CARRIER_DUTY_PERCENT = 'carrier_duty_percent'
CONF_NEC = 'nec'
CONF_COMMAND = 'command'

@@ -198,6 +203,7 @@ CONF_CSS_URL = 'css_url'
CONF_JS_URL = 'js_url'
CONF_SSL_FINGERPRINTS = 'ssl_fingerprints'
CONF_PCF8574 = 'pcf8574'
CONF_MCP23017 = 'mcp23017'
CONF_PCF8575 = 'pcf8575'
CONF_SCAN = 'scan'
CONF_KEEPALIVE = 'keepalive'

@@ -315,6 +321,7 @@ CONF_ROTATION = 'rotation'
CONF_DC_PIN = 'dc_pin'
CONF_RESET_PIN = 'reset_pin'
CONF_BUSY_PIN = 'busy_pin'
CONF_ESP8266_RESTORE_FROM_FLASH = 'esp8266_restore_from_flash'
CONF_FULL_UPDATE_EVERY = 'full_update_every'
CONF_DATA_PINS = 'data_pins'
CONF_ENABLE_PIN = 'enable_pin'

@@ -360,6 +367,7 @@ CONF_FORMALDEHYDE = 'formaldehyde'
CONF_ON_TAG = 'on_tag'
CONF_ARGS = 'args'
CONF_FORMAT = 'format'
CONF_FOR = 'for'
CONF_COLOR_CORRECT = 'color_correct'
CONF_ON_JSON_MESSAGE = 'on_json_message'
CONF_ACCELERATION = 'acceleration'

@@ -412,9 +420,15 @@ CONF_USE_ADDRESS = 'use_address'
CONF_FROM = 'from'
CONF_TO = 'to'
CONF_SEGMENTS = 'segments'
CONF_MIN_POWER = 'min_power'
CONF_MIN_VALUE = 'min_value'
CONF_MAX_VALUE = 'max_value'
CONF_RX_ONLY = 'rx_only'


ALLOWED_NAME_CHARS = u'abcdefghijklmnopqrstuvwxyz0123456789_'
ARDUINO_VERSION_ESP32_DEV = 'https://github.com/platformio/platform-espressif32.git#feature/stage'
ARDUINO_VERSION_ESP32_1_0_0 = 'espressif32@1.5.0'
ARDUINO_VERSION_ESP32_1_0_1 = 'espressif32@1.6.0'
ARDUINO_VERSION_ESP8266_DEV = 'https://github.com/platformio/platform-espressif8266.git#feature' \
'/stage'
@@ -10,9 +10,9 @@ import re
from typing import Any, Dict, List  # noqa

from esphome.const import CONF_ARDUINO_VERSION, CONF_ESPHOME, CONF_ESPHOME_CORE_VERSION, \
    CONF_LOCAL, \
    CONF_USE_ADDRESS, CONF_WIFI, ESP_PLATFORM_ESP32, ESP_PLATFORM_ESP8266
from esphome.helpers import ensure_unique_string
    CONF_LOCAL, CONF_USE_ADDRESS, CONF_WIFI, ESP_PLATFORM_ESP32, ESP_PLATFORM_ESP8266, \
    CONF_REPOSITORY, CONF_BRANCH
from esphome.helpers import ensure_unique_string, is_hassio
from esphome.py_compat import IS_PY2, integer_types

_LOGGER = logging.getLogger(__name__)
@@ -215,6 +215,10 @@ class TimePeriodSeconds(TimePeriod):
    pass


class TimePeriodMinutes(TimePeriod):
    pass


LAMBDA_PROG = re.compile(r'id\(\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\)(\.?)')


@@ -288,7 +292,7 @@ class ID(object):
        return hash(self.id)


# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-instance-attributes,too-many-public-methods
class EsphomeCore(object):
    def __init__(self):
        # True if command is run from dashboard
@@ -328,6 +332,12 @@ class EsphomeCore(object):
    def esphome_core_version(self):  # type: () -> Dict[str, str]
        return self.config[CONF_ESPHOME][CONF_ESPHOME_CORE_VERSION]

    @property
    def is_dev_esphome_core_version(self):
        if CONF_REPOSITORY not in self.esphome_core_version:
            return False
        return self.esphome_core_version.get(CONF_BRANCH) == 'dev'

    @property
    def is_local_esphome_core_copy(self):
        return CONF_LOCAL in self.esphome_core_version
@@ -352,9 +362,19 @@ class EsphomeCore(object):
        path_ = os.path.expanduser(os.path.join(*path))
        return os.path.join(self.build_path, path_)

    def relative_pioenvs_path(self, *path):
        if is_hassio():
            return os.path.join('/data', self.name, '.pioenvs', *path)
        return self.relative_build_path('.pioenvs', *path)

    def relative_piolibdeps_path(self, *path):
        if is_hassio():
            return os.path.join('/data', self.name, '.piolibdeps', *path)
        return self.relative_build_path('.piolibdeps', *path)

    @property
    def firmware_bin(self):
        return self.relative_build_path('.pioenvs', self.name, 'firmware.bin')
        return self.relative_pioenvs_path(self.name, 'firmware.bin')

    @property
    def is_esp8266(self):

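Note: the new relative_pioenvs_path/relative_piolibdeps_path helpers above move PlatformIO's build and library folders to /data/<node name>/... when ESPHome runs as a Hass.io add-on. A minimal standalone sketch of that path logic (the is_hassio check and directory names follow the diff; the FakeCore class here is purely illustrative):

    import os


    def is_hassio():
        # Mirrors the new esphome.helpers.is_hassio: the add-on sets this variable.
        return bool(os.getenv('ESPHOME_IS_HASSIO'))


    class FakeCore(object):
        def __init__(self, name, build_path):
            self.name = name
            self.build_path = build_path

        def relative_build_path(self, *path):
            return os.path.join(self.build_path, *path)

        def relative_pioenvs_path(self, *path):
            if is_hassio():
                return os.path.join('/data', self.name, '.pioenvs', *path)
            return self.relative_build_path('.pioenvs', *path)


    core = FakeCore('livingroom', '/config/livingroom')
    print(core.relative_pioenvs_path('livingroom', 'firmware.bin'))
    # outside Hass.io: /config/livingroom/.pioenvs/livingroom/firmware.bin
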
@@ -9,14 +9,13 @@ import esphome.config_validation as cv
|
||||
from esphome.const import ARDUINO_VERSION_ESP32_DEV, ARDUINO_VERSION_ESP8266_DEV, \
|
||||
CONF_ARDUINO_VERSION, CONF_BOARD, CONF_BOARD_FLASH_MODE, CONF_BRANCH, CONF_BUILD_PATH, \
|
||||
CONF_COMMIT, CONF_ESPHOME, CONF_ESPHOME_CORE_VERSION, CONF_INCLUDES, CONF_LIBRARIES, \
|
||||
CONF_LOCAL, \
|
||||
CONF_NAME, CONF_ON_BOOT, CONF_ON_LOOP, CONF_ON_SHUTDOWN, CONF_PLATFORM, \
|
||||
CONF_PLATFORMIO_OPTIONS, \
|
||||
CONF_PRIORITY, CONF_REPOSITORY, CONF_TAG, CONF_TRIGGER_ID, CONF_USE_CUSTOM_CODE, \
|
||||
ESPHOME_CORE_VERSION, ESP_PLATFORM_ESP32, ESP_PLATFORM_ESP8266
|
||||
CONF_LOCAL, CONF_NAME, CONF_ON_BOOT, CONF_ON_LOOP, CONF_ON_SHUTDOWN, CONF_PLATFORM, \
|
||||
CONF_PLATFORMIO_OPTIONS, CONF_PRIORITY, CONF_REPOSITORY, CONF_TAG, CONF_TRIGGER_ID, \
|
||||
CONF_USE_CUSTOM_CODE, ESPHOME_CORE_VERSION, ESP_PLATFORM_ESP32, ESP_PLATFORM_ESP8266, \
|
||||
CONF_ESP8266_RESTORE_FROM_FLASH
|
||||
from esphome.core import CORE, EsphomeError
|
||||
from esphome.cpp_generator import Pvariable, RawExpression, add
|
||||
from esphome.cpp_types import App, NoArg, const_char_ptr, esphome_ns
|
||||
from esphome.cpp_types import App, const_char_ptr, esphome_ns
|
||||
from esphome.py_compat import text_type
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -87,11 +86,11 @@ def validate_commit(value):
|
||||
|
||||
ESPHOME_CORE_VERSION_SCHEMA = vol.Any(
|
||||
validate_simple_esphome_core_version,
|
||||
vol.Schema({
|
||||
cv.Schema({
|
||||
vol.Required(CONF_LOCAL): validate_local_esphome_core_version,
|
||||
}),
|
||||
vol.All(
|
||||
vol.Schema({
|
||||
cv.Schema({
|
||||
vol.Optional(CONF_REPOSITORY, default=LIBRARY_URI_REPO): cv.string,
|
||||
vol.Optional(CONF_COMMIT): validate_commit,
|
||||
vol.Optional(CONF_BRANCH): cv.string,
|
||||
@@ -114,7 +113,7 @@ def validate_platform(value):
|
||||
|
||||
|
||||
PLATFORMIO_ESP8266_LUT = {
|
||||
'2.5.0': 'espressif8266@2.0.0',
|
||||
'2.5.0': 'espressif8266@2.0.1',
|
||||
'2.4.2': 'espressif8266@1.8.0',
|
||||
'2.4.1': 'espressif8266@1.7.3',
|
||||
'2.4.0': 'espressif8266@1.6.0',
|
||||
@@ -127,7 +126,7 @@ PLATFORMIO_ESP8266_LUT = {
|
||||
PLATFORMIO_ESP32_LUT = {
|
||||
'1.0.0': 'espressif32@1.4.0',
|
||||
'1.0.1': 'espressif32@1.6.0',
|
||||
'RECOMMENDED': 'espressif32@1.5.0',
|
||||
'RECOMMENDED': 'espressif32@1.6.0',
|
||||
'LATEST': 'espressif32',
|
||||
'DEV': ARDUINO_VERSION_ESP32_DEV,
|
||||
}
|
||||
@@ -159,7 +158,7 @@ def default_build_path():
|
||||
return CORE.name
|
||||
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_NAME): cv.valid_name,
|
||||
vol.Required(CONF_PLATFORM): cv.one_of('ESP8266', 'ESPRESSIF8266', 'ESP32', 'ESPRESSIF32',
|
||||
upper=True),
|
||||
@@ -168,9 +167,10 @@ CONFIG_SCHEMA = vol.Schema({
|
||||
vol.Optional(CONF_ARDUINO_VERSION, default='recommended'): validate_arduino_version,
|
||||
vol.Optional(CONF_USE_CUSTOM_CODE, default=False): cv.boolean,
|
||||
vol.Optional(CONF_BUILD_PATH, default=default_build_path): cv.string,
|
||||
vol.Optional(CONF_PLATFORMIO_OPTIONS): vol.Schema({
|
||||
vol.Optional(CONF_PLATFORMIO_OPTIONS): cv.Schema({
|
||||
cv.string_strict: vol.Any([cv.string], cv.string),
|
||||
}),
|
||||
vol.Optional(CONF_ESP8266_RESTORE_FROM_FLASH): vol.All(cv.only_on_esp8266, cv.boolean),
|
||||
|
||||
vol.Optional(CONF_BOARD_FLASH_MODE, default='dout'): cv.one_of(*BUILD_FLASH_MODES, lower=True),
|
||||
vol.Optional(CONF_ON_BOOT): automation.validate_automation({
|
||||
@@ -194,7 +194,7 @@ CONFIG_SCHEMA = vol.Schema({
|
||||
def preload_core_config(config):
|
||||
if 'esphomeyaml' in config:
|
||||
_LOGGER.warning("The esphomeyaml section has been renamed to esphome in 1.11.0. "
|
||||
"Please replace 'esphomeyaml:' in your configuration by 'esphome:'.")
|
||||
"Please replace 'esphomeyaml:' in your configuration with 'esphome:'.")
|
||||
config[CONF_ESPHOME] = config.pop('esphomeyaml')
|
||||
if CONF_ESPHOME not in config:
|
||||
raise EsphomeError(u"No esphome section in config")
|
||||
@@ -222,16 +222,16 @@ def to_code(config):
|
||||
for conf in config.get(CONF_ON_BOOT, []):
|
||||
rhs = App.register_component(StartupTrigger.new(conf.get(CONF_PRIORITY)))
|
||||
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
|
||||
automation.build_automation(trigger, NoArg, conf)
|
||||
automation.build_automations(trigger, [], conf)
|
||||
|
||||
for conf in config.get(CONF_ON_SHUTDOWN, []):
|
||||
trigger = Pvariable(conf[CONF_TRIGGER_ID], ShutdownTrigger.new())
|
||||
automation.build_automation(trigger, const_char_ptr, conf)
|
||||
automation.build_automations(trigger, [(const_char_ptr, 'x')], conf)
|
||||
|
||||
for conf in config.get(CONF_ON_LOOP, []):
|
||||
rhs = App.register_component(LoopTrigger.new())
|
||||
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
|
||||
automation.build_automation(trigger, NoArg, conf)
|
||||
automation.build_automations(trigger, [], conf)
|
||||
|
||||
add(App.set_compilation_datetime(RawExpression('__DATE__ ", " __TIME__')))
|
||||
|
||||
@@ -247,3 +247,9 @@ def includes(config):
|
||||
res = os.path.relpath(path, CORE.relative_build_path('src'))
|
||||
ret.append(u'#include "{}"'.format(res))
|
||||
return ret
|
||||
|
||||
|
||||
def required_build_flags(config):
|
||||
if config.get(CONF_ESP8266_RESTORE_FROM_FLASH, False):
|
||||
return ['-DUSE_ESP8266_PREFERENCES_FLASH']
|
||||
return []
|
||||
|
||||
@@ -1,7 +1,7 @@
from collections import OrderedDict

from esphome.core import CORE, HexInt, Lambda, TimePeriod, TimePeriodMicroseconds, \
    TimePeriodMilliseconds, TimePeriodSeconds
    TimePeriodMilliseconds, TimePeriodSeconds, TimePeriodMinutes
from esphome.helpers import cpp_string_escape, indent_all_but_first_and_last

# pylint: disable=unused-import, wrong-import-order
@@ -286,6 +286,8 @@ def safe_exp(
        return IntLiteral(int(obj.total_milliseconds))
    if isinstance(obj, TimePeriodSeconds):
        return IntLiteral(int(obj.total_seconds))
    if isinstance(obj, TimePeriodMinutes):
        return IntLiteral(int(obj.total_minutes))
    if isinstance(obj, (tuple, list)):
        return ArrayInitializer(*[safe_exp(o) for o in obj])
    raise ValueError(u"Object is not an expression", obj)
@@ -420,12 +422,11 @@ def process_lambda(value,  # type: Lambda


def templatable(value,  # type: Any
                input_type,  # type: Expression
                args,  # type: List[Tuple[Expression, str]]
                output_type  # type: Optional[Expression]
                ):
    if isinstance(value, Lambda):
        lambda_ = None
        for lambda_ in process_lambda(value, [(input_type, 'x')], return_type=output_type):
        for lambda_ in process_lambda(value, args, return_type=output_type):
            yield None
        yield lambda_
    else:
@@ -475,9 +476,11 @@ class MockObj(Expression):
                continue
            require.require()

    def template(self, args):  # type: (Union[TemplateArguments, Expression]) -> MockObj
        if not isinstance(args, TemplateArguments):
            args = TemplateArguments(args)
    def template(self, *args):  # type: (Tuple[Union[TemplateArguments, Expression]]) -> MockObj
        if len(args) != 1 or not isinstance(args[0], TemplateArguments):
            args = TemplateArguments(*args)
        else:
            args = args[0]
        obj = MockObj(u'{}{}'.format(self.base, args))
        obj.requires.append(self)
        obj.requires.append(args)
@@ -553,9 +556,14 @@ class MockObjClass(MockObj):
            return True
        return False

    def template(self, args):  # type: (Union[TemplateArguments, Expression]) -> MockObjClass
        if not isinstance(args, TemplateArguments):
            args = TemplateArguments(args)
    def template(self,
                 *args  # type: Tuple[Union[TemplateArguments, Expression]]
                 ):
        # type: (...) -> MockObjClass
        if len(args) != 1 or not isinstance(args[0], TemplateArguments):
            args = TemplateArguments(*args)
        else:
            args = args[0]
        new_parents = self._parents[:]
        new_parents.append(self)
        obj = MockObjClass(u'{}{}'.format(self.base, args), parents=new_parents)

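Note: the template() changes above switch MockObj.template and MockObjClass.template from a single argument to *args, so callers no longer have to build a TemplateArguments wrapper themselves. A simplified, self-contained sketch of that call pattern (these toy classes only mimic the string rendering; they are not the real cpp_generator types):

    class TemplateArguments(object):
        def __init__(self, *args):
            self.args = args

        def __str__(self):
            return '<{}>'.format(', '.join(str(x) for x in self.args))


    class MockObj(object):
        def __init__(self, base):
            self.base = base

        def __str__(self):
            return self.base

        def template(self, *args):
            # Accept either a ready-made TemplateArguments or bare expressions.
            if len(args) != 1 or not isinstance(args[0], TemplateArguments):
                args = TemplateArguments(*args)
            else:
                args = args[0]
            return MockObj('{}{}'.format(self.base, args))


    trigger = MockObj('Trigger')
    print(trigger.template('float', 'std::string'))         # Trigger<float, std::string>
    print(trigger.template(TemplateArguments('uint32_t')))  # Trigger<uint32_t>
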
@@ -1,5 +1,5 @@
from esphome.const import CONF_INVERTED, CONF_MODE, CONF_NUMBER, CONF_PCF8574, \
    CONF_SETUP_PRIORITY
    CONF_SETUP_PRIORITY, CONF_MCP23017
from esphome.core import CORE, EsphomeError
from esphome.cpp_generator import IntLiteral, RawExpression
from esphome.cpp_types import GPIOInputPin, GPIOOutputPin
@@ -24,6 +24,21 @@ def generic_gpio_pin_expression_(conf, mock_obj, default_mode):
            yield hub.make_output_pin(number, inverted)
            return

        raise EsphomeError(u"Unknown default mode {}".format(default_mode))
    if CONF_MCP23017 in conf:
        from esphome.components import mcp23017

        for hub in CORE.get_variable(conf[CONF_MCP23017]):
            yield None

        if default_mode == u'INPUT':
            mode = mcp23017.MCP23017_GPIO_MODES[conf.get(CONF_MODE, u'INPUT')]
            yield hub.make_input_pin(number, mode, inverted)
            return
        if default_mode == u'OUTPUT':
            yield hub.make_output_pin(number, inverted)
            return

        raise EsphomeError(u"Unknown default mode {}".format(default_mode))
    if len(conf) == 1:
        yield IntLiteral(number)

@@ -14,7 +14,6 @@ int32 = global_ns.namespace('int32_t')
const_char_ptr = global_ns.namespace('const char *')
NAN = global_ns.namespace('NAN')
esphome_ns = global_ns  # using namespace esphome;
NoArg = esphome_ns.class_('NoArg')
App = esphome_ns.App
io_ns = esphome_ns.namespace('io')
Nameable = esphome_ns.class_('Nameable')

@@ -2,9 +2,11 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import codecs
|
||||
import collections
|
||||
import hmac
|
||||
import json
|
||||
import logging
|
||||
import multiprocessing
|
||||
import os
|
||||
import subprocess
|
||||
import threading
|
||||
@@ -23,7 +25,7 @@ import tornado.websocket
|
||||
|
||||
from esphome import const
|
||||
from esphome.__main__ import get_serial_ports
|
||||
from esphome.helpers import mkdir_p
|
||||
from esphome.helpers import mkdir_p, get_bool_env, run_system_command
|
||||
from esphome.py_compat import IS_PY2
|
||||
from esphome.storage_json import EsphomeStorageJSON, StorageJSON, \
|
||||
esphome_storage_path, ext_storage_path
|
||||
@@ -42,6 +44,8 @@ USING_PASSWORD = False
|
||||
ON_HASSIO = False
|
||||
USING_HASSIO_AUTH = True
|
||||
HASSIO_MQTT_CONFIG = None
|
||||
RELATIVE_URL = os.getenv('ESPHOME_DASHBOARD_RELATIVE_URL', '/')
|
||||
STATUS_USE_PING = get_bool_env('ESPHOME_DASHBOARD_USE_PING')
|
||||
|
||||
if IS_PY2:
|
||||
cookie_authenticated_yes = 'yes'
|
||||
@@ -49,6 +53,17 @@ else:
|
||||
cookie_authenticated_yes = b'yes'
|
||||
|
||||
|
||||
def template_args():
|
||||
version = const.__version__
|
||||
return {
|
||||
'version': version,
|
||||
'docs_link': 'https://beta.esphome.io/' if 'b' in version else 'https://esphome.io/',
|
||||
'get_static_file_url': get_static_file_url,
|
||||
'relative_url': RELATIVE_URL,
|
||||
'streamer_mode': get_bool_env('ESPHOME_STREAMER_MODE'),
|
||||
}
|
||||
|
||||
|
||||
# pylint: disable=abstract-method
|
||||
class BaseHandler(tornado.web.RequestHandler):
|
||||
def is_authenticated(self):
|
||||
@@ -165,7 +180,7 @@ class EsphomeHassConfigHandler(EsphomeCommandWebSocket):
|
||||
class SerialPortRequestHandler(BaseHandler):
|
||||
def get(self):
|
||||
if not self.is_authenticated():
|
||||
self.redirect('/login')
|
||||
self.redirect(RELATIVE_URL + 'login')
|
||||
return
|
||||
ports = get_serial_ports()
|
||||
data = []
|
||||
@@ -187,7 +202,7 @@ class WizardRequestHandler(BaseHandler):
|
||||
from esphome import wizard
|
||||
|
||||
if not self.is_authenticated():
|
||||
self.redirect('/login')
|
||||
self.redirect(RELATIVE_URL + 'login')
|
||||
return
|
||||
kwargs = {k: ''.join(v) for k, v in self.request.arguments.items()}
|
||||
destination = os.path.join(CONFIG_DIR, kwargs['name'] + '.yaml')
|
||||
@@ -198,9 +213,10 @@ class WizardRequestHandler(BaseHandler):
|
||||
class DownloadBinaryRequestHandler(BaseHandler):
|
||||
def get(self):
|
||||
if not self.is_authenticated():
|
||||
self.redirect('/login')
|
||||
self.redirect(RELATIVE_URL + 'login')
|
||||
return
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
configuration = self.get_argument('configuration')
|
||||
storage_path = ext_storage_path(CONFIG_DIR, configuration)
|
||||
storage_json = StorageJSON.load(storage_path)
|
||||
@@ -213,8 +229,8 @@ class DownloadBinaryRequestHandler(BaseHandler):
|
||||
filename = '{}.bin'.format(storage_json.name)
|
||||
self.set_header("Content-Disposition", 'attachment; filename="{}"'.format(filename))
|
||||
with open(path, 'rb') as f:
|
||||
while 1:
|
||||
data = f.read(16384) # or some other nice-sized chunk
|
||||
while True:
|
||||
data = f.read(16384)
|
||||
if not data:
|
||||
break
|
||||
self.write(data)
|
||||
@@ -301,21 +317,26 @@ class DashboardEntry(object):
|
||||
class MainRequestHandler(BaseHandler):
|
||||
def get(self):
|
||||
if not self.is_authenticated():
|
||||
self.redirect('/login')
|
||||
self.redirect(RELATIVE_URL + 'login')
|
||||
return
|
||||
|
||||
begin = bool(self.get_argument('begin', False))
|
||||
entries = _list_dashboard_entries()
|
||||
version = const.__version__
|
||||
docs_link = 'https://beta.esphome.io/' if 'b' in version else \
|
||||
'https://esphome.io/'
|
||||
|
||||
self.render("templates/index.html", entries=entries,
|
||||
version=version, begin=begin, docs_link=docs_link,
|
||||
get_static_file_url=get_static_file_url)
|
||||
self.render("templates/index.html", entries=entries, begin=begin,
|
||||
**template_args())
|
||||
|
||||
|
||||
class PingThread(threading.Thread):
|
||||
def _ping_func(filename, address):
|
||||
if os.name == 'nt':
|
||||
command = ['ping', '-n', '1', address]
|
||||
else:
|
||||
command = ['ping', '-c', '1', address]
|
||||
rc, _, _ = run_system_command(*command)
|
||||
return filename, rc == 0
|
||||
|
||||
|
||||
class MDNSStatusThread(threading.Thread):
|
||||
def run(self):
|
||||
zc = Zeroconf()
|
||||
|
||||
@@ -336,10 +357,52 @@ class PingThread(threading.Thread):
|
||||
zc.close()
|
||||
|
||||
|
||||
class PingStatusThread(threading.Thread):
|
||||
def run(self):
|
||||
pool = multiprocessing.Pool(processes=8)
|
||||
while not STOP_EVENT.is_set():
|
||||
# Only do pings if somebody has the dashboard open
|
||||
|
||||
def callback(ret):
|
||||
PING_RESULT[ret[0]] = ret[1]
|
||||
|
||||
entries = _list_dashboard_entries()
|
||||
queue = collections.deque()
|
||||
for entry in entries:
|
||||
if entry.address is None:
|
||||
PING_RESULT[entry.filename] = None
|
||||
continue
|
||||
|
||||
result = pool.apply_async(_ping_func, (entry.filename, entry.address),
|
||||
callback=callback)
|
||||
queue.append(result)
|
||||
|
||||
while queue:
|
||||
item = queue[0]
|
||||
if item.ready():
|
||||
queue.popleft()
|
||||
continue
|
||||
|
||||
try:
|
||||
item.get(0.1)
|
||||
except OSError:
|
||||
# ping not installed
|
||||
pass
|
||||
except multiprocessing.TimeoutError:
|
||||
pass
|
||||
|
||||
if STOP_EVENT.is_set():
|
||||
pool.terminate()
|
||||
return
|
||||
|
||||
PING_REQUEST.wait()
|
||||
PING_REQUEST.clear()
|
||||
|
||||
|
||||
class PingRequestHandler(BaseHandler):
|
||||
def get(self):
|
||||
if not self.is_authenticated():
|
||||
self.redirect('/login')
|
||||
self.redirect(RELATIVE_URL + 'login')
|
||||
return
|
||||
|
||||
PING_REQUEST.set()
|
||||
@@ -353,8 +416,9 @@ def is_allowed(configuration):
|
||||
class EditRequestHandler(BaseHandler):
|
||||
def get(self):
|
||||
if not self.is_authenticated():
|
||||
self.redirect('/login')
|
||||
self.redirect(RELATIVE_URL + 'login')
|
||||
return
|
||||
# pylint: disable=no-value-for-parameter
|
||||
configuration = self.get_argument('configuration')
|
||||
if not is_allowed(configuration):
|
||||
self.set_status(401)
|
||||
@@ -366,8 +430,9 @@ class EditRequestHandler(BaseHandler):
|
||||
|
||||
def post(self):
|
||||
if not self.is_authenticated():
|
||||
self.redirect('/login')
|
||||
self.redirect(RELATIVE_URL + 'login')
|
||||
return
|
||||
# pylint: disable=no-value-for-parameter
|
||||
configuration = self.get_argument('configuration')
|
||||
if not is_allowed(configuration):
|
||||
self.set_status(401)
|
||||
@@ -389,18 +454,13 @@ class LoginHandler(BaseHandler):
|
||||
if USING_HASSIO_AUTH:
|
||||
self.render_hassio_login()
|
||||
return
|
||||
self.write('<html><body><form action="/login" method="post">'
|
||||
self.write('<html><body><form action="' + RELATIVE_URL + 'login" method="post">'
|
||||
'Password: <input type="password" name="password">'
|
||||
'<input type="submit" value="Sign in">'
|
||||
'</form></body></html>')
|
||||
|
||||
def render_hassio_login(self, error=None):
|
||||
version = const.__version__
|
||||
docs_link = 'https://beta.esphome.io/' if 'b' in version else \
|
||||
'https://esphome.io/'
|
||||
|
||||
self.render("templates/login.html", version=version, docs_link=docs_link, error=error,
|
||||
get_static_file_url=get_static_file_url)
|
||||
self.render("templates/login.html", error=error, **template_args())
|
||||
|
||||
def post_hassio_login(self):
|
||||
import requests
|
||||
@@ -451,9 +511,9 @@ def get_static_file_url(name):
|
||||
else:
|
||||
path = os.path.join(static_path, name)
|
||||
with open(path, 'rb') as f_handle:
|
||||
hash_ = hash(f_handle.read())
|
||||
hash_ = hash(f_handle.read()) & (2**32-1)
|
||||
_STATIC_FILE_HASHES[name] = hash_
|
||||
return u'/static/{}?hash={}'.format(name, hash_)
|
||||
return RELATIVE_URL + u'static/{}?hash={:08X}'.format(name, hash_)
|
||||
|
||||
|
||||
def make_app(debug=False):
|
||||
@@ -488,21 +548,21 @@ def make_app(debug=False):
|
||||
'websocket_ping_interval': 30.0,
|
||||
}
|
||||
app = tornado.web.Application([
|
||||
(r"/", MainRequestHandler),
|
||||
(r"/login", LoginHandler),
|
||||
(r"/logs", EsphomeLogsHandler),
|
||||
(r"/run", EsphomeRunHandler),
|
||||
(r"/compile", EsphomeCompileHandler),
|
||||
(r"/validate", EsphomeValidateHandler),
|
||||
(r"/clean-mqtt", EsphomeCleanMqttHandler),
|
||||
(r"/clean", EsphomeCleanHandler),
|
||||
(r"/hass-config", EsphomeHassConfigHandler),
|
||||
(r"/edit", EditRequestHandler),
|
||||
(r"/download.bin", DownloadBinaryRequestHandler),
|
||||
(r"/serial-ports", SerialPortRequestHandler),
|
||||
(r"/ping", PingRequestHandler),
|
||||
(r"/wizard.html", WizardRequestHandler),
|
||||
(r'/static/(.*)', StaticFileHandler, {'path': static_path}),
|
||||
(RELATIVE_URL + "", MainRequestHandler),
|
||||
(RELATIVE_URL + "login", LoginHandler),
|
||||
(RELATIVE_URL + "logs", EsphomeLogsHandler),
|
||||
(RELATIVE_URL + "run", EsphomeRunHandler),
|
||||
(RELATIVE_URL + "compile", EsphomeCompileHandler),
|
||||
(RELATIVE_URL + "validate", EsphomeValidateHandler),
|
||||
(RELATIVE_URL + "clean-mqtt", EsphomeCleanMqttHandler),
|
||||
(RELATIVE_URL + "clean", EsphomeCleanHandler),
|
||||
(RELATIVE_URL + "hass-config", EsphomeHassConfigHandler),
|
||||
(RELATIVE_URL + "edit", EditRequestHandler),
|
||||
(RELATIVE_URL + "download.bin", DownloadBinaryRequestHandler),
|
||||
(RELATIVE_URL + "serial-ports", SerialPortRequestHandler),
|
||||
(RELATIVE_URL + "ping", PingRequestHandler),
|
||||
(RELATIVE_URL + "wizard.html", WizardRequestHandler),
|
||||
(RELATIVE_URL + r"static/(.*)", StaticFileHandler, {'path': static_path}),
|
||||
], **settings)
|
||||
|
||||
if debug:
|
||||
@@ -525,7 +585,7 @@ def start_web_server(args):
|
||||
|
||||
ON_HASSIO = args.hassio
|
||||
if ON_HASSIO:
|
||||
USING_HASSIO_AUTH = not bool(os.getenv('DISABLE_HA_AUTHENTICATION'))
|
||||
USING_HASSIO_AUTH = not get_bool_env('DISABLE_HA_AUTHENTICATION')
|
||||
USING_PASSWORD = False
|
||||
else:
|
||||
USING_HASSIO_AUTH = False
|
||||
@@ -562,14 +622,17 @@ def start_web_server(args):
|
||||
|
||||
webbrowser.open('localhost:{}'.format(args.port))
|
||||
|
||||
ping_thread = PingThread()
|
||||
ping_thread.start()
|
||||
if STATUS_USE_PING:
|
||||
status_thread = PingStatusThread()
|
||||
else:
|
||||
status_thread = MDNSStatusThread()
|
||||
status_thread.start()
|
||||
try:
|
||||
tornado.ioloop.IOLoop.current().start()
|
||||
except KeyboardInterrupt:
|
||||
_LOGGER.info("Shutting down...")
|
||||
STOP_EVENT.set()
|
||||
PING_REQUEST.set()
|
||||
ping_thread.join()
|
||||
status_thread.join()
|
||||
if args.socket is not None:
|
||||
os.remove(args.socket)
|
||||
|
||||
@@ -235,3 +235,10 @@ ul.stepper:not(.horizontal) .step.active::before, ul.stepper:not(.horizontal) .s
  vertical-align: middle;
  color: #666 !important;
}

.error {
  background: #e53935;
  color: #fff;
  padding: 10px 15px;
  margin-top: 15px;
}

@@ -1,3 +1,5 @@
|
||||
// Disclaimer: This file was written in a hurry and by someone
|
||||
// who does not know JS at all. This file desperately needs cleanup.
|
||||
document.addEventListener('DOMContentLoaded', () => {
|
||||
M.AutoInit(document.body);
|
||||
});
|
||||
@@ -183,7 +185,7 @@ let wsProtocol = "ws:";
|
||||
if (window.location.protocol === "https:") {
|
||||
wsProtocol = 'wss:';
|
||||
}
|
||||
const wsUrl = wsProtocol + '//' + window.location.hostname + ':' + window.location.port;
|
||||
const wsUrl = `${wsProtocol}//${window.location.hostname}:${window.location.port}${relative_url}`;
|
||||
|
||||
let isFetchingPing = false;
|
||||
const fetchPing = () => {
|
||||
@@ -191,7 +193,7 @@ const fetchPing = () => {
|
||||
return;
|
||||
isFetchingPing = true;
|
||||
|
||||
fetch('/ping', {credentials: "same-origin"}).then(res => res.json())
|
||||
fetch(`${relative_url}ping`, {credentials: "same-origin"}).then(res => res.json())
|
||||
.then(response => {
|
||||
for (let filename in response) {
|
||||
let node = document.querySelector(`.status-indicator[data-node="${filename}"]`);
|
||||
@@ -233,7 +235,7 @@ const portSelect = document.querySelector('.nav-wrapper select');
|
||||
let ports = [];
|
||||
|
||||
const fetchSerialPorts = (begin=false) => {
|
||||
fetch('/serial-ports', {credentials: "same-origin"}).then(res => res.json())
|
||||
fetch(`${relative_url}serial-ports`, {credentials: "same-origin"}).then(res => res.json())
|
||||
.then(response => {
|
||||
if (ports.length === response.length) {
|
||||
let allEqual = true;
|
||||
@@ -301,7 +303,7 @@ document.querySelectorAll(".action-show-logs").forEach((showLogs) => {
|
||||
const filenameField = logsModalElem.querySelector('.filename');
|
||||
filenameField.innerHTML = configuration;
|
||||
|
||||
const logSocket = new WebSocket(wsUrl + "/logs");
|
||||
const logSocket = new WebSocket(wsUrl + "logs");
|
||||
logSocket.addEventListener('message', (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
if (data.event === "line") {
|
||||
@@ -350,7 +352,7 @@ document.querySelectorAll(".action-upload").forEach((upload) => {
|
||||
const filenameField = uploadModalElem.querySelector('.filename');
|
||||
filenameField.innerHTML = configuration;
|
||||
|
||||
const logSocket = new WebSocket(wsUrl + "/run");
|
||||
const logSocket = new WebSocket(wsUrl + "run");
|
||||
logSocket.addEventListener('message', (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
if (data.event === "line") {
|
||||
@@ -399,7 +401,7 @@ document.querySelectorAll(".action-validate").forEach((upload) => {
|
||||
const filenameField = validateModalElem.querySelector('.filename');
|
||||
filenameField.innerHTML = configuration;
|
||||
|
||||
const logSocket = new WebSocket(wsUrl + "/validate");
|
||||
const logSocket = new WebSocket(wsUrl + "validate");
|
||||
logSocket.addEventListener('message', (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
if (data.event === "line") {
|
||||
@@ -457,7 +459,7 @@ document.querySelectorAll(".action-compile").forEach((upload) => {
|
||||
const filenameField = compileModalElem.querySelector('.filename');
|
||||
filenameField.innerHTML = configuration;
|
||||
|
||||
const logSocket = new WebSocket(wsUrl + "/compile");
|
||||
const logSocket = new WebSocket(wsUrl + "compile");
|
||||
logSocket.addEventListener('message', (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
if (data.event === "line") {
|
||||
@@ -492,7 +494,7 @@ document.querySelectorAll(".action-compile").forEach((upload) => {
|
||||
downloadButton.addEventListener('click', () => {
|
||||
const link = document.createElement("a");
|
||||
link.download = name;
|
||||
link.href = '/download.bin?configuration=' + encodeURIComponent(configuration);
|
||||
link.href = `${relative_url}download.bin?configuration=${encodeURIComponent(configuration)}`;
|
||||
document.body.appendChild(link);
|
||||
link.click();
|
||||
link.remove();
|
||||
@@ -515,7 +517,7 @@ document.querySelectorAll(".action-clean-mqtt").forEach((btn) => {
|
||||
const filenameField = cleanMqttModalElem.querySelector('.filename');
|
||||
filenameField.innerHTML = configuration;
|
||||
|
||||
const logSocket = new WebSocket(wsUrl + "/clean-mqtt");
|
||||
const logSocket = new WebSocket(wsUrl + "clean-mqtt");
|
||||
logSocket.addEventListener('message', (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
if (data.event === "line") {
|
||||
@@ -557,7 +559,7 @@ document.querySelectorAll(".action-clean").forEach((btn) => {
|
||||
const filenameField = cleanModalElem.querySelector('.filename');
|
||||
filenameField.innerHTML = configuration;
|
||||
|
||||
const logSocket = new WebSocket(wsUrl + "/clean");
|
||||
const logSocket = new WebSocket(wsUrl + "clean");
|
||||
logSocket.addEventListener('message', (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
if (data.event === "line") {
|
||||
@@ -605,7 +607,7 @@ document.querySelectorAll(".action-hass-config").forEach((btn) => {
|
||||
const filenameField = hassConfigModalElem.querySelector('.filename');
|
||||
filenameField.innerHTML = configuration;
|
||||
|
||||
const logSocket = new WebSocket(wsUrl + "/hass-config");
|
||||
const logSocket = new WebSocket(wsUrl + "hass-config");
|
||||
logSocket.addEventListener('message', (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
if (data.event === "line") {
|
||||
@@ -646,7 +648,7 @@ editor.session.setOption('tabSize', 2);
|
||||
|
||||
const saveButton = editModalElem.querySelector(".save-button");
|
||||
const saveEditor = () => {
|
||||
fetch(`/edit?configuration=${configuration}`, {
|
||||
fetch(`${relative_url}edit?configuration=${configuration}`, {
|
||||
credentials: "same-origin",
|
||||
method: "POST",
|
||||
body: editor.getValue()
|
||||
@@ -673,7 +675,7 @@ document.querySelectorAll(".action-edit").forEach((btn) => {
|
||||
const filenameField = editModalElem.querySelector('.filename');
|
||||
filenameField.innerHTML = configuration;
|
||||
|
||||
fetch(`/edit?configuration=${configuration}`, {credentials: "same-origin"})
|
||||
fetch(`${relative_url}edit?configuration=${configuration}`, {credentials: "same-origin"})
|
||||
.then(res => res.text()).then(response => {
|
||||
editor.setValue(response, -1);
|
||||
});
|
||||
|
||||
@@ -16,6 +16,15 @@

    <script src="{{ get_static_file_url('materialize-stepper.min.js') }}"></script>
    <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
    <script>const relative_url = "{{ relative_url }}";</script>

    {% if streamer_mode %}
      <style>
        .log-secret {
          visibility: hidden !important;
        }
      </style>
    {% end %}
  </head>
  <body>

@@ -286,6 +295,16 @@
            <option value="iotbusio">oddWires IoT-Bus Io</option>
            <option value="iotbusproteus">oddWires Proteus IoT-Bus</option>
            <option value="nina_w10">u-blox NINA-W10 series</option>
            <option value="bpi-bit">BananaPi-Bit</option>
            <option value="d-duino-32">DSTIKE D-duino-32</option>
            <option value="fm-devkit">ESP32 FM DevKit</option>
            <option value="esp32-poe">OLIMEX ESP32-PoE</option>
            <option value="oroca_edubot">OROCA EduBot</option>
            <option value="lopy">Pycom LoPy</option>
            <option value="lopy4">Pycom LoPy4</option>
            <option value="wesp32">Silicognition wESP32</option>
            <option value="ttgo-t-beam">TTGO T-Beam</option>
            <option value="turta_iot_node">Turta IoT Node</option>
          </optgroup>
        </select>
      </div>

@@ -28,14 +28,14 @@
  <div class="container">
    <div class="row">
      <div class="col card s10 offset-s1 m10 offset-m1 l8 offset-l2">
        <form action="/login" method="post">
        <form action="{{ relative_url }}login" method="post">
          <div class="card-content">
            <span class="card-title">Enter credentials</span>
            <p>
              Please login using your Home Assistant credentials.
            </p>
            {% if error is not None %}
            <p>
            <p class="error">
              {{ escape(error) }}
            </p>
            {% end %}

@@ -132,3 +132,24 @@ def resolve_ip_address(host):
        raise EsphomeError("Error resolving IP address: {}".format(err))

    return ip


def get_bool_env(var, default=False):
    return bool(os.getenv(var, default))


def is_hassio():
    return get_bool_env('ESPHOME_IS_HASSIO')


def symlink(src, dst):
    if hasattr(os, 'symlink'):
        os.symlink(src, dst)
    else:
        import ctypes
        csl = ctypes.windll.kernel32.CreateSymbolicLinkW
        csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
        csl.restype = ctypes.c_ubyte
        flags = 1 if os.path.isdir(src) else 0
        if csl(dst, src, flags) == 0:
            raise ctypes.WinError()

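Note: get_bool_env only checks whether the variable is set to a non-empty string, so values such as "0" or "false" still count as enabled. A small standalone illustration of that behaviour:

    import os


    def get_bool_env(var, default=False):
        # Same logic as the helper added above.
        return bool(os.getenv(var, default))


    os.environ['ESPHOME_DASHBOARD_USE_PING'] = '1'
    print(get_bool_env('ESPHOME_DASHBOARD_USE_PING'))  # True
    os.environ['ESPHOME_DASHBOARD_USE_PING'] = 'false'
    print(get_bool_env('ESPHOME_DASHBOARD_USE_PING'))  # still True: non-empty string
    del os.environ['ESPHOME_DASHBOARD_USE_PING']
    print(get_bool_env('ESPHOME_DASHBOARD_USE_PING'))  # False (default)
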
@@ -5,7 +5,7 @@ import logging
|
||||
import voluptuous as vol
|
||||
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_INVERTED, CONF_MODE, CONF_NUMBER, CONF_PCF8574
|
||||
from esphome.const import CONF_INVERTED, CONF_MODE, CONF_NUMBER, CONF_PCF8574, CONF_MCP23017
|
||||
from esphome.core import CORE
|
||||
from esphome.cpp_types import Component, esphome_ns, io_ns
|
||||
|
||||
@@ -127,9 +127,18 @@ ESP32_BOARD_PINS = {
|
||||
'SW2': 2, 'SW3': 0, 'POT1': 32, 'POT2': 33, 'PIEZO1': 19, 'PIEZO2': 18,
|
||||
'PHOTO': 25, 'DHT_PIN': 26, 'S1': 4, 'S2': 16, 'S3': 18, 'S4': 19, 'S5': 21,
|
||||
'SDA': 27, 'SCL': 14, 'SS': 19, 'MOSI': 21, 'MISO': 22, 'SCK': 23},
|
||||
'bpi-bit': {'BUZZER': 25, 'BUTTON_A': 35, 'BUTTON_B': 27, 'RGB_LED': 4, 'LIGHT_SENSOR1': 36,
|
||||
'LIGHT_SENSOR2': 39, 'TEMPERATURE_SENSOR': 34, 'MPU9250_INT': 0, 'P0': 25, 'P1': 32,
|
||||
'P2': 33, 'P3': 13, 'P4': 15, 'P5': 35, 'P6': 12, 'P7': 14, 'P8': 16, 'P9': 17,
|
||||
'P10': 26, 'P11': 27, 'P12': 2, 'P13': 18, 'P14': 19, 'P15': 23, 'P16': 5,
|
||||
'P19': 22, 'P20': 21, 'DAC1': 26},
|
||||
'd-duino-32': {'SDA': 5, 'SCL': 4, 'SS ': 15, 'MOSI ': 13, 'MISO ': 12, 'SCK ': 14, 'D1': 5,
|
||||
'D2': 4, 'D3': 0, 'D4': 2, 'D5': 14, 'D6': 12, 'D7': 13, 'D8': 15, 'D9': 3,
|
||||
'D10': 1},
|
||||
'esp-wrover-kit': {},
|
||||
'esp32-evb': {'BUTTON': 34, 'SDA': 13, 'SCL': 16, 'SS': 17, 'MOSI': 2, 'MISO': 15, 'SCK': 14},
|
||||
'esp32-gateway': {'LED': 33, 'BUTTON': 34, 'SCL': 16, 'SDA': 17},
|
||||
'esp32-poe': {'BUTTON': 34, 'SDA': 13, 'SCL': 16, 'MOSI': 2, 'MISO': 15, 'SCK': 14},
|
||||
'esp320': {'LED': 5, 'SDA': 2, 'SCL': 14, 'SS': 15, 'MOSI': 13, 'MISO': 12, 'SCK': 14},
|
||||
'esp32dev': {},
|
||||
'esp32doit-devkit-v1': {'LED': 2},
|
||||
@@ -142,12 +151,15 @@ ESP32_BOARD_PINS = {
|
||||
'A0': 26, 'A1': 25, 'A2': 34, 'A4': 36, 'A5': 4, 'A6': 14, 'A7': 32, 'A8': 15,
|
||||
'A9': 33, 'A10': 27, 'A11': 12, 'A12': 13, 'A13': 35},
|
||||
'firebeetle32': {'LED': 2},
|
||||
'fm-devkit': {'LED': 5, 'SW1': 4, 'SW2': 18, 'SW3': 19, 'SW4': 21, 'I2S_MCLK': 2,
|
||||
'I2S_LRCLK': 25, 'I2S_SCLK': 26, 'I2S_DOUT': 22, 'D0': 34, 'D1': 35, 'D2': 32,
|
||||
'D3': 33, 'D4': 27, 'D5': 14, 'D6': 12, 'D7': 13, 'D8': 15, 'D9': 23, 'D10': 0,
|
||||
'SDA': 16, 'SCL': 17},
|
||||
'heltec_wifi_kit_32': {'LED': 25, 'BUTTON': 0, 'A1': 37, 'A2': 38},
|
||||
'heltec_wifi_lora_32': {'LED': 25, 'BUTTON': 0, 'SDA': 4, 'SCL': 15, 'SS': 18, 'MOSI': 27,
|
||||
'SCK': 5, 'A1': 37, 'A2': 38, 'T8': 32, 'T9': 33, 'DAC1': 26,
|
||||
'DAC2': 25, 'OLED_SCL': 15, 'OLED_SDA': 4, 'OLED_RST': 16,
|
||||
'LORA_SCK': 5, 'LORA_MOSI': 27, 'LORA_MISO': 19, 'LORA_CS': 18,
|
||||
'LORA_RST': 14, 'LORA_IRQ': 26},
|
||||
'SCK': 5, 'A1': 37, 'A2': 38, 'OLED_SCL': 15, 'OLED_SDA': 4,
|
||||
'OLED_RST': 16, 'LORA_SCK': 5, 'LORA_MOSI': 27, 'LORA_MISO': 19,
|
||||
'LORA_CS': 18, 'LORA_RST': 14, 'LORA_IRQ': 26},
|
||||
'hornbill32dev': {'LED': 13, 'BUTTON': 0},
|
||||
'hornbill32minima': {'SS': 2},
|
||||
'intorobot': {'LED': 4, 'LED_RED': 27, 'LED_GREEN': 21, 'LED_BLUE': 22,
|
||||
@@ -159,6 +171,12 @@ ESP32_BOARD_PINS = {
|
||||
'lolin_d32': {'LED': 5, 'VBAT': 35},
|
||||
'lolin_d32_pro': {'LED': 5, 'VBAT': 35, 'TF_CS': 4, 'TS_CS': 12, 'TFT_CS': 14, 'TFT_LED': 32,
|
||||
'TFT_RST': 33, 'TFT_DC': 27},
|
||||
'lopy': {'LORA_SCK': 5, 'LORA_MISO': 19, 'LORA_MOSI': 27, 'LORA_CS': 17, 'LORA_RST': 18,
|
||||
'LORA_IRQ': 23, 'LED': 0, 'ANT_SELECT': 16, 'SDA': 12, 'SCL': 13, 'SS': 17,
|
||||
'MOSI': 22, 'MISO': 37, 'SCK': 13, 'A1': 37, 'A2': 38},
|
||||
'lopy4': {'LORA_SCK': 5, 'LORA_MISO': 19, 'LORA_MOSI': 27, 'LORA_CS': 18, 'LORA_IRQ': 23,
|
||||
'LED': 0, 'ANT_SELECT': 21, 'SDA': 12, 'SCL': 13, 'SS': 18, 'MOSI': 22, 'MISO': 37,
|
||||
'SCK': 13, 'A1': 37, 'A2': 38},
|
||||
'm5stack-core-esp32': {'TXD2': 17, 'RXD2': 16, 'G23': 23, 'G19': 19, 'G18': 18, 'G3': 3,
|
||||
'G16': 16, 'G21': 21, 'G2': 2, 'G12': 12, 'G15': 15, 'G35': 35,
|
||||
'G36': 36, 'G25': 25, 'G26': 26, 'G1': 1, 'G17': 17, 'G22': 22, 'G5': 5,
|
||||
@@ -184,14 +202,27 @@ ESP32_BOARD_PINS = {
|
||||
'nodemcu-32s': {'LED': 2, 'BUTTON': 0},
|
||||
'odroid_esp32': {'LED': 2, 'SDA': 15, 'SCL': 4, 'SS': 22, 'ADC1': 35, 'ADC2': 36},
|
||||
'onehorse32dev': {'LED': 5, 'BUTTON': 0, 'A1': 37, 'A2': 38},
|
||||
'oroca_edubot': {'LED': 13, 'TX': 17, 'RX': 16, 'SDA': 23, 'SS': 2, 'MOSI': 18, 'SCK': 5,
|
||||
'A0': 34, 'A1': 39, 'A2': 36, 'A3': 33, 'D0': 4, 'D1': 16, 'D2': 17, 'D3': 22,
|
||||
'D4': 23, 'D5': 5, 'D6': 18, 'D7': 19, 'D8': 33, 'VBAT': 35},
|
||||
'pico32': {},
|
||||
'pocket_32': {'LED': 16},
|
||||
'quantum': {},
|
||||
'ttgo-lora32-v1': {'LED': 2, 'BUTTON': 0, 'SS': 18, 'MOSI': 27, 'SCK': 5, 'A1': 37, 'A2': 38,
|
||||
'T8': 32, 'T9': 33, 'DAC1': 26, 'DAC2': 25, 'OLED_SDA': 4, 'OLED_SCL': 15,
|
||||
'OLED_RST': 16, 'LORA_SCK': 5, 'LORA_MISO': 19, 'LORA_MOSI': 27,
|
||||
'LORA_CS': 18, 'LORA_RST': 14, 'LORA_IRQ': 26},
|
||||
'OLED_SDA': 4, 'OLED_SCL': 15, 'OLED_RST': 16, 'LORA_SCK': 5,
|
||||
'LORA_MISO': 19, 'LORA_MOSI': 27, 'LORA_CS': 18, 'LORA_RST': 14,
|
||||
'LORA_IRQ': 26},
|
||||
'ttgo-t-beam': {'LORA_SCK': 5, 'LORA_MISO': 19, 'LORA_MOSI': 27, 'LORA_CS': 18, 'LORA_RST': 23,
|
||||
'LORA_IRQ': 26, 'LORA_IO1': 33, 'LORA_IO2': 32, 'SS': 18, 'MOSI': 27, 'SCK': 5,
|
||||
'T8': 32, 'T9': 33, 'DAC2': 25},
|
||||
'turta_iot_node': {'LED': 13, 'TX': 10, 'RX': 9, 'SDA': 23, 'SS': 21, 'MOSI': 18, 'SCK': 5,
|
||||
'A0': 4, 'A1': 25, 'A2': 26, 'A3': 27, 'A8': 38, 'T1': 25, 'T2': 26,
|
||||
'T3': 27, 'T4': 32, 'T5': 33, 'T6': 34, 'T7': 35, 'T8': 22, 'T9': 23,
|
||||
'T10': 10, 'T11': 9, 'T12': 21, 'T13': 5, 'T14': 18, 'T15': 19,
|
||||
'T16': 37, 'T17': 14, 'T18': 2, 'T19': 38},
|
||||
'wemosbat': 'pocket_32',
|
||||
'wesp32': {'SCL': 4, 'SDA': 2, 'MISO': 32, 'ETH_PHY_ADDR': 0, 'ETH_PHY_MDC': 16,
|
||||
'ETH_PHY_MDIO': 17},
|
||||
'widora-air': {'LED': 25, 'BUTTON': 0, 'SDA': 23, 'SCL': 19, 'MOSI': 16, 'MISO': 17, 'A1': 39,
|
||||
'A2': 35, 'A3': 25, 'A4': 26, 'A5': 14, 'A6': 12, 'A7': 15, 'A8': 13, 'A9': 2,
|
||||
'D0': 19, 'D1': 23, 'D2': 18, 'D3': 17, 'D4': 16, 'D5': 5, 'D6': 4, 'T0': 19,
|
||||
@@ -290,7 +321,7 @@ def analog_pin(value):
|
||||
if 32 <= value <= 39: # ADC1
|
||||
return value
|
||||
raise vol.Invalid(u"ESP32: Only pins 32 though 39 support ADC.")
|
||||
elif CORE.is_esp8266:
|
||||
if CORE.is_esp8266:
|
||||
if value == 17: # A0
|
||||
return value
|
||||
raise vol.Invalid(u"ESP8266: Only pin A0 (17) supports ADC.")
|
||||
@@ -320,13 +351,13 @@ def pin_mode(value):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
GPIO_FULL_OUTPUT_PIN_SCHEMA = vol.Schema({
|
||||
GPIO_FULL_OUTPUT_PIN_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_NUMBER): output_pin,
|
||||
vol.Optional(CONF_MODE): pin_mode,
|
||||
vol.Optional(CONF_INVERTED): cv.boolean,
|
||||
})
|
||||
|
||||
GPIO_FULL_INPUT_PIN_SCHEMA = vol.Schema({
|
||||
GPIO_FULL_INPUT_PIN_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_NUMBER): input_pin,
|
||||
vol.Optional(CONF_MODE): pin_mode,
|
||||
vol.Optional(CONF_INVERTED): cv.boolean,
|
||||
@@ -348,10 +379,19 @@ def shorthand_input_pullup_pin(value):
|
||||
return {CONF_NUMBER: value}
|
||||
|
||||
|
||||
def validate_has_interrupt(value):
|
||||
if CORE.is_esp8266:
|
||||
if value[CONF_NUMBER] >= 16:
|
||||
raise vol.Invalid("Pins GPIO16 and GPIO17 do not support interrupts and cannot be used "
|
||||
"here, got {}".format(value[CONF_NUMBER]))
|
||||
return value
|
||||
|
||||
|
||||
I2CDevice = esphome_ns.class_('I2CDevice')
|
||||
PCF8574Component = io_ns.class_('PCF8574Component', Component, I2CDevice)
|
||||
MCP23017 = io_ns.class_('MCP23017', Component, I2CDevice)
|
||||
|
||||
PCF8574_OUTPUT_PIN_SCHEMA = vol.Schema({
|
||||
PCF8574_OUTPUT_PIN_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_PCF8574): cv.use_variable_id(PCF8574Component),
|
||||
vol.Required(CONF_NUMBER): vol.Coerce(int),
|
||||
vol.Optional(CONF_MODE): cv.one_of("OUTPUT", upper=True),
|
||||
@@ -362,6 +402,17 @@ PCF8574_INPUT_PIN_SCHEMA = PCF8574_OUTPUT_PIN_SCHEMA.extend({
|
||||
vol.Optional(CONF_MODE): cv.one_of("INPUT", "INPUT_PULLUP", upper=True),
|
||||
})
|
||||
|
||||
MCP23017_OUTPUT_PIN_SCHEMA = cv.Schema({
|
||||
vol.Required(CONF_MCP23017): cv.use_variable_id(MCP23017),
|
||||
vol.Required(CONF_NUMBER): vol.All(vol.Coerce(int), vol.Range(min=0, max=15)),
|
||||
vol.Optional(CONF_MODE): cv.one_of("OUTPUT", upper=True),
|
||||
vol.Optional(CONF_INVERTED, default=False): cv.boolean,
|
||||
})
|
||||
|
||||
MCP23017_INPUT_PIN_SCHEMA = MCP23017_OUTPUT_PIN_SCHEMA.extend({
|
||||
vol.Optional(CONF_MODE): cv.one_of("INPUT", "INPUT_PULLUP", upper=True),
|
||||
})
|
||||
|
||||
|
||||
def internal_gpio_output_pin_schema(value):
|
||||
if isinstance(value, dict):
|
||||
@@ -372,6 +423,8 @@ def internal_gpio_output_pin_schema(value):
|
||||
def gpio_output_pin_schema(value):
|
||||
if isinstance(value, dict) and CONF_PCF8574 in value:
|
||||
return PCF8574_OUTPUT_PIN_SCHEMA(value)
|
||||
if isinstance(value, dict) and CONF_MCP23017 in value:
|
||||
return MCP23017_OUTPUT_PIN_SCHEMA(value)
|
||||
return internal_gpio_output_pin_schema(value)
|
||||
|
||||
|
||||
@@ -384,6 +437,8 @@ def internal_gpio_input_pin_schema(value):
|
||||
def gpio_input_pin_schema(value):
|
||||
if isinstance(value, dict) and CONF_PCF8574 in value:
|
||||
return PCF8574_INPUT_PIN_SCHEMA(value)
|
||||
if isinstance(value, dict) and CONF_MCP23017 in value:
|
||||
return MCP23017_INPUT_PIN_SCHEMA(value)
|
||||
return internal_gpio_input_pin_schema(value)
|
||||
|
||||
|
||||
@@ -396,4 +451,6 @@ def internal_gpio_input_pullup_pin_schema(value):
|
||||
def gpio_input_pullup_pin_schema(value):
|
||||
if isinstance(value, dict) and CONF_PCF8574 in value:
|
||||
return PCF8574_INPUT_PIN_SCHEMA(value)
|
||||
if isinstance(value, dict) and CONF_MCP23017 in value:
|
||||
return MCP23017_INPUT_PIN_SCHEMA(value)
|
||||
return internal_gpio_input_pin_schema(value)
|
||||
|
||||
@@ -14,6 +14,8 @@ _LOGGER = logging.getLogger(__name__)

def run_platformio_cli(*args, **kwargs):
    os.environ["PLATFORMIO_FORCE_COLOR"] = "true"
    os.environ["PLATFORMIO_BUILD_DIR"] = os.path.abspath(CORE.relative_pioenvs_path())
    os.environ["PLATFORMIO_LIBDEPS_DIR"] = os.path.abspath(CORE.relative_piolibdeps_path())
    cmd = ['platformio'] + list(args)

    if os.environ.get('ESPHOME_USE_SUBPROCESS') is None:

@@ -1,10 +1,9 @@
|
||||
import binascii
|
||||
import codecs
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
|
||||
from esphome import const
|
||||
from esphome.core import CORE
|
||||
@@ -226,72 +225,3 @@ class EsphomeStorageJSON(object):
|
||||
|
||||
def __eq__(self, o): # type: (Any) -> bool
|
||||
return isinstance(o, EsphomeStorageJSON) and self.as_dict() == o.as_dict()
|
||||
|
||||
@property
|
||||
def should_do_esphome_update_check(self): # type: () -> bool
|
||||
if self.last_update_check is None:
|
||||
return True
|
||||
return self.last_update_check + timedelta(days=3) < datetime.utcnow()
|
||||
|
||||
|
||||
class CheckForUpdateThread(threading.Thread):
|
||||
def __init__(self, path):
|
||||
threading.Thread.__init__(self)
|
||||
self._path = path
|
||||
|
||||
@property
|
||||
def docs_base(self):
|
||||
return 'https://beta.esphome.io' if 'b' in const.__version__ else \
|
||||
'https://esphome.io'
|
||||
|
||||
def fetch_remote_version(self):
|
||||
import requests
|
||||
|
||||
storage = EsphomeStorageJSON.load(self._path) or \
|
||||
EsphomeStorageJSON.get_default()
|
||||
if not storage.should_do_esphome_update_check:
|
||||
return storage
|
||||
|
||||
req = requests.get('{}/_static/version'.format(self.docs_base))
|
||||
req.raise_for_status()
|
||||
storage.remote_version = req.text.strip()
|
||||
storage.last_update_check = datetime.utcnow()
|
||||
storage.save(self._path)
|
||||
return storage
|
||||
|
||||
@staticmethod
|
||||
def format_version(ver):
|
||||
vstr = '.'.join(map(str, ver.version))
|
||||
if ver.prerelease:
|
||||
vstr += ver.prerelease[0] + str(ver.prerelease[1])
|
||||
return vstr
|
||||
|
||||
def cmp_versions(self, storage):
|
||||
# pylint: disable=no-name-in-module, import-error
|
||||
from distutils.version import StrictVersion
|
||||
|
||||
remote_version = StrictVersion(storage.remote_version)
|
||||
self_version = StrictVersion(const.__version__)
|
||||
if remote_version > self_version:
|
||||
_LOGGER.warning("*" * 80)
|
||||
_LOGGER.warning("A new version of ESPHome is available: %s (this is %s)",
|
||||
self.format_version(remote_version), self.format_version(self_version))
|
||||
_LOGGER.warning("Changelog: %s/changelog/index.html", self.docs_base)
|
||||
_LOGGER.warning("Update Instructions: %s/guides/faq.html"
|
||||
"#how-do-i-update-to-the-latest-version", self.docs_base)
|
||||
_LOGGER.warning("*" * 80)
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
storage = self.fetch_remote_version()
|
||||
self.cmp_versions(storage)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
|
||||
|
||||
def start_update_check_thread(path):
|
||||
# dummy call to strptime as python 2.7 has a bug with strptime when importing from threads
|
||||
datetime.strptime('20180101', '%Y%m%d')
|
||||
thread = CheckForUpdateThread(os.path.abspath(path))
|
||||
thread.start()
|
||||
return thread
|
||||
|
||||
@@ -6,6 +6,8 @@ import re
import subprocess
import sys

from esphome import const

_LOGGER = logging.getLogger(__name__)


@@ -147,3 +149,7 @@ def run_external_process(*cmd, **kwargs):
        if capture_stdout:
            # pylint: disable=lost-exception
            return sub_stdout.getvalue()


def is_dev_esphome_version():
    return 'dev' in const.__version__

esphome/voluptuous_schema.py (new file, +161 lines)
@@ -0,0 +1,161 @@
|
||||
import difflib
|
||||
import itertools
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from esphome.py_compat import string_types
|
||||
|
||||
|
||||
class ExtraKeysInvalid(vol.Invalid):
|
||||
def __init__(self, *arg, **kwargs):
|
||||
self.candidates = kwargs.pop('candidates')
|
||||
vol.Invalid.__init__(self, *arg, **kwargs)
|
||||
|
||||
|
||||
# pylint: disable=protected-access, unidiomatic-typecheck
|
||||
class _Schema(vol.Schema):
|
||||
"""Custom cv.Schema that prints similar keys on error."""
|
||||
def _compile_mapping(self, schema, invalid_msg=None):
|
||||
invalid_msg = invalid_msg or 'mapping value'
|
||||
|
||||
# Keys that may be required
|
||||
all_required_keys = set(key for key in schema
|
||||
if key is not vol.Extra and
|
||||
((self.required and not isinstance(key, (vol.Optional, vol.Remove)))
|
||||
or isinstance(key, vol.Required)))
|
||||
|
||||
# Keys that may have defaults
|
||||
all_default_keys = set(key for key in schema
|
||||
if isinstance(key, (vol.Required, vol.Optional)))
|
||||
|
||||
_compiled_schema = {}
|
||||
for skey, svalue in vol.iteritems(schema):
|
||||
new_key = self._compile(skey)
|
||||
new_value = self._compile(svalue)
|
||||
_compiled_schema[skey] = (new_key, new_value)
|
||||
|
||||
candidates = list(vol.schema_builder._iterate_mapping_candidates(_compiled_schema))
|
||||
|
||||
# After we have the list of candidates in the correct order, we want to apply some
|
||||
# optimization so that each
|
||||
# key in the data being validated will be matched against the relevant schema keys only.
|
||||
# No point in matching against different keys
|
||||
additional_candidates = []
|
||||
candidates_by_key = {}
|
||||
for skey, (ckey, cvalue) in candidates:
|
||||
if type(skey) in vol.primitive_types:
|
||||
candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue)))
|
||||
elif isinstance(skey, vol.Marker) and type(skey.schema) in vol.primitive_types:
|
||||
candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey, cvalue)))
|
||||
else:
|
||||
# These are wildcards such as 'int', 'str', 'Remove' and others which should be
|
||||
# applied to all keys
|
||||
additional_candidates.append((skey, (ckey, cvalue)))
|
||||
|
||||
key_names = []
|
||||
for skey in schema:
|
||||
if isinstance(skey, string_types):
|
||||
key_names.append(skey)
|
||||
elif isinstance(skey, vol.Marker) and isinstance(skey.schema, string_types):
|
||||
key_names.append(skey.schema)
|
||||
|
||||
def validate_mapping(path, iterable, out):
|
||||
required_keys = all_required_keys.copy()
|
||||
|
||||
# Build a map of all provided key-value pairs.
|
||||
# The type(out) is used to retain ordering in case a ordered
|
||||
# map type is provided as input.
|
||||
key_value_map = type(out)()
|
||||
for key, value in iterable:
|
||||
key_value_map[key] = value
|
||||
|
||||
# Insert default values for non-existing keys.
|
||||
for key in all_default_keys:
|
||||
if not isinstance(key.default, vol.Undefined) and \
|
||||
key.schema not in key_value_map:
|
||||
# A default value has been specified for this missing
|
||||
# key, insert it.
|
||||
key_value_map[key.schema] = key.default()
|
||||
|
||||
error = None
|
||||
errors = []
|
||||
for key, value in key_value_map.items():
|
||||
key_path = path + [key]
|
||||
remove_key = False
|
||||
|
||||
# Optimization. Validate against the matching key first, then fallback to the rest
|
||||
relevant_candidates = itertools.chain(candidates_by_key.get(key, []),
|
||||
additional_candidates)
|
||||
|
||||
# compare each given key/value against all compiled key/values
|
||||
# schema key, (compiled key, compiled value)
|
||||
for skey, (ckey, cvalue) in relevant_candidates:
|
||||
try:
|
||||
new_key = ckey(key_path, key)
|
||||
except vol.Invalid as e:
|
||||
if len(e.path) > len(key_path):
|
||||
raise
|
||||
if not error or len(e.path) > len(error.path):
|
||||
error = e
|
||||
continue
|
||||
# Backtracking is not performed once a key is selected, so if
|
||||
# the value is invalid we immediately throw an exception.
|
||||
exception_errors = []
|
||||
# check if the key is marked for removal
|
||||
is_remove = new_key is vol.Remove
|
||||
try:
|
||||
cval = cvalue(key_path, value)
|
||||
# include if it's not marked for removal
|
||||
if not is_remove:
|
||||
out[new_key] = cval
|
||||
else:
|
||||
remove_key = True
|
||||
continue
|
||||
except vol.MultipleInvalid as e:
|
||||
exception_errors.extend(e.errors)
|
||||
except vol.Invalid as e:
|
||||
exception_errors.append(e)
|
||||
|
||||
if exception_errors:
|
||||
if is_remove or remove_key:
|
||||
continue
|
||||
for err in exception_errors:
|
||||
if len(err.path) <= len(key_path):
|
||||
err.error_type = invalid_msg
|
||||
errors.append(err)
|
||||
# If there is a validation error for a required
|
||||
# key, this means that the key was provided.
|
||||
# Discard the required key so it does not
|
||||
# create an additional, noisy exception.
|
||||
required_keys.discard(skey)
|
||||
break
|
||||
|
||||
# Key and value okay, mark as found in case it was
|
||||
# a Required() field.
|
||||
required_keys.discard(skey)
|
||||
|
||||
break
|
||||
else:
|
||||
if remove_key:
|
||||
# remove key
|
||||
continue
|
||||
elif self.extra == vol.ALLOW_EXTRA:
|
||||
out[key] = value
|
||||
elif self.extra != vol.REMOVE_EXTRA:
|
||||
if isinstance(key, string_types) and key_names:
|
||||
matches = difflib.get_close_matches(key, key_names)
|
||||
errors.append(ExtraKeysInvalid('extra keys not allowed', key_path,
|
||||
candidates=matches))
|
||||
else:
|
||||
errors.append(vol.Invalid('extra keys not allowed', key_path))
|
||||
|
||||
# for any required keys left that weren't found and don't have defaults:
|
||||
for key in required_keys:
|
||||
msg = key.msg if hasattr(key, 'msg') and key.msg else 'required key not provided'
|
||||
errors.append(vol.RequiredFieldInvalid(msg, path + [key]))
|
||||
if errors:
|
||||
raise vol.MultipleInvalid(errors)
|
||||
|
||||
return out
|
||||
|
||||
return validate_mapping
|
||||
@@ -8,7 +8,7 @@ import voluptuous as vol

import esphome.config_validation as cv
from esphome.const import ESP_PLATFORMS, ESP_PLATFORM_ESP32, ESP_PLATFORM_ESP8266
from esphome.helpers import color
from esphome.helpers import color, get_bool_env
# pylint: disable=anomalous-backslash-in-string
from esphome.pins import ESP32_BOARD_PINS, ESP8266_BOARD_PINS
from esphome.py_compat import safe_input, text_type
@@ -50,8 +50,8 @@ BASE_CONFIG = u"""esphome:
  board: {board}

wifi:
  ssid: '{ssid}'
  password: '{psk}'
  ssid: "{ssid}"
  password: "{psk}"

# Enable logging
logger:

@@ -86,7 +86,7 @@ def wizard_write(path, **kwargs):
    storage.save(storage_path)

if os.getenv('ESPHOME_QUICKWIZARD', ''):
if get_bool_env('ESPHOME_QUICKWIZARD'):
    def sleep(time):
        pass
else:

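Note: the wizard hunk above switches the generated ssid/password values from single to double quotes. One plausible motivation, sketched below, is that a single-quoted YAML scalar breaks on an embedded apostrophe, while a double-quoted one survives it (the credentials here are made up):

    yaml_single = "ssid: '{0}'".format("Bob's WiFi")
    yaml_double = 'ssid: "{0}"'.format("Bob's WiFi")
    print(yaml_single)  # ssid: 'Bob's WiFi'  -> the inner quote ends the scalar early, invalid YAML
    print(yaml_double)  # ssid: "Bob's WiFi"  -> valid YAML, apostrophe kept intact
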
@@ -8,13 +8,12 @@ import re
|
||||
import shutil
|
||||
|
||||
from esphome.config import iter_components
|
||||
from esphome.const import ARDUINO_VERSION_ESP32_1_0_1, ARDUINO_VERSION_ESP32_DEV, \
|
||||
ARDUINO_VERSION_ESP8266_2_5_0, ARDUINO_VERSION_ESP8266_DEV, CONF_BOARD_FLASH_MODE, \
|
||||
CONF_BRANCH, CONF_COMMIT, CONF_ESPHOME, CONF_LOCAL, CONF_PLATFORMIO_OPTIONS, CONF_REPOSITORY, \
|
||||
CONF_TAG, CONF_USE_CUSTOM_CODE
|
||||
from esphome.const import ARDUINO_VERSION_ESP32_1_0_0, ARDUINO_VERSION_ESP8266_2_5_0, \
|
||||
ARDUINO_VERSION_ESP8266_DEV, CONF_BOARD_FLASH_MODE, CONF_BRANCH, CONF_COMMIT, CONF_ESPHOME, \
|
||||
CONF_LOCAL, CONF_PLATFORMIO_OPTIONS, CONF_REPOSITORY, CONF_TAG, CONF_USE_CUSTOM_CODE
|
||||
from esphome.core import CORE, EsphomeError
|
||||
from esphome.core_config import GITHUB_ARCHIVE_ZIP, LIBRARY_URI_REPO, VERSION_REGEX
|
||||
from esphome.helpers import mkdir_p, run_system_command
|
||||
from esphome.helpers import mkdir_p, run_system_command, symlink
|
||||
from esphome.pins import ESP8266_FLASH_SIZES, ESP8266_LD_SCRIPTS
|
||||
from esphome.py_compat import IS_PY3, string_types
|
||||
from esphome.storage_json import StorageJSON, storage_path
|
||||
@@ -105,7 +104,7 @@ def update_esphome_core_repo():
|
||||
# Git commit hash or tag cannot be updated
|
||||
return
|
||||
|
||||
esphome_core_path = CORE.relative_build_path('.piolibdeps', 'esphome-core')
|
||||
esphome_core_path = CORE.relative_piolibdeps_path('esphome-core')
|
||||
|
||||
rc, _, _ = run_system_command('git', '-C', esphome_core_path, '--help')
|
||||
if rc != 0:
|
||||
@@ -229,7 +228,7 @@ def symlink_esphome_core_version(esphome_core_version):
            do_write = False
    if do_write:
        mkdir_p(lib_path)
        os.symlink(src_path, dst_path)
        symlink(src_path, dst_path)
    else:
        # Remove symlink when changing back from local version
        if os.path.islink(dst_path):

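os.symlink() is swapped for a symlink() helper imported from esphome.helpers earlier in this diff; the helper itself is not shown. A plausible cross-platform sketch of such a wrapper (purely an assumption about its intent, e.g. coping with platforms where symlinks cannot be created):

    import os
    import shutil

    def symlink(src, dst):
        try:
            os.symlink(src, dst)
        except (OSError, NotImplementedError, AttributeError):
            # Fall back to copying when symlinks are unavailable
            # (e.g. Windows without the required privilege).
            if os.path.isdir(src):
                shutil.copytree(src, dst)
            else:
                shutil.copy(src, dst)
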
@@ -280,16 +279,18 @@ def gather_lib_deps():
    if CORE.is_esp32:
        lib_deps |= {
            'Preferences',  # Preferences helper
            'AsyncTCP@1.0.1',  # Pin AsyncTCP version
            'AsyncTCP@1.0.3',  # Pin AsyncTCP version
        }
        lib_deps.discard('AsyncTCP@1.0.3')

        # Manual fix for AsyncTCP
        if CORE.arduino_version in (ARDUINO_VERSION_ESP32_DEV, ARDUINO_VERSION_ESP32_1_0_1):
            lib_deps.add('AsyncTCP@1.0.3')
            lib_deps.discard('AsyncTCP@1.0.1')
        if CORE.arduino_version == ARDUINO_VERSION_ESP32_1_0_0:
            lib_deps.discard('AsyncTCP@1.0.3')
            lib_deps.add('AsyncTCP@1.0.1')
        lib_deps.add('ESPmDNS')
    elif CORE.is_esp8266:
        lib_deps.add('ESPAsyncTCP@1.1.3')
        lib_deps.add('ESP8266mDNS')

    # avoid changing build flags order
    lib_deps_l = list(lib_deps)
    lib_deps_l.sort()

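The net effect of the hunk above is an inverted default: AsyncTCP@1.0.3 becomes the baseline pin for ESP32 builds, and only the arduino-esp32 1.0.0 core is dropped back to AsyncTCP@1.0.1 (previously 1.0.1 was the default and only the dev/1.0.1 cores were upgraded). Restated as a tiny helper, for illustration only:

    def pick_asynctcp_pin(arduino_version, arduino_version_esp32_1_0_0):
        # Return the AsyncTCP lib_deps entry for a given arduino-esp32 core version.
        if arduino_version == arduino_version_esp32_1_0_0:
            return 'AsyncTCP@1.0.1'   # the 1.0.0 core still needs the older pin
        return 'AsyncTCP@1.0.3'       # new default for every other core version
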
@@ -340,14 +341,6 @@ def gather_build_flags():
        '-DUSE_WIFI_SIGNAL_SENSOR',
    }

    # avoid changing build flags order
    return list(sorted(list(build_flags)))


def get_ini_content():
    lib_deps = gather_lib_deps()
    build_flags = gather_build_flags()

    if CORE.is_esp8266 and CORE.board in ESP8266_FLASH_SIZES:
        flash_size = ESP8266_FLASH_SIZES[CORE.board]
        ld_scripts = ESP8266_LD_SCRIPTS[flash_size]

@@ -360,7 +353,19 @@ def get_ini_content():
            ld_script = ld_scripts[1]

    if ld_script is not None:
        build_flags.append('-Wl,-T{}'.format(ld_script))
        build_flags.add('-Wl,-T{}'.format(ld_script))

    if CORE.is_esp8266 and CORE.arduino_version in (ARDUINO_VERSION_ESP8266_DEV,
                                                    ARDUINO_VERSION_ESP8266_2_5_0):
        build_flags.add('-fno-exceptions')

    # avoid changing build flags order
    return list(sorted(list(build_flags)))


def get_ini_content():
    lib_deps = gather_lib_deps()
    build_flags = gather_build_flags()

    data = {
        'platform': CORE.arduino_version,

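Two details in the hunk above: build_flags is treated as a set while it is being collected (hence .add() replacing .append()), and the linker-script flag plus the new -fno-exceptions flag both end up in the sorted list returned at the end, keeping the generated platformio.ini deterministic. A toy illustration with a hypothetical linker-script name:

    build_flags = set()
    ld_script = 'eagle.flash.4m1m.ld'   # hypothetical script for a 4 MB ESP8266 board
    build_flags.add('-Wl,-T{}'.format(ld_script))
    build_flags.add('-fno-exceptions')
    print(list(sorted(build_flags)))    # ['-Wl,-Teagle.flash.4m1m.ld', '-fno-exceptions']
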
@@ -393,7 +398,6 @@ def get_ini_content():
        data['lib_ldf_mode'] = 'chain'
    REMOVABLE_LIBRARIES = [
        'ArduinoOTA',
        'ESPmDNS',
        'Update',
        'Wire',
        'FastLED',

@@ -502,12 +506,14 @@ def write_cpp(code_s):


def clean_build():
    for directory in ('.piolibdeps', '.pioenvs'):
        dir_path = CORE.relative_build_path(directory)
        if not os.path.isdir(dir_path):
            continue
        _LOGGER.info("Deleting %s", dir_path)
        shutil.rmtree(dir_path)
    pioenvs = CORE.relative_pioenvs_path()
    if os.path.isdir(pioenvs):
        _LOGGER.info("Deleting %s", pioenvs)
        shutil.rmtree(pioenvs)
    piolibdeps = CORE.relative_piolibdeps_path()
    if os.path.isdir(piolibdeps):
        _LOGGER.info("Deleting %s", piolibdeps)
        shutil.rmtree(piolibdeps)


GITIGNORE_CONTENT = """# Gitignore settings for ESPHome

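clean_build() now asks CORE for the two PlatformIO directories instead of joining '.piolibdeps' and '.pioenvs' onto the build path itself. The two helpers are not part of this diff; a sketch of what they presumably resolve to (directory layout assumed for illustration):

    import os

    class CoreSketch(object):
        build_path = '/config/esphome/my_node'   # example build directory

        def relative_build_path(self, *parts):
            return os.path.join(self.build_path, *parts)

        def relative_pioenvs_path(self, *parts):
            return self.relative_build_path('.pioenvs', *parts)

        def relative_piolibdeps_path(self, *parts):
            return self.relative_build_path('.piolibdeps', *parts)
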
@@ -12,7 +12,7 @@ import yaml.constructor

from esphome import core
from esphome.core import EsphomeError, HexInt, IPAddress, Lambda, MACAddress, TimePeriod
from esphome.py_compat import string_types, text_type
from esphome.py_compat import string_types, text_type, IS_PY2

_LOGGER = logging.getLogger(__name__)

@@ -20,6 +20,8 @@ _LOGGER = logging.getLogger(__name__)
# let's not reinvent the wheel here

SECRET_YAML = u'secrets.yaml'
_SECRET_CACHE = {}
_SECRET_VALUES = {}


class NodeListClass(list):

@@ -42,6 +44,12 @@ class SafeLineLoader(yaml.SafeLoader):  # pylint: disable=too-many-ancestors


def load_yaml(fname):
    _SECRET_VALUES.clear()
    _SECRET_CACHE.clear()
    return _load_yaml_internal(fname)


def _load_yaml_internal(fname):
    """Load a YAML file."""
    try:
        with codecs.open(fname, encoding='utf-8') as conf_file:

@@ -193,7 +201,7 @@ def _include_yaml(loader, node):
        device_tracker: !include device_tracker.yaml
    """
    fname = os.path.join(os.path.dirname(loader.name), node.value)
    return _add_reference(load_yaml(fname), loader, node)
    return _add_reference(_load_yaml_internal(fname), loader, node)


def _is_file_valid(name):

@@ -217,7 +225,7 @@ def _include_dir_named_yaml(loader, node):
    loc = os.path.join(os.path.dirname(loader.name), node.value)
    for fname in _find_files(loc, '*.yaml'):
        filename = os.path.splitext(os.path.basename(fname))[0]
        mapping[filename] = load_yaml(fname)
        mapping[filename] = _load_yaml_internal(fname)
    return _add_reference(mapping, loader, node)

@@ -228,7 +236,7 @@ def _include_dir_merge_named_yaml(loader, node):
    for fname in _find_files(loc, '*.yaml'):
        if os.path.basename(fname) == SECRET_YAML:
            continue
        loaded_yaml = load_yaml(fname)
        loaded_yaml = _load_yaml_internal(fname)
        if isinstance(loaded_yaml, dict):
            mapping.update(loaded_yaml)
    return _add_reference(mapping, loader, node)

@@ -237,7 +245,7 @@ def _include_dir_merge_named_yaml(loader, node):
def _include_dir_list_yaml(loader, node):
    """Load multiple files from directory as a list."""
    loc = os.path.join(os.path.dirname(loader.name), node.value)
    return [load_yaml(f) for f in _find_files(loc, '*.yaml')
    return [_load_yaml_internal(f) for f in _find_files(loc, '*.yaml')
            if os.path.basename(f) != SECRET_YAML]

@@ -248,20 +256,29 @@ def _include_dir_merge_list_yaml(loader, node):
    for fname in _find_files(path, '*.yaml'):
        if os.path.basename(fname) == SECRET_YAML:
            continue
        loaded_yaml = load_yaml(fname)
        loaded_yaml = _load_yaml_internal(fname)
        if isinstance(loaded_yaml, list):
            merged_list.extend(loaded_yaml)
    return _add_reference(merged_list, loader, node)


def is_secret(value):
    try:
        return _SECRET_VALUES[text_type(value)]
    except (KeyError, ValueError):
        return None


# pylint: disable=protected-access
def _secret_yaml(loader, node):
    """Load secrets and embed it into the configuration YAML."""
    secret_path = os.path.join(os.path.dirname(loader.name), SECRET_YAML)
    secrets = load_yaml(secret_path)
    secrets = _load_yaml_internal(secret_path)
    if node.value not in secrets:
        raise EsphomeError(u"Secret {} not defined".format(node.value))
    return secrets[node.value]
    val = secrets[node.value]
    _SECRET_VALUES[text_type(val)] = node.value
    return val


def _lambda(loader, node):

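With the change above, every value resolved through !secret is remembered in _SECRET_VALUES, keyed by its resolved text, and is_secret() is the reverse lookup used later when dumping. The mechanism in isolation (secret data made up; the real code keys on text_type for Python 2/3 compatibility):

    _SECRET_VALUES = {}

    def record_secret(resolved_value, secret_name):
        # mirrors what _secret_yaml() does after reading secrets.yaml
        _SECRET_VALUES[str(resolved_value)] = secret_name
        return resolved_value

    def is_secret(value):
        return _SECRET_VALUES.get(str(value))

    record_secret('hunter2', 'wifi_password')
    print(is_secret('hunter2'))        # -> 'wifi_password'
    print(is_secret('not-a-secret'))   # -> None
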
@@ -310,17 +327,27 @@ def represent_odict(dump, tag, mapping, flow_style=None):
    return node


def represent_secret(value):
    return yaml.ScalarNode(tag=u'!secret', value=_SECRET_VALUES[value])


def unicode_representer(_, uni):
    if is_secret(uni):
        return represent_secret(uni)
    node = yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=uni)
    return node


def hex_int_representer(_, data):
    if is_secret(data):
        return represent_secret(data)
    node = yaml.ScalarNode(tag=u'tag:yaml.org,2002:int', value=str(data))
    return node


def stringify_representer(_, data):
    if is_secret(data):
        return represent_secret(data)
    node = yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=str(data))
    return node

@@ -345,18 +372,18 @@ def represent_time_period(dumper, data):


def represent_lambda(_, data):
    if is_secret(data.value):
        return represent_secret(data.value)
    node = yaml.ScalarNode(tag='!lambda', value=data.value, style='|')
    return node


def represent_id(_, data):
    if is_secret(data.id):
        return represent_secret(data.id)
    return yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=data.id)


def represent_uuid(_, data):
    return yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=str(data))


yaml.SafeDumper.add_representer(
    OrderedDict,
    lambda dumper, value:

@@ -369,11 +396,13 @@ yaml.SafeDumper.add_representer(
    dumper.represent_sequence('tag:yaml.org,2002:seq', value)
)

yaml.SafeDumper.add_representer(text_type, unicode_representer)
yaml.SafeDumper.add_representer(str, unicode_representer)
if IS_PY2:
    yaml.SafeDumper.add_representer(unicode, unicode_representer)
yaml.SafeDumper.add_representer(HexInt, hex_int_representer)
yaml.SafeDumper.add_representer(IPAddress, stringify_representer)
yaml.SafeDumper.add_representer(MACAddress, stringify_representer)
yaml.SafeDumper.add_multi_representer(TimePeriod, represent_time_period)
yaml.SafeDumper.add_multi_representer(Lambda, represent_lambda)
yaml.SafeDumper.add_multi_representer(core.ID, represent_id)
yaml.SafeDumper.add_multi_representer(uuid.UUID, represent_uuid)
yaml.SafeDumper.add_multi_representer(uuid.UUID, stringify_representer)

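Taken together, the representer changes mean that when a loaded configuration is dumped back to YAML, any scalar whose text matches a recorded secret is re-emitted as a !secret reference rather than as the plain value. A minimal round-trip sketch of that idea using the same PyYAML mechanism (standalone; secret name and value are invented):

    import yaml

    _SECRET_VALUES = {'hunter2': 'wifi_password'}

    def unicode_representer(_, uni):
        if uni in _SECRET_VALUES:
            return yaml.ScalarNode(tag=u'!secret', value=_SECRET_VALUES[uni])
        return yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=uni)

    yaml.SafeDumper.add_representer(str, unicode_representer)
    print(yaml.dump({'wifi': {'password': 'hunter2'}},
                    Dumper=yaml.SafeDumper, default_flow_style=False))
    # wifi:
    #   password: !secret wifi_password
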
Some files were not shown because too many files have changed in this diff.