Mirror of https://github.com/esphome/esphome.git, synced 2025-10-30 14:43:51 +00:00
[ruff] Enable SIM rules and fix code simplification violations (#9872)
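The hunks below are the mechanical clean-ups that ruff's SIM (flake8-simplify) rules ask for: collapsing nested `if` statements, returning boolean expressions directly, replacing `try`/`except`/`pass` with `contextlib.suppress`, and iterating dicts without `.keys()`. As a minimal sketch of those patterns (illustrative code only, not taken from this commit; the dict and keys are made up), the recurring rewrites look like this:

# Illustrative sketch of the SIM patterns applied in this commit; the dict and
# keys below are made up for the example.
import contextlib

config = {"mode": "NATIVE", "level": "DEBUG"}

# SIM102: merge a nested `if` into a single condition.
if "mode" in config and config["mode"] != "NATIVE":
    raise ValueError("mode must be NATIVE")

# SIM105: replace try/except/pass with contextlib.suppress.
with contextlib.suppress(KeyError):
    print(config["frames"])

# SIM118: iterate the dict directly instead of calling .keys().
for key in config:
    print(key)


# SIM103: return the boolean expression instead of if/return True/return False.
def logging_enabled(cfg: dict) -> bool:
    return cfg.get("level") != "NONE"


print(logging_enabled(config))  # True for the sample config above

Where the original structure was kept on purpose, the commit silences the rule instead, as the `# noqa: SIM103` and `# noqa: SIM105` comments in the hunks below show.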
@@ -119,9 +119,7 @@ def mqtt_logging_enabled(mqtt_config):
         return False
     if CONF_TOPIC not in log_topic:
         return False
-    if log_topic.get(CONF_LEVEL, None) == "NONE":
-        return False
-    return True
+    return log_topic.get(CONF_LEVEL, None) != "NONE"


 def get_port_type(port):
@@ -14,6 +14,8 @@ with warnings.catch_warnings():
     from aioesphomeapi import APIClient, parse_log_message
     from aioesphomeapi.log_runner import async_run

+import contextlib
+
 from esphome.const import CONF_KEY, CONF_PASSWORD, CONF_PORT, __version__
 from esphome.core import CORE

@@ -66,7 +68,5 @@ async def async_run_logs(config: dict[str, Any], address: str) -> None:

 def run_logs(config: dict[str, Any], address: str) -> None:
     """Run the logs command."""
-    try:
-        asyncio.run(async_run_logs(config, address))
-    except KeyboardInterrupt:
-        pass
+    with contextlib.suppress(KeyboardInterrupt):
+        asyncio.run(async_run_logs(config, address))
@@ -22,9 +22,8 @@ def validate_id(config):
     if CONF_CAN_ID in config:
         can_id = config[CONF_CAN_ID]
         id_ext = config[CONF_USE_EXTENDED_ID]
-        if not id_ext:
-            if can_id > 0x7FF:
-                raise cv.Invalid("Standard IDs must be 11 Bit (0x000-0x7ff / 0-2047)")
+        if not id_ext and can_id > 0x7FF:
+            raise cv.Invalid("Standard IDs must be 11 Bit (0x000-0x7ff / 0-2047)")
     return config


@@ -953,14 +953,16 @@ def _write_idf_component_yml():

 # Called by writer.py
 def copy_files():
-    if CORE.using_arduino:
-        if "partitions.csv" not in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES]:
-            write_file_if_changed(
-                CORE.relative_build_path("partitions.csv"),
-                get_arduino_partition_csv(
-                    CORE.platformio_options.get("board_upload.flash_size")
-                ),
-            )
+    if (
+        CORE.using_arduino
+        and "partitions.csv" not in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES]
+    ):
+        write_file_if_changed(
+            CORE.relative_build_path("partitions.csv"),
+            get_arduino_partition_csv(
+                CORE.platformio_options.get("board_upload.flash_size")
+            ),
+        )
     if CORE.using_esp_idf:
         _write_sdkconfig()
         _write_idf_component_yml()
@@ -140,20 +140,22 @@ VALUE_TYPES = {


 def validate_char_on_write(char_config):
-    if CONF_ON_WRITE in char_config:
-        if not char_config[CONF_WRITE] and not char_config[CONF_WRITE_NO_RESPONSE]:
-            raise cv.Invalid(
-                f"{CONF_ON_WRITE} requires the {CONF_WRITE} or {CONF_WRITE_NO_RESPONSE} property to be set"
-            )
+    if (
+        CONF_ON_WRITE in char_config
+        and not char_config[CONF_WRITE]
+        and not char_config[CONF_WRITE_NO_RESPONSE]
+    ):
+        raise cv.Invalid(
+            f"{CONF_ON_WRITE} requires the {CONF_WRITE} or {CONF_WRITE_NO_RESPONSE} property to be set"
+        )
     return char_config


 def validate_descriptor(desc_config):
-    if CONF_ON_WRITE in desc_config:
-        if not desc_config[CONF_WRITE]:
-            raise cv.Invalid(
-                f"{CONF_ON_WRITE} requires the {CONF_WRITE} property to be set"
-            )
+    if CONF_ON_WRITE in desc_config and not desc_config[CONF_WRITE]:
+        raise cv.Invalid(
+            f"{CONF_ON_WRITE} requires the {CONF_WRITE} property to be set"
+        )
     if CONF_MAX_LENGTH not in desc_config:
         value = desc_config[CONF_VALUE][CONF_DATA]
         if cg.is_template(value):
@@ -294,9 +294,8 @@ async def to_code(config):
             )
         )

-    if get_esp32_variant() == VARIANT_ESP32:
-        if CONF_IIR_FILTER in config:
-            cg.add(touch.set_iir_filter(config[CONF_IIR_FILTER]))
+    if get_esp32_variant() == VARIANT_ESP32 and CONF_IIR_FILTER in config:
+        cg.add(touch.set_iir_filter(config[CONF_IIR_FILTER]))

     if get_esp32_variant() == VARIANT_ESP32S2 or get_esp32_variant() == VARIANT_ESP32S3:
         if CONF_FILTER_MODE in config:
@@ -245,7 +245,7 @@ async def to_code(config):
     if ver <= cv.Version(2, 3, 0):
         # No ld script support
         ld_script = None
-    if ver <= cv.Version(2, 4, 2):
+    elif ver <= cv.Version(2, 4, 2):
         # Old ld script path
         ld_script = ld_scripts[0]
     else:
@@ -112,7 +112,7 @@ def _is_framework_spi_polling_mode_supported():
             return True
         if cv.Version(5, 3, 0) > framework_version >= cv.Version(5, 2, 1):
             return True
-        if cv.Version(5, 2, 0) > framework_version >= cv.Version(5, 1, 4):
+        if cv.Version(5, 2, 0) > framework_version >= cv.Version(5, 1, 4):  # noqa: SIM103
             return True
         return False
     if CORE.using_arduino:
@@ -55,9 +55,7 @@ CONFIG_SCHEMA = cv.All(
 async def to_code(config):
     var = await fastled_base.new_fastled_light(config)

-    rgb_order = cg.RawExpression(
-        config[CONF_RGB_ORDER] if CONF_RGB_ORDER in config else "RGB"
-    )
+    rgb_order = cg.RawExpression(config.get(CONF_RGB_ORDER, "RGB"))
     data_rate = None

     if CONF_DATA_RATE in config:
@@ -116,7 +116,7 @@ GRAPH_SCHEMA = cv.Schema(


 def _relocate_fields_to_subfolder(config, subfolder, subschema):
-    fields = [k.schema for k in subschema.schema.keys()]
+    fields = [k.schema for k in subschema.schema]
     fields.remove(CONF_ID)
     if subfolder in config:
         # Ensure no ambiguous fields in base of config
@@ -70,9 +70,8 @@ def validate_url(value):
 def validate_ssl_verification(config):
     error_message = ""

-    if CORE.is_esp32:
-        if not CORE.using_esp_idf and config[CONF_VERIFY_SSL]:
-            error_message = "ESPHome supports certificate verification only via ESP-IDF"
+    if CORE.is_esp32 and not CORE.using_esp_idf and config[CONF_VERIFY_SSL]:
+        error_message = "ESPHome supports certificate verification only via ESP-IDF"

     if CORE.is_rp2040 and config[CONF_VERIFY_SSL]:
         error_message = "ESPHome does not support certificate verification on RP2040"
@@ -66,11 +66,10 @@ PROTOCOL_NAMES = {

 def _validate(config):
     for conf, models in SUPPORTED_OPTIONS.items():
-        if conf in config:
-            if config[CONF_MODEL] not in models:
-                raise cv.Invalid(
-                    f"{conf} is only available on {' and '.join(models)}, not {config[CONF_MODEL]}"
-                )
+        if conf in config and config[CONF_MODEL] not in models:
+            raise cv.Invalid(
+                f"{conf} is only available on {' and '.join(models)}, not {config[CONF_MODEL]}"
+            )
     return config


@@ -243,10 +243,7 @@ def _final_validate(_):


 def use_legacy():
-    if CORE.using_esp_idf:
-        if not _use_legacy_driver:
-            return False
-    return True
+    return not (CORE.using_esp_idf and not _use_legacy_driver)


 FINAL_VALIDATE_SCHEMA = _final_validate
@@ -44,9 +44,8 @@ PDM_VARIANTS = [esp32.const.VARIANT_ESP32, esp32.const.VARIANT_ESP32S3]
 def _validate_esp32_variant(config):
     variant = esp32.get_esp32_variant()
     if config[CONF_ADC_TYPE] == "external":
-        if config[CONF_PDM]:
-            if variant not in PDM_VARIANTS:
-                raise cv.Invalid(f"{variant} does not support PDM")
+        if config[CONF_PDM] and variant not in PDM_VARIANTS:
+            raise cv.Invalid(f"{variant} does not support PDM")
         return config
     if config[CONF_ADC_TYPE] == "internal":
         if variant not in INTERNAL_ADC_VARIANTS:
@@ -122,9 +121,8 @@ CONFIG_SCHEMA = cv.All(


 def _final_validate(config):
-    if not use_legacy():
-        if config[CONF_ADC_TYPE] == "internal":
-            raise cv.Invalid("Internal ADC is only compatible with legacy i2s driver.")
+    if not use_legacy() and config[CONF_ADC_TYPE] == "internal":
+        raise cv.Invalid("Internal ADC is only compatible with legacy i2s driver.")


 FINAL_VALIDATE_SCHEMA = _final_validate
@@ -138,9 +138,10 @@ def _validate(config):
     ]:
         raise cv.Invalid("Selected model can't run on ESP8266.")

-    if model == "CUSTOM":
-        if CONF_INIT_SEQUENCE not in config or CONF_DIMENSIONS not in config:
-            raise cv.Invalid("CUSTOM model requires init_sequence and dimensions")
+    if model == "CUSTOM" and (
+        CONF_INIT_SEQUENCE not in config or CONF_DIMENSIONS not in config
+    ):
+        raise cv.Invalid("CUSTOM model requires init_sequence and dimensions")

     return config

@@ -1,5 +1,6 @@
 from __future__ import annotations

+import contextlib
 import hashlib
 import io
 import logging
@@ -174,9 +175,8 @@ class ImageGrayscale(ImageEncoder):
             b = 1
         if self.invert_alpha:
             b ^= 0xFF
-        if self.transparency == CONF_ALPHA_CHANNEL:
-            if a != 0xFF:
-                b = a
+        if self.transparency == CONF_ALPHA_CHANNEL and a != 0xFF:
+            b = a
         self.data[self.index] = b
         self.index += 1

@@ -672,10 +672,8 @@ async def write_image(config, all_frames=False):
     invert_alpha = config[CONF_INVERT_ALPHA]
     frame_count = 1
     if all_frames:
-        try:
+        with contextlib.suppress(AttributeError):
             frame_count = image.n_frames
-        except AttributeError:
-            pass
         if frame_count <= 1:
             _LOGGER.warning("Image file %s has no animation frames", path)

@@ -27,14 +27,13 @@ def validate_logger(config):
     logger_conf = fv.full_config.get()[CONF_LOGGER]
     if logger_conf[CONF_BAUD_RATE] == 0:
         raise cv.Invalid("improv_serial requires the logger baud_rate to be not 0")
-    if CORE.using_esp_idf:
-        if (
-            logger_conf[CONF_HARDWARE_UART] == USB_CDC
-            and get_esp32_variant() == VARIANT_ESP32S3
-        ):
-            raise cv.Invalid(
-                "improv_serial does not support the selected logger hardware_uart"
-            )
+    if CORE.using_esp_idf and (
+        logger_conf[CONF_HARDWARE_UART] == USB_CDC
+        and get_esp32_variant() == VARIANT_ESP32S3
+    ):
+        raise cv.Invalid(
+            "improv_serial does not support the selected logger hardware_uart"
+        )
     return config


@@ -78,11 +78,8 @@ def validate_model_config(config):
     model = config[CONF_MODEL]

     for key in config:
-        if key in SENSOR_MODEL_OPTIONS:
-            if model not in SENSOR_MODEL_OPTIONS[key]:
-                raise cv.Invalid(
-                    f"Device model '{model}' does not support '{key}' sensor"
-                )
+        if key in SENSOR_MODEL_OPTIONS and model not in SENSOR_MODEL_OPTIONS[key]:
+            raise cv.Invalid(f"Device model '{model}' does not support '{key}' sensor")

     tempco = config[CONF_TEMPERATURE_COEFFICIENT]
     if tempco > 0 and model not in ["INA228", "INA229"]:
@@ -56,7 +56,8 @@ async def to_code(config):
         sens = await text_sensor.new_text_sensor(mac_address_config)
         cg.add(ld2450_component.set_mac_text_sensor(sens))
     for n in range(MAX_TARGETS):
-        if direction_conf := config.get(f"target_{n + 1}"):
-            if direction_config := direction_conf.get(CONF_DIRECTION):
-                sens = await text_sensor.new_text_sensor(direction_config)
-                cg.add(ld2450_component.set_direction_text_sensor(n, sens))
+        if (direction_conf := config.get(f"target_{n + 1}")) and (
+            direction_config := direction_conf.get(CONF_DIRECTION)
+        ):
+            sens = await text_sensor.new_text_sensor(direction_config)
+            cg.add(ld2450_component.set_direction_text_sensor(n, sens))
@@ -526,7 +526,7 @@ def validate_effects(allowed_effects):
     errors = []
     names = set()
     for i, x in enumerate(value):
-        key = next(it for it in x.keys())
+        key = next(it for it in x)
         if key not in allowed_effects:
             errors.append(
                 cv.Invalid(
@@ -346,14 +346,13 @@ async def to_code(config):
     if config.get(CONF_ESP8266_STORE_LOG_STRINGS_IN_FLASH):
         cg.add_build_flag("-DUSE_STORE_LOG_STR_IN_FLASH")

-    if CORE.using_arduino:
-        if config[CONF_HARDWARE_UART] == USB_CDC:
-            cg.add_build_flag("-DARDUINO_USB_CDC_ON_BOOT=1")
-            if CORE.is_esp32 and get_esp32_variant() in (
-                VARIANT_ESP32C3,
-                VARIANT_ESP32C6,
-            ):
-                cg.add_build_flag("-DARDUINO_USB_MODE=1")
+    if CORE.using_arduino and config[CONF_HARDWARE_UART] == USB_CDC:
+        cg.add_build_flag("-DARDUINO_USB_CDC_ON_BOOT=1")
+        if CORE.is_esp32 and get_esp32_variant() in (
+            VARIANT_ESP32C3,
+            VARIANT_ESP32C6,
+        ):
+            cg.add_build_flag("-DARDUINO_USB_MODE=1")

     if CORE.using_esp_idf:
         if config[CONF_HARDWARE_UART] == USB_CDC:
@@ -201,9 +201,8 @@ def final_validation(configs):
     multi_conf_validate(configs)
     global_config = full_config.get()
     for config in configs:
-        if pages := config.get(CONF_PAGES):
-            if all(p[df.CONF_SKIP] for p in pages):
-                raise cv.Invalid("At least one page must not be skipped")
+        if (pages := config.get(CONF_PAGES)) and all(p[df.CONF_SKIP] for p in pages):
+            raise cv.Invalid("At least one page must not be skipped")
         for display_id in config[df.CONF_DISPLAYS]:
             path = global_config.get_path_for_id(display_id)[:-1]
             display = global_config.get_config_for_path(path)
@@ -28,9 +28,10 @@ CONF_HAS_PULLDOWNS = "has_pulldowns"


 def check_keys(obj):
-    if CONF_KEYS in obj:
-        if len(obj[CONF_KEYS]) != len(obj[CONF_ROWS]) * len(obj[CONF_COLUMNS]):
-            raise cv.Invalid("The number of key codes must equal the number of buttons")
+    if CONF_KEYS in obj and len(obj[CONF_KEYS]) != len(obj[CONF_ROWS]) * len(
+        obj[CONF_COLUMNS]
+    ):
+        raise cv.Invalid("The number of key codes must equal the number of buttons")
     return obj


@@ -124,11 +124,10 @@ async def to_code(config):

     if task_stack_in_psram := config.get(CONF_TASK_STACK_IN_PSRAM):
         cg.add(var.set_task_stack_in_psram(task_stack_in_psram))
-        if task_stack_in_psram:
-            if config[CONF_TASK_STACK_IN_PSRAM]:
-                esp32.add_idf_sdkconfig_option(
-                    "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
-                )
+        if task_stack_in_psram and config[CONF_TASK_STACK_IN_PSRAM]:
+            esp32.add_idf_sdkconfig_option(
+                "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
+            )

     for speaker_config in config[CONF_SOURCE_SPEAKERS]:
         source_speaker = cg.new_Pvariable(speaker_config[CONF_ID])
@@ -63,11 +63,13 @@ def _validate(config):
             raise cv.Invalid(
                 f"{axis}: {CONF_RESOLUTION} cannot be {res} with {CONF_TEMPERATURE_COMPENSATION} enabled"
             )
-    if config[CONF_HALLCONF] == 0xC:
-        if (config[CONF_OVERSAMPLING], config[CONF_FILTER]) in [(0, 0), (1, 0), (0, 1)]:
-            raise cv.Invalid(
-                f"{CONF_OVERSAMPLING}=={config[CONF_OVERSAMPLING]} and {CONF_FILTER}=={config[CONF_FILTER]} not allowed with {CONF_HALLCONF}=={config[CONF_HALLCONF]:#02x}"
-            )
+    if config[CONF_HALLCONF] == 0xC and (
+        config[CONF_OVERSAMPLING],
+        config[CONF_FILTER],
+    ) in [(0, 0), (1, 0), (0, 1)]:
+        raise cv.Invalid(
+            f"{CONF_OVERSAMPLING}=={config[CONF_OVERSAMPLING]} and {CONF_FILTER}=={config[CONF_FILTER]} not allowed with {CONF_HALLCONF}=={config[CONF_HALLCONF]:#02x}"
+        )
     return config


@@ -56,12 +56,13 @@ def _final_validate(config):
     for binary_sensor in binary_sensors:
         if binary_sensor.get(CONF_MPR121_ID) == config[CONF_ID]:
             max_touch_channel = max(max_touch_channel, binary_sensor[CONF_CHANNEL])
-    if max_touch_channel_in_config := config.get(CONF_MAX_TOUCH_CHANNEL):
-        if max_touch_channel != max_touch_channel_in_config:
-            raise cv.Invalid(
-                "Max touch channel must equal the highest binary sensor channel or be removed for auto calculation",
-                path=[CONF_MAX_TOUCH_CHANNEL],
-            )
+    if (
+        max_touch_channel_in_config := config.get(CONF_MAX_TOUCH_CHANNEL)
+    ) and max_touch_channel != max_touch_channel_in_config:
+        raise cv.Invalid(
+            "Max touch channel must equal the highest binary sensor channel or be removed for auto calculation",
+            path=[CONF_MAX_TOUCH_CHANNEL],
+        )
     path = fconf.get_path_for_id(config[CONF_ID])[:-1]
     this_config = fconf.get_config_for_path(path)
     this_config[CONF_MAX_TOUCH_CHANNEL] = max_touch_channel
@@ -25,9 +25,9 @@ async def new_openthermnumber(config: dict[str, Any]) -> cg.Pvariable:
     await cg.register_component(var, config)
     input.generate_setters(var, config)

-    if (initial_value := config.get(CONF_INITIAL_VALUE, None)) is not None:
+    if (initial_value := config.get(CONF_INITIAL_VALUE)) is not None:
         cg.add(var.set_initial_value(initial_value))
-    if (restore_value := config.get(CONF_RESTORE_VALUE, None)) is not None:
+    if (restore_value := config.get(CONF_RESTORE_VALUE)) is not None:
         cg.add(var.set_restore_value(restore_value))

     return var
@@ -79,9 +79,8 @@ def set_sdkconfig_options(config):
             "CONFIG_OPENTHREAD_NETWORK_PSKC", f"{pskc:X}".lower()
         )

-    if force_dataset := config.get(CONF_FORCE_DATASET):
-        if force_dataset:
-            cg.add_define("USE_OPENTHREAD_FORCE_DATASET")
+    if config.get(CONF_FORCE_DATASET):
+        cg.add_define("USE_OPENTHREAD_FORCE_DATASET")

     add_idf_sdkconfig_option("CONFIG_OPENTHREAD_DNS64_CLIENT", True)
     add_idf_sdkconfig_option("CONFIG_OPENTHREAD_SRP_CLIENT", True)
@@ -89,9 +89,10 @@ def validate_(config):
             raise cv.Invalid("No sensors or binary sensors to encrypt")
         elif config[CONF_ROLLING_CODE_ENABLE]:
             raise cv.Invalid("Rolling code requires an encryption key")
-    if config[CONF_PING_PONG_ENABLE]:
-        if not any(CONF_ENCRYPTION in p for p in config.get(CONF_PROVIDERS) or ()):
-            raise cv.Invalid("Ping-pong requires at least one encrypted provider")
+    if config[CONF_PING_PONG_ENABLE] and not any(
+        CONF_ENCRYPTION in p for p in config.get(CONF_PROVIDERS) or ()
+    ):
+        raise cv.Invalid("Ping-pong requires at least one encrypted provider")
     return config


@@ -49,12 +49,15 @@ def validate_internal_filter(value):
             [CONF_USE_PCNT],
         )

-    if CORE.is_esp32 and use_pcnt:
-        if value.get(CONF_INTERNAL_FILTER).total_microseconds > 13:
-            raise cv.Invalid(
-                "Maximum internal filter value when using ESP32 hardware PCNT is 13us",
-                [CONF_INTERNAL_FILTER],
-            )
+    if (
+        CORE.is_esp32
+        and use_pcnt
+        and value.get(CONF_INTERNAL_FILTER).total_microseconds > 13
+    ):
+        raise cv.Invalid(
+            "Maximum internal filter value when using ESP32 hardware PCNT is 13us",
+            [CONF_INTERNAL_FILTER],
+        )

     return value

@@ -73,9 +73,8 @@ def map_sequence(value):

 def _validate(config):
     chip = DriverChip.chips[config[CONF_MODEL]]
-    if not chip.initsequence:
-        if CONF_INIT_SEQUENCE not in config:
-            raise cv.Invalid(f"{chip.name} model requires init_sequence")
+    if not chip.initsequence and CONF_INIT_SEQUENCE not in config:
+        raise cv.Invalid(f"{chip.name} model requires init_sequence")
     return config


@@ -24,9 +24,8 @@ QwiicPIRComponent = qwiic_pir_ns.class_(


 def validate_no_debounce_unless_native(config):
-    if CONF_DEBOUNCE in config:
-        if config[CONF_DEBOUNCE_MODE] != "NATIVE":
-            raise cv.Invalid("debounce can only be set if debounce_mode is NATIVE")
+    if CONF_DEBOUNCE in config and config[CONF_DEBOUNCE_MODE] != "NATIVE":
+        raise cv.Invalid("debounce can only be set if debounce_mode is NATIVE")
     return config


@@ -1062,12 +1062,11 @@ def validate_raw_alternating(value):
     last_negative = None
     for i, val in enumerate(value):
        this_negative = val < 0
-        if i != 0:
-            if this_negative == last_negative:
-                raise cv.Invalid(
-                    f"Values must alternate between being positive and negative, please see index {i} and {i + 1}",
-                    [i],
-                )
+        if i != 0 and this_negative == last_negative:
+            raise cv.Invalid(
+                f"Values must alternate between being positive and negative, please see index {i} and {i + 1}",
+                [i],
+            )
         last_negative = this_negative
     return value

@@ -90,11 +90,10 @@ async def to_code(config):

     if task_stack_in_psram := config.get(CONF_TASK_STACK_IN_PSRAM):
         cg.add(var.set_task_stack_in_psram(task_stack_in_psram))
-        if task_stack_in_psram:
-            if config[CONF_TASK_STACK_IN_PSRAM]:
-                esp32.add_idf_sdkconfig_option(
-                    "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
-                )
+        if task_stack_in_psram and config[CONF_TASK_STACK_IN_PSRAM]:
+            esp32.add_idf_sdkconfig_option(
+                "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
+            )

     cg.add(var.set_target_bits_per_sample(config[CONF_BITS_PER_SAMPLE]))
     cg.add(var.set_target_sample_rate(config[CONF_SAMPLE_RATE]))
@@ -140,7 +140,6 @@ async def to_code(config):
     cg.add(var.set_vsync_front_porch(config[CONF_VSYNC_FRONT_PORCH]))
     cg.add(var.set_pclk_inverted(config[CONF_PCLK_INVERTED]))
     cg.add(var.set_pclk_frequency(config[CONF_PCLK_FREQUENCY]))
-    index = 0
     dpins = []
     if CONF_RED in config[CONF_DATA_PINS]:
         red_pins = config[CONF_DATA_PINS][CONF_RED]
@@ -158,10 +157,9 @@ async def to_code(config):
         dpins = dpins[8:16] + dpins[0:8]
     else:
         dpins = config[CONF_DATA_PINS]
-    for pin in dpins:
+    for index, pin in enumerate(dpins):
         data_pin = await cg.gpio_pin_expression(pin)
         cg.add(var.add_data_pin(data_pin, index))
-        index += 1

     if enable_pin := config.get(CONF_ENABLE_PIN):
         enable = await cg.gpio_pin_expression(enable_pin)
@@ -204,13 +204,14 @@ def _validate_pipeline(config):


 def _validate_repeated_speaker(config):
-    if (announcement_config := config.get(CONF_ANNOUNCEMENT_PIPELINE)) and (
-        media_config := config.get(CONF_MEDIA_PIPELINE)
+    if (
+        (announcement_config := config.get(CONF_ANNOUNCEMENT_PIPELINE))
+        and (media_config := config.get(CONF_MEDIA_PIPELINE))
+        and announcement_config[CONF_SPEAKER] == media_config[CONF_SPEAKER]
     ):
-        if announcement_config[CONF_SPEAKER] == media_config[CONF_SPEAKER]:
-            raise cv.Invalid(
-                "The announcement and media pipelines cannot use the same speaker. Use the `mixer` speaker component to create two source speakers."
-            )
+        raise cv.Invalid(
+            "The announcement and media pipelines cannot use the same speaker. Use the `mixer` speaker component to create two source speakers."
+        )

     return config

@@ -115,9 +115,7 @@ def get_target_platform():


 def get_target_variant():
-    return (
-        CORE.data[KEY_ESP32][KEY_VARIANT] if KEY_VARIANT in CORE.data[KEY_ESP32] else ""
-    )
+    return CORE.data[KEY_ESP32].get(KEY_VARIANT, "")


 # Get a list of available hardware interfaces based on target and variant.
@@ -213,9 +211,7 @@ def validate_hw_pins(spi, index=-1):
             return False
         if sdo_pin_no not in pin_set[CONF_MOSI_PIN]:
             return False
-        if sdi_pin_no not in pin_set[CONF_MISO_PIN]:
-            return False
-        return True
+        return sdi_pin_no in pin_set[CONF_MISO_PIN]
     return False


@@ -130,11 +130,11 @@ def validate_sprinkler(config):
         if (
             CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY in sprinkler_controller
             and CONF_VALVE_OPEN_DELAY not in sprinkler_controller
+            and sprinkler_controller[CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY]
         ):
-            if sprinkler_controller[CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY]:
-                raise cv.Invalid(
-                    f"{CONF_VALVE_OPEN_DELAY} must be defined when {CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY} is enabled"
-                )
+            raise cv.Invalid(
+                f"{CONF_VALVE_OPEN_DELAY} must be defined when {CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY} is enabled"
+            )

         if (
             CONF_REPEAT in sprinkler_controller
@@ -42,14 +42,15 @@ SSD1306_MODEL = cv.enum(MODELS, upper=True, space="_")

 def _validate(value):
     model = value[CONF_MODEL]
-    if model not in ("SSD1305_128X32", "SSD1305_128X64"):
-        # Contrast is default value (1.0) while brightness is not
-        # Indicates user is using old `brightness` option
-        if value[CONF_BRIGHTNESS] != 1.0 and value[CONF_CONTRAST] == 1.0:
-            raise cv.Invalid(
-                "SSD1306/SH1106 no longer accepts brightness option, "
-                'please use "contrast" instead.'
-            )
+    if (
+        model not in ("SSD1305_128X32", "SSD1305_128X64")
+        and value[CONF_BRIGHTNESS] != 1.0
+        and value[CONF_CONTRAST] == 1.0
+    ):
+        raise cv.Invalid(
+            "SSD1306/SH1106 no longer accepts brightness option, "
+            'please use "contrast" instead.'
+        )

     return value

@@ -189,7 +189,6 @@ async def to_code(config):
     cg.add(var.set_vsync_front_porch(config[CONF_VSYNC_FRONT_PORCH]))
     cg.add(var.set_pclk_inverted(config[CONF_PCLK_INVERTED]))
     cg.add(var.set_pclk_frequency(config[CONF_PCLK_FREQUENCY]))
-    index = 0
     dpins = []
     if CONF_RED in config[CONF_DATA_PINS]:
         red_pins = config[CONF_DATA_PINS][CONF_RED]
@@ -207,10 +206,9 @@ async def to_code(config):
         dpins = dpins[8:16] + dpins[0:8]
     else:
         dpins = config[CONF_DATA_PINS]
-    for pin in dpins:
+    for index, pin in enumerate(dpins):
         data_pin = await cg.gpio_pin_expression(pin)
         cg.add(var.add_data_pin(data_pin, index))
-        index += 1

     if dc_pin := config.get(CONF_DC_PIN):
         dc = await cg.gpio_pin_expression(dc_pin)
@@ -49,15 +49,14 @@ def _expand_jinja(value, orig_value, path, jinja, ignore_missing):
     try:
         # Invoke the jinja engine to evaluate the expression.
         value, err = jinja.expand(value)
-        if err is not None:
-            if not ignore_missing and "password" not in path:
-                _LOGGER.warning(
-                    "Found '%s' (see %s) which looks like an expression,"
-                    " but could not resolve all the variables: %s",
-                    value,
-                    "->".join(str(x) for x in path),
-                    err.message,
-                )
+        if err is not None and not ignore_missing and "password" not in path:
+            _LOGGER.warning(
+                "Found '%s' (see %s) which looks like an expression,"
+                " but could not resolve all the variables: %s",
+                value,
+                "->".join(str(x) for x in path),
+                err.message,
+            )
     except (
         TemplateError,
         TemplateRuntimeError,
@@ -1,3 +1,4 @@
+import contextlib
 import re

 from esphome import automation
@@ -41,12 +42,10 @@ ELEVATION_MAP = {

 def elevation(value):
     if isinstance(value, str):
-        try:
+        with contextlib.suppress(cv.Invalid):
             value = ELEVATION_MAP[
                 cv.one_of(*ELEVATION_MAP, lower=True, space="_")(value)
             ]
-        except cv.Invalid:
-            pass
     value = cv.angle(value)
     return cv.float_range(min=-180, max=180)(value)

@@ -41,11 +41,13 @@ SX1509KeyTrigger = sx1509_ns.class_(


 def check_keys(config):
-    if CONF_KEYS in config:
-        if len(config[CONF_KEYS]) != config[CONF_KEY_ROWS] * config[CONF_KEY_COLUMNS]:
-            raise cv.Invalid(
-                "The number of key codes must equal the number of rows * columns"
-            )
+    if (
+        CONF_KEYS in config
+        and len(config[CONF_KEYS]) != config[CONF_KEY_ROWS] * config[CONF_KEY_COLUMNS]
+    ):
+        raise cv.Invalid(
+            "The number of key codes must equal the number of rows * columns"
+        )
     return config


@@ -477,11 +477,11 @@ def validate_thermostat(config):
     if (
         CONF_ON_BOOT_RESTORE_FROM in config
         and config[CONF_ON_BOOT_RESTORE_FROM] is OnBootRestoreFrom.DEFAULT_PRESET
+        and CONF_DEFAULT_PRESET not in config
     ):
-        if CONF_DEFAULT_PRESET not in config:
-            raise cv.Invalid(
-                f"{CONF_DEFAULT_PRESET} must be defined to use {CONF_ON_BOOT_RESTORE_FROM} in DEFAULT_PRESET mode"
-            )
+        raise cv.Invalid(
+            f"{CONF_DEFAULT_PRESET} must be defined to use {CONF_ON_BOOT_RESTORE_FROM} in DEFAULT_PRESET mode"
+        )

     if config[CONF_FAN_WITH_COOLING] is True and CONF_FAN_ONLY_ACTION not in config:
         raise cv.Invalid(
@@ -236,7 +236,7 @@ def validate_time_at(value):

 def validate_cron_keys(value):
     if CONF_CRON in value:
-        for key in value.keys():
+        for key in value:
             if key in CRON_KEYS:
                 raise cv.Invalid(f"Cannot use option {key} when cron: is specified.")
         if CONF_AT in value:
@@ -246,7 +246,7 @@ def validate_cron_keys(value):
         value.update(cron_)
         return value
     if CONF_AT in value:
-        for key in value.keys():
+        for key in value:
             if key in CRON_KEYS:
                 raise cv.Invalid(f"Cannot use option {key} when at: is specified.")
         at_ = value[CONF_AT]
@@ -46,16 +46,15 @@ TuyaClimate = tuya_ns.class_("TuyaClimate", climate.Climate, cg.Component)


 def validate_temperature_multipliers(value):
-    if CONF_TEMPERATURE_MULTIPLIER in value:
-        if (
-            CONF_CURRENT_TEMPERATURE_MULTIPLIER in value
-            or CONF_TARGET_TEMPERATURE_MULTIPLIER in value
-        ):
-            raise cv.Invalid(
-                f"Cannot have {CONF_TEMPERATURE_MULTIPLIER} at the same time as "
-                f"{CONF_CURRENT_TEMPERATURE_MULTIPLIER} and "
-                f"{CONF_TARGET_TEMPERATURE_MULTIPLIER}"
-            )
+    if CONF_TEMPERATURE_MULTIPLIER in value and (
+        CONF_CURRENT_TEMPERATURE_MULTIPLIER in value
+        or CONF_TARGET_TEMPERATURE_MULTIPLIER in value
+    ):
+        raise cv.Invalid(
+            f"Cannot have {CONF_TEMPERATURE_MULTIPLIER} at the same time as "
+            f"{CONF_CURRENT_TEMPERATURE_MULTIPLIER} and "
+            f"{CONF_TARGET_TEMPERATURE_MULTIPLIER}"
+        )
     if (
         CONF_CURRENT_TEMPERATURE_MULTIPLIER in value
         and CONF_TARGET_TEMPERATURE_MULTIPLIER not in value
@@ -34,12 +34,14 @@ def validate_min_max(config):
     min_value = config[CONF_MIN_VALUE]
     if max_value <= min_value:
         raise cv.Invalid("max_value must be greater than min_value")
-    if hidden_config := config.get(CONF_DATAPOINT_HIDDEN):
-        if (initial_value := hidden_config.get(CONF_INITIAL_VALUE, None)) is not None:
-            if (initial_value > max_value) or (initial_value < min_value):
-                raise cv.Invalid(
-                    f"{CONF_INITIAL_VALUE} must be a value between {CONF_MAX_VALUE} and {CONF_MIN_VALUE}"
-                )
+    if (
+        (hidden_config := config.get(CONF_DATAPOINT_HIDDEN))
+        and (initial_value := hidden_config.get(CONF_INITIAL_VALUE, None)) is not None
+        and ((initial_value > max_value) or (initial_value < min_value))
+    ):
+        raise cv.Invalid(
+            f"{CONF_INITIAL_VALUE} must be a value between {CONF_MAX_VALUE} and {CONF_MIN_VALUE}"
+        )
     return config


@@ -442,9 +442,7 @@ async def to_code(config):

     if CORE.is_esp8266:
         cg.add_library("ESP8266WiFi", None)
-    elif CORE.is_esp32 and CORE.using_arduino:
-        cg.add_library("WiFi", None)
-    elif CORE.is_rp2040:
+    elif (CORE.is_esp32 and CORE.using_arduino) or CORE.is_rp2040:
         cg.add_library("WiFi", None)

     if CORE.is_esp32 and CORE.using_esp_idf:
@@ -198,10 +198,7 @@ class Config(OrderedDict, fv.FinalValidateConfig):
         self.output_paths.remove((path, domain))

     def is_in_error_path(self, path: ConfigPath) -> bool:
-        for err in self.errors:
-            if _path_begins_with(err.path, path):
-                return True
-        return False
+        return any(_path_begins_with(err.path, path) for err in self.errors)

     def set_by_path(self, path, value):
         conf = self
@@ -224,7 +221,7 @@ class Config(OrderedDict, fv.FinalValidateConfig):
         for index, path_item in enumerate(path):
             try:
                 if path_item in data:
-                    key_data = [x for x in data.keys() if x == path_item][0]
+                    key_data = [x for x in data if x == path_item][0]
                     if isinstance(key_data, ESPHomeDataBase):
                         doc_range = key_data.esp_range
                         if get_key and index == len(path) - 1:
@@ -1081,7 +1078,7 @@ def dump_dict(
             ret += "{}"
             multiline = False

-        for k in conf.keys():
+        for k in conf:
             path_ = path + [k]
             error = config.get_error_for_path(path_)
             if error is not None:
@@ -1097,10 +1094,7 @@ def dump_dict(
                 msg = f"\n{indent(msg)}"

             if inf is not None:
-                if m:
-                    msg = f" {inf}{msg}"
-                else:
-                    msg = f"{msg} {inf}"
+                msg = f" {inf}{msg}" if m else f"{msg} {inf}"
             ret += f"{st + msg}\n"
     elif isinstance(conf, str):
         if is_secret(conf):
@@ -2,7 +2,7 @@

 from __future__ import annotations

-from contextlib import contextmanager
+from contextlib import contextmanager, suppress
 from dataclasses import dataclass
 from datetime import datetime
 from ipaddress import (
@@ -2113,10 +2113,8 @@ def require_esphome_version(year, month, patch):

 @contextmanager
 def suppress_invalid():
-    try:
+    with suppress(vol.Invalid):
         yield
-    except vol.Invalid:
-        pass


 GIT_SCHEMA = Schema(
@@ -1037,10 +1037,7 @@ class MockObjClass(MockObj):
     def inherits_from(self, other: "MockObjClass") -> bool:
         if str(self) == str(other):
             return True
-        for parent in self._parents:
-            if str(parent) == str(other):
-                return True
-        return False
+        return any(str(parent) == str(other) for parent in self._parents)

     def template(self, *args: SafeExpType) -> "MockObjClass":
         if len(args) != 1 or not isinstance(args[0], TemplateArguments):
@@ -3,6 +3,7 @@ from __future__ import annotations
 import asyncio
 from asyncio import events
 from concurrent.futures import ThreadPoolExecutor
+import contextlib
 import logging
 import os
 import socket
@@ -125,10 +126,8 @@ def start_dashboard(args) -> None:

     asyncio.set_event_loop_policy(DashboardEventLoopPolicy(settings.verbose))

-    try:
+    with contextlib.suppress(KeyboardInterrupt):
         asyncio.run(async_start(args))
-    except KeyboardInterrupt:
-        pass


 async def async_start(args) -> None:
@@ -88,10 +88,7 @@ def recv_decode(sock, amount, decode=True):


 def receive_exactly(sock, amount, msg, expect, decode=True):
-    if decode:
-        data = []
-    else:
-        data = b""
+    data = [] if decode else b""

     try:
         data += recv_decode(sock, 1, decode=decode)
@@ -96,9 +96,7 @@ def cpp_string_escape(string, encoding="utf-8"):
     def _should_escape(byte: int) -> bool:
         if not 32 <= byte < 127:
             return True
-        if byte in (ord("\\"), ord('"')):
-            return True
-        return False
+        return byte in (ord("\\"), ord('"'))

     if isinstance(string, str):
         string = string.encode(encoding)
@@ -61,7 +61,7 @@ class ESPHomeLogFormatter(logging.Formatter):
         }.get(record.levelname, "")
         message = f"{prefix}{formatted}{AnsiStyle.RESET_ALL.value}"
         if CORE.dashboard:
-            try:
+            try:  # noqa: SIM105
                 message = message.replace("\033", "\\033")
             except UnicodeEncodeError:
                 pass
@@ -1,3 +1,4 @@
+import contextlib
 from datetime import datetime
 import hashlib
 import json
@@ -52,10 +53,8 @@ def initialize(
     client = prepare(
         config, subscriptions, on_message, on_connect, username, password, client_id
     )
-    try:
+    with contextlib.suppress(KeyboardInterrupt):
         client.loop_forever()
-    except KeyboardInterrupt:
-        pass
     return 0


@@ -131,9 +131,11 @@ def _load_idedata(config):
     temp_idedata = Path(CORE.relative_internal_path("idedata", f"{CORE.name}.json"))

     changed = False
-    if not platformio_ini.is_file() or not temp_idedata.is_file():
-        changed = True
-    elif platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime:
+    if (
+        not platformio_ini.is_file()
+        or not temp_idedata.is_file()
+        or platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime
+    ):
         changed = True

     if not changed:
@@ -59,7 +59,7 @@ def safe_print(message="", end="\n"):
     from esphome.core import CORE

     if CORE.dashboard:
-        try:
+        try:  # noqa: SIM105
            message = message.replace("\033", "\\033")
         except UnicodeEncodeError:
             pass
@@ -116,10 +116,7 @@ def wizard_file(**kwargs):
     kwargs["fallback_name"] = ap_name
     kwargs["fallback_psk"] = "".join(random.choice(letters) for _ in range(12))

-    if kwargs.get("friendly_name"):
-        base = BASE_CONFIG_FRIENDLY
-    else:
-        base = BASE_CONFIG
+    base = BASE_CONFIG_FRIENDLY if kwargs.get("friendly_name") else BASE_CONFIG

     config = base.format(**kwargs)

@@ -86,21 +86,17 @@ def storage_should_clean(old: StorageJSON, new: StorageJSON) -> bool:

     if old.src_version != new.src_version:
         return True
-    if old.build_path != new.build_path:
-        return True
-
-    return False
+    return old.build_path != new.build_path


 def storage_should_update_cmake_cache(old: StorageJSON, new: StorageJSON) -> bool:
     if (
         old.loaded_integrations != new.loaded_integrations
         or old.loaded_platforms != new.loaded_platforms
-    ):
-        if new.core_platform == PLATFORM_ESP32:
-            from esphome.components.esp32 import FRAMEWORK_ESP_IDF
+    ) and new.core_platform == PLATFORM_ESP32:
+        from esphome.components.esp32 import FRAMEWORK_ESP_IDF

-            return new.framework == FRAMEWORK_ESP_IDF
+        return new.framework == FRAMEWORK_ESP_IDF
     return False


@@ -56,9 +56,12 @@ class ESPHomeDataBase:
     def from_node(self, node):
         # pylint: disable=attribute-defined-outside-init
         self._esp_range = DocumentRange.from_marks(node.start_mark, node.end_mark)
-        if isinstance(node, yaml.ScalarNode):
-            if node.style is not None and node.style in "|>":
-                self._content_offset = 1
+        if (
+            isinstance(node, yaml.ScalarNode)
+            and node.style is not None
+            and node.style in "|>"
+        ):
+            self._content_offset = 1

     def from_database(self, database):
         # pylint: disable=attribute-defined-outside-init