diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cda0384d54..cab433c7f9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ ci: repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.13.0 + rev: v0.13.1 hooks: # Run the linter. - id: ruff diff --git a/esphome/__main__.py b/esphome/__main__.py index 07cd267c96..fff66bcd50 100644 --- a/esphome/__main__.py +++ b/esphome/__main__.py @@ -6,6 +6,7 @@ import getpass import importlib import logging import os +from pathlib import Path import re import sys import time @@ -452,7 +453,7 @@ def upload_using_esptool( "detect", ] for img in flash_images: - cmd += [img.offset, img.path] + cmd += [img.offset, str(img.path)] if os.environ.get("ESPHOME_USE_SUBPROCESS") is None: import esptool @@ -538,7 +539,10 @@ def upload_program( remote_port = int(ota_conf[CONF_PORT]) password = ota_conf.get(CONF_PASSWORD, "") - binary = args.file if getattr(args, "file", None) is not None else CORE.firmware_bin + if getattr(args, "file", None) is not None: + binary = Path(args.file) + else: + binary = CORE.firmware_bin # MQTT address resolution if get_port_type(host) in ("MQTT", "MQTTIP"): @@ -605,7 +609,7 @@ def clean_mqtt(config: ConfigType, args: ArgsProtocol) -> int | None: def command_wizard(args: ArgsProtocol) -> int | None: from esphome import wizard - return wizard.wizard(args.configuration) + return wizard.wizard(Path(args.configuration)) def command_config(args: ArgsProtocol, config: ConfigType) -> int | None: @@ -818,7 +822,8 @@ def command_idedata(args: ArgsProtocol, config: ConfigType) -> int: def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None: - for c in args.name: + new_name = args.name + for c in new_name: if c not in ALLOWED_NAME_CHARS: print( color( @@ -829,8 +834,7 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None: ) return 1 # Load existing yaml file - with open(CORE.config_path, mode="r+", encoding="utf-8") as raw_file: - raw_contents = raw_file.read() + raw_contents = CORE.config_path.read_text(encoding="utf-8") yaml = yaml_util.load_yaml(CORE.config_path) if CONF_ESPHOME not in yaml or CONF_NAME not in yaml[CONF_ESPHOME]: @@ -845,7 +849,7 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None: if match is None: new_raw = re.sub( rf"name:\s+[\"']?{old_name}[\"']?", - f'name: "{args.name}"', + f'name: "{new_name}"', raw_contents, ) else: @@ -865,29 +869,28 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None: new_raw = re.sub( rf"^(\s+{match.group(1)}):\s+[\"']?{old_name}[\"']?", - f'\\1: "{args.name}"', + f'\\1: "{new_name}"', raw_contents, flags=re.MULTILINE, ) - new_path = os.path.join(CORE.config_dir, args.name + ".yaml") + new_path: Path = CORE.config_dir / (new_name + ".yaml") print( - f"Updating {color(AnsiFore.CYAN, CORE.config_path)} to {color(AnsiFore.CYAN, new_path)}" + f"Updating {color(AnsiFore.CYAN, str(CORE.config_path))} to {color(AnsiFore.CYAN, str(new_path))}" ) print() - with open(new_path, mode="w", encoding="utf-8") as new_file: - new_file.write(new_raw) + new_path.write_text(new_raw, encoding="utf-8") - rc = run_external_process("esphome", "config", new_path) + rc = run_external_process("esphome", "config", str(new_path)) if rc != 0: print(color(AnsiFore.BOLD_RED, "Rename failed. 
Reverting changes.")) - os.remove(new_path) + new_path.unlink() return 1 cli_args = [ "run", - new_path, + str(new_path), "--no-logs", "--device", CORE.address, @@ -901,11 +904,11 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None: except KeyboardInterrupt: rc = 1 if rc != 0: - os.remove(new_path) + new_path.unlink() return 1 if CORE.config_path != new_path: - os.remove(CORE.config_path) + CORE.config_path.unlink() print(color(AnsiFore.BOLD_GREEN, "SUCCESS")) print() @@ -1262,7 +1265,8 @@ def run_esphome(argv): _LOGGER.info("ESPHome %s", const.__version__) for conf_path in args.configuration: - if any(os.path.basename(conf_path) == x for x in SECRETS_FILES): + conf_path = Path(conf_path) + if any(conf_path.name == x for x in SECRETS_FILES): _LOGGER.warning("Skipping secrets file %s", conf_path) continue diff --git a/esphome/build_gen/platformio.py b/esphome/build_gen/platformio.py index 9bbe86694b..30dbb69d86 100644 --- a/esphome/build_gen/platformio.py +++ b/esphome/build_gen/platformio.py @@ -1,5 +1,3 @@ -import os - from esphome.const import __version__ from esphome.core import CORE from esphome.helpers import mkdir_p, read_file, write_file_if_changed @@ -63,7 +61,7 @@ def write_ini(content): update_storage_json() path = CORE.relative_build_path("platformio.ini") - if os.path.isfile(path): + if path.is_file(): text = read_file(path) content_format = find_begin_end( text, INI_AUTO_GENERATE_BEGIN, INI_AUTO_GENERATE_END diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index fa64649df5..ad99de4b4a 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -260,6 +260,7 @@ message DeviceInfoResponse { // Indicates if Z-Wave proxy support is available and features supported uint32 zwave_proxy_feature_flags = 23 [(field_ifdef) = "USE_ZWAVE_PROXY"]; + uint32 zwave_home_id = 24 [(field_ifdef) = "USE_ZWAVE_PROXY"]; } message ListEntitiesRequest { diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 697b02b915..a27adfe241 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -1474,6 +1474,7 @@ bool APIConnection::send_device_info_response(const DeviceInfoRequest &msg) { #endif #ifdef USE_ZWAVE_PROXY resp.zwave_proxy_feature_flags = zwave_proxy::global_zwave_proxy->get_feature_flags(); + resp.zwave_home_id = zwave_proxy::global_zwave_proxy->get_home_id(); #endif #ifdef USE_API_NOISE resp.api_encryption_supported = true; diff --git a/esphome/components/api/api_pb2.cpp b/esphome/components/api/api_pb2.cpp index 58a083ad06..245933724b 100644 --- a/esphome/components/api/api_pb2.cpp +++ b/esphome/components/api/api_pb2.cpp @@ -132,6 +132,9 @@ void DeviceInfoResponse::encode(ProtoWriteBuffer buffer) const { #ifdef USE_ZWAVE_PROXY buffer.encode_uint32(23, this->zwave_proxy_feature_flags); #endif +#ifdef USE_ZWAVE_PROXY + buffer.encode_uint32(24, this->zwave_home_id); +#endif } void DeviceInfoResponse::calculate_size(ProtoSize &size) const { #ifdef USE_API_PASSWORD @@ -187,6 +190,9 @@ void DeviceInfoResponse::calculate_size(ProtoSize &size) const { #ifdef USE_ZWAVE_PROXY size.add_uint32(2, this->zwave_proxy_feature_flags); #endif +#ifdef USE_ZWAVE_PROXY + size.add_uint32(2, this->zwave_home_id); +#endif } #ifdef USE_BINARY_SENSOR void ListEntitiesBinarySensorResponse::encode(ProtoWriteBuffer buffer) const { diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index d52cb9eab3..248a4b1f82 
100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -498,7 +498,7 @@ class DeviceInfo final : public ProtoMessage { class DeviceInfoResponse final : public ProtoMessage { public: static constexpr uint8_t MESSAGE_TYPE = 10; - static constexpr uint8_t ESTIMATED_SIZE = 252; + static constexpr uint16_t ESTIMATED_SIZE = 257; #ifdef HAS_PROTO_MESSAGE_DUMP const char *message_name() const override { return "device_info_response"; } #endif @@ -561,6 +561,9 @@ class DeviceInfoResponse final : public ProtoMessage { #endif #ifdef USE_ZWAVE_PROXY uint32_t zwave_proxy_feature_flags{0}; +#endif +#ifdef USE_ZWAVE_PROXY + uint32_t zwave_home_id{0}; #endif void encode(ProtoWriteBuffer buffer) const override; void calculate_size(ProtoSize &size) const override; diff --git a/esphome/components/api/api_pb2_dump.cpp b/esphome/components/api/api_pb2_dump.cpp index d24f9b3fdc..ac43af6d54 100644 --- a/esphome/components/api/api_pb2_dump.cpp +++ b/esphome/components/api/api_pb2_dump.cpp @@ -769,6 +769,9 @@ void DeviceInfoResponse::dump_to(std::string &out) const { #ifdef USE_ZWAVE_PROXY dump_field(out, "zwave_proxy_feature_flags", this->zwave_proxy_feature_flags); #endif +#ifdef USE_ZWAVE_PROXY + dump_field(out, "zwave_home_id", this->zwave_home_id); +#endif } void ListEntitiesRequest::dump_to(std::string &out) const { out.append("ListEntitiesRequest {}"); } void ListEntitiesDoneResponse::dump_to(std::string &out) const { out.append("ListEntitiesDoneResponse {}"); } diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index 1a2cbd9daf..f5eda52cae 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -37,7 +37,7 @@ from esphome.const import ( ) from esphome.core import CORE, HexInt, TimePeriod import esphome.final_validate as fv -from esphome.helpers import copy_file_if_changed, mkdir_p, write_file_if_changed +from esphome.helpers import copy_file_if_changed, write_file_if_changed from esphome.types import ConfigType from esphome.writer import clean_cmake_cache @@ -272,14 +272,14 @@ def add_idf_component( } -def add_extra_script(stage: str, filename: str, path: str): +def add_extra_script(stage: str, filename: str, path: Path): """Add an extra script to the project.""" key = f"{stage}:{filename}" if add_extra_build_file(filename, path): cg.add_platformio_option("extra_scripts", [key]) -def add_extra_build_file(filename: str, path: str) -> bool: +def add_extra_build_file(filename: str, path: Path) -> bool: """Add an extra build file to the project.""" if filename not in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES]: CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES][filename] = { @@ -818,7 +818,7 @@ async def to_code(config): add_extra_script( "post", "post_build.py", - os.path.join(os.path.dirname(__file__), "post_build.py.script"), + Path(__file__).parent / "post_build.py.script", ) if conf[CONF_TYPE] == FRAMEWORK_ESP_IDF: @@ -1040,7 +1040,7 @@ def _write_sdkconfig(): def _write_idf_component_yml(): - yml_path = Path(CORE.relative_build_path("src/idf_component.yml")) + yml_path = CORE.relative_build_path("src/idf_component.yml") if CORE.data[KEY_ESP32][KEY_COMPONENTS]: components: dict = CORE.data[KEY_ESP32][KEY_COMPONENTS] dependencies = {} @@ -1058,8 +1058,8 @@ def _write_idf_component_yml(): contents = "" if write_file_if_changed(yml_path, contents): dependencies_lock = CORE.relative_build_path("dependencies.lock") - if os.path.isfile(dependencies_lock): - os.remove(dependencies_lock) + if 
dependencies_lock.is_file(): + dependencies_lock.unlink() clean_cmake_cache() @@ -1093,14 +1093,13 @@ def copy_files(): ) for file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].values(): - if file[KEY_PATH].startswith("http"): + name: str = file[KEY_NAME] + path: Path = file[KEY_PATH] + if str(path).startswith("http"): import requests - mkdir_p(CORE.relative_build_path(os.path.dirname(file[KEY_NAME]))) - with open(CORE.relative_build_path(file[KEY_NAME]), "wb") as f: - f.write(requests.get(file[KEY_PATH], timeout=30).content) + CORE.relative_build_path(name).parent.mkdir(parents=True, exist_ok=True) + content = requests.get(path, timeout=30).content + CORE.relative_build_path(name).write_bytes(content) else: - copy_file_if_changed( - file[KEY_PATH], - CORE.relative_build_path(file[KEY_NAME]), - ) + copy_file_if_changed(path, CORE.relative_build_path(name)) diff --git a/esphome/components/esp32_hosted/__init__.py b/esphome/components/esp32_hosted/__init__.py index 330800df12..9cea02c322 100644 --- a/esphome/components/esp32_hosted/__init__.py +++ b/esphome/components/esp32_hosted/__init__.py @@ -1,4 +1,5 @@ import os +from pathlib import Path from esphome import pins from esphome.components import esp32 @@ -97,5 +98,5 @@ async def to_code(config): esp32.add_extra_script( "post", "esp32_hosted.py", - os.path.join(os.path.dirname(__file__), "esp32_hosted.py.script"), + Path(__file__).parent / "esp32_hosted.py.script", ) diff --git a/esphome/components/esp8266/__init__.py b/esphome/components/esp8266/__init__.py index b85314214e..8a7fbbcb0a 100644 --- a/esphome/components/esp8266/__init__.py +++ b/esphome/components/esp8266/__init__.py @@ -1,5 +1,5 @@ import logging -import os +from pathlib import Path import esphome.codegen as cg import esphome.config_validation as cv @@ -259,8 +259,8 @@ async def to_code(config): # Called by writer.py def copy_files(): - dir = os.path.dirname(__file__) - post_build_file = os.path.join(dir, "post_build.py.script") + dir = Path(__file__).parent + post_build_file = dir / "post_build.py.script" copy_file_if_changed( post_build_file, CORE.relative_build_path("post_build.py"), diff --git a/esphome/components/font/__init__.py b/esphome/components/font/__init__.py index 4ecc76c561..ddcee14635 100644 --- a/esphome/components/font/__init__.py +++ b/esphome/components/font/__init__.py @@ -3,7 +3,6 @@ import functools import hashlib from itertools import accumulate import logging -import os from pathlib import Path import re @@ -38,6 +37,7 @@ from esphome.const import ( ) from esphome.core import CORE, HexInt from esphome.helpers import cpp_string_escape +from esphome.types import ConfigType _LOGGER = logging.getLogger(__name__) @@ -253,11 +253,11 @@ def validate_truetype_file(value): return CORE.relative_config_path(cv.file_(value)) -def add_local_file(value): +def add_local_file(value: ConfigType) -> ConfigType: if value in FONT_CACHE: return value - path = value[CONF_PATH] - if not os.path.isfile(path): + path = Path(value[CONF_PATH]) + if not path.is_file(): raise cv.Invalid(f"File '{path}' not found.") FONT_CACHE[value] = path return value @@ -318,7 +318,7 @@ def download_gfont(value): external_files.compute_local_file_dir(DOMAIN) / f"{value[CONF_FAMILY]}@{value[CONF_WEIGHT]}@{value[CONF_ITALIC]}@v1.ttf" ) - if not external_files.is_file_recent(str(path), value[CONF_REFRESH]): + if not external_files.is_file_recent(path, value[CONF_REFRESH]): _LOGGER.debug("download_gfont: path=%s", path) try: req = requests.get(url, timeout=external_files.NETWORK_TIMEOUT) diff 
--git a/esphome/components/libretiny/__init__.py b/esphome/components/libretiny/__init__.py index 178660cb40..c63d6d7faa 100644 --- a/esphome/components/libretiny/__init__.py +++ b/esphome/components/libretiny/__init__.py @@ -1,6 +1,5 @@ import json import logging -from os.path import dirname, isfile, join import esphome.codegen as cg import esphome.config_validation as cv @@ -24,6 +23,7 @@ from esphome.const import ( __version__, ) from esphome.core import CORE +from esphome.storage_json import StorageJSON from . import gpio # noqa from .const import ( @@ -129,7 +129,7 @@ def only_on_family(*, supported=None, unsupported=None): return validator_ -def get_download_types(storage_json=None): +def get_download_types(storage_json: StorageJSON = None): types = [ { "title": "UF2 package (recommended)", @@ -139,11 +139,11 @@ def get_download_types(storage_json=None): }, ] - build_dir = dirname(storage_json.firmware_bin_path) - outputs = join(build_dir, "firmware.json") - if not isfile(outputs): + build_dir = storage_json.firmware_bin_path.parent + outputs = build_dir / "firmware.json" + if not outputs.is_file(): return types - with open(outputs, encoding="utf-8") as f: + with outputs.open(encoding="utf-8") as f: outputs = json.load(f) for output in outputs: if not output["public"]: diff --git a/esphome/components/rp2040/__init__.py b/esphome/components/rp2040/__init__.py index 1ec38e0159..3a1ea16fa3 100644 --- a/esphome/components/rp2040/__init__.py +++ b/esphome/components/rp2040/__init__.py @@ -1,5 +1,5 @@ import logging -import os +from pathlib import Path from string import ascii_letters, digits import esphome.codegen as cg @@ -19,7 +19,7 @@ from esphome.const import ( ThreadModel, ) from esphome.core import CORE, CoroPriority, EsphomeError, coroutine_with_priority -from esphome.helpers import copy_file_if_changed, mkdir_p, read_file, write_file +from esphome.helpers import copy_file_if_changed, read_file, write_file_if_changed from .const import KEY_BOARD, KEY_PIO_FILES, KEY_RP2040, rp2040_ns @@ -221,18 +221,18 @@ def generate_pio_files() -> bool: if not files: return False for key, data in files.items(): - pio_path = CORE.relative_build_path(f"src/pio/{key}.pio") - mkdir_p(os.path.dirname(pio_path)) - write_file(pio_path, data) + pio_path = CORE.build_path / "src" / "pio" / f"{key}.pio" + pio_path.parent.mkdir(parents=True, exist_ok=True) + write_file_if_changed(pio_path, data) includes.append(f"pio/{key}.pio.h") - write_file( + write_file_if_changed( CORE.relative_build_path("src/pio_includes.h"), "#pragma once\n" + "\n".join([f'#include "{include}"' for include in includes]), ) - dir = os.path.dirname(__file__) - build_pio_file = os.path.join(dir, "build_pio.py.script") + dir = Path(__file__).parent + build_pio_file = dir / "build_pio.py.script" copy_file_if_changed( build_pio_file, CORE.relative_build_path("build_pio.py"), @@ -243,8 +243,8 @@ def generate_pio_files() -> bool: # Called by writer.py def copy_files(): - dir = os.path.dirname(__file__) - post_build_file = os.path.join(dir, "post_build.py.script") + dir = Path(__file__).parent + post_build_file = dir / "post_build.py.script" copy_file_if_changed( post_build_file, CORE.relative_build_path("post_build.py"), @@ -252,4 +252,4 @@ def copy_files(): if generate_pio_files(): path = CORE.relative_src_path("esphome.h") content = read_file(path).rstrip("\n") - write_file(path, content + '\n#include "pio_includes.h"\n') + write_file_if_changed(path, content + '\n#include "pio_includes.h"\n') diff --git 
a/esphome/components/zwave_proxy/zwave_proxy.cpp b/esphome/components/zwave_proxy/zwave_proxy.cpp index e39f857743..12c4ee0c0d 100644 --- a/esphome/components/zwave_proxy/zwave_proxy.cpp +++ b/esphome/components/zwave_proxy/zwave_proxy.cpp @@ -8,8 +8,26 @@ namespace zwave_proxy { static const char *const TAG = "zwave_proxy"; +static constexpr uint8_t ZWAVE_COMMAND_GET_NETWORK_IDS = 0x20; +// GET_NETWORK_IDS response: [SOF][LENGTH][TYPE][CMD][HOME_ID(4)][NODE_ID][...] +static constexpr uint8_t ZWAVE_COMMAND_TYPE_RESPONSE = 0x01; // Response type field value +static constexpr uint8_t ZWAVE_MIN_GET_NETWORK_IDS_LENGTH = 9; // TYPE + CMD + HOME_ID(4) + NODE_ID + checksum + +static uint8_t calculate_frame_checksum(const uint8_t *data, uint8_t length) { + // Calculate Z-Wave frame checksum + // XOR all bytes between SOF and checksum position (exclusive) + // Initial value is 0xFF per Z-Wave protocol specification + uint8_t checksum = 0xFF; + for (uint8_t i = 1; i < length - 1; i++) { + checksum ^= data[i]; + } + return checksum; +} + ZWaveProxy::ZWaveProxy() { global_zwave_proxy = this; } +void ZWaveProxy::setup() { this->send_simple_command_(ZWAVE_COMMAND_GET_NETWORK_IDS); } + void ZWaveProxy::loop() { if (this->response_handler_()) { ESP_LOGV(TAG, "Handled late response"); @@ -26,6 +44,21 @@ void ZWaveProxy::loop() { return; } if (this->parse_byte_(byte)) { + // Check if this is a GET_NETWORK_IDS response frame + // Frame format: [SOF][LENGTH][TYPE][CMD][HOME_ID(4)][NODE_ID][...] + // We verify: + // - buffer_[0]: Start of frame marker (0x01) + // - buffer_[1]: Length field must be >= 9 to contain all required data + // - buffer_[2]: Command type (0x01 for response) + // - buffer_[3]: Command ID (0x20 for GET_NETWORK_IDS) + if (this->buffer_[3] == ZWAVE_COMMAND_GET_NETWORK_IDS && this->buffer_[2] == ZWAVE_COMMAND_TYPE_RESPONSE && + this->buffer_[1] >= ZWAVE_MIN_GET_NETWORK_IDS_LENGTH && this->buffer_[0] == ZWAVE_FRAME_TYPE_START) { + // Extract the 4-byte Home ID starting at offset 4 + // The frame parser has already validated the checksum and ensured all bytes are present + std::memcpy(this->home_id_.data(), this->buffer_.data() + 4, this->home_id_.size()); + ESP_LOGI(TAG, "Home ID: %s", + format_hex_pretty(this->home_id_.data(), this->home_id_.size(), ':', false).c_str()); + } ESP_LOGV(TAG, "Sending to client: %s", YESNO(this->api_connection_ != nullptr)); if (this->api_connection_ != nullptr) { // minimize copying to reduce CPU overhead @@ -35,7 +68,7 @@ void ZWaveProxy::loop() { // If this is a data frame, use frame length indicator + 2 (for SoF + checksum), else assume 1 for ACK/NAK/CAN this->outgoing_proto_msg_.data_len = this->buffer_[0] == ZWAVE_FRAME_TYPE_START ? 
this->buffer_[1] + 2 : 1; } - std::memcpy(this->outgoing_proto_msg_.data, this->buffer_, this->outgoing_proto_msg_.data_len); + std::memcpy(this->outgoing_proto_msg_.data, this->buffer_.data(), this->outgoing_proto_msg_.data_len); this->api_connection_->send_message(this->outgoing_proto_msg_, api::ZWaveProxyFrame::MESSAGE_TYPE); } } @@ -77,6 +110,15 @@ void ZWaveProxy::send_frame(const uint8_t *data, size_t length) { this->write_array(data, length); } +void ZWaveProxy::send_simple_command_(const uint8_t command_id) { + // Send a simple Z-Wave command with no parameters + // Frame format: [SOF][LENGTH][TYPE][CMD][CHECKSUM] + // Where LENGTH=0x03 (3 bytes: TYPE + CMD + CHECKSUM) + uint8_t cmd[] = {0x01, 0x03, 0x00, command_id, 0x00}; + cmd[4] = calculate_frame_checksum(cmd, sizeof(cmd)); + this->send_frame(cmd, sizeof(cmd)); +} + bool ZWaveProxy::parse_byte_(uint8_t byte) { bool frame_completed = false; // Basic parsing logic for received frames @@ -94,43 +136,40 @@ bool ZWaveProxy::parse_byte_(uint8_t byte) { this->end_frame_after_ = this->buffer_index_ + byte; ESP_LOGVV(TAG, "Calculated EOF: %u", this->end_frame_after_); this->buffer_[this->buffer_index_++] = byte; - this->checksum_ ^= byte; this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_TYPE; break; case ZWAVE_PARSING_STATE_WAIT_TYPE: this->buffer_[this->buffer_index_++] = byte; ESP_LOGVV(TAG, "Received TYPE: 0x%02X", byte); - this->checksum_ ^= byte; this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_COMMAND_ID; break; case ZWAVE_PARSING_STATE_WAIT_COMMAND_ID: this->buffer_[this->buffer_index_++] = byte; ESP_LOGVV(TAG, "Received COMMAND ID: 0x%02X", byte); - this->checksum_ ^= byte; this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_PAYLOAD; break; case ZWAVE_PARSING_STATE_WAIT_PAYLOAD: this->buffer_[this->buffer_index_++] = byte; - this->checksum_ ^= byte; ESP_LOGVV(TAG, "Received PAYLOAD: 0x%02X", byte); if (this->buffer_index_ >= this->end_frame_after_) { this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_CHECKSUM; } break; - case ZWAVE_PARSING_STATE_WAIT_CHECKSUM: + case ZWAVE_PARSING_STATE_WAIT_CHECKSUM: { this->buffer_[this->buffer_index_++] = byte; - ESP_LOGVV(TAG, "Received CHECKSUM: 0x%02X", byte); - ESP_LOGV(TAG, "Calculated CHECKSUM: 0x%02X", this->checksum_); - if (this->checksum_ != byte) { - ESP_LOGW(TAG, "Bad checksum: expected 0x%02X, got 0x%02X", this->checksum_, byte); + auto checksum = calculate_frame_checksum(this->buffer_.data(), this->buffer_index_); + ESP_LOGVV(TAG, "CHECKSUM Received: 0x%02X - Calculated: 0x%02X", byte, checksum); + if (checksum != byte) { + ESP_LOGW(TAG, "Bad checksum: expected 0x%02X, got 0x%02X", checksum, byte); this->parsing_state_ = ZWAVE_PARSING_STATE_SEND_NAK; } else { this->parsing_state_ = ZWAVE_PARSING_STATE_SEND_ACK; - ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(this->buffer_, this->buffer_index_).c_str()); + ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(this->buffer_.data(), this->buffer_index_).c_str()); frame_completed = true; } this->response_handler_(); break; + } case ZWAVE_PARSING_STATE_READ_BL_MENU: this->buffer_[this->buffer_index_++] = byte; if (!byte) { @@ -151,7 +190,6 @@ bool ZWaveProxy::parse_byte_(uint8_t byte) { void ZWaveProxy::parse_start_(uint8_t byte) { this->buffer_index_ = 0; - this->checksum_ = 0xFF; this->parsing_state_ = ZWAVE_PARSING_STATE_WAIT_START; switch (byte) { case ZWAVE_FRAME_TYPE_START: diff --git a/esphome/components/zwave_proxy/zwave_proxy.h b/esphome/components/zwave_proxy/zwave_proxy.h index a0f25849e4..5d908b328c 100644 --- 
a/esphome/components/zwave_proxy/zwave_proxy.h +++ b/esphome/components/zwave_proxy/zwave_proxy.h @@ -3,8 +3,11 @@ #include "esphome/components/api/api_connection.h" #include "esphome/components/api/api_pb2.h" #include "esphome/core/component.h" +#include "esphome/core/helpers.h" #include "esphome/components/uart/uart.h" +#include <array> + namespace esphome { namespace zwave_proxy { @@ -38,6 +41,7 @@ class ZWaveProxy : public uart::UARTDevice, public Component { public: ZWaveProxy(); + void setup() override; void loop() override; void dump_config() override; @@ -45,21 +49,25 @@ class ZWaveProxy : public uart::UARTDevice, public Component { api::APIConnection *get_api_connection() { return this->api_connection_; } uint32_t get_feature_flags() const { return ZWaveProxyFeature::FEATURE_ZWAVE_PROXY_ENABLED; } + uint32_t get_home_id() { + return encode_uint32(this->home_id_[0], this->home_id_[1], this->home_id_[2], this->home_id_[3]); + } void send_frame(const uint8_t *data, size_t length); protected: + void send_simple_command_(uint8_t command_id); bool parse_byte_(uint8_t byte); // Returns true if frame parsing was completed (a frame is ready in the buffer) void parse_start_(uint8_t byte); bool response_handler_(); api::APIConnection *api_connection_{nullptr}; // Current subscribed client - uint8_t buffer_[sizeof(api::ZWaveProxyFrame::data)]; // Fixed buffer for incoming data - uint8_t buffer_index_{0}; // Index for populating the data buffer - uint8_t checksum_{0}; // Checksum of the frame being parsed - uint8_t end_frame_after_{0}; // Payload reception ends after this index - uint8_t last_response_{0}; // Last response type sent + std::array<uint8_t, 4> home_id_{0, 0, 0, 0}; // Fixed buffer for home ID + std::array<uint8_t, sizeof(api::ZWaveProxyFrame::data)> buffer_; // Fixed buffer for incoming data + uint8_t buffer_index_{0}; // Index for populating the data buffer + uint8_t end_frame_after_{0}; // Payload reception ends after this index + uint8_t last_response_{0}; // Last response type sent ZWaveParsingState parsing_state_{ZWAVE_PARSING_STATE_WAIT_START}; bool in_bootloader_{false}; // True if the device is detected to be in bootloader mode diff --git a/esphome/config_validation.py b/esphome/config_validation.py index 866ed4f8aa..7aaba886e3 100644 --- a/esphome/config_validation.py +++ b/esphome/config_validation.py @@ -15,7 +15,7 @@ from ipaddress import ( ip_network, ) import logging -import os +from pathlib import Path import re from string import ascii_letters, digits import uuid as uuid_ @@ -1609,34 +1609,32 @@ def dimensions(value): return dimensions([match.group(1), match.group(2)]) -def directory(value): +def directory(value: object) -> Path: value = string(value) path = CORE.relative_config_path(value) - if not os.path.exists(path): + if not path.exists(): raise Invalid( - f"Could not find directory '{path}'. Please make sure it exists (full path: {os.path.abspath(path)})." + f"Could not find directory '{path}'. Please make sure it exists (full path: {path.resolve()})." ) - if not os.path.isdir(path): + if not path.is_dir(): raise Invalid( - f"Path '{path}' is not a directory (full path: {os.path.abspath(path)})." + f"Path '{path}' is not a directory (full path: {path.resolve()})." ) - return value + return path -def file_(value): +def file_(value: object) -> Path: value = string(value) path = CORE.relative_config_path(value) - if not os.path.exists(path): + if not path.exists(): raise Invalid( - f"Could not find file '{path}'. Please make sure it exists (full path: {os.path.abspath(path)})." + f"Could not find file '{path}'. 
Please make sure it exists (full path: {path.resolve()})." ) - if not os.path.isfile(path): - raise Invalid( - f"Path '{path}' is not a file (full path: {os.path.abspath(path)})." - ) - return value + if not path.is_file(): + raise Invalid(f"Path '{path}' is not a file (full path: {path.resolve()}).") + return path ENTITY_ID_CHARACTERS = "abcdefghijklmnopqrstuvwxyz0123456789_" diff --git a/esphome/core/__init__.py b/esphome/core/__init__.py index 242a6854df..7ab8a3ba71 100644 --- a/esphome/core/__init__.py +++ b/esphome/core/__init__.py @@ -3,6 +3,7 @@ from contextlib import contextmanager import logging import math import os +from pathlib import Path import re from typing import TYPE_CHECKING @@ -383,7 +384,7 @@ class DocumentLocation: @classmethod def from_mark(cls, mark): - return cls(mark.name, mark.line, mark.column) + return cls(str(mark.name), mark.line, mark.column) def __str__(self): return f"{self.document} {self.line}:{self.column}" @@ -538,9 +539,9 @@ class EsphomeCore: # The first key to this dict should always be the integration name self.data = {} # The relative path to the configuration YAML - self.config_path: str | None = None + self.config_path: Path | None = None # The relative path to where all build files are stored - self.build_path: str | None = None + self.build_path: Path | None = None # The validated configuration, this is None until the config has been validated self.config: ConfigType | None = None # The pending tasks in the task queue (mostly for C++ generation) @@ -664,43 +665,46 @@ class EsphomeCore: return None @property - def config_dir(self): - return os.path.abspath(os.path.dirname(self.config_path)) + def config_dir(self) -> Path: + if self.config_path.is_dir(): + return self.config_path.absolute() + return self.config_path.absolute().parent @property - def data_dir(self): + def data_dir(self) -> Path: if is_ha_addon(): - return os.path.join("/data") + return Path("/data") if "ESPHOME_DATA_DIR" in os.environ: - return get_str_env("ESPHOME_DATA_DIR", None) + return Path(get_str_env("ESPHOME_DATA_DIR", None)) return self.relative_config_path(".esphome") @property - def config_filename(self): - return os.path.basename(self.config_path) + def config_filename(self) -> str: + return self.config_path.name - def relative_config_path(self, *path): - path_ = os.path.expanduser(os.path.join(*path)) - return os.path.join(self.config_dir, path_) + def relative_config_path(self, *path: str | Path) -> Path: + path_ = Path(*path).expanduser() + return self.config_dir / path_ - def relative_internal_path(self, *path: str) -> str: - return os.path.join(self.data_dir, *path) + def relative_internal_path(self, *path: str | Path) -> Path: + path_ = Path(*path).expanduser() + return self.data_dir / path_ - def relative_build_path(self, *path): - path_ = os.path.expanduser(os.path.join(*path)) - return os.path.join(self.build_path, path_) + def relative_build_path(self, *path: str | Path) -> Path: + path_ = Path(*path).expanduser() + return self.build_path / path_ - def relative_src_path(self, *path): + def relative_src_path(self, *path: str | Path) -> Path: return self.relative_build_path("src", *path) - def relative_pioenvs_path(self, *path): + def relative_pioenvs_path(self, *path: str | Path) -> Path: return self.relative_build_path(".pioenvs", *path) - def relative_piolibdeps_path(self, *path): + def relative_piolibdeps_path(self, *path: str | Path) -> Path: return self.relative_build_path(".piolibdeps", *path) @property - def firmware_bin(self): + def firmware_bin(self) 
-> Path: if self.is_libretiny: return self.relative_pioenvs_path(self.name, "firmware.uf2") return self.relative_pioenvs_path(self.name, "firmware.bin") diff --git a/esphome/core/config.py b/esphome/core/config.py index 96b9e23861..6d4f5af692 100644 --- a/esphome/core/config.py +++ b/esphome/core/config.py @@ -136,21 +136,21 @@ def validate_ids_and_references(config: ConfigType) -> ConfigType: return config -def valid_include(value): +def valid_include(value: str) -> str: # Look for "<...>" includes if value.startswith("<") and value.endswith(">"): return value try: - return cv.directory(value) + return str(cv.directory(value)) except cv.Invalid: pass - value = cv.file_(value) - _, ext = os.path.splitext(value) + path = cv.file_(value) + ext = path.suffix if ext not in VALID_INCLUDE_EXTS: raise cv.Invalid( f"Include has invalid file extension {ext} - valid extensions are {', '.join(VALID_INCLUDE_EXTS)}" ) - return value + return str(path) def valid_project_name(value: str): @@ -311,9 +311,9 @@ def preload_core_config(config, result) -> str: CORE.data[KEY_CORE] = {} if CONF_BUILD_PATH not in conf: - build_path = get_str_env("ESPHOME_BUILD_PATH", "build") - conf[CONF_BUILD_PATH] = os.path.join(build_path, CORE.name) - CORE.build_path = CORE.relative_internal_path(conf[CONF_BUILD_PATH]) + build_path = Path(get_str_env("ESPHOME_BUILD_PATH", "build")) + conf[CONF_BUILD_PATH] = str(build_path / CORE.name) + CORE.build_path = CORE.data_dir / conf[CONF_BUILD_PATH] target_platforms = [] @@ -339,12 +339,12 @@ def preload_core_config(config, result) -> str: return target_platforms[0] -def include_file(path, basename): - parts = basename.split(os.path.sep) +def include_file(path: Path, basename: Path): + parts = basename.parts dst = CORE.relative_src_path(*parts) copy_file_if_changed(path, dst) - _, ext = os.path.splitext(path) + ext = path.suffix if ext in [".h", ".hpp", ".tcc"]: # Header, add include statement cg.add_global(cg.RawStatement(f'#include "{basename}"')) @@ -377,18 +377,18 @@ async def add_arduino_global_workaround(): @coroutine_with_priority(CoroPriority.FINAL) -async def add_includes(includes): +async def add_includes(includes: list[str]) -> None: # Add includes at the very end, so that the included files can access global variables for include in includes: path = CORE.relative_config_path(include) - if os.path.isdir(path): + if path.is_dir(): # Directory, copy tree for p in walk_files(path): - basename = os.path.relpath(p, os.path.dirname(path)) + basename = p.relative_to(path.parent) include_file(p, basename) else: # Copy file - basename = os.path.basename(path) + basename = Path(path.name) include_file(path, basename) diff --git a/esphome/dashboard/core.py b/esphome/dashboard/core.py index 410ef0c29d..11aa4f3cb5 100644 --- a/esphome/dashboard/core.py +++ b/esphome/dashboard/core.py @@ -7,7 +7,6 @@ from dataclasses import dataclass from functools import partial import json import logging -from pathlib import Path import threading from typing import Any @@ -108,7 +107,7 @@ class ESPHomeDashboard: await self.loop.run_in_executor(None, self.load_ignored_devices) def load_ignored_devices(self) -> None: - storage_path = Path(ignored_devices_storage_path()) + storage_path = ignored_devices_storage_path() try: with storage_path.open("r", encoding="utf-8") as f_handle: data = json.load(f_handle) @@ -117,7 +116,7 @@ class ESPHomeDashboard: pass def save_ignored_devices(self) -> None: - storage_path = Path(ignored_devices_storage_path()) + storage_path = ignored_devices_storage_path() with 
storage_path.open("w", encoding="utf-8") as f_handle: json.dump( {"ignored_devices": sorted(self.ignored_devices)}, indent=2, fp=f_handle diff --git a/esphome/dashboard/entries.py b/esphome/dashboard/entries.py index b138cfd272..de868fbf2b 100644 --- a/esphome/dashboard/entries.py +++ b/esphome/dashboard/entries.py @@ -5,7 +5,7 @@ from collections import defaultdict from dataclasses import dataclass from functools import lru_cache import logging -import os +from pathlib import Path from typing import TYPE_CHECKING, Any from esphome import const, util @@ -287,12 +287,12 @@ class DashboardEntries: for file in util.list_yaml_files([self._config_dir]): try: # Prefer the json storage path if it exists - stat = os.stat(ext_storage_path(os.path.basename(file))) + stat = ext_storage_path(file.name).stat() except OSError: try: # Fallback to the yaml file if the storage # file does not exist or could not be generated - stat = os.stat(file) + stat = file.stat() except OSError: # File was deleted, ignore continue @@ -329,10 +329,10 @@ class DashboardEntry: "_to_dict", ) - def __init__(self, path: str, cache_key: DashboardCacheKeyType) -> None: + def __init__(self, path: Path, cache_key: DashboardCacheKeyType) -> None: """Initialize the DashboardEntry.""" self.path = path - self.filename: str = os.path.basename(path) + self.filename: str = path.name self._storage_path = ext_storage_path(self.filename) self.cache_key = cache_key self.storage: StorageJSON | None = None @@ -365,7 +365,7 @@ class DashboardEntry: "loaded_integrations": sorted(self.loaded_integrations), "deployed_version": self.update_old, "current_version": self.update_new, - "path": self.path, + "path": str(self.path), "comment": self.comment, "address": self.address, "web_port": self.web_port, diff --git a/esphome/dashboard/settings.py b/esphome/dashboard/settings.py index fa39b55016..35b67c0d23 100644 --- a/esphome/dashboard/settings.py +++ b/esphome/dashboard/settings.py @@ -27,7 +27,7 @@ class DashboardSettings: def __init__(self) -> None: """Initialize the dashboard settings.""" - self.config_dir: str = "" + self.config_dir: Path = None self.password_hash: str = "" self.username: str = "" self.using_password: bool = False @@ -45,10 +45,10 @@ class DashboardSettings: self.using_password = bool(password) if self.using_password: self.password_hash = password_hash(password) - self.config_dir = args.configuration - self.absolute_config_dir = Path(self.config_dir).resolve() + self.config_dir = Path(args.configuration) + self.absolute_config_dir = self.config_dir.resolve() self.verbose = args.verbose - CORE.config_path = os.path.join(self.config_dir, ".") + CORE.config_path = self.config_dir / "." 
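
Most of the churn in this patch is a mechanical os.path-to-pathlib conversion, as in the DashboardSettings change above. A rough cheat sheet of the translations used throughout (illustrative only, with a made-up file name; not code from the patch):

    from pathlib import Path

    cfg = Path("/config") / "living_room.yaml"   # os.path.join(...)
    print(cfg.name)       # 'living_room.yaml'   # os.path.basename(...)
    print(cfg.parent)     # '/config'            # os.path.dirname(...)
    print(cfg.suffix)     # '.yaml'              # os.path.splitext(...)[1]
    print(cfg.resolve())  # absolute path        # os.path.abspath(...), plus symlink resolution
    print(cfg.is_file(), cfg.exists())           # os.path.isfile(...) / os.path.exists(...)
    # cfg.read_text(...), cfg.write_text(...) and cfg.unlink() stand in for
    # open(...).read(), open(...).write() and os.remove(...)
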
@property def relative_url(self) -> str: @@ -81,9 +81,9 @@ class DashboardSettings: # Compare password in constant running time (to prevent timing attacks) return hmac.compare_digest(self.password_hash, password_hash(password)) - def rel_path(self, *args: Any) -> str: + def rel_path(self, *args: Any) -> Path: """Return a path relative to the ESPHome config folder.""" - joined_path = os.path.join(self.config_dir, *args) + joined_path = self.config_dir / Path(*args) # Raises ValueError if not relative to ESPHome config folder - Path(joined_path).resolve().relative_to(self.absolute_config_dir) + joined_path.resolve().relative_to(self.absolute_config_dir) return joined_path diff --git a/esphome/dashboard/util/file.py b/esphome/dashboard/util/file.py deleted file mode 100644 index bb263f9ad7..0000000000 --- a/esphome/dashboard/util/file.py +++ /dev/null @@ -1,63 +0,0 @@ -import logging -import os -from pathlib import Path -import tempfile - -_LOGGER = logging.getLogger(__name__) - - -def write_utf8_file( - filename: Path, - utf8_str: str, - private: bool = False, -) -> None: - """Write a file and rename it into place. - - Writes all or nothing. - """ - write_file(filename, utf8_str.encode("utf-8"), private) - - -# from https://github.com/home-assistant/core/blob/dev/homeassistant/util/file.py -def write_file( - filename: Path, - utf8_data: bytes, - private: bool = False, -) -> None: - """Write a file and rename it into place. - - Writes all or nothing. - """ - - tmp_filename = "" - missing_fchmod = False - try: - # Modern versions of Python tempfile create this file with mode 0o600 - with tempfile.NamedTemporaryFile( - mode="wb", dir=os.path.dirname(filename), delete=False - ) as fdesc: - fdesc.write(utf8_data) - tmp_filename = fdesc.name - if not private: - try: - os.fchmod(fdesc.fileno(), 0o644) - except AttributeError: - # os.fchmod is not available on Windows - missing_fchmod = True - - os.replace(tmp_filename, filename) - if missing_fchmod: - os.chmod(filename, 0o644) - finally: - if os.path.exists(tmp_filename): - try: - os.remove(tmp_filename) - except OSError as err: - # If we are cleaning up then something else went wrong, so - # we should suppress likely follow-on errors in the cleanup - _LOGGER.error( - "File replacement cleanup failed for %s while saving %s: %s", - tmp_filename, - filename, - err, - ) diff --git a/esphome/dashboard/web_server.py b/esphome/dashboard/web_server.py index 24595eb942..7b6e6b4507 100644 --- a/esphome/dashboard/web_server.py +++ b/esphome/dashboard/web_server.py @@ -49,10 +49,10 @@ from esphome.storage_json import ( from esphome.util import get_serial_ports, shlex_quote from esphome.yaml_util import FastestAvailableSafeLoader +from ..helpers import write_file from .const import DASHBOARD_COMMAND from .core import DASHBOARD, ESPHomeDashboard from .entries import UNKNOWN_STATE, DashboardEntry, entry_state_to_bool -from .util.file import write_file from .util.subprocess import async_run_system_command from .util.text import friendly_name_slugify @@ -581,7 +581,7 @@ class WizardRequestHandler(BaseHandler): destination = settings.rel_path(filename) # Check if destination file already exists - if os.path.exists(destination): + if destination.exists(): self.set_status(409) # Conflict status code self.set_header("content-type", "application/json") self.write( @@ -798,10 +798,9 @@ class DownloadBinaryRequestHandler(BaseHandler): "download", f"{storage_json.name}-{file_name}", ) - path = os.path.dirname(storage_json.firmware_bin_path) - path = os.path.join(path, 
file_name) + path = storage_json.firmware_bin_path.with_name(file_name) - if not Path(path).is_file(): + if not path.is_file(): args = ["esphome", "idedata", settings.rel_path(configuration)] rc, stdout, _ = await async_run_system_command(args) @@ -1016,7 +1015,7 @@ class EditRequestHandler(BaseHandler): return filename = settings.rel_path(configuration) - if Path(filename).resolve().parent != settings.absolute_config_dir: + if filename.resolve().parent != settings.absolute_config_dir: self.send_error(404) return @@ -1039,10 +1038,6 @@ class EditRequestHandler(BaseHandler): self.set_status(404) return None - def _write_file(self, filename: str, content: bytes) -> None: - """Write a file with the given content.""" - write_file(filename, content) - @authenticated @bind_config async def post(self, configuration: str | None = None) -> None: @@ -1052,12 +1047,12 @@ class EditRequestHandler(BaseHandler): return filename = settings.rel_path(configuration) - if Path(filename).resolve().parent != settings.absolute_config_dir: + if filename.resolve().parent != settings.absolute_config_dir: self.send_error(404) return loop = asyncio.get_running_loop() - await loop.run_in_executor(None, self._write_file, filename, self.request.body) + await loop.run_in_executor(None, write_file, filename, self.request.body) # Ensure the StorageJSON is updated as well DASHBOARD.entries.async_schedule_storage_json_update(filename) self.set_status(200) @@ -1072,7 +1067,7 @@ class ArchiveRequestHandler(BaseHandler): archive_path = archive_storage_path() mkdir_p(archive_path) - shutil.move(config_file, os.path.join(archive_path, configuration)) + shutil.move(config_file, archive_path / configuration) storage_json = StorageJSON.load(storage_path) if storage_json is not None and storage_json.build_path: @@ -1086,7 +1081,7 @@ class UnArchiveRequestHandler(BaseHandler): def post(self, configuration: str | None = None) -> None: config_file = settings.rel_path(configuration) archive_path = archive_storage_path() - shutil.move(os.path.join(archive_path, configuration), config_file) + shutil.move(archive_path / configuration, config_file) class LoginHandler(BaseHandler): @@ -1173,7 +1168,7 @@ class SecretKeysRequestHandler(BaseHandler): for secret_filename in const.SECRETS_FILES: relative_filename = settings.rel_path(secret_filename) - if os.path.isfile(relative_filename): + if relative_filename.is_file(): filename = relative_filename break @@ -1206,16 +1201,17 @@ class JsonConfigRequestHandler(BaseHandler): @bind_config async def get(self, configuration: str | None = None) -> None: filename = settings.rel_path(configuration) - if not os.path.isfile(filename): + if not filename.is_file(): self.send_error(404) return - args = ["esphome", "config", filename, "--show-secrets"] + args = ["esphome", "config", str(filename), "--show-secrets"] - rc, stdout, _ = await async_run_system_command(args) + rc, stdout, stderr = await async_run_system_command(args) if rc != 0: - self.send_error(422) + self.set_status(422) + self.write(stderr) return data = yaml.load(stdout, Loader=SafeLoaderIgnoreUnknown) @@ -1224,7 +1220,7 @@ class JsonConfigRequestHandler(BaseHandler): self.finish() -def get_base_frontend_path() -> str: +def get_base_frontend_path() -> Path: if ENV_DEV not in os.environ: import esphome_dashboard @@ -1235,11 +1231,12 @@ def get_base_frontend_path() -> str: static_path += "/" # This path can be relative, so resolve against the root or else templates don't work - return os.path.abspath(os.path.join(os.getcwd(), static_path, 
"esphome_dashboard")) + path = Path(os.getcwd()) / static_path / "esphome_dashboard" + return path.resolve() -def get_static_path(*args: Iterable[str]) -> str: - return os.path.join(get_base_frontend_path(), "static", *args) +def get_static_path(*args: Iterable[str]) -> Path: + return get_base_frontend_path() / "static" / Path(*args) @functools.cache @@ -1256,8 +1253,7 @@ def get_static_file_url(name: str) -> str: return base.replace("index.js", esphome_dashboard.entrypoint()) path = get_static_path(name) - with open(path, "rb") as f_handle: - hash_ = hashlib.md5(f_handle.read()).hexdigest()[:8] + hash_ = hashlib.md5(path.read_bytes()).hexdigest()[:8] return f"{base}?hash={hash_}" @@ -1357,7 +1353,7 @@ def start_web_server( """Start the web server listener.""" trash_path = trash_storage_path() - if os.path.exists(trash_path): + if trash_path.is_dir() and trash_path.exists(): _LOGGER.info("Renaming 'trash' folder to 'archive'") archive_path = archive_storage_path() shutil.move(trash_path, archive_path) diff --git a/esphome/espota2.py b/esphome/espota2.py index bf45d91d60..8215c14cb3 100644 --- a/esphome/espota2.py +++ b/esphome/espota2.py @@ -4,6 +4,7 @@ import gzip import hashlib import io import logging +from pathlib import Path import random import socket import sys @@ -193,7 +194,7 @@ def send_check(sock, data, msg): def perform_ota( - sock: socket.socket, password: str, file_handle: io.IOBase, filename: str + sock: socket.socket, password: str, file_handle: io.IOBase, filename: Path ) -> None: file_contents = file_handle.read() file_size = len(file_contents) @@ -330,7 +331,7 @@ def perform_ota( def run_ota_impl_( - remote_host: str | list[str], remote_port: int, password: str, filename: str + remote_host: str | list[str], remote_port: int, password: str, filename: Path ) -> tuple[int, str | None]: from esphome.core import CORE @@ -381,7 +382,7 @@ def run_ota_impl_( def run_ota( - remote_host: str | list[str], remote_port: int, password: str, filename: str + remote_host: str | list[str], remote_port: int, password: str, filename: Path ) -> tuple[int, str | None]: try: return run_ota_impl_(remote_host, remote_port, password, filename) diff --git a/esphome/external_files.py b/esphome/external_files.py index 057ff52f3f..80b54ebb2f 100644 --- a/esphome/external_files.py +++ b/esphome/external_files.py @@ -2,7 +2,6 @@ from __future__ import annotations from datetime import datetime import logging -import os from pathlib import Path import requests @@ -23,11 +22,11 @@ CONTENT_DISPOSITION = "content-disposition" TEMP_DIR = "temp" -def has_remote_file_changed(url, local_file_path): - if os.path.exists(local_file_path): +def has_remote_file_changed(url: str, local_file_path: Path) -> bool: + if local_file_path.exists(): _LOGGER.debug("has_remote_file_changed: File exists at %s", local_file_path) try: - local_modification_time = os.path.getmtime(local_file_path) + local_modification_time = local_file_path.stat().st_mtime local_modification_time_str = datetime.utcfromtimestamp( local_modification_time ).strftime("%a, %d %b %Y %H:%M:%S GMT") @@ -65,9 +64,9 @@ def has_remote_file_changed(url, local_file_path): return True -def is_file_recent(file_path: str, refresh: TimePeriodSeconds) -> bool: - if os.path.exists(file_path): - creation_time = os.path.getctime(file_path) +def is_file_recent(file_path: Path, refresh: TimePeriodSeconds) -> bool: + if file_path.exists(): + creation_time = file_path.stat().st_ctime current_time = datetime.now().timestamp() return current_time - creation_time <= 
refresh.total_seconds return False diff --git a/esphome/helpers.py b/esphome/helpers.py index 2b7221355c..fb7b71775d 100644 --- a/esphome/helpers.py +++ b/esphome/helpers.py @@ -1,6 +1,5 @@ from __future__ import annotations -import codecs from contextlib import suppress import ipaddress import logging @@ -8,6 +7,7 @@ import os from pathlib import Path import platform import re +import shutil import tempfile from typing import TYPE_CHECKING from urllib.parse import urlparse @@ -140,16 +140,16 @@ def run_system_command(*args): return rc, stdout, stderr -def mkdir_p(path): +def mkdir_p(path: Path): if not path: # Empty path - means create current dir return try: - os.makedirs(path) + path.mkdir(parents=True, exist_ok=True) except OSError as err: import errno - if err.errno == errno.EEXIST and os.path.isdir(path): + if err.errno == errno.EEXIST and path.is_dir(): pass else: from esphome.core import EsphomeError @@ -331,16 +331,15 @@ def is_ha_addon(): return get_bool_env("ESPHOME_IS_HA_ADDON") -def walk_files(path): +def walk_files(path: Path): for root, _, files in os.walk(path): for name in files: - yield os.path.join(root, name) + yield Path(root) / name -def read_file(path): +def read_file(path: Path) -> str: try: - with codecs.open(path, "r", encoding="utf-8") as f_handle: - return f_handle.read() + return path.read_text(encoding="utf-8") except OSError as err: from esphome.core import EsphomeError @@ -351,13 +350,15 @@ def read_file(path): raise EsphomeError(f"Error reading file {path}: {err}") from err -def _write_file(path: Path | str, text: str | bytes): +def _write_file( + path: Path, + text: str | bytes, + private: bool = False, +) -> None: """Atomically writes `text` to the given path. Automatically creates all parent directories. """ - if not isinstance(path, Path): - path = Path(path) data = text if isinstance(text, str): data = text.encode() @@ -365,42 +366,54 @@ def _write_file(path: Path | str, text: str | bytes): directory = path.parent directory.mkdir(exist_ok=True, parents=True) - tmp_path = None + tmp_filename: Path | None = None + missing_fchmod = False try: + # Modern versions of Python tempfile create this file with mode 0o600 with tempfile.NamedTemporaryFile( mode="wb", dir=directory, delete=False ) as f_handle: - tmp_path = f_handle.name f_handle.write(data) - # Newer tempfile implementations create the file with mode 0o600 - os.chmod(tmp_path, 0o644) - # If destination exists, will be overwritten - os.replace(tmp_path, path) + tmp_filename = Path(f_handle.name) + + if not private: + try: + os.fchmod(f_handle.fileno(), 0o644) + except AttributeError: + # os.fchmod is not available on Windows + missing_fchmod = True + shutil.move(tmp_filename, path) + if missing_fchmod: + path.chmod(0o644) finally: - if tmp_path is not None and os.path.exists(tmp_path): + if tmp_filename and tmp_filename.exists(): try: - os.remove(tmp_path) + tmp_filename.unlink() except OSError as err: - _LOGGER.error("Write file cleanup failed: %s", err) + # If we are cleaning up then something else went wrong, so + # we should suppress likely follow-on errors in the cleanup + _LOGGER.error( + "File replacement cleanup failed for %s while saving %s: %s", + tmp_filename, + path, + err, + ) -def write_file(path: Path | str, text: str): +def write_file(path: Path, text: str | bytes, private: bool = False) -> None: try: - _write_file(path, text) + _write_file(path, text, private=private) except OSError as err: from esphome.core import EsphomeError raise EsphomeError(f"Could not write file at {path}") 
from err -def write_file_if_changed(path: Path | str, text: str) -> bool: +def write_file_if_changed(path: Path, text: str) -> bool: """Write text to the given path, but not if the contents match already. Returns true if the file was changed. """ - if not isinstance(path, Path): - path = Path(path) - src_content = None if path.is_file(): src_content = read_file(path) @@ -410,12 +423,10 @@ def write_file_if_changed(path: Path | str, text: str) -> bool: return True -def copy_file_if_changed(src: os.PathLike, dst: os.PathLike) -> None: - import shutil - +def copy_file_if_changed(src: Path, dst: Path) -> None: if file_compare(src, dst): return - mkdir_p(os.path.dirname(dst)) + dst.parent.mkdir(parents=True, exist_ok=True) try: shutil.copyfile(src, dst) except OSError as err: @@ -440,12 +451,12 @@ def list_starts_with(list_, sub): return len(sub) <= len(list_) and all(list_[i] == x for i, x in enumerate(sub)) -def file_compare(path1: os.PathLike, path2: os.PathLike) -> bool: +def file_compare(path1: Path, path2: Path) -> bool: """Return True if the files path1 and path2 have the same contents.""" import stat try: - stat1, stat2 = os.stat(path1), os.stat(path2) + stat1, stat2 = path1.stat(), path2.stat() except OSError: # File doesn't exist or another error -> not equal return False @@ -462,7 +473,7 @@ def file_compare(path1: os.PathLike, path2: os.PathLike) -> bool: bufsize = 8 * 1024 # Read files in blocks until a mismatch is found - with open(path1, "rb") as fh1, open(path2, "rb") as fh2: + with path1.open("rb") as fh1, path2.open("rb") as fh2: while True: blob1, blob2 = fh1.read(bufsize), fh2.read(bufsize) if blob1 != blob2: diff --git a/esphome/platformio_api.py b/esphome/platformio_api.py index d8c8611884..8c2e147020 100644 --- a/esphome/platformio_api.py +++ b/esphome/platformio_api.py @@ -18,23 +18,25 @@ def patch_structhash(): # removed/added. 
This might have unintended consequences, but this improves compile # times greatly when adding/removing components and a simple clean build solves # all issues - from os import makedirs - from os.path import getmtime, isdir, join - from platformio.run import cli, helpers def patched_clean_build_dir(build_dir, *args): from platformio import fs from platformio.project.helpers import get_project_dir - platformio_ini = join(get_project_dir(), "platformio.ini") + platformio_ini = Path(get_project_dir()) / "platformio.ini" + + build_dir = Path(build_dir) # if project's config is modified - if isdir(build_dir) and getmtime(platformio_ini) > getmtime(build_dir): + if ( + build_dir.is_dir() + and platformio_ini.stat().st_mtime > build_dir.stat().st_mtime + ): fs.rmtree(build_dir) - if not isdir(build_dir): - makedirs(build_dir) + if not build_dir.is_dir(): + build_dir.mkdir(parents=True) helpers.clean_build_dir = patched_clean_build_dir cli.clean_build_dir = patched_clean_build_dir @@ -77,9 +79,9 @@ FILTER_PLATFORMIO_LINES = [ def run_platformio_cli(*args, **kwargs) -> str | int: os.environ["PLATFORMIO_FORCE_COLOR"] = "true" - os.environ["PLATFORMIO_BUILD_DIR"] = os.path.abspath(CORE.relative_pioenvs_path()) + os.environ["PLATFORMIO_BUILD_DIR"] = str(CORE.relative_pioenvs_path().absolute()) os.environ.setdefault( - "PLATFORMIO_LIBDEPS_DIR", os.path.abspath(CORE.relative_piolibdeps_path()) + "PLATFORMIO_LIBDEPS_DIR", str(CORE.relative_piolibdeps_path().absolute()) ) # Suppress Python syntax warnings from third-party scripts during compilation os.environ.setdefault("PYTHONWARNINGS", "ignore::SyntaxWarning") @@ -98,7 +100,7 @@ def run_platformio_cli(*args, **kwargs) -> str | int: def run_platformio_cli_run(config, verbose, *args, **kwargs) -> str | int: - command = ["run", "-d", CORE.build_path] + command = ["run", "-d", str(CORE.build_path)] if verbose: command += ["-v"] command += list(args) @@ -130,8 +132,8 @@ def _run_idedata(config): def _load_idedata(config): - platformio_ini = Path(CORE.relative_build_path("platformio.ini")) - temp_idedata = Path(CORE.relative_internal_path("idedata", f"{CORE.name}.json")) + platformio_ini = CORE.relative_build_path("platformio.ini") + temp_idedata = CORE.relative_internal_path("idedata", f"{CORE.name}.json") changed = False if ( @@ -301,7 +303,7 @@ def process_stacktrace(config, line, backtrace_state): @dataclass class FlashImage: - path: str + path: Path offset: str @@ -310,17 +312,17 @@ class IDEData: self.raw = raw @property - def firmware_elf_path(self): - return self.raw["prog_path"] + def firmware_elf_path(self) -> Path: + return Path(self.raw["prog_path"]) @property - def firmware_bin_path(self) -> str: - return str(Path(self.firmware_elf_path).with_suffix(".bin")) + def firmware_bin_path(self) -> Path: + return self.firmware_elf_path.with_suffix(".bin") @property def extra_flash_images(self) -> list[FlashImage]: return [ - FlashImage(path=entry["path"], offset=entry["offset"]) + FlashImage(path=Path(entry["path"]), offset=entry["offset"]) for entry in self.raw["extra"]["flash_images"] ] diff --git a/esphome/storage_json.py b/esphome/storage_json.py index b69dc2dd3f..d5423ab1c7 100644 --- a/esphome/storage_json.py +++ b/esphome/storage_json.py @@ -1,11 +1,11 @@ from __future__ import annotations import binascii -import codecs from datetime import datetime import json import logging import os +from pathlib import Path from esphome import const from esphome.const import CONF_DISABLED, CONF_MDNS @@ -16,30 +16,35 @@ from esphome.types import CoreType 
_LOGGER = logging.getLogger(__name__) -def storage_path() -> str: - return os.path.join(CORE.data_dir, "storage", f"{CORE.config_filename}.json") +def storage_path() -> Path: + return CORE.data_dir / "storage" / f"{CORE.config_filename}.json" -def ext_storage_path(config_filename: str) -> str: - return os.path.join(CORE.data_dir, "storage", f"{config_filename}.json") +def ext_storage_path(config_filename: str) -> Path: + return CORE.data_dir / "storage" / f"{config_filename}.json" -def esphome_storage_path() -> str: - return os.path.join(CORE.data_dir, "esphome.json") +def esphome_storage_path() -> Path: + return CORE.data_dir / "esphome.json" -def ignored_devices_storage_path() -> str: - return os.path.join(CORE.data_dir, "ignored-devices.json") +def ignored_devices_storage_path() -> Path: + return CORE.data_dir / "ignored-devices.json" -def trash_storage_path() -> str: +def trash_storage_path() -> Path: return CORE.relative_config_path("trash") -def archive_storage_path() -> str: +def archive_storage_path() -> Path: return CORE.relative_config_path("archive") +def _to_path_if_not_none(value: str | None) -> Path | None: + """Convert a string to Path if it's not None.""" + return Path(value) if value is not None else None + + class StorageJSON: def __init__( self, @@ -52,8 +57,8 @@ class StorageJSON: address: str, web_port: int | None, target_platform: str, - build_path: str | None, - firmware_bin_path: str | None, + build_path: Path | None, + firmware_bin_path: Path | None, loaded_integrations: set[str], loaded_platforms: set[str], no_mdns: bool, @@ -107,8 +112,8 @@ class StorageJSON: "address": self.address, "web_port": self.web_port, "esp_platform": self.target_platform, - "build_path": self.build_path, - "firmware_bin_path": self.firmware_bin_path, + "build_path": str(self.build_path), + "firmware_bin_path": str(self.firmware_bin_path), "loaded_integrations": sorted(self.loaded_integrations), "loaded_platforms": sorted(self.loaded_platforms), "no_mdns": self.no_mdns, @@ -176,8 +181,8 @@ class StorageJSON: ) @staticmethod - def _load_impl(path: str) -> StorageJSON | None: - with codecs.open(path, "r", encoding="utf-8") as f_handle: + def _load_impl(path: Path) -> StorageJSON | None: + with path.open("r", encoding="utf-8") as f_handle: storage = json.load(f_handle) storage_version = storage["storage_version"] name = storage.get("name") @@ -190,8 +195,8 @@ class StorageJSON: address = storage.get("address") web_port = storage.get("web_port") esp_platform = storage.get("esp_platform") - build_path = storage.get("build_path") - firmware_bin_path = storage.get("firmware_bin_path") + build_path = _to_path_if_not_none(storage.get("build_path")) + firmware_bin_path = _to_path_if_not_none(storage.get("firmware_bin_path")) loaded_integrations = set(storage.get("loaded_integrations", [])) loaded_platforms = set(storage.get("loaded_platforms", [])) no_mdns = storage.get("no_mdns", False) @@ -217,7 +222,7 @@ class StorageJSON: ) @staticmethod - def load(path: str) -> StorageJSON | None: + def load(path: Path) -> StorageJSON | None: try: return StorageJSON._load_impl(path) except Exception: # pylint: disable=broad-except @@ -268,7 +273,7 @@ class EsphomeStorageJSON: @staticmethod def _load_impl(path: str) -> EsphomeStorageJSON | None: - with codecs.open(path, "r", encoding="utf-8") as f_handle: + with Path(path).open("r", encoding="utf-8") as f_handle: storage = json.load(f_handle) storage_version = storage["storage_version"] cookie_secret = storage.get("cookie_secret") diff --git a/esphome/util.py 
b/esphome/util.py index 23a66be4eb..3bf3248cb3 100644 --- a/esphome/util.py +++ b/esphome/util.py @@ -1,7 +1,6 @@ import collections import io import logging -import os from pathlib import Path import re import subprocess @@ -86,7 +85,10 @@ def safe_input(prompt=""): return input() -def shlex_quote(s): +def shlex_quote(s: str | Path) -> str: + # Convert Path objects to strings + if isinstance(s, Path): + s = str(s) if not s: return "''" if re.search(r"[^\w@%+=:,./-]", s) is None: @@ -272,25 +274,28 @@ class OrderedDict(collections.OrderedDict): return dict(self).__repr__() -def list_yaml_files(configs: list[str]) -> list[str]: - files: list[str] = [] +def list_yaml_files(configs: list[str | Path]) -> list[Path]: + files: list[Path] = [] for config in configs: - if os.path.isfile(config): + config = Path(config) + if not config.exists(): + raise FileNotFoundError(f"Config path '{config}' does not exist!") + if config.is_file(): files.append(config) else: - files.extend(os.path.join(config, p) for p in os.listdir(config)) + files.extend(config.glob("*")) files = filter_yaml_files(files) return sorted(files) -def filter_yaml_files(files: list[str]) -> list[str]: +def filter_yaml_files(files: list[Path]) -> list[Path]: return [ f for f in files if ( - os.path.splitext(f)[1] in (".yaml", ".yml") - and os.path.basename(f) not in ("secrets.yaml", "secrets.yml") - and not os.path.basename(f).startswith(".") + f.suffix in (".yaml", ".yml") + and f.name not in ("secrets.yaml", "secrets.yml") + and not f.name.startswith(".") ) ] diff --git a/esphome/vscode.py b/esphome/vscode.py index f5e2a20b97..53bb339a8e 100644 --- a/esphome/vscode.py +++ b/esphome/vscode.py @@ -2,7 +2,7 @@ from __future__ import annotations from io import StringIO import json -import os +from pathlib import Path from typing import Any from esphome.config import Config, _format_vol_invalid, validate_config @@ -67,24 +67,24 @@ def _read_file_content_from_json_on_stdin() -> str: return data["content"] -def _print_file_read_event(path: str) -> None: +def _print_file_read_event(path: Path) -> None: """Print a file read event.""" print( json.dumps( { "type": "read_file", - "path": path, + "path": str(path), } ) ) -def _request_and_get_stream_on_stdin(fname: str) -> StringIO: +def _request_and_get_stream_on_stdin(fname: Path) -> StringIO: _print_file_read_event(fname) return StringIO(_read_file_content_from_json_on_stdin()) -def _vscode_loader(fname: str) -> dict[str, Any]: +def _vscode_loader(fname: Path) -> dict[str, Any]: raw_yaml_stream = _request_and_get_stream_on_stdin(fname) # it is required to set the name on StringIO so document on start_mark # is set properly. 
Otherwise it is initialized with "" @@ -92,7 +92,7 @@ def _vscode_loader(fname: str) -> dict[str, Any]: return parse_yaml(fname, raw_yaml_stream, _vscode_loader) -def _ace_loader(fname: str) -> dict[str, Any]: +def _ace_loader(fname: Path) -> dict[str, Any]: raw_yaml_stream = _request_and_get_stream_on_stdin(fname) return parse_yaml(fname, raw_yaml_stream) @@ -120,10 +120,10 @@ def read_config(args): return CORE.vscode = True if args.ace: # Running from ESPHome Compiler dashboard, not vscode - CORE.config_path = os.path.join(args.configuration, data["file"]) + CORE.config_path = Path(args.configuration) / data["file"] loader = _ace_loader else: - CORE.config_path = data["file"] + CORE.config_path = Path(data["file"]) loader = _vscode_loader file_name = CORE.config_path diff --git a/esphome/wizard.py b/esphome/wizard.py index 3edf519816..97343eea99 100644 --- a/esphome/wizard.py +++ b/esphome/wizard.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path import random import string from typing import Literal, NotRequired, TypedDict, Unpack @@ -213,7 +213,7 @@ class WizardWriteKwargs(TypedDict): file_text: NotRequired[str] -def wizard_write(path: str, **kwargs: Unpack[WizardWriteKwargs]) -> bool: +def wizard_write(path: Path, **kwargs: Unpack[WizardWriteKwargs]) -> bool: from esphome.components.bk72xx import boards as bk72xx_boards from esphome.components.esp32 import boards as esp32_boards from esphome.components.esp8266 import boards as esp8266_boards @@ -256,13 +256,13 @@ def wizard_write(path: str, **kwargs: Unpack[WizardWriteKwargs]) -> bool: file_text = wizard_file(**kwargs) # Check if file already exists to prevent overwriting - if os.path.exists(path) and os.path.isfile(path): + if path.exists() and path.is_file(): safe_print(color(AnsiFore.RED, f'The file "{path}" already exists.')) return False write_file(path, file_text) storage = StorageJSON.from_wizard(name, name, f"{name}.local", hardware) - storage_path = ext_storage_path(os.path.basename(path)) + storage_path = ext_storage_path(path.name) storage.save(storage_path) return True @@ -301,7 +301,7 @@ def strip_accents(value: str) -> str: ) -def wizard(path: str) -> int: +def wizard(path: Path) -> int: from esphome.components.bk72xx import boards as bk72xx_boards from esphome.components.esp32 import boards as esp32_boards from esphome.components.esp8266 import boards as esp8266_boards @@ -309,14 +309,14 @@ def wizard(path: str) -> int: from esphome.components.rp2040 import boards as rp2040_boards from esphome.components.rtl87xx import boards as rtl87xx_boards - if not path.endswith(".yaml") and not path.endswith(".yml"): + if path.suffix not in (".yaml", ".yml"): safe_print( - f"Please make your configuration file {color(AnsiFore.CYAN, path)} have the extension .yaml or .yml" + f"Please make your configuration file {color(AnsiFore.CYAN, str(path))} have the extension .yaml or .yml" ) return 1 - if os.path.exists(path): + if path.exists(): safe_print( - f"Uh oh, it seems like {color(AnsiFore.CYAN, path)} already exists, please delete that file first or chose another configuration file." + f"Uh oh, it seems like {color(AnsiFore.CYAN, str(path))} already exists, please delete that file first or chose another configuration file." ) return 2 @@ -549,7 +549,7 @@ def wizard(path: str) -> int: safe_print() safe_print( color(AnsiFore.CYAN, "DONE! 
I've now written a new configuration file to ") - + color(AnsiFore.BOLD_CYAN, path) + + color(AnsiFore.BOLD_CYAN, str(path)) ) safe_print() safe_print("Next steps:") diff --git a/esphome/writer.py b/esphome/writer.py index 2a9c6a770d..c0d4379b3a 100644 --- a/esphome/writer.py +++ b/esphome/writer.py @@ -1,6 +1,5 @@ import importlib import logging -import os from pathlib import Path import re @@ -266,7 +265,7 @@ def generate_version_h(): def write_cpp(code_s): path = CORE.relative_src_path("main.cpp") - if os.path.isfile(path): + if path.is_file(): text = read_file(path) code_format = find_begin_end( text, CPP_AUTO_GENERATE_BEGIN, CPP_AUTO_GENERATE_END @@ -292,28 +291,28 @@ def write_cpp(code_s): def clean_cmake_cache(): pioenvs = CORE.relative_pioenvs_path() - if os.path.isdir(pioenvs): - pioenvs_cmake_path = CORE.relative_pioenvs_path(CORE.name, "CMakeCache.txt") - if os.path.isfile(pioenvs_cmake_path): + if pioenvs.is_dir(): + pioenvs_cmake_path = pioenvs / CORE.name / "CMakeCache.txt" + if pioenvs_cmake_path.is_file(): _LOGGER.info("Deleting %s", pioenvs_cmake_path) - os.remove(pioenvs_cmake_path) + pioenvs_cmake_path.unlink() def clean_build(): import shutil pioenvs = CORE.relative_pioenvs_path() - if os.path.isdir(pioenvs): + if pioenvs.is_dir(): _LOGGER.info("Deleting %s", pioenvs) shutil.rmtree(pioenvs) piolibdeps = CORE.relative_piolibdeps_path() - if os.path.isdir(piolibdeps): + if piolibdeps.is_dir(): _LOGGER.info("Deleting %s", piolibdeps) shutil.rmtree(piolibdeps) dependencies_lock = CORE.relative_build_path("dependencies.lock") - if os.path.isfile(dependencies_lock): + if dependencies_lock.is_file(): _LOGGER.info("Deleting %s", dependencies_lock) - os.remove(dependencies_lock) + dependencies_lock.unlink() # Clean PlatformIO cache to resolve CMake compiler detection issues # This helps when toolchain paths change or get corrupted @@ -324,9 +323,11 @@ def clean_build(): pass else: cache_dir = get_project_cache_dir() - if cache_dir and cache_dir.strip() and os.path.isdir(cache_dir): - _LOGGER.info("Deleting PlatformIO cache %s", cache_dir) - shutil.rmtree(cache_dir) + if cache_dir and cache_dir.strip(): + cache_path = Path(cache_dir) + if cache_path.is_dir(): + _LOGGER.info("Deleting PlatformIO cache %s", cache_dir) + shutil.rmtree(cache_dir) GITIGNORE_CONTENT = """# Gitignore settings for ESPHome @@ -339,6 +340,5 @@ GITIGNORE_CONTENT = """# Gitignore settings for ESPHome def write_gitignore(): path = CORE.relative_config_path(".gitignore") - if not os.path.isfile(path): - with open(file=path, mode="w", encoding="utf-8") as f: - f.write(GITIGNORE_CONTENT) + if not path.is_file(): + path.write_text(GITIGNORE_CONTENT, encoding="utf-8") diff --git a/esphome/yaml_util.py b/esphome/yaml_util.py index f26bc0502d..f430fa22df 100644 --- a/esphome/yaml_util.py +++ b/esphome/yaml_util.py @@ -1,7 +1,6 @@ from __future__ import annotations from collections.abc import Callable -import fnmatch import functools import inspect from io import BytesIO, TextIOBase, TextIOWrapper @@ -9,6 +8,7 @@ from ipaddress import _BaseAddress, _BaseNetwork import logging import math import os +from pathlib import Path from typing import Any import uuid @@ -109,7 +109,9 @@ def _add_data_ref(fn): class ESPHomeLoaderMixin: """Loader class that keeps track of line numbers.""" - def __init__(self, name: str, yaml_loader: Callable[[str], dict[str, Any]]) -> None: + def __init__( + self, name: Path, yaml_loader: Callable[[Path], dict[str, Any]] + ) -> None: """Initialize the loader.""" self.name = name self.yaml_loader 
= yaml_loader @@ -254,12 +256,8 @@ class ESPHomeLoaderMixin: f"Environment variable '{node.value}' not defined", node.start_mark ) - @property - def _directory(self) -> str: - return os.path.dirname(self.name) - - def _rel_path(self, *args: str) -> str: - return os.path.join(self._directory, *args) + def _rel_path(self, *args: str) -> Path: + return self.name.parent / Path(*args) @_add_data_ref def construct_secret(self, node: yaml.Node) -> str: @@ -269,8 +267,8 @@ class ESPHomeLoaderMixin: if self.name == CORE.config_path: raise e try: - main_config_dir = os.path.dirname(CORE.config_path) - main_secret_yml = os.path.join(main_config_dir, SECRET_YAML) + main_config_dir = CORE.config_path.parent + main_secret_yml = main_config_dir / SECRET_YAML secrets = self.yaml_loader(main_secret_yml) except EsphomeError as er: raise EsphomeError(f"{e}\n{er}") from er @@ -329,7 +327,7 @@ class ESPHomeLoaderMixin: files = filter_yaml_files(_find_files(self._rel_path(node.value), "*.yaml")) mapping = OrderedDict() for fname in files: - filename = os.path.splitext(os.path.basename(fname))[0] + filename = fname.stem mapping[filename] = self.yaml_loader(fname) return mapping @@ -369,8 +367,8 @@ class ESPHomeLoader(ESPHomeLoaderMixin, FastestAvailableSafeLoader): def __init__( self, stream: TextIOBase | BytesIO, - name: str, - yaml_loader: Callable[[str], dict[str, Any]], + name: Path, + yaml_loader: Callable[[Path], dict[str, Any]], ) -> None: FastestAvailableSafeLoader.__init__(self, stream) ESPHomeLoaderMixin.__init__(self, name, yaml_loader) @@ -382,8 +380,8 @@ class ESPHomePurePythonLoader(ESPHomeLoaderMixin, PurePythonLoader): def __init__( self, stream: TextIOBase | BytesIO, - name: str, - yaml_loader: Callable[[str], dict[str, Any]], + name: Path, + yaml_loader: Callable[[Path], dict[str, Any]], ) -> None: PurePythonLoader.__init__(self, stream) ESPHomeLoaderMixin.__init__(self, name, yaml_loader) @@ -414,24 +412,24 @@ for _loader in (ESPHomeLoader, ESPHomePurePythonLoader): _loader.add_constructor("!remove", _loader.construct_remove) -def load_yaml(fname: str, clear_secrets: bool = True) -> Any: +def load_yaml(fname: Path, clear_secrets: bool = True) -> Any: if clear_secrets: _SECRET_VALUES.clear() _SECRET_CACHE.clear() return _load_yaml_internal(fname) -def _load_yaml_internal(fname: str) -> Any: +def _load_yaml_internal(fname: Path) -> Any: """Load a YAML file.""" try: - with open(fname, encoding="utf-8") as f_handle: + with fname.open(encoding="utf-8") as f_handle: return parse_yaml(fname, f_handle) except (UnicodeDecodeError, OSError) as err: raise EsphomeError(f"Error reading file {fname}: {err}") from err def parse_yaml( - file_name: str, file_handle: TextIOWrapper, yaml_loader=_load_yaml_internal + file_name: Path, file_handle: TextIOWrapper, yaml_loader=_load_yaml_internal ) -> Any: """Parse a YAML file.""" try: @@ -483,9 +481,9 @@ def substitute_vars(config, vars): def _load_yaml_internal_with_type( loader_type: type[ESPHomeLoader] | type[ESPHomePurePythonLoader], - fname: str, + fname: Path, content: TextIOWrapper, - yaml_loader: Any, + yaml_loader: Callable[[Path], dict[str, Any]], ) -> Any: """Load a YAML file.""" loader = loader_type(content, fname, yaml_loader) @@ -512,13 +510,14 @@ def _is_file_valid(name: str) -> bool: return not name.startswith(".") -def _find_files(directory, pattern): +def _find_files(directory: Path, pattern): """Recursively load files in a directory.""" - for root, dirs, files in os.walk(directory, topdown=True): + for root, dirs, files in os.walk(directory): 
dirs[:] = [d for d in dirs if _is_file_valid(d)] - for basename in files: - if _is_file_valid(basename) and fnmatch.fnmatch(basename, pattern): - filename = os.path.join(root, basename) + for f in files: + filename = Path(f) + if _is_file_valid(f) and filename.match(pattern): + filename = Path(root) / filename yield filename @@ -627,3 +626,4 @@ ESPHomeDumper.add_multi_representer(TimePeriod, ESPHomeDumper.represent_stringif ESPHomeDumper.add_multi_representer(Lambda, ESPHomeDumper.represent_lambda) ESPHomeDumper.add_multi_representer(core.ID, ESPHomeDumper.represent_id) ESPHomeDumper.add_multi_representer(uuid.UUID, ESPHomeDumper.represent_stringify) +ESPHomeDumper.add_multi_representer(Path, ESPHomeDumper.represent_stringify) diff --git a/requirements_test.txt b/requirements_test.txt index 2e2f8a1c0d..2c78eadf45 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -1,6 +1,6 @@ pylint==3.3.8 flake8==7.3.0 # also change in .pre-commit-config.yaml when updating -ruff==0.13.0 # also change in .pre-commit-config.yaml when updating +ruff==0.13.1 # also change in .pre-commit-config.yaml when updating pyupgrade==3.20.0 # also change in .pre-commit-config.yaml when updating pre-commit diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 05567629b2..fa04222c5d 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -3,7 +3,6 @@ from __future__ import annotations from abc import ABC, abstractmethod from enum import IntEnum -import os from pathlib import Path import re from subprocess import call @@ -2704,8 +2703,8 @@ static const char *const TAG = "api.service"; import clang_format def exec_clang_format(path: Path) -> None: - clang_format_path = os.path.join( - os.path.dirname(clang_format.__file__), "data", "bin", "clang-format" + clang_format_path = ( + Path(clang_format.__file__).parent / "data" / "bin" / "clang-format" ) call([clang_format_path, "-i", path]) diff --git a/script/build_codeowners.py b/script/build_codeowners.py index 27ea82611b..10ca1295b7 100755 --- a/script/build_codeowners.py +++ b/script/build_codeowners.py @@ -39,7 +39,7 @@ esphome/core/* @esphome/core parts = [BASE] # Fake some directory so that get_component works -CORE.config_path = str(root) +CORE.config_path = root CORE.data[KEY_CORE] = {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None} codeowners = defaultdict(list) diff --git a/script/build_language_schema.py b/script/build_language_schema.py index ff6e898902..1ffe3c2873 100755 --- a/script/build_language_schema.py +++ b/script/build_language_schema.py @@ -1,9 +1,9 @@ #!/usr/bin/env python3 import argparse -import glob import inspect import json import os +from pathlib import Path import re import voluptuous as vol @@ -70,14 +70,14 @@ def get_component_names(): component_names = ["esphome", "sensor", "esp32", "esp8266"] skip_components = [] - for d in os.listdir(CORE_COMPONENTS_PATH): + for d in CORE_COMPONENTS_PATH.iterdir(): if ( - not d.startswith("__") - and os.path.isdir(os.path.join(CORE_COMPONENTS_PATH, d)) - and d not in component_names - and d not in skip_components + not d.name.startswith("__") + and d.is_dir() + and d.name not in component_names + and d.name not in skip_components ): - component_names.append(d) + component_names.append(d.name) return sorted(component_names) @@ -121,7 +121,7 @@ from esphome.util import Registry # noqa: E402 def write_file(name, obj): - full_path = os.path.join(args.output_path, name + ".json") + full_path = Path(args.output_path) / 
f"{name}.json" if JSON_DUMP_PRETTY: json_str = json.dumps(obj, indent=2) else: @@ -131,9 +131,10 @@ def write_file(name, obj): def delete_extra_files(keep_names): - for d in os.listdir(args.output_path): - if d.endswith(".json") and d[:-5] not in keep_names: - os.remove(os.path.join(args.output_path, d)) + output_path = Path(args.output_path) + for d in output_path.iterdir(): + if d.suffix == ".json" and d.stem not in keep_names: + d.unlink() print(f"Deleted {d}") @@ -367,13 +368,11 @@ def get_logger_tags(): "scheduler", "api.service", ] - for x in os.walk(CORE_COMPONENTS_PATH): - for y in glob.glob(os.path.join(x[0], "*.cpp")): - with open(y, encoding="utf-8") as file: - data = file.read() - match = pattern.search(data) - if match: - tags.append(match.group(1)) + for file in CORE_COMPONENTS_PATH.rglob("*.cpp"): + data = file.read_text() + match = pattern.search(data) + if match: + tags.append(match.group(1)) return tags diff --git a/script/ci-custom.py b/script/ci-custom.py index 61081608d5..bc1ebda93b 100755 --- a/script/ci-custom.py +++ b/script/ci-custom.py @@ -6,6 +6,7 @@ import collections import fnmatch import functools import os.path +from pathlib import Path import re import sys import time @@ -75,12 +76,12 @@ ignore_types = ( LINT_FILE_CHECKS = [] LINT_CONTENT_CHECKS = [] LINT_POST_CHECKS = [] -EXECUTABLE_BIT = {} +EXECUTABLE_BIT: dict[str, int] = {} -errors = collections.defaultdict(list) +errors: collections.defaultdict[Path, list] = collections.defaultdict(list) -def add_errors(fname, errs): +def add_errors(fname: Path, errs: list[tuple[int, int, str] | None]) -> None: if not isinstance(errs, list): errs = [errs] for err in errs: @@ -246,8 +247,8 @@ def lint_ext_check(fname): ".github/copilot-instructions.md", ] ) -def lint_executable_bit(fname): - ex = EXECUTABLE_BIT[fname] +def lint_executable_bit(fname: Path) -> str | None: + ex = EXECUTABLE_BIT[str(fname)] if ex != 100644: return ( f"File has invalid executable bit {ex}. 
If running from a windows machine please " @@ -506,8 +507,8 @@ def lint_constants_usage(): return errs -def relative_cpp_search_text(fname, content): - parts = fname.split("/") +def relative_cpp_search_text(fname: Path, content) -> str: + parts = fname.parts integration = parts[2] return f'#include "esphome/components/{integration}' @@ -524,8 +525,8 @@ def lint_relative_cpp_import(fname, line, col, content): ) -def relative_py_search_text(fname, content): - parts = fname.split("/") +def relative_py_search_text(fname: Path, content: str) -> str: + parts = fname.parts integration = parts[2] return f"esphome.components.{integration}" @@ -591,10 +592,8 @@ def lint_relative_py_import(fname, line, col, content): "esphome/components/http_request/httplib.h", ], ) -def lint_namespace(fname, content): - expected_name = re.match( - r"^esphome/components/([^/]+)/.*", fname.replace(os.path.sep, "/") - ).group(1) +def lint_namespace(fname: Path, content: str) -> str | None: + expected_name = fname.parts[2] # Check for both old style and C++17 nested namespace syntax search_old = f"namespace {expected_name}" search_new = f"namespace esphome::{expected_name}" @@ -733,9 +732,9 @@ def main(): files.sort() for fname in files: - _, ext = os.path.splitext(fname) + fname = Path(fname) run_checks(LINT_FILE_CHECKS, fname, fname) - if ext in ignore_types: + if fname.suffix in ignore_types: continue try: with codecs.open(fname, "r", encoding="utf-8") as f_handle: diff --git a/script/helpers.py b/script/helpers.py index 2c2f44a513..38e6fcbd1e 100644 --- a/script/helpers.py +++ b/script/helpers.py @@ -52,10 +52,10 @@ def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str: return prefix + msg + suffix -def print_error_for_file(file: str, body: str | None) -> None: +def print_error_for_file(file: str | Path, body: str | None) -> None: print( styled(colorama.Fore.GREEN, "### File ") - + styled((colorama.Fore.GREEN, colorama.Style.BRIGHT), file) + + styled((colorama.Fore.GREEN, colorama.Style.BRIGHT), str(file)) ) print() if body is not None: @@ -513,7 +513,7 @@ def get_all_dependencies(component_names: set[str]) -> set[str]: # Set up fake config path for component loading root = Path(__file__).parent.parent - CORE.config_path = str(root) + CORE.config_path = root CORE.data[KEY_CORE] = {} # Keep finding dependencies until no new ones are found @@ -553,7 +553,7 @@ def get_components_from_integration_fixtures() -> set[str]: fixtures_dir = Path(__file__).parent.parent / "tests" / "integration" / "fixtures" for yaml_file in fixtures_dir.glob("*.yaml"): - config: dict[str, any] | None = yaml_util.load_yaml(str(yaml_file)) + config: dict[str, any] | None = yaml_util.load_yaml(yaml_file) if not config: continue diff --git a/script/list-components.py b/script/list-components.py index 66212f44e7..ef02aecdf6 100755 --- a/script/list-components.py +++ b/script/list-components.py @@ -50,7 +50,7 @@ def create_components_graph(): root = Path(__file__).parent.parent components_dir = root / "esphome" / "components" # Fake some directory so that get_component works - CORE.config_path = str(root) + CORE.config_path = root # Various configuration to capture different outcomes used by `AUTO_LOAD` function. 
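# A small sketch of how the reworked lint helpers above derive the
# integration name from a Path: for "esphome/components/<name>/...",
# Path.parts[2] is the component name. The function name is illustrative.
from pathlib import Path


def integration_name(fname: Path) -> str:
    # Path("esphome/components/uart/uart.cpp").parts
    #   == ("esphome", "components", "uart", "uart.cpp")
    return fname.parts[2]


assert integration_name(Path("esphome/components/uart/uart.cpp")) == "uart"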
TARGET_CONFIGURATIONS = [ {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None}, diff --git a/tests/component_tests/conftest.py b/tests/component_tests/conftest.py index 2045b03502..79b4151c55 100644 --- a/tests/component_tests/conftest.py +++ b/tests/component_tests/conftest.py @@ -40,9 +40,9 @@ def config_path(request: pytest.FixtureRequest) -> Generator[None]: if config_dir.exists(): # Set config_path to a dummy yaml file in the config directory # This ensures CORE.config_dir points to the config directory - CORE.config_path = str(config_dir / "dummy.yaml") + CORE.config_path = config_dir / "dummy.yaml" else: - CORE.config_path = str(Path(request.fspath).parent / "dummy.yaml") + CORE.config_path = Path(request.fspath).parent / "dummy.yaml" yield CORE.config_path = original_path @@ -129,7 +129,7 @@ def generate_main() -> Generator[Callable[[str | Path], str]]: """Generates the C++ main.cpp from a given yaml file and returns it in string form.""" def generator(path: str | Path) -> str: - CORE.config_path = str(path) + CORE.config_path = Path(path) CORE.config = read_config({}) generate_cpp_contents(CORE.config) return CORE.cpp_main_section diff --git a/tests/dashboard/test_entries.py b/tests/dashboard/test_entries.py index a86c33a16f..2f6d5667b7 100644 --- a/tests/dashboard/test_entries.py +++ b/tests/dashboard/test_entries.py @@ -22,7 +22,7 @@ def create_cache_key() -> tuple[int, int, float, int]: def setup_core(): """Set up CORE for testing.""" with tempfile.TemporaryDirectory() as tmpdir: - CORE.config_path = str(Path(tmpdir) / "test.yaml") + CORE.config_path = Path(tmpdir) / "test.yaml" yield CORE.reset() @@ -44,7 +44,7 @@ async def dashboard_entries(mock_settings: MagicMock) -> DashboardEntries: def test_dashboard_entry_path_initialization() -> None: """Test DashboardEntry initializes with path correctly.""" - test_path = "/test/config/device.yaml" + test_path = Path("/test/config/device.yaml") cache_key = create_cache_key() entry = DashboardEntry(test_path, cache_key) @@ -59,21 +59,21 @@ def test_dashboard_entry_path_with_absolute_path() -> None: test_path = Path.cwd() / "absolute" / "path" / "to" / "config.yaml" cache_key = create_cache_key() - entry = DashboardEntry(str(test_path), cache_key) + entry = DashboardEntry(test_path, cache_key) - assert entry.path == str(test_path) - assert Path(entry.path).is_absolute() + assert entry.path == test_path + assert entry.path.is_absolute() def test_dashboard_entry_path_with_relative_path() -> None: """Test DashboardEntry handles relative paths.""" - test_path = "configs/device.yaml" + test_path = Path("configs/device.yaml") cache_key = create_cache_key() entry = DashboardEntry(test_path, cache_key) assert entry.path == test_path - assert not Path(entry.path).is_absolute() + assert not entry.path.is_absolute() @pytest.mark.asyncio @@ -81,12 +81,12 @@ async def test_dashboard_entries_get_by_path( dashboard_entries: DashboardEntries, ) -> None: """Test getting entry by path.""" - test_path = "/test/config/device.yaml" + test_path = Path("/test/config/device.yaml") entry = DashboardEntry(test_path, create_cache_key()) - dashboard_entries._entries[test_path] = entry + dashboard_entries._entries[str(test_path)] = entry - result = dashboard_entries.get(test_path) + result = dashboard_entries.get(str(test_path)) assert result == entry @@ -104,12 +104,12 @@ async def test_dashboard_entries_path_normalization( dashboard_entries: DashboardEntries, ) -> None: """Test that paths are handled consistently.""" - path1 = "/test/config/device.yaml" + 
path1 = Path("/test/config/device.yaml") entry = DashboardEntry(path1, create_cache_key()) - dashboard_entries._entries[path1] = entry + dashboard_entries._entries[str(path1)] = entry - result = dashboard_entries.get(path1) + result = dashboard_entries.get(str(path1)) assert result == entry @@ -118,12 +118,12 @@ async def test_dashboard_entries_path_with_spaces( dashboard_entries: DashboardEntries, ) -> None: """Test handling paths with spaces.""" - test_path = "/test/config/my device.yaml" + test_path = Path("/test/config/my device.yaml") entry = DashboardEntry(test_path, create_cache_key()) - dashboard_entries._entries[test_path] = entry + dashboard_entries._entries[str(test_path)] = entry - result = dashboard_entries.get(test_path) + result = dashboard_entries.get(str(test_path)) assert result == entry assert result.path == test_path @@ -133,18 +133,18 @@ async def test_dashboard_entries_path_with_special_chars( dashboard_entries: DashboardEntries, ) -> None: """Test handling paths with special characters.""" - test_path = "/test/config/device-01_test.yaml" + test_path = Path("/test/config/device-01_test.yaml") entry = DashboardEntry(test_path, create_cache_key()) - dashboard_entries._entries[test_path] = entry + dashboard_entries._entries[str(test_path)] = entry - result = dashboard_entries.get(test_path) + result = dashboard_entries.get(str(test_path)) assert result == entry def test_dashboard_entries_windows_path() -> None: """Test handling Windows-style paths.""" - test_path = r"C:\Users\test\esphome\device.yaml" + test_path = Path(r"C:\Users\test\esphome\device.yaml") cache_key = create_cache_key() entry = DashboardEntry(test_path, cache_key) @@ -157,28 +157,28 @@ async def test_dashboard_entries_path_to_cache_key_mapping( dashboard_entries: DashboardEntries, ) -> None: """Test internal entries storage with paths and cache keys.""" - path1 = "/test/config/device1.yaml" - path2 = "/test/config/device2.yaml" + path1 = Path("/test/config/device1.yaml") + path2 = Path("/test/config/device2.yaml") entry1 = DashboardEntry(path1, create_cache_key()) entry2 = DashboardEntry(path2, (1, 1, 1.0, 1)) - dashboard_entries._entries[path1] = entry1 - dashboard_entries._entries[path2] = entry2 + dashboard_entries._entries[str(path1)] = entry1 + dashboard_entries._entries[str(path2)] = entry2 - assert path1 in dashboard_entries._entries - assert path2 in dashboard_entries._entries - assert dashboard_entries._entries[path1].cache_key == create_cache_key() - assert dashboard_entries._entries[path2].cache_key == (1, 1, 1.0, 1) + assert str(path1) in dashboard_entries._entries + assert str(path2) in dashboard_entries._entries + assert dashboard_entries._entries[str(path1)].cache_key == create_cache_key() + assert dashboard_entries._entries[str(path2)].cache_key == (1, 1, 1.0, 1) def test_dashboard_entry_path_property() -> None: """Test that path property returns expected value.""" - test_path = "/test/config/device.yaml" + test_path = Path("/test/config/device.yaml") entry = DashboardEntry(test_path, create_cache_key()) assert entry.path == test_path - assert isinstance(entry.path, str) + assert isinstance(entry.path, Path) @pytest.mark.asyncio @@ -187,14 +187,14 @@ async def test_dashboard_entries_all_returns_entries_with_paths( ) -> None: """Test that all() returns entries with their paths intact.""" paths = [ - "/test/config/device1.yaml", - "/test/config/device2.yaml", - "/test/config/subfolder/device3.yaml", + Path("/test/config/device1.yaml"), + Path("/test/config/device2.yaml"), + 
Path("/test/config/subfolder/device3.yaml"), ] for path in paths: entry = DashboardEntry(path, create_cache_key()) - dashboard_entries._entries[path] = entry + dashboard_entries._entries[str(path)] = entry all_entries = dashboard_entries.async_all() diff --git a/tests/dashboard/test_settings.py b/tests/dashboard/test_settings.py index 90a79ac0f8..c9097fe5e2 100644 --- a/tests/dashboard/test_settings.py +++ b/tests/dashboard/test_settings.py @@ -2,7 +2,6 @@ from __future__ import annotations -import os from pathlib import Path import tempfile @@ -17,7 +16,7 @@ def dashboard_settings(tmp_path: Path) -> DashboardSettings: settings = DashboardSettings() # Resolve symlinks to ensure paths match resolved_dir = tmp_path.resolve() - settings.config_dir = str(resolved_dir) + settings.config_dir = resolved_dir settings.absolute_config_dir = resolved_dir return settings @@ -26,7 +25,7 @@ def test_rel_path_simple(dashboard_settings: DashboardSettings) -> None: """Test rel_path with simple relative path.""" result = dashboard_settings.rel_path("config.yaml") - expected = str(Path(dashboard_settings.config_dir) / "config.yaml") + expected = dashboard_settings.config_dir / "config.yaml" assert result == expected @@ -34,9 +33,7 @@ def test_rel_path_multiple_components(dashboard_settings: DashboardSettings) -> """Test rel_path with multiple path components.""" result = dashboard_settings.rel_path("subfolder", "device", "config.yaml") - expected = str( - Path(dashboard_settings.config_dir) / "subfolder" / "device" / "config.yaml" - ) + expected = dashboard_settings.config_dir / "subfolder" / "device" / "config.yaml" assert result == expected @@ -55,7 +52,7 @@ def test_rel_path_absolute_path_within_config( internal_path.touch() result = dashboard_settings.rel_path("internal.yaml") - expected = str(Path(dashboard_settings.config_dir) / "internal.yaml") + expected = dashboard_settings.config_dir / "internal.yaml" assert result == expected @@ -80,7 +77,7 @@ def test_rel_path_with_pathlib_path(dashboard_settings: DashboardSettings) -> No path_obj = Path("subfolder") / "config.yaml" result = dashboard_settings.rel_path(path_obj) - expected = str(Path(dashboard_settings.config_dir) / "subfolder" / "config.yaml") + expected = dashboard_settings.config_dir / "subfolder" / "config.yaml" assert result == expected @@ -93,9 +90,7 @@ def test_rel_path_normalizes_slashes(dashboard_settings: DashboardSettings) -> N assert result1 == result2 # Also test that the result is as expected - expected = os.path.join( - dashboard_settings.config_dir, "folder", "subfolder", "file.yaml" - ) + expected = dashboard_settings.config_dir / "folder" / "subfolder" / "file.yaml" assert result1 == expected @@ -103,7 +98,7 @@ def test_rel_path_handles_spaces(dashboard_settings: DashboardSettings) -> None: """Test rel_path handles paths with spaces.""" result = dashboard_settings.rel_path("my folder", "my config.yaml") - expected = str(Path(dashboard_settings.config_dir) / "my folder" / "my config.yaml") + expected = dashboard_settings.config_dir / "my folder" / "my config.yaml" assert result == expected @@ -111,15 +106,13 @@ def test_rel_path_handles_special_chars(dashboard_settings: DashboardSettings) - """Test rel_path handles paths with special characters.""" result = dashboard_settings.rel_path("device-01_test", "config.yaml") - expected = str( - Path(dashboard_settings.config_dir) / "device-01_test" / "config.yaml" - ) + expected = dashboard_settings.config_dir / "device-01_test" / "config.yaml" assert result == expected def 
test_config_dir_as_path_property(dashboard_settings: DashboardSettings) -> None: """Test that config_dir can be accessed and used with Path operations.""" - config_path = Path(dashboard_settings.config_dir) + config_path = dashboard_settings.config_dir assert config_path.exists() assert config_path.is_dir() @@ -141,7 +134,7 @@ def test_rel_path_symlink_inside_config(dashboard_settings: DashboardSettings) - symlink = dashboard_settings.absolute_config_dir / "link.yaml" symlink.symlink_to(target) result = dashboard_settings.rel_path("link.yaml") - expected = str(Path(dashboard_settings.config_dir) / "link.yaml") + expected = dashboard_settings.config_dir / "link.yaml" assert result == expected @@ -157,12 +150,12 @@ def test_rel_path_symlink_outside_config(dashboard_settings: DashboardSettings) def test_rel_path_with_none_arg(dashboard_settings: DashboardSettings) -> None: """Test rel_path handles None arguments gracefully.""" result = dashboard_settings.rel_path("None") - expected = str(Path(dashboard_settings.config_dir) / "None") + expected = dashboard_settings.config_dir / "None" assert result == expected def test_rel_path_with_numeric_args(dashboard_settings: DashboardSettings) -> None: """Test rel_path handles numeric arguments.""" result = dashboard_settings.rel_path("123", "456.789") - expected = str(Path(dashboard_settings.config_dir) / "123" / "456.789") + expected = dashboard_settings.config_dir / "123" / "456.789" assert result == expected diff --git a/tests/dashboard/test_web_server.py b/tests/dashboard/test_web_server.py index 605df4e02c..14a7d7b136 100644 --- a/tests/dashboard/test_web_server.py +++ b/tests/dashboard/test_web_server.py @@ -49,7 +49,7 @@ def mock_trash_storage_path(tmp_path: Path) -> Generator[MagicMock]: """Fixture to mock trash_storage_path.""" trash_dir = tmp_path / "trash" with patch( - "esphome.dashboard.web_server.trash_storage_path", return_value=str(trash_dir) + "esphome.dashboard.web_server.trash_storage_path", return_value=trash_dir ) as mock: yield mock @@ -60,7 +60,7 @@ def mock_archive_storage_path(tmp_path: Path) -> Generator[MagicMock]: archive_dir = tmp_path / "archive" with patch( "esphome.dashboard.web_server.archive_storage_path", - return_value=str(archive_dir), + return_value=archive_dir, ) as mock: yield mock @@ -257,7 +257,7 @@ async def test_download_binary_handler_with_file( # Mock storage JSON mock_storage = Mock() mock_storage.name = "test_device" - mock_storage.firmware_bin_path = str(firmware_file) + mock_storage.firmware_bin_path = firmware_file mock_storage_json.load.return_value = mock_storage response = await dashboard.fetch( @@ -289,7 +289,7 @@ async def test_download_binary_handler_compressed( # Mock storage JSON mock_storage = Mock() mock_storage.name = "test_device" - mock_storage.firmware_bin_path = str(firmware_file) + mock_storage.firmware_bin_path = firmware_file mock_storage_json.load.return_value = mock_storage response = await dashboard.fetch( @@ -321,7 +321,7 @@ async def test_download_binary_handler_custom_download_name( # Mock storage JSON mock_storage = Mock() mock_storage.name = "test_device" - mock_storage.firmware_bin_path = str(firmware_file) + mock_storage.firmware_bin_path = firmware_file mock_storage_json.load.return_value = mock_storage response = await dashboard.fetch( @@ -355,7 +355,7 @@ async def test_download_binary_handler_idedata_fallback( # Mock storage JSON mock_storage = Mock() mock_storage.name = "test_device" - mock_storage.firmware_bin_path = str(firmware_file) + mock_storage.firmware_bin_path = 
firmware_file mock_storage_json.load.return_value = mock_storage # Mock idedata response @@ -402,7 +402,7 @@ async def test_edit_request_handler_post_existing( test_file.write_text("esphome:\n name: original\n") # Configure the mock settings - mock_dashboard_settings.rel_path.return_value = str(test_file) + mock_dashboard_settings.rel_path.return_value = test_file mock_dashboard_settings.absolute_config_dir = test_file.parent new_content = "esphome:\n name: modified\n" @@ -426,7 +426,7 @@ async def test_unarchive_request_handler( ) -> None: """Test the UnArchiveRequestHandler.post method.""" # Set up an archived file - archive_dir = Path(mock_archive_storage_path.return_value) + archive_dir = mock_archive_storage_path.return_value archive_dir.mkdir(parents=True, exist_ok=True) archived_file = archive_dir / "archived.yaml" archived_file.write_text("test content") @@ -435,7 +435,7 @@ async def test_unarchive_request_handler( config_dir = tmp_path / "config" config_dir.mkdir(parents=True, exist_ok=True) destination_file = config_dir / "archived.yaml" - mock_dashboard_settings.rel_path.return_value = str(destination_file) + mock_dashboard_settings.rel_path.return_value = destination_file response = await dashboard.fetch( "/unarchive?configuration=archived.yaml", @@ -474,7 +474,7 @@ async def test_secret_keys_handler_with_file( # Configure mock to return our temp secrets file # Since the file actually exists, os.path.isfile will return True naturally - mock_dashboard_settings.rel_path.return_value = str(secrets_file) + mock_dashboard_settings.rel_path.return_value = secrets_file response = await dashboard.fetch("/secret_keys", method="GET") assert response.code == 200 @@ -538,8 +538,8 @@ def test_start_web_server_with_address_port( ) -> None: """Test the start_web_server function with address and port.""" app = Mock() - trash_dir = Path(mock_trash_storage_path.return_value) - archive_dir = Path(mock_archive_storage_path.return_value) + trash_dir = mock_trash_storage_path.return_value + archive_dir = mock_archive_storage_path.return_value # Create trash dir to test migration trash_dir.mkdir() @@ -643,12 +643,12 @@ async def test_archive_handler_with_build_folder( (build_folder / ".pioenvs").mkdir() mock_dashboard_settings.config_dir = str(config_dir) - mock_dashboard_settings.rel_path.return_value = str(test_config) - mock_archive_storage_path.return_value = str(archive_dir) + mock_dashboard_settings.rel_path.return_value = test_config + mock_archive_storage_path.return_value = archive_dir mock_storage = MagicMock() mock_storage.name = "test_device" - mock_storage.build_path = str(build_folder) + mock_storage.build_path = build_folder mock_storage_json.load.return_value = mock_storage response = await dashboard.fetch( @@ -686,8 +686,8 @@ async def test_archive_handler_no_build_folder( test_config.write_text("esphome:\n name: test_device\n") mock_dashboard_settings.config_dir = str(config_dir) - mock_dashboard_settings.rel_path.return_value = str(test_config) - mock_archive_storage_path.return_value = str(archive_dir) + mock_dashboard_settings.rel_path.return_value = test_config + mock_archive_storage_path.return_value = archive_dir mock_storage = MagicMock() mock_storage.name = "test_device" diff --git a/tests/dashboard/test_web_server_paths.py b/tests/dashboard/test_web_server_paths.py index f66e6a7ec2..b596ebb581 100644 --- a/tests/dashboard/test_web_server_paths.py +++ b/tests/dashboard/test_web_server_paths.py @@ -13,14 +13,14 @@ from esphome.dashboard import web_server def 
test_get_base_frontend_path_production() -> None: """Test get_base_frontend_path in production mode.""" mock_module = MagicMock() - mock_module.where.return_value = "/usr/local/lib/esphome_dashboard" + mock_module.where.return_value = Path("/usr/local/lib/esphome_dashboard") with ( patch.dict(os.environ, {}, clear=True), patch.dict("sys.modules", {"esphome_dashboard": mock_module}), ): result = web_server.get_base_frontend_path() - assert result == "/usr/local/lib/esphome_dashboard" + assert result == Path("/usr/local/lib/esphome_dashboard") mock_module.where.assert_called_once() @@ -31,13 +31,12 @@ def test_get_base_frontend_path_dev_mode() -> None: with patch.dict(os.environ, {"ESPHOME_DASHBOARD_DEV": test_path}): result = web_server.get_base_frontend_path() - # The function uses os.path.abspath which doesn't resolve symlinks - # We need to match that behavior + # The function uses Path.resolve() which resolves symlinks # The actual function adds "/" to the path, so we simulate that test_path_with_slash = test_path if test_path.endswith("/") else test_path + "/" - expected = os.path.abspath( - os.path.join(os.getcwd(), test_path_with_slash, "esphome_dashboard") - ) + expected = ( + Path(os.getcwd()) / test_path_with_slash / "esphome_dashboard" + ).resolve() assert result == expected @@ -48,8 +47,8 @@ def test_get_base_frontend_path_dev_mode_with_trailing_slash() -> None: with patch.dict(os.environ, {"ESPHOME_DASHBOARD_DEV": test_path}): result = web_server.get_base_frontend_path() - # The function uses os.path.abspath which doesn't resolve symlinks - expected = os.path.abspath(str(Path.cwd() / test_path / "esphome_dashboard")) + # The function uses Path.resolve() which resolves symlinks + expected = (Path.cwd() / test_path / "esphome_dashboard").resolve() assert result == expected @@ -60,76 +59,72 @@ def test_get_base_frontend_path_dev_mode_relative_path() -> None: with patch.dict(os.environ, {"ESPHOME_DASHBOARD_DEV": test_path}): result = web_server.get_base_frontend_path() - # The function uses os.path.abspath which doesn't resolve symlinks - # We need to match that behavior + # The function uses Path.resolve() which resolves symlinks # The actual function adds "/" to the path, so we simulate that test_path_with_slash = test_path if test_path.endswith("/") else test_path + "/" - expected = os.path.abspath( - os.path.join(os.getcwd(), test_path_with_slash, "esphome_dashboard") - ) + expected = ( + Path(os.getcwd()) / test_path_with_slash / "esphome_dashboard" + ).resolve() assert result == expected - assert Path(result).is_absolute() + assert result.is_absolute() def test_get_static_path_single_component() -> None: """Test get_static_path with single path component.""" with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base: - mock_base.return_value = "/base/frontend" + mock_base.return_value = Path("/base/frontend") result = web_server.get_static_path("file.js") - assert result == os.path.join("/base/frontend", "static", "file.js") + assert result == Path("/base/frontend") / "static" / "file.js" def test_get_static_path_multiple_components() -> None: """Test get_static_path with multiple path components.""" with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base: - mock_base.return_value = "/base/frontend" + mock_base.return_value = Path("/base/frontend") result = web_server.get_static_path("js", "esphome", "index.js") - assert result == os.path.join( - "/base/frontend", "static", "js", "esphome", "index.js" + assert ( + result == 
Path("/base/frontend") / "static" / "js" / "esphome" / "index.js" ) def test_get_static_path_empty_args() -> None: """Test get_static_path with no arguments.""" with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base: - mock_base.return_value = "/base/frontend" + mock_base.return_value = Path("/base/frontend") result = web_server.get_static_path() - assert result == os.path.join("/base/frontend", "static") + assert result == Path("/base/frontend") / "static" def test_get_static_path_with_pathlib_path() -> None: """Test get_static_path with Path objects.""" with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base: - mock_base.return_value = "/base/frontend" + mock_base.return_value = Path("/base/frontend") path_obj = Path("js") / "app.js" result = web_server.get_static_path(str(path_obj)) - assert result == os.path.join("/base/frontend", "static", "js", "app.js") + assert result == Path("/base/frontend") / "static" / "js" / "app.js" def test_get_static_file_url_production() -> None: """Test get_static_file_url in production mode.""" web_server.get_static_file_url.cache_clear() mock_module = MagicMock() - mock_file = MagicMock() - mock_file.read.return_value = b"test content" - mock_file.__enter__ = MagicMock(return_value=mock_file) - mock_file.__exit__ = MagicMock(return_value=None) + mock_path = MagicMock(spec=Path) + mock_path.read_bytes.return_value = b"test content" with ( patch.dict(os.environ, {}, clear=True), patch.dict("sys.modules", {"esphome_dashboard": mock_module}), patch("esphome.dashboard.web_server.get_static_path") as mock_get_path, - patch("esphome.dashboard.web_server.open", create=True, return_value=mock_file), ): - mock_get_path.return_value = "/fake/path/js/app.js" + mock_get_path.return_value = mock_path result = web_server.get_static_file_url("js/app.js") assert result.startswith("./static/js/app.js?hash=") @@ -182,26 +177,26 @@ def test_load_file_compressed_path(tmp_path: Path) -> None: def test_path_normalization_in_static_path() -> None: """Test that paths are normalized correctly.""" with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base: - mock_base.return_value = "/base/frontend" + mock_base.return_value = Path("/base/frontend") # Test with separate components result1 = web_server.get_static_path("js", "app.js") result2 = web_server.get_static_path("js", "app.js") assert result1 == result2 - assert result1 == os.path.join("/base/frontend", "static", "js", "app.js") + assert result1 == Path("/base/frontend") / "static" / "js" / "app.js" def test_windows_path_handling() -> None: """Test handling of Windows-style paths.""" with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base: - mock_base.return_value = r"C:\Program Files\esphome\frontend" + mock_base.return_value = Path(r"C:\Program Files\esphome\frontend") result = web_server.get_static_path("js", "app.js") - # os.path.join should handle this correctly on the platform - expected = os.path.join( - r"C:\Program Files\esphome\frontend", "static", "js", "app.js" + # Path should handle this correctly on the platform + expected = ( + Path(r"C:\Program Files\esphome\frontend") / "static" / "js" / "app.js" ) assert result == expected @@ -209,22 +204,20 @@ def test_windows_path_handling() -> None: def test_path_with_special_characters() -> None: """Test paths with special characters.""" with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base: - mock_base.return_value = "/base/frontend" + 
mock_base.return_value = Path("/base/frontend") result = web_server.get_static_path("js-modules", "app_v1.0.js") - assert result == os.path.join( - "/base/frontend", "static", "js-modules", "app_v1.0.js" + assert ( + result == Path("/base/frontend") / "static" / "js-modules" / "app_v1.0.js" ) def test_path_with_spaces() -> None: """Test paths with spaces.""" with patch("esphome.dashboard.web_server.get_base_frontend_path") as mock_base: - mock_base.return_value = "/base/my frontend" + mock_base.return_value = Path("/base/my frontend") result = web_server.get_static_path("my js", "my app.js") - assert result == os.path.join( - "/base/my frontend", "static", "my js", "my app.js" - ) + assert result == Path("/base/my frontend") / "static" / "my js" / "my app.js" diff --git a/tests/dashboard/util/test_file.py b/tests/dashboard/util/test_file.py deleted file mode 100644 index 51ba10b328..0000000000 --- a/tests/dashboard/util/test_file.py +++ /dev/null @@ -1,56 +0,0 @@ -import os -from pathlib import Path -from unittest.mock import patch - -import py -import pytest - -from esphome.dashboard.util.file import write_file, write_utf8_file - - -def test_write_utf8_file(tmp_path: Path) -> None: - write_utf8_file(tmp_path.joinpath("foo.txt"), "foo") - assert tmp_path.joinpath("foo.txt").read_text() == "foo" - - with pytest.raises(OSError): - write_utf8_file(Path("/dev/not-writable"), "bar") - - -def test_write_file(tmp_path: Path) -> None: - write_file(tmp_path.joinpath("foo.txt"), b"foo") - assert tmp_path.joinpath("foo.txt").read_text() == "foo" - - -def test_write_utf8_file_fails_at_rename( - tmpdir: py.path.local, caplog: pytest.LogCaptureFixture -) -> None: - """Test that if rename fails not not remove, we do not log the failed cleanup.""" - test_dir = tmpdir.mkdir("files") - test_file = Path(test_dir / "test.json") - - with ( - pytest.raises(OSError), - patch("esphome.dashboard.util.file.os.replace", side_effect=OSError), - ): - write_utf8_file(test_file, '{"some":"data"}', False) - - assert not os.path.exists(test_file) - - assert "File replacement cleanup failed" not in caplog.text - - -def test_write_utf8_file_fails_at_rename_and_remove( - tmpdir: py.path.local, caplog: pytest.LogCaptureFixture -) -> None: - """Test that if rename and remove both fail, we log the failed cleanup.""" - test_dir = tmpdir.mkdir("files") - test_file = Path(test_dir / "test.json") - - with ( - pytest.raises(OSError), - patch("esphome.dashboard.util.file.os.remove", side_effect=OSError), - patch("esphome.dashboard.util.file.os.replace", side_effect=OSError), - ): - write_utf8_file(test_file, '{"some":"data"}', False) - - assert "File replacement cleanup failed" in caplog.text diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 0530752551..94632f8439 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -271,7 +271,7 @@ async def compile_esphome( def _read_config_and_get_binary(): CORE.reset() # Reset CORE state between test runs - CORE.config_path = str(config_path) + CORE.config_path = config_path config = esphome.config.read_config( {"command": "compile", "config": str(config_path)} ) diff --git a/tests/unit_tests/build_gen/test_platformio.py b/tests/unit_tests/build_gen/test_platformio.py index 04b7381141..a124dbc128 100644 --- a/tests/unit_tests/build_gen/test_platformio.py +++ b/tests/unit_tests/build_gen/test_platformio.py @@ -172,7 +172,7 @@ def test_write_ini_no_change_when_content_same( # write_file_if_changed should be called with the same content 
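# A minimal sketch of the Path-joining behaviour the dashboard static-path
# tests above assume: the base frontend path, "static" and any extra
# components are joined with pathlib instead of os.path.join. The base path
# argument here stands in for get_base_frontend_path().
from pathlib import Path


def get_static_path(base_frontend_path: Path, *args: str) -> Path:
    return base_frontend_path.joinpath("static", *args)


# Example: equals Path("/base/frontend") / "static" / "js" / "app.js"
print(get_static_path(Path("/base/frontend"), "js", "app.js"))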
mock_write_file_if_changed.assert_called_once() call_args = mock_write_file_if_changed.call_args[0] - assert call_args[0] == str(ini_file) + assert call_args[0] == ini_file assert content in call_args[1] diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py index 06d06d0506..0151bce00c 100644 --- a/tests/unit_tests/conftest.py +++ b/tests/unit_tests/conftest.py @@ -43,7 +43,7 @@ def fixture_path() -> Path: @pytest.fixture def setup_core(tmp_path: Path) -> Path: """Set up CORE with test paths.""" - CORE.config_path = str(tmp_path / "test.yaml") + CORE.config_path = tmp_path / "test.yaml" return tmp_path diff --git a/tests/unit_tests/core/common.py b/tests/unit_tests/core/common.py index 1848d5397b..daa429dc96 100644 --- a/tests/unit_tests/core/common.py +++ b/tests/unit_tests/core/common.py @@ -10,7 +10,7 @@ from esphome.core import CORE def load_config_from_yaml( - yaml_file: Callable[[str], str], yaml_content: str + yaml_file: Callable[[str], Path], yaml_content: str ) -> Config | None: """Load configuration from YAML content.""" yaml_path = yaml_file(yaml_content) @@ -25,7 +25,7 @@ def load_config_from_yaml( def load_config_from_fixture( - yaml_file: Callable[[str], str], fixture_name: str, fixtures_dir: Path + yaml_file: Callable[[str], Path], fixture_name: str, fixtures_dir: Path ) -> Config | None: """Load configuration from a fixture file.""" fixture_path = fixtures_dir / fixture_name diff --git a/tests/unit_tests/core/conftest.py b/tests/unit_tests/core/conftest.py index 60d6738ce9..42e59c15e6 100644 --- a/tests/unit_tests/core/conftest.py +++ b/tests/unit_tests/core/conftest.py @@ -7,12 +7,12 @@ import pytest @pytest.fixture -def yaml_file(tmp_path: Path) -> Callable[[str], str]: +def yaml_file(tmp_path: Path) -> Callable[[str], Path]: """Create a temporary YAML file for testing.""" - def _yaml_file(content: str) -> str: + def _yaml_file(content: str) -> Path: yaml_path = tmp_path / "test.yaml" yaml_path.write_text(content) - return str(yaml_path) + return yaml_path return _yaml_file diff --git a/tests/unit_tests/core/test_config.py b/tests/unit_tests/core/test_config.py index 7d3b90794b..4fddfc9678 100644 --- a/tests/unit_tests/core/test_config.py +++ b/tests/unit_tests/core/test_config.py @@ -289,7 +289,7 @@ def test_valid_include_with_angle_brackets() -> None: def test_valid_include_with_valid_file(tmp_path: Path) -> None: """Test valid_include accepts valid include files.""" - CORE.config_path = str(tmp_path / "test.yaml") + CORE.config_path = tmp_path / "test.yaml" include_file = tmp_path / "include.h" include_file.touch() @@ -298,7 +298,7 @@ def test_valid_include_with_valid_file(tmp_path: Path) -> None: def test_valid_include_with_valid_directory(tmp_path: Path) -> None: """Test valid_include accepts valid directories.""" - CORE.config_path = str(tmp_path / "test.yaml") + CORE.config_path = tmp_path / "test.yaml" include_dir = tmp_path / "includes" include_dir.mkdir() @@ -307,7 +307,7 @@ def test_valid_include_with_valid_directory(tmp_path: Path) -> None: def test_valid_include_invalid_extension(tmp_path: Path) -> None: """Test valid_include rejects files with invalid extensions.""" - CORE.config_path = str(tmp_path / "test.yaml") + CORE.config_path = tmp_path / "test.yaml" invalid_file = tmp_path / "file.txt" invalid_file.touch() @@ -481,7 +481,7 @@ def test_include_file_header(tmp_path: Path, mock_copy_file_if_changed: Mock) -> src_file = tmp_path / "source.h" src_file.write_text("// Header content") - CORE.build_path = str(tmp_path / "build") + 
CORE.build_path = tmp_path / "build" with patch("esphome.core.config.cg") as mock_cg: # Mock RawStatement to capture the text @@ -494,7 +494,7 @@ def test_include_file_header(tmp_path: Path, mock_copy_file_if_changed: Mock) -> mock_cg.RawStatement.side_effect = raw_statement_side_effect - config.include_file(str(src_file), "test.h") + config.include_file(src_file, Path("test.h")) mock_copy_file_if_changed.assert_called_once() mock_cg.add_global.assert_called_once() @@ -507,10 +507,10 @@ def test_include_file_cpp(tmp_path: Path, mock_copy_file_if_changed: Mock) -> No src_file = tmp_path / "source.cpp" src_file.write_text("// CPP content") - CORE.build_path = str(tmp_path / "build") + CORE.build_path = tmp_path / "build" with patch("esphome.core.config.cg") as mock_cg: - config.include_file(str(src_file), "test.cpp") + config.include_file(src_file, Path("test.cpp")) mock_copy_file_if_changed.assert_called_once() # Should not add include statement for .cpp files @@ -602,8 +602,8 @@ async def test_add_includes_with_single_file( mock_cg_with_include_capture: tuple[Mock, list[str]], ) -> None: """Test add_includes copies a single header file to build directory.""" - CORE.config_path = str(tmp_path / "config.yaml") - CORE.build_path = str(tmp_path / "build") + CORE.config_path = tmp_path / "config.yaml" + CORE.build_path = tmp_path / "build" os.makedirs(CORE.build_path, exist_ok=True) # Create include file @@ -617,7 +617,7 @@ async def test_add_includes_with_single_file( # Verify copy_file_if_changed was called to copy the file # Note: add_includes adds files to a src/ subdirectory mock_copy_file_if_changed.assert_called_once_with( - str(include_file), str(Path(CORE.build_path) / "src" / "my_header.h") + include_file, CORE.build_path / "src" / "my_header.h" ) # Verify include statement was added @@ -632,8 +632,8 @@ async def test_add_includes_with_directory_unix( mock_cg_with_include_capture: tuple[Mock, list[str]], ) -> None: """Test add_includes copies all files from a directory on Unix.""" - CORE.config_path = str(tmp_path / "config.yaml") - CORE.build_path = str(tmp_path / "build") + CORE.config_path = tmp_path / "config.yaml" + CORE.build_path = tmp_path / "build" os.makedirs(CORE.build_path, exist_ok=True) # Create include directory with files @@ -677,8 +677,8 @@ async def test_add_includes_with_directory_windows( mock_cg_with_include_capture: tuple[Mock, list[str]], ) -> None: """Test add_includes copies all files from a directory on Windows.""" - CORE.config_path = str(tmp_path / "config.yaml") - CORE.build_path = str(tmp_path / "build") + CORE.config_path = tmp_path / "config.yaml" + CORE.build_path = tmp_path / "build" os.makedirs(CORE.build_path, exist_ok=True) # Create include directory with files @@ -719,8 +719,8 @@ async def test_add_includes_with_multiple_sources( tmp_path: Path, mock_copy_file_if_changed: Mock ) -> None: """Test add_includes with multiple files and directories.""" - CORE.config_path = str(tmp_path / "config.yaml") - CORE.build_path = str(tmp_path / "build") + CORE.config_path = tmp_path / "config.yaml" + CORE.build_path = tmp_path / "build" os.makedirs(CORE.build_path, exist_ok=True) # Create various include sources @@ -747,8 +747,8 @@ async def test_add_includes_empty_directory( tmp_path: Path, mock_copy_file_if_changed: Mock ) -> None: """Test add_includes with an empty directory doesn't fail.""" - CORE.config_path = str(tmp_path / "config.yaml") - CORE.build_path = str(tmp_path / "build") + CORE.config_path = tmp_path / "config.yaml" + CORE.build_path = 
tmp_path / "build" os.makedirs(CORE.build_path, exist_ok=True) # Create empty directory @@ -769,8 +769,8 @@ async def test_add_includes_preserves_directory_structure_unix( tmp_path: Path, mock_copy_file_if_changed: Mock ) -> None: """Test that add_includes preserves relative directory structure on Unix.""" - CORE.config_path = str(tmp_path / "config.yaml") - CORE.build_path = str(tmp_path / "build") + CORE.config_path = tmp_path / "config.yaml" + CORE.build_path = tmp_path / "build" os.makedirs(CORE.build_path, exist_ok=True) # Create nested directory structure @@ -793,8 +793,8 @@ async def test_add_includes_preserves_directory_structure_unix( dest_paths = [call[0][1] for call in calls] # Check that relative paths are preserved - assert any("lib/src/core.h" in path for path in dest_paths) - assert any("lib/utils/helper.h" in path for path in dest_paths) + assert any("lib/src/core.h" in str(path) for path in dest_paths) + assert any("lib/utils/helper.h" in str(path) for path in dest_paths) @pytest.mark.asyncio @@ -803,8 +803,8 @@ async def test_add_includes_preserves_directory_structure_windows( tmp_path: Path, mock_copy_file_if_changed: Mock ) -> None: """Test that add_includes preserves relative directory structure on Windows.""" - CORE.config_path = str(tmp_path / "config.yaml") - CORE.build_path = str(tmp_path / "build") + CORE.config_path = tmp_path / "config.yaml" + CORE.build_path = tmp_path / "build" os.makedirs(CORE.build_path, exist_ok=True) # Create nested directory structure @@ -827,8 +827,8 @@ async def test_add_includes_preserves_directory_structure_windows( dest_paths = [call[0][1] for call in calls] # Check that relative paths are preserved - assert any("lib\\src\\core.h" in path for path in dest_paths) - assert any("lib\\utils\\helper.h" in path for path in dest_paths) + assert any("lib\\src\\core.h" in str(path) for path in dest_paths) + assert any("lib\\utils\\helper.h" in str(path) for path in dest_paths) @pytest.mark.asyncio @@ -836,8 +836,8 @@ async def test_add_includes_overwrites_existing_files( tmp_path: Path, mock_copy_file_if_changed: Mock ) -> None: """Test that add_includes overwrites existing files in build directory.""" - CORE.config_path = str(tmp_path / "config.yaml") - CORE.build_path = str(tmp_path / "build") + CORE.config_path = tmp_path / "config.yaml" + CORE.build_path = tmp_path / "build" os.makedirs(CORE.build_path, exist_ok=True) # Create include file @@ -850,5 +850,5 @@ async def test_add_includes_overwrites_existing_files( # Verify copy_file_if_changed was called (it handles overwriting) # Note: add_includes adds files to a src/ subdirectory mock_copy_file_if_changed.assert_called_once_with( - str(include_file), str(Path(CORE.build_path) / "src" / "header.h") + include_file, CORE.build_path / "src" / "header.h" ) diff --git a/tests/unit_tests/test_config_validation_paths.py b/tests/unit_tests/test_config_validation_paths.py index f8f038390e..f327e9c443 100644 --- a/tests/unit_tests/test_config_validation_paths.py +++ b/tests/unit_tests/test_config_validation_paths.py @@ -15,7 +15,7 @@ def test_directory_valid_path(setup_core: Path) -> None: result = cv.directory("test_directory") - assert result == "test_directory" + assert result == test_dir def test_directory_absolute_path(setup_core: Path) -> None: @@ -25,7 +25,7 @@ def test_directory_absolute_path(setup_core: Path) -> None: result = cv.directory(str(test_dir)) - assert result == str(test_dir) + assert result == test_dir def test_directory_nonexistent_path(setup_core: Path) -> None: @@ -52,7 
+52,7 @@ def test_directory_with_parent_directory(setup_core: Path) -> None: result = cv.directory("parent/child/grandchild") - assert result == "parent/child/grandchild" + assert result == nested_dir def test_file_valid_path(setup_core: Path) -> None: @@ -62,7 +62,7 @@ def test_file_valid_path(setup_core: Path) -> None: result = cv.file_("test_file.yaml") - assert result == "test_file.yaml" + assert result == test_file def test_file_absolute_path(setup_core: Path) -> None: @@ -72,7 +72,7 @@ def test_file_absolute_path(setup_core: Path) -> None: result = cv.file_(str(test_file)) - assert result == str(test_file) + assert result == test_file def test_file_nonexistent_path(setup_core: Path) -> None: @@ -99,7 +99,7 @@ def test_file_with_parent_directory(setup_core: Path) -> None: result = cv.file_("configs/sensors/temperature.yaml") - assert result == "configs/sensors/temperature.yaml" + assert result == test_file def test_directory_handles_trailing_slash(setup_core: Path) -> None: @@ -108,29 +108,29 @@ def test_directory_handles_trailing_slash(setup_core: Path) -> None: test_dir.mkdir() result = cv.directory("test_dir/") - assert result == "test_dir/" + assert result == test_dir result = cv.directory("test_dir") - assert result == "test_dir" + assert result == test_dir def test_file_handles_various_extensions(setup_core: Path) -> None: """Test file_ validator works with different file extensions.""" yaml_file = setup_core / "config.yaml" yaml_file.write_text("yaml content") - assert cv.file_("config.yaml") == "config.yaml" + assert cv.file_("config.yaml") == yaml_file yml_file = setup_core / "config.yml" yml_file.write_text("yml content") - assert cv.file_("config.yml") == "config.yml" + assert cv.file_("config.yml") == yml_file txt_file = setup_core / "readme.txt" txt_file.write_text("text content") - assert cv.file_("readme.txt") == "readme.txt" + assert cv.file_("readme.txt") == txt_file no_ext_file = setup_core / "LICENSE" no_ext_file.write_text("license content") - assert cv.file_("LICENSE") == "LICENSE" + assert cv.file_("LICENSE") == no_ext_file def test_directory_with_symlink(setup_core: Path) -> None: @@ -142,7 +142,7 @@ def test_directory_with_symlink(setup_core: Path) -> None: symlink_dir.symlink_to(actual_dir) result = cv.directory("symlink_directory") - assert result == "symlink_directory" + assert result == symlink_dir def test_file_with_symlink(setup_core: Path) -> None: @@ -154,7 +154,7 @@ def test_file_with_symlink(setup_core: Path) -> None: symlink_file.symlink_to(actual_file) result = cv.file_("symlink_file.txt") - assert result == "symlink_file.txt" + assert result == symlink_file def test_directory_error_shows_full_path(setup_core: Path) -> None: @@ -175,7 +175,7 @@ def test_directory_with_spaces_in_name(setup_core: Path) -> None: dir_with_spaces.mkdir() result = cv.directory("my test directory") - assert result == "my test directory" + assert result == dir_with_spaces def test_file_with_spaces_in_name(setup_core: Path) -> None: @@ -184,4 +184,4 @@ def test_file_with_spaces_in_name(setup_core: Path) -> None: file_with_spaces.write_text("content") result = cv.file_("my test file.yaml") - assert result == "my test file.yaml" + assert result == file_with_spaces diff --git a/tests/unit_tests/test_core.py b/tests/unit_tests/test_core.py index 9a69329e80..48eae06ea6 100644 --- a/tests/unit_tests/test_core.py +++ b/tests/unit_tests/test_core.py @@ -1,4 +1,5 @@ import os +from pathlib import Path from unittest.mock import patch from hypothesis import given @@ -536,8 +537,8 @@ 
class TestEsphomeCore: @pytest.fixture def target(self, fixture_path): target = core.EsphomeCore() - target.build_path = "foo/build" - target.config_path = "foo/config" + target.build_path = Path("foo/build") + target.config_path = Path("foo/config") return target def test_reset(self, target): @@ -584,33 +585,33 @@ class TestEsphomeCore: @pytest.mark.skipif(os.name == "nt", reason="Unix-specific test") def test_data_dir_default_unix(self, target): """Test data_dir returns .esphome in config directory by default on Unix.""" - target.config_path = "/home/user/config.yaml" - assert target.data_dir == "/home/user/.esphome" + target.config_path = Path("/home/user/config.yaml") + assert target.data_dir == Path("/home/user/.esphome") @pytest.mark.skipif(os.name != "nt", reason="Windows-specific test") def test_data_dir_default_windows(self, target): """Test data_dir returns .esphome in config directory by default on Windows.""" - target.config_path = "D:\\home\\user\\config.yaml" - assert target.data_dir == "D:\\home\\user\\.esphome" + target.config_path = Path("D:\\home\\user\\config.yaml") + assert target.data_dir == Path("D:\\home\\user\\.esphome") def test_data_dir_ha_addon(self, target): """Test data_dir returns /data when running as Home Assistant addon.""" - target.config_path = "/config/test.yaml" + target.config_path = Path("/config/test.yaml") with patch.dict(os.environ, {"ESPHOME_IS_HA_ADDON": "true"}): - assert target.data_dir == "/data" + assert target.data_dir == Path("/data") def test_data_dir_env_override(self, target): """Test data_dir uses ESPHOME_DATA_DIR environment variable when set.""" - target.config_path = "/home/user/config.yaml" + target.config_path = Path("/home/user/config.yaml") with patch.dict(os.environ, {"ESPHOME_DATA_DIR": "/custom/data/path"}): - assert target.data_dir == "/custom/data/path" + assert target.data_dir == Path("/custom/data/path") @pytest.mark.skipif(os.name == "nt", reason="Unix-specific test") def test_data_dir_priority_unix(self, target): """Test data_dir priority on Unix: HA addon > env var > default.""" - target.config_path = "/config/test.yaml" + target.config_path = Path("/config/test.yaml") expected_default = "/config/.esphome" # Test HA addon takes priority over env var @@ -618,26 +619,26 @@ class TestEsphomeCore: os.environ, {"ESPHOME_IS_HA_ADDON": "true", "ESPHOME_DATA_DIR": "/custom/path"}, ): - assert target.data_dir == "/data" + assert target.data_dir == Path("/data") # Test env var is used when not HA addon with patch.dict( os.environ, {"ESPHOME_IS_HA_ADDON": "false", "ESPHOME_DATA_DIR": "/custom/path"}, ): - assert target.data_dir == "/custom/path" + assert target.data_dir == Path("/custom/path") # Test default when neither is set with patch.dict(os.environ, {}, clear=True): # Ensure these env vars are not set os.environ.pop("ESPHOME_IS_HA_ADDON", None) os.environ.pop("ESPHOME_DATA_DIR", None) - assert target.data_dir == expected_default + assert target.data_dir == Path(expected_default) @pytest.mark.skipif(os.name != "nt", reason="Windows-specific test") def test_data_dir_priority_windows(self, target): """Test data_dir priority on Windows: HA addon > env var > default.""" - target.config_path = "D:\\config\\test.yaml" + target.config_path = Path("D:\\config\\test.yaml") expected_default = "D:\\config\\.esphome" # Test HA addon takes priority over env var @@ -645,18 +646,18 @@ class TestEsphomeCore: os.environ, {"ESPHOME_IS_HA_ADDON": "true", "ESPHOME_DATA_DIR": "/custom/path"}, ): - assert target.data_dir == "/data" + assert 
target.data_dir == Path("/data") # Test env var is used when not HA addon with patch.dict( os.environ, {"ESPHOME_IS_HA_ADDON": "false", "ESPHOME_DATA_DIR": "/custom/path"}, ): - assert target.data_dir == "/custom/path" + assert target.data_dir == Path("/custom/path") # Test default when neither is set with patch.dict(os.environ, {}, clear=True): # Ensure these env vars are not set os.environ.pop("ESPHOME_IS_HA_ADDON", None) os.environ.pop("ESPHOME_DATA_DIR", None) - assert target.data_dir == expected_default + assert target.data_dir == Path(expected_default) diff --git a/tests/unit_tests/test_external_files.py b/tests/unit_tests/test_external_files.py index 3fa7de2f64..05e0bd3523 100644 --- a/tests/unit_tests/test_external_files.py +++ b/tests/unit_tests/test_external_files.py @@ -42,7 +42,7 @@ def test_is_file_recent_with_recent_file(setup_core: Path) -> None: refresh = TimePeriod(seconds=3600) - result = external_files.is_file_recent(str(test_file), refresh) + result = external_files.is_file_recent(test_file, refresh) assert result is True @@ -53,11 +53,13 @@ def test_is_file_recent_with_old_file(setup_core: Path) -> None: test_file.write_text("content") old_time = time.time() - 7200 + mock_stat = MagicMock() + mock_stat.st_ctime = old_time - with patch("os.path.getctime", return_value=old_time): + with patch.object(Path, "stat", return_value=mock_stat): refresh = TimePeriod(seconds=3600) - result = external_files.is_file_recent(str(test_file), refresh) + result = external_files.is_file_recent(test_file, refresh) assert result is False @@ -67,7 +69,7 @@ def test_is_file_recent_nonexistent_file(setup_core: Path) -> None: test_file = setup_core / "nonexistent.txt" refresh = TimePeriod(seconds=3600) - result = external_files.is_file_recent(str(test_file), refresh) + result = external_files.is_file_recent(test_file, refresh) assert result is False @@ -77,10 +79,12 @@ def test_is_file_recent_with_zero_refresh(setup_core: Path) -> None: test_file = setup_core / "test.txt" test_file.write_text("content") - # Mock getctime to return a time 10 seconds ago - with patch("os.path.getctime", return_value=time.time() - 10): + # Mock stat to return a time 10 seconds ago + mock_stat = MagicMock() + mock_stat.st_ctime = time.time() - 10 + with patch.object(Path, "stat", return_value=mock_stat): refresh = TimePeriod(seconds=0) - result = external_files.is_file_recent(str(test_file), refresh) + result = external_files.is_file_recent(test_file, refresh) assert result is False @@ -97,7 +101,7 @@ def test_has_remote_file_changed_not_modified( mock_head.return_value = mock_response url = "https://example.com/file.txt" - result = external_files.has_remote_file_changed(url, str(test_file)) + result = external_files.has_remote_file_changed(url, test_file) assert result is False mock_head.assert_called_once() @@ -121,7 +125,7 @@ def test_has_remote_file_changed_modified( mock_head.return_value = mock_response url = "https://example.com/file.txt" - result = external_files.has_remote_file_changed(url, str(test_file)) + result = external_files.has_remote_file_changed(url, test_file) assert result is True @@ -131,7 +135,7 @@ def test_has_remote_file_changed_no_local_file(setup_core: Path) -> None: test_file = setup_core / "nonexistent.txt" url = "https://example.com/file.txt" - result = external_files.has_remote_file_changed(url, str(test_file)) + result = external_files.has_remote_file_changed(url, test_file) assert result is True @@ -149,7 +153,7 @@ def test_has_remote_file_changed_network_error( url = 
"https://example.com/file.txt" with pytest.raises(Invalid, match="Could not check if.*Network error"): - external_files.has_remote_file_changed(url, str(test_file)) + external_files.has_remote_file_changed(url, test_file) @patch("esphome.external_files.requests.head") @@ -165,7 +169,7 @@ def test_has_remote_file_changed_timeout( mock_head.return_value = mock_response url = "https://example.com/file.txt" - external_files.has_remote_file_changed(url, str(test_file)) + external_files.has_remote_file_changed(url, test_file) call_args = mock_head.call_args assert call_args[1]["timeout"] == external_files.NETWORK_TIMEOUT @@ -191,6 +195,6 @@ def test_is_file_recent_handles_float_seconds(setup_core: Path) -> None: refresh = TimePeriod(seconds=3600.5) - result = external_files.is_file_recent(str(test_file), refresh) + result = external_files.is_file_recent(test_file, refresh) assert result is True diff --git a/tests/unit_tests/test_helpers.py b/tests/unit_tests/test_helpers.py index b49e5797c1..87ed901ecb 100644 --- a/tests/unit_tests/test_helpers.py +++ b/tests/unit_tests/test_helpers.py @@ -154,11 +154,11 @@ def test_walk_files(fixture_path): actual = list(helpers.walk_files(path)) # Ensure paths start with the root - assert all(p.startswith(str(path)) for p in actual) + assert all(p.is_relative_to(path) for p in actual) class Test_write_file_if_changed: - def test_src_and_dst_match(self, tmp_path): + def test_src_and_dst_match(self, tmp_path: Path): text = "A files are unique.\n" initial = text dst = tmp_path / "file-a.txt" @@ -168,7 +168,7 @@ class Test_write_file_if_changed: assert dst.read_text() == text - def test_src_and_dst_do_not_match(self, tmp_path): + def test_src_and_dst_do_not_match(self, tmp_path: Path): text = "A files are unique.\n" initial = "B files are unique.\n" dst = tmp_path / "file-a.txt" @@ -178,7 +178,7 @@ class Test_write_file_if_changed: assert dst.read_text() == text - def test_dst_does_not_exist(self, tmp_path): + def test_dst_does_not_exist(self, tmp_path: Path): text = "A files are unique.\n" dst = tmp_path / "file-a.txt" @@ -188,7 +188,7 @@ class Test_write_file_if_changed: class Test_copy_file_if_changed: - def test_src_and_dst_match(self, tmp_path, fixture_path): + def test_src_and_dst_match(self, tmp_path: Path, fixture_path: Path): src = fixture_path / "helpers" / "file-a.txt" initial = fixture_path / "helpers" / "file-a.txt" dst = tmp_path / "file-a.txt" @@ -197,7 +197,7 @@ class Test_copy_file_if_changed: helpers.copy_file_if_changed(src, dst) - def test_src_and_dst_do_not_match(self, tmp_path, fixture_path): + def test_src_and_dst_do_not_match(self, tmp_path: Path, fixture_path: Path): src = fixture_path / "helpers" / "file-a.txt" initial = fixture_path / "helpers" / "file-c.txt" dst = tmp_path / "file-a.txt" @@ -208,7 +208,7 @@ class Test_copy_file_if_changed: assert src.read_text() == dst.read_text() - def test_dst_does_not_exist(self, tmp_path, fixture_path): + def test_dst_does_not_exist(self, tmp_path: Path, fixture_path: Path): src = fixture_path / "helpers" / "file-a.txt" dst = tmp_path / "file-a.txt" @@ -604,9 +604,8 @@ def test_mkdir_p_with_existing_file_raises_error(tmp_path: Path) -> None: helpers.mkdir_p(dir_path) -@pytest.mark.skipif(os.name == "nt", reason="Unix-specific test") -def test_read_file_unix(tmp_path: Path) -> None: - """Test read_file reads file content correctly on Unix.""" +def test_read_file(tmp_path: Path) -> None: + """Test read_file reads file content correctly.""" # Test reading regular file test_file = tmp_path / "test.txt" 
expected_content = "Test content\nLine 2\n" @@ -624,31 +623,10 @@ def test_read_file_unix(tmp_path: Path) -> None: assert content == utf8_content -@pytest.mark.skipif(os.name != "nt", reason="Windows-specific test") -def test_read_file_windows(tmp_path: Path) -> None: - """Test read_file reads file content correctly on Windows.""" - # Test reading regular file - test_file = tmp_path / "test.txt" - expected_content = "Test content\nLine 2\n" - test_file.write_text(expected_content) - - content = helpers.read_file(test_file) - # On Windows, text mode reading converts \n to \r\n - assert content == expected_content.replace("\n", "\r\n") - - # Test reading file with UTF-8 characters - utf8_file = tmp_path / "utf8.txt" - utf8_content = "Hello δΈ–η•Œ 🌍" - utf8_file.write_text(utf8_content, encoding="utf-8") - - content = helpers.read_file(utf8_file) - assert content == utf8_content - - def test_read_file_not_found() -> None: """Test read_file raises error for non-existent file.""" with pytest.raises(EsphomeError, match=r"Error reading file"): - helpers.read_file("/nonexistent/file.txt") + helpers.read_file(Path("/nonexistent/file.txt")) def test_read_file_unicode_decode_error(tmp_path: Path) -> None: diff --git a/tests/unit_tests/test_main.py b/tests/unit_tests/test_main.py index fff0b2cd48..e805ecb2eb 100644 --- a/tests/unit_tests/test_main.py +++ b/tests/unit_tests/test_main.py @@ -885,7 +885,7 @@ def test_upload_program_ota_success( assert exit_code == 0 assert host == "192.168.1.100" - expected_firmware = str( + expected_firmware = ( tmp_path / ".esphome" / "build" / "test" / ".pioenvs" / "test" / "firmware.bin" ) mock_run_ota.assert_called_once_with( @@ -919,7 +919,9 @@ def test_upload_program_ota_with_file_arg( assert exit_code == 0 assert host == "192.168.1.100" - mock_run_ota.assert_called_once_with(["192.168.1.100"], 3232, "", "custom.bin") + mock_run_ota.assert_called_once_with( + ["192.168.1.100"], 3232, "", Path("custom.bin") + ) def test_upload_program_ota_no_config( @@ -972,7 +974,7 @@ def test_upload_program_ota_with_mqtt_resolution( assert exit_code == 0 assert host == "192.168.1.100" mock_mqtt_get_ip.assert_called_once_with(config, "user", "pass", "client") - expected_firmware = str( + expected_firmware = ( tmp_path / ".esphome" / "build" / "test" / ".pioenvs" / "test" / "firmware.bin" ) mock_run_ota.assert_called_once_with(["192.168.1.100"], 3232, "", expected_firmware) @@ -1382,7 +1384,7 @@ def test_command_wizard(tmp_path: Path) -> None: result = command_wizard(args) assert result == 0 - mock_wizard.assert_called_once_with(str(config_file)) + mock_wizard.assert_called_once_with(config_file) def test_command_rename_invalid_characters( @@ -1407,7 +1409,7 @@ def test_command_rename_complex_yaml( config_file = tmp_path / "test.yaml" config_file.write_text("# Complex YAML without esphome section\nsome_key: value\n") setup_core(tmp_path=tmp_path) - CORE.config_path = str(config_file) + CORE.config_path = config_file args = MockArgs(name="newname") result = command_rename(args, {}) @@ -1436,7 +1438,7 @@ wifi: password: "test1234" """) setup_core(tmp_path=tmp_path) - CORE.config_path = str(config_file) + CORE.config_path = config_file # Set up CORE.config to avoid ValueError when accessing CORE.address CORE.config = {CONF_ESPHOME: {CONF_NAME: "oldname"}} @@ -1486,7 +1488,7 @@ esp32: board: nodemcu-32s """) setup_core(tmp_path=tmp_path) - CORE.config_path = str(config_file) + CORE.config_path = config_file # Set up CORE.config to avoid ValueError when accessing CORE.address 
CORE.config = { @@ -1523,7 +1525,7 @@ esp32: board: nodemcu-32s """) setup_core(tmp_path=tmp_path) - CORE.config_path = str(config_file) + CORE.config_path = config_file args = MockArgs(name="newname", dashboard=False) diff --git a/tests/unit_tests/test_platformio_api.py b/tests/unit_tests/test_platformio_api.py index 7c7883d391..07948cc6ad 100644 --- a/tests/unit_tests/test_platformio_api.py +++ b/tests/unit_tests/test_platformio_api.py @@ -15,45 +15,45 @@ from esphome.core import CORE, EsphomeError def test_idedata_firmware_elf_path(setup_core: Path) -> None: """Test IDEData.firmware_elf_path returns correct path.""" - CORE.build_path = str(setup_core / "build" / "test") + CORE.build_path = setup_core / "build" / "test" CORE.name = "test" raw_data = {"prog_path": "/path/to/firmware.elf"} idedata = platformio_api.IDEData(raw_data) - assert idedata.firmware_elf_path == "/path/to/firmware.elf" + assert idedata.firmware_elf_path == Path("/path/to/firmware.elf") def test_idedata_firmware_bin_path(setup_core: Path) -> None: """Test IDEData.firmware_bin_path returns Path with .bin extension.""" - CORE.build_path = str(setup_core / "build" / "test") + CORE.build_path = setup_core / "build" / "test" CORE.name = "test" prog_path = str(Path("/path/to/firmware.elf")) raw_data = {"prog_path": prog_path} idedata = platformio_api.IDEData(raw_data) result = idedata.firmware_bin_path - assert isinstance(result, str) - expected = str(Path("/path/to/firmware.bin")) + assert isinstance(result, Path) + expected = Path("/path/to/firmware.bin") assert result == expected - assert result.endswith(".bin") + assert str(result).endswith(".bin") def test_idedata_firmware_bin_path_preserves_directory(setup_core: Path) -> None: """Test firmware_bin_path preserves the directory structure.""" - CORE.build_path = str(setup_core / "build" / "test") + CORE.build_path = setup_core / "build" / "test" CORE.name = "test" prog_path = str(Path("/complex/path/to/build/firmware.elf")) raw_data = {"prog_path": prog_path} idedata = platformio_api.IDEData(raw_data) result = idedata.firmware_bin_path - expected = str(Path("/complex/path/to/build/firmware.bin")) + expected = Path("/complex/path/to/build/firmware.bin") assert result == expected def test_idedata_extra_flash_images(setup_core: Path) -> None: """Test IDEData.extra_flash_images returns list of FlashImage objects.""" - CORE.build_path = str(setup_core / "build" / "test") + CORE.build_path = setup_core / "build" / "test" CORE.name = "test" raw_data = { "prog_path": "/path/to/firmware.elf", @@ -69,15 +69,15 @@ def test_idedata_extra_flash_images(setup_core: Path) -> None: images = idedata.extra_flash_images assert len(images) == 2 assert all(isinstance(img, platformio_api.FlashImage) for img in images) - assert images[0].path == "/path/to/bootloader.bin" + assert images[0].path == Path("/path/to/bootloader.bin") assert images[0].offset == "0x1000" - assert images[1].path == "/path/to/partition.bin" + assert images[1].path == Path("/path/to/partition.bin") assert images[1].offset == "0x8000" def test_idedata_extra_flash_images_empty(setup_core: Path) -> None: """Test extra_flash_images returns empty list when no extra images.""" - CORE.build_path = str(setup_core / "build" / "test") + CORE.build_path = setup_core / "build" / "test" CORE.name = "test" raw_data = {"prog_path": "/path/to/firmware.elf", "extra": {"flash_images": []}} idedata = platformio_api.IDEData(raw_data) @@ -88,7 +88,7 @@ def test_idedata_extra_flash_images_empty(setup_core: Path) -> None: def 
test_idedata_cc_path(setup_core: Path) -> None: """Test IDEData.cc_path returns compiler path.""" - CORE.build_path = str(setup_core / "build" / "test") + CORE.build_path = setup_core / "build" / "test" CORE.name = "test" raw_data = { "prog_path": "/path/to/firmware.elf", @@ -104,9 +104,9 @@ def test_idedata_cc_path(setup_core: Path) -> None: def test_flash_image_dataclass() -> None: """Test FlashImage dataclass stores path and offset correctly.""" - image = platformio_api.FlashImage(path="/path/to/image.bin", offset="0x10000") + image = platformio_api.FlashImage(path=Path("/path/to/image.bin"), offset="0x10000") - assert image.path == "/path/to/image.bin" + assert image.path == Path("/path/to/image.bin") assert image.offset == "0x10000" @@ -114,7 +114,7 @@ def test_load_idedata_returns_dict( setup_core: Path, mock_run_platformio_cli_run ) -> None: """Test _load_idedata returns parsed idedata dict when successful.""" - CORE.build_path = str(setup_core / "build" / "test") + CORE.build_path = setup_core / "build" / "test" CORE.name = "test" # Create required files @@ -366,7 +366,7 @@ def test_get_idedata_caches_result( assert result1 is result2 assert isinstance(result1, platformio_api.IDEData) - assert result1.firmware_elf_path == "/test/firmware.elf" + assert result1.firmware_elf_path == Path("/test/firmware.elf") def test_idedata_addr2line_path_windows(setup_core: Path) -> None: @@ -434,9 +434,9 @@ def test_patched_clean_build_dir_removes_outdated(setup_core: Path) -> None: os.utime(platformio_ini, (build_mtime + 1, build_mtime + 1)) # Track if directory was removed - removed_paths: list[str] = [] + removed_paths: list[Path] = [] - def track_rmtree(path: str) -> None: + def track_rmtree(path: Path) -> None: removed_paths.append(path) shutil.rmtree(path) @@ -466,7 +466,7 @@ def test_patched_clean_build_dir_removes_outdated(setup_core: Path) -> None: # Verify directory was removed and recreated assert len(removed_paths) == 1 - assert removed_paths[0] == str(build_dir) + assert removed_paths[0] == build_dir assert build_dir.exists() # makedirs recreated it diff --git a/tests/unit_tests/test_storage_json.py b/tests/unit_tests/test_storage_json.py index e1abe565b1..a3a38960e7 100644 --- a/tests/unit_tests/test_storage_json.py +++ b/tests/unit_tests/test_storage_json.py @@ -15,12 +15,12 @@ from esphome.core import CORE def test_storage_path(setup_core: Path) -> None: """Test storage_path returns correct path for current config.""" - CORE.config_path = str(setup_core / "my_device.yaml") + CORE.config_path = setup_core / "my_device.yaml" result = storage_json.storage_path() data_dir = Path(CORE.data_dir) - expected = str(data_dir / "storage" / "my_device.yaml.json") + expected = data_dir / "storage" / "my_device.yaml.json" assert result == expected @@ -29,20 +29,20 @@ def test_ext_storage_path(setup_core: Path) -> None: result = storage_json.ext_storage_path("other_device.yaml") data_dir = Path(CORE.data_dir) - expected = str(data_dir / "storage" / "other_device.yaml.json") + expected = data_dir / "storage" / "other_device.yaml.json" assert result == expected def test_ext_storage_path_handles_various_extensions(setup_core: Path) -> None: """Test ext_storage_path works with different file extensions.""" result_yml = storage_json.ext_storage_path("device.yml") - assert result_yml.endswith("device.yml.json") + assert str(result_yml).endswith("device.yml.json") result_no_ext = storage_json.ext_storage_path("device") - assert result_no_ext.endswith("device.json") + assert 
str(result_no_ext).endswith("device.json") result_path = storage_json.ext_storage_path("my/device.yaml") - assert result_path.endswith("device.yaml.json") + assert str(result_path).endswith("device.yaml.json") def test_esphome_storage_path(setup_core: Path) -> None: @@ -50,7 +50,7 @@ def test_esphome_storage_path(setup_core: Path) -> None: result = storage_json.esphome_storage_path() data_dir = Path(CORE.data_dir) - expected = str(data_dir / "esphome.json") + expected = data_dir / "esphome.json" assert result == expected @@ -59,27 +59,27 @@ def test_ignored_devices_storage_path(setup_core: Path) -> None: result = storage_json.ignored_devices_storage_path() data_dir = Path(CORE.data_dir) - expected = str(data_dir / "ignored-devices.json") + expected = data_dir / "ignored-devices.json" assert result == expected def test_trash_storage_path(setup_core: Path) -> None: """Test trash_storage_path returns correct path.""" - CORE.config_path = str(setup_core / "configs" / "device.yaml") + CORE.config_path = setup_core / "configs" / "device.yaml" result = storage_json.trash_storage_path() - expected = str(setup_core / "configs" / "trash") + expected = setup_core / "configs" / "trash" assert result == expected def test_archive_storage_path(setup_core: Path) -> None: """Test archive_storage_path returns correct path.""" - CORE.config_path = str(setup_core / "configs" / "device.yaml") + CORE.config_path = setup_core / "configs" / "device.yaml" result = storage_json.archive_storage_path() - expected = str(setup_core / "configs" / "archive") + expected = setup_core / "configs" / "archive" assert result == expected @@ -87,12 +87,12 @@ def test_storage_path_with_subdirectory(setup_core: Path) -> None: """Test storage paths work correctly when config is in subdirectory.""" subdir = setup_core / "configs" / "basement" subdir.mkdir(parents=True, exist_ok=True) - CORE.config_path = str(subdir / "sensor.yaml") + CORE.config_path = subdir / "sensor.yaml" result = storage_json.storage_path() data_dir = Path(CORE.data_dir) - expected = str(data_dir / "storage" / "sensor.yaml.json") + expected = data_dir / "storage" / "sensor.yaml.json" assert result == expected @@ -173,16 +173,16 @@ def test_storage_paths_with_ha_addon(mock_is_ha_addon: bool, tmp_path: Path) -> """Test storage paths when running as Home Assistant addon.""" mock_is_ha_addon.return_value = True - CORE.config_path = str(tmp_path / "test.yaml") + CORE.config_path = tmp_path / "test.yaml" result = storage_json.storage_path() # When is_ha_addon is True, CORE.data_dir returns "/data" # This is the standard mount point for HA addon containers - expected = str(Path("/data") / "storage" / "test.yaml.json") + expected = Path("/data") / "storage" / "test.yaml.json" assert result == expected result = storage_json.esphome_storage_path() - expected = str(Path("/data") / "esphome.json") + expected = Path("/data") / "esphome.json" assert result == expected @@ -375,7 +375,7 @@ def test_storage_json_load_valid_file(tmp_path: Path) -> None: file_path = tmp_path / "storage.json" file_path.write_text(json.dumps(storage_data)) - result = storage_json.StorageJSON.load(str(file_path)) + result = storage_json.StorageJSON.load(file_path) assert result is not None assert result.name == "loaded_device" @@ -386,8 +386,8 @@ def test_storage_json_load_valid_file(tmp_path: Path) -> None: assert result.address == "10.0.0.1" assert result.web_port == 8080 assert result.target_platform == "ESP32" - assert result.build_path == "/loaded/build" - assert result.firmware_bin_path == 
"/loaded/firmware.bin" + assert result.build_path == Path("/loaded/build") + assert result.firmware_bin_path == Path("/loaded/firmware.bin") assert result.loaded_integrations == {"wifi", "api"} assert result.loaded_platforms == {"sensor"} assert result.no_mdns is True @@ -400,7 +400,7 @@ def test_storage_json_load_invalid_file(tmp_path: Path) -> None: file_path = tmp_path / "invalid.json" file_path.write_text("not valid json{") - result = storage_json.StorageJSON.load(str(file_path)) + result = storage_json.StorageJSON.load(file_path) assert result is None @@ -654,7 +654,7 @@ def test_storage_json_load_legacy_esphomeyaml_version(tmp_path: Path) -> None: file_path = tmp_path / "legacy.json" file_path.write_text(json.dumps(storage_data)) - result = storage_json.StorageJSON.load(str(file_path)) + result = storage_json.StorageJSON.load(file_path) assert result is not None assert result.esphome_version == "1.14.0" # Should map to esphome_version diff --git a/tests/unit_tests/test_substitutions.py b/tests/unit_tests/test_substitutions.py index b2b7cb1ea4..dd419aba9c 100644 --- a/tests/unit_tests/test_substitutions.py +++ b/tests/unit_tests/test_substitutions.py @@ -1,6 +1,6 @@ import glob import logging -import os +from pathlib import Path from esphome import yaml_util from esphome.components import substitutions @@ -52,9 +52,8 @@ def dict_diff(a, b, path=""): return diffs -def write_yaml(path, data): - with open(path, "w", encoding="utf-8") as f: - f.write(yaml_util.dump(data)) +def write_yaml(path: Path, data: dict) -> None: + path.write_text(yaml_util.dump(data), encoding="utf-8") def test_substitutions_fixtures(fixture_path): @@ -64,11 +63,10 @@ def test_substitutions_fixtures(fixture_path): failures = [] for source_path in sources: + source_path = Path(source_path) try: - expected_path = source_path.replace(".input.yaml", ".approved.yaml") - test_case = os.path.splitext(os.path.basename(source_path))[0].replace( - ".input", "" - ) + expected_path = source_path.with_suffix("").with_suffix(".approved.yaml") + test_case = source_path.with_suffix("").stem # Load using ESPHome's YAML loader config = yaml_util.load_yaml(source_path) @@ -81,12 +79,12 @@ def test_substitutions_fixtures(fixture_path): substitutions.do_substitution_pass(config, None) # Also load expected using ESPHome's loader, or use {} if missing and DEV_MODE - if os.path.isfile(expected_path): + if expected_path.is_file(): expected = yaml_util.load_yaml(expected_path) elif DEV_MODE: expected = {} else: - assert os.path.isfile(expected_path), ( + assert expected_path.is_file(), ( f"Expected file missing: {expected_path}" ) @@ -97,16 +95,14 @@ def test_substitutions_fixtures(fixture_path): if got_sorted != expected_sorted: diff = "\n".join(dict_diff(got_sorted, expected_sorted)) msg = ( - f"Substitution result mismatch for {os.path.basename(source_path)}\n" + f"Substitution result mismatch for {source_path.name}\n" f"Diff:\n{diff}\n\n" f"Got: {got_sorted}\n" f"Expected: {expected_sorted}" ) # Write out the received file when test fails if DEV_MODE: - received_path = os.path.join( - os.path.dirname(source_path), f"{test_case}.received.yaml" - ) + received_path = source_path.with_name(f"{test_case}.received.yaml") write_yaml(received_path, config) print(msg) failures.append(msg) diff --git a/tests/unit_tests/test_util.py b/tests/unit_tests/test_util.py index 6feda854b7..85873caea8 100644 --- a/tests/unit_tests/test_util.py +++ b/tests/unit_tests/test_util.py @@ -32,21 +32,21 @@ def 
test_list_yaml_files_with_files_and_directories(tmp_path: Path) -> None: # Test with mixed input (directories and files) configs = [ - str(dir1), - str(standalone1), - str(dir2), - str(standalone2), + dir1, + standalone1, + dir2, + standalone2, ] result = util.list_yaml_files(configs) # Should include all YAML files but not the .txt file assert set(result) == { - str(dir1 / "config1.yaml"), - str(dir1 / "config2.yml"), - str(dir2 / "config3.yaml"), - str(standalone1), - str(standalone2), + dir1 / "config1.yaml", + dir1 / "config2.yml", + dir2 / "config3.yaml", + standalone1, + standalone2, } # Check that results are sorted assert result == sorted(result) @@ -63,12 +63,12 @@ def test_list_yaml_files_only_directories(tmp_path: Path) -> None: (dir1 / "b.yml").write_text("test: b") (dir2 / "c.yaml").write_text("test: c") - result = util.list_yaml_files([str(dir1), str(dir2)]) + result = util.list_yaml_files([dir1, dir2]) assert set(result) == { - str(dir1 / "a.yaml"), - str(dir1 / "b.yml"), - str(dir2 / "c.yaml"), + dir1 / "a.yaml", + dir1 / "b.yml", + dir2 / "c.yaml", } assert result == sorted(result) @@ -88,17 +88,17 @@ def test_list_yaml_files_only_files(tmp_path: Path) -> None: # Include a non-YAML file to test filtering result = util.list_yaml_files( [ - str(file1), - str(file2), - str(file3), - str(non_yaml), + file1, + file2, + file3, + non_yaml, ] ) assert set(result) == { - str(file1), - str(file2), - str(file3), + file1, + file2, + file3, } assert result == sorted(result) @@ -108,7 +108,7 @@ def test_list_yaml_files_empty_directory(tmp_path: Path) -> None: empty_dir = tmp_path / "empty" empty_dir.mkdir() - result = util.list_yaml_files([str(empty_dir)]) + result = util.list_yaml_files([empty_dir]) assert result == [] @@ -121,7 +121,7 @@ def test_list_yaml_files_nonexistent_path(tmp_path: Path) -> None: # Should raise an error for non-existent directory with pytest.raises(FileNotFoundError): - util.list_yaml_files([str(nonexistent), str(existing)]) + util.list_yaml_files([nonexistent, existing]) def test_list_yaml_files_mixed_extensions(tmp_path: Path) -> None: @@ -137,11 +137,11 @@ def test_list_yaml_files_mixed_extensions(tmp_path: Path) -> None: yml_file.write_text("test: yml") other_file.write_text("test: txt") - result = util.list_yaml_files([str(dir1)]) + result = util.list_yaml_files([dir1]) assert set(result) == { - str(yaml_file), - str(yml_file), + yaml_file, + yml_file, } @@ -174,17 +174,18 @@ def test_list_yaml_files_does_not_recurse_into_subdirectories(tmp_path: Path) -> assert len(result) == 3 # Check that only root-level files are found - assert str(root / "config1.yaml") in result - assert str(root / "config2.yml") in result - assert str(root / "device.yaml") in result + assert root / "config1.yaml" in result + assert root / "config2.yml" in result + assert root / "device.yaml" in result # Ensure nested files are NOT found for r in result: - assert "subdir" not in r - assert "deeper" not in r - assert "nested1.yaml" not in r - assert "nested2.yml" not in r - assert "very_nested.yaml" not in r + r_str = str(r) + assert "subdir" not in r_str + assert "deeper" not in r_str + assert "nested1.yaml" not in r_str + assert "nested2.yml" not in r_str + assert "very_nested.yaml" not in r_str def test_list_yaml_files_excludes_secrets(tmp_path: Path) -> None: @@ -202,10 +203,10 @@ def test_list_yaml_files_excludes_secrets(tmp_path: Path) -> None: # Should find 2 files (config.yaml and device.yaml), not secrets assert len(result) == 2 - assert str(root / "config.yaml") in result - 
assert str(root / "device.yaml") in result - assert str(root / "secrets.yaml") not in result - assert str(root / "secrets.yml") not in result + assert root / "config.yaml" in result + assert root / "device.yaml" in result + assert root / "secrets.yaml" not in result + assert root / "secrets.yml" not in result def test_list_yaml_files_excludes_hidden_files(tmp_path: Path) -> None: @@ -223,93 +224,102 @@ def test_list_yaml_files_excludes_hidden_files(tmp_path: Path) -> None: # Should find only non-hidden files assert len(result) == 2 - assert str(root / "config.yaml") in result - assert str(root / "device.yaml") in result - assert str(root / ".hidden.yaml") not in result - assert str(root / ".backup.yml") not in result + assert root / "config.yaml" in result + assert root / "device.yaml" in result + assert root / ".hidden.yaml" not in result + assert root / ".backup.yml" not in result def test_filter_yaml_files_basic() -> None: """Test filter_yaml_files function.""" files = [ - "/path/to/config.yaml", - "/path/to/device.yml", - "/path/to/readme.txt", - "/path/to/script.py", - "/path/to/data.json", - "/path/to/another.yaml", + Path("/path/to/config.yaml"), + Path("/path/to/device.yml"), + Path("/path/to/readme.txt"), + Path("/path/to/script.py"), + Path("/path/to/data.json"), + Path("/path/to/another.yaml"), ] result = util.filter_yaml_files(files) assert len(result) == 3 - assert "/path/to/config.yaml" in result - assert "/path/to/device.yml" in result - assert "/path/to/another.yaml" in result - assert "/path/to/readme.txt" not in result - assert "/path/to/script.py" not in result - assert "/path/to/data.json" not in result + assert Path("/path/to/config.yaml") in result + assert Path("/path/to/device.yml") in result + assert Path("/path/to/another.yaml") in result + assert Path("/path/to/readme.txt") not in result + assert Path("/path/to/script.py") not in result + assert Path("/path/to/data.json") not in result def test_filter_yaml_files_excludes_secrets() -> None: """Test that filter_yaml_files excludes secrets files.""" files = [ - "/path/to/config.yaml", - "/path/to/secrets.yaml", - "/path/to/secrets.yml", - "/path/to/device.yaml", - "/some/dir/secrets.yaml", + Path("/path/to/config.yaml"), + Path("/path/to/secrets.yaml"), + Path("/path/to/secrets.yml"), + Path("/path/to/device.yaml"), + Path("/some/dir/secrets.yaml"), ] result = util.filter_yaml_files(files) assert len(result) == 2 - assert "/path/to/config.yaml" in result - assert "/path/to/device.yaml" in result - assert "/path/to/secrets.yaml" not in result - assert "/path/to/secrets.yml" not in result - assert "/some/dir/secrets.yaml" not in result + assert Path("/path/to/config.yaml") in result + assert Path("/path/to/device.yaml") in result + assert Path("/path/to/secrets.yaml") not in result + assert Path("/path/to/secrets.yml") not in result + assert Path("/some/dir/secrets.yaml") not in result def test_filter_yaml_files_excludes_hidden() -> None: """Test that filter_yaml_files excludes hidden files.""" files = [ - "/path/to/config.yaml", - "/path/to/.hidden.yaml", - "/path/to/.backup.yml", - "/path/to/device.yaml", - "/some/dir/.config.yaml", + Path("/path/to/config.yaml"), + Path("/path/to/.hidden.yaml"), + Path("/path/to/.backup.yml"), + Path("/path/to/device.yaml"), + Path("/some/dir/.config.yaml"), ] result = util.filter_yaml_files(files) assert len(result) == 2 - assert "/path/to/config.yaml" in result - assert "/path/to/device.yaml" in result - assert "/path/to/.hidden.yaml" not in result - assert "/path/to/.backup.yml" 
not in result - assert "/some/dir/.config.yaml" not in result + assert Path("/path/to/config.yaml") in result + assert Path("/path/to/device.yaml") in result + assert Path("/path/to/.hidden.yaml") not in result + assert Path("/path/to/.backup.yml") not in result + assert Path("/some/dir/.config.yaml") not in result def test_filter_yaml_files_case_sensitive() -> None: """Test that filter_yaml_files is case-sensitive for extensions.""" files = [ - "/path/to/config.yaml", - "/path/to/config.YAML", - "/path/to/config.YML", - "/path/to/config.Yaml", - "/path/to/config.yml", + Path("/path/to/config.yaml"), + Path("/path/to/config.YAML"), + Path("/path/to/config.YML"), + Path("/path/to/config.Yaml"), + Path("/path/to/config.yml"), ] result = util.filter_yaml_files(files) # Should only match lowercase .yaml and .yml assert len(result) == 2 - assert "/path/to/config.yaml" in result - assert "/path/to/config.yml" in result - assert "/path/to/config.YAML" not in result - assert "/path/to/config.YML" not in result - assert "/path/to/config.Yaml" not in result + + # Check the actual suffixes to ensure case-sensitive filtering + result_suffixes = [p.suffix for p in result] + assert ".yaml" in result_suffixes + assert ".yml" in result_suffixes + + # Verify the filtered files have the expected names + result_names = [p.name for p in result] + assert "config.yaml" in result_names + assert "config.yml" in result_names + # Ensure uppercase extensions are NOT included + assert "config.YAML" not in result_names + assert "config.YML" not in result_names + assert "config.Yaml" not in result_names @pytest.mark.parametrize( diff --git a/tests/unit_tests/test_vscode.py b/tests/unit_tests/test_vscode.py index 4b28a2215b..63bdf3e255 100644 --- a/tests/unit_tests/test_vscode.py +++ b/tests/unit_tests/test_vscode.py @@ -1,5 +1,5 @@ import json -import os +from pathlib import Path from unittest.mock import Mock, patch from esphome import vscode @@ -45,7 +45,7 @@ RESULT_NO_ERROR = '{"type": "result", "yaml_errors": [], "validation_errors": [] def test_multi_file(): - source_path = os.path.join("dir_path", "x.yaml") + source_path = str(Path("dir_path", "x.yaml")) output_lines = _run_repl_test( [ _validate(source_path), @@ -62,7 +62,7 @@ esp8266: expected_lines = [ _read_file(source_path), - _read_file(os.path.join("dir_path", "secrets.yaml")), + _read_file(str(Path("dir_path", "secrets.yaml"))), RESULT_NO_ERROR, ] @@ -70,7 +70,7 @@ esp8266: def test_shows_correct_range_error(): - source_path = os.path.join("dir_path", "x.yaml") + source_path = str(Path("dir_path", "x.yaml")) output_lines = _run_repl_test( [ _validate(source_path), @@ -98,7 +98,7 @@ esp8266: def test_shows_correct_loaded_file_error(): - source_path = os.path.join("dir_path", "x.yaml") + source_path = str(Path("dir_path", "x.yaml")) output_lines = _run_repl_test( [ _validate(source_path), @@ -121,7 +121,7 @@ packages: validation_error = error["validation_errors"][0] assert validation_error["message"].startswith("[broad] is an invalid option for") range = validation_error["range"] - assert range["document"] == os.path.join("dir_path", ".pkg.esp8266.yaml") + assert range["document"] == str(Path("dir_path", ".pkg.esp8266.yaml")) assert range["start_line"] == 1 assert range["start_col"] == 2 assert range["end_line"] == 1 diff --git a/tests/unit_tests/test_wizard.py b/tests/unit_tests/test_wizard.py index 7af4db813a..fd53a0b0b7 100644 --- a/tests/unit_tests/test_wizard.py +++ b/tests/unit_tests/test_wizard.py @@ -1,6 +1,5 @@ """Tests for the wizard.py file.""" 
-import os from pathlib import Path from typing import Any from unittest.mock import MagicMock @@ -127,7 +126,7 @@ def test_wizard_write_sets_platform( # Given del default_config["platform"] monkeypatch.setattr(wz, "write_file", MagicMock()) - monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path)) + monkeypatch.setattr(CORE, "config_path", tmp_path.parent) # When wz.wizard_write(tmp_path, **default_config) @@ -147,7 +146,7 @@ def test_wizard_empty_config(tmp_path: Path, monkeypatch: MonkeyPatch): "name": "test-empty", } monkeypatch.setattr(wz, "write_file", MagicMock()) - monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path)) + monkeypatch.setattr(CORE, "config_path", tmp_path.parent) # When wz.wizard_write(tmp_path, **empty_config) @@ -168,7 +167,7 @@ def test_wizard_upload_config(tmp_path: Path, monkeypatch: MonkeyPatch): "file_text": "# imported file πŸ“\n\n", } monkeypatch.setattr(wz, "write_file", MagicMock()) - monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path)) + monkeypatch.setattr(CORE, "config_path", tmp_path.parent) # When wz.wizard_write(tmp_path, **empty_config) @@ -189,7 +188,7 @@ def test_wizard_write_defaults_platform_from_board_esp8266( default_config["board"] = [*ESP8266_BOARD_PINS][0] monkeypatch.setattr(wz, "write_file", MagicMock()) - monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path)) + monkeypatch.setattr(CORE, "config_path", tmp_path.parent) # When wz.wizard_write(tmp_path, **default_config) @@ -210,7 +209,7 @@ def test_wizard_write_defaults_platform_from_board_esp32( default_config["board"] = [*ESP32_BOARD_PINS][0] monkeypatch.setattr(wz, "write_file", MagicMock()) - monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path)) + monkeypatch.setattr(CORE, "config_path", tmp_path.parent) # When wz.wizard_write(tmp_path, **default_config) @@ -231,7 +230,7 @@ def test_wizard_write_defaults_platform_from_board_bk72xx( default_config["board"] = [*BK72XX_BOARD_PINS][0] monkeypatch.setattr(wz, "write_file", MagicMock()) - monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path)) + monkeypatch.setattr(CORE, "config_path", tmp_path.parent) # When wz.wizard_write(tmp_path, **default_config) @@ -252,7 +251,7 @@ def test_wizard_write_defaults_platform_from_board_ln882x( default_config["board"] = [*LN882X_BOARD_PINS][0] monkeypatch.setattr(wz, "write_file", MagicMock()) - monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path)) + monkeypatch.setattr(CORE, "config_path", tmp_path.parent) # When wz.wizard_write(tmp_path, **default_config) @@ -273,7 +272,7 @@ def test_wizard_write_defaults_platform_from_board_rtl87xx( default_config["board"] = [*RTL87XX_BOARD_PINS][0] monkeypatch.setattr(wz, "write_file", MagicMock()) - monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path)) + monkeypatch.setattr(CORE, "config_path", tmp_path.parent) # When wz.wizard_write(tmp_path, **default_config) @@ -362,7 +361,7 @@ def test_wizard_rejects_path_with_invalid_extension(): """ # Given - config_file = "test.json" + config_file = Path("test.json") # When retval = wz.wizard(config_file) @@ -371,31 +370,31 @@ def test_wizard_rejects_path_with_invalid_extension(): assert retval == 1 -def test_wizard_rejects_existing_files(tmpdir): +def test_wizard_rejects_existing_files(tmp_path): """ The wizard should reject any configuration file that already exists """ # Given - config_file = tmpdir.join("test.yaml") - config_file.write("") + config_file = tmp_path / "test.yaml" + config_file.write_text("") # When - 
retval = wz.wizard(str(config_file)) + retval = wz.wizard(config_file) # Then assert retval == 2 def test_wizard_accepts_default_answers_esp8266( - tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str] + tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str] ): """ The wizard should accept the given default answers for esp8266 """ # Given - config_file = tmpdir.join("test.yaml") + config_file = tmp_path / "test.yaml" input_mock = MagicMock(side_effect=wizard_answers) monkeypatch.setattr("builtins.input", input_mock) monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0) @@ -403,14 +402,14 @@ def test_wizard_accepts_default_answers_esp8266( monkeypatch.setattr(wz, "wizard_write", MagicMock()) # When - retval = wz.wizard(str(config_file)) + retval = wz.wizard(config_file) # Then assert retval == 0 def test_wizard_accepts_default_answers_esp32( - tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str] + tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str] ): """ The wizard should accept the given default answers for esp32 @@ -419,7 +418,7 @@ def test_wizard_accepts_default_answers_esp32( # Given wizard_answers[1] = "ESP32" wizard_answers[2] = "nodemcu-32s" - config_file = tmpdir.join("test.yaml") + config_file = tmp_path / "test.yaml" input_mock = MagicMock(side_effect=wizard_answers) monkeypatch.setattr("builtins.input", input_mock) monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0) @@ -427,14 +426,14 @@ def test_wizard_accepts_default_answers_esp32( monkeypatch.setattr(wz, "wizard_write", MagicMock()) # When - retval = wz.wizard(str(config_file)) + retval = wz.wizard(config_file) # Then assert retval == 0 def test_wizard_offers_better_node_name( - tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str] + tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str] ): """ When the node name does not conform, a better alternative is offered @@ -451,7 +450,7 @@ def test_wizard_offers_better_node_name( wz, "default_input", MagicMock(side_effect=lambda _, default: default) ) - config_file = tmpdir.join("test.yaml") + config_file = tmp_path / "test.yaml" input_mock = MagicMock(side_effect=wizard_answers) monkeypatch.setattr("builtins.input", input_mock) monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0) @@ -459,7 +458,7 @@ def test_wizard_offers_better_node_name( monkeypatch.setattr(wz, "wizard_write", MagicMock()) # When - retval = wz.wizard(str(config_file)) + retval = wz.wizard(config_file) # Then assert retval == 0 @@ -467,7 +466,7 @@ def test_wizard_offers_better_node_name( def test_wizard_requires_correct_platform( - tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str] + tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str] ): """ When the platform is not either esp32 or esp8266, the wizard should reject it @@ -476,7 +475,7 @@ def test_wizard_requires_correct_platform( # Given wizard_answers.insert(1, "foobar") # add invalid entry for platform - config_file = tmpdir.join("test.yaml") + config_file = tmp_path / "test.yaml" input_mock = MagicMock(side_effect=wizard_answers) monkeypatch.setattr("builtins.input", input_mock) monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0) @@ -484,14 +483,14 @@ def test_wizard_requires_correct_platform( monkeypatch.setattr(wz, "wizard_write", MagicMock()) # When - retval = wz.wizard(str(config_file)) + retval = wz.wizard(config_file) # Then assert retval == 0 def test_wizard_requires_correct_board( - tmpdir, monkeypatch: MonkeyPatch, 
wizard_answers: list[str]
+    tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str]
 ):
     """
     When the board is not a valid esp8266 board, the wizard should reject it
@@ -500,7 +499,7 @@ def test_wizard_requires_correct_board(
 
     # Given
     wizard_answers.insert(2, "foobar")  # add an invalid entry for board
-    config_file = tmpdir.join("test.yaml")
+    config_file = tmp_path / "test.yaml"
     input_mock = MagicMock(side_effect=wizard_answers)
     monkeypatch.setattr("builtins.input", input_mock)
     monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0)
@@ -508,14 +507,14 @@ def test_wizard_requires_correct_board(
     monkeypatch.setattr(wz, "wizard_write", MagicMock())
 
     # When
-    retval = wz.wizard(str(config_file))
+    retval = wz.wizard(config_file)
 
     # Then
     assert retval == 0
 
 
 def test_wizard_requires_valid_ssid(
-    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
+    tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str]
 ):
     """
     When the board is not a valid esp8266 board, the wizard should reject it
@@ -524,7 +523,7 @@ def test_wizard_requires_valid_ssid(
 
     # Given
     wizard_answers.insert(3, "")  # add an invalid entry for ssid
-    config_file = tmpdir.join("test.yaml")
+    config_file = tmp_path / "test.yaml"
     input_mock = MagicMock(side_effect=wizard_answers)
     monkeypatch.setattr("builtins.input", input_mock)
     monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0)
@@ -532,28 +531,28 @@ def test_wizard_requires_valid_ssid(
     monkeypatch.setattr(wz, "wizard_write", MagicMock())
 
     # When
-    retval = wz.wizard(str(config_file))
+    retval = wz.wizard(config_file)
 
     # Then
     assert retval == 0
 
 
 def test_wizard_write_protects_existing_config(
-    tmpdir, default_config: dict[str, Any], monkeypatch: MonkeyPatch
+    tmp_path: Path, default_config: dict[str, Any], monkeypatch: MonkeyPatch
 ):
     """
     The wizard_write function should not overwrite existing config files and return False
     """
     # Given
-    config_file = tmpdir.join("test.yaml")
+    config_file = tmp_path / "test.yaml"
     original_content = "# Original config content\n"
-    config_file.write(original_content)
+    config_file.write_text(original_content)
 
-    monkeypatch.setattr(CORE, "config_path", str(tmpdir))
+    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)
 
     # When
-    result = wz.wizard_write(str(config_file), **default_config)
+    result = wz.wizard_write(config_file, **default_config)
 
     # Then
     assert result is False  # Should return False when file exists
-    assert config_file.read() == original_content
+    assert config_file.read_text() == original_content
diff --git a/tests/unit_tests/test_writer.py b/tests/unit_tests/test_writer.py
index 970e0fada6..ba309f2406 100644
--- a/tests/unit_tests/test_writer.py
+++ b/tests/unit_tests/test_writer.py
@@ -257,10 +257,7 @@ def test_clean_cmake_cache(
     cmake_cache_file.write_text("# CMake cache file")
 
     # Setup mocks
-    mock_core.relative_pioenvs_path.side_effect = [
-        str(pioenvs_dir),  # First call for directory check
-        str(cmake_cache_file),  # Second call for file path
-    ]
+    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
     mock_core.name = "test_device"
 
     # Verify file exists before
@@ -288,7 +285,7 @@ def test_clean_cmake_cache_no_pioenvs_dir(
     pioenvs_dir = tmp_path / ".pioenvs"
 
     # Setup mocks
-    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
+    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
 
     # Verify directory doesn't exist
     assert not pioenvs_dir.exists()
@@ -314,10 +311,7 @@ def test_clean_cmake_cache_no_cmake_file(
     cmake_cache_file = device_dir / "CMakeCache.txt"
 
     # Setup mocks
-    mock_core.relative_pioenvs_path.side_effect = [
-        str(pioenvs_dir),  # First call for directory check
-        str(cmake_cache_file),  # Second call for file path
-    ]
+    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
     mock_core.name = "test_device"
 
     # Verify file doesn't exist
@@ -358,9 +352,9 @@ def test_clean_build(
     (platformio_cache_dir / "downloads" / "package.tar.gz").write_text("package")
 
     # Setup mocks
-    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
-    mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
-    mock_core.relative_build_path.return_value = str(dependencies_lock)
+    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
+    mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
+    mock_core.relative_build_path.return_value = dependencies_lock
 
     # Verify all exist before
     assert pioenvs_dir.exists()
@@ -408,9 +402,9 @@ def test_clean_build_partial_exists(
     dependencies_lock = tmp_path / "dependencies.lock"
 
     # Setup mocks
-    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
-    mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
-    mock_core.relative_build_path.return_value = str(dependencies_lock)
+    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
+    mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
+    mock_core.relative_build_path.return_value = dependencies_lock
 
     # Verify only pioenvs exists
     assert pioenvs_dir.exists()
@@ -445,9 +439,9 @@ def test_clean_build_nothing_exists(
     dependencies_lock = tmp_path / "dependencies.lock"
 
     # Setup mocks
-    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
-    mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
-    mock_core.relative_build_path.return_value = str(dependencies_lock)
+    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
+    mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
+    mock_core.relative_build_path.return_value = dependencies_lock
 
     # Verify nothing exists
     assert not pioenvs_dir.exists()
@@ -481,9 +475,9 @@ def test_clean_build_platformio_not_available(
     dependencies_lock.write_text("lock file")
 
     # Setup mocks
-    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
-    mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
-    mock_core.relative_build_path.return_value = str(dependencies_lock)
+    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
+    mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
+    mock_core.relative_build_path.return_value = dependencies_lock
 
     # Verify all exist before
     assert pioenvs_dir.exists()
@@ -519,9 +513,9 @@ def test_clean_build_empty_cache_dir(
     pioenvs_dir.mkdir()
 
     # Setup mocks
-    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
-    mock_core.relative_piolibdeps_path.return_value = str(tmp_path / ".piolibdeps")
-    mock_core.relative_build_path.return_value = str(tmp_path / "dependencies.lock")
+    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
+    mock_core.relative_piolibdeps_path.return_value = tmp_path / ".piolibdeps"
+    mock_core.relative_build_path.return_value = tmp_path / "dependencies.lock"
 
     # Verify pioenvs exists before
     assert pioenvs_dir.exists()
@@ -552,7 +546,7 @@ def test_write_gitignore_creates_new_file(
     gitignore_path = tmp_path / ".gitignore"
 
     # Setup mocks
-    mock_core.relative_config_path.return_value = str(gitignore_path)
+    mock_core.relative_config_path.return_value = gitignore_path
 
     # Verify file doesn't exist
     assert not gitignore_path.exists()
@@ -576,7 +570,7 @@ def test_write_gitignore_skips_existing_file(
     gitignore_path.write_text(existing_content)
 
     # Setup mocks
-    mock_core.relative_config_path.return_value = str(gitignore_path)
+    mock_core.relative_config_path.return_value = gitignore_path
 
     # Verify file exists with custom content
    assert gitignore_path.exists()
@@ -615,7 +609,7 @@ void loop() {{}}"""
     main_cpp.write_text(existing_content)
 
     # Setup mocks
-    mock_core.relative_src_path.return_value = str(main_cpp)
+    mock_core.relative_src_path.return_value = main_cpp
     mock_core.cpp_global_section = "// Global section"
 
     # Call the function
@@ -652,7 +646,7 @@ def test_write_cpp_creates_new_file(
     main_cpp = tmp_path / "main.cpp"
 
     # Setup mocks
-    mock_core.relative_src_path.return_value = str(main_cpp)
+    mock_core.relative_src_path.return_value = main_cpp
     mock_core.cpp_global_section = "// Global section"
 
     # Verify file doesn't exist
@@ -668,7 +662,7 @@ def test_write_cpp_creates_new_file(
     # Get the content that would be written
     mock_write_file.assert_called_once()
     written_path, written_content = mock_write_file.call_args[0]
-    assert written_path == str(main_cpp)
+    assert written_path == main_cpp
 
     # Check that all necessary parts are in the new file
     assert '#include "esphome.h"' in written_content
@@ -698,7 +692,7 @@ def test_write_cpp_with_missing_end_marker(
     main_cpp.write_text(existing_content)
 
     # Setup mocks
-    mock_core.relative_src_path.return_value = str(main_cpp)
+    mock_core.relative_src_path.return_value = main_cpp
 
     # Call should raise an error
     with pytest.raises(EsphomeError, match="Could not find auto generated code end"):
@@ -724,7 +718,7 @@ def test_write_cpp_with_duplicate_markers(
     main_cpp.write_text(existing_content)
 
     # Setup mocks
-    mock_core.relative_src_path.return_value = str(main_cpp)
+    mock_core.relative_src_path.return_value = main_cpp
 
     # Call should raise an error
     with pytest.raises(EsphomeError, match="Found multiple auto generate code begins"):
diff --git a/tests/unit_tests/test_yaml_util.py b/tests/unit_tests/test_yaml_util.py
index bc3c89a64d..eac0ceabb8 100644
--- a/tests/unit_tests/test_yaml_util.py
+++ b/tests/unit_tests/test_yaml_util.py
@@ -67,18 +67,18 @@ def test_parsing_with_custom_loader(fixture_path):
     """
     yaml_file = fixture_path / "yaml_util" / "includetest.yaml"
 
-    loader_calls = []
+    loader_calls: list[Path] = []
 
-    def custom_loader(fname):
+    def custom_loader(fname: Path):
         loader_calls.append(fname)
 
-    with open(yaml_file, encoding="utf-8") as f_handle:
+    with yaml_file.open(encoding="utf-8") as f_handle:
         yaml_util.parse_yaml(yaml_file, f_handle, custom_loader)
 
     assert len(loader_calls) == 3
-    assert loader_calls[0].endswith("includes/included.yaml")
-    assert loader_calls[1].endswith("includes/list.yaml")
-    assert loader_calls[2].endswith("includes/scalar.yaml")
+    assert loader_calls[0].parts[-2:] == ("includes", "included.yaml")
+    assert loader_calls[1].parts[-2:] == ("includes", "list.yaml")
+    assert loader_calls[2].parts[-2:] == ("includes", "scalar.yaml")
 
 
 def test_construct_secret_simple(fixture_path: Path) -> None:
@@ -110,7 +110,7 @@ wifi:
     secrets_yaml.write_text("some_other_secret: value")
 
     with pytest.raises(EsphomeError, match="Secret 'nonexistent_secret' not defined"):
-        yaml_util.load_yaml(str(test_yaml))
+        yaml_util.load_yaml(test_yaml)
 
 
 def test_construct_secret_no_secrets_file(tmp_path: Path) -> None:
@@ -124,10 +124,10 @@ wifi:
 
     # Mock CORE.config_path to avoid NoneType error
     with (
-        patch.object(core.CORE, "config_path", str(tmp_path / "main.yaml")),
+        patch.object(core.CORE, "config_path", tmp_path / "main.yaml"),
         pytest.raises(EsphomeError, match="secrets.yaml"),
     ):
-        yaml_util.load_yaml(str(test_yaml))
+        yaml_util.load_yaml(test_yaml)
 
 
 def test_construct_secret_fallback_to_main_config_dir(
@@ -149,8 +149,8 @@ wifi:
     main_secrets.write_text("test_secret: main_secret_value")
 
     # Mock CORE.config_path to point to main directory
-    with patch.object(core.CORE, "config_path", str(tmp_path / "main.yaml")):
-        actual = yaml_util.load_yaml(str(test_yaml))
+    with patch.object(core.CORE, "config_path", tmp_path / "main.yaml"):
+        actual = yaml_util.load_yaml(test_yaml)
 
     assert actual["wifi"]["password"] == "main_secret_value"
 
@@ -167,7 +167,7 @@ def test_construct_include_dir_named(fixture_path: Path, tmp_path: Path) -> None
 sensor: !include_dir_named named_dir
 """)
 
-    actual = yaml_util.load_yaml(str(test_yaml))
+    actual = yaml_util.load_yaml(test_yaml)
     actual_sensor = actual["sensor"]
 
     # Check that files were loaded with their names as keys
@@ -202,7 +202,7 @@ def test_construct_include_dir_named_empty_dir(tmp_path: Path) -> None:
 sensor: !include_dir_named empty_dir
 """)
 
-    actual = yaml_util.load_yaml(str(test_yaml))
+    actual = yaml_util.load_yaml(test_yaml)
 
     # Should return empty OrderedDict
     assert isinstance(actual["sensor"], OrderedDict)
@@ -234,7 +234,7 @@ def test_construct_include_dir_named_with_dots(tmp_path: Path) -> None:
 test: !include_dir_named test_dir
 """)
 
-    actual = yaml_util.load_yaml(str(test_yaml))
+    actual = yaml_util.load_yaml(test_yaml)
 
     # Should only include visible file
     assert "visible" in actual["test"]
@@ -258,7 +258,7 @@ def test_find_files_recursive(fixture_path: Path, tmp_path: Path) -> None:
 all_sensors: !include_dir_named named_dir
 """)
 
-    actual = yaml_util.load_yaml(str(test_yaml))
+    actual = yaml_util.load_yaml(test_yaml)
 
     # Should find sensor1.yaml, sensor2.yaml, and subdir/sensor3.yaml (all flattened)
     assert len(actual["all_sensors"]) == 3