Merge remote-tracking branch 'upstream/dev' into integration
@@ -172,7 +172,7 @@ def test_write_ini_no_change_when_content_same(
 # write_file_if_changed should be called with the same content
 mock_write_file_if_changed.assert_called_once()
 call_args = mock_write_file_if_changed.call_args[0]
-assert call_args[0] == str(ini_file)
+assert call_args[0] == ini_file
 assert content in call_args[1]
@@ -43,7 +43,7 @@ def fixture_path() -> Path:
 @pytest.fixture
 def setup_core(tmp_path: Path) -> Path:
 """Set up CORE with test paths."""
-CORE.config_path = str(tmp_path / "test.yaml")
+CORE.config_path = tmp_path / "test.yaml"
 return tmp_path
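
For readability, the new side of this fixture reassembles to the sketch below; the import lines are assumed from the surrounding hunks rather than shown in this diff.

from pathlib import Path

import pytest

from esphome.core import CORE


@pytest.fixture
def setup_core(tmp_path: Path) -> Path:
    """Set up CORE with test paths."""
    # CORE.config_path is now assigned a Path object instead of a str.
    CORE.config_path = tmp_path / "test.yaml"
    return tmp_path
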
@@ -10,7 +10,7 @@ from esphome.core import CORE
 def load_config_from_yaml(
-yaml_file: Callable[[str], str], yaml_content: str
+yaml_file: Callable[[str], Path], yaml_content: str
 ) -> Config | None:
 """Load configuration from YAML content."""
 yaml_path = yaml_file(yaml_content)

@@ -25,7 +25,7 @@ def load_config_from_yaml(
 def load_config_from_fixture(
-yaml_file: Callable[[str], str], fixture_name: str, fixtures_dir: Path
+yaml_file: Callable[[str], Path], fixture_name: str, fixtures_dir: Path
 ) -> Config | None:
 """Load configuration from a fixture file."""
 fixture_path = fixtures_dir / fixture_name

@@ -7,12 +7,12 @@ import pytest
 @pytest.fixture
-def yaml_file(tmp_path: Path) -> Callable[[str], str]:
+def yaml_file(tmp_path: Path) -> Callable[[str], Path]:
 """Create a temporary YAML file for testing."""
-def _yaml_file(content: str) -> str:
+def _yaml_file(content: str) -> Path:
 yaml_path = tmp_path / "test.yaml"
 yaml_path.write_text(content)
-return str(yaml_path)
+return yaml_path
 return _yaml_file
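
Reassembled new side of the yaml_file fixture, shown as a sketch for reference; the Callable import location is an assumption, the body comes from the hunk above.

from collections.abc import Callable
from pathlib import Path

import pytest


@pytest.fixture
def yaml_file(tmp_path: Path) -> Callable[[str], Path]:
    """Create a temporary YAML file for testing."""

    def _yaml_file(content: str) -> Path:
        yaml_path = tmp_path / "test.yaml"
        yaml_path.write_text(content)
        # The fixture now hands back the Path itself, not str(yaml_path).
        return yaml_path

    return _yaml_file
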
@@ -289,7 +289,7 @@ def test_valid_include_with_angle_brackets() -> None:
 def test_valid_include_with_valid_file(tmp_path: Path) -> None:
 """Test valid_include accepts valid include files."""
-CORE.config_path = str(tmp_path / "test.yaml")
+CORE.config_path = tmp_path / "test.yaml"
 include_file = tmp_path / "include.h"
 include_file.touch()

@@ -298,7 +298,7 @@ def test_valid_include_with_valid_file(tmp_path: Path) -> None:
 def test_valid_include_with_valid_directory(tmp_path: Path) -> None:
 """Test valid_include accepts valid directories."""
-CORE.config_path = str(tmp_path / "test.yaml")
+CORE.config_path = tmp_path / "test.yaml"
 include_dir = tmp_path / "includes"
 include_dir.mkdir()

@@ -307,7 +307,7 @@ def test_valid_include_with_valid_directory(tmp_path: Path) -> None:
 def test_valid_include_invalid_extension(tmp_path: Path) -> None:
 """Test valid_include rejects files with invalid extensions."""
-CORE.config_path = str(tmp_path / "test.yaml")
+CORE.config_path = tmp_path / "test.yaml"
 invalid_file = tmp_path / "file.txt"
 invalid_file.touch()
@@ -481,7 +481,7 @@ def test_include_file_header(tmp_path: Path, mock_copy_file_if_changed: Mock) ->
 src_file = tmp_path / "source.h"
 src_file.write_text("// Header content")
-CORE.build_path = str(tmp_path / "build")
+CORE.build_path = tmp_path / "build"
 with patch("esphome.core.config.cg") as mock_cg:
 # Mock RawStatement to capture the text

@@ -494,7 +494,7 @@ def test_include_file_header(tmp_path: Path, mock_copy_file_if_changed: Mock) ->
 mock_cg.RawStatement.side_effect = raw_statement_side_effect
-config.include_file(str(src_file), "test.h")
+config.include_file(src_file, Path("test.h"))
 mock_copy_file_if_changed.assert_called_once()
 mock_cg.add_global.assert_called_once()

@@ -507,10 +507,10 @@ def test_include_file_cpp(tmp_path: Path, mock_copy_file_if_changed: Mock) -> No
 src_file = tmp_path / "source.cpp"
 src_file.write_text("// CPP content")
-CORE.build_path = str(tmp_path / "build")
+CORE.build_path = tmp_path / "build"
 with patch("esphome.core.config.cg") as mock_cg:
-config.include_file(str(src_file), "test.cpp")
+config.include_file(src_file, Path("test.cpp"))
 mock_copy_file_if_changed.assert_called_once()
 # Should not add include statement for .cpp files
@@ -602,8 +602,8 @@ async def test_add_includes_with_single_file(
 mock_cg_with_include_capture: tuple[Mock, list[str]],
 ) -> None:
 """Test add_includes copies a single header file to build directory."""
-CORE.config_path = str(tmp_path / "config.yaml")
-CORE.build_path = str(tmp_path / "build")
+CORE.config_path = tmp_path / "config.yaml"
+CORE.build_path = tmp_path / "build"
 os.makedirs(CORE.build_path, exist_ok=True)
 # Create include file

@@ -617,7 +617,7 @@ async def test_add_includes_with_single_file(
 # Verify copy_file_if_changed was called to copy the file
 # Note: add_includes adds files to a src/ subdirectory
 mock_copy_file_if_changed.assert_called_once_with(
-str(include_file), str(Path(CORE.build_path) / "src" / "my_header.h")
+include_file, CORE.build_path / "src" / "my_header.h"
 )
 # Verify include statement was added

@@ -632,8 +632,8 @@ async def test_add_includes_with_directory_unix(
 mock_cg_with_include_capture: tuple[Mock, list[str]],
 ) -> None:
 """Test add_includes copies all files from a directory on Unix."""
-CORE.config_path = str(tmp_path / "config.yaml")
-CORE.build_path = str(tmp_path / "build")
+CORE.config_path = tmp_path / "config.yaml"
+CORE.build_path = tmp_path / "build"
 os.makedirs(CORE.build_path, exist_ok=True)
 # Create include directory with files

@@ -677,8 +677,8 @@ async def test_add_includes_with_directory_windows(
 mock_cg_with_include_capture: tuple[Mock, list[str]],
 ) -> None:
 """Test add_includes copies all files from a directory on Windows."""
-CORE.config_path = str(tmp_path / "config.yaml")
-CORE.build_path = str(tmp_path / "build")
+CORE.config_path = tmp_path / "config.yaml"
+CORE.build_path = tmp_path / "build"
 os.makedirs(CORE.build_path, exist_ok=True)
 # Create include directory with files

@@ -719,8 +719,8 @@ async def test_add_includes_with_multiple_sources(
 tmp_path: Path, mock_copy_file_if_changed: Mock
 ) -> None:
 """Test add_includes with multiple files and directories."""
-CORE.config_path = str(tmp_path / "config.yaml")
-CORE.build_path = str(tmp_path / "build")
+CORE.config_path = tmp_path / "config.yaml"
+CORE.build_path = tmp_path / "build"
 os.makedirs(CORE.build_path, exist_ok=True)
 # Create various include sources

@@ -747,8 +747,8 @@ async def test_add_includes_empty_directory(
 tmp_path: Path, mock_copy_file_if_changed: Mock
 ) -> None:
 """Test add_includes with an empty directory doesn't fail."""
-CORE.config_path = str(tmp_path / "config.yaml")
-CORE.build_path = str(tmp_path / "build")
+CORE.config_path = tmp_path / "config.yaml"
+CORE.build_path = tmp_path / "build"
 os.makedirs(CORE.build_path, exist_ok=True)
 # Create empty directory

@@ -769,8 +769,8 @@ async def test_add_includes_preserves_directory_structure_unix(
 tmp_path: Path, mock_copy_file_if_changed: Mock
 ) -> None:
 """Test that add_includes preserves relative directory structure on Unix."""
-CORE.config_path = str(tmp_path / "config.yaml")
-CORE.build_path = str(tmp_path / "build")
+CORE.config_path = tmp_path / "config.yaml"
+CORE.build_path = tmp_path / "build"
 os.makedirs(CORE.build_path, exist_ok=True)
 # Create nested directory structure

@@ -793,8 +793,8 @@ async def test_add_includes_preserves_directory_structure_unix(
 dest_paths = [call[0][1] for call in calls]
 # Check that relative paths are preserved
-assert any("lib/src/core.h" in path for path in dest_paths)
-assert any("lib/utils/helper.h" in path for path in dest_paths)
+assert any("lib/src/core.h" in str(path) for path in dest_paths)
+assert any("lib/utils/helper.h" in str(path) for path in dest_paths)

 @pytest.mark.asyncio
@@ -803,8 +803,8 @@ async def test_add_includes_preserves_directory_structure_windows(
 tmp_path: Path, mock_copy_file_if_changed: Mock
 ) -> None:
 """Test that add_includes preserves relative directory structure on Windows."""
-CORE.config_path = str(tmp_path / "config.yaml")
-CORE.build_path = str(tmp_path / "build")
+CORE.config_path = tmp_path / "config.yaml"
+CORE.build_path = tmp_path / "build"
 os.makedirs(CORE.build_path, exist_ok=True)
 # Create nested directory structure

@@ -827,8 +827,8 @@ async def test_add_includes_preserves_directory_structure_windows(
 dest_paths = [call[0][1] for call in calls]
 # Check that relative paths are preserved
-assert any("lib\\src\\core.h" in path for path in dest_paths)
-assert any("lib\\utils\\helper.h" in path for path in dest_paths)
+assert any("lib\\src\\core.h" in str(path) for path in dest_paths)
+assert any("lib\\utils\\helper.h" in str(path) for path in dest_paths)

 @pytest.mark.asyncio
@@ -836,8 +836,8 @@ async def test_add_includes_overwrites_existing_files(
 tmp_path: Path, mock_copy_file_if_changed: Mock
 ) -> None:
 """Test that add_includes overwrites existing files in build directory."""
-CORE.config_path = str(tmp_path / "config.yaml")
-CORE.build_path = str(tmp_path / "build")
+CORE.config_path = tmp_path / "config.yaml"
+CORE.build_path = tmp_path / "build"
 os.makedirs(CORE.build_path, exist_ok=True)
 # Create include file

@@ -850,5 +850,5 @@ async def test_add_includes_overwrites_existing_files(
 # Verify copy_file_if_changed was called (it handles overwriting)
 # Note: add_includes adds files to a src/ subdirectory
 mock_copy_file_if_changed.assert_called_once_with(
-str(include_file), str(Path(CORE.build_path) / "src" / "header.h")
+include_file, CORE.build_path / "src" / "header.h"
 )
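
All of the add_includes hunks above follow the same pattern: CORE.config_path and CORE.build_path become Path objects, and the copy expectation compares Path against Path instead of wrapping both sides in str(). A minimal sketch of that assertion style, using a hypothetical helper name (the fixture details are not shown in this diff):

from pathlib import Path
from unittest.mock import Mock


def assert_copied_to_src(mock_copy_file_if_changed: Mock, include_file: Path, build_path: Path) -> None:
    # add_includes is expected to copy each include into <build>/src/<name>; with
    # CORE.build_path as a Path, both call arguments are compared as Path objects.
    mock_copy_file_if_changed.assert_called_once_with(
        include_file, build_path / "src" / include_file.name
    )
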
@@ -15,7 +15,7 @@ def test_directory_valid_path(setup_core: Path) -> None:
|
||||
|
||||
result = cv.directory("test_directory")
|
||||
|
||||
assert result == "test_directory"
|
||||
assert result == test_dir
|
||||
|
||||
|
||||
def test_directory_absolute_path(setup_core: Path) -> None:
|
||||
@@ -25,7 +25,7 @@ def test_directory_absolute_path(setup_core: Path) -> None:
|
||||
|
||||
result = cv.directory(str(test_dir))
|
||||
|
||||
assert result == str(test_dir)
|
||||
assert result == test_dir
|
||||
|
||||
|
||||
def test_directory_nonexistent_path(setup_core: Path) -> None:
|
||||
@@ -52,7 +52,7 @@ def test_directory_with_parent_directory(setup_core: Path) -> None:
|
||||
|
||||
result = cv.directory("parent/child/grandchild")
|
||||
|
||||
assert result == "parent/child/grandchild"
|
||||
assert result == nested_dir
|
||||
|
||||
|
||||
def test_file_valid_path(setup_core: Path) -> None:
|
||||
@@ -62,7 +62,7 @@ def test_file_valid_path(setup_core: Path) -> None:
|
||||
|
||||
result = cv.file_("test_file.yaml")
|
||||
|
||||
assert result == "test_file.yaml"
|
||||
assert result == test_file
|
||||
|
||||
|
||||
def test_file_absolute_path(setup_core: Path) -> None:
|
||||
@@ -72,7 +72,7 @@ def test_file_absolute_path(setup_core: Path) -> None:
|
||||
|
||||
result = cv.file_(str(test_file))
|
||||
|
||||
assert result == str(test_file)
|
||||
assert result == test_file
|
||||
|
||||
|
||||
def test_file_nonexistent_path(setup_core: Path) -> None:
|
||||
@@ -99,7 +99,7 @@ def test_file_with_parent_directory(setup_core: Path) -> None:
|
||||
|
||||
result = cv.file_("configs/sensors/temperature.yaml")
|
||||
|
||||
assert result == "configs/sensors/temperature.yaml"
|
||||
assert result == test_file
|
||||
|
||||
|
||||
def test_directory_handles_trailing_slash(setup_core: Path) -> None:
|
||||
@@ -108,29 +108,29 @@ def test_directory_handles_trailing_slash(setup_core: Path) -> None:
|
||||
test_dir.mkdir()
|
||||
|
||||
result = cv.directory("test_dir/")
|
||||
assert result == "test_dir/"
|
||||
assert result == test_dir
|
||||
|
||||
result = cv.directory("test_dir")
|
||||
assert result == "test_dir"
|
||||
assert result == test_dir
|
||||
|
||||
|
||||
def test_file_handles_various_extensions(setup_core: Path) -> None:
|
||||
"""Test file_ validator works with different file extensions."""
|
||||
yaml_file = setup_core / "config.yaml"
|
||||
yaml_file.write_text("yaml content")
|
||||
assert cv.file_("config.yaml") == "config.yaml"
|
||||
assert cv.file_("config.yaml") == yaml_file
|
||||
|
||||
yml_file = setup_core / "config.yml"
|
||||
yml_file.write_text("yml content")
|
||||
assert cv.file_("config.yml") == "config.yml"
|
||||
assert cv.file_("config.yml") == yml_file
|
||||
|
||||
txt_file = setup_core / "readme.txt"
|
||||
txt_file.write_text("text content")
|
||||
assert cv.file_("readme.txt") == "readme.txt"
|
||||
assert cv.file_("readme.txt") == txt_file
|
||||
|
||||
no_ext_file = setup_core / "LICENSE"
|
||||
no_ext_file.write_text("license content")
|
||||
assert cv.file_("LICENSE") == "LICENSE"
|
||||
assert cv.file_("LICENSE") == no_ext_file
|
||||
|
||||
|
||||
def test_directory_with_symlink(setup_core: Path) -> None:
|
||||
@@ -142,7 +142,7 @@ def test_directory_with_symlink(setup_core: Path) -> None:
|
||||
symlink_dir.symlink_to(actual_dir)
|
||||
|
||||
result = cv.directory("symlink_directory")
|
||||
assert result == "symlink_directory"
|
||||
assert result == symlink_dir
|
||||
|
||||
|
||||
def test_file_with_symlink(setup_core: Path) -> None:
|
||||
@@ -154,7 +154,7 @@ def test_file_with_symlink(setup_core: Path) -> None:
|
||||
symlink_file.symlink_to(actual_file)
|
||||
|
||||
result = cv.file_("symlink_file.txt")
|
||||
assert result == "symlink_file.txt"
|
||||
assert result == symlink_file
|
||||
|
||||
|
||||
def test_directory_error_shows_full_path(setup_core: Path) -> None:
|
||||
@@ -175,7 +175,7 @@ def test_directory_with_spaces_in_name(setup_core: Path) -> None:
|
||||
dir_with_spaces.mkdir()
|
||||
|
||||
result = cv.directory("my test directory")
|
||||
assert result == "my test directory"
|
||||
assert result == dir_with_spaces
|
||||
|
||||
|
||||
def test_file_with_spaces_in_name(setup_core: Path) -> None:
|
||||
@@ -184,4 +184,4 @@ def test_file_with_spaces_in_name(setup_core: Path) -> None:
|
||||
file_with_spaces.write_text("content")
|
||||
|
||||
result = cv.file_("my test file.yaml")
|
||||
assert result == "my test file.yaml"
|
||||
assert result == file_with_spaces
|
||||
|
||||
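
The expected values in these validator hunks suggest that cv.directory and cv.file_ now resolve relative inputs against the directory of CORE.config_path and return the resulting Path instead of echoing the input string. A minimal pytest sketch under that assumption:

from pathlib import Path

import esphome.config_validation as cv
from esphome.core import CORE


def test_directory_returns_path(tmp_path: Path) -> None:
    CORE.config_path = tmp_path / "test.yaml"
    (tmp_path / "includes").mkdir()

    result = cv.directory("includes")

    # The validator resolves the relative name and hands back a Path.
    assert isinstance(result, Path)
    assert result == tmp_path / "includes"
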
@@ -1,4 +1,5 @@
 import os
+from pathlib import Path
 from unittest.mock import patch

 from hypothesis import given

@@ -536,8 +537,8 @@ class TestEsphomeCore:
 @pytest.fixture
 def target(self, fixture_path):
 target = core.EsphomeCore()
-target.build_path = "foo/build"
-target.config_path = "foo/config"
+target.build_path = Path("foo/build")
+target.config_path = Path("foo/config")
 return target

 def test_reset(self, target):

@@ -584,33 +585,33 @@ class TestEsphomeCore:
 @pytest.mark.skipif(os.name == "nt", reason="Unix-specific test")
 def test_data_dir_default_unix(self, target):
 """Test data_dir returns .esphome in config directory by default on Unix."""
-target.config_path = "/home/user/config.yaml"
-assert target.data_dir == "/home/user/.esphome"
+target.config_path = Path("/home/user/config.yaml")
+assert target.data_dir == Path("/home/user/.esphome")

 @pytest.mark.skipif(os.name != "nt", reason="Windows-specific test")
 def test_data_dir_default_windows(self, target):
 """Test data_dir returns .esphome in config directory by default on Windows."""
-target.config_path = "D:\\home\\user\\config.yaml"
-assert target.data_dir == "D:\\home\\user\\.esphome"
+target.config_path = Path("D:\\home\\user\\config.yaml")
+assert target.data_dir == Path("D:\\home\\user\\.esphome")

 def test_data_dir_ha_addon(self, target):
 """Test data_dir returns /data when running as Home Assistant addon."""
-target.config_path = "/config/test.yaml"
+target.config_path = Path("/config/test.yaml")
 with patch.dict(os.environ, {"ESPHOME_IS_HA_ADDON": "true"}):
-assert target.data_dir == "/data"
+assert target.data_dir == Path("/data")

 def test_data_dir_env_override(self, target):
 """Test data_dir uses ESPHOME_DATA_DIR environment variable when set."""
-target.config_path = "/home/user/config.yaml"
+target.config_path = Path("/home/user/config.yaml")
 with patch.dict(os.environ, {"ESPHOME_DATA_DIR": "/custom/data/path"}):
-assert target.data_dir == "/custom/data/path"
+assert target.data_dir == Path("/custom/data/path")

 @pytest.mark.skipif(os.name == "nt", reason="Unix-specific test")
 def test_data_dir_priority_unix(self, target):
 """Test data_dir priority on Unix: HA addon > env var > default."""
-target.config_path = "/config/test.yaml"
+target.config_path = Path("/config/test.yaml")
 expected_default = "/config/.esphome"
 # Test HA addon takes priority over env var

@@ -618,26 +619,26 @@ class TestEsphomeCore:
 os.environ,
 {"ESPHOME_IS_HA_ADDON": "true", "ESPHOME_DATA_DIR": "/custom/path"},
 ):
-assert target.data_dir == "/data"
+assert target.data_dir == Path("/data")
 # Test env var is used when not HA addon
 with patch.dict(
 os.environ,
 {"ESPHOME_IS_HA_ADDON": "false", "ESPHOME_DATA_DIR": "/custom/path"},
 ):
-assert target.data_dir == "/custom/path"
+assert target.data_dir == Path("/custom/path")
 # Test default when neither is set
 with patch.dict(os.environ, {}, clear=True):
 # Ensure these env vars are not set
 os.environ.pop("ESPHOME_IS_HA_ADDON", None)
 os.environ.pop("ESPHOME_DATA_DIR", None)
-assert target.data_dir == expected_default
+assert target.data_dir == Path(expected_default)

 @pytest.mark.skipif(os.name != "nt", reason="Windows-specific test")
 def test_data_dir_priority_windows(self, target):
 """Test data_dir priority on Windows: HA addon > env var > default."""
-target.config_path = "D:\\config\\test.yaml"
+target.config_path = Path("D:\\config\\test.yaml")
 expected_default = "D:\\config\\.esphome"
 # Test HA addon takes priority over env var

@@ -645,21 +646,21 @@ class TestEsphomeCore:
 os.environ,
 {"ESPHOME_IS_HA_ADDON": "true", "ESPHOME_DATA_DIR": "/custom/path"},
 ):
-assert target.data_dir == "/data"
+assert target.data_dir == Path("/data")
 # Test env var is used when not HA addon
 with patch.dict(
 os.environ,
 {"ESPHOME_IS_HA_ADDON": "false", "ESPHOME_DATA_DIR": "/custom/path"},
 ):
-assert target.data_dir == "/custom/path"
+assert target.data_dir == Path("/custom/path")
 # Test default when neither is set
 with patch.dict(os.environ, {}, clear=True):
 # Ensure these env vars are not set
 os.environ.pop("ESPHOME_IS_HA_ADDON", None)
 os.environ.pop("ESPHOME_DATA_DIR", None)
-assert target.data_dir == expected_default
+assert target.data_dir == Path(expected_default)

 def test_platformio_cache_dir_with_env_var(self):
 """Test platformio_cache_dir when PLATFORMIO_CACHE_DIR env var is set."""
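
The data_dir tests above encode a resolution order that is unchanged by this commit, only its return type is now Path: the Home Assistant add-on mount wins, then the ESPHOME_DATA_DIR override, then .esphome next to the config file. A condensed sketch of that order as a single standalone test, Unix-style paths assumed as in the skipif-guarded tests above:

from pathlib import Path

import pytest

from esphome import core


def test_data_dir_resolution(monkeypatch: pytest.MonkeyPatch) -> None:
    target = core.EsphomeCore()
    target.config_path = Path("/home/user/config.yaml")

    # Default: .esphome next to the config file (no add-on, no env override).
    monkeypatch.delenv("ESPHOME_IS_HA_ADDON", raising=False)
    monkeypatch.delenv("ESPHOME_DATA_DIR", raising=False)
    assert target.data_dir == Path("/home/user/.esphome")

    # ESPHOME_DATA_DIR overrides the default.
    monkeypatch.setenv("ESPHOME_DATA_DIR", "/custom/data")
    assert target.data_dir == Path("/custom/data")

    # Running as a Home Assistant add-on wins over the env var.
    monkeypatch.setenv("ESPHOME_IS_HA_ADDON", "true")
    assert target.data_dir == Path("/data")
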
@@ -13,7 +13,12 @@ def test_coro_priority_enum_values() -> None:
 assert CoroPriority.CORE == 100
 assert CoroPriority.DIAGNOSTICS == 90
 assert CoroPriority.STATUS == 80
+assert CoroPriority.WEB_SERVER_BASE == 65
+assert CoroPriority.CAPTIVE_PORTAL == 64
 assert CoroPriority.COMMUNICATION == 60
+assert CoroPriority.NETWORK_SERVICES == 55
+assert CoroPriority.OTA_UPDATES == 54
+assert CoroPriority.WEB_SERVER_OTA == 52
 assert CoroPriority.APPLICATION == 50
 assert CoroPriority.WEB == 40
 assert CoroPriority.AUTOMATION == 30

@@ -70,7 +75,12 @@ def test_float_and_enum_are_interchangeable() -> None:
 (CoroPriority.CORE, 100.0),
 (CoroPriority.DIAGNOSTICS, 90.0),
 (CoroPriority.STATUS, 80.0),
+(CoroPriority.WEB_SERVER_BASE, 65.0),
+(CoroPriority.CAPTIVE_PORTAL, 64.0),
 (CoroPriority.COMMUNICATION, 60.0),
+(CoroPriority.NETWORK_SERVICES, 55.0),
+(CoroPriority.OTA_UPDATES, 54.0),
+(CoroPriority.WEB_SERVER_OTA, 52.0),
 (CoroPriority.APPLICATION, 50.0),
 (CoroPriority.WEB, 40.0),
 (CoroPriority.AUTOMATION, 30.0),

@@ -164,8 +174,13 @@ def test_enum_priority_comparison() -> None:
 assert CoroPriority.NETWORK_TRANSPORT > CoroPriority.CORE
 assert CoroPriority.CORE > CoroPriority.DIAGNOSTICS
 assert CoroPriority.DIAGNOSTICS > CoroPriority.STATUS
-assert CoroPriority.STATUS > CoroPriority.COMMUNICATION
-assert CoroPriority.COMMUNICATION > CoroPriority.APPLICATION
+assert CoroPriority.STATUS > CoroPriority.WEB_SERVER_BASE
+assert CoroPriority.WEB_SERVER_BASE > CoroPriority.CAPTIVE_PORTAL
+assert CoroPriority.CAPTIVE_PORTAL > CoroPriority.COMMUNICATION
+assert CoroPriority.COMMUNICATION > CoroPriority.NETWORK_SERVICES
+assert CoroPriority.NETWORK_SERVICES > CoroPriority.OTA_UPDATES
+assert CoroPriority.OTA_UPDATES > CoroPriority.WEB_SERVER_OTA
+assert CoroPriority.WEB_SERVER_OTA > CoroPriority.APPLICATION
 assert CoroPriority.APPLICATION > CoroPriority.WEB
 assert CoroPriority.WEB > CoroPriority.AUTOMATION
 assert CoroPriority.AUTOMATION > CoroPriority.BUS
@@ -42,7 +42,7 @@ def test_is_file_recent_with_recent_file(setup_core: Path) -> None:
 refresh = TimePeriod(seconds=3600)
-result = external_files.is_file_recent(str(test_file), refresh)
+result = external_files.is_file_recent(test_file, refresh)
 assert result is True

@@ -53,11 +53,13 @@ def test_is_file_recent_with_old_file(setup_core: Path) -> None:
 test_file.write_text("content")
 old_time = time.time() - 7200
-with patch("os.path.getctime", return_value=old_time):
+mock_stat = MagicMock()
+mock_stat.st_ctime = old_time
+with patch.object(Path, "stat", return_value=mock_stat):
 refresh = TimePeriod(seconds=3600)
-result = external_files.is_file_recent(str(test_file), refresh)
+result = external_files.is_file_recent(test_file, refresh)
 assert result is False

@@ -67,7 +69,7 @@ def test_is_file_recent_nonexistent_file(setup_core: Path) -> None:
 test_file = setup_core / "nonexistent.txt"
 refresh = TimePeriod(seconds=3600)
-result = external_files.is_file_recent(str(test_file), refresh)
+result = external_files.is_file_recent(test_file, refresh)
 assert result is False

@@ -77,10 +79,12 @@ def test_is_file_recent_with_zero_refresh(setup_core: Path) -> None:
 test_file = setup_core / "test.txt"
 test_file.write_text("content")
-# Mock getctime to return a time 10 seconds ago
-with patch("os.path.getctime", return_value=time.time() - 10):
+# Mock stat to return a time 10 seconds ago
+mock_stat = MagicMock()
+mock_stat.st_ctime = time.time() - 10
+with patch.object(Path, "stat", return_value=mock_stat):
 refresh = TimePeriod(seconds=0)
-result = external_files.is_file_recent(str(test_file), refresh)
+result = external_files.is_file_recent(test_file, refresh)
 assert result is False

@@ -97,7 +101,7 @@ def test_has_remote_file_changed_not_modified(
 mock_head.return_value = mock_response
 url = "https://example.com/file.txt"
-result = external_files.has_remote_file_changed(url, str(test_file))
+result = external_files.has_remote_file_changed(url, test_file)
 assert result is False
 mock_head.assert_called_once()

@@ -121,7 +125,7 @@ def test_has_remote_file_changed_modified(
 mock_head.return_value = mock_response
 url = "https://example.com/file.txt"
-result = external_files.has_remote_file_changed(url, str(test_file))
+result = external_files.has_remote_file_changed(url, test_file)
 assert result is True

@@ -131,7 +135,7 @@ def test_has_remote_file_changed_no_local_file(setup_core: Path) -> None:
 test_file = setup_core / "nonexistent.txt"
 url = "https://example.com/file.txt"
-result = external_files.has_remote_file_changed(url, str(test_file))
+result = external_files.has_remote_file_changed(url, test_file)
 assert result is True

@@ -149,7 +153,7 @@ def test_has_remote_file_changed_network_error(
 url = "https://example.com/file.txt"
 with pytest.raises(Invalid, match="Could not check if.*Network error"):
-external_files.has_remote_file_changed(url, str(test_file))
+external_files.has_remote_file_changed(url, test_file)

 @patch("esphome.external_files.requests.head")
@@ -165,7 +169,7 @@ def test_has_remote_file_changed_timeout(
 mock_head.return_value = mock_response
 url = "https://example.com/file.txt"
-external_files.has_remote_file_changed(url, str(test_file))
+external_files.has_remote_file_changed(url, test_file)
 call_args = mock_head.call_args
 assert call_args[1]["timeout"] == external_files.NETWORK_TIMEOUT

@@ -191,6 +195,6 @@ def test_is_file_recent_handles_float_seconds(setup_core: Path) -> None:
 refresh = TimePeriod(seconds=3600.5)
-result = external_files.is_file_recent(str(test_file), refresh)
+result = external_files.is_file_recent(test_file, refresh)
 assert result is True
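
The external_files hunks replace patching os.path.getctime with patching Path.stat, since is_file_recent now takes a Path. A minimal sketch of the new mocking pattern; the import locations for TimePeriod are assumed from context:

import time
from pathlib import Path
from unittest.mock import MagicMock, patch

from esphome import external_files
from esphome.core import TimePeriod


def test_old_file_is_not_recent(tmp_path: Path) -> None:
    test_file = tmp_path / "test.txt"
    test_file.write_text("content")

    # Fake a creation time two hours in the past through Path.stat, as the hunks above do.
    mock_stat = MagicMock()
    mock_stat.st_ctime = time.time() - 7200

    with patch.object(Path, "stat", return_value=mock_stat):
        assert external_files.is_file_recent(test_file, TimePeriod(seconds=3600)) is False
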
@@ -154,11 +154,11 @@ def test_walk_files(fixture_path):
 actual = list(helpers.walk_files(path))
 # Ensure paths start with the root
-assert all(p.startswith(str(path)) for p in actual)
+assert all(p.is_relative_to(path) for p in actual)

 class Test_write_file_if_changed:
-def test_src_and_dst_match(self, tmp_path):
+def test_src_and_dst_match(self, tmp_path: Path):
 text = "A files are unique.\n"
 initial = text
 dst = tmp_path / "file-a.txt"

@@ -168,7 +168,7 @@ class Test_write_file_if_changed:
 assert dst.read_text() == text
-def test_src_and_dst_do_not_match(self, tmp_path):
+def test_src_and_dst_do_not_match(self, tmp_path: Path):
 text = "A files are unique.\n"
 initial = "B files are unique.\n"
 dst = tmp_path / "file-a.txt"

@@ -178,7 +178,7 @@ class Test_write_file_if_changed:
 assert dst.read_text() == text
-def test_dst_does_not_exist(self, tmp_path):
+def test_dst_does_not_exist(self, tmp_path: Path):
 text = "A files are unique.\n"
 dst = tmp_path / "file-a.txt"

@@ -188,7 +188,7 @@ class Test_write_file_if_changed:
 class Test_copy_file_if_changed:
-def test_src_and_dst_match(self, tmp_path, fixture_path):
+def test_src_and_dst_match(self, tmp_path: Path, fixture_path: Path):
 src = fixture_path / "helpers" / "file-a.txt"
 initial = fixture_path / "helpers" / "file-a.txt"
 dst = tmp_path / "file-a.txt"

@@ -197,7 +197,7 @@ class Test_copy_file_if_changed:
 helpers.copy_file_if_changed(src, dst)
-def test_src_and_dst_do_not_match(self, tmp_path, fixture_path):
+def test_src_and_dst_do_not_match(self, tmp_path: Path, fixture_path: Path):
 src = fixture_path / "helpers" / "file-a.txt"
 initial = fixture_path / "helpers" / "file-c.txt"
 dst = tmp_path / "file-a.txt"

@@ -208,7 +208,7 @@ class Test_copy_file_if_changed:
 assert src.read_text() == dst.read_text()
-def test_dst_does_not_exist(self, tmp_path, fixture_path):
+def test_dst_does_not_exist(self, tmp_path: Path, fixture_path: Path):
 src = fixture_path / "helpers" / "file-a.txt"
 dst = tmp_path / "file-a.txt"

@@ -604,9 +604,8 @@ def test_mkdir_p_with_existing_file_raises_error(tmp_path: Path) -> None:
 helpers.mkdir_p(dir_path)

-@pytest.mark.skipif(os.name == "nt", reason="Unix-specific test")
-def test_read_file_unix(tmp_path: Path) -> None:
-"""Test read_file reads file content correctly on Unix."""
+def test_read_file(tmp_path: Path) -> None:
+"""Test read_file reads file content correctly."""
 # Test reading regular file
 test_file = tmp_path / "test.txt"
 expected_content = "Test content\nLine 2\n"

@@ -624,31 +623,10 @@ def test_read_file_unix(tmp_path: Path) -> None:
 assert content == utf8_content

-@pytest.mark.skipif(os.name != "nt", reason="Windows-specific test")
-def test_read_file_windows(tmp_path: Path) -> None:
-"""Test read_file reads file content correctly on Windows."""
-# Test reading regular file
-test_file = tmp_path / "test.txt"
-expected_content = "Test content\nLine 2\n"
-test_file.write_text(expected_content)
-content = helpers.read_file(test_file)
-# On Windows, text mode reading converts \n to \r\n
-assert content == expected_content.replace("\n", "\r\n")
-# Test reading file with UTF-8 characters
-utf8_file = tmp_path / "utf8.txt"
-utf8_content = "Hello 世界 🌍"
-utf8_file.write_text(utf8_content, encoding="utf-8")
-content = helpers.read_file(utf8_file)
-assert content == utf8_content

 def test_read_file_not_found() -> None:
 """Test read_file raises error for non-existent file."""
 with pytest.raises(EsphomeError, match=r"Error reading file"):
-helpers.read_file("/nonexistent/file.txt")
+helpers.read_file(Path("/nonexistent/file.txt"))

 def test_read_file_unicode_decode_error(tmp_path: Path) -> None:
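
A short sketch of how the helpers are used after these hunks: walk_files is expected to yield Path objects, so containment is checked with Path.is_relative_to instead of str.startswith, and read_file accepts a Path. Function names below are hypothetical wrappers, not part of the library:

from pathlib import Path

from esphome import helpers


def collect_files(root: Path) -> list[Path]:
    # walk_files now yields Path objects under root.
    found = list(helpers.walk_files(root))
    assert all(p.is_relative_to(root) for p in found)
    return found


def read_text(path: Path) -> str:
    # read_file takes a Path and raises EsphomeError ("Error reading file") if it is missing.
    return helpers.read_file(path)
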
@@ -885,7 +885,7 @@ def test_upload_program_ota_success(
 assert exit_code == 0
 assert host == "192.168.1.100"
-expected_firmware = str(
+expected_firmware = (
 tmp_path / ".esphome" / "build" / "test" / ".pioenvs" / "test" / "firmware.bin"
 )
 mock_run_ota.assert_called_once_with(

@@ -919,7 +919,9 @@ def test_upload_program_ota_with_file_arg(
 assert exit_code == 0
 assert host == "192.168.1.100"
-mock_run_ota.assert_called_once_with(["192.168.1.100"], 3232, "", "custom.bin")
+mock_run_ota.assert_called_once_with(
+["192.168.1.100"], 3232, "", Path("custom.bin")
+)

 def test_upload_program_ota_no_config(
@@ -972,7 +974,7 @@ def test_upload_program_ota_with_mqtt_resolution(
 assert exit_code == 0
 assert host == "192.168.1.100"
 mock_mqtt_get_ip.assert_called_once_with(config, "user", "pass", "client")
-expected_firmware = str(
+expected_firmware = (
 tmp_path / ".esphome" / "build" / "test" / ".pioenvs" / "test" / "firmware.bin"
 )
 mock_run_ota.assert_called_once_with(["192.168.1.100"], 3232, "", expected_firmware)

@@ -1382,7 +1384,7 @@ def test_command_wizard(tmp_path: Path) -> None:
 result = command_wizard(args)
 assert result == 0
-mock_wizard.assert_called_once_with(str(config_file))
+mock_wizard.assert_called_once_with(config_file)

 def test_command_rename_invalid_characters(
@@ -1407,7 +1409,7 @@ def test_command_rename_complex_yaml(
 config_file = tmp_path / "test.yaml"
 config_file.write_text("# Complex YAML without esphome section\nsome_key: value\n")
 setup_core(tmp_path=tmp_path)
-CORE.config_path = str(config_file)
+CORE.config_path = config_file
 args = MockArgs(name="newname")
 result = command_rename(args, {})

@@ -1436,7 +1438,7 @@ wifi:
 password: "test1234"
 """)
 setup_core(tmp_path=tmp_path)
-CORE.config_path = str(config_file)
+CORE.config_path = config_file
 # Set up CORE.config to avoid ValueError when accessing CORE.address
 CORE.config = {CONF_ESPHOME: {CONF_NAME: "oldname"}}

@@ -1486,7 +1488,7 @@ esp32:
 board: nodemcu-32s
 """)
 setup_core(tmp_path=tmp_path)
-CORE.config_path = str(config_file)
+CORE.config_path = config_file
 # Set up CORE.config to avoid ValueError when accessing CORE.address
 CORE.config = {

@@ -1523,7 +1525,7 @@ esp32:
 board: nodemcu-32s
 """)
 setup_core(tmp_path=tmp_path)
-CORE.config_path = str(config_file)
+CORE.config_path = config_file
 args = MockArgs(name="newname", dashboard=False)
@@ -15,45 +15,45 @@ from esphome.core import CORE, EsphomeError
 def test_idedata_firmware_elf_path(setup_core: Path) -> None:
 """Test IDEData.firmware_elf_path returns correct path."""
-CORE.build_path = str(setup_core / "build" / "test")
+CORE.build_path = setup_core / "build" / "test"
 CORE.name = "test"
 raw_data = {"prog_path": "/path/to/firmware.elf"}
 idedata = platformio_api.IDEData(raw_data)
-assert idedata.firmware_elf_path == "/path/to/firmware.elf"
+assert idedata.firmware_elf_path == Path("/path/to/firmware.elf")

 def test_idedata_firmware_bin_path(setup_core: Path) -> None:
 """Test IDEData.firmware_bin_path returns Path with .bin extension."""
-CORE.build_path = str(setup_core / "build" / "test")
+CORE.build_path = setup_core / "build" / "test"
 CORE.name = "test"
 prog_path = str(Path("/path/to/firmware.elf"))
 raw_data = {"prog_path": prog_path}
 idedata = platformio_api.IDEData(raw_data)
 result = idedata.firmware_bin_path
-assert isinstance(result, str)
-expected = str(Path("/path/to/firmware.bin"))
+assert isinstance(result, Path)
+expected = Path("/path/to/firmware.bin")
 assert result == expected
-assert result.endswith(".bin")
+assert str(result).endswith(".bin")

 def test_idedata_firmware_bin_path_preserves_directory(setup_core: Path) -> None:
 """Test firmware_bin_path preserves the directory structure."""
-CORE.build_path = str(setup_core / "build" / "test")
+CORE.build_path = setup_core / "build" / "test"
 CORE.name = "test"
 prog_path = str(Path("/complex/path/to/build/firmware.elf"))
 raw_data = {"prog_path": prog_path}
 idedata = platformio_api.IDEData(raw_data)
 result = idedata.firmware_bin_path
-expected = str(Path("/complex/path/to/build/firmware.bin"))
+expected = Path("/complex/path/to/build/firmware.bin")
 assert result == expected

 def test_idedata_extra_flash_images(setup_core: Path) -> None:
 """Test IDEData.extra_flash_images returns list of FlashImage objects."""
-CORE.build_path = str(setup_core / "build" / "test")
+CORE.build_path = setup_core / "build" / "test"
 CORE.name = "test"
 raw_data = {
 "prog_path": "/path/to/firmware.elf",

@@ -69,15 +69,15 @@ def test_idedata_extra_flash_images(setup_core: Path) -> None:
 images = idedata.extra_flash_images
 assert len(images) == 2
 assert all(isinstance(img, platformio_api.FlashImage) for img in images)
-assert images[0].path == "/path/to/bootloader.bin"
+assert images[0].path == Path("/path/to/bootloader.bin")
 assert images[0].offset == "0x1000"
-assert images[1].path == "/path/to/partition.bin"
+assert images[1].path == Path("/path/to/partition.bin")
 assert images[1].offset == "0x8000"

 def test_idedata_extra_flash_images_empty(setup_core: Path) -> None:
 """Test extra_flash_images returns empty list when no extra images."""
-CORE.build_path = str(setup_core / "build" / "test")
+CORE.build_path = setup_core / "build" / "test"
 CORE.name = "test"
 raw_data = {"prog_path": "/path/to/firmware.elf", "extra": {"flash_images": []}}
 idedata = platformio_api.IDEData(raw_data)

@@ -88,7 +88,7 @@ def test_idedata_extra_flash_images_empty(setup_core: Path) -> None:
 def test_idedata_cc_path(setup_core: Path) -> None:
 """Test IDEData.cc_path returns compiler path."""
-CORE.build_path = str(setup_core / "build" / "test")
+CORE.build_path = setup_core / "build" / "test"
 CORE.name = "test"
 raw_data = {
 "prog_path": "/path/to/firmware.elf",

@@ -104,9 +104,9 @@ def test_idedata_cc_path(setup_core: Path) -> None:
 def test_flash_image_dataclass() -> None:
 """Test FlashImage dataclass stores path and offset correctly."""
-image = platformio_api.FlashImage(path="/path/to/image.bin", offset="0x10000")
+image = platformio_api.FlashImage(path=Path("/path/to/image.bin"), offset="0x10000")
-assert image.path == "/path/to/image.bin"
+assert image.path == Path("/path/to/image.bin")
 assert image.offset == "0x10000"

@@ -114,7 +114,7 @@ def test_load_idedata_returns_dict(
 setup_core: Path, mock_run_platformio_cli_run
 ) -> None:
 """Test _load_idedata returns parsed idedata dict when successful."""
-CORE.build_path = str(setup_core / "build" / "test")
+CORE.build_path = setup_core / "build" / "test"
 CORE.name = "test"
 # Create required files

@@ -366,7 +366,7 @@ def test_get_idedata_caches_result(
 assert result1 is result2
 assert isinstance(result1, platformio_api.IDEData)
-assert result1.firmware_elf_path == "/test/firmware.elf"
+assert result1.firmware_elf_path == Path("/test/firmware.elf")

 def test_idedata_addr2line_path_windows(setup_core: Path) -> None:
@@ -434,9 +434,9 @@ def test_patched_clean_build_dir_removes_outdated(setup_core: Path) -> None:
 os.utime(platformio_ini, (build_mtime + 1, build_mtime + 1))
 # Track if directory was removed
-removed_paths: list[str] = []
+removed_paths: list[Path] = []
-def track_rmtree(path: str) -> None:
+def track_rmtree(path: Path) -> None:
 removed_paths.append(path)
 shutil.rmtree(path)

@@ -466,7 +466,7 @@ def test_patched_clean_build_dir_removes_outdated(setup_core: Path) -> None:
 # Verify directory was removed and recreated
 assert len(removed_paths) == 1
-assert removed_paths[0] == str(build_dir)
+assert removed_paths[0] == build_dir
 assert build_dir.exists() # makedirs recreated it
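
The reason nearly every assertion in these hunks changes is a basic pathlib rule: a Path never compares equal to the equivalent string, and string methods such as endswith are not available on Path. A short illustration of the comparison rules the new tests rely on:

from pathlib import Path

p = Path("/path/to/firmware.bin")

assert p != "/path/to/firmware.bin"        # a Path never equals the equivalent string
assert p == Path("/path/to/firmware.bin")  # Path-to-Path comparison is by parsed parts
assert str(p).endswith(".bin")             # string-style checks need an explicit str()
assert p.suffix == ".bin"                  # or use the pathlib attribute directly
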
@@ -15,12 +15,12 @@ from esphome.core import CORE
 def test_storage_path(setup_core: Path) -> None:
 """Test storage_path returns correct path for current config."""
-CORE.config_path = str(setup_core / "my_device.yaml")
+CORE.config_path = setup_core / "my_device.yaml"
 result = storage_json.storage_path()
 data_dir = Path(CORE.data_dir)
-expected = str(data_dir / "storage" / "my_device.yaml.json")
+expected = data_dir / "storage" / "my_device.yaml.json"
 assert result == expected

@@ -29,20 +29,20 @@ def test_ext_storage_path(setup_core: Path) -> None:
 result = storage_json.ext_storage_path("other_device.yaml")
 data_dir = Path(CORE.data_dir)
-expected = str(data_dir / "storage" / "other_device.yaml.json")
+expected = data_dir / "storage" / "other_device.yaml.json"
 assert result == expected

 def test_ext_storage_path_handles_various_extensions(setup_core: Path) -> None:
 """Test ext_storage_path works with different file extensions."""
 result_yml = storage_json.ext_storage_path("device.yml")
-assert result_yml.endswith("device.yml.json")
+assert str(result_yml).endswith("device.yml.json")
 result_no_ext = storage_json.ext_storage_path("device")
-assert result_no_ext.endswith("device.json")
+assert str(result_no_ext).endswith("device.json")
 result_path = storage_json.ext_storage_path("my/device.yaml")
-assert result_path.endswith("device.yaml.json")
+assert str(result_path).endswith("device.yaml.json")

 def test_esphome_storage_path(setup_core: Path) -> None:
@@ -50,7 +50,7 @@ def test_esphome_storage_path(setup_core: Path) -> None:
 result = storage_json.esphome_storage_path()
 data_dir = Path(CORE.data_dir)
-expected = str(data_dir / "esphome.json")
+expected = data_dir / "esphome.json"
 assert result == expected

@@ -59,27 +59,27 @@ def test_ignored_devices_storage_path(setup_core: Path) -> None:
 result = storage_json.ignored_devices_storage_path()
 data_dir = Path(CORE.data_dir)
-expected = str(data_dir / "ignored-devices.json")
+expected = data_dir / "ignored-devices.json"
 assert result == expected

 def test_trash_storage_path(setup_core: Path) -> None:
 """Test trash_storage_path returns correct path."""
-CORE.config_path = str(setup_core / "configs" / "device.yaml")
+CORE.config_path = setup_core / "configs" / "device.yaml"
 result = storage_json.trash_storage_path()
-expected = str(setup_core / "configs" / "trash")
+expected = setup_core / "configs" / "trash"
 assert result == expected

 def test_archive_storage_path(setup_core: Path) -> None:
 """Test archive_storage_path returns correct path."""
-CORE.config_path = str(setup_core / "configs" / "device.yaml")
+CORE.config_path = setup_core / "configs" / "device.yaml"
 result = storage_json.archive_storage_path()
-expected = str(setup_core / "configs" / "archive")
+expected = setup_core / "configs" / "archive"
 assert result == expected

@@ -87,12 +87,12 @@ def test_storage_path_with_subdirectory(setup_core: Path) -> None:
 """Test storage paths work correctly when config is in subdirectory."""
 subdir = setup_core / "configs" / "basement"
 subdir.mkdir(parents=True, exist_ok=True)
-CORE.config_path = str(subdir / "sensor.yaml")
+CORE.config_path = subdir / "sensor.yaml"
 result = storage_json.storage_path()
 data_dir = Path(CORE.data_dir)
-expected = str(data_dir / "storage" / "sensor.yaml.json")
+expected = data_dir / "storage" / "sensor.yaml.json"
 assert result == expected

@@ -173,16 +173,16 @@ def test_storage_paths_with_ha_addon(mock_is_ha_addon: bool, tmp_path: Path) ->
 """Test storage paths when running as Home Assistant addon."""
 mock_is_ha_addon.return_value = True
-CORE.config_path = str(tmp_path / "test.yaml")
+CORE.config_path = tmp_path / "test.yaml"
 result = storage_json.storage_path()
 # When is_ha_addon is True, CORE.data_dir returns "/data"
 # This is the standard mount point for HA addon containers
-expected = str(Path("/data") / "storage" / "test.yaml.json")
+expected = Path("/data") / "storage" / "test.yaml.json"
 assert result == expected
 result = storage_json.esphome_storage_path()
-expected = str(Path("/data") / "esphome.json")
+expected = Path("/data") / "esphome.json"
 assert result == expected

@@ -375,7 +375,7 @@ def test_storage_json_load_valid_file(tmp_path: Path) -> None:
 file_path = tmp_path / "storage.json"
 file_path.write_text(json.dumps(storage_data))
-result = storage_json.StorageJSON.load(str(file_path))
+result = storage_json.StorageJSON.load(file_path)
 assert result is not None
 assert result.name == "loaded_device"

@@ -386,8 +386,8 @@ def test_storage_json_load_valid_file(tmp_path: Path) -> None:
 assert result.address == "10.0.0.1"
 assert result.web_port == 8080
 assert result.target_platform == "ESP32"
-assert result.build_path == "/loaded/build"
-assert result.firmware_bin_path == "/loaded/firmware.bin"
+assert result.build_path == Path("/loaded/build")
+assert result.firmware_bin_path == Path("/loaded/firmware.bin")
 assert result.loaded_integrations == {"wifi", "api"}
 assert result.loaded_platforms == {"sensor"}
 assert result.no_mdns is True

@@ -400,7 +400,7 @@ def test_storage_json_load_invalid_file(tmp_path: Path) -> None:
 file_path = tmp_path / "invalid.json"
 file_path.write_text("not valid json{")
-result = storage_json.StorageJSON.load(str(file_path))
+result = storage_json.StorageJSON.load(file_path)
 assert result is None

@@ -654,7 +654,7 @@ def test_storage_json_load_legacy_esphomeyaml_version(tmp_path: Path) -> None:
 file_path = tmp_path / "legacy.json"
 file_path.write_text(json.dumps(storage_data))
-result = storage_json.StorageJSON.load(str(file_path))
+result = storage_json.StorageJSON.load(file_path)
 assert result is not None
 assert result.esphome_version == "1.14.0" # Should map to esphome_version
@@ -1,6 +1,6 @@
 import glob
 import logging
 import os
+from pathlib import Path

 from esphome import yaml_util
 from esphome.components import substitutions

@@ -52,9 +52,8 @@ def dict_diff(a, b, path=""):
 return diffs

-def write_yaml(path, data):
-with open(path, "w", encoding="utf-8") as f:
-f.write(yaml_util.dump(data))
+def write_yaml(path: Path, data: dict) -> None:
+path.write_text(yaml_util.dump(data), encoding="utf-8")

 def test_substitutions_fixtures(fixture_path):

@@ -64,11 +63,10 @@ def test_substitutions_fixtures(fixture_path):
 failures = []
 for source_path in sources:
+source_path = Path(source_path)
 try:
-expected_path = source_path.replace(".input.yaml", ".approved.yaml")
-test_case = os.path.splitext(os.path.basename(source_path))[0].replace(
-".input", ""
-)
+expected_path = source_path.with_suffix("").with_suffix(".approved.yaml")
+test_case = source_path.with_suffix("").stem
 # Load using ESPHome's YAML loader
 config = yaml_util.load_yaml(source_path)

@@ -81,12 +79,12 @@ def test_substitutions_fixtures(fixture_path):
 substitutions.do_substitution_pass(config, None)
 # Also load expected using ESPHome's loader, or use {} if missing and DEV_MODE
-if os.path.isfile(expected_path):
+if expected_path.is_file():
 expected = yaml_util.load_yaml(expected_path)
 elif DEV_MODE:
 expected = {}
 else:
-assert os.path.isfile(expected_path), (
+assert expected_path.is_file(), (
 f"Expected file missing: {expected_path}"
 )

@@ -97,16 +95,14 @@ def test_substitutions_fixtures(fixture_path):
 if got_sorted != expected_sorted:
 diff = "\n".join(dict_diff(got_sorted, expected_sorted))
 msg = (
-f"Substitution result mismatch for {os.path.basename(source_path)}\n"
+f"Substitution result mismatch for {source_path.name}\n"
 f"Diff:\n{diff}\n\n"
 f"Got: {got_sorted}\n"
 f"Expected: {expected_sorted}"
 )
 # Write out the received file when test fails
 if DEV_MODE:
-received_path = os.path.join(
-os.path.dirname(source_path), f"{test_case}.received.yaml"
-)
+received_path = source_path.with_name(f"{test_case}.received.yaml")
 write_yaml(received_path, config)
 print(msg)
 failures.append(msg)
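
The substitutions fixture loop swaps os.path calls for pathlib equivalents. A small sketch of the replacements used above, with a hypothetical fixture name; the asserts show the resulting filenames:

from pathlib import Path

source_path = Path("fixtures/example.input.yaml")  # hypothetical fixture file

# os.path.isfile(p)                          -> p.is_file()
# os.path.basename(p)                        -> p.name
# os.path.join(os.path.dirname(p), name)     -> p.with_name(name)
# splitext/basename/replace gymnastics       -> with_suffix("") and .stem

test_case = source_path.with_suffix("").stem                                # "example"
expected_path = source_path.with_suffix("").with_suffix(".approved.yaml")   # fixtures/example.approved.yaml
received_path = source_path.with_name(f"{test_case}.received.yaml")         # fixtures/example.received.yaml

assert expected_path.name == "example.approved.yaml"
assert received_path.name == "example.received.yaml"
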
@@ -32,21 +32,21 @@ def test_list_yaml_files_with_files_and_directories(tmp_path: Path) -> None:
|
||||
|
||||
# Test with mixed input (directories and files)
|
||||
configs = [
|
||||
str(dir1),
|
||||
str(standalone1),
|
||||
str(dir2),
|
||||
str(standalone2),
|
||||
dir1,
|
||||
standalone1,
|
||||
dir2,
|
||||
standalone2,
|
||||
]
|
||||
|
||||
result = util.list_yaml_files(configs)
|
||||
|
||||
# Should include all YAML files but not the .txt file
|
||||
assert set(result) == {
|
||||
str(dir1 / "config1.yaml"),
|
||||
str(dir1 / "config2.yml"),
|
||||
str(dir2 / "config3.yaml"),
|
||||
str(standalone1),
|
||||
str(standalone2),
|
||||
dir1 / "config1.yaml",
|
||||
dir1 / "config2.yml",
|
||||
dir2 / "config3.yaml",
|
||||
standalone1,
|
||||
standalone2,
|
||||
}
|
||||
# Check that results are sorted
|
||||
assert result == sorted(result)
|
||||
@@ -63,12 +63,12 @@ def test_list_yaml_files_only_directories(tmp_path: Path) -> None:
|
||||
(dir1 / "b.yml").write_text("test: b")
|
||||
(dir2 / "c.yaml").write_text("test: c")
|
||||
|
||||
result = util.list_yaml_files([str(dir1), str(dir2)])
|
||||
result = util.list_yaml_files([dir1, dir2])
|
||||
|
||||
assert set(result) == {
|
||||
str(dir1 / "a.yaml"),
|
||||
str(dir1 / "b.yml"),
|
||||
str(dir2 / "c.yaml"),
|
||||
dir1 / "a.yaml",
|
||||
dir1 / "b.yml",
|
||||
dir2 / "c.yaml",
|
||||
}
|
||||
assert result == sorted(result)
|
||||
|
||||
@@ -88,17 +88,17 @@ def test_list_yaml_files_only_files(tmp_path: Path) -> None:
|
||||
# Include a non-YAML file to test filtering
|
||||
result = util.list_yaml_files(
|
||||
[
|
||||
str(file1),
|
||||
str(file2),
|
||||
str(file3),
|
||||
str(non_yaml),
|
||||
file1,
|
||||
file2,
|
||||
file3,
|
||||
non_yaml,
|
||||
]
|
||||
)
|
||||
|
||||
assert set(result) == {
|
||||
str(file1),
|
||||
str(file2),
|
||||
str(file3),
|
||||
file1,
|
||||
file2,
|
||||
file3,
|
||||
}
|
||||
assert result == sorted(result)
|
||||
|
||||
@@ -108,7 +108,7 @@ def test_list_yaml_files_empty_directory(tmp_path: Path) -> None:
|
||||
empty_dir = tmp_path / "empty"
|
||||
empty_dir.mkdir()
|
||||
|
||||
result = util.list_yaml_files([str(empty_dir)])
|
||||
result = util.list_yaml_files([empty_dir])
|
||||
|
||||
assert result == []
|
||||
|
||||
@@ -121,7 +121,7 @@ def test_list_yaml_files_nonexistent_path(tmp_path: Path) -> None:
|
||||
|
||||
# Should raise an error for non-existent directory
|
||||
with pytest.raises(FileNotFoundError):
|
||||
util.list_yaml_files([str(nonexistent), str(existing)])
|
||||
util.list_yaml_files([nonexistent, existing])
|
||||
|
||||
|
||||
def test_list_yaml_files_mixed_extensions(tmp_path: Path) -> None:
|
||||
@@ -137,11 +137,11 @@ def test_list_yaml_files_mixed_extensions(tmp_path: Path) -> None:
|
||||
yml_file.write_text("test: yml")
|
||||
other_file.write_text("test: txt")
|
||||
|
||||
result = util.list_yaml_files([str(dir1)])
|
||||
result = util.list_yaml_files([dir1])
|
||||
|
||||
assert set(result) == {
|
||||
str(yaml_file),
|
||||
str(yml_file),
|
||||
yaml_file,
|
||||
yml_file,
|
||||
}
|
||||
|
||||
|
||||
@@ -174,17 +174,18 @@ def test_list_yaml_files_does_not_recurse_into_subdirectories(tmp_path: Path) ->
|
||||
assert len(result) == 3
|
||||
|
||||
# Check that only root-level files are found
|
||||
assert str(root / "config1.yaml") in result
|
||||
assert str(root / "config2.yml") in result
|
||||
assert str(root / "device.yaml") in result
|
||||
assert root / "config1.yaml" in result
|
||||
assert root / "config2.yml" in result
|
||||
assert root / "device.yaml" in result
|
||||
|
||||
# Ensure nested files are NOT found
|
||||
for r in result:
|
||||
assert "subdir" not in r
|
||||
assert "deeper" not in r
|
||||
assert "nested1.yaml" not in r
|
||||
assert "nested2.yml" not in r
|
||||
assert "very_nested.yaml" not in r
|
||||
r_str = str(r)
|
||||
assert "subdir" not in r_str
|
||||
assert "deeper" not in r_str
|
||||
assert "nested1.yaml" not in r_str
|
||||
assert "nested2.yml" not in r_str
|
||||
assert "very_nested.yaml" not in r_str
|
||||
|
||||
|
||||
def test_list_yaml_files_excludes_secrets(tmp_path: Path) -> None:
|
||||
@@ -202,10 +203,10 @@ def test_list_yaml_files_excludes_secrets(tmp_path: Path) -> None:
|
||||
|
||||
# Should find 2 files (config.yaml and device.yaml), not secrets
|
||||
assert len(result) == 2
|
||||
assert str(root / "config.yaml") in result
|
||||
assert str(root / "device.yaml") in result
|
||||
assert str(root / "secrets.yaml") not in result
|
||||
assert str(root / "secrets.yml") not in result
|
||||
assert root / "config.yaml" in result
|
||||
assert root / "device.yaml" in result
|
||||
assert root / "secrets.yaml" not in result
|
||||
assert root / "secrets.yml" not in result
|
||||
|
||||
|
||||
def test_list_yaml_files_excludes_hidden_files(tmp_path: Path) -> None:
|
||||
@@ -223,93 +224,102 @@ def test_list_yaml_files_excludes_hidden_files(tmp_path: Path) -> None:
|
||||
|
||||
# Should find only non-hidden files
|
||||
assert len(result) == 2
|
||||
assert str(root / "config.yaml") in result
|
||||
assert str(root / "device.yaml") in result
|
||||
assert str(root / ".hidden.yaml") not in result
|
||||
assert str(root / ".backup.yml") not in result
|
||||
assert root / "config.yaml" in result
|
||||
assert root / "device.yaml" in result
|
||||
assert root / ".hidden.yaml" not in result
|
||||
assert root / ".backup.yml" not in result
|
||||
|
||||
|
||||
def test_filter_yaml_files_basic() -> None:
|
||||
"""Test filter_yaml_files function."""
|
||||
files = [
|
||||
"/path/to/config.yaml",
|
||||
"/path/to/device.yml",
|
||||
"/path/to/readme.txt",
|
||||
"/path/to/script.py",
|
||||
"/path/to/data.json",
|
||||
"/path/to/another.yaml",
|
||||
Path("/path/to/config.yaml"),
|
||||
Path("/path/to/device.yml"),
|
||||
Path("/path/to/readme.txt"),
|
||||
Path("/path/to/script.py"),
|
||||
Path("/path/to/data.json"),
|
||||
Path("/path/to/another.yaml"),
|
||||
]
|
||||
|
||||
result = util.filter_yaml_files(files)
|
||||
|
||||
assert len(result) == 3
|
||||
assert "/path/to/config.yaml" in result
|
||||
assert "/path/to/device.yml" in result
|
||||
assert "/path/to/another.yaml" in result
|
||||
assert "/path/to/readme.txt" not in result
|
||||
assert "/path/to/script.py" not in result
|
||||
assert "/path/to/data.json" not in result
|
||||
assert Path("/path/to/config.yaml") in result
|
||||
assert Path("/path/to/device.yml") in result
|
||||
assert Path("/path/to/another.yaml") in result
|
||||
assert Path("/path/to/readme.txt") not in result
|
||||
assert Path("/path/to/script.py") not in result
|
||||
assert Path("/path/to/data.json") not in result
|
||||
|
||||
|
||||
def test_filter_yaml_files_excludes_secrets() -> None:
|
||||
"""Test that filter_yaml_files excludes secrets files."""
|
||||
files = [
|
||||
"/path/to/config.yaml",
|
||||
"/path/to/secrets.yaml",
|
||||
"/path/to/secrets.yml",
|
||||
"/path/to/device.yaml",
|
||||
"/some/dir/secrets.yaml",
|
||||
Path("/path/to/config.yaml"),
|
||||
Path("/path/to/secrets.yaml"),
|
||||
Path("/path/to/secrets.yml"),
|
||||
Path("/path/to/device.yaml"),
|
||||
Path("/some/dir/secrets.yaml"),
|
||||
]
|
||||
|
||||
result = util.filter_yaml_files(files)
|
||||
|
||||
assert len(result) == 2
|
||||
assert "/path/to/config.yaml" in result
|
||||
assert "/path/to/device.yaml" in result
|
||||
assert "/path/to/secrets.yaml" not in result
|
||||
assert "/path/to/secrets.yml" not in result
|
||||
assert "/some/dir/secrets.yaml" not in result
|
||||
assert Path("/path/to/config.yaml") in result
|
||||
assert Path("/path/to/device.yaml") in result
|
||||
assert Path("/path/to/secrets.yaml") not in result
|
||||
assert Path("/path/to/secrets.yml") not in result
|
||||
assert Path("/some/dir/secrets.yaml") not in result
|
||||
|
||||
|
||||
def test_filter_yaml_files_excludes_hidden() -> None:
|
||||
"""Test that filter_yaml_files excludes hidden files."""
|
||||
files = [
|
||||
"/path/to/config.yaml",
|
||||
"/path/to/.hidden.yaml",
|
||||
"/path/to/.backup.yml",
|
||||
"/path/to/device.yaml",
|
||||
"/some/dir/.config.yaml",
|
||||
Path("/path/to/config.yaml"),
|
||||
Path("/path/to/.hidden.yaml"),
|
||||
Path("/path/to/.backup.yml"),
|
||||
Path("/path/to/device.yaml"),
|
||||
Path("/some/dir/.config.yaml"),
|
||||
]
|
||||
|
||||
result = util.filter_yaml_files(files)
|
||||
|
||||
assert len(result) == 2
|
||||
assert "/path/to/config.yaml" in result
|
||||
assert "/path/to/device.yaml" in result
|
||||
assert "/path/to/.hidden.yaml" not in result
|
||||
assert "/path/to/.backup.yml" not in result
|
||||
assert "/some/dir/.config.yaml" not in result
|
||||
assert Path("/path/to/config.yaml") in result
|
||||
assert Path("/path/to/device.yaml") in result
|
||||
assert Path("/path/to/.hidden.yaml") not in result
|
||||
assert Path("/path/to/.backup.yml") not in result
|
||||
assert Path("/some/dir/.config.yaml") not in result
|
||||
|
||||
|
||||
def test_filter_yaml_files_case_sensitive() -> None:
|
||||
"""Test that filter_yaml_files is case-sensitive for extensions."""
|
||||
files = [
|
||||
"/path/to/config.yaml",
|
||||
"/path/to/config.YAML",
|
||||
"/path/to/config.YML",
|
||||
"/path/to/config.Yaml",
|
||||
"/path/to/config.yml",
|
||||
Path("/path/to/config.yaml"),
|
||||
Path("/path/to/config.YAML"),
|
||||
Path("/path/to/config.YML"),
|
||||
Path("/path/to/config.Yaml"),
|
||||
Path("/path/to/config.yml"),
|
||||
]
|
||||
|
||||
result = util.filter_yaml_files(files)
|
||||
|
||||
# Should only match lowercase .yaml and .yml
|
||||
assert len(result) == 2
|
||||
assert "/path/to/config.yaml" in result
|
||||
assert "/path/to/config.yml" in result
|
||||
assert "/path/to/config.YAML" not in result
|
||||
assert "/path/to/config.YML" not in result
|
||||
assert "/path/to/config.Yaml" not in result
|
||||
|
||||
# Check the actual suffixes to ensure case-sensitive filtering
|
||||
result_suffixes = [p.suffix for p in result]
|
||||
assert ".yaml" in result_suffixes
|
||||
assert ".yml" in result_suffixes
|
||||
|
||||
# Verify the filtered files have the expected names
|
||||
result_names = [p.name for p in result]
|
||||
assert "config.yaml" in result_names
|
||||
assert "config.yml" in result_names
|
||||
# Ensure uppercase extensions are NOT included
|
||||
assert "config.YAML" not in result_names
|
||||
assert "config.YML" not in result_names
|
||||
assert "config.Yaml" not in result_names
|
||||
|
||||
|
||||
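The hunks above move list_yaml_files and filter_yaml_files from plain string paths to pathlib.Path objects, so membership checks now compare Path values and the extension check goes through Path.suffix, which is an exact, case-sensitive comparison. A minimal sketch of the filtering behaviour these tests exercise (illustrative only; the real esphome.util.filter_yaml_files may be implemented differently):

    # Hypothetical sketch of the Path-based filter the tests above assume.
    from pathlib import Path

    def filter_yaml_files_sketch(files: list[Path]) -> list[Path]:
        # Keep non-hidden, non-secrets files whose suffix is exactly .yaml or .yml.
        return [
            f
            for f in files
            if f.suffix in (".yaml", ".yml")  # Path.suffix is case-sensitive, so .YAML is dropped
            and f.name not in ("secrets.yaml", "secrets.yml")
            and not f.name.startswith(".")  # hidden files are excluded
        ]
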
@pytest.mark.parametrize(

@@ -1,5 +1,5 @@
import json
import os
from pathlib import Path
from unittest.mock import Mock, patch

from esphome import vscode

@@ -45,7 +45,7 @@ RESULT_NO_ERROR = '{"type": "result", "yaml_errors": [], "validation_errors": []


def test_multi_file():
    source_path = os.path.join("dir_path", "x.yaml")
    source_path = str(Path("dir_path", "x.yaml"))
    output_lines = _run_repl_test(
        [
            _validate(source_path),
@@ -62,7 +62,7 @@ esp8266:

    expected_lines = [
        _read_file(source_path),
        _read_file(os.path.join("dir_path", "secrets.yaml")),
        _read_file(str(Path("dir_path", "secrets.yaml"))),
        RESULT_NO_ERROR,
    ]

@@ -70,7 +70,7 @@ esp8266:


def test_shows_correct_range_error():
    source_path = os.path.join("dir_path", "x.yaml")
    source_path = str(Path("dir_path", "x.yaml"))
    output_lines = _run_repl_test(
        [
            _validate(source_path),
@@ -98,7 +98,7 @@ esp8266:


def test_shows_correct_loaded_file_error():
    source_path = os.path.join("dir_path", "x.yaml")
    source_path = str(Path("dir_path", "x.yaml"))
    output_lines = _run_repl_test(
        [
            _validate(source_path),
@@ -121,7 +121,7 @@ packages:
    validation_error = error["validation_errors"][0]
    assert validation_error["message"].startswith("[broad] is an invalid option for")
    range = validation_error["range"]
    assert range["document"] == os.path.join("dir_path", ".pkg.esp8266.yaml")
    assert range["document"] == str(Path("dir_path", ".pkg.esp8266.yaml"))
    assert range["start_line"] == 1
    assert range["start_col"] == 2
    assert range["end_line"] == 1

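In the vscode tests, the REPL protocol still exchanges plain strings, so paths are built with pathlib but immediately converted back with str(). That works because str(Path("dir_path", "x.yaml")) and os.path.join("dir_path", "x.yaml") produce the same OS-native separator, for example:

    # Equivalence the vscode hunks above rely on (same result on POSIX and Windows).
    import os
    from pathlib import Path

    assert str(Path("dir_path", "x.yaml")) == os.path.join("dir_path", "x.yaml")
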
@@ -1,6 +1,5 @@
"""Tests for the wizard.py file."""

import os
from pathlib import Path
from typing import Any
from unittest.mock import MagicMock
@@ -127,7 +126,7 @@ def test_wizard_write_sets_platform(
    # Given
    del default_config["platform"]
    monkeypatch.setattr(wz, "write_file", MagicMock())
    monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path))
    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)

    # When
    wz.wizard_write(tmp_path, **default_config)
@@ -147,7 +146,7 @@ def test_wizard_empty_config(tmp_path: Path, monkeypatch: MonkeyPatch):
        "name": "test-empty",
    }
    monkeypatch.setattr(wz, "write_file", MagicMock())
    monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path))
    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)

    # When
    wz.wizard_write(tmp_path, **empty_config)
@@ -168,7 +167,7 @@ def test_wizard_upload_config(tmp_path: Path, monkeypatch: MonkeyPatch):
        "file_text": "# imported file 📁\n\n",
    }
    monkeypatch.setattr(wz, "write_file", MagicMock())
    monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path))
    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)

    # When
    wz.wizard_write(tmp_path, **empty_config)
@@ -189,7 +188,7 @@ def test_wizard_write_defaults_platform_from_board_esp8266(
    default_config["board"] = [*ESP8266_BOARD_PINS][0]

    monkeypatch.setattr(wz, "write_file", MagicMock())
    monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path))
    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)

    # When
    wz.wizard_write(tmp_path, **default_config)
@@ -210,7 +209,7 @@ def test_wizard_write_defaults_platform_from_board_esp32(
    default_config["board"] = [*ESP32_BOARD_PINS][0]

    monkeypatch.setattr(wz, "write_file", MagicMock())
    monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path))
    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)

    # When
    wz.wizard_write(tmp_path, **default_config)
@@ -231,7 +230,7 @@ def test_wizard_write_defaults_platform_from_board_bk72xx(
    default_config["board"] = [*BK72XX_BOARD_PINS][0]

    monkeypatch.setattr(wz, "write_file", MagicMock())
    monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path))
    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)

    # When
    wz.wizard_write(tmp_path, **default_config)
@@ -252,7 +251,7 @@ def test_wizard_write_defaults_platform_from_board_ln882x(
    default_config["board"] = [*LN882X_BOARD_PINS][0]

    monkeypatch.setattr(wz, "write_file", MagicMock())
    monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path))
    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)

    # When
    wz.wizard_write(tmp_path, **default_config)
@@ -273,7 +272,7 @@ def test_wizard_write_defaults_platform_from_board_rtl87xx(
    default_config["board"] = [*RTL87XX_BOARD_PINS][0]

    monkeypatch.setattr(wz, "write_file", MagicMock())
    monkeypatch.setattr(CORE, "config_path", os.path.dirname(tmp_path))
    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)

    # When
    wz.wizard_write(tmp_path, **default_config)
@@ -362,7 +361,7 @@ def test_wizard_rejects_path_with_invalid_extension():
    """

    # Given
    config_file = "test.json"
    config_file = Path("test.json")

    # When
    retval = wz.wizard(config_file)
@@ -371,31 +370,31 @@ def test_wizard_rejects_path_with_invalid_extension():
    assert retval == 1


def test_wizard_rejects_existing_files(tmpdir):
def test_wizard_rejects_existing_files(tmp_path):
    """
    The wizard should reject any configuration file that already exists
    """

    # Given
    config_file = tmpdir.join("test.yaml")
    config_file.write("")
    config_file = tmp_path / "test.yaml"
    config_file.write_text("")

    # When
    retval = wz.wizard(str(config_file))
    retval = wz.wizard(config_file)

    # Then
    assert retval == 2


def test_wizard_accepts_default_answers_esp8266(
    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
    tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
    """
    The wizard should accept the given default answers for esp8266
    """

    # Given
    config_file = tmpdir.join("test.yaml")
    config_file = tmp_path / "test.yaml"
    input_mock = MagicMock(side_effect=wizard_answers)
    monkeypatch.setattr("builtins.input", input_mock)
    monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0)
@@ -403,14 +402,14 @@ def test_wizard_accepts_default_answers_esp8266(
    monkeypatch.setattr(wz, "wizard_write", MagicMock())

    # When
    retval = wz.wizard(str(config_file))
    retval = wz.wizard(config_file)

    # Then
    assert retval == 0


def test_wizard_accepts_default_answers_esp32(
    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
    tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
    """
    The wizard should accept the given default answers for esp32
@@ -419,7 +418,7 @@ def test_wizard_accepts_default_answers_esp32(
    # Given
    wizard_answers[1] = "ESP32"
    wizard_answers[2] = "nodemcu-32s"
    config_file = tmpdir.join("test.yaml")
    config_file = tmp_path / "test.yaml"
    input_mock = MagicMock(side_effect=wizard_answers)
    monkeypatch.setattr("builtins.input", input_mock)
    monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0)
@@ -427,14 +426,14 @@ def test_wizard_accepts_default_answers_esp32(
    monkeypatch.setattr(wz, "wizard_write", MagicMock())

    # When
    retval = wz.wizard(str(config_file))
    retval = wz.wizard(config_file)

    # Then
    assert retval == 0


def test_wizard_offers_better_node_name(
    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
    tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
    """
    When the node name does not conform, a better alternative is offered
@@ -451,7 +450,7 @@ def test_wizard_offers_better_node_name(
        wz, "default_input", MagicMock(side_effect=lambda _, default: default)
    )

    config_file = tmpdir.join("test.yaml")
    config_file = tmp_path / "test.yaml"
    input_mock = MagicMock(side_effect=wizard_answers)
    monkeypatch.setattr("builtins.input", input_mock)
    monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0)
@@ -459,7 +458,7 @@ def test_wizard_offers_better_node_name(
    monkeypatch.setattr(wz, "wizard_write", MagicMock())

    # When
    retval = wz.wizard(str(config_file))
    retval = wz.wizard(config_file)

    # Then
    assert retval == 0
@@ -467,7 +466,7 @@ def test_wizard_offers_better_node_name(


def test_wizard_requires_correct_platform(
    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
    tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
    """
    When the platform is not either esp32 or esp8266, the wizard should reject it
@@ -476,7 +475,7 @@ def test_wizard_requires_correct_platform(
    # Given
    wizard_answers.insert(1, "foobar")  # add invalid entry for platform

    config_file = tmpdir.join("test.yaml")
    config_file = tmp_path / "test.yaml"
    input_mock = MagicMock(side_effect=wizard_answers)
    monkeypatch.setattr("builtins.input", input_mock)
    monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0)
@@ -484,14 +483,14 @@ def test_wizard_requires_correct_platform(
    monkeypatch.setattr(wz, "wizard_write", MagicMock())

    # When
    retval = wz.wizard(str(config_file))
    retval = wz.wizard(config_file)

    # Then
    assert retval == 0


def test_wizard_requires_correct_board(
    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
    tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
    """
    When the board is not a valid esp8266 board, the wizard should reject it
@@ -500,7 +499,7 @@ def test_wizard_requires_correct_board(
    # Given
    wizard_answers.insert(2, "foobar")  # add an invalid entry for board

    config_file = tmpdir.join("test.yaml")
    config_file = tmp_path / "test.yaml"
    input_mock = MagicMock(side_effect=wizard_answers)
    monkeypatch.setattr("builtins.input", input_mock)
    monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0)
@@ -508,14 +507,14 @@ def test_wizard_requires_correct_board(
    monkeypatch.setattr(wz, "wizard_write", MagicMock())

    # When
    retval = wz.wizard(str(config_file))
    retval = wz.wizard(config_file)

    # Then
    assert retval == 0


def test_wizard_requires_valid_ssid(
    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
    tmp_path: Path, monkeypatch: MonkeyPatch, wizard_answers: list[str]
):
    """
    When the board is not a valid esp8266 board, the wizard should reject it
@@ -524,7 +523,7 @@ def test_wizard_requires_valid_ssid(
    # Given
    wizard_answers.insert(3, "")  # add an invalid entry for ssid

    config_file = tmpdir.join("test.yaml")
    config_file = tmp_path / "test.yaml"
    input_mock = MagicMock(side_effect=wizard_answers)
    monkeypatch.setattr("builtins.input", input_mock)
    monkeypatch.setattr(wz, "safe_print", lambda t=None, end=None: 0)
@@ -532,28 +531,28 @@ def test_wizard_requires_valid_ssid(
    monkeypatch.setattr(wz, "wizard_write", MagicMock())

    # When
    retval = wz.wizard(str(config_file))
    retval = wz.wizard(config_file)

    # Then
    assert retval == 0


def test_wizard_write_protects_existing_config(
    tmpdir, default_config: dict[str, Any], monkeypatch: MonkeyPatch
    tmp_path: Path, default_config: dict[str, Any], monkeypatch: MonkeyPatch
):
    """
    The wizard_write function should not overwrite existing config files and return False
    """
    # Given
    config_file = tmpdir.join("test.yaml")
    config_file = tmp_path / "test.yaml"
    original_content = "# Original config content\n"
    config_file.write(original_content)
    config_file.write_text(original_content)

    monkeypatch.setattr(CORE, "config_path", str(tmpdir))
    monkeypatch.setattr(CORE, "config_path", tmp_path.parent)

    # When
    result = wz.wizard_write(str(config_file), **default_config)
    result = wz.wizard_write(config_file, **default_config)

    # Then
    assert result is False  # Should return False when file exists
    assert config_file.read() == original_content
    assert config_file.read_text() == original_content

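Besides the str-to-Path change, the wizard tests above drop pytest's legacy tmpdir fixture (a py.path.local object with .join() and .write()) in favour of tmp_path, which is a pathlib.Path; files are then created and read with the pathlib API. A minimal sketch of the idiom, assuming a standard pytest setup:

    # Sketch of the tmp_path idiom used in the rewritten wizard tests.
    from pathlib import Path

    def test_example(tmp_path: Path) -> None:
        config_file = tmp_path / "test.yaml"  # already a Path, no str() wrapper needed
        config_file.write_text("# contents\n")  # replaces tmpdir.join("test.yaml").write(...)
        assert config_file.read_text() == "# contents\n"
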
@@ -257,10 +257,7 @@ def test_clean_cmake_cache(
    cmake_cache_file.write_text("# CMake cache file")

    # Setup mocks
    mock_core.relative_pioenvs_path.side_effect = [
        str(pioenvs_dir),  # First call for directory check
        str(cmake_cache_file),  # Second call for file path
    ]
    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
    mock_core.name = "test_device"

    # Verify file exists before
@@ -288,7 +285,7 @@ def test_clean_cmake_cache_no_pioenvs_dir(
    pioenvs_dir = tmp_path / ".pioenvs"

    # Setup mocks
    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
    mock_core.relative_pioenvs_path.return_value = pioenvs_dir

    # Verify directory doesn't exist
    assert not pioenvs_dir.exists()
@@ -314,10 +311,7 @@ def test_clean_cmake_cache_no_cmake_file(
    cmake_cache_file = device_dir / "CMakeCache.txt"

    # Setup mocks
    mock_core.relative_pioenvs_path.side_effect = [
        str(pioenvs_dir),  # First call for directory check
        str(cmake_cache_file),  # Second call for file path
    ]
    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
    mock_core.name = "test_device"

    # Verify file doesn't exist
@@ -358,9 +352,9 @@ def test_clean_build(
    (platformio_cache_dir / "downloads" / "package.tar.gz").write_text("package")

    # Setup mocks
    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
    mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
    mock_core.relative_build_path.return_value = str(dependencies_lock)
    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
    mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
    mock_core.relative_build_path.return_value = dependencies_lock
    mock_core.platformio_cache_dir = str(platformio_cache_dir)

    # Verify all exist before
@@ -409,9 +403,9 @@ def test_clean_build_partial_exists(
    dependencies_lock = tmp_path / "dependencies.lock"

    # Setup mocks
    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
    mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
    mock_core.relative_build_path.return_value = str(dependencies_lock)
    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
    mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
    mock_core.relative_build_path.return_value = dependencies_lock

    # Verify only pioenvs exists
    assert pioenvs_dir.exists()
@@ -446,9 +440,9 @@ def test_clean_build_nothing_exists(
    dependencies_lock = tmp_path / "dependencies.lock"

    # Setup mocks
    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
    mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
    mock_core.relative_build_path.return_value = str(dependencies_lock)
    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
    mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
    mock_core.relative_build_path.return_value = dependencies_lock

    # Verify nothing exists
    assert not pioenvs_dir.exists()
@@ -482,9 +476,9 @@ def test_clean_build_platformio_not_available(
    dependencies_lock.write_text("lock file")

    # Setup mocks
    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
    mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
    mock_core.relative_build_path.return_value = str(dependencies_lock)
    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
    mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
    mock_core.relative_build_path.return_value = dependencies_lock

    # Verify all exist before
    assert pioenvs_dir.exists()
@@ -520,9 +514,9 @@ def test_clean_build_empty_cache_dir(
    pioenvs_dir.mkdir()

    # Setup mocks
    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
    mock_core.relative_piolibdeps_path.return_value = str(tmp_path / ".piolibdeps")
    mock_core.relative_build_path.return_value = str(tmp_path / "dependencies.lock")
    mock_core.relative_pioenvs_path.return_value = pioenvs_dir
    mock_core.relative_piolibdeps_path.return_value = tmp_path / ".piolibdeps"
    mock_core.relative_build_path.return_value = tmp_path / "dependencies.lock"

    # Verify pioenvs exists before
    assert pioenvs_dir.exists()
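The clean-build tests now have the mocked CORE helpers (relative_pioenvs_path, relative_piolibdeps_path, relative_build_path) return Path objects directly instead of strings, and the two-call side_effect lists collapse into a single return_value. A short sketch of the pattern with unittest.mock; the helper names follow the tests above and the paths are placeholders:

    # Sketch: a mock that hands back Path objects, as in the updated tests.
    from pathlib import Path
    from unittest.mock import Mock

    mock_core = Mock()
    mock_core.relative_pioenvs_path.return_value = Path("/tmp/build/.pioenvs")

    assert isinstance(mock_core.relative_pioenvs_path(), Path)
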
@@ -553,7 +547,7 @@ def test_write_gitignore_creates_new_file(
    gitignore_path = tmp_path / ".gitignore"

    # Setup mocks
    mock_core.relative_config_path.return_value = str(gitignore_path)
    mock_core.relative_config_path.return_value = gitignore_path

    # Verify file doesn't exist
    assert not gitignore_path.exists()
@@ -577,7 +571,7 @@ def test_write_gitignore_skips_existing_file(
    gitignore_path.write_text(existing_content)

    # Setup mocks
    mock_core.relative_config_path.return_value = str(gitignore_path)
    mock_core.relative_config_path.return_value = gitignore_path

    # Verify file exists with custom content
    assert gitignore_path.exists()
@@ -616,7 +610,7 @@ void loop() {{}}"""
    main_cpp.write_text(existing_content)

    # Setup mocks
    mock_core.relative_src_path.return_value = str(main_cpp)
    mock_core.relative_src_path.return_value = main_cpp
    mock_core.cpp_global_section = "// Global section"

    # Call the function
@@ -653,7 +647,7 @@ def test_write_cpp_creates_new_file(
    main_cpp = tmp_path / "main.cpp"

    # Setup mocks
    mock_core.relative_src_path.return_value = str(main_cpp)
    mock_core.relative_src_path.return_value = main_cpp
    mock_core.cpp_global_section = "// Global section"

    # Verify file doesn't exist
@@ -669,7 +663,7 @@ def test_write_cpp_creates_new_file(
    # Get the content that would be written
    mock_write_file.assert_called_once()
    written_path, written_content = mock_write_file.call_args[0]
    assert written_path == str(main_cpp)
    assert written_path == main_cpp

    # Check that all necessary parts are in the new file
    assert '#include "esphome.h"' in written_content
@@ -699,7 +693,7 @@ def test_write_cpp_with_missing_end_marker(
    main_cpp.write_text(existing_content)

    # Setup mocks
    mock_core.relative_src_path.return_value = str(main_cpp)
    mock_core.relative_src_path.return_value = main_cpp

    # Call should raise an error
    with pytest.raises(EsphomeError, match="Could not find auto generated code end"):
@@ -725,7 +719,7 @@ def test_write_cpp_with_duplicate_markers(
    main_cpp.write_text(existing_content)

    # Setup mocks
    mock_core.relative_src_path.return_value = str(main_cpp)
    mock_core.relative_src_path.return_value = main_cpp

    # Call should raise an error
    with pytest.raises(EsphomeError, match="Found multiple auto generate code begins"):

@@ -67,18 +67,18 @@ def test_parsing_with_custom_loader(fixture_path):
    """
    yaml_file = fixture_path / "yaml_util" / "includetest.yaml"

    loader_calls = []
    loader_calls: list[Path] = []

    def custom_loader(fname):
    def custom_loader(fname: Path):
        loader_calls.append(fname)

    with open(yaml_file, encoding="utf-8") as f_handle:
    with yaml_file.open(encoding="utf-8") as f_handle:
        yaml_util.parse_yaml(yaml_file, f_handle, custom_loader)

    assert len(loader_calls) == 3
    assert loader_calls[0].endswith("includes/included.yaml")
    assert loader_calls[1].endswith("includes/list.yaml")
    assert loader_calls[2].endswith("includes/scalar.yaml")
    assert loader_calls[0].parts[-2:] == ("includes", "included.yaml")
    assert loader_calls[1].parts[-2:] == ("includes", "list.yaml")
    assert loader_calls[2].parts[-2:] == ("includes", "scalar.yaml")


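With the custom loader now receiving Path objects, the trailing-path assertions switch from str.endswith("includes/...") to comparing the last two elements of Path.parts, which does not depend on the platform's separator. For example:

    # Why Path.parts is used instead of endswith() in the assertions above.
    from pathlib import Path

    p = Path("fixtures") / "yaml_util" / "includes" / "included.yaml"
    assert p.parts[-2:] == ("includes", "included.yaml")  # holds on POSIX and Windows
    # str(p).endswith("includes/included.yaml") would fail on Windows, where the separator is "\".
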
def test_construct_secret_simple(fixture_path: Path) -> None:
@@ -110,7 +110,7 @@ wifi:
    secrets_yaml.write_text("some_other_secret: value")

    with pytest.raises(EsphomeError, match="Secret 'nonexistent_secret' not defined"):
        yaml_util.load_yaml(str(test_yaml))
        yaml_util.load_yaml(test_yaml)


def test_construct_secret_no_secrets_file(tmp_path: Path) -> None:
@@ -124,10 +124,10 @@ wifi:

    # Mock CORE.config_path to avoid NoneType error
    with (
        patch.object(core.CORE, "config_path", str(tmp_path / "main.yaml")),
        patch.object(core.CORE, "config_path", tmp_path / "main.yaml"),
        pytest.raises(EsphomeError, match="secrets.yaml"),
    ):
        yaml_util.load_yaml(str(test_yaml))
        yaml_util.load_yaml(test_yaml)


def test_construct_secret_fallback_to_main_config_dir(
@@ -149,8 +149,8 @@ wifi:
    main_secrets.write_text("test_secret: main_secret_value")

    # Mock CORE.config_path to point to main directory
    with patch.object(core.CORE, "config_path", str(tmp_path / "main.yaml")):
        actual = yaml_util.load_yaml(str(test_yaml))
    with patch.object(core.CORE, "config_path", tmp_path / "main.yaml"):
        actual = yaml_util.load_yaml(test_yaml)
    assert actual["wifi"]["password"] == "main_secret_value"


@@ -167,7 +167,7 @@ def test_construct_include_dir_named(fixture_path: Path, tmp_path: Path) -> None
sensor: !include_dir_named named_dir
""")

    actual = yaml_util.load_yaml(str(test_yaml))
    actual = yaml_util.load_yaml(test_yaml)
    actual_sensor = actual["sensor"]

    # Check that files were loaded with their names as keys
@@ -202,7 +202,7 @@ def test_construct_include_dir_named_empty_dir(tmp_path: Path) -> None:
sensor: !include_dir_named empty_dir
""")

    actual = yaml_util.load_yaml(str(test_yaml))
    actual = yaml_util.load_yaml(test_yaml)

    # Should return empty OrderedDict
    assert isinstance(actual["sensor"], OrderedDict)
@@ -234,7 +234,7 @@ def test_construct_include_dir_named_with_dots(tmp_path: Path) -> None:
test: !include_dir_named test_dir
""")

    actual = yaml_util.load_yaml(str(test_yaml))
    actual = yaml_util.load_yaml(test_yaml)

    # Should only include visible file
    assert "visible" in actual["test"]
@@ -258,7 +258,7 @@ def test_find_files_recursive(fixture_path: Path, tmp_path: Path) -> None:
all_sensors: !include_dir_named named_dir
""")

    actual = yaml_util.load_yaml(str(test_yaml))
    actual = yaml_util.load_yaml(test_yaml)

    # Should find sensor1.yaml, sensor2.yaml, and subdir/sensor3.yaml (all flattened)
    assert len(actual["all_sensors"]) == 3
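Throughout the yaml_util hunks, the str() wrapper around load_yaml's argument disappears, since the loader now accepts a Path directly. Assuming the single-argument signature shown in the diffs, a call now looks like this (the file path is a hypothetical placeholder, and a secrets.yaml lookup may still apply):

    # Sketch of calling load_yaml with a Path, per the hunks above.
    from pathlib import Path
    from esphome import yaml_util

    config = yaml_util.load_yaml(Path("/config/test.yaml"))  # hypothetical file path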