Mirror of https://github.com/esphome/esphome.git (synced 2025-10-12 23:03:46 +01:00)
Group component tests to reduce CI time (#11134)
@@ -186,6 +186,11 @@ This document provides essential context for AI models interacting with this project
     └── components/[component]/  # Component-specific tests
     ```
   Run them using `script/test_build_components`. Use `-c <component>` to test specific components and `-t <target>` for specific platforms.
+* **Testing All Components Together:** To verify that all components can be tested together without ID conflicts or configuration issues, use:
+  ```bash
+  ./script/test_component_grouping.py -e config --all
+  ```
+  This tests all components in a single build to catch conflicts that might not appear when testing components individually. Use `-e config` for fast configuration validation, or `-e compile` for full compilation testing.
 * **Debugging and Troubleshooting:**
     * **Debug Tools:**
         - `esphome config <file>.yaml` to validate configuration.
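The same grouped validation that the CI jobs below run can also be driven locally from a small helper. A minimal sketch, assuming the `-e`/`-c` flags used in ci.yml below; the component names are placeholders:

```python
#!/usr/bin/env python3
"""Hedged sketch: run grouped config validation for a few components locally."""
import subprocess
import sys

# Hypothetical component selection; replace with real component names.
components = ["bme280", "sht3xd", "ads1115"]

# -e selects config vs compile, -c takes a comma-separated list (see ci.yml below).
cmd = [
    sys.executable,
    "script/test_build_components.py",
    "-e", "config",
    "-c", ",".join(components),
]
sys.exit(subprocess.run(cmd, check=False).returncode)
```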
.github/workflows/ci.yml (vendored, 96 changed lines)

@@ -369,10 +369,11 @@ jobs:
       matrix:
         file: ${{ fromJson(needs.determine-jobs.outputs.changed-components) }}
     steps:
-      - name: Install dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsdl2-dev
+      - name: Cache apt packages
+        uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
+        with:
+          packages: libsdl2-dev
+          version: 1.0

       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -381,17 +382,17 @@ jobs:
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           cache-key: ${{ needs.common.outputs.cache-key }}
-      - name: test_build_components -e config -c ${{ matrix.file }}
+      - name: Validate config for ${{ matrix.file }}
         run: |
           . venv/bin/activate
-          ./script/test_build_components -e config -c ${{ matrix.file }}
+          python3 script/test_build_components.py -e config -c ${{ matrix.file }}
-      - name: test_build_components -e compile -c ${{ matrix.file }}
+      - name: Compile config for ${{ matrix.file }}
         run: |
           . venv/bin/activate
-          ./script/test_build_components -e compile -c ${{ matrix.file }}
+          python3 script/test_build_components.py -e compile -c ${{ matrix.file }}

   test-build-components-splitter:
-    name: Split components for testing into 10 components per group
+    name: Split components for intelligent grouping (40 weighted per batch)
     runs-on: ubuntu-24.04
     needs:
       - common
@@ -402,14 +403,26 @@ jobs:
     steps:
       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - name: Split components into groups of 10
+      - name: Restore Python
+        uses: ./.github/actions/restore-python
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON }}
+          cache-key: ${{ needs.common.outputs.cache-key }}
+      - name: Split components intelligently based on bus configurations
         id: split
         run: |
-          components=$(echo '${{ needs.determine-jobs.outputs.changed-components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(10) | join(" ")]')
-          echo "components=$components" >> $GITHUB_OUTPUT
+          . venv/bin/activate
+
+          # Use intelligent splitter that groups components with same bus configs
+          components='${{ needs.determine-jobs.outputs.changed-components }}'
+
+          echo "Splitting components intelligently..."
+          output=$(python3 script/split_components_for_ci.py --components "$components" --batch-size 40 --output github)
+
+          echo "$output" >> $GITHUB_OUTPUT

   test-build-components-split:
-    name: Test split components
+    name: Test components batch (${{ matrix.components }})
     runs-on: ubuntu-24.04
     needs:
       - common
@@ -418,17 +431,23 @@ jobs:
     if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
     strategy:
       fail-fast: false
-      max-parallel: 4
+      max-parallel: 5
       matrix:
         components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
     steps:
+      - name: Show disk space
+        run: |
+          echo "Available disk space:"
+          df -h
+
       - name: List components
         run: echo ${{ matrix.components }}

-      - name: Install dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsdl2-dev
+      - name: Cache apt packages
+        uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
+        with:
+          packages: libsdl2-dev
+          version: 1.0

       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -437,20 +456,37 @@ jobs:
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           cache-key: ${{ needs.common.outputs.cache-key }}
-      - name: Validate config
+      - name: Validate and compile components with intelligent grouping
         run: |
           . venv/bin/activate
-          for component in ${{ matrix.components }}; do
-            ./script/test_build_components -e config -c $component
-          done
-      - name: Compile config
-        run: |
-          . venv/bin/activate
-          mkdir build_cache
-          export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
-          for component in ${{ matrix.components }}; do
-            ./script/test_build_components -e compile -c $component
-          done
+          # Use /mnt for build files (70GB available vs ~29GB on /)
+          # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
+          sudo mkdir -p /mnt/platformio
+          sudo chown $USER:$USER /mnt/platformio
+          mkdir -p ~/.platformio
+          sudo mount --bind /mnt/platformio ~/.platformio
+
+          # Bind mount test build directory to /mnt
+          sudo mkdir -p /mnt/test_build_components_build
+          sudo chown $USER:$USER /mnt/test_build_components_build
+          mkdir -p tests/test_build_components/build
+          sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
+
+          # Convert space-separated components to comma-separated for Python script
+          components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')
+
+          echo "Testing components: $components_csv"
+          echo ""
+
+          # Run config validation with grouping
+          python3 script/test_build_components.py -e config -c "$components_csv" -f
+
+          echo ""
+          echo "Config validation passed! Starting compilation..."
+          echo ""
+
+          # Run compilation with grouping
+          python3 script/test_build_components.py -e compile -c "$components_csv" -f

   pre-commit-ci-lite:
     name: pre-commit.ci lite
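The splitter job name above refers to "40 weighted per batch". A minimal sketch of that weighting rule, taken from script/split_components_for_ci.py further down (isolated components weigh 10, groupable ones weigh 1); the component names here are made up:

```python
"""Hedged sketch of the weighted batching used by split_components_for_ci.py."""

ISOLATED_WEIGHT = 10   # components that cannot be grouped/merged
GROUPABLE_WEIGHT = 1   # components that can share a single merged build
BATCH_SIZE = 40        # target weight per CI batch


def weighted_batches(components: list[tuple[str, bool]]) -> list[list[str]]:
    """Pack (name, is_isolated) pairs into batches of at most BATCH_SIZE weight."""
    batches: list[list[str]] = []
    current: list[str] = []
    weight = 0
    for name, is_isolated in components:
        w = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT
        if weight + w > BATCH_SIZE and current:
            batches.append(current)
            current, weight = [], 0
        current.append(name)
        weight += w
    if current:
        batches.append(current)
    return batches


# Example: 3 isolated (weight 30) + 25 groupable (weight 25) = 55 -> two batches.
example = [(f"isolated_{i}", True) for i in range(3)] + [
    (f"sensor_{i}", False) for i in range(25)
]
print([len(b) for b in weighted_batches(example)])  # [13, 15]
```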
@@ -1002,6 +1002,12 @@ def parse_args(argv):
         action="append",
         default=[],
     )
+    options_parser.add_argument(
+        "--testing-mode",
+        help="Enable testing mode (disables validation checks for grouped component testing)",
+        action="store_true",
+        default=False,
+    )

     parser = argparse.ArgumentParser(
         description=f"ESPHome {const.__version__}", parents=[options_parser]
@@ -1260,6 +1266,7 @@ def run_esphome(argv):

     args = parse_args(argv)
     CORE.dashboard = args.dashboard
+    CORE.testing_mode = args.testing_mode

     # Create address cache from command-line arguments
     CORE.address_cache = AddressCache.from_cli_args(
@@ -285,6 +285,10 @@ def consume_connection_slots(

 def validate_connection_slots(max_connections: int) -> None:
     """Validate that BLE connection slots don't exceed the configured maximum."""
+    # Skip validation in testing mode to allow component grouping
+    if CORE.testing_mode:
+        return
+
     ble_data = CORE.data.get(KEY_ESP32_BLE, {})
     used_slots = ble_data.get(KEY_USED_CONNECTION_SLOTS, [])
     num_used = len(used_slots)
@@ -347,7 +347,7 @@ def final_validate_device_schema(

     def validate_pin(opt, device):
         def validator(value):
-            if opt in device:
+            if opt in device and not CORE.testing_mode:
                 raise cv.Invalid(
                     f"The uart {opt} is used both by {name} and {device[opt]}, "
                     f"but can only be used by one. Please create a new uart bus for {name}."
@@ -529,6 +529,8 @@ class EsphomeCore:
         self.dashboard = False
         # True if command is run from vscode api
         self.vscode = False
+        # True if running in testing mode (disables validation checks for grouped testing)
+        self.testing_mode = False
         # The name of the node
         self.name: str | None = None
         # The friendly name of the node
@@ -246,6 +246,9 @@ def entity_duplicate_validator(platform: str) -> Callable[[ConfigType], ConfigType]:
             "\n to distinguish them"
         )

+        # Skip duplicate entity name validation when testing_mode is enabled
+        # This flag is used for grouped component testing
+        if not CORE.testing_mode:
             raise cv.Invalid(
                 f"Duplicate {platform} entity with name '{entity_name}' found{device_prefix}. "
                 f"{conflict_msg}. "
@@ -118,11 +118,11 @@ class PinRegistry(dict):
             parent_config = fconf.get_config_for_path(parent_path)
             final_val_fun(pin_config, parent_config)
         allow_others = pin_config.get(CONF_ALLOW_OTHER_USES, False)
-        if count != 1 and not allow_others:
+        if count != 1 and not allow_others and not CORE.testing_mode:
             raise cv.Invalid(
                 f"Pin {pin_config[CONF_NUMBER]} is used in multiple places"
             )
-        if count == 1 and allow_others:
+        if count == 1 and allow_others and not CORE.testing_mode:
             raise cv.Invalid(
                 f"Pin {pin_config[CONF_NUMBER]} incorrectly sets {CONF_ALLOW_OTHER_USES}: true"
             )
@@ -5,6 +5,7 @@ import os
 from pathlib import Path
 import re
 import subprocess
+from typing import Any

 from esphome.const import CONF_COMPILE_PROCESS_LIMIT, CONF_ESPHOME, KEY_CORE
 from esphome.core import CORE, EsphomeError
@@ -42,6 +43,35 @@ def patch_structhash():
     cli.clean_build_dir = patched_clean_build_dir


+def patch_file_downloader():
+    """Patch PlatformIO's FileDownloader to retry on PackageException errors."""
+    from platformio.package.download import FileDownloader
+    from platformio.package.exception import PackageException
+
+    original_init = FileDownloader.__init__
+
+    def patched_init(self, *args: Any, **kwargs: Any) -> None:
+        max_retries = 3
+
+        for attempt in range(max_retries):
+            try:
+                return original_init(self, *args, **kwargs)
+            except PackageException as e:
+                if attempt < max_retries - 1:
+                    _LOGGER.warning(
+                        "Package download failed: %s. Retrying... (attempt %d/%d)",
+                        str(e),
+                        attempt + 1,
+                        max_retries,
+                    )
+                else:
+                    # Final attempt - re-raise
+                    raise
+        return None
+
+    FileDownloader.__init__ = patched_init
+
+
 IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
 FILTER_PLATFORMIO_LINES = [
     r"Verbose mode can be enabled via `-v, --verbose` option.*",
@@ -99,6 +129,7 @@ def run_platformio_cli(*args, **kwargs) -> str | int:
     import platformio.__main__

     patch_structhash()
+    patch_file_downloader()
     return run_external_command(platformio.__main__.main, *cmd, **kwargs)

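The download patch above wraps FileDownloader.__init__ in a bounded retry loop. The same pattern in isolation, as a small generic helper; this is only an illustrative sketch, not part of the change:

```python
"""Hedged sketch: bounded retry around a flaky callable, mirroring patch_file_downloader."""
import logging
from typing import Callable, TypeVar

T = TypeVar("T")
_LOGGER = logging.getLogger(__name__)


def retry(func: Callable[[], T], exc_type: type, max_retries: int = 3) -> T:
    """Call func(), retrying up to max_retries times on exc_type."""
    for attempt in range(max_retries):
        try:
            return func()
        except exc_type as err:
            if attempt < max_retries - 1:
                _LOGGER.warning("Attempt %d/%d failed: %s", attempt + 1, max_retries, err)
            else:
                raise  # final attempt: propagate
    raise AssertionError("unreachable")
```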
script/analyze_component_buses.py (new executable file, 523 lines)

@@ -0,0 +1,523 @@
#!/usr/bin/env python3
"""Analyze component test files to detect which common bus configs they use.

This script scans component test files and extracts which common bus configurations
(i2c, spi, uart, etc.) are included via the packages mechanism. This information
is used to group components that can be tested together.

Components can only be grouped together if they use the EXACT SAME set of common
bus configurations, ensuring that merged configs are compatible.

Example output:
    {
        "component1": {
            "esp32-ard": ["i2c", "uart_19200"],
            "esp32-idf": ["i2c", "uart_19200"]
        },
        "component2": {
            "esp32-ard": ["spi"],
            "esp32-idf": ["spi"]
        }
    }
"""

from __future__ import annotations

import argparse
from functools import lru_cache
import json
from pathlib import Path
import re
import sys
from typing import Any

# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from esphome import yaml_util
from esphome.config_helpers import Extend, Remove

# Path to common bus configs
COMMON_BUS_PATH = Path("tests/test_build_components/common")

# Package dependencies - maps packages to the packages they include
# When a component uses a package on the left, it automatically gets
# the packages on the right as well
PACKAGE_DEPENDENCIES = {
    "modbus": ["uart"],  # modbus packages include uart packages
    # Add more package dependencies here as needed
}

# Bus types that can be defined directly in config files
# Components defining these directly cannot be grouped (they create unique bus IDs)
DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus")

# Signature for components with no bus requirements
# These components can be merged with any other group
NO_BUSES_SIGNATURE = "no_buses"

# Base bus components - these ARE the bus implementations and should not
# be flagged as needing migration since they are the platform/base components
BASE_BUS_COMPONENTS = {
    "i2c",
    "spi",
    "uart",
    "modbus",
    "canbus",
}

# Components that must be tested in isolation (not grouped or batched with others)
# These have known build issues that prevent grouping
# NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py
ISOLATED_COMPONENTS = {
    "animation": "Has display lambda in common.yaml that requires existing display platform - breaks when merged without display",
    "esphome": "Defines devices/areas in esphome: section that are referenced in other sections - breaks when merged",
    "ethernet": "Defines ethernet: which conflicts with wifi: used by most components",
    "ethernet_info": "Related to ethernet component which conflicts with wifi",
    "lvgl": "Defines multiple SDL displays on host platform that conflict when merged with other display configs",
    "matrix_keypad": "Needs isolation due to keypad",
    "mcp4725": "no YAML config to specify i2c bus id",
    "mcp47a1": "no YAML config to specify i2c bus id",
    "modbus_controller": "Defines multiple modbus buses for testing client/server functionality - conflicts with package modbus bus",
    "neopixelbus": "RMT type conflict with ESP32 Arduino/ESP-IDF headers (enum vs struct rmt_channel_t)",
    "packages": "cannot merge packages",
}


@lru_cache(maxsize=1)
def get_common_bus_packages() -> frozenset[str]:
    """Get the list of common bus package names.

    Reads from tests/test_build_components/common/ directory
    and caches the result. All bus types support component grouping
    for config validation since --testing-mode bypasses runtime conflicts.

    Returns:
        Frozenset of common bus package names (i2c, spi, uart, etc.)
    """
    if not COMMON_BUS_PATH.exists():
        return frozenset()

    # List all directories in common/ - these are the bus package names
    return frozenset(d.name for d in COMMON_BUS_PATH.iterdir() if d.is_dir())


def uses_local_file_references(component_dir: Path) -> bool:
    """Check if a component uses local file references via $component_dir.

    Components that reference local files cannot be grouped because each needs
    a unique component_dir path pointing to their specific directory.

    Args:
        component_dir: Path to the component's test directory

    Returns:
        True if the component uses $component_dir for local file references
    """
    common_yaml = component_dir / "common.yaml"
    if not common_yaml.exists():
        return False

    try:
        content = common_yaml.read_text()
    except Exception:  # pylint: disable=broad-exception-caught
        return False

    # Pattern to match $component_dir or ${component_dir} references
    # These indicate local file usage that prevents grouping
    return bool(re.search(r"\$\{?component_dir\}?", content))


def is_platform_component(component_dir: Path) -> bool:
    """Check if a component is a platform component (abstract base class).

    Platform components have IS_PLATFORM_COMPONENT = True and cannot be
    instantiated without a platform-specific implementation. These components
    define abstract methods and cause linker errors if compiled standalone.

    Examples: canbus, mcp23x08_base, mcp23x17_base

    Args:
        component_dir: Path to the component's test directory

    Returns:
        True if this is a platform component
    """
    # Check in the actual component source, not tests
    # tests/components/X -> tests/components -> tests -> repo root
    repo_root = component_dir.parent.parent.parent
    comp_init = (
        repo_root / "esphome" / "components" / component_dir.name / "__init__.py"
    )

    if not comp_init.exists():
        return False

    try:
        content = comp_init.read_text()
        return "IS_PLATFORM_COMPONENT = True" in content
    except Exception:  # pylint: disable=broad-exception-caught
        return False


def _contains_extend_or_remove(data: Any) -> bool:
    """Recursively check if data contains Extend or Remove objects.

    Args:
        data: Parsed YAML data structure

    Returns:
        True if any Extend or Remove objects are found
    """
    if isinstance(data, (Extend, Remove)):
        return True

    if isinstance(data, dict):
        for value in data.values():
            if _contains_extend_or_remove(value):
                return True

    if isinstance(data, list):
        for item in data:
            if _contains_extend_or_remove(item):
                return True

    return False


def analyze_yaml_file(yaml_file: Path) -> dict[str, Any]:
    """Load a YAML file once and extract all needed information.

    This loads the YAML file a single time and extracts all information needed
    for component analysis, avoiding multiple file reads.

    Args:
        yaml_file: Path to the YAML file to analyze

    Returns:
        Dictionary with keys:
        - buses: set of common bus package names
        - has_extend_remove: bool indicating if Extend/Remove objects are present
        - has_direct_bus_config: bool indicating if buses are defined directly (not via packages)
        - loaded: bool indicating if file was successfully loaded
    """
    result = {
        "buses": set(),
        "has_extend_remove": False,
        "has_direct_bus_config": False,
        "loaded": False,
    }

    if not yaml_file.exists():
        return result

    try:
        data = yaml_util.load_yaml(yaml_file)
        result["loaded"] = True
    except Exception:  # pylint: disable=broad-exception-caught
        return result

    # Check for Extend/Remove objects
    result["has_extend_remove"] = _contains_extend_or_remove(data)

    # Check if buses are defined directly (not via packages)
    # Components that define i2c, spi, uart, or modbus directly in test files
    # cannot be grouped because they create unique bus IDs
    if isinstance(data, dict):
        for bus_type in DIRECT_BUS_TYPES:
            if bus_type in data:
                result["has_direct_bus_config"] = True
                break

    # Extract common bus packages
    if not isinstance(data, dict) or "packages" not in data:
        return result

    packages = data["packages"]
    if not isinstance(packages, dict):
        return result

    valid_buses = get_common_bus_packages()
    for pkg_name in packages:
        if pkg_name not in valid_buses:
            continue
        result["buses"].add(pkg_name)
        # Add any package dependencies (e.g., modbus includes uart)
        if pkg_name not in PACKAGE_DEPENDENCIES:
            continue
        for dep in PACKAGE_DEPENDENCIES[pkg_name]:
            if dep not in valid_buses:
                continue
            result["buses"].add(dep)

    return result


def analyze_component(component_dir: Path) -> tuple[dict[str, list[str]], bool, bool]:
    """Analyze a component directory to find which buses each platform uses.

    Args:
        component_dir: Path to the component's test directory

    Returns:
        Tuple of:
        - Dictionary mapping platform to list of bus configs
          Example: {"esp32-ard": ["i2c", "spi"], "esp32-idf": ["i2c"]}
        - Boolean indicating if component uses !extend or !remove
        - Boolean indicating if component defines buses directly (not via packages)
    """
    if not component_dir.is_dir():
        return {}, False, False

    platform_buses = {}
    has_extend_remove = False
    has_direct_bus_config = False

    # Analyze all YAML files in the component directory
    for yaml_file in component_dir.glob("*.yaml"):
        analysis = analyze_yaml_file(yaml_file)

        # Track if any file uses extend/remove
        if analysis["has_extend_remove"]:
            has_extend_remove = True

        # Track if any file defines buses directly
        if analysis["has_direct_bus_config"]:
            has_direct_bus_config = True

        # For test.*.yaml files, extract platform and buses
        if yaml_file.name.startswith("test.") and yaml_file.suffix == ".yaml":
            # Extract platform name (e.g., test.esp32-ard.yaml -> esp32-ard)
            platform = yaml_file.stem.replace("test.", "")
            # Always add platform, even if it has no buses (empty list)
            # This allows grouping components that don't use any shared buses
            platform_buses[platform] = (
                sorted(analysis["buses"]) if analysis["buses"] else []
            )

    return platform_buses, has_extend_remove, has_direct_bus_config


def analyze_all_components(
    tests_dir: Path = None,
) -> tuple[dict[str, dict[str, list[str]]], set[str], set[str]]:
    """Analyze all component test directories.

    Args:
        tests_dir: Path to tests/components directory (defaults to auto-detect)

    Returns:
        Tuple of:
        - Dictionary mapping component name to platform->buses mapping
        - Set of component names that cannot be grouped
        - Set of component names that define buses directly (need migration warning)
    """
    if tests_dir is None:
        tests_dir = Path("tests/components")

    if not tests_dir.exists():
        print(f"Error: {tests_dir} does not exist", file=sys.stderr)
        return {}, set(), set()

    components = {}
    non_groupable = set()
    direct_bus_components = set()

    for component_dir in sorted(tests_dir.iterdir()):
        if not component_dir.is_dir():
            continue

        component_name = component_dir.name
        platform_buses, has_extend_remove, has_direct_bus_config = analyze_component(
            component_dir
        )

        if platform_buses:
            components[component_name] = platform_buses

        # Note: Components using $component_dir are now groupable because the merge
        # script rewrites these to absolute paths with component-specific substitutions

        # Check if component is explicitly isolated
        # These have known issues that prevent grouping with other components
        if component_name in ISOLATED_COMPONENTS:
            non_groupable.add(component_name)

        # Check if component is a base bus component
        # These ARE the bus platform implementations and define buses directly for testing
        # They cannot be grouped with components that use bus packages (causes ID conflicts)
        if component_name in BASE_BUS_COMPONENTS:
            non_groupable.add(component_name)

        # Check if component uses !extend or !remove directives
        # These rely on specific config structure and cannot be merged with other components
        # The directives work within a component's own package hierarchy but break when
        # merging independent components together
        if has_extend_remove:
            non_groupable.add(component_name)

        # Check if component defines buses directly in test files
        # These create unique bus IDs and cause conflicts when merged
        # Exclude base bus components (i2c, spi, uart, etc.) since they ARE the platform
        if has_direct_bus_config and component_name not in BASE_BUS_COMPONENTS:
            non_groupable.add(component_name)
            direct_bus_components.add(component_name)

    return components, non_groupable, direct_bus_components


def create_grouping_signature(
    platform_buses: dict[str, list[str]], platform: str
) -> str:
    """Create a signature string for grouping components.

    Components with the same signature can be grouped together for testing.
    All valid bus types can be grouped since --testing-mode bypasses runtime
    conflicts during config validation.

    Args:
        platform_buses: Mapping of platform to list of buses
        platform: The specific platform to create signature for

    Returns:
        Signature string (e.g., "i2c" or "uart") or empty if no valid buses
    """
    buses = platform_buses.get(platform, [])
    if not buses:
        return ""

    # Only include valid bus types in signature
    common_buses = get_common_bus_packages()
    valid_buses = [b for b in buses if b in common_buses]
    if not valid_buses:
        return ""

    return "+".join(sorted(valid_buses))


def group_components_by_signature(
    components: dict[str, dict[str, list[str]]], platform: str
) -> dict[str, list[str]]:
    """Group components by their bus signature for a specific platform.

    Args:
        components: Component analysis results from analyze_all_components()
        platform: Platform to group for (e.g., "esp32-ard")

    Returns:
        Dictionary mapping signature to list of component names
        Example: {"i2c+uart_19200": ["comp1", "comp2"], "spi": ["comp3"]}
    """
    signature_groups: dict[str, list[str]] = {}

    for component_name, platform_buses in components.items():
        if platform not in platform_buses:
            continue

        signature = create_grouping_signature(platform_buses, platform)
        if not signature:
            continue

        if signature not in signature_groups:
            signature_groups[signature] = []
        signature_groups[signature].append(component_name)

    return signature_groups


def main() -> None:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Analyze component test files to detect common bus usage"
    )
    parser.add_argument(
        "--components",
        "-c",
        nargs="+",
        help="Specific components to analyze (default: all)",
    )
    parser.add_argument(
        "--platform",
        "-p",
        help="Show grouping for a specific platform",
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output as JSON",
    )
    parser.add_argument(
        "--group",
        action="store_true",
        help="Show component groupings by bus signature",
    )

    args = parser.parse_args()

    # Analyze components
    tests_dir = Path("tests/components")

    if args.components:
        # Analyze only specified components
        components = {}
        non_groupable = set()
        direct_bus_components = set()
        for comp in args.components:
            comp_dir = tests_dir / comp
            platform_buses, has_extend_remove, has_direct_bus_config = (
                analyze_component(comp_dir)
            )
            if platform_buses:
                components[comp] = platform_buses
            # Note: Components using $component_dir are now groupable
            if comp in ISOLATED_COMPONENTS:
                non_groupable.add(comp)
            if comp in BASE_BUS_COMPONENTS:
                non_groupable.add(comp)
            if has_direct_bus_config and comp not in BASE_BUS_COMPONENTS:
                non_groupable.add(comp)
                direct_bus_components.add(comp)
    else:
        # Analyze all components
        components, non_groupable, direct_bus_components = analyze_all_components(
            tests_dir
        )

    # Output results
    if args.group and args.platform:
        # Show groupings for a specific platform
        groups = group_components_by_signature(components, args.platform)

        if args.json:
            print(json.dumps(groups, indent=2))
        else:
            print(f"Component groupings for {args.platform}:")
            print()
            for signature, comp_list in sorted(groups.items()):
                print(f"  {signature}:")
                for comp in sorted(comp_list):
                    print(f"    - {comp}")
                print()
    elif args.json:
        # JSON output
        print(json.dumps(components, indent=2))
    else:
        # Human-readable output
        for component, platform_buses in sorted(components.items()):
            non_groupable_marker = (
                " [NON-GROUPABLE]" if component in non_groupable else ""
            )
            print(f"{component}{non_groupable_marker}:")
            for platform, buses in sorted(platform_buses.items()):
                bus_str = ", ".join(buses)
                print(f"  {platform}: {bus_str}")
            print()
        print(f"Total components analyzed: {len(components)}")
        if non_groupable:
            print(f"Non-groupable components (use local files): {len(non_groupable)}")
            for comp in sorted(non_groupable):
                print(f"  - {comp}")


if __name__ == "__main__":
    main()
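A quick illustration of how the signature functions above drive grouping. This is a sketch that assumes a repository checkout with the dev environment active (so `esphome` and the `script` package import) and that the common bus packages named in the docstrings (i2c, uart_19200, spi) exist under tests/test_build_components/common/; the component names are hypothetical:

```python
"""Hedged sketch: grouping hypothetical components by bus signature."""
from script.analyze_component_buses import (
    create_grouping_signature,
    group_components_by_signature,
)

# Analysis results in the shape returned by analyze_all_components()
components = {
    "comp_a": {"esp32-ard": ["i2c", "uart_19200"]},
    "comp_b": {"esp32-ard": ["uart_19200", "i2c"]},
    "comp_c": {"esp32-ard": ["spi"]},
}

# Buses are sorted, so ordering differences do not matter.
# Expected (assuming those common bus packages exist): "i2c+uart_19200"
print(create_grouping_signature(components["comp_a"], "esp32-ard"))

# comp_a and comp_b share a signature and can be merged into one build.
# Expected: {'i2c+uart_19200': ['comp_a', 'comp_b'], 'spi': ['comp_c']}
print(group_components_by_signature(components, "esp32-ard"))
```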
script/merge_component_configs.py (new executable file, 379 lines)

@@ -0,0 +1,379 @@
#!/usr/bin/env python3
"""Merge multiple component test configurations into a single test file.

This script combines multiple component test files that use the same common bus
configurations into a single merged test file. This allows testing multiple
compatible components together, reducing CI build time.

The merger handles:
- Component-specific substitutions (prefixing to avoid conflicts)
- Multiple instances of component configurations
- Shared common bus packages (included only once)
- Platform-specific configurations
- Uses ESPHome's built-in merge_config for proper YAML merging
"""

from __future__ import annotations

import argparse
from pathlib import Path
import re
import sys
from typing import Any

# Add esphome to path so we can import from it
sys.path.insert(0, str(Path(__file__).parent.parent))

from esphome import yaml_util
from esphome.config_helpers import merge_config
from script.analyze_component_buses import PACKAGE_DEPENDENCIES, get_common_bus_packages


def load_yaml_file(yaml_file: Path) -> dict:
    """Load YAML file using ESPHome's YAML loader.

    Args:
        yaml_file: Path to the YAML file

    Returns:
        Parsed YAML as dictionary
    """
    if not yaml_file.exists():
        raise FileNotFoundError(f"YAML file not found: {yaml_file}")

    return yaml_util.load_yaml(yaml_file)


def extract_packages_from_yaml(data: dict) -> dict[str, str]:
    """Extract COMMON BUS package includes from parsed YAML.

    Only extracts packages that are from test_build_components/common/,
    ignoring component-specific packages.

    Args:
        data: Parsed YAML dictionary

    Returns:
        Dictionary mapping package name to include path (as string representation)
        Only includes common bus packages (i2c, spi, uart, etc.)
    """
    if "packages" not in data:
        return {}

    packages_value = data["packages"]
    if not isinstance(packages_value, dict):
        # List format doesn't include common bus packages (those use dict format)
        return {}

    # Get common bus package names (cached)
    common_bus_packages = get_common_bus_packages()
    packages = {}

    # Dictionary format: packages: {name: value}
    for name, value in packages_value.items():
        # Only include common bus packages, ignore component-specific ones
        if name not in common_bus_packages:
            continue
        packages[name] = str(value)
        # Also track package dependencies (e.g., modbus includes uart)
        if name not in PACKAGE_DEPENDENCIES:
            continue
        for dep in PACKAGE_DEPENDENCIES[name]:
            if dep not in common_bus_packages:
                continue
            # Mark as included via dependency
            packages[f"_dep_{dep}"] = f"(included via {name})"

    return packages


def prefix_substitutions_in_dict(
    data: Any, prefix: str, exclude: set[str] | None = None
) -> Any:
    """Recursively prefix all substitution references in a data structure.

    Args:
        data: YAML data structure (dict, list, or scalar)
        prefix: Prefix to add to substitution names
        exclude: Set of substitution names to exclude from prefixing

    Returns:
        Data structure with prefixed substitution references
    """
    if exclude is None:
        exclude = set()

    def replace_sub(text: str) -> str:
        """Replace substitution references in a string."""

        def replace_match(match):
            sub_name = match.group(1)
            if sub_name in exclude:
                return match.group(0)
            # Always use braced format in output for consistency
            return f"${{{prefix}_{sub_name}}}"

        # Match both ${substitution} and $substitution formats
        return re.sub(r"\$\{?(\w+)\}?", replace_match, text)

    if isinstance(data, dict):
        result = {}
        for key, value in data.items():
            result[key] = prefix_substitutions_in_dict(value, prefix, exclude)
        return result
    if isinstance(data, list):
        return [prefix_substitutions_in_dict(item, prefix, exclude) for item in data]
    if isinstance(data, str):
        return replace_sub(data)
    return data


def deduplicate_by_id(data: dict) -> dict:
    """Deduplicate list items with the same ID.

    Keeps only the first occurrence of each ID. If items with the same ID
    are identical, this silently deduplicates. If they differ, the first
    one is kept (ESPHome's validation will catch if this causes issues).

    Args:
        data: Parsed config dictionary

    Returns:
        Config with deduplicated lists
    """
    if not isinstance(data, dict):
        return data

    result = {}
    for key, value in data.items():
        if isinstance(value, list):
            # Check for items with 'id' field
            seen_ids = set()
            deduped_list = []

            for item in value:
                if isinstance(item, dict) and "id" in item:
                    item_id = item["id"]
                    if item_id not in seen_ids:
                        seen_ids.add(item_id)
                        deduped_list.append(item)
                    # else: skip duplicate ID (keep first occurrence)
                else:
                    # No ID, just add it
                    deduped_list.append(item)

            result[key] = deduped_list
        elif isinstance(value, dict):
            # Recursively deduplicate nested dicts
            result[key] = deduplicate_by_id(value)
        else:
            result[key] = value

    return result


def merge_component_configs(
    component_names: list[str],
    platform: str,
    tests_dir: Path,
    output_file: Path,
) -> None:
    """Merge multiple component test configs into a single file.

    Args:
        component_names: List of component names to merge
        platform: Platform to merge for (e.g., "esp32-ard")
        tests_dir: Path to tests/components directory
        output_file: Path to output merged config file
    """
    if not component_names:
        raise ValueError("No components specified")

    # Track packages to ensure they're identical
    all_packages = None

    # Start with empty config
    merged_config_data = {}

    # Process each component
    for comp_name in component_names:
        comp_dir = tests_dir / comp_name
        test_file = comp_dir / f"test.{platform}.yaml"

        if not test_file.exists():
            raise FileNotFoundError(f"Test file not found: {test_file}")

        # Load the component's test file
        comp_data = load_yaml_file(test_file)

        # Validate packages are compatible
        # Components with no packages (no_buses) can merge with any group
        comp_packages = extract_packages_from_yaml(comp_data)

        if all_packages is None:
            # First component - set the baseline
            all_packages = comp_packages
        elif not comp_packages:
            # This component has no packages (no_buses) - it can merge with any group
            pass
        elif not all_packages:
            # Previous components had no packages, but this one does - adopt these packages
            all_packages = comp_packages
        elif comp_packages != all_packages:
            # Both have packages but they differ - this is an error
            raise ValueError(
                f"Component {comp_name} has different packages than previous components. "
                f"Expected: {all_packages}, Got: {comp_packages}. "
                f"All components must use the same common bus configs to be merged."
            )

        # Handle $component_dir by replacing with absolute path
        # This allows components that use local file references to be grouped
        comp_abs_dir = str(comp_dir.absolute())

        # Save top-level substitutions BEFORE expanding packages
        # In ESPHome, top-level substitutions override package substitutions
        top_level_subs = (
            comp_data["substitutions"].copy()
            if "substitutions" in comp_data and comp_data["substitutions"] is not None
            else {}
        )

        # Expand packages - but we'll restore substitution priority after
        if "packages" in comp_data:
            packages_value = comp_data["packages"]

            if isinstance(packages_value, dict):
                # Dict format - check each package
                common_bus_packages = get_common_bus_packages()
                for pkg_name, pkg_value in list(packages_value.items()):
                    if pkg_name in common_bus_packages:
                        continue
                    if not isinstance(pkg_value, dict):
                        continue
                    # Component-specific package - expand its content into top level
                    comp_data = merge_config(comp_data, pkg_value)
            elif isinstance(packages_value, list):
                # List format - expand all package includes
                for pkg_value in packages_value:
                    if not isinstance(pkg_value, dict):
                        continue
                    comp_data = merge_config(comp_data, pkg_value)

            # Remove all packages (common will be re-added at the end)
            del comp_data["packages"]

        # Restore top-level substitution priority
        # Top-level substitutions override any from packages
        if "substitutions" not in comp_data or comp_data["substitutions"] is None:
            comp_data["substitutions"] = {}

        # Merge: package subs as base, top-level subs override
        comp_data["substitutions"].update(top_level_subs)

        # Now prefix the final merged substitutions
        comp_data["substitutions"] = {
            f"{comp_name}_{sub_name}": sub_value
            for sub_name, sub_value in comp_data["substitutions"].items()
        }

        # Add component_dir substitution with absolute path for this component
        comp_data["substitutions"][f"{comp_name}_component_dir"] = comp_abs_dir

        # Prefix substitution references throughout the config
        comp_data = prefix_substitutions_in_dict(comp_data, comp_name)

        # Use ESPHome's merge_config to merge this component into the result
        # merge_config handles list merging with ID-based deduplication automatically
        merged_config_data = merge_config(merged_config_data, comp_data)

    # Add packages back (only once, since they're identical)
    # IMPORTANT: Only re-add common bus packages (spi, i2c, uart, etc.)
    # Do NOT re-add component-specific packages as they contain unprefixed $component_dir refs
    if all_packages:
        first_comp_data = load_yaml_file(
            tests_dir / component_names[0] / f"test.{platform}.yaml"
        )
        if "packages" in first_comp_data and isinstance(
            first_comp_data["packages"], dict
        ):
            # Filter to only include common bus packages
            # Only dict format can contain common bus packages
            common_bus_packages = get_common_bus_packages()
            filtered_packages = {
                name: value
                for name, value in first_comp_data["packages"].items()
                if name in common_bus_packages
            }
            if filtered_packages:
                merged_config_data["packages"] = filtered_packages

    # Deduplicate items with same ID (keeps first occurrence)
    merged_config_data = deduplicate_by_id(merged_config_data)

    # Remove esphome section since it will be provided by the wrapper file
    # The wrapper file includes this merged config via packages and provides
    # the proper esphome: section with name, platform, etc.
    if "esphome" in merged_config_data:
        del merged_config_data["esphome"]

    # Write merged config
    output_file.parent.mkdir(parents=True, exist_ok=True)
    yaml_content = yaml_util.dump(merged_config_data)
    output_file.write_text(yaml_content)

    print(f"Successfully merged {len(component_names)} components into {output_file}")


def main() -> None:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Merge multiple component test configs into a single file"
    )
    parser.add_argument(
        "--components",
        "-c",
        required=True,
        help="Comma-separated list of component names to merge",
    )
    parser.add_argument(
        "--platform",
        "-p",
        required=True,
        help="Platform to merge for (e.g., esp32-ard)",
    )
    parser.add_argument(
        "--output",
        "-o",
        required=True,
        type=Path,
        help="Output file path for merged config",
    )
    parser.add_argument(
        "--tests-dir",
        type=Path,
        default=Path("tests/components"),
        help="Path to tests/components directory",
    )

    args = parser.parse_args()

    component_names = [c.strip() for c in args.components.split(",")]

    try:
        merge_component_configs(
            component_names=component_names,
            platform=args.platform,
            tests_dir=args.tests_dir,
            output_file=args.output,
        )
    except Exception as e:
        print(f"Error merging configs: {e}", file=sys.stderr)
        import traceback

        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()
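The substitution prefixing above is what keeps two merged components from clashing on names like `${pin}`. A small, self-contained illustration of the same regex rewrite; the component name and the config snippet are made up:

```python
"""Hedged sketch of the substitution-prefix rewrite performed during merging."""
import re


def prefix_refs(text: str, prefix: str) -> str:
    """Rewrite $name / ${name} references to ${<prefix>_name}."""
    return re.sub(r"\$\{?(\w+)\}?", lambda m: f"${{{prefix}_{m.group(1)}}}", text)


# A hypothetical line from one component's test config
snippet = "number: ${pin}  # wired to $component_dir/fixture.h"
print(prefix_refs(snippet, "bme280"))
# number: ${bme280_pin}  # wired to ${bme280_component_dir}/fixture.h
```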
268
script/split_components_for_ci.py
Executable file
268
script/split_components_for_ci.py
Executable file
@@ -0,0 +1,268 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Split components into batches with intelligent grouping.
|
||||||
|
|
||||||
|
This script analyzes components to identify which ones share common bus configurations
|
||||||
|
and intelligently groups them into batches to maximize the efficiency of the
|
||||||
|
component grouping system in CI.
|
||||||
|
|
||||||
|
Components with the same bus signature are placed in the same batch whenever possible,
|
||||||
|
allowing the test_build_components.py script to merge them into single builds.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
from collections import defaultdict
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
import sys
|
||||||
|
|
||||||
|
# Add esphome to path
|
||||||
|
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||||
|
|
||||||
|
from script.analyze_component_buses import (
|
||||||
|
ISOLATED_COMPONENTS,
|
||||||
|
NO_BUSES_SIGNATURE,
|
||||||
|
analyze_all_components,
|
||||||
|
create_grouping_signature,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Weighting for batch creation
|
||||||
|
# Isolated components can't be grouped/merged, so they count as 10x
|
||||||
|
# Groupable components can be merged into single builds, so they count as 1x
|
||||||
|
ISOLATED_WEIGHT = 10
|
||||||
|
GROUPABLE_WEIGHT = 1


def has_test_files(component_name: str, tests_dir: Path) -> bool:
    """Check if a component has test files.

    Args:
        component_name: Name of the component
        tests_dir: Path to tests/components directory

    Returns:
        True if the component has test.*.yaml files
    """
    component_dir = tests_dir / component_name
    if not component_dir.exists() or not component_dir.is_dir():
        return False

    # Check for test.*.yaml files
    return any(component_dir.glob("test.*.yaml"))


def create_intelligent_batches(
    components: list[str],
    tests_dir: Path,
    batch_size: int = 40,
) -> list[list[str]]:
    """Create batches optimized for component grouping.

    Args:
        components: List of component names to batch
        tests_dir: Path to tests/components directory
        batch_size: Target size for each batch

    Returns:
        List of component batches (lists of component names)
    """
    # Filter out components without test files
    # Platform components like 'climate' and 'climate_ir' don't have test files
    components_with_tests = [
        comp for comp in components if has_test_files(comp, tests_dir)
    ]

    # Log filtered components to stderr for debugging
    if len(components_with_tests) < len(components):
        filtered_out = set(components) - set(components_with_tests)
        print(
            f"Note: Filtered {len(filtered_out)} components without test files: "
            f"{', '.join(sorted(filtered_out))}",
            file=sys.stderr,
        )

    # Analyze all components to get their bus signatures
    component_buses, non_groupable, _direct_bus_components = analyze_all_components(
        tests_dir
    )

    # Group components by their bus signature ONLY (ignore platform)
    # All platforms will be tested by test_build_components.py for each batch
    # Key: signature, Value: list of components
    signature_groups: dict[str, list[str]] = defaultdict(list)

    for component in components_with_tests:
        # Components that can't be grouped get unique signatures
        # This includes both manually curated ISOLATED_COMPONENTS and
        # automatically detected non_groupable components
        # These can share a batch/runner but won't be grouped/merged
        if component in ISOLATED_COMPONENTS or component in non_groupable:
            signature_groups[f"isolated_{component}"].append(component)
            continue

        # Get signature from any platform (they should all have the same buses)
        # Components not in component_buses were filtered out by has_test_files check
        comp_platforms = component_buses[component]
        for platform, buses in comp_platforms.items():
            if buses:
                signature = create_grouping_signature({platform: buses}, platform)
                # Group by signature only - platform doesn't matter for batching
                signature_groups[signature].append(component)
                break  # Only use first platform for grouping
        else:
            # No buses found for any platform - can be grouped together
            signature_groups[NO_BUSES_SIGNATURE].append(component)

    # Create batches by keeping signature groups together
    # Components with the same signature stay in the same batches
    batches = []

    # Sort signature groups to prioritize groupable components
    # 1. Put "isolated_*" signatures last (can't be grouped with others)
    # 2. Sort groupable signatures by size (largest first)
    # 3. "no_buses" components CAN be grouped together
    def sort_key(item):
        signature, components = item
        is_isolated = signature.startswith("isolated_")
        # Put "isolated_*" last (1), groupable first (0)
        # Within each category, sort by size (largest first)
        return (is_isolated, -len(components))

    sorted_groups = sorted(signature_groups.items(), key=sort_key)

    # Strategy: Create batches using weighted sizes
    # - Isolated components count as 10x (since they can't be grouped/merged)
    # - Groupable components count as 1x (can be merged into single builds)
    # - This distributes isolated components across more runners
    # - Ensures each runner has a good mix of groupable vs isolated components

    current_batch = []
    current_weight = 0

    for signature, group_components in sorted_groups:
        is_isolated = signature.startswith("isolated_")
        weight_per_component = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT

        for component in group_components:
            # Check if adding this component would exceed the batch size
            if current_weight + weight_per_component > batch_size and current_batch:
                # Start a new batch
                batches.append(current_batch)
                current_batch = []
                current_weight = 0

            # Add component to current batch
            current_batch.append(component)
            current_weight += weight_per_component

    # Don't forget the last batch
    if current_batch:
        batches.append(current_batch)

    return batches


def main() -> int:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Split components into intelligent batches for CI testing"
    )
    parser.add_argument(
        "--components",
        "-c",
        required=True,
        help="JSON array of component names",
    )
    parser.add_argument(
        "--batch-size",
        "-b",
        type=int,
        default=40,
        help="Target batch size (default: 40, weighted)",
    )
    parser.add_argument(
        "--tests-dir",
        type=Path,
        default=Path("tests/components"),
        help="Path to tests/components directory",
    )
    parser.add_argument(
        "--output",
        "-o",
        choices=["json", "github"],
        default="github",
        help="Output format (json or github for GitHub Actions)",
    )

    args = parser.parse_args()

    # Parse component list from JSON
    try:
        components = json.loads(args.components)
    except json.JSONDecodeError as e:
        print(f"Error parsing components JSON: {e}", file=sys.stderr)
        return 1

    if not isinstance(components, list):
        print("Components must be a JSON array", file=sys.stderr)
        return 1

    # Create intelligent batches
    batches = create_intelligent_batches(
        components=components,
        tests_dir=args.tests_dir,
        batch_size=args.batch_size,
    )

    # Convert batches to space-separated strings for CI
    batch_strings = [" ".join(batch) for batch in batches]

    if args.output == "json":
        # Output as JSON array
        print(json.dumps(batch_strings))
    else:
        # Output for GitHub Actions (set output)
        output_json = json.dumps(batch_strings)
        print(f"components={output_json}")

    # Print summary to stderr so it shows in CI logs
    # Count actual components being batched
    actual_components = sum(len(batch.split()) for batch in batch_strings)

    # Re-analyze to get isolated component counts for summary
    _, non_groupable, _ = analyze_all_components(args.tests_dir)

    # Count isolated vs groupable components
    all_batched_components = [comp for batch in batches for comp in batch]
    isolated_count = sum(
        1
        for comp in all_batched_components
        if comp in ISOLATED_COMPONENTS or comp in non_groupable
    )
    groupable_count = actual_components - isolated_count

    print("\n=== Intelligent Batch Summary ===", file=sys.stderr)
    print(f"Total components requested: {len(components)}", file=sys.stderr)
    print(f"Components with test files: {actual_components}", file=sys.stderr)
    print(f"  - Groupable (weight=1): {groupable_count}", file=sys.stderr)
    print(f"  - Isolated (weight=10): {isolated_count}", file=sys.stderr)
    if actual_components < len(components):
        print(
            f"Components skipped (no test files): {len(components) - actual_components}",
            file=sys.stderr,
        )
    print(f"Number of batches: {len(batches)}", file=sys.stderr)
    print(f"Batch size target (weighted): {args.batch_size}", file=sys.stderr)
    if len(batches) > 0:
        print(
            f"Average components per batch: {actual_components / len(batches):.1f}",
            file=sys.stderr,
        )
    print(file=sys.stderr)

    return 0


if __name__ == "__main__":
    sys.exit(main())
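For local debugging, the splitter can also be run by hand with a JSON array of component names. The sketch below is only illustrative: the splitter's own filename is not shown in this hunk, so the path is a placeholder, and the component names are examples.

```bash
# Path is a placeholder; substitute the actual splitter script name.
python3 script/<component-batch-splitter>.py \
  --components '["wifi", "api", "logger"]' \
  --output json
# Prints a JSON array of space-separated batch strings, e.g. ["api logger wifi"]
```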
@@ -1,106 +0,0 @@
#!/usr/bin/env bash

set -e

help() {
    echo "Usage: $0 [-e <config|compile|clean>] [-c <string>] [-t <string>]" 1>&2
    echo 1>&2
    echo " - e - Parameter for esphome command. Default compile. Common alternative is config." 1>&2
    echo " - c - Component folder name to test. Default *. E.g. '-c logger'." 1>&2
    echo " - t - Target name to test. Put '-t list' to display all possibilities. E.g. '-t esp32-s2-idf-51'." 1>&2
    exit 1
}

# Parse parameter:
# - `e` - Parameter for `esphome` command. Default `compile`. Common alternative is `config`.
# - `c` - Component folder name to test. Default `*`.
esphome_command="compile"
target_component="*"
while getopts e:c:t: flag
do
    case $flag in
        e) esphome_command=${OPTARG};;
        c) target_component=${OPTARG};;
        t) requested_target_platform=${OPTARG};;
        \?) help;;
    esac
done

cd "$(dirname "$0")/.."

if ! [ -d "./tests/test_build_components/build" ]; then
    mkdir ./tests/test_build_components/build
fi

start_esphome() {
    if [ -n "$requested_target_platform" ] && [ "$requested_target_platform" != "$target_platform_with_version" ]; then
        echo "Skipping $target_platform_with_version"
        return
    fi
    # create dynamic yaml file in `build` folder.
    # `./tests/test_build_components/build/[target_component].[test_name].[target_platform_with_version].yaml`
    component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform_with_version.yaml"

    cp $target_platform_file $component_test_file
    if [[ "$OSTYPE" == "darwin"* ]]; then
        # macOS sed is...different
        sed -i '' "s!\$component_test_file!../../.$f!g" $component_test_file
    else
        sed -i "s!\$component_test_file!../../.$f!g" $component_test_file
    fi

    # Start esphome process
    echo "> [$target_component] [$test_name] [$target_platform_with_version]"
    set -x
    # TODO: Validate escape of Command line substitution value
    python3 -m esphome -s component_name $target_component -s component_dir ../../components/$target_component -s test_name $test_name -s target_platform $target_platform $esphome_command $component_test_file
    { set +x; } 2>/dev/null
}

# Find all test yaml files.
# - `./tests/components/[target_component]/[test_name].[target_platform].yaml`
# - `./tests/components/[target_component]/[test_name].all.yaml`
for f in ./tests/components/$target_component/*.*.yaml; do
    [ -f "$f" ] || continue
    IFS='/' read -r -a folder_name <<< "$f"
    target_component="${folder_name[3]}"

    IFS='.' read -r -a file_name <<< "${folder_name[4]}"
    test_name="${file_name[0]}"
    target_platform="${file_name[1]}"
    file_name_parts=${#file_name[@]}

    if [ "$target_platform" = "all" ] || [ $file_name_parts = 2 ]; then
        # Test has *not* defined a specific target platform. Need to run tests for all possible target platforms.

        for target_platform_file in ./tests/test_build_components/build_components_base.*.yaml; do
            IFS='/' read -r -a folder_name <<< "$target_platform_file"
            IFS='.' read -r -a file_name <<< "${folder_name[3]}"
            target_platform="${file_name[1]}"

            start_esphome
        done

    else
        # Test has defined a specific target platform.

        # Validate we have a base test yaml for selected platform.
        # The target_platform is sourced from the following location.
        # 1. `./tests/test_build_components/build_components_base.[target_platform].yaml`
        # 2. `./tests/test_build_components/build_components_base.[target_platform]-ard.yaml`
        target_platform_file="./tests/test_build_components/build_components_base.$target_platform.yaml"
        if ! [ -f "$target_platform_file" ]; then
            echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml] for component test [$f] found."
            exit 1
        fi

        for target_platform_file in ./tests/test_build_components/build_components_base.$target_platform*.yaml; do
            # trim off "./tests/test_build_components/build_components_base." prefix
            target_platform_with_version=${target_platform_file:52}
            # ...now remove suffix starting with "." leaving just the test target hardware and software platform (possibly with version)
            # For example: "esp32-s3-idf-50"
            target_platform_with_version=${target_platform_with_version%.*}
            start_esphome
        done
    fi
done
script/test_build_components (Symbolic link, 1 line)
@@ -0,0 +1 @@
test_build_components.py

script/test_build_components.py (Executable file, 931 lines)
@@ -0,0 +1,931 @@
#!/usr/bin/env python3
"""Test ESPHome component builds with intelligent grouping.

This script replaces the bash test_build_components script with Python,
adding support for intelligent component grouping based on shared bus
configurations to reduce CI build time.

Features:
- Analyzes components for shared common bus configs
- Groups compatible components together
- Merges configs for grouped components
- Uses --testing-mode for grouped tests
- Maintains backward compatibility with single component testing
"""

from __future__ import annotations

import argparse
from collections import defaultdict
import hashlib
import os
from pathlib import Path
import subprocess
import sys

# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))

# pylint: disable=wrong-import-position
from script.analyze_component_buses import (
    BASE_BUS_COMPONENTS,
    ISOLATED_COMPONENTS,
    NO_BUSES_SIGNATURE,
    analyze_all_components,
    create_grouping_signature,
    is_platform_component,
    uses_local_file_references,
)
from script.merge_component_configs import merge_component_configs

# Platform-specific maximum group sizes
# ESP8266 has limited IRAM and can't handle large component groups
PLATFORM_MAX_GROUP_SIZE = {
    "esp8266-ard": 10,  # ESP8266 Arduino has limited IRAM
    "esp8266-idf": 10,  # ESP8266 IDF also has limited IRAM
    # BK72xx now uses BK7252 board (1.62MB flash vs 1.03MB) - no limit needed
    # Other platforms can handle larger groups
}


def show_disk_space_if_ci(esphome_command: str) -> None:
    """Show disk space usage if running in CI during compile.

    Args:
        esphome_command: The esphome command being run (config/compile/clean)
    """
    if os.environ.get("GITHUB_ACTIONS") and esphome_command == "compile":
        print("\n" + "=" * 80)
        print("Disk Space After Build:")
        print("=" * 80)
        subprocess.run(["df", "-h"], check=False)
        print("=" * 80 + "\n")


def find_component_tests(
    components_dir: Path, component_pattern: str = "*"
) -> dict[str, list[Path]]:
    """Find all component test files.

    Args:
        components_dir: Path to tests/components directory
        component_pattern: Glob pattern for component names

    Returns:
        Dictionary mapping component name to list of test files
    """
    component_tests = defaultdict(list)

    for comp_dir in components_dir.glob(component_pattern):
        if not comp_dir.is_dir():
            continue

        for test_file in comp_dir.glob("test.*.yaml"):
            component_tests[comp_dir.name].append(test_file)

    return dict(component_tests)


def parse_test_filename(test_file: Path) -> tuple[str, str]:
    """Parse test filename to extract test name and platform.

    Args:
        test_file: Path to test file

    Returns:
        Tuple of (test_name, platform)
    """
    parts = test_file.stem.split(".")
    if len(parts) == 2:
        return parts[0], parts[1]  # test, platform
    return parts[0], "all"


def get_platform_base_files(base_dir: Path) -> dict[str, list[Path]]:
    """Get all platform base files.

    Args:
        base_dir: Path to test_build_components directory

    Returns:
        Dictionary mapping platform to list of base files (for version variants)
    """
    platform_files = defaultdict(list)

    for base_file in base_dir.glob("build_components_base.*.yaml"):
        # Extract platform from filename
        # e.g., build_components_base.esp32-idf.yaml -> esp32-idf
        # or build_components_base.esp32-idf-50.yaml -> esp32-idf
        filename = base_file.stem
        parts = filename.replace("build_components_base.", "").split("-")

        # Platform is everything before version number (if present)
        # Check if last part is a number (version)
        platform = "-".join(parts[:-1]) if parts[-1].isdigit() else "-".join(parts)

        platform_files[platform].append(base_file)

    return dict(platform_files)


def extract_platform_with_version(base_file: Path) -> str:
    """Extract platform with version from base filename.

    Args:
        base_file: Path to base file

    Returns:
        Platform with version (e.g., "esp32-idf-50" or "esp32-idf")
    """
    # Remove "build_components_base." prefix and ".yaml" suffix
    return base_file.stem.replace("build_components_base.", "")


def run_esphome_test(
    component: str,
    test_file: Path,
    platform: str,
    platform_with_version: str,
    base_file: Path,
    build_dir: Path,
    esphome_command: str,
    continue_on_fail: bool,
    use_testing_mode: bool = False,
) -> tuple[bool, str]:
    """Run esphome test for a single component.

    Args:
        component: Component name
        test_file: Path to component test file
        platform: Platform name (e.g., "esp32-idf")
        platform_with_version: Platform with version (e.g., "esp32-idf-50")
        base_file: Path to platform base file
        build_dir: Path to build directory
        esphome_command: ESPHome command (config/compile)
        continue_on_fail: Whether to continue on failure
        use_testing_mode: Whether to use --testing-mode flag

    Returns:
        Tuple of (success status, command string)
    """
    test_name = test_file.stem.split(".")[0]

    # Create dynamic test file in build directory
    output_file = build_dir / f"{component}.{test_name}.{platform_with_version}.yaml"

    # Copy base file and substitute component test file reference
    base_content = base_file.read_text()
    # Get relative path from build dir to test file
    repo_root = Path(__file__).parent.parent
    component_test_ref = f"../../{test_file.relative_to(repo_root / 'tests')}"
    output_content = base_content.replace("$component_test_file", component_test_ref)
    output_file.write_text(output_content)

    # Build esphome command
    cmd = [
        sys.executable,
        "-m",
        "esphome",
    ]

    # Add --testing-mode if needed (must be before subcommand)
    if use_testing_mode:
        cmd.append("--testing-mode")

    # Add substitutions
    cmd.extend(
        [
            "-s",
            "component_name",
            component,
            "-s",
            "component_dir",
            f"../../components/{component}",
            "-s",
            "test_name",
            test_name,
            "-s",
            "target_platform",
            platform,
        ]
    )

    # Add command and config file
    cmd.extend([esphome_command, str(output_file)])

    # Build command string for display/logging
    cmd_str = " ".join(cmd)

    # Run command
    print(f"> [{component}] [{test_name}] [{platform_with_version}]")
    if use_testing_mode:
        print("  (using --testing-mode)")

    try:
        result = subprocess.run(cmd, check=False)
        success = result.returncode == 0

        # Show disk space after build in CI during compile
        show_disk_space_if_ci(esphome_command)

        if not success and not continue_on_fail:
            # Print command immediately for failed tests
            print(f"\n{'=' * 80}")
            print("FAILED - Command to reproduce:")
            print(f"{'=' * 80}")
            print(cmd_str)
            print()
            raise subprocess.CalledProcessError(result.returncode, cmd)
        return success, cmd_str
    except subprocess.CalledProcessError:
        # Re-raise if we're not continuing on fail
        if not continue_on_fail:
            raise
        return False, cmd_str


def run_grouped_test(
    components: list[str],
    platform: str,
    platform_with_version: str,
    base_file: Path,
    build_dir: Path,
    tests_dir: Path,
    esphome_command: str,
    continue_on_fail: bool,
) -> tuple[bool, str]:
    """Run esphome test for a group of components with shared bus configs.

    Args:
        components: List of component names to test together
        platform: Platform name (e.g., "esp32-idf")
        platform_with_version: Platform with version (e.g., "esp32-idf-50")
        base_file: Path to platform base file
        build_dir: Path to build directory
        tests_dir: Path to tests/components directory
        esphome_command: ESPHome command (config/compile)
        continue_on_fail: Whether to continue on failure

    Returns:
        Tuple of (success status, command string)
    """
    # Create merged config
    group_name = "_".join(components[:3])  # Use first 3 components for name
    if len(components) > 3:
        group_name += f"_plus_{len(components) - 3}"

    # Create unique device name by hashing sorted component list + platform
    # This prevents conflicts when different component groups are tested
    sorted_components = sorted(components)
    hash_input = "_".join(sorted_components) + "_" + platform
    group_hash = hashlib.md5(hash_input.encode()).hexdigest()[:8]
    device_name = f"comptest{platform.replace('-', '')}{group_hash}"

    merged_config_file = build_dir / f"merged_{group_name}.{platform_with_version}.yaml"

    try:
        merge_component_configs(
            component_names=components,
            platform=platform_with_version,
            tests_dir=tests_dir,
            output_file=merged_config_file,
        )
    except Exception as e:  # pylint: disable=broad-exception-caught
        print(f"Error merging configs for {components}: {e}")
        if not continue_on_fail:
            raise
        # Return empty command string since we failed before building the command
        return False, f"# Failed during config merge: {e}"

    # Create test file that includes merged config
    output_file = build_dir / f"test_{group_name}.{platform_with_version}.yaml"
    base_content = base_file.read_text()
    merged_ref = merged_config_file.name
    output_content = base_content.replace("$component_test_file", merged_ref)
    output_file.write_text(output_content)

    # Build esphome command with --testing-mode
    cmd = [
        sys.executable,
        "-m",
        "esphome",
        "--testing-mode",  # Required for grouped tests
        "-s",
        "component_name",
        device_name,  # Use unique hash-based device name
        "-s",
        "component_dir",
        "../../components",
        "-s",
        "test_name",
        "merged",
        "-s",
        "target_platform",
        platform,
        esphome_command,
        str(output_file),
    ]

    # Build command string for display/logging
    cmd_str = " ".join(cmd)

    # Run command
    components_str = ", ".join(components)
    print(f"> [GROUPED: {components_str}] [{platform_with_version}]")
    print("  (using --testing-mode)")

    try:
        result = subprocess.run(cmd, check=False)
        success = result.returncode == 0

        # Show disk space after build in CI during compile
        show_disk_space_if_ci(esphome_command)

        if not success and not continue_on_fail:
            # Print command immediately for failed tests
            print(f"\n{'=' * 80}")
            print("FAILED - Command to reproduce:")
            print(f"{'=' * 80}")
            print(cmd_str)
            print()
            raise subprocess.CalledProcessError(result.returncode, cmd)
        return success, cmd_str
    except subprocess.CalledProcessError:
        # Re-raise if we're not continuing on fail
        if not continue_on_fail:
            raise
        return False, cmd_str


def run_grouped_component_tests(
    all_tests: dict[str, list[Path]],
    platform_filter: str | None,
    platform_bases: dict[str, list[Path]],
    tests_dir: Path,
    build_dir: Path,
    esphome_command: str,
    continue_on_fail: bool,
) -> tuple[set[tuple[str, str]], list[str], list[str], dict[str, str]]:
    """Run grouped component tests.

    Args:
        all_tests: Dictionary mapping component names to test files
        platform_filter: Optional platform to filter by
        platform_bases: Platform base files mapping
        tests_dir: Path to tests/components directory
        build_dir: Path to build directory
        esphome_command: ESPHome command (config/compile)
        continue_on_fail: Whether to continue on failure

    Returns:
        Tuple of (tested_components, passed_tests, failed_tests, failed_commands)
    """
    tested_components = set()
    passed_tests = []
    failed_tests = []
    failed_commands = {}  # Map test_id to command string

    # Group components by platform and bus signature
    grouped_components: dict[tuple[str, str], list[str]] = defaultdict(list)
    print("\n" + "=" * 80)
    print("Analyzing components for intelligent grouping...")
    print("=" * 80)
    component_buses, non_groupable, direct_bus_components = analyze_all_components(
        tests_dir
    )

    # Track why components can't be grouped (for detailed output)
    non_groupable_reasons = {}

    # Group by (platform, bus_signature)
    for component, platforms in component_buses.items():
        if component not in all_tests:
            continue

        # Skip components that must be tested in isolation
        # These are shown separately and should not be in non_groupable_reasons
        if component in ISOLATED_COMPONENTS:
            continue

        # Skip base bus components (these test the bus platforms themselves)
        if component in BASE_BUS_COMPONENTS:
            continue

        # Skip components that use local file references or direct bus configs
        if component in non_groupable:
            # Track the reason (using pre-calculated results to avoid expensive re-analysis)
            if component not in non_groupable_reasons:
                if component in direct_bus_components:
                    non_groupable_reasons[component] = (
                        "Defines buses directly (not via packages) - NEEDS MIGRATION"
                    )
                elif uses_local_file_references(tests_dir / component):
                    non_groupable_reasons[component] = (
                        "Uses local file references ($component_dir)"
                    )
                elif is_platform_component(tests_dir / component):
                    non_groupable_reasons[component] = (
                        "Platform component (abstract base class)"
                    )
                else:
                    non_groupable_reasons[component] = (
                        "Uses !extend or !remove directives"
                    )
            continue

        for platform, buses in platforms.items():
            # Skip if platform doesn't match filter
            if platform_filter and not platform.startswith(platform_filter):
                continue

            # Create signature for this component's bus configuration
            # Components with no buses get NO_BUSES_SIGNATURE so they can be grouped together
            if buses:
                signature = create_grouping_signature({platform: buses}, platform)
            else:
                signature = NO_BUSES_SIGNATURE

            # Add to grouped components (including those with no buses)
            if signature:
                grouped_components[(platform, signature)].append(component)

    # Print detailed grouping plan
    print("\nGrouping Plan:")
    print("-" * 80)

    # Show isolated components (must test individually due to known issues)
    isolated_in_tests = [c for c in ISOLATED_COMPONENTS if c in all_tests]
    if isolated_in_tests:
        print(
            f"\n⚠ {len(isolated_in_tests)} components must be tested in isolation (known build issues):"
        )
        for comp in sorted(isolated_in_tests):
            reason = ISOLATED_COMPONENTS[comp]
            print(f"  - {comp}: {reason}")

    # Show base bus components (test the bus platform implementations)
    base_bus_in_tests = [c for c in BASE_BUS_COMPONENTS if c in all_tests]
    if base_bus_in_tests:
        print(
            f"\n○ {len(base_bus_in_tests)} base bus platform components (tested individually):"
        )
        for comp in sorted(base_bus_in_tests):
            print(f"  - {comp}")

    # Show excluded components with detailed reasons
    if non_groupable_reasons:
        excluded_in_tests = [c for c in non_groupable_reasons if c in all_tests]
        if excluded_in_tests:
            print(
                f"\n⚠ {len(excluded_in_tests)} components excluded from grouping (each needs individual build):"
            )
            # Group by reason to show summary
            direct_bus = [
                c
                for c in excluded_in_tests
                if "NEEDS MIGRATION" in non_groupable_reasons.get(c, "")
            ]
            if direct_bus:
                print(
                    f"\n  ⚠⚠⚠ {len(direct_bus)} DEFINE BUSES DIRECTLY - NEED MIGRATION TO PACKAGES:"
                )
                for comp in sorted(direct_bus):
                    print(f"    - {comp}")

            other_reasons = [
                c
                for c in excluded_in_tests
                if "NEEDS MIGRATION" not in non_groupable_reasons.get(c, "")
            ]
            if other_reasons and len(other_reasons) <= 10:
                print("\n  Other non-groupable components:")
                for comp in sorted(other_reasons):
                    reason = non_groupable_reasons[comp]
                    print(f"    - {comp}: {reason}")
            elif other_reasons:
                print(
                    f"\n  Other non-groupable components: {len(other_reasons)} components"
                )

    # Distribute no_buses components into other groups to maximize efficiency
    # Components with no buses can merge with any bus group since they have no conflicting requirements
    no_buses_by_platform: dict[str, list[str]] = {}
    for (platform, signature), components in list(grouped_components.items()):
        if signature == NO_BUSES_SIGNATURE:
            no_buses_by_platform[platform] = components
            # Remove from grouped_components - we'll distribute them
            del grouped_components[(platform, signature)]

    # Distribute no_buses components into existing groups for each platform
    for platform, no_buses_comps in no_buses_by_platform.items():
        # Find all non-empty groups for this platform (excluding no_buses)
        platform_groups = [
            (sig, comps)
            for (plat, sig), comps in grouped_components.items()
            if plat == platform and sig != NO_BUSES_SIGNATURE
        ]

        if platform_groups:
            # Distribute no_buses components round-robin across existing groups
            for i, comp in enumerate(no_buses_comps):
                sig, _ = platform_groups[i % len(platform_groups)]
                grouped_components[(platform, sig)].append(comp)
        else:
            # No other groups for this platform - keep no_buses components together
            grouped_components[(platform, NO_BUSES_SIGNATURE)] = no_buses_comps

    # Split groups that exceed platform-specific maximum sizes
    # ESP8266 has limited IRAM and can't handle large component groups
    split_groups = {}
    for (platform, signature), components in list(grouped_components.items()):
        max_size = PLATFORM_MAX_GROUP_SIZE.get(platform)
        if max_size and len(components) > max_size:
            # Split this group into smaller groups
            print(
                f"\n  ℹ️ Splitting {platform} group (signature: {signature}) "
                f"from {len(components)} to max {max_size} components per group"
            )
            # Remove original group
            del grouped_components[(platform, signature)]
            # Create split groups
            for i in range(0, len(components), max_size):
                split_components = components[i : i + max_size]
                # Create unique signature for each split group
                split_signature = f"{signature}_split{i // max_size + 1}"
                split_groups[(platform, split_signature)] = split_components
    # Add split groups back
    grouped_components.update(split_groups)

    groups_to_test = []
    individual_tests = set()  # Use set to avoid duplicates

    for (platform, signature), components in sorted(grouped_components.items()):
        if len(components) > 1:
            groups_to_test.append((platform, signature, components))
        # Note: Don't add single-component groups to individual_tests here
        # They'll be added below when we check for ungrouped components

    # Add components that weren't grouped on any platform
    for component in all_tests:
        if component not in [c for _, _, comps in groups_to_test for c in comps]:
            individual_tests.add(component)

    if groups_to_test:
        print(f"\n✓ {len(groups_to_test)} groups will be tested together:")
        for platform, signature, components in groups_to_test:
            component_list = ", ".join(sorted(components))
            print(f"  [{platform}] [{signature}]: {component_list}")
            print(
                f"    → {len(components)} components in 1 build (saves {len(components) - 1} builds)"
            )

    if individual_tests:
        print(f"\n○ {len(individual_tests)} components will be tested individually:")
        sorted_individual = sorted(individual_tests)
        for comp in sorted_individual[:10]:
            print(f"  - {comp}")
        if len(individual_tests) > 10:
            print(f"  ... and {len(individual_tests) - 10} more")

    # Calculate actual build counts based on test files, not component counts
    # Without grouping: every test file would be built separately
    total_test_files = sum(len(test_files) for test_files in all_tests.values())

    # With grouping:
    # - 1 build per group (regardless of how many components)
    # - Individual components still need all their platform builds
    individual_test_file_count = sum(
        len(all_tests[comp]) for comp in individual_tests if comp in all_tests
    )

    total_grouped_components = sum(len(comps) for _, _, comps in groups_to_test)
    total_builds_with_grouping = len(groups_to_test) + individual_test_file_count
    builds_saved = total_test_files - total_builds_with_grouping

    print(f"\n{'=' * 80}")
    print(
        f"Summary: {total_builds_with_grouping} builds total (vs {total_test_files} without grouping)"
    )
    print(
        f"  • {len(groups_to_test)} grouped builds ({total_grouped_components} components)"
    )
    print(
        f"  • {individual_test_file_count} individual builds ({len(individual_tests)} components)"
    )
    if total_test_files > 0:
        reduction_pct = (builds_saved / total_test_files) * 100
        print(f"  • Saves {builds_saved} builds ({reduction_pct:.1f}% reduction)")
    print("=" * 80 + "\n")

    # Execute grouped tests
    for (platform, signature), components in grouped_components.items():
        # Only group if we have multiple components with same signature
        if len(components) <= 1:
            continue

        # Filter out components not in our test list
        components_to_group = [c for c in components if c in all_tests]
        if len(components_to_group) <= 1:
            continue

        # Get platform base files
        if platform not in platform_bases:
            continue

        for base_file in platform_bases[platform]:
            platform_with_version = extract_platform_with_version(base_file)

            # Skip if platform filter doesn't match
            if platform_filter and platform != platform_filter:
                continue
            if (
                platform_filter
                and platform_with_version != platform_filter
                and not platform_with_version.startswith(f"{platform_filter}-")
            ):
                continue

            # Run grouped test
            success, cmd_str = run_grouped_test(
                components=components_to_group,
                platform=platform,
                platform_with_version=platform_with_version,
                base_file=base_file,
                build_dir=build_dir,
                tests_dir=tests_dir,
                esphome_command=esphome_command,
                continue_on_fail=continue_on_fail,
            )

            # Mark all components as tested
            for comp in components_to_group:
                tested_components.add((comp, platform_with_version))

            # Record result for each component - show all components in grouped tests
            test_id = (
                f"GROUPED[{','.join(components_to_group)}].{platform_with_version}"
            )
            if success:
                passed_tests.append(test_id)
            else:
                failed_tests.append(test_id)
                failed_commands[test_id] = cmd_str

    return tested_components, passed_tests, failed_tests, failed_commands


def run_individual_component_test(
    component: str,
    test_file: Path,
    platform: str,
    platform_with_version: str,
    base_file: Path,
    build_dir: Path,
    esphome_command: str,
    continue_on_fail: bool,
    tested_components: set[tuple[str, str]],
    passed_tests: list[str],
    failed_tests: list[str],
    failed_commands: dict[str, str],
) -> None:
    """Run an individual component test if not already tested in a group.

    Args:
        component: Component name
        test_file: Test file path
        platform: Platform name
        platform_with_version: Platform with version
        base_file: Base file for platform
        build_dir: Build directory
        esphome_command: ESPHome command
        continue_on_fail: Whether to continue on failure
        tested_components: Set of already tested components
        passed_tests: List to append passed test IDs
        failed_tests: List to append failed test IDs
        failed_commands: Dict to store failed test commands
    """
    # Skip if already tested in a group
    if (component, platform_with_version) in tested_components:
        return

    test_name = test_file.stem.split(".")[0]
    success, cmd_str = run_esphome_test(
        component=component,
        test_file=test_file,
        platform=platform,
        platform_with_version=platform_with_version,
        base_file=base_file,
        build_dir=build_dir,
        esphome_command=esphome_command,
        continue_on_fail=continue_on_fail,
    )
    test_id = f"{component}.{test_name}.{platform_with_version}"
    if success:
        passed_tests.append(test_id)
    else:
        failed_tests.append(test_id)
        failed_commands[test_id] = cmd_str


def test_components(
    component_patterns: list[str],
    platform_filter: str | None,
    esphome_command: str,
    continue_on_fail: bool,
    enable_grouping: bool = True,
) -> int:
    """Test components with optional intelligent grouping.

    Args:
        component_patterns: List of component name patterns
        platform_filter: Optional platform to filter by
        esphome_command: ESPHome command (config/compile)
        continue_on_fail: Whether to continue on failure
        enable_grouping: Whether to enable component grouping

    Returns:
        Exit code (0 for success, 1 for failure)
    """
    # Setup paths
    repo_root = Path(__file__).parent.parent
    tests_dir = repo_root / "tests" / "components"
    build_components_dir = repo_root / "tests" / "test_build_components"
    build_dir = build_components_dir / "build"
    build_dir.mkdir(parents=True, exist_ok=True)

    # Get platform base files
    platform_bases = get_platform_base_files(build_components_dir)

    # Find all component tests
    all_tests = {}
    for pattern in component_patterns:
        all_tests.update(find_component_tests(tests_dir, pattern))

    if not all_tests:
        print(f"No components found matching: {component_patterns}")
        return 1

    print(f"Found {len(all_tests)} components to test")

    # Run tests
    failed_tests = []
    passed_tests = []
    tested_components = set()  # Track which components were tested in groups
    failed_commands = {}  # Track commands for failed tests

    # First, run grouped tests if grouping is enabled
    if enable_grouping:
        (
            tested_components,
            passed_tests,
            failed_tests,
            failed_commands,
        ) = run_grouped_component_tests(
            all_tests=all_tests,
            platform_filter=platform_filter,
            platform_bases=platform_bases,
            tests_dir=tests_dir,
            build_dir=build_dir,
            esphome_command=esphome_command,
            continue_on_fail=continue_on_fail,
        )

    # Then run individual tests for components not in groups
    for component, test_files in sorted(all_tests.items()):
        for test_file in test_files:
            test_name, platform = parse_test_filename(test_file)

            # Handle "all" platform tests
            if platform == "all":
                # Run for all platforms
                for plat, base_files in platform_bases.items():
                    if platform_filter and plat != platform_filter:
                        continue

                    for base_file in base_files:
                        platform_with_version = extract_platform_with_version(base_file)
                        run_individual_component_test(
                            component=component,
                            test_file=test_file,
                            platform=plat,
                            platform_with_version=platform_with_version,
                            base_file=base_file,
                            build_dir=build_dir,
                            esphome_command=esphome_command,
                            continue_on_fail=continue_on_fail,
                            tested_components=tested_components,
                            passed_tests=passed_tests,
                            failed_tests=failed_tests,
                            failed_commands=failed_commands,
                        )
            else:
                # Platform-specific test
                if platform_filter and platform != platform_filter:
                    continue

                if platform not in platform_bases:
                    print(f"No base file for platform: {platform}")
                    continue

                for base_file in platform_bases[platform]:
                    platform_with_version = extract_platform_with_version(base_file)

                    # Skip if requested platform doesn't match
                    if (
                        platform_filter
                        and platform_with_version != platform_filter
                        and not platform_with_version.startswith(f"{platform_filter}-")
                    ):
                        continue

                    run_individual_component_test(
                        component=component,
                        test_file=test_file,
                        platform=platform,
                        platform_with_version=platform_with_version,
                        base_file=base_file,
                        build_dir=build_dir,
                        esphome_command=esphome_command,
                        continue_on_fail=continue_on_fail,
                        tested_components=tested_components,
                        passed_tests=passed_tests,
                        failed_tests=failed_tests,
                        failed_commands=failed_commands,
                    )

    # Print summary
    print("\n" + "=" * 80)
    print(f"Test Summary: {len(passed_tests)} passed, {len(failed_tests)} failed")
    print("=" * 80)

    if failed_tests:
        print("\nFailed tests:")
        for test in failed_tests:
            print(f"  - {test}")

        # Print failed commands at the end for easy copy-paste from CI logs
        print("\n" + "=" * 80)
        print("Failed test commands (copy-paste to reproduce locally):")
        print("=" * 80)
        for test in failed_tests:
            if test in failed_commands:
                print(f"\n# {test}")
                print(failed_commands[test])
        print()

        return 1

    return 0


def main() -> int:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Test ESPHome component builds with intelligent grouping"
    )
    parser.add_argument(
        "-e",
        "--esphome-command",
        default="compile",
        choices=["config", "compile", "clean"],
        help="ESPHome command to run (default: compile)",
    )
    parser.add_argument(
        "-c",
        "--components",
        default="*",
        help="Component pattern(s) to test (default: *). Comma-separated.",
    )
    parser.add_argument(
        "-t",
        "--target",
        help="Target platform to test (e.g., esp32-idf)",
    )
    parser.add_argument(
        "-f",
        "--continue-on-fail",
        action="store_true",
        help="Continue testing even if a test fails",
    )
    parser.add_argument(
        "--no-grouping",
        action="store_true",
        help="Disable component grouping (test each component individually)",
    )

    args = parser.parse_args()

    # Parse component patterns
    component_patterns = [p.strip() for p in args.components.split(",")]

    return test_components(
        component_patterns=component_patterns,
        platform_filter=args.target,
        esphome_command=args.esphome_command,
        continue_on_fail=args.continue_on_fail,
        enable_grouping=not args.no_grouping,
    )


if __name__ == "__main__":
    sys.exit(main())
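Since the new entry point keeps the bash script's `-e`, `-c`, and `-t` flags and adds grouping controls, a local run might look like the sketch below; the component names and target are examples only.

```bash
# Compile two components on one target, continue past failures,
# and disable grouping to mirror an isolated build
python3 script/test_build_components.py -e compile -c wifi,logger -t esp32-idf -f --no-grouping
```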
script/test_component_grouping.py (Executable file, 227 lines)
@@ -0,0 +1,227 @@
#!/usr/bin/env python3
"""Test component grouping by finding and testing groups of components.

This script analyzes components, finds groups that can be tested together,
and runs test builds for those groups.
"""

from __future__ import annotations

import argparse
from pathlib import Path
import subprocess
import sys

# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from script.analyze_component_buses import (
    analyze_all_components,
    group_components_by_signature,
)


def test_component_group(
    components: list[str],
    platform: str,
    esphome_command: str = "compile",
    dry_run: bool = False,
) -> bool:
    """Test a group of components together.

    Args:
        components: List of component names to test together
        platform: Platform to test on (e.g., "esp32-idf")
        esphome_command: ESPHome command to run (config/compile/clean)
        dry_run: If True, only print the command without running it

    Returns:
        True if test passed, False otherwise
    """
    components_str = ",".join(components)
    cmd = [
        "./script/test_build_components",
        "-c",
        components_str,
        "-t",
        platform,
        "-e",
        esphome_command,
    ]

    print(f"\n{'=' * 80}")
    print(f"Testing {len(components)} components on {platform}:")
    for comp in components:
        print(f"  - {comp}")
    print(f"{'=' * 80}")
    print(f"Command: {' '.join(cmd)}\n")

    if dry_run:
        print("[DRY RUN] Skipping actual test")
        return True

    try:
        result = subprocess.run(cmd, check=False)
        return result.returncode == 0
    except Exception as e:
        print(f"Error running test: {e}")
        return False


def main() -> None:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Test component grouping by finding and testing groups"
    )
    parser.add_argument(
        "--platform",
        "-p",
        default="esp32-idf",
        help="Platform to test (default: esp32-idf)",
    )
    parser.add_argument(
        "-e",
        "--esphome-command",
        default="compile",
        choices=["config", "compile", "clean"],
        help="ESPHome command to run (default: compile)",
    )
    parser.add_argument(
        "--all",
        action="store_true",
        help="Test all components (sets --min-size=1, --max-size=10000, --max-groups=10000)",
    )
    parser.add_argument(
        "--min-size",
        type=int,
        default=3,
        help="Minimum group size to test (default: 3)",
    )
    parser.add_argument(
        "--max-size",
        type=int,
        default=10,
        help="Maximum group size to test (default: 10)",
    )
    parser.add_argument(
        "--max-groups",
        type=int,
        default=5,
        help="Maximum number of groups to test (default: 5)",
    )
    parser.add_argument(
        "--signature",
        "-s",
        help="Only test groups with this bus signature (e.g., 'spi', 'i2c', 'uart')",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print commands without running them",
    )

    args = parser.parse_args()

    # If --all is specified, test all components without grouping
    if args.all:
        # Get all components from tests/components directory
        components_dir = Path("tests/components")
        all_components = sorted(
            [d.name for d in components_dir.iterdir() if d.is_dir()]
        )

        if not all_components:
            print(f"\nNo components found in {components_dir}")
            return

        print(f"\nTesting all {len(all_components)} components together")

        success = test_component_group(
            all_components, args.platform, args.esphome_command, args.dry_run
        )

        # Print summary
        print(f"\n{'=' * 80}")
        print("TEST SUMMARY")
        print(f"{'=' * 80}")
        status = "✅ PASS" if success else "❌ FAIL"
        print(f"{status} All components: {len(all_components)} components")

        if not args.dry_run and not success:
            sys.exit(1)
        return

    print("Analyzing all components...")
    components, non_groupable, _ = analyze_all_components(Path("tests/components"))

    print(f"Found {len(components)} components, {len(non_groupable)} non-groupable")

    # Group components by signature for the platform
    groups = group_components_by_signature(components, args.platform)

    # Filter and sort groups
    filtered_groups = []
    for signature, comp_list in groups.items():
        # Filter by signature if specified
        if args.signature and signature != args.signature:
            continue

        # Remove non-groupable components
        comp_list = [c for c in comp_list if c not in non_groupable]

        # Filter by minimum size
        if len(comp_list) < args.min_size:
            continue

        # If group is larger than max_size, we'll take a subset later
        filtered_groups.append((signature, comp_list))

    # Sort by group size (largest first)
    filtered_groups.sort(key=lambda x: len(x[1]), reverse=True)

    # Limit number of groups
    filtered_groups = filtered_groups[: args.max_groups]

    if not filtered_groups:
        print("\nNo groups found matching criteria:")
        print(f"  - Platform: {args.platform}")
        print(f"  - Size: {args.min_size}-{args.max_size}")
        if args.signature:
            print(f"  - Signature: {args.signature}")
        return

    print(f"\nFound {len(filtered_groups)} groups to test:")
    for signature, comp_list in filtered_groups:
        print(f"  [{signature}]: {len(comp_list)} components")

    # Test each group
    results = []
    for signature, comp_list in filtered_groups:
        # Limit to max_size if group is larger
        if len(comp_list) > args.max_size:
            comp_list = comp_list[: args.max_size]

        success = test_component_group(
            comp_list, args.platform, args.esphome_command, args.dry_run
        )
        results.append((signature, comp_list, success))

        if not args.dry_run and not success:
            print(f"\n❌ FAILED: {signature} group")
            break

    # Print summary
    print(f"\n{'=' * 80}")
    print("TEST SUMMARY")
    print(f"{'=' * 80}")
    for signature, comp_list, success in results:
        status = "✅ PASS" if success else "❌ FAIL"
        print(f"{status} [{signature}]: {len(comp_list)} components")

    # Exit with error if any tests failed
    if not args.dry_run and any(not success for _, _, success in results):
        sys.exit(1)


if __name__ == "__main__":
    main()
@@ -1,11 +1,4 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin: ${tx_pin}
-    rx_pin: ${rx_pin}
-    baud_rate: 9600
-
 sensor:
   - platform: a01nyub
     id: a01nyub_sensor
     name: a01nyub Distance
-    uart_id: uart_a01nyub

@@ -1,3 +1,6 @@
+packages:
+  uart: !include ../../test_build_components/common/uart/esp32-c3-idf.yaml
+
 substitutions:
   tx_pin: GPIO4
   rx_pin: GPIO5

@@ -1,5 +1,8 @@
 substitutions:
-  tx_pin: GPIO17
-  rx_pin: GPIO16
+  tx_pin: GPIO4
+  rx_pin: GPIO5
 
+packages:
+  uart: !include ../../test_build_components/common/uart/esp32-idf.yaml
+
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
+packages:
+  uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
+packages:
+  uart: !include ../../test_build_components/common/uart/rp2040-ard.yaml
 
 <<: !include common.yaml
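The shared bus files referenced above resolve to `tests/test_build_components/common/` and are not part of this diff. As a hypothetical sketch only (the bus id, pins, and baud rate are assumptions, not taken from this commit), such a uart package presumably defines a single bus that the component config above now attaches to implicitly:

```yaml
# Hypothetical sketch of a shared uart package (e.g. common/uart/esp32-c3-idf.yaml).
# The real file is not shown in this diff; the id, pins, and baud rate are assumptions.
uart:
  - id: uart_bus          # assumed id; the a01nyub config above no longer names one
    tx_pin: ${tx_pin}     # the per-platform test files above still provide these substitutions
    rx_pin: ${rx_pin}
    baud_rate: 9600
```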
@@ -1,11 +1,4 @@
-uart:
-  - id: uart_a02yyuw
-    tx_pin: ${tx_pin}
-    rx_pin: ${rx_pin}
-    baud_rate: 9600
-
 sensor:
   - platform: a02yyuw
     id: a02yyuw_sensor
     name: a02yyuw Distance
-    uart_id: uart_a02yyuw

@@ -1,3 +1,6 @@
+packages:
+  uart: !include ../../test_build_components/common/uart/esp32-c3-idf.yaml
+
 substitutions:
   tx_pin: GPIO4
   rx_pin: GPIO5

@@ -1,5 +1,8 @@
 substitutions:
-  tx_pin: GPIO17
-  rx_pin: GPIO16
+  tx_pin: GPIO4
+  rx_pin: GPIO5
 
+packages:
+  uart: !include ../../test_build_components/common/uart/esp32-idf.yaml
+
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
+packages:
+  uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
+packages:
+  uart: !include ../../test_build_components/common/uart/rp2040-ard.yaml
 
 <<: !include common.yaml
@@ -1,6 +1,6 @@
 substitutions:
   step_pin: GPIO22
-  dir_pin: GPIO23
+  dir_pin: GPIO4
   sleep_pin: GPIO25
 
 <<: !include common.yaml

@@ -1,6 +1,6 @@
 substitutions:
   step_pin: GPIO1
   dir_pin: GPIO2
-  sleep_pin: GPIO5
+  sleep_pin: GPIO0
 
 <<: !include common.yaml

@@ -1,5 +1,5 @@
 substitutions:
-  gate_pin: GPIO18
-  zero_cross_pin: GPIO19
+  gate_pin: GPIO4
+  zero_cross_pin: GPIO5
 
 <<: !include common.yaml

@@ -1,5 +1,5 @@
 substitutions:
-  gate_pin: GPIO5
-  zero_cross_pin: GPIO4
+  gate_pin: GPIO0
+  zero_cross_pin: GPIO2
 
 <<: !include common.yaml
|
@@ -1,11 +0,0 @@
|
|||||||
sensor:
|
|
||||||
- id: my_sensor
|
|
||||||
platform: adc
|
|
||||||
name: ADC Test sensor
|
|
||||||
update_interval: "1:01"
|
|
||||||
attenuation: 2.5db
|
|
||||||
unit_of_measurement: "°C"
|
|
||||||
icon: "mdi:water-percent"
|
|
||||||
accuracy_decimals: 5
|
|
||||||
setup_priority: -100
|
|
||||||
force_update: true
|
|
@@ -1,7 +1,11 @@
|
|||||||
packages:
|
|
||||||
base: !include common.yaml
|
|
||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
- id: !extend my_sensor
|
- id: my_sensor
|
||||||
|
platform: adc
|
||||||
pin: P23
|
pin: P23
|
||||||
attenuation: !remove
|
name: ADC Test sensor
|
||||||
|
update_interval: "1:01"
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
icon: "mdi:water-percent"
|
||||||
|
accuracy_decimals: 5
|
||||||
|
setup_priority: -100
|
||||||
|
force_update: true
|
||||||
|
@@ -1,6 +1,12 @@
|
|||||||
packages:
|
|
||||||
base: !include common.yaml
|
|
||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
- id: !extend my_sensor
|
- id: my_sensor
|
||||||
pin: 4
|
platform: adc
|
||||||
|
pin: GPIO1
|
||||||
|
name: ADC Test sensor
|
||||||
|
update_interval: "1:01"
|
||||||
|
attenuation: 2.5db
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
icon: "mdi:water-percent"
|
||||||
|
accuracy_decimals: 5
|
||||||
|
setup_priority: -100
|
||||||
|
force_update: true
|
||||||
|
@@ -1,6 +1,12 @@
|
|||||||
packages:
|
|
||||||
base: !include common.yaml
|
|
||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
- id: !extend my_sensor
|
- id: my_sensor
|
||||||
|
platform: adc
|
||||||
pin: A0
|
pin: A0
|
||||||
|
name: ADC Test sensor
|
||||||
|
update_interval: "1:01"
|
||||||
|
attenuation: 2.5db
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
icon: "mdi:water-percent"
|
||||||
|
accuracy_decimals: 5
|
||||||
|
setup_priority: -100
|
||||||
|
force_update: true
|
||||||
|
@@ -1,6 +1,12 @@
|
|||||||
packages:
|
|
||||||
base: !include common.yaml
|
|
||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
- id: !extend my_sensor
|
- id: my_sensor
|
||||||
pin: GPIO50
|
platform: adc
|
||||||
|
pin: GPIO16
|
||||||
|
name: ADC Test sensor
|
||||||
|
update_interval: "1:01"
|
||||||
|
attenuation: 2.5db
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
icon: "mdi:water-percent"
|
||||||
|
accuracy_decimals: 5
|
||||||
|
setup_priority: -100
|
||||||
|
force_update: true
|
||||||
|
@@ -1,6 +1,12 @@
|
|||||||
packages:
|
|
||||||
base: !include common.yaml
|
|
||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
- id: !extend my_sensor
|
- id: my_sensor
|
||||||
pin: 1
|
platform: adc
|
||||||
|
pin: GPIO1
|
||||||
|
name: ADC Test sensor
|
||||||
|
update_interval: "1:01"
|
||||||
|
attenuation: 2.5db
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
icon: "mdi:water-percent"
|
||||||
|
accuracy_decimals: 5
|
||||||
|
setup_priority: -100
|
||||||
|
force_update: true
|
||||||
|
@@ -1,6 +1,12 @@
|
|||||||
packages:
|
|
||||||
base: !include common.yaml
|
|
||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
- id: !extend my_sensor
|
- id: my_sensor
|
||||||
pin: 1
|
platform: adc
|
||||||
|
pin: GPIO1
|
||||||
|
name: ADC Test sensor
|
||||||
|
update_interval: "1:01"
|
||||||
|
attenuation: 2.5db
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
icon: "mdi:water-percent"
|
||||||
|
accuracy_decimals: 5
|
||||||
|
setup_priority: -100
|
||||||
|
force_update: true
|
||||||
|
@@ -1,7 +1,11 @@
|
|||||||
packages:
|
|
||||||
base: !include common.yaml
|
|
||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
- id: !extend my_sensor
|
- id: my_sensor
|
||||||
|
platform: adc
|
||||||
pin: VCC
|
pin: VCC
|
||||||
attenuation: !remove
|
name: ADC Test sensor
|
||||||
|
update_interval: "1:01"
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
icon: "mdi:water-percent"
|
||||||
|
accuracy_decimals: 5
|
||||||
|
setup_priority: -100
|
||||||
|
force_update: true
|
||||||
|
@@ -1,7 +1,11 @@
|
|||||||
packages:
|
|
||||||
base: !include common.yaml
|
|
||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
- id: !extend my_sensor
|
- id: my_sensor
|
||||||
pin: PA0
|
platform: adc
|
||||||
attenuation: !remove
|
pin: A5
|
||||||
|
name: ADC Test sensor
|
||||||
|
update_interval: "1:01"
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
icon: "mdi:water-percent"
|
||||||
|
accuracy_decimals: 5
|
||||||
|
setup_priority: -100
|
||||||
|
force_update: true
|
||||||
|
@@ -1,7 +1,11 @@
|
|||||||
packages:
|
|
||||||
base: !include common.yaml
|
|
||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
- id: !extend my_sensor
|
- id: my_sensor
|
||||||
|
platform: adc
|
||||||
pin: VCC
|
pin: VCC
|
||||||
attenuation: !remove
|
name: ADC Test sensor
|
||||||
|
update_interval: "1:01"
|
||||||
|
unit_of_measurement: "°C"
|
||||||
|
icon: "mdi:water-percent"
|
||||||
|
accuracy_decimals: 5
|
||||||
|
setup_priority: -100
|
||||||
|
force_update: true
|
||||||
|
@@ -1,9 +1,3 @@
-spi:
-  - id: spi_adc128s102
-    clk_pin: ${clk_pin}
-    mosi_pin: ${mosi_pin}
-    miso_pin: ${miso_pin}
-
 adc128s102:
   cs_pin: ${cs_pin}
   id: adc128s102_adc

@@ -1,7 +1,7 @@
 substitutions:
-  clk_pin: GPIO6
-  mosi_pin: GPIO7
-  miso_pin: GPIO5
   cs_pin: GPIO2
 
+packages:
+  spi: !include ../../test_build_components/common/spi/esp32-c3-idf.yaml
+
 <<: !include common.yaml

@@ -1,7 +1,7 @@
 substitutions:
-  clk_pin: GPIO16
-  mosi_pin: GPIO17
-  miso_pin: GPIO15
   cs_pin: GPIO12
 
+packages:
+  spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
+
 <<: !include common.yaml

@@ -1,7 +1,10 @@
 substitutions:
-  clk_pin: GPIO14
-  mosi_pin: GPIO13
-  miso_pin: GPIO12
+  clk_pin: GPIO0
+  mosi_pin: GPIO2
+  miso_pin: GPIO16
   cs_pin: GPIO15
 
+packages:
+  spi: !include ../../test_build_components/common/spi/esp8266-ard.yaml
+
 <<: !include common.yaml

@@ -4,4 +4,7 @@ substitutions:
   miso_pin: GPIO4
   cs_pin: GPIO5
 
+packages:
+  spi: !include ../../test_build_components/common/spi/rp2040-ard.yaml
+
 <<: !include common.yaml
@@ -1,11 +1,6 @@
-i2c:
-  - id: i2c_ade7880
-    scl: ${scl_pin}
-    sda: ${sda_pin}
-
 sensor:
   - platform: ade7880
-    i2c_id: i2c_ade7880
+    i2c_id: i2c_bus
     irq0_pin: ${irq0_pin}
     irq1_pin: ${irq1_pin}
     reset_pin: ${reset_pin}

@@ -1,8 +1,9 @@
 substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
   irq0_pin: GPIO6
   irq1_pin: GPIO7
-  reset_pin: GPIO10
+  reset_pin: GPIO9
 
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-c3-idf.yaml
+
 <<: !include common.yaml

@@ -1,8 +1,9 @@
 substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
   irq0_pin: GPIO13
   irq1_pin: GPIO15
-  reset_pin: GPIO16
+  reset_pin: GPIO12
 
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
+
 <<: !include common.yaml

@@ -1,8 +1,9 @@
 substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
   irq0_pin: GPIO13
   irq1_pin: GPIO15
   reset_pin: GPIO16
 
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp8266-ard.yaml
+
 <<: !include common.yaml

@@ -1,8 +1,9 @@
 substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
   irq0_pin: GPIO13
   irq1_pin: GPIO15
   reset_pin: GPIO16
 
+packages:
+  i2c: !include ../../test_build_components/common/i2c/rp2040-ard.yaml
+
 <<: !include common.yaml
@@ -1,20 +1,13 @@
-i2c:
-  - id: i2c_ade7953
-    scl: ${scl_pin}
-    sda: ${sda_pin}
-
 sensor:
   - platform: ade7953_i2c
+    i2c_id: i2c_bus
     irq_pin: ${irq_pin}
     voltage:
       name: ADE7953 Voltage
-      id: ade7953_voltage
     current_a:
       name: ADE7953 Current A
-      id: ade7953_current_a
     current_b:
       name: ADE7953 Current B
-      id: ade7953_current_b
     power_factor_a:
       name: ADE7953 Power Factor A
     power_factor_b:

@@ -1,6 +1,7 @@
 substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
   irq_pin: GPIO6
 
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-c3-idf.yaml
+
 <<: !include common.yaml

@@ -1,6 +1,7 @@
 substitutions:
-  scl_pin: GPIO16
-  sda_pin: GPIO17
   irq_pin: GPIO15
 
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
+
 <<: !include common.yaml

@@ -1,6 +1,7 @@
 substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
   irq_pin: GPIO15
 
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp8266-ard.yaml
+
 <<: !include common.yaml

@@ -1,6 +1,7 @@
 substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
   irq_pin: GPIO6
 
+packages:
+  i2c: !include ../../test_build_components/common/i2c/rp2040-ard.yaml
+
 <<: !include common.yaml
@@ -1,9 +1,3 @@
-spi:
-  - id: spi_ade7953
-    clk_pin: ${clk_pin}
-    mosi_pin: ${mosi_pin}
-    miso_pin: ${miso_pin}
-
 sensor:
   - platform: ade7953_spi
     cs_pin: ${cs_pin}

@@ -1,8 +1,7 @@
 substitutions:
-  clk_pin: GPIO6
-  mosi_pin: GPIO7
-  miso_pin: GPIO5
   irq_pin: GPIO9
   cs_pin: GPIO8
+packages:
+  spi: !include ../../test_build_components/common/spi/esp32-c3-idf.yaml
 
 <<: !include common.yaml

@@ -1,8 +1,8 @@
 substitutions:
-  clk_pin: GPIO16
-  mosi_pin: GPIO17
-  miso_pin: GPIO15
   irq_pin: GPIO13
   cs_pin: GPIO5
 
+packages:
+  spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
+
 <<: !include common.yaml

@@ -1,8 +1,11 @@
 substitutions:
-  clk_pin: GPIO14
-  mosi_pin: GPIO13
-  miso_pin: GPIO12
+  clk_pin: GPIO0
+  mosi_pin: GPIO2
+  miso_pin: GPIO16
   irq_pin: GPIO5
   cs_pin: GPIO15
 
+packages:
+  spi: !include ../../test_build_components/common/spi/esp8266-ard.yaml
+
 <<: !include common.yaml

@@ -5,4 +5,7 @@ substitutions:
   irq_pin: GPIO5
   cs_pin: GPIO6
 
+packages:
+  spi: !include ../../test_build_components/common/spi/rp2040-ard.yaml
+
 <<: !include common.yaml
@@ -1,9 +1,5 @@
-i2c:
-  - id: i2c_ads1115
-    scl: ${scl_pin}
-    sda: ${sda_pin}
-
 ads1115:
+  i2c_id: i2c_bus
   address: 0x48
 
 sensor:

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-c3-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO16
-  sda_pin: GPIO17
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp8266-ard.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/rp2040-ard.yaml
 
 <<: !include common.yaml
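The i2c-based configs above now reference `i2c_id: i2c_bus`, so the shared package they `!include` presumably defines a single bus with that id. A minimal hypothetical sketch (the file itself is not part of this diff; the pin assignments here are assumptions, not taken from the commit):

```yaml
# Hypothetical sketch of a shared i2c package (e.g. common/i2c/esp32-c3-idf.yaml).
# Only the bus id (i2c_bus) is grounded in the configs above; the pins are assumed.
i2c:
  - id: i2c_bus
    scl: GPIO5
    sda: GPIO4
```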
@@ -1,9 +1,3 @@
-i2c:
-  - id: i2c_ags10
-    scl: ${scl_pin}
-    sda: ${sda_pin}
-    frequency: 10kHz
-
 sensor:
   - platform: ags10
     id: ags10_1

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c_low_freq: !include ../../test_build_components/common/i2c_low_freq/esp32-c3-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO16
-  sda_pin: GPIO17
+packages:
+  i2c_low_freq: !include ../../test_build_components/common/i2c_low_freq/esp32-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c_low_freq: !include ../../test_build_components/common/i2c_low_freq/esp8266-ard.yaml
 
 <<: !include common.yaml
@@ -1,10 +1,6 @@
-i2c:
-  - id: i2c_aht10
-    scl: ${scl_pin}
-    sda: ${sda_pin}
-
 sensor:
   - platform: aht10
+    i2c_id: i2c_bus
     temperature:
       name: Temperature
     humidity:

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-c3-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO16
-  sda_pin: GPIO17
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp8266-ard.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/rp2040-ard.yaml
 
 <<: !include common.yaml
@@ -6,10 +6,6 @@ esphome:
       - audio_dac.set_volume:
           volume: 50%
 
-i2c:
-  - id: i2c_aic3204
-    scl: ${scl_pin}
-    sda: ${sda_pin}
-
 audio_dac:
   - platform: aic3204
+    i2c_id: i2c_bus

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-c3-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO16
-  sda_pin: GPIO17
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp8266-ard.yaml
 
 <<: !include common.yaml
@@ -1 +1,4 @@
+packages:
+  ble: !include ../../test_build_components/common/ble/esp32-c3-idf.yaml
+
 <<: !include common.yaml

@@ -1 +1,4 @@
+packages:
+  ble: !include ../../test_build_components/common/ble/esp32-idf.yaml
+
 <<: !include common.yaml

@@ -1 +1,4 @@
+packages:
+  ble: !include ../../test_build_components/common/ble/esp32-c3-idf.yaml
+
 <<: !include common.yaml

@@ -1 +1,4 @@
+packages:
+  ble: !include ../../test_build_components/common/ble/esp32-idf.yaml
+
 <<: !include common.yaml

@@ -1 +1,4 @@
+packages:
+  ble: !include ../../test_build_components/common/ble/esp32-c3-idf.yaml
+
 <<: !include common.yaml

@@ -1 +1,4 @@
+packages:
+  ble: !include ../../test_build_components/common/ble/esp32-idf.yaml
+
 <<: !include common.yaml
@@ -1,10 +1,6 @@
-i2c:
-  - id: i2c_am2315c
-    scl: ${scl_pin}
-    sda: ${sda_pin}
-
 sensor:
   - platform: am2315c
+    i2c_id: i2c_bus
     temperature:
       name: Temperature
     humidity:

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-c3-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO16
-  sda_pin: GPIO17
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp8266-ard.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/rp2040-ard.yaml
 
 <<: !include common.yaml
@@ -1,10 +1,6 @@
-i2c:
-  - id: i2c_am2320
-    scl: ${scl_pin}
-    sda: ${sda_pin}
-
 sensor:
   - platform: am2320
+    i2c_id: i2c_bus
     temperature:
       name: Temperature
     humidity:

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-c3-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO16
-  sda_pin: GPIO17
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/esp8266-ard.yaml
 
 <<: !include common.yaml

@@ -1,5 +1,4 @@
-substitutions:
-  scl_pin: GPIO5
-  sda_pin: GPIO4
+packages:
+  i2c: !include ../../test_build_components/common/i2c/rp2040-ard.yaml
 
 <<: !include common.yaml
@@ -1 +1,4 @@
+packages:
+  ble: !include ../../test_build_components/common/ble/esp32-c3-idf.yaml
+
 <<: !include common.yaml

@@ -1 +1,4 @@
+packages:
+  ble: !include ../../test_build_components/common/ble/esp32-idf.yaml
+
 <<: !include common.yaml
@@ -1,17 +1,13 @@
-spi:
-  - id: spi_main_lcd
-    clk_pin: 6
-    mosi_pin: 7
-    miso_pin: 5
+packages:
+  spi: !include ../../test_build_components/common/spi/esp32-c3-idf.yaml
+  animation: !include common.yaml
 
 display:
   - platform: ili9xxx
     id: main_lcd
+    spi_id: spi_bus
     model: ili9342
     cs_pin: 8
     dc_pin: 9
     reset_pin: 10
     invert_colors: false
-
-packages:
-  animation: !include common.yaml

@@ -1,17 +1,13 @@
-spi:
-  - id: spi_main_lcd
-    clk_pin: 16
-    mosi_pin: 17
-    miso_pin: 15
+packages:
+  spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
+  animation: !include common.yaml
 
 display:
   - platform: ili9xxx
     id: main_lcd
+    spi_id: spi_bus
     model: ili9342
     cs_pin: 12
     dc_pin: 13
     reset_pin: 21
     invert_colors: false
-
-packages:
-  animation: !include common.yaml

@@ -1,17 +1,13 @@
-spi:
-  - id: spi_main_lcd
-    clk_pin: 14
-    mosi_pin: 13
-    miso_pin: 12
+packages:
+  spi: !include ../../test_build_components/common/spi/esp8266-ard.yaml
+  animation: !include common.yaml
 
 display:
   - platform: ili9xxx
     id: main_lcd
+    spi_id: spi_bus
     model: ili9342
     cs_pin: 5
     dc_pin: 15
     reset_pin: 16
     invert_colors: false
-
-packages:
-  animation: !include common.yaml

@@ -1,17 +1,13 @@
-spi:
-  - id: spi_main_lcd
-    clk_pin: 2
-    mosi_pin: 3
-    miso_pin: 4
+packages:
+  spi: !include ../../test_build_components/common/spi/rp2040-ard.yaml
+  animation: !include common.yaml
 
 display:
   - platform: ili9xxx
     id: main_lcd
+    spi_id: spi_bus
     model: ili9342
     cs_pin: 20
     dc_pin: 21
     reset_pin: 22
     invert_colors: false
-
-packages:
-  animation: !include common.yaml
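Similarly, the display configs above now set `spi_id: spi_bus`, so the shared spi packages presumably define a bus with that id. A hypothetical sketch (the shared file is not shown in this diff; the pins are assumptions):

```yaml
# Hypothetical sketch of a shared spi package (e.g. common/spi/esp32-c3-idf.yaml).
# Only the bus id (spi_bus) is grounded in the configs above; the pins are assumed.
spi:
  - id: spi_bus
    clk_pin: GPIO6
    mosi_pin: GPIO7
    miso_pin: GPIO5
```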
Some files were not shown because too many files have changed in this diff.