fix
script/analyze_component_buses.py (normal file, 268 lines added)
@@ -0,0 +1,268 @@
#!/usr/bin/env python3
"""Analyze component test files to detect which common bus configs they use.

This script scans component test files and extracts which common bus configurations
(i2c, spi, uart, etc.) are included via the packages mechanism. This information
is used to group components that can be tested together.

Components can only be grouped together if they use the EXACT SAME set of common
bus configurations, ensuring that merged configs are compatible.

Example output:
{
    "component1": {
        "esp32-ard": ["i2c", "uart_19200"],
        "esp32-idf": ["i2c", "uart_19200"]
    },
    "component2": {
        "esp32-ard": ["spi"],
        "esp32-idf": ["spi"]
    }
}
"""

from __future__ import annotations

import argparse
import json
from pathlib import Path
import re
import sys

# Path to common bus configs
COMMON_BUS_PATH = Path("tests/test_build_components/common")

# Valid common bus config directories
VALID_BUS_CONFIGS = {
    "ble",
    "i2c",
    "i2c_low_freq",
    "qspi",
    "spi",
    "uart",
    "uart_115200",
    "uart_1200",
    "uart_1200_even",
    "uart_19200",
    "uart_38400",
    "uart_4800",
    "uart_4800_even",
    "uart_9600_even",
}


def extract_common_buses(yaml_file: Path) -> set[str]:
    """Extract which common bus configs are included in a YAML test file.

    Args:
        yaml_file: Path to the component test YAML file

    Returns:
        Set of common bus config names (e.g., {'i2c', 'uart_19200'})
    """
    if not yaml_file.exists():
        return set()

    try:
        content = yaml_file.read_text()
    except Exception:
        return set()

    buses = set()

    # Pattern to match package includes for common bus configs
    # Matches: !include ../../test_build_components/common/{bus}/{platform}.yaml
    pattern = r"!include\s+\.\./\.\./test_build_components/common/([^/]+)/"

    for match in re.finditer(pattern, content):
        bus_name = match.group(1)
        if bus_name in VALID_BUS_CONFIGS:
            buses.add(bus_name)

    return buses
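
# Illustration (hypothetical test file): a tests/components/<name>/test.esp32-ard.yaml
# containing
#   packages:
#     i2c: !include ../../test_build_components/common/i2c/esp32-ard.yaml
#     uart: !include ../../test_build_components/common/uart_19200/esp32-ard.yaml
# would yield {"i2c", "uart_19200"} from extract_common_buses().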


def analyze_component(component_dir: Path) -> dict[str, list[str]]:
    """Analyze a component directory to find which buses each platform uses.

    Args:
        component_dir: Path to the component's test directory

    Returns:
        Dictionary mapping platform to list of bus configs
        Example: {"esp32-ard": ["i2c", "spi"], "esp32-idf": ["i2c"]}
    """
    if not component_dir.is_dir():
        return {}

    platform_buses = {}

    # Find all test.*.yaml files
    for test_file in component_dir.glob("test.*.yaml"):
        # Extract platform name from filename (e.g., test.esp32-ard.yaml -> esp32-ard)
        platform = test_file.stem.replace("test.", "")

        buses = extract_common_buses(test_file)
        if buses:
            # Sort for consistent comparison
            platform_buses[platform] = sorted(buses)

    return platform_buses
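
# Example: a component directory with test.esp32-ard.yaml and test.esp32-idf.yaml that
# both include the common i2c package yields {"esp32-ard": ["i2c"], "esp32-idf": ["i2c"]}.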


def analyze_all_components(
    tests_dir: Path | None = None,
) -> dict[str, dict[str, list[str]]]:
    """Analyze all component test directories.

    Args:
        tests_dir: Path to tests/components directory (defaults to auto-detect)

    Returns:
        Dictionary mapping component name to platform->buses mapping
    """
    if tests_dir is None:
        tests_dir = Path("tests/components")

    if not tests_dir.exists():
        print(f"Error: {tests_dir} does not exist", file=sys.stderr)
        return {}

    components = {}

    for component_dir in sorted(tests_dir.iterdir()):
        if not component_dir.is_dir():
            continue

        component_name = component_dir.name
        platform_buses = analyze_component(component_dir)

        if platform_buses:
            components[component_name] = platform_buses

    return components


def create_grouping_signature(
    platform_buses: dict[str, list[str]], platform: str
) -> str:
    """Create a signature string for grouping components.

    Components with the same signature can be grouped together for testing.

    Args:
        platform_buses: Mapping of platform to list of buses
        platform: The specific platform to create signature for

    Returns:
        Signature string (e.g., "i2c+uart_19200" or "spi")
    """
    buses = platform_buses.get(platform, [])
    if not buses:
        return ""
    return "+".join(sorted(buses))


def group_components_by_signature(
    components: dict[str, dict[str, list[str]]], platform: str
) -> dict[str, list[str]]:
    """Group components by their bus signature for a specific platform.

    Args:
        components: Component analysis results from analyze_all_components()
        platform: Platform to group for (e.g., "esp32-ard")

    Returns:
        Dictionary mapping signature to list of component names
        Example: {"i2c+uart_19200": ["comp1", "comp2"], "spi": ["comp3"]}
    """
    signature_groups: dict[str, list[str]] = {}

    for component_name, platform_buses in components.items():
        if platform not in platform_buses:
            continue

        signature = create_grouping_signature(platform_buses, platform)
        if not signature:
            continue

        if signature not in signature_groups:
            signature_groups[signature] = []
        signature_groups[signature].append(component_name)

    return signature_groups
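
# Example (hypothetical component names): for
#   {"comp1": {"esp32-ard": ["i2c"]}, "comp2": {"esp32-ard": ["i2c"]}, "comp3": {"esp32-ard": ["spi"]}}
# group_components_by_signature(..., "esp32-ard") returns
#   {"i2c": ["comp1", "comp2"], "spi": ["comp3"]}.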


def main() -> None:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Analyze component test files to detect common bus usage"
    )
    parser.add_argument(
        "--components",
        "-c",
        nargs="+",
        help="Specific components to analyze (default: all)",
    )
    parser.add_argument(
        "--platform",
        "-p",
        help="Show grouping for a specific platform",
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output as JSON",
    )
    parser.add_argument(
        "--group",
        action="store_true",
        help="Show component groupings by bus signature",
    )

    args = parser.parse_args()

    # Analyze components
    tests_dir = Path("tests/components")

    if args.components:
        # Analyze only specified components
        components = {}
        for comp in args.components:
            comp_dir = tests_dir / comp
            platform_buses = analyze_component(comp_dir)
            if platform_buses:
                components[comp] = platform_buses
    else:
        # Analyze all components
        components = analyze_all_components(tests_dir)

    # Output results
    if args.group and args.platform:
        # Show groupings for a specific platform
        groups = group_components_by_signature(components, args.platform)

        if args.json:
            print(json.dumps(groups, indent=2))
        else:
            print(f"Component groupings for {args.platform}:")
            print()
            for signature, comp_list in sorted(groups.items()):
                print(f"  {signature}:")
                for comp in sorted(comp_list):
                    print(f"    - {comp}")
                print()
    elif args.json:
        # JSON output
        print(json.dumps(components, indent=2))
    else:
        # Human-readable output
        for component, platform_buses in sorted(components.items()):
            print(f"{component}:")
            for platform, buses in sorted(platform_buses.items()):
                bus_str = ", ".join(buses)
                print(f"  {platform}: {bus_str}")
            print()
        print(f"Total components analyzed: {len(components)}")


if __name__ == "__main__":
    main()
script/merge_component_configs.py (executable file, 267 lines added)
@@ -0,0 +1,267 @@
#!/usr/bin/env python3
"""Merge multiple component test configurations into a single test file.

This script combines multiple component test files that use the same common bus
configurations into a single merged test file. This allows testing multiple
compatible components together, reducing CI build time.

The merger handles:
- Component-specific substitutions (prefixing to avoid conflicts)
- Multiple instances of component configurations
- Shared common bus packages (included only once)
- Platform-specific configurations
- Proper YAML merging via ESPHome's built-in merge_config
"""

from __future__ import annotations

import argparse
from pathlib import Path
import re
import sys
from typing import Any

# Add esphome to path so we can import from it
sys.path.insert(0, str(Path(__file__).parent.parent))

from esphome import yaml_util
from esphome.config_helpers import merge_config


def load_yaml_file(yaml_file: Path) -> dict:
    """Load YAML file using ESPHome's YAML loader.

    Args:
        yaml_file: Path to the YAML file

    Returns:
        Parsed YAML as dictionary
    """
    if not yaml_file.exists():
        raise FileNotFoundError(f"YAML file not found: {yaml_file}")

    return yaml_util.load_yaml(yaml_file)


def extract_packages_from_yaml(data: dict) -> dict[str, str]:
    """Extract package includes from parsed YAML.

    Args:
        data: Parsed YAML dictionary

    Returns:
        Dictionary mapping package name to include path (as string representation)
    """
    if "packages" not in data:
        return {}

    packages = {}
    for name, value in data["packages"].items():
        # Store package info for comparison
        packages[name] = str(value)

    return packages
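
# The stringified values are only used to compare package sets between components
# (see merge_component_configs below); the exact representation of each !include
# entry does not matter as long as it is stable within one run.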


def prefix_substitutions_in_dict(
    data: Any, prefix: str, exclude: set[str] | None = None
) -> Any:
    """Recursively prefix all substitution references in a data structure.

    Args:
        data: YAML data structure (dict, list, or scalar)
        prefix: Prefix to add to substitution names
        exclude: Set of substitution names to exclude from prefixing

    Returns:
        Data structure with prefixed substitution references
    """
    if exclude is None:
        exclude = set()

    def replace_sub(text: str) -> str:
        """Replace substitution references in a string."""

        def replace_match(match):
            sub_name = match.group(1)
            if sub_name in exclude:
                return match.group(0)
            return f"${{{prefix}_{sub_name}}}"

        return re.sub(r"\$\{(\w+)\}", replace_match, text)

    if isinstance(data, dict):
        result = {}
        for key, value in data.items():
            result[key] = prefix_substitutions_in_dict(value, prefix, exclude)
        return result
    if isinstance(data, list):
        return [prefix_substitutions_in_dict(item, prefix, exclude) for item in data]
    if isinstance(data, str):
        return replace_sub(data)
    return data
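
# For example, prefix_substitutions_in_dict({"name": "${name}-test"}, "component1")
# returns {"name": "${component1_name}-test"}; only the ${var} form is rewritten,
# and names listed in `exclude` are left untouched.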


def prefix_ids_in_dict(data: Any, prefix: str) -> Any:
    """Recursively prefix all 'id' fields in a data structure.

    Args:
        data: YAML data structure (dict, list, or scalar)
        prefix: Prefix to add to IDs

    Returns:
        Data structure with prefixed IDs
    """
    if isinstance(data, dict):
        result = {}
        for key, value in data.items():
            if key == "id" and isinstance(value, str):
                # Prefix the ID value
                result[key] = f"{prefix}_{value}"
            else:
                # Recursively process the value
                result[key] = prefix_ids_in_dict(value, prefix)
        return result
    if isinstance(data, list):
        return [prefix_ids_in_dict(item, prefix) for item in data]
    return data
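
# Example: prefix_ids_in_dict({"sensor": [{"id": "my_sensor"}]}, "component1")
# returns {"sensor": [{"id": "component1_my_sensor"}]}. Note that this helper is not
# called by merge_component_configs() below (see the note there on why IDs are left
# unprefixed); it is available if explicit ID renaming is ever needed.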


def merge_component_configs(
    component_names: list[str],
    platform: str,
    tests_dir: Path,
    output_file: Path,
) -> None:
    """Merge multiple component test configs into a single file.

    Args:
        component_names: List of component names to merge
        platform: Platform to merge for (e.g., "esp32-ard")
        tests_dir: Path to tests/components directory
        output_file: Path to output merged config file
    """
    if not component_names:
        raise ValueError("No components specified")

    # Track packages to ensure they're identical
    all_packages = None

    # Start with empty config
    merged_config_data = {}

    # Process each component
    for comp_name in component_names:
        comp_dir = tests_dir / comp_name
        test_file = comp_dir / f"test.{platform}.yaml"

        if not test_file.exists():
            raise FileNotFoundError(f"Test file not found: {test_file}")

        # Load the component's test file
        comp_data = load_yaml_file(test_file)

        # Validate packages are identical
        comp_packages = extract_packages_from_yaml(comp_data)
        if all_packages is None:
            all_packages = comp_packages
        elif comp_packages != all_packages:
            raise ValueError(
                f"Component {comp_name} has different packages than previous components. "
                f"Expected: {all_packages}, Got: {comp_packages}. "
                f"All components must use the same common bus configs to be merged."
            )

        # Remove packages from component data (we'll add them back once)
        if "packages" in comp_data:
            del comp_data["packages"]

        # Prefix substitutions in component data
        if "substitutions" in comp_data:
            prefixed_subs = {}
            for sub_name, sub_value in comp_data["substitutions"].items():
                prefixed_subs[f"{comp_name}_{sub_name}"] = sub_value
            comp_data["substitutions"] = prefixed_subs

        # Prefix substitution references throughout the config
        comp_data = prefix_substitutions_in_dict(comp_data, comp_name)

        # Note: We don't prefix IDs because that requires updating all ID references
        # throughout the config, which is complex. Instead, we rely on components
        # already having unique IDs (which they should if properly designed).
        # ESPHome's merge_config will handle ID conflicts by replacing duplicates.

        # Use ESPHome's merge_config to merge this component into the result
        merged_config_data = merge_config(merged_config_data, comp_data)

    # Add packages back (only once, since they are identical across components).
    # Re-read the first component to recover its original !include package entries.
    if all_packages:
        first_comp_data = load_yaml_file(
            tests_dir / component_names[0] / f"test.{platform}.yaml"
        )
        if "packages" in first_comp_data:
            merged_config_data["packages"] = first_comp_data["packages"]

    # Write merged config
    output_file.parent.mkdir(parents=True, exist_ok=True)
    yaml_content = yaml_util.dump(merged_config_data)
    output_file.write_text(yaml_content)

    print(f"Successfully merged {len(component_names)} components into {output_file}")


def main() -> None:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Merge multiple component test configs into a single file"
    )
    parser.add_argument(
        "--components",
        "-c",
        required=True,
        help="Comma-separated list of component names to merge",
    )
    parser.add_argument(
        "--platform",
        "-p",
        required=True,
        help="Platform to merge for (e.g., esp32-ard)",
    )
    parser.add_argument(
        "--output",
        "-o",
        required=True,
        type=Path,
        help="Output file path for merged config",
    )
    parser.add_argument(
        "--tests-dir",
        type=Path,
        default=Path("tests/components"),
        help="Path to tests/components directory",
    )

    args = parser.parse_args()

    component_names = [c.strip() for c in args.components.split(",")]

    try:
        merge_component_configs(
            component_names=component_names,
            platform=args.platform,
            tests_dir=args.tests_dir,
            output_file=args.output,
        )
    except Exception as e:
        print(f"Error merging configs: {e}", file=sys.stderr)
        import traceback

        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()