mirror of https://github.com/esphome/esphome.git synced 2025-10-31 15:12:06 +00:00

Merge branch 'ci_impact_analysis' into ci_impact_analysis_sensor_base

This commit is contained in:
J. Nick Koston
2025-10-17 21:13:18 -10:00
122 changed files with 1307 additions and 719 deletions

View File

@@ -534,6 +534,7 @@ jobs:
ram_usage: ${{ steps.extract.outputs.ram_usage }} ram_usage: ${{ steps.extract.outputs.ram_usage }}
flash_usage: ${{ steps.extract.outputs.flash_usage }} flash_usage: ${{ steps.extract.outputs.flash_usage }}
cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }} cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }}
skip: ${{ steps.check-script.outputs.skip }}
steps: steps:
- name: Check out target branch - name: Check out target branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -735,7 +736,7 @@ jobs:
- determine-jobs - determine-jobs
- memory-impact-target-branch - memory-impact-target-branch
- memory-impact-pr-branch - memory-impact-pr-branch
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
permissions: permissions:
contents: read contents: read
pull-requests: write pull-requests: write

View File

@@ -33,15 +33,41 @@ _GCC_PREFIX_ANNOTATIONS = {
"_GLOBAL__sub_D_": "global destructor for", "_GLOBAL__sub_D_": "global destructor for",
} }
# GCC optimization suffix pattern (e.g., $isra$0, $part$1, $constprop$2)
_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)")
# C++ runtime patterns for categorization
_CPP_RUNTIME_PATTERNS = frozenset(["vtable", "typeinfo", "thunk"])
# libc printf/scanf family base names (used to detect variants like _printf_r, vfprintf, etc.)
_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"])
# Regex pattern for parsing readelf section headers
# Format: [ #] name type addr off size
_READELF_SECTION_PATTERN = re.compile(
r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)"
)
# Component category prefixes
_COMPONENT_PREFIX_ESPHOME = "[esphome]"
_COMPONENT_PREFIX_EXTERNAL = "[external]"
_COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core"
_COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api"
# C++ namespace prefixes
_NAMESPACE_ESPHOME = "esphome::"
_NAMESPACE_STD = "std::"
# Type alias for symbol information: (symbol_name, size, component)
SymbolInfoType = tuple[str, int, str]
@dataclass @dataclass
class MemorySection: class MemorySection:
"""Represents a memory section with its symbols.""" """Represents a memory section with its symbols."""
name: str name: str
symbols: list[tuple[str, int, str]] = field( symbols: list[SymbolInfoType] = field(default_factory=list)
default_factory=list
) # (symbol_name, size, component)
total_size: int = 0 total_size: int = 0
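
As a quick illustration of the precompiled section-header pattern introduced above (the readelf line below is made up, but follows the documented "[ #] name type addr off size" layout), it extracts the section name and its hex size:

import re

_READELF_SECTION_PATTERN = re.compile(
    r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)"
)
line = "  [ 2] .text             PROGBITS  40080000 001000 01a2b4 00  AX  0 0 4"
match = _READELF_SECTION_PATTERN.match(line)
print(match.group(1), int(match.group(2), 16))  # .text 107188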
@@ -77,7 +103,7 @@ class MemoryAnalyzer:
readelf_path: str | None = None, readelf_path: str | None = None,
external_components: set[str] | None = None, external_components: set[str] | None = None,
idedata: "IDEData | None" = None, idedata: "IDEData | None" = None,
): ) -> None:
"""Initialize memory analyzer. """Initialize memory analyzer.
Args: Args:
@@ -133,12 +159,7 @@ class MemoryAnalyzer:
# Parse section headers # Parse section headers
for line in result.stdout.splitlines(): for line in result.stdout.splitlines():
# Look for section entries # Look for section entries
if not ( if not (match := _READELF_SECTION_PATTERN.match(line)):
match := re.match(
r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)",
line,
)
):
continue continue
section_name = match.group(1) section_name = match.group(1)
@@ -212,7 +233,7 @@ class MemoryAnalyzer:
self._uncategorized_symbols.append((symbol_name, demangled, size)) self._uncategorized_symbols.append((symbol_name, demangled, size))
# Track ESPHome core symbols for detailed analysis # Track ESPHome core symbols for detailed analysis
if component == "[esphome]core" and size > 0: if component == _COMPONENT_CORE and size > 0:
demangled = self._demangle_symbol(symbol_name) demangled = self._demangle_symbol(symbol_name)
self._esphome_core_symbols.append((symbol_name, demangled, size)) self._esphome_core_symbols.append((symbol_name, demangled, size))
@@ -230,13 +251,13 @@ class MemoryAnalyzer:
# Check for special component classes first (before namespace pattern) # Check for special component classes first (before namespace pattern)
# This handles cases like esphome::ESPHomeOTAComponent which should map to ota # This handles cases like esphome::ESPHomeOTAComponent which should map to ota
if "esphome::" in demangled: if _NAMESPACE_ESPHOME in demangled:
# Check for special component classes that include component name in the class # Check for special component classes that include component name in the class
# For example: esphome::ESPHomeOTAComponent -> ota component # For example: esphome::ESPHomeOTAComponent -> ota component
for component_name in get_esphome_components(): for component_name in get_esphome_components():
patterns = get_component_class_patterns(component_name) patterns = get_component_class_patterns(component_name)
if any(pattern in demangled for pattern in patterns): if any(pattern in demangled for pattern in patterns):
return f"[esphome]{component_name}" return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
# Check for ESPHome component namespaces # Check for ESPHome component namespaces
match = ESPHOME_COMPONENT_PATTERN.search(demangled) match = ESPHOME_COMPONENT_PATTERN.search(demangled)
@@ -247,17 +268,17 @@ class MemoryAnalyzer:
# Check if this is an actual component in the components directory # Check if this is an actual component in the components directory
if component_name in get_esphome_components(): if component_name in get_esphome_components():
return f"[esphome]{component_name}" return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
# Check if this is a known external component from the config # Check if this is a known external component from the config
if component_name in self.external_components: if component_name in self.external_components:
return f"[external]{component_name}" return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}"
# Everything else in esphome:: namespace is core # Everything else in esphome:: namespace is core
return "[esphome]core" return _COMPONENT_CORE
# Check for esphome core namespace (no component namespace) # Check for esphome core namespace (no component namespace)
if "esphome::" in demangled: if _NAMESPACE_ESPHOME in demangled:
# If no component match found, it's core # If no component match found, it's core
return "[esphome]core" return _COMPONENT_CORE
# Check against symbol patterns # Check against symbol patterns
for component, patterns in SYMBOL_PATTERNS.items(): for component, patterns in SYMBOL_PATTERNS.items():
@@ -273,14 +294,14 @@ class MemoryAnalyzer:
# Check if spi_flash vs spi_driver # Check if spi_flash vs spi_driver
if "spi_" in symbol_name or "SPI" in symbol_name: if "spi_" in symbol_name or "SPI" in symbol_name:
if "spi_flash" in symbol_name: return "spi_flash" if "spi_flash" in symbol_name else "spi_driver"
return "spi_flash"
return "spi_driver"
# libc special printf variants # libc special printf variants
if symbol_name.startswith("_") and symbol_name[1:].replace("_r", "").replace( if (
"v", "" symbol_name.startswith("_")
).replace("s", "") in ["printf", "fprintf", "sprintf", "scanf"]: and symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "")
in _LIBC_PRINTF_SCANF_FAMILY
):
return "libc" return "libc"
# Track uncategorized symbols for analysis # Track uncategorized symbols for analysis
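
To see how the printf/scanf family check above behaves, trace a newlib-style reentrant variant through it (the symbol name is illustrative):

_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"])
symbol_name = "_vfprintf_r"
base = symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "")
print(base, base in _LIBC_PRINTF_SCANF_FAMILY)  # fprintf True -> categorized as "libc"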
@@ -294,45 +315,42 @@ class MemoryAnalyzer:
# Try to find the appropriate c++filt for the platform # Try to find the appropriate c++filt for the platform
cppfilt_cmd = "c++filt" cppfilt_cmd = "c++filt"
_LOGGER.warning("Demangling %d symbols", len(symbols)) _LOGGER.info("Demangling %d symbols", len(symbols))
_LOGGER.warning("objdump_path = %s", self.objdump_path) _LOGGER.debug("objdump_path = %s", self.objdump_path)
# Check if we have a toolchain-specific c++filt # Check if we have a toolchain-specific c++filt
if self.objdump_path and self.objdump_path != "objdump": if self.objdump_path and self.objdump_path != "objdump":
# Replace objdump with c++filt in the path # Replace objdump with c++filt in the path
potential_cppfilt = self.objdump_path.replace("objdump", "c++filt") potential_cppfilt = self.objdump_path.replace("objdump", "c++filt")
_LOGGER.warning("Checking for toolchain c++filt at: %s", potential_cppfilt) _LOGGER.info("Checking for toolchain c++filt at: %s", potential_cppfilt)
if Path(potential_cppfilt).exists(): if Path(potential_cppfilt).exists():
cppfilt_cmd = potential_cppfilt cppfilt_cmd = potential_cppfilt
_LOGGER.warning("✓ Using toolchain c++filt: %s", cppfilt_cmd) _LOGGER.info("✓ Using toolchain c++filt: %s", cppfilt_cmd)
else: else:
_LOGGER.warning( _LOGGER.info(
"✗ Toolchain c++filt not found at %s, using system c++filt", "✗ Toolchain c++filt not found at %s, using system c++filt",
potential_cppfilt, potential_cppfilt,
) )
else: else:
_LOGGER.warning( _LOGGER.info("✗ Using system c++filt (objdump_path=%s)", self.objdump_path)
"✗ Using system c++filt (objdump_path=%s)", self.objdump_path
)
# Strip GCC optimization suffixes and prefixes before demangling # Strip GCC optimization suffixes and prefixes before demangling
# Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt # Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt
# Prefixes like _GLOBAL__sub_I_ need to be removed and tracked # Prefixes like _GLOBAL__sub_I_ need to be removed and tracked
symbols_stripped = [] symbols_stripped: list[str] = []
symbols_prefixes = [] # Track removed prefixes symbols_prefixes: list[str] = [] # Track removed prefixes
for symbol in symbols: for symbol in symbols:
# Remove GCC optimization markers # Remove GCC optimization markers
stripped = re.sub(r"\$(?:isra|part|constprop)\$\d+", "", symbol) stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol)
# Handle GCC global constructor/initializer prefixes # Handle GCC global constructor/initializer prefixes
# _GLOBAL__sub_I_<mangled> -> extract <mangled> for demangling # _GLOBAL__sub_I_<mangled> -> extract <mangled> for demangling
prefix = "" prefix = ""
if stripped.startswith("_GLOBAL__sub_I_"): for gcc_prefix in _GCC_PREFIX_ANNOTATIONS:
prefix = "_GLOBAL__sub_I_" if stripped.startswith(gcc_prefix):
stripped = stripped[len(prefix) :] prefix = gcc_prefix
elif stripped.startswith("_GLOBAL__sub_D_"): stripped = stripped[len(prefix) :]
prefix = "_GLOBAL__sub_D_" break
stripped = stripped[len(prefix) :]
symbols_stripped.append(stripped) symbols_stripped.append(stripped)
symbols_prefixes.append(prefix) symbols_prefixes.append(prefix)
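
A small sketch of what this stripping step does to one symbol before it is piped to c++filt (the mangled name is a hypothetical example; the removed prefix is restored afterwards as an annotation via _GCC_PREFIX_ANNOTATIONS):

import re

_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)")
symbol = "_GLOBAL__sub_I__ZN7esphome3App5setupEv$isra$0"
stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol)  # drop $isra$0
prefix = next((p for p in ("_GLOBAL__sub_I_", "_GLOBAL__sub_D_") if stripped.startswith(p)), "")
stripped = stripped[len(prefix):]
print(prefix, stripped)  # _GLOBAL__sub_I_ _ZN7esphome3App5setupEv (c++filt -> esphome::App::setup())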
@@ -405,17 +423,18 @@ class MemoryAnalyzer:
if stripped == demangled and stripped.startswith("_Z"): if stripped == demangled and stripped.startswith("_Z"):
failed_count += 1 failed_count += 1
if failed_count <= 5: # Only log first 5 failures if failed_count <= 5: # Only log first 5 failures
_LOGGER.warning("Failed to demangle: %s", original[:100]) _LOGGER.warning("Failed to demangle: %s", original)
if failed_count > 0: if failed_count == 0:
_LOGGER.warning( _LOGGER.info("Successfully demangled all %d symbols", len(symbols))
"Failed to demangle %d/%d symbols using %s", return
failed_count,
len(symbols), _LOGGER.warning(
cppfilt_cmd, "Failed to demangle %d/%d symbols using %s",
) failed_count,
else: len(symbols),
_LOGGER.warning("Successfully demangled all %d symbols", len(symbols)) cppfilt_cmd,
)
@staticmethod @staticmethod
def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str: def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str:
@@ -452,8 +471,7 @@ class MemoryAnalyzer:
Returns: Returns:
Demangled name with suffix annotation Demangled name with suffix annotation
""" """
suffix_match = re.search(r"(\$(?:isra|part|constprop)\$\d+)", original) if suffix_match := _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original):
if suffix_match:
return f"{demangled} [{suffix_match.group(1)}]" return f"{demangled} [{suffix_match.group(1)}]"
return demangled return demangled
@@ -464,10 +482,10 @@ class MemoryAnalyzer:
def _categorize_esphome_core_symbol(self, demangled: str) -> str: def _categorize_esphome_core_symbol(self, demangled: str) -> str:
"""Categorize ESPHome core symbols into subcategories.""" """Categorize ESPHome core symbols into subcategories."""
# Special patterns that need to be checked separately # Special patterns that need to be checked separately
if any(pattern in demangled for pattern in ["vtable", "typeinfo", "thunk"]): if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS):
return "C++ Runtime (vtables/RTTI)" return "C++ Runtime (vtables/RTTI)"
if demangled.startswith("std::"): if demangled.startswith(_NAMESPACE_STD):
return "C++ STL" return "C++ STL"
# Check against patterns from const.py # Check against patterns from const.py

View File

@@ -3,7 +3,13 @@
from collections import defaultdict from collections import defaultdict
import sys import sys
from . import MemoryAnalyzer from . import (
_COMPONENT_API,
_COMPONENT_CORE,
_COMPONENT_PREFIX_ESPHOME,
_COMPONENT_PREFIX_EXTERNAL,
MemoryAnalyzer,
)
class MemoryAnalyzerCLI(MemoryAnalyzer): class MemoryAnalyzerCLI(MemoryAnalyzer):
@@ -83,7 +89,7 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
total_ram = sum(c.ram_total for _, c in components) total_ram = sum(c.ram_total for _, c in components)
# Build report # Build report
lines = [] lines: list[str] = []
lines.append("=" * self.TABLE_WIDTH) lines.append("=" * self.TABLE_WIDTH)
lines.append("Component Memory Analysis".center(self.TABLE_WIDTH)) lines.append("Component Memory Analysis".center(self.TABLE_WIDTH))
@@ -144,7 +150,9 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
if self._esphome_core_symbols: if self._esphome_core_symbols:
lines.append("") lines.append("")
lines.append("=" * self.TABLE_WIDTH) lines.append("=" * self.TABLE_WIDTH)
lines.append("[esphome]core Detailed Analysis".center(self.TABLE_WIDTH)) lines.append(
f"{_COMPONENT_CORE} Detailed Analysis".center(self.TABLE_WIDTH)
)
lines.append("=" * self.TABLE_WIDTH) lines.append("=" * self.TABLE_WIDTH)
lines.append("") lines.append("")
@@ -183,9 +191,9 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%" f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%"
) )
# Top 10 largest core symbols # Top 15 largest core symbols
lines.append("") lines.append("")
lines.append("Top 10 Largest [esphome]core Symbols:") lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:")
sorted_core_symbols = sorted( sorted_core_symbols = sorted(
self._esphome_core_symbols, key=lambda x: x[2], reverse=True self._esphome_core_symbols, key=lambda x: x[2], reverse=True
) )
@@ -199,10 +207,12 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
esphome_components = [ esphome_components = [
(name, mem) (name, mem)
for name, mem in components for name, mem in components
if name.startswith("[esphome]") and name != "[esphome]core" if name.startswith(_COMPONENT_PREFIX_ESPHOME) and name != _COMPONENT_CORE
] ]
external_components = [ external_components = [
(name, mem) for name, mem in components if name.startswith("[external]") (name, mem)
for name, mem in components
if name.startswith(_COMPONENT_PREFIX_EXTERNAL)
] ]
top_esphome_components = sorted( top_esphome_components = sorted(
@@ -217,7 +227,7 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
# Check if API component exists and ensure it's included # Check if API component exists and ensure it's included
api_component = None api_component = None
for name, mem in components: for name, mem in components:
if name == "[esphome]api": if name == _COMPONENT_API:
api_component = (name, mem) api_component = (name, mem)
break break
@@ -371,15 +381,16 @@ def main():
idedata = None idedata = None
for idedata_path in idedata_candidates: for idedata_path in idedata_candidates:
if idedata_path.exists(): if not idedata_path.exists():
try: continue
with open(idedata_path, encoding="utf-8") as f: try:
raw_data = json.load(f) with open(idedata_path, encoding="utf-8") as f:
idedata = IDEData(raw_data) raw_data = json.load(f)
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) idedata = IDEData(raw_data)
break print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
except (json.JSONDecodeError, OSError) as e: break
print(f"Warning: Failed to load idedata: {e}", file=sys.stderr) except (json.JSONDecodeError, OSError) as e:
print(f"Warning: Failed to load idedata: {e}", file=sys.stderr)
if not idedata: if not idedata:
print( print(

View File

@@ -5,6 +5,11 @@ from pathlib import Path
from .const import SECTION_MAPPING from .const import SECTION_MAPPING
# Namespace prefix duplicated from the parent module:
# importing it from there at module level would create a circular import,
# so it is defined locally here as well
_NAMESPACE_ESPHOME = "esphome::"
# Get the list of actual ESPHome components by scanning the components directory # Get the list of actual ESPHome components by scanning the components directory
@cache @cache
@@ -40,10 +45,10 @@ def get_component_class_patterns(component_name: str) -> list[str]:
component_upper = component_name.upper() component_upper = component_name.upper()
component_camel = component_name.replace("_", "").title() component_camel = component_name.replace("_", "").title()
return [ return [
f"esphome::{component_upper}Component", # e.g., esphome::OTAComponent f"{_NAMESPACE_ESPHOME}{component_upper}Component", # e.g., esphome::OTAComponent
f"esphome::ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent f"{_NAMESPACE_ESPHOME}ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent
f"esphome::{component_camel}Component", # e.g., esphome::OtaComponent f"{_NAMESPACE_ESPHOME}{component_camel}Component", # e.g., esphome::OtaComponent
f"esphome::ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent f"{_NAMESPACE_ESPHOME}ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent
] ]
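
Worked example matching the inline comments above, for component_name = "ota":

component_name = "ota"
component_upper = component_name.upper()                    # "OTA"
component_camel = component_name.replace("_", "").title()   # "Ota"
# -> ["esphome::OTAComponent", "esphome::ESPHomeOTAComponent",
#     "esphome::OtaComponent", "esphome::ESPHomeOtaComponent"]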

View File

@@ -1056,6 +1056,52 @@ async def sony_action(var, config, args):
cg.add(var.set_nbits(template_)) cg.add(var.set_nbits(template_))
# Symphony
SymphonyData, SymphonyBinarySensor, SymphonyTrigger, SymphonyAction, SymphonyDumper = (
declare_protocol("Symphony")
)
SYMPHONY_SCHEMA = cv.Schema(
{
cv.Required(CONF_DATA): cv.hex_uint32_t,
cv.Required(CONF_NBITS): cv.int_range(min=1, max=32),
cv.Optional(CONF_COMMAND_REPEATS, default=2): cv.uint8_t,
}
)
@register_binary_sensor("symphony", SymphonyBinarySensor, SYMPHONY_SCHEMA)
def symphony_binary_sensor(var, config):
cg.add(
var.set_data(
cg.StructInitializer(
SymphonyData,
("data", config[CONF_DATA]),
("nbits", config[CONF_NBITS]),
)
)
)
@register_trigger("symphony", SymphonyTrigger, SymphonyData)
def symphony_trigger(var, config):
pass
@register_dumper("symphony", SymphonyDumper)
def symphony_dumper(var, config):
pass
@register_action("symphony", SymphonyAction, SYMPHONY_SCHEMA)
async def symphony_action(var, config, args):
template_ = await cg.templatable(config[CONF_DATA], args, cg.uint32)
cg.add(var.set_data(template_))
template_ = await cg.templatable(config[CONF_NBITS], args, cg.uint32)
cg.add(var.set_nbits(template_))
template_ = await cg.templatable(config[CONF_COMMAND_REPEATS], args, cg.uint8)
cg.add(var.set_repeats(template_))
# Raw # Raw
def validate_raw_alternating(value): def validate_raw_alternating(value):
assert isinstance(value, list) assert isinstance(value, list)

View File

@@ -0,0 +1,120 @@
#include "symphony_protocol.h"
#include "esphome/core/log.h"
namespace esphome {
namespace remote_base {
static const char *const TAG = "remote.symphony";
// Reference implementation and timing details:
// IRremoteESP8266 ir_Symphony.cpp
// https://github.com/crankyoldgit/IRremoteESP8266/blob/master/src/ir_Symphony.cpp
// The implementation below mirrors the constant bit-time mapping and
// footer-gap handling used there.
// Symphony protocol timing specifications (tuned to handset captures)
static const uint32_t BIT_ZERO_HIGH_US = 460; // short
static const uint32_t BIT_ZERO_LOW_US = 1260; // long
static const uint32_t BIT_ONE_HIGH_US = 1260; // long
static const uint32_t BIT_ONE_LOW_US = 460; // short
static const uint32_t CARRIER_FREQUENCY = 38000;
// IRremoteESP8266 reference: kSymphonyFooterGap = 4 * (mark + space)
static const uint32_t FOOTER_GAP_US = 4 * (BIT_ZERO_HIGH_US + BIT_ZERO_LOW_US);
// Typical inter-frame gap (~34.8 ms observed)
static const uint32_t INTER_FRAME_GAP_US = 34760;
void SymphonyProtocol::encode(RemoteTransmitData *dst, const SymphonyData &data) {
dst->set_carrier_frequency(CARRIER_FREQUENCY);
ESP_LOGD(TAG, "Sending Symphony: data=0x%0*X nbits=%u repeats=%u", (data.nbits + 3) / 4, (uint32_t) data.data,
data.nbits, data.repeats);
// Each bit produces a mark+space (2 entries). We fold the inter-frame/footer gap
// into the last bit's space of each frame to avoid over-length gaps.
dst->reserve(data.nbits * 2u * data.repeats);
for (uint8_t repeats = 0; repeats < data.repeats; repeats++) {
// Data bits (MSB first)
for (uint32_t mask = 1UL << (data.nbits - 1); mask != 0; mask >>= 1) {
const bool is_last_bit = (mask == 1);
const bool is_last_frame = (repeats == (data.repeats - 1));
if (is_last_bit) {
// Emit last bit's mark; replace its space with the proper gap
if (data.data & mask) {
dst->mark(BIT_ONE_HIGH_US);
} else {
dst->mark(BIT_ZERO_HIGH_US);
}
dst->space(is_last_frame ? FOOTER_GAP_US : INTER_FRAME_GAP_US);
} else {
if (data.data & mask) {
dst->item(BIT_ONE_HIGH_US, BIT_ONE_LOW_US);
} else {
dst->item(BIT_ZERO_HIGH_US, BIT_ZERO_LOW_US);
}
}
}
}
}
optional<SymphonyData> SymphonyProtocol::decode(RemoteReceiveData src) {
auto is_valid_len = [](uint8_t nbits) -> bool { return nbits == 8 || nbits == 12 || nbits == 16; };
RemoteReceiveData s = src; // copy
SymphonyData out{0, 0, 1};
for (; out.nbits < 32; out.nbits++) {
if (s.expect_mark(BIT_ONE_HIGH_US)) {
if (!s.expect_space(BIT_ONE_LOW_US)) {
// Allow footer gap immediately after the last mark
if (s.peek_space_at_least(FOOTER_GAP_US)) {
uint8_t bits_with_this = out.nbits + 1;
if (is_valid_len(bits_with_this)) {
out.data = (out.data << 1UL) | 1UL;
out.nbits = bits_with_this;
return out;
}
}
return {};
}
// Successfully consumed a '1' bit (mark + space)
out.data = (out.data << 1UL) | 1UL;
continue;
} else if (s.expect_mark(BIT_ZERO_HIGH_US)) {
if (!s.expect_space(BIT_ZERO_LOW_US)) {
// Allow footer gap immediately after the last mark
if (s.peek_space_at_least(FOOTER_GAP_US)) {
uint8_t bits_with_this = out.nbits + 1;
if (is_valid_len(bits_with_this)) {
out.data = (out.data << 1UL) | 0UL;
out.nbits = bits_with_this;
return out;
}
}
return {};
}
// Successfully consumed a '0' bit (mark + space)
out.data = (out.data << 1UL) | 0UL;
continue;
} else {
// Completed a valid-length frame followed by a footer gap
if (is_valid_len(out.nbits) && s.peek_space_at_least(FOOTER_GAP_US)) {
return out;
}
return {};
}
}
if (is_valid_len(out.nbits) && s.peek_space_at_least(FOOTER_GAP_US)) {
return out;
}
return {};
}
void SymphonyProtocol::dump(const SymphonyData &data) {
const int32_t hex_width = (data.nbits + 3) / 4; // pad to nibble width
ESP_LOGI(TAG, "Received Symphony: data=0x%0*X, nbits=%d", hex_width, (uint32_t) data.data, data.nbits);
}
} // namespace remote_base
} // namespace esphome
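
For reference, the constant bit-time mapping and footer-gap folding above can be sketched in a few lines of Python (timings copied from the constants in this file; the 12-bit code is an arbitrary example, not a captured remote):

BIT_ZERO = (460, 1260)   # mark, space in microseconds
BIT_ONE = (1260, 460)
FOOTER_GAP_US = 4 * (460 + 1260)  # 6880 us

def encode_frame(data: int, nbits: int) -> list[tuple[int, int]]:
    """Return (mark, space) pairs for one frame, MSB first, footer gap folded into the last space."""
    items = []
    for i in reversed(range(nbits)):
        mark, space = BIT_ONE if (data >> i) & 1 else BIT_ZERO
        if i == 0:
            space = FOOTER_GAP_US  # last bit: replace its space with the footer gap
        items.append((mark, space))
    return items

print(len(encode_frame(0xD82, 12)), encode_frame(0xD82, 12)[-1])  # 12 (460, 6880)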

View File

@@ -0,0 +1,44 @@
#pragma once
#include "esphome/core/component.h"
#include "remote_base.h"
#include <cinttypes>
namespace esphome {
namespace remote_base {
struct SymphonyData {
uint32_t data;
uint8_t nbits;
uint8_t repeats{1};
bool operator==(const SymphonyData &rhs) const { return data == rhs.data && nbits == rhs.nbits; }
};
class SymphonyProtocol : public RemoteProtocol<SymphonyData> {
public:
void encode(RemoteTransmitData *dst, const SymphonyData &data) override;
optional<SymphonyData> decode(RemoteReceiveData src) override;
void dump(const SymphonyData &data) override;
};
DECLARE_REMOTE_PROTOCOL(Symphony)
template<typename... Ts> class SymphonyAction : public RemoteTransmitterActionBase<Ts...> {
public:
TEMPLATABLE_VALUE(uint32_t, data)
TEMPLATABLE_VALUE(uint8_t, nbits)
TEMPLATABLE_VALUE(uint8_t, repeats)
void encode(RemoteTransmitData *dst, Ts... x) override {
SymphonyData data{};
data.data = this->data_.value(x...);
data.nbits = this->nbits_.value(x...);
data.repeats = this->repeats_.value(x...);
SymphonyProtocol().encode(dst, data);
}
};
} // namespace remote_base
} // namespace esphome

View File

@@ -378,19 +378,19 @@ class IDEData:
@property @property
def objdump_path(self) -> str: def objdump_path(self) -> str:
# replace gcc at end with objdump # replace gcc at end with objdump
path = self.cc_path
# Windows return (
if self.cc_path.endswith(".exe"): f"{path[:-7]}objdump.exe"
return f"{self.cc_path[:-7]}objdump.exe" if path.endswith(".exe")
else f"{path[:-3]}objdump"
return f"{self.cc_path[:-3]}objdump" )
@property @property
def readelf_path(self) -> str: def readelf_path(self) -> str:
# replace gcc at end with readelf # replace gcc at end with readelf
path = self.cc_path
# Windows return (
if self.cc_path.endswith(".exe"): f"{path[:-7]}readelf.exe"
return f"{self.cc_path[:-7]}readelf.exe" if path.endswith(".exe")
else f"{path[:-3]}readelf"
return f"{self.cc_path[:-3]}readelf" )
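
Illustration of the suffix swap performed by these properties (the toolchain path is hypothetical):

cc_path = "/toolchain/bin/xtensa-esp32-elf-gcc"
objdump_path = (
    f"{cc_path[:-7]}objdump.exe" if cc_path.endswith(".exe") else f"{cc_path[:-3]}objdump"
)
print(objdump_path)  # /toolchain/bin/xtensa-esp32-elf-objdump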

View File

@@ -34,6 +34,8 @@ from typing import Any
# Add esphome to path # Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent)) sys.path.insert(0, str(Path(__file__).parent.parent))
from helpers import BASE_BUS_COMPONENTS
from esphome import yaml_util from esphome import yaml_util
from esphome.config_helpers import Extend, Remove from esphome.config_helpers import Extend, Remove
@@ -50,7 +52,14 @@ PACKAGE_DEPENDENCIES = {
# Bus types that can be defined directly in config files # Bus types that can be defined directly in config files
# Components defining these directly cannot be grouped (they create unique bus IDs) # Components defining these directly cannot be grouped (they create unique bus IDs)
DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus") DIRECT_BUS_TYPES = (
"i2c",
"spi",
"uart",
"modbus",
"remote_transmitter",
"remote_receiver",
)
# Signature for components with no bus requirements # Signature for components with no bus requirements
# These components can be merged with any other group # These components can be merged with any other group
@@ -60,16 +69,6 @@ NO_BUSES_SIGNATURE = "no_buses"
# Isolated components have unique signatures and cannot be merged with others # Isolated components have unique signatures and cannot be merged with others
ISOLATED_SIGNATURE_PREFIX = "isolated_" ISOLATED_SIGNATURE_PREFIX = "isolated_"
# Base bus components - these ARE the bus implementations and should not
# be flagged as needing migration since they are the platform/base components
BASE_BUS_COMPONENTS = {
"i2c",
"spi",
"uart",
"modbus",
"canbus",
}
# Components that must be tested in isolation (not grouped or batched with others) # Components that must be tested in isolation (not grouped or batched with others)
# These have known build issues that prevent grouping # These have known build issues that prevent grouping
# NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py # NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py

View File

@@ -14,6 +14,8 @@ from pathlib import Path
import subprocess import subprocess
import sys import sys
from jinja2 import Environment, FileSystemLoader
# Add esphome to path for analyze_memory import # Add esphome to path for analyze_memory import
sys.path.insert(0, str(Path(__file__).parent.parent)) sys.path.insert(0, str(Path(__file__).parent.parent))
@@ -26,6 +28,22 @@ COMMENT_MARKER = "<!-- esphome-memory-impact-analysis -->"
OVERALL_CHANGE_THRESHOLD = 1.0 # Overall RAM/Flash changes OVERALL_CHANGE_THRESHOLD = 1.0 # Overall RAM/Flash changes
COMPONENT_CHANGE_THRESHOLD = 3.0 # Component breakdown changes COMPONENT_CHANGE_THRESHOLD = 3.0 # Component breakdown changes
# Display limits for tables
MAX_COMPONENT_BREAKDOWN_ROWS = 20 # Maximum components to show in breakdown table
MAX_CHANGED_SYMBOLS_ROWS = 30 # Maximum changed symbols to show
MAX_NEW_SYMBOLS_ROWS = 15 # Maximum new symbols to show
MAX_REMOVED_SYMBOLS_ROWS = 15 # Maximum removed symbols to show
# Symbol display formatting
SYMBOL_DISPLAY_MAX_LENGTH = 100 # Max length before using <details> tag
SYMBOL_DISPLAY_TRUNCATE_LENGTH = 97 # Length to truncate in summary
# Component change noise threshold
COMPONENT_CHANGE_NOISE_THRESHOLD = 2 # Ignore component changes ≤ this many bytes
# Template directory
TEMPLATE_DIR = Path(__file__).parent / "templates"
def load_analysis_json(json_path: str) -> dict | None: def load_analysis_json(json_path: str) -> dict | None:
"""Load memory analysis results from JSON file. """Load memory analysis results from JSON file.
@@ -111,43 +129,30 @@ def format_change(before: int, after: int, threshold: float | None = None) -> st
return f"{emoji} {delta_str} ({pct_str})" return f"{emoji} {delta_str} ({pct_str})"
def format_symbol_for_display(symbol: str) -> str: def prepare_symbol_changes_data(
"""Format a symbol name for display in markdown table.
Args:
symbol: Symbol name to format
Returns:
Formatted symbol with backticks or HTML details tag for long names
"""
if len(symbol) <= 100:
return f"`{symbol}`"
# Use HTML details for very long symbols (no backticks inside HTML)
return f"<details><summary><code>{symbol[:97]}...</code></summary><code>{symbol}</code></details>"
def create_symbol_changes_table(
target_symbols: dict | None, pr_symbols: dict | None target_symbols: dict | None, pr_symbols: dict | None
) -> str: ) -> dict | None:
"""Create a markdown table showing symbols that changed size. """Prepare symbol changes data for template rendering.
Args: Args:
target_symbols: Symbol name to size mapping for target branch target_symbols: Symbol name to size mapping for target branch
pr_symbols: Symbol name to size mapping for PR branch pr_symbols: Symbol name to size mapping for PR branch
Returns: Returns:
Formatted markdown table Dictionary with changed, new, and removed symbols, or None if no changes
""" """
if not target_symbols or not pr_symbols: if not target_symbols or not pr_symbols:
return "" return None
# Find all symbols that exist in both branches or only in one # Find all symbols that exist in both branches or only in one
all_symbols = set(target_symbols.keys()) | set(pr_symbols.keys()) all_symbols = set(target_symbols.keys()) | set(pr_symbols.keys())
# Track changes # Track changes
changed_symbols = [] changed_symbols: list[
new_symbols = [] tuple[str, int, int, int]
removed_symbols = [] ] = [] # (symbol, target_size, pr_size, delta)
new_symbols: list[tuple[str, int]] = [] # (symbol, size)
removed_symbols: list[tuple[str, int]] = [] # (symbol, size)
for symbol in all_symbols: for symbol in all_symbols:
target_size = target_symbols.get(symbol, 0) target_size = target_symbols.get(symbol, 0)
@@ -165,114 +170,42 @@ def create_symbol_changes_table(
changed_symbols.append((symbol, target_size, pr_size, delta)) changed_symbols.append((symbol, target_size, pr_size, delta))
if not changed_symbols and not new_symbols and not removed_symbols: if not changed_symbols and not new_symbols and not removed_symbols:
return "" return None
lines = [ # Sort by size/delta
"", changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True)
"<details>", new_symbols.sort(key=lambda x: x[1], reverse=True)
"<summary>🔍 Symbol-Level Changes (click to expand)</summary>", removed_symbols.sort(key=lambda x: x[1], reverse=True)
"",
]
# Show changed symbols (sorted by absolute delta) return {
if changed_symbols: "changed_symbols": changed_symbols,
changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True) "new_symbols": new_symbols,
lines.extend( "removed_symbols": removed_symbols,
[ }
"### Changed Symbols",
"",
"| Symbol | Target Size | PR Size | Change |",
"|--------|-------------|---------|--------|",
]
)
# Show top 30 changes
for symbol, target_size, pr_size, delta in changed_symbols[:30]:
target_str = format_bytes(target_size)
pr_str = format_bytes(pr_size)
change_str = format_change(target_size, pr_size) # Chart icons only
display_symbol = format_symbol_for_display(symbol)
lines.append(
f"| {display_symbol} | {target_str} | {pr_str} | {change_str} |"
)
if len(changed_symbols) > 30:
lines.append(
f"| ... | ... | ... | *({len(changed_symbols) - 30} more changed symbols not shown)* |"
)
lines.append("")
# Show new symbols
if new_symbols:
new_symbols.sort(key=lambda x: x[1], reverse=True)
lines.extend(
[
"### New Symbols (top 15)",
"",
"| Symbol | Size |",
"|--------|------|",
]
)
for symbol, size in new_symbols[:15]:
display_symbol = format_symbol_for_display(symbol)
lines.append(f"| {display_symbol} | {format_bytes(size)} |")
if len(new_symbols) > 15:
total_new_size = sum(s[1] for s in new_symbols)
lines.append(
f"| *{len(new_symbols) - 15} more new symbols...* | *Total: {format_bytes(total_new_size)}* |"
)
lines.append("")
# Show removed symbols
if removed_symbols:
removed_symbols.sort(key=lambda x: x[1], reverse=True)
lines.extend(
[
"### Removed Symbols (top 15)",
"",
"| Symbol | Size |",
"|--------|------|",
]
)
for symbol, size in removed_symbols[:15]:
display_symbol = format_symbol_for_display(symbol)
lines.append(f"| {display_symbol} | {format_bytes(size)} |")
if len(removed_symbols) > 15:
total_removed_size = sum(s[1] for s in removed_symbols)
lines.append(
f"| *{len(removed_symbols) - 15} more removed symbols...* | *Total: {format_bytes(total_removed_size)}* |"
)
lines.append("")
lines.extend(["</details>", ""])
return "\n".join(lines)
def create_detailed_breakdown_table( def prepare_component_breakdown_data(
target_analysis: dict | None, pr_analysis: dict | None target_analysis: dict | None, pr_analysis: dict | None
) -> str: ) -> list[tuple[str, int, int, int]] | None:
"""Create a markdown table showing detailed memory breakdown by component. """Prepare component breakdown data for template rendering.
Args: Args:
target_analysis: Component memory breakdown for target branch target_analysis: Component memory breakdown for target branch
pr_analysis: Component memory breakdown for PR branch pr_analysis: Component memory breakdown for PR branch
Returns: Returns:
Formatted markdown table List of tuples (component, target_flash, pr_flash, delta), or None if no changes
""" """
if not target_analysis or not pr_analysis: if not target_analysis or not pr_analysis:
return "" return None
# Combine all components from both analyses # Combine all components from both analyses
all_components = set(target_analysis.keys()) | set(pr_analysis.keys()) all_components = set(target_analysis.keys()) | set(pr_analysis.keys())
# Filter to components that have changed (ignoring noise ≤2 bytes) # Filter to components that have changed (ignoring noise)
changed_components = [] changed_components: list[
tuple[str, int, int, int]
] = [] # (comp, target_flash, pr_flash, delta)
for comp in all_components: for comp in all_components:
target_mem = target_analysis.get(comp, {}) target_mem = target_analysis.get(comp, {})
pr_mem = pr_analysis.get(comp, {}) pr_mem = pr_analysis.get(comp, {})
@@ -280,43 +213,18 @@ def create_detailed_breakdown_table(
target_flash = target_mem.get("flash_total", 0) target_flash = target_mem.get("flash_total", 0)
pr_flash = pr_mem.get("flash_total", 0) pr_flash = pr_mem.get("flash_total", 0)
# Only include if component has meaningful change (>2 bytes) # Only include if component has meaningful change (above noise threshold)
delta = pr_flash - target_flash delta = pr_flash - target_flash
if abs(delta) > 2: if abs(delta) > COMPONENT_CHANGE_NOISE_THRESHOLD:
changed_components.append((comp, target_flash, pr_flash, delta)) changed_components.append((comp, target_flash, pr_flash, delta))
if not changed_components: if not changed_components:
return "" return None
# Sort by absolute delta (largest changes first) # Sort by absolute delta (largest changes first)
changed_components.sort(key=lambda x: abs(x[3]), reverse=True) changed_components.sort(key=lambda x: abs(x[3]), reverse=True)
# Build table - limit to top 20 changes return changed_components
lines = [
"",
"<details open>",
"<summary>📊 Component Memory Breakdown</summary>",
"",
"| Component | Target Flash | PR Flash | Change |",
"|-----------|--------------|----------|--------|",
]
for comp, target_flash, pr_flash, delta in changed_components[:20]:
target_str = format_bytes(target_flash)
pr_str = format_bytes(pr_flash)
change_str = format_change(
target_flash, pr_flash, threshold=COMPONENT_CHANGE_THRESHOLD
)
lines.append(f"| `{comp}` | {target_str} | {pr_str} | {change_str} |")
if len(changed_components) > 20:
lines.append(
f"| ... | ... | ... | *({len(changed_components) - 20} more components not shown)* |"
)
lines.extend(["", "</details>", ""])
return "\n".join(lines)
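
A toy illustration of the data shape the template now receives from this helper (component names and sizes are invented):

COMPONENT_CHANGE_NOISE_THRESHOLD = 2
target = {"[esphome]api": {"flash_total": 120000}, "[esphome]core": {"flash_total": 300000}}
pr = {"[esphome]api": {"flash_total": 120512}, "[esphome]core": {"flash_total": 300001}}
# api: delta = 512 -> kept; core: delta = 1 -> within the noise threshold, dropped
# prepare_component_breakdown_data(target, pr) would return [("[esphome]api", 120000, 120512, 512)]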
def create_comment_body( def create_comment_body(
@@ -332,7 +240,7 @@ def create_comment_body(
pr_symbols: dict | None = None, pr_symbols: dict | None = None,
target_cache_hit: bool = False, target_cache_hit: bool = False,
) -> str: ) -> str:
"""Create the comment body with memory impact analysis. """Create the comment body with memory impact analysis using Jinja2 templates.
Args: Args:
components: List of component names (merged config) components: List of component names (merged config)
@@ -350,57 +258,87 @@ def create_comment_body(
Returns: Returns:
Formatted comment body Formatted comment body
""" """
ram_change = format_change(target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD) # Set up Jinja2 environment
flash_change = format_change( env = Environment(
target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD loader=FileSystemLoader(TEMPLATE_DIR),
trim_blocks=True,
lstrip_blocks=True,
) )
# Use provided analysis data if available # Register custom filters
component_breakdown = "" env.filters["format_bytes"] = format_bytes
symbol_changes = "" env.filters["format_change"] = format_change
if target_analysis and pr_analysis: # Prepare template context
component_breakdown = create_detailed_breakdown_table( context = {
target_analysis, pr_analysis "comment_marker": COMMENT_MARKER,
) "platform": platform,
"target_ram": format_bytes(target_ram),
if target_symbols and pr_symbols: "pr_ram": format_bytes(pr_ram),
symbol_changes = create_symbol_changes_table(target_symbols, pr_symbols) "target_flash": format_bytes(target_flash),
else: "pr_flash": format_bytes(pr_flash),
print("No ELF files provided, skipping detailed analysis", file=sys.stderr) "ram_change": format_change(
target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD
),
"flash_change": format_change(
target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD
),
"target_cache_hit": target_cache_hit,
"component_change_threshold": COMPONENT_CHANGE_THRESHOLD,
}
# Format components list # Format components list
if len(components) == 1: if len(components) == 1:
components_str = f"`{components[0]}`" context["components_str"] = f"`{components[0]}`"
config_note = "a representative test configuration" context["config_note"] = "a representative test configuration"
else: else:
components_str = ", ".join(f"`{c}`" for c in sorted(components)) context["components_str"] = ", ".join(f"`{c}`" for c in sorted(components))
config_note = f"a merged configuration with {len(components)} components" context["config_note"] = (
f"a merged configuration with {len(components)} components"
)
# Add cache info note if target was cached # Prepare component breakdown if available
cache_note = "" component_breakdown = ""
if target_cache_hit: if target_analysis and pr_analysis:
cache_note = "\n\n> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI)." changed_components = prepare_component_breakdown_data(
target_analysis, pr_analysis
)
if changed_components:
template = env.get_template("ci_memory_impact_component_breakdown.j2")
component_breakdown = template.render(
changed_components=changed_components,
format_bytes=format_bytes,
format_change=format_change,
component_change_threshold=COMPONENT_CHANGE_THRESHOLD,
max_rows=MAX_COMPONENT_BREAKDOWN_ROWS,
)
return f"""{COMMENT_MARKER} # Prepare symbol changes if available
## Memory Impact Analysis symbol_changes = ""
if target_symbols and pr_symbols:
symbol_data = prepare_symbol_changes_data(target_symbols, pr_symbols)
if symbol_data:
template = env.get_template("ci_memory_impact_symbol_changes.j2")
symbol_changes = template.render(
**symbol_data,
format_bytes=format_bytes,
format_change=format_change,
max_changed_rows=MAX_CHANGED_SYMBOLS_ROWS,
max_new_rows=MAX_NEW_SYMBOLS_ROWS,
max_removed_rows=MAX_REMOVED_SYMBOLS_ROWS,
symbol_max_length=SYMBOL_DISPLAY_MAX_LENGTH,
symbol_truncate_length=SYMBOL_DISPLAY_TRUNCATE_LENGTH,
)
**Components:** {components_str} if not target_analysis or not pr_analysis:
**Platform:** `{platform}` print("No ELF files provided, skipping detailed analysis", file=sys.stderr)
| Metric | Target Branch | This PR | Change | context["component_breakdown"] = component_breakdown
|--------|--------------|---------|--------| context["symbol_changes"] = symbol_changes
| **RAM** | {format_bytes(target_ram)} | {format_bytes(pr_ram)} | {ram_change} |
| **Flash** | {format_bytes(target_flash)} | {format_bytes(pr_flash)} | {flash_change} |
{component_breakdown}{symbol_changes}{cache_note}
--- # Render main template
> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation). template = env.get_template("ci_memory_impact_comment_template.j2")
> **Dynamic memory (heap)** cannot be measured automatically. return template.render(**context)
> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues.
*This analysis runs automatically when components change. Memory usage is measured from {config_note}.*
"""
def find_existing_comment(pr_number: str) -> str | None: def find_existing_comment(pr_number: str) -> str | None:
@@ -411,137 +349,133 @@ def find_existing_comment(pr_number: str) -> str | None:
Returns: Returns:
Comment numeric ID if found, None otherwise Comment numeric ID if found, None otherwise
Raises:
subprocess.CalledProcessError: If gh command fails
""" """
try: print(f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr)
print(
f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr
)
# Use gh api to get comments directly - this returns the numeric id field # Use gh api to get comments directly - this returns the numeric id field
result = subprocess.run( result = subprocess.run(
[ [
"gh", "gh",
"api", "api",
f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments", f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments",
"--jq", "--jq",
".[] | {id, body}", ".[] | {id, body}",
], ],
capture_output=True, capture_output=True,
text=True, text=True,
check=True, check=True,
) )
print( print(
f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}", f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}",
file=sys.stderr, file=sys.stderr,
) )
# Parse comments and look for our marker # Parse comments and look for our marker
comment_count = 0 comment_count = 0
for line in result.stdout.strip().split("\n"): for line in result.stdout.strip().split("\n"):
if not line: if not line:
continue continue
try: try:
comment = json.loads(line) comment = json.loads(line)
comment_count += 1 comment_count += 1
comment_id = comment.get("id") comment_id = comment.get("id")
print(
f"DEBUG: Checking comment {comment_count}: id={comment_id}",
file=sys.stderr,
)
body = comment.get("body", "")
if COMMENT_MARKER in body:
print( print(
f"DEBUG: Checking comment {comment_count}: id={comment_id}", f"DEBUG: Found existing comment with id={comment_id}",
file=sys.stderr, file=sys.stderr,
) )
# Return the numeric id
return str(comment_id)
print("DEBUG: Comment does not contain marker", file=sys.stderr)
except json.JSONDecodeError as e:
print(f"DEBUG: JSON decode error: {e}", file=sys.stderr)
continue
body = comment.get("body", "") print(
if COMMENT_MARKER in body: f"DEBUG: No existing comment found (checked {comment_count} comments)",
print( file=sys.stderr,
f"DEBUG: Found existing comment with id={comment_id}", )
file=sys.stderr, return None
)
# Return the numeric id
return str(comment_id)
print("DEBUG: Comment does not contain marker", file=sys.stderr)
except json.JSONDecodeError as e:
print(f"DEBUG: JSON decode error: {e}", file=sys.stderr)
continue
print(
f"DEBUG: No existing comment found (checked {comment_count} comments)",
file=sys.stderr,
)
return None
except subprocess.CalledProcessError as e:
print(f"Error finding existing comment: {e}", file=sys.stderr)
if e.stderr:
print(f"stderr: {e.stderr.decode()}", file=sys.stderr)
return None
def post_or_update_comment(pr_number: str, comment_body: str) -> bool: def update_existing_comment(comment_id: str, comment_body: str) -> None:
"""Update an existing comment.
Args:
comment_id: Comment ID to update
comment_body: New comment body text
Raises:
subprocess.CalledProcessError: If gh command fails
"""
print(f"DEBUG: Updating existing comment {comment_id}", file=sys.stderr)
result = subprocess.run(
[
"gh",
"api",
f"/repos/{{owner}}/{{repo}}/issues/comments/{comment_id}",
"-X",
"PATCH",
"-f",
f"body={comment_body}",
],
check=True,
capture_output=True,
text=True,
)
print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr)
def create_new_comment(pr_number: str, comment_body: str) -> None:
"""Create a new PR comment.
Args:
pr_number: PR number
comment_body: Comment body text
Raises:
subprocess.CalledProcessError: If gh command fails
"""
print(f"DEBUG: Posting new comment on PR #{pr_number}", file=sys.stderr)
result = subprocess.run(
["gh", "pr", "comment", pr_number, "--body", comment_body],
check=True,
capture_output=True,
text=True,
)
print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr)
def post_or_update_comment(pr_number: str, comment_body: str) -> None:
"""Post a new comment or update existing one. """Post a new comment or update existing one.
Args: Args:
pr_number: PR number pr_number: PR number
comment_body: Comment body text comment_body: Comment body text
Returns: Raises:
True if successful, False otherwise subprocess.CalledProcessError: If gh command fails
""" """
# Look for existing comment # Look for existing comment
existing_comment_id = find_existing_comment(pr_number) existing_comment_id = find_existing_comment(pr_number)
try: if existing_comment_id and existing_comment_id != "None":
if existing_comment_id and existing_comment_id != "None": update_existing_comment(existing_comment_id, comment_body)
# Update existing comment else:
print( create_new_comment(pr_number, comment_body)
f"DEBUG: Updating existing comment {existing_comment_id}",
file=sys.stderr,
)
result = subprocess.run(
[
"gh",
"api",
f"/repos/{{owner}}/{{repo}}/issues/comments/{existing_comment_id}",
"-X",
"PATCH",
"-f",
f"body={comment_body}",
],
check=True,
capture_output=True,
text=True,
)
print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr)
else:
# Post new comment
print(
f"DEBUG: Posting new comment (existing_comment_id={existing_comment_id})",
file=sys.stderr,
)
result = subprocess.run(
["gh", "pr", "comment", pr_number, "--body", comment_body],
check=True,
capture_output=True,
text=True,
)
print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr)
print("Comment posted/updated successfully", file=sys.stderr) print("Comment posted/updated successfully", file=sys.stderr)
return True
except subprocess.CalledProcessError as e:
print(f"Error posting/updating comment: {e}", file=sys.stderr)
if e.stderr:
print(
f"stderr: {e.stderr.decode() if isinstance(e.stderr, bytes) else e.stderr}",
file=sys.stderr,
)
if e.stdout:
print(
f"stdout: {e.stdout.decode() if isinstance(e.stdout, bytes) else e.stdout}",
file=sys.stderr,
)
return False
def main() -> int: def main() -> int:
@@ -627,9 +561,9 @@ def main() -> int:
) )
# Post or update comment # Post or update comment
success = post_or_update_comment(args.pr_number, comment_body) post_or_update_comment(args.pr_number, comment_body)
return 0 if success else 1 return 0
if __name__ == "__main__": if __name__ == "__main__":

View File

@@ -25,8 +25,15 @@ import sys
sys.path.insert(0, str(Path(__file__).parent.parent)) sys.path.insert(0, str(Path(__file__).parent.parent))
# pylint: disable=wrong-import-position # pylint: disable=wrong-import-position
from esphome.analyze_memory import MemoryAnalyzer
from esphome.platformio_api import IDEData
from script.ci_helpers import write_github_output from script.ci_helpers import write_github_output
# Regex patterns for extracting memory usage from PlatformIO output
_RAM_PATTERN = re.compile(r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes")
_FLASH_PATTERN = re.compile(r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes")
_BUILD_PATH_PATTERN = re.compile(r"Build path: (.+)")
def extract_from_compile_output( def extract_from_compile_output(
output_text: str, output_text: str,
@@ -42,7 +49,7 @@ def extract_from_compile_output(
Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes)
Also extracts build directory from lines like: Also extracts build directory from lines like:
INFO Deleting /path/to/build/.esphome/build/componenttestesp8266ard/.pioenvs INFO Compiling app... Build path: /path/to/build
Args: Args:
output_text: Compile output text (may contain multiple builds) output_text: Compile output text (may contain multiple builds)
@@ -51,12 +58,8 @@ def extract_from_compile_output(
Tuple of (total_ram_bytes, total_flash_bytes, build_dir) or (None, None, None) if not found Tuple of (total_ram_bytes, total_flash_bytes, build_dir) or (None, None, None) if not found
""" """
# Find all RAM and Flash matches (may be multiple builds) # Find all RAM and Flash matches (may be multiple builds)
ram_matches = re.findall( ram_matches = _RAM_PATTERN.findall(output_text)
r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text flash_matches = _FLASH_PATTERN.findall(output_text)
)
flash_matches = re.findall(
r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text
)
if not ram_matches or not flash_matches: if not ram_matches or not flash_matches:
return None, None, None return None, None, None
@@ -69,7 +72,7 @@ def extract_from_compile_output(
# Look for: INFO Compiling app... Build path: /path/to/build # Look for: INFO Compiling app... Build path: /path/to/build
# Note: Multiple builds reuse the same build path (each overwrites the previous) # Note: Multiple builds reuse the same build path (each overwrites the previous)
build_dir = None build_dir = None
if match := re.search(r"Build path: (.+)", output_text): if match := _BUILD_PATH_PATTERN.search(output_text):
build_dir = match.group(1).strip() build_dir = match.group(1).strip()
return total_ram, total_flash, build_dir return total_ram, total_flash, build_dir
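
Using the sample line from the docstring above, the precompiled pattern pulls the byte count out directly:

import re

_FLASH_PATTERN = re.compile(r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes")
sample = "Flash: [===       ]  34.0% (used 348511 bytes from 1023984 bytes)"
print(_FLASH_PATTERN.findall(sample))  # ['348511']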
@@ -84,9 +87,6 @@ def run_detailed_analysis(build_dir: str) -> dict | None:
Returns: Returns:
Dictionary with analysis results or None if analysis fails Dictionary with analysis results or None if analysis fails
""" """
from esphome.analyze_memory import MemoryAnalyzer
from esphome.platformio_api import IDEData
build_path = Path(build_dir) build_path = Path(build_dir)
if not build_path.exists(): if not build_path.exists():
print(f"Build directory not found: {build_dir}", file=sys.stderr) print(f"Build directory not found: {build_dir}", file=sys.stderr)
@@ -119,18 +119,19 @@ def run_detailed_analysis(build_dir: str) -> dict | None:
idedata = None idedata = None
for idedata_path in idedata_candidates: for idedata_path in idedata_candidates:
if idedata_path.exists(): if not idedata_path.exists():
try: continue
with open(idedata_path, encoding="utf-8") as f: try:
raw_data = json.load(f) with open(idedata_path, encoding="utf-8") as f:
idedata = IDEData(raw_data) raw_data = json.load(f)
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) idedata = IDEData(raw_data)
break print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
except (json.JSONDecodeError, OSError) as e: break
print( except (json.JSONDecodeError, OSError) as e:
f"Warning: Failed to load idedata from {idedata_path}: {e}", print(
file=sys.stderr, f"Warning: Failed to load idedata from {idedata_path}: {e}",
) file=sys.stderr,
)
analyzer = MemoryAnalyzer(elf_path, idedata=idedata) analyzer = MemoryAnalyzer(elf_path, idedata=idedata)
components = analyzer.analyze() components = analyzer.analyze()
@@ -209,11 +210,7 @@ def main() -> int:
return 1 return 1
# Count how many builds were found # Count how many builds were found
num_builds = len( num_builds = len(_RAM_PATTERN.findall(compile_output))
re.findall(
r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", compile_output
)
)
if num_builds > 1: if num_builds > 1:
print( print(

View File

@@ -13,9 +13,9 @@ what files have changed. It outputs JSON with the following structure:
"component_test_count": 5, "component_test_count": 5,
"memory_impact": { "memory_impact": {
"should_run": "true/false", "should_run": "true/false",
"component": "component_name", "components": ["component1", "component2", ...],
"test_file": "test.esp32-idf.yaml", "platform": "esp32-idf",
"platform": "esp32-idf" "use_merged_config": "true"
} }
} }
@@ -26,7 +26,7 @@ The CI workflow uses this information to:
- Skip or run Python linters (ruff, flake8, pylint, pyupgrade) - Skip or run Python linters (ruff, flake8, pylint, pyupgrade)
- Determine which components to test individually - Determine which components to test individually
- Decide how to split component tests (if there are many) - Decide how to split component tests (if there are many)
- Run memory impact analysis when exactly one component changes - Run memory impact analysis whenever there are changed components (merged config), and also for core-only changes
Usage: Usage:
python script/determine-jobs.py [-b BRANCH] python script/determine-jobs.py [-b BRANCH]
@@ -38,6 +38,7 @@ Options:
from __future__ import annotations from __future__ import annotations
import argparse import argparse
from collections import Counter
from enum import StrEnum from enum import StrEnum
from functools import cache from functools import cache
import json import json
@@ -48,11 +49,13 @@ import sys
from typing import Any from typing import Any
from helpers import ( from helpers import (
BASE_BUS_COMPONENTS,
CPP_FILE_EXTENSIONS, CPP_FILE_EXTENSIONS,
ESPHOME_COMPONENTS_PATH,
PYTHON_FILE_EXTENSIONS, PYTHON_FILE_EXTENSIONS,
changed_files, changed_files,
get_all_dependencies, get_all_dependencies,
get_component_from_path,
get_component_test_files,
get_components_from_integration_fixtures, get_components_from_integration_fixtures,
parse_test_filename, parse_test_filename,
root_path, root_path,
@@ -142,12 +145,9 @@ def should_run_integration_tests(branch: str | None = None) -> bool:
# Check if any required components changed # Check if any required components changed
for file in files: for file in files:
if file.startswith(ESPHOME_COMPONENTS_PATH): component = get_component_from_path(file)
parts = file.split("/") if component and component in all_required_components:
if len(parts) >= 3: return True
component = parts[2]
if component in all_required_components:
return True
return False return False
@@ -261,10 +261,7 @@ def _component_has_tests(component: str) -> bool:
Returns: Returns:
True if the component has test YAML files True if the component has test YAML files
""" """
tests_dir = Path(root_path) / "tests" / "components" / component return bool(get_component_test_files(component))
if not tests_dir.exists():
return False
return any(tests_dir.glob("test.*.yaml"))
def detect_memory_impact_config( def detect_memory_impact_config(
@@ -291,17 +288,15 @@ def detect_memory_impact_config(
files = changed_files(branch) files = changed_files(branch)
# Find all changed components (excluding core and base bus components) # Find all changed components (excluding core and base bus components)
changed_component_set = set() changed_component_set: set[str] = set()
has_core_changes = False has_core_changes = False
for file in files: for file in files:
if file.startswith(ESPHOME_COMPONENTS_PATH): component = get_component_from_path(file)
parts = file.split("/") if component:
if len(parts) >= 3: # Skip base bus components as they're used across many builds
component = parts[2] if component not in BASE_BUS_COMPONENTS:
# Skip base bus components as they're used across many builds changed_component_set.add(component)
if component not in ["i2c", "spi", "uart", "modbus", "canbus"]:
changed_component_set.add(component)
elif file.startswith("esphome/"): elif file.startswith("esphome/"):
# Core ESPHome files changed (not component-specific) # Core ESPHome files changed (not component-specific)
has_core_changes = True has_core_changes = True
@@ -321,25 +316,24 @@ def detect_memory_impact_config(
return {"should_run": "false"} return {"should_run": "false"}
# Find components that have tests and collect their supported platforms # Find components that have tests and collect their supported platforms
components_with_tests = [] components_with_tests: list[str] = []
component_platforms_map = {} # Track which platforms each component supports component_platforms_map: dict[
str, set[Platform]
] = {} # Track which platforms each component supports
for component in sorted(changed_component_set): for component in sorted(changed_component_set):
tests_dir = Path(root_path) / "tests" / "components" / component
if not tests_dir.exists():
continue
# Look for test files on preferred platforms # Look for test files on preferred platforms
test_files = list(tests_dir.glob("test.*.yaml")) test_files = get_component_test_files(component)
if not test_files: if not test_files:
continue continue
# Check if component has tests for any preferred platform # Check if component has tests for any preferred platform
available_platforms = [] available_platforms = [
for test_file in test_files: platform
_, platform = parse_test_filename(test_file) for test_file in test_files
if platform != "all" and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE: if (platform := parse_test_filename(test_file)[1]) != "all"
available_platforms.append(platform) and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE
]
if not available_platforms: if not available_platforms:
continue continue
@@ -367,10 +361,10 @@ def detect_memory_impact_config(
else: else:
# No common platform - pick the most commonly supported platform # No common platform - pick the most commonly supported platform
# This allows testing components individually even if they can't be merged # This allows testing components individually even if they can't be merged
platform_counts = {} # Count how many components support each platform
for platforms in component_platforms_map.values(): platform_counts = Counter(
for p in platforms: p for platforms in component_platforms_map.values() for p in platforms
platform_counts[p] = platform_counts.get(p, 0) + 1 )
# Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE # Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE
platform = max( platform = max(
platform_counts.keys(), platform_counts.keys(),

View File

@@ -29,6 +29,18 @@ YAML_FILE_EXTENSIONS = (".yaml", ".yml")
# Component path prefix # Component path prefix
ESPHOME_COMPONENTS_PATH = "esphome/components/" ESPHOME_COMPONENTS_PATH = "esphome/components/"
# Base bus components - these ARE the bus implementations and should not
# be flagged as needing migration since they are the platform/base components
BASE_BUS_COMPONENTS = {
"i2c",
"spi",
"uart",
"modbus",
"canbus",
"remote_transmitter",
"remote_receiver",
}
def parse_list_components_output(output: str) -> list[str]: def parse_list_components_output(output: str) -> list[str]:
"""Parse the output from list-components.py script. """Parse the output from list-components.py script.
@@ -63,6 +75,48 @@ def parse_test_filename(test_file: Path) -> tuple[str, str]:
return parts[0], "all" return parts[0], "all"
def get_component_from_path(file_path: str) -> str | None:
"""Extract component name from a file path.
Args:
file_path: Path to a file (e.g., "esphome/components/wifi/wifi.cpp")
Returns:
Component name if path is in components directory, None otherwise
"""
if not file_path.startswith(ESPHOME_COMPONENTS_PATH):
return None
parts = file_path.split("/")
if len(parts) >= 3:
return parts[2]
return None
def get_component_test_files(
component: str, *, all_variants: bool = False
) -> list[Path]:
"""Get test files for a component.
Args:
component: Component name (e.g., "wifi")
all_variants: If True, returns all test files including variants (test-*.yaml).
If False, returns only base test files (test.*.yaml).
Default is False.
Returns:
List of test file paths for the component, or empty list if none exist
"""
tests_dir = Path(root_path) / "tests" / "components" / component
if not tests_dir.exists():
return []
if all_variants:
# Match both test.*.yaml and test-*.yaml patterns
return list(tests_dir.glob("test[.-]*.yaml"))
# Match only test.*.yaml (base tests)
return list(tests_dir.glob("test.*.yaml"))
def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str: def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str:
prefix = "".join(color) if isinstance(color, tuple) else color prefix = "".join(color) if isinstance(color, tuple) else color
suffix = colorama.Style.RESET_ALL if reset else "" suffix = colorama.Style.RESET_ALL if reset else ""
@@ -331,11 +385,9 @@ def _filter_changed_ci(files: list[str]) -> list[str]:
# because changes in one file can affect other files in the same component. # because changes in one file can affect other files in the same component.
filtered_files = [] filtered_files = []
for f in files: for f in files:
if f.startswith(ESPHOME_COMPONENTS_PATH): component = get_component_from_path(f)
# Check if file belongs to any of the changed components if component and component in component_set:
parts = f.split("/") filtered_files.append(f)
if len(parts) >= 3 and parts[2] in component_set:
filtered_files.append(f)
return filtered_files return filtered_files
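Taken together, the two new helpers centralize component path parsing and test discovery. A small usage sketch (the paths are hypothetical; the function names and signatures are the ones added above):

from script.helpers import get_component_from_path, get_component_test_files

# Only files under esphome/components/<name>/ map to a component.
assert get_component_from_path("esphome/components/wifi/wifi_component.cpp") == "wifi"
assert get_component_from_path("esphome/core/helpers.cpp") is None

# Base tests only (test.*.yaml) by default; variants (test-*.yaml) included on request.
base_tests = get_component_test_files("wifi")
all_tests = get_component_test_files("wifi", all_variants=True)
print(len(base_tests), len(all_tests))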

View File

@@ -4,7 +4,7 @@ from collections.abc import Callable
from pathlib import Path from pathlib import Path
import sys import sys
from helpers import changed_files, git_ls_files from helpers import changed_files, get_component_from_path, git_ls_files
from esphome.const import ( from esphome.const import (
KEY_CORE, KEY_CORE,
@@ -30,11 +30,9 @@ def get_all_component_files() -> list[str]:
def extract_component_names_array_from_files_array(files): def extract_component_names_array_from_files_array(files):
components = [] components = []
for file in files: for file in files:
file_parts = file.split("/") component_name = get_component_from_path(file)
if len(file_parts) >= 4: if component_name and component_name not in components:
component_name = file_parts[2] components.append(component_name)
if component_name not in components:
components.append(component_name)
return components return components

View File

@@ -28,6 +28,7 @@ from script.analyze_component_buses import (
create_grouping_signature, create_grouping_signature,
merge_compatible_bus_groups, merge_compatible_bus_groups,
) )
from script.helpers import get_component_test_files
# Weighting for batch creation # Weighting for batch creation
# Isolated components can't be grouped/merged, so they count as 10x # Isolated components can't be grouped/merged, so they count as 10x
@@ -45,17 +46,12 @@ def has_test_files(component_name: str, tests_dir: Path) -> bool:
Args: Args:
component_name: Name of the component component_name: Name of the component
tests_dir: Path to tests/components directory tests_dir: Path to tests/components directory (unused, kept for compatibility)
Returns: Returns:
True if the component has test.*.yaml files True if the component has test.*.yaml files
""" """
component_dir = tests_dir / component_name return bool(get_component_test_files(component_name))
if not component_dir.exists() or not component_dir.is_dir():
return False
# Check for test.*.yaml files
return any(component_dir.glob("test.*.yaml"))
def create_intelligent_batches( def create_intelligent_batches(

View File

@@ -0,0 +1,27 @@
{{ comment_marker }}
## Memory Impact Analysis
**Components:** {{ components_str }}
**Platform:** `{{ platform }}`
| Metric | Target Branch | This PR | Change |
|--------|--------------|---------|--------|
| **RAM** | {{ target_ram }} | {{ pr_ram }} | {{ ram_change }} |
| **Flash** | {{ target_flash }} | {{ pr_flash }} | {{ flash_change }} |
{% if component_breakdown %}
{{ component_breakdown }}
{% endif %}
{% if symbol_changes %}
{{ symbol_changes }}
{% endif %}
{%- if target_cache_hit %}
> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI).
{%- endif %}
---
> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation).
> **Dynamic memory (heap)** cannot be measured automatically.
> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues.
*This analysis runs automatically when components change. Memory usage is measured from {{ config_note }}.*
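As a rough illustration of how this comment template could be rendered (the template directory, filename, and values below are assumptions, not taken from the CI scripts):

from jinja2 import Environment, FileSystemLoader

env = Environment(
    loader=FileSystemLoader("script/templates"),  # assumed location of the .j2 files
    trim_blocks=True,
    lstrip_blocks=True,
)
template = env.get_template("ci_memory_impact_comment_template.j2")  # assumed filename
comment = template.render(
    comment_marker="<!-- ci-memory-impact -->",
    components_str="`wifi`, `logger`",
    platform="esp32-idf",
    target_ram="41,256 bytes", pr_ram="41,288 bytes", ram_change="+32 bytes (+0.08%)",
    target_flash="512,104 bytes", pr_flash="512,360 bytes", flash_change="+256 bytes (+0.05%)",
    component_breakdown="", symbol_changes="",
    target_cache_hit=True,
    config_note="a merged configuration of all changed components",
)
print(comment)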

View File

@@ -0,0 +1,15 @@
<details open>
<summary>📊 Component Memory Breakdown</summary>
| Component | Target Flash | PR Flash | Change |
|-----------|--------------|----------|--------|
{% for comp, target_flash, pr_flash, delta in changed_components[:max_rows] -%}
{% set threshold = component_change_threshold if comp.startswith("[esphome]") else none -%}
| `{{ comp }}` | {{ target_flash|format_bytes }} | {{ pr_flash|format_bytes }} | {{ format_change(target_flash, pr_flash, threshold=threshold) }} |
{% endfor -%}
{% if changed_components|length > max_rows -%}
| ... | ... | ... | *({{ changed_components|length - max_rows }} more components not shown)* |
{% endif -%}
</details>
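Continuing the rendering sketch above: this breakdown template also relies on a format_bytes filter and a format_change callable, neither of which is shown in this diff. Hedged stand-ins might look like this (the real implementations, and the exact meaning of the threshold, may differ):

def format_bytes(n: int) -> str:
    return f"{n:,} bytes"

def format_change(before: int, after: int, threshold: int | None = None) -> str:
    delta = after - before
    pct = (delta / before * 100.0) if before else 0.0
    suffix = " (within noise)" if threshold is not None and abs(delta) <= threshold else ""
    return f"{delta:+,} bytes ({pct:+.2f}%){suffix}"

env.filters["format_bytes"] = format_bytes    # used as {{ value|format_bytes }}
env.globals["format_change"] = format_change  # called as format_change(target, pr, threshold=...)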

View File

@@ -0,0 +1,8 @@
{#- Macro for formatting symbol names in tables -#}
{%- macro format_symbol(symbol, max_length, truncate_length) -%}
{%- if symbol|length <= max_length -%}
`{{ symbol }}`
{%- else -%}
<details><summary><code>{{ symbol[:truncate_length] }}...</code></summary><code>{{ symbol }}</code></details>
{%- endif -%}
{%- endmacro -%}

View File

@@ -0,0 +1,51 @@
{%- from 'ci_memory_impact_macros.j2' import format_symbol -%}
<details>
<summary>🔍 Symbol-Level Changes (click to expand)</summary>
{% if changed_symbols %}
### Changed Symbols
| Symbol | Target Size | PR Size | Change |
|--------|-------------|---------|--------|
{% for symbol, target_size, pr_size, delta in changed_symbols[:max_changed_rows] -%}
| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ target_size|format_bytes }} | {{ pr_size|format_bytes }} | {{ format_change(target_size, pr_size) }} |
{% endfor -%}
{% if changed_symbols|length > max_changed_rows -%}
| ... | ... | ... | *({{ changed_symbols|length - max_changed_rows }} more changed symbols not shown)* |
{% endif -%}
{% endif %}
{% if new_symbols %}
### New Symbols (top {{ max_new_rows }})
| Symbol | Size |
|--------|------|
{% for symbol, size in new_symbols[:max_new_rows] -%}
| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} |
{% endfor -%}
{% if new_symbols|length > max_new_rows -%}
{% set total_new_size = new_symbols|sum(attribute=1) -%}
| *{{ new_symbols|length - max_new_rows }} more new symbols...* | *Total: {{ total_new_size|format_bytes }}* |
{% endif -%}
{% endif %}
{% if removed_symbols %}
### Removed Symbols (top {{ max_removed_rows }})
| Symbol | Size |
|--------|------|
{% for symbol, size in removed_symbols[:max_removed_rows] -%}
| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} |
{% endfor -%}
{% if removed_symbols|length > max_removed_rows -%}
{% set total_removed_size = removed_symbols|sum(attribute=1) -%}
| *{{ removed_symbols|length - max_removed_rows }} more removed symbols...* | *Total: {{ total_removed_size|format_bytes }}* |
{% endif -%}
{% endif %}
</details>
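The changed, new, and removed symbol lists this template consumes can be derived from two symbol-to-size maps, one per branch. A minimal sketch under that assumption (not the actual extraction script):

def diff_symbols(
    target: dict[str, int], pr: dict[str, int]
) -> tuple[list[tuple[str, int, int, int]], list[tuple[str, int]], list[tuple[str, int]]]:
    """Split symbols into changed/new/removed rows, largest sizes or deltas first."""
    changed = [
        (name, target[name], pr[name], pr[name] - target[name])
        for name in target.keys() & pr.keys()
        if pr[name] != target[name]
    ]
    new = [(name, size) for name, size in pr.items() if name not in target]
    removed = [(name, size) for name, size in target.items() if name not in pr]
    changed.sort(key=lambda row: abs(row[3]), reverse=True)
    new.sort(key=lambda row: row[1], reverse=True)
    removed.sort(key=lambda row: row[1], reverse=True)
    return changed, new, removed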

View File

@@ -39,6 +39,7 @@ from script.analyze_component_buses import (
merge_compatible_bus_groups, merge_compatible_bus_groups,
uses_local_file_references, uses_local_file_references,
) )
from script.helpers import get_component_test_files
from script.merge_component_configs import merge_component_configs from script.merge_component_configs import merge_component_configs
@@ -100,10 +101,10 @@ def find_component_tests(
if not comp_dir.is_dir(): if not comp_dir.is_dir():
continue continue
# Find test files - either base only (test.*.yaml) or all (test[.-]*.yaml) # Get test files using helper function
pattern = "test.*.yaml" if base_only else "test[.-]*.yaml" test_files = get_component_test_files(comp_dir.name, all_variants=not base_only)
for test_file in comp_dir.glob(pattern): if test_files:
component_tests[comp_dir.name].append(test_file) component_tests[comp_dir.name] = test_files
return dict(component_tests) return dict(component_tests)

View File

@@ -1,7 +1,3 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: heatpumpir - platform: heatpumpir
protocol: ballu protocol: ballu
@@ -10,3 +6,4 @@ climate:
name: HeatpumpIR Climate name: HeatpumpIR Climate
min_temperature: 18 min_temperature: 18
max_temperature: 30 max_temperature: 30
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: climate_ir_lg - platform: climate_ir_lg
name: LG Climate name: LG Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: coolix - platform: coolix
name: Coolix Climate name: Coolix Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,3 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: heatpumpir - platform: heatpumpir
protocol: daikin protocol: daikin
@@ -10,3 +6,4 @@ climate:
name: HeatpumpIR Climate name: HeatpumpIR Climate
min_temperature: 18 min_temperature: 18
max_temperature: 30 max_temperature: 30
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,18 +1,3 @@
remote_transmitter:
pin: ${tx_pin}
carrier_duty_percent: 50%
id: tsvr
remote_receiver:
id: rcvr
pin:
number: ${rx_pin}
inverted: true
mode:
input: true
pullup: true
tolerance: 40%
climate: climate:
- platform: daikin_arc - platform: daikin_arc
name: Daikin AC name: Daikin AC

View File

@@ -1,5 +1,5 @@
substitutions: packages:
tx_pin: GPIO0 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
rx_pin: GPIO2 remote_receiver: !include ../../test_build_components/common/remote_receiver/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: daikin_brc - platform: daikin_brc
name: Daikin_brc Climate name: Daikin_brc Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: delonghi - platform: delonghi
name: Delonghi Climate name: Delonghi Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,14 +1,5 @@
remote_transmitter:
id: tx
pin: ${remote_transmitter_pin}
carrier_duty_percent: 100%
remote_receiver:
id: rcvr
pin: ${remote_receiver_pin}
climate: climate:
- platform: emmeti - platform: emmeti
name: Emmeti name: Emmeti
receiver_id: rcvr receiver_id: rcvr
transmitter_id: tx transmitter_id: xmitr

View File

@@ -1,5 +1,5 @@
substitutions: packages:
remote_transmitter_pin: GPIO33 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
remote_receiver_pin: GPIO32 remote_receiver: !include ../../test_build_components/common/remote_receiver/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,5 +1,5 @@
substitutions: packages:
remote_transmitter_pin: GPIO0 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
remote_receiver_pin: GPIO2 remote_receiver: !include ../../test_build_components/common/remote_receiver/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: fujitsu_general - platform: fujitsu_general
name: Fujitsu General Climate name: Fujitsu General Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,8 +1,5 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: gree - platform: gree
name: GREE name: GREE
model: generic model: generic
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,3 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: heatpumpir - platform: heatpumpir
protocol: mitsubishi_heavy_zm protocol: mitsubishi_heavy_zm
@@ -10,6 +6,7 @@ climate:
name: HeatpumpIR Climate Mitsubishi name: HeatpumpIR Climate Mitsubishi
min_temperature: 18 min_temperature: 18
max_temperature: 30 max_temperature: 30
transmitter_id: xmitr
- platform: heatpumpir - platform: heatpumpir
protocol: daikin protocol: daikin
horizontal_default: mleft horizontal_default: mleft
@@ -17,6 +14,7 @@ climate:
name: HeatpumpIR Climate Daikin name: HeatpumpIR Climate Daikin
min_temperature: 18 min_temperature: 18
max_temperature: 30 max_temperature: 30
transmitter_id: xmitr
- platform: heatpumpir - platform: heatpumpir
protocol: panasonic_altdke protocol: panasonic_altdke
horizontal_default: mright horizontal_default: mright
@@ -24,3 +22,4 @@ climate:
name: HeatpumpIR Climate Panasonic name: HeatpumpIR Climate Panasonic
min_temperature: 18 min_temperature: 18
max_temperature: 30 max_temperature: 30
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO6 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/bk72xx-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: hitachi_ac344 - platform: hitachi_ac344
name: Hitachi Climate name: Hitachi Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO6 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/bk72xx-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: hitachi_ac424 - platform: hitachi_ac424
name: Hitachi Climate name: Hitachi Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO6 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/bk72xx-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -2,10 +2,6 @@ wifi:
ssid: MySSID ssid: MySSID
password: password1 password: password1
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: midea - platform: midea
id: midea_unit id: midea_unit
@@ -16,7 +12,7 @@ climate:
x.set_mode(CLIMATE_MODE_FAN_ONLY); x.set_mode(CLIMATE_MODE_FAN_ONLY);
on_state: on_state:
- logger.log: State changed! - logger.log: State changed!
transmitter_id: transmitter_id: xmitr
period: 1s period: 1s
num_attempts: 5 num_attempts: 5
timeout: 2s timeout: 2s

View File

@@ -1,7 +1,5 @@
substitutions:
pin: GPIO2
packages: packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-ard.yaml
uart: !include ../../test_build_components/common/uart/esp32-ard.yaml uart: !include ../../test_build_components/common/uart/esp32-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,5 @@
substitutions:
pin: GPIO15
packages: packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,8 +1,5 @@
remote_transmitter:
pin: 4
carrier_duty_percent: 50%
climate: climate:
- platform: midea_ir - platform: midea_ir
name: Midea IR name: Midea IR
use_fahrenheit: true use_fahrenheit: true
transmitter_id: xmitr

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: 4
carrier_duty_percent: 50%
climate: climate:
- platform: mitsubishi - platform: mitsubishi
name: Mitsubishi name: Mitsubishi
transmitter_id: xmitr

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,12 +1,3 @@
remote_receiver:
id: rcvr
pin: 4
dump: all
remote_transmitter:
pin: 2
carrier_duty_percent: 50%
sensor: sensor:
- platform: template - platform: template
id: noblex_ac_sensor id: noblex_ac_sensor

View File

@@ -1 +1,5 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
remote_receiver: !include ../../test_build_components/common/remote_receiver/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1 +1,5 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
remote_receiver: !include ../../test_build_components/common/remote_receiver/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1 +1,5 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
remote_receiver: !include ../../test_build_components/common/remote_receiver/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -128,13 +128,10 @@ valve:
optimistic: true optimistic: true
has_position: true has_position: true
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: climate_ir_lg - platform: climate_ir_lg
name: LG Climate name: LG Climate
transmitter_id: xmitr
prometheus: prometheus:
include_internal: true include_internal: true

View File

@@ -1,5 +1,7 @@
substitutions: substitutions:
verify_ssl: "false" verify_ssl: "false"
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,8 +1,8 @@
substitutions: substitutions:
verify_ssl: "false" verify_ssl: "false"
pin: GPIO2
packages: packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,5 +1,7 @@
substitutions: substitutions:
verify_ssl: "false" verify_ssl: "false"
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -143,6 +143,11 @@ on_sony:
- logger.log: - logger.log:
format: "on_sony: %lu %u" format: "on_sony: %lu %u"
args: ["long(x.data)", "x.nbits"] args: ["long(x.data)", "x.nbits"]
on_symphony:
then:
- logger.log:
format: "on_symphony: 0x%lX %u"
args: ["long(x.data)", "x.nbits"]
on_toshiba_ac: on_toshiba_ac:
then: then:
- logger.log: - logger.log:

View File

@@ -53,6 +53,12 @@ button:
remote_transmitter.transmit_sony: remote_transmitter.transmit_sony:
data: 0xABCDEF data: 0xABCDEF
nbits: 12 nbits: 12
- platform: template
name: Symphony
on_press:
remote_transmitter.transmit_symphony:
data: 0xE88
nbits: 12
- platform: template - platform: template
name: Panasonic name: Panasonic
on_press: on_press:

View File

@@ -1,7 +1,3 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
sensor: sensor:
- platform: template - platform: template
id: tcl112_sensor id: tcl112_sensor
@@ -13,3 +9,4 @@ climate:
supports_heat: true supports_heat: true
supports_cool: true supports_cool: true
sensor: tcl112_sensor sensor: tcl112_sensor
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: toshiba - platform: toshiba
name: Toshiba Climate name: Toshiba Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate: climate:
- platform: whirlpool - platform: whirlpool
name: Whirlpool Climate name: Whirlpool Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO2 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml <<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions: packages:
pin: GPIO5 remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml <<: !include common.yaml

Some files were not shown because too many files have changed in this diff Show More