mirror of https://github.com/esphome/esphome.git synced 2025-10-31 07:03:55 +00:00

Merge branch 'ci_impact_analysis' into ci_impact_analysis_sensor_base

J. Nick Koston
2025-10-17 21:13:18 -10:00
122 changed files with 1307 additions and 719 deletions

View File

@@ -534,6 +534,7 @@ jobs:
ram_usage: ${{ steps.extract.outputs.ram_usage }}
flash_usage: ${{ steps.extract.outputs.flash_usage }}
cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }}
skip: ${{ steps.check-script.outputs.skip }}
steps:
- name: Check out target branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -735,7 +736,7 @@ jobs:
- determine-jobs
- memory-impact-target-branch
- memory-impact-pr-branch
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true'
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
permissions:
contents: read
pull-requests: write

View File

@@ -33,15 +33,41 @@ _GCC_PREFIX_ANNOTATIONS = {
"_GLOBAL__sub_D_": "global destructor for",
}
# GCC optimization suffix pattern (e.g., $isra$0, $part$1, $constprop$2)
_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)")
# C++ runtime patterns for categorization
_CPP_RUNTIME_PATTERNS = frozenset(["vtable", "typeinfo", "thunk"])
# libc printf/scanf family base names (used to detect variants like _printf_r, vfprintf, etc.)
_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"])
# Regex pattern for parsing readelf section headers
# Format: [ #] name type addr off size
_READELF_SECTION_PATTERN = re.compile(
r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)"
)
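As a quick reference, a standalone sketch of what that pattern extracts from one readelf -S header row (the sample line below is illustrative, not taken from this commit):

import re

_READELF_SECTION_PATTERN = re.compile(
    r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)"
)

# Hypothetical `readelf -S` row: [Nr] Name Type Addr Off Size ...
line = "  [ 1] .iram0.vectors    PROGBITS        40080000 00b000 000403 00  AX  0   0  4"
if match := _READELF_SECTION_PATTERN.match(line):
    name, size = match.group(1), int(match.group(2), 16)  # size column is hex
    print(name, size)  # .iram0.vectors 1027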
# Component category prefixes
_COMPONENT_PREFIX_ESPHOME = "[esphome]"
_COMPONENT_PREFIX_EXTERNAL = "[external]"
_COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core"
_COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api"
# C++ namespace prefixes
_NAMESPACE_ESPHOME = "esphome::"
_NAMESPACE_STD = "std::"
# Type alias for symbol information: (symbol_name, size, component)
SymbolInfoType = tuple[str, int, str]
@dataclass
class MemorySection:
"""Represents a memory section with its symbols."""
name: str
symbols: list[tuple[str, int, str]] = field(
default_factory=list
) # (symbol_name, size, component)
symbols: list[SymbolInfoType] = field(default_factory=list)
total_size: int = 0
@@ -77,7 +103,7 @@ class MemoryAnalyzer:
readelf_path: str | None = None,
external_components: set[str] | None = None,
idedata: "IDEData | None" = None,
):
) -> None:
"""Initialize memory analyzer.
Args:
@@ -133,12 +159,7 @@ class MemoryAnalyzer:
# Parse section headers
for line in result.stdout.splitlines():
# Look for section entries
if not (
match := re.match(
r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)",
line,
)
):
if not (match := _READELF_SECTION_PATTERN.match(line)):
continue
section_name = match.group(1)
@@ -212,7 +233,7 @@ class MemoryAnalyzer:
self._uncategorized_symbols.append((symbol_name, demangled, size))
# Track ESPHome core symbols for detailed analysis
if component == "[esphome]core" and size > 0:
if component == _COMPONENT_CORE and size > 0:
demangled = self._demangle_symbol(symbol_name)
self._esphome_core_symbols.append((symbol_name, demangled, size))
@@ -230,13 +251,13 @@ class MemoryAnalyzer:
# Check for special component classes first (before namespace pattern)
# This handles cases like esphome::ESPHomeOTAComponent which should map to ota
if "esphome::" in demangled:
if _NAMESPACE_ESPHOME in demangled:
# Check for special component classes that include component name in the class
# For example: esphome::ESPHomeOTAComponent -> ota component
for component_name in get_esphome_components():
patterns = get_component_class_patterns(component_name)
if any(pattern in demangled for pattern in patterns):
return f"[esphome]{component_name}"
return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
# Check for ESPHome component namespaces
match = ESPHOME_COMPONENT_PATTERN.search(demangled)
@@ -247,17 +268,17 @@ class MemoryAnalyzer:
# Check if this is an actual component in the components directory
if component_name in get_esphome_components():
return f"[esphome]{component_name}"
return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
# Check if this is a known external component from the config
if component_name in self.external_components:
return f"[external]{component_name}"
return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}"
# Everything else in esphome:: namespace is core
return "[esphome]core"
return _COMPONENT_CORE
# Check for esphome core namespace (no component namespace)
if "esphome::" in demangled:
if _NAMESPACE_ESPHOME in demangled:
# If no component match found, it's core
return "[esphome]core"
return _COMPONENT_CORE
# Check against symbol patterns
for component, patterns in SYMBOL_PATTERNS.items():
@@ -273,14 +294,14 @@ class MemoryAnalyzer:
# Check if spi_flash vs spi_driver
if "spi_" in symbol_name or "SPI" in symbol_name:
if "spi_flash" in symbol_name:
return "spi_flash"
return "spi_driver"
return "spi_flash" if "spi_flash" in symbol_name else "spi_driver"
# libc special printf variants
if symbol_name.startswith("_") and symbol_name[1:].replace("_r", "").replace(
"v", ""
).replace("s", "") in ["printf", "fprintf", "sprintf", "scanf"]:
if (
symbol_name.startswith("_")
and symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "")
in _LIBC_PRINTF_SCANF_FAMILY
):
return "libc"
# Track uncategorized symbols for analysis
@@ -294,45 +315,42 @@ class MemoryAnalyzer:
# Try to find the appropriate c++filt for the platform
cppfilt_cmd = "c++filt"
_LOGGER.warning("Demangling %d symbols", len(symbols))
_LOGGER.warning("objdump_path = %s", self.objdump_path)
_LOGGER.info("Demangling %d symbols", len(symbols))
_LOGGER.debug("objdump_path = %s", self.objdump_path)
# Check if we have a toolchain-specific c++filt
if self.objdump_path and self.objdump_path != "objdump":
# Replace objdump with c++filt in the path
potential_cppfilt = self.objdump_path.replace("objdump", "c++filt")
_LOGGER.warning("Checking for toolchain c++filt at: %s", potential_cppfilt)
_LOGGER.info("Checking for toolchain c++filt at: %s", potential_cppfilt)
if Path(potential_cppfilt).exists():
cppfilt_cmd = potential_cppfilt
_LOGGER.warning("✓ Using toolchain c++filt: %s", cppfilt_cmd)
_LOGGER.info("✓ Using toolchain c++filt: %s", cppfilt_cmd)
else:
_LOGGER.warning(
_LOGGER.info(
"✗ Toolchain c++filt not found at %s, using system c++filt",
potential_cppfilt,
)
else:
_LOGGER.warning(
"✗ Using system c++filt (objdump_path=%s)", self.objdump_path
)
_LOGGER.info("✗ Using system c++filt (objdump_path=%s)", self.objdump_path)
# Strip GCC optimization suffixes and prefixes before demangling
# Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt
# Prefixes like _GLOBAL__sub_I_ need to be removed and tracked
symbols_stripped = []
symbols_prefixes = [] # Track removed prefixes
symbols_stripped: list[str] = []
symbols_prefixes: list[str] = [] # Track removed prefixes
for symbol in symbols:
# Remove GCC optimization markers
stripped = re.sub(r"\$(?:isra|part|constprop)\$\d+", "", symbol)
stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol)
# Handle GCC global constructor/initializer prefixes
# _GLOBAL__sub_I_<mangled> -> extract <mangled> for demangling
prefix = ""
if stripped.startswith("_GLOBAL__sub_I_"):
prefix = "_GLOBAL__sub_I_"
stripped = stripped[len(prefix) :]
elif stripped.startswith("_GLOBAL__sub_D_"):
prefix = "_GLOBAL__sub_D_"
stripped = stripped[len(prefix) :]
for gcc_prefix in _GCC_PREFIX_ANNOTATIONS:
if stripped.startswith(gcc_prefix):
prefix = gcc_prefix
stripped = stripped[len(prefix) :]
break
symbols_stripped.append(stripped)
symbols_prefixes.append(prefix)
@@ -405,17 +423,18 @@ class MemoryAnalyzer:
if stripped == demangled and stripped.startswith("_Z"):
failed_count += 1
if failed_count <= 5: # Only log first 5 failures
_LOGGER.warning("Failed to demangle: %s", original[:100])
_LOGGER.warning("Failed to demangle: %s", original)
if failed_count > 0:
_LOGGER.warning(
"Failed to demangle %d/%d symbols using %s",
failed_count,
len(symbols),
cppfilt_cmd,
)
else:
_LOGGER.warning("Successfully demangled all %d symbols", len(symbols))
if failed_count == 0:
_LOGGER.info("Successfully demangled all %d symbols", len(symbols))
return
_LOGGER.warning(
"Failed to demangle %d/%d symbols using %s",
failed_count,
len(symbols),
cppfilt_cmd,
)
@staticmethod
def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str:
@@ -452,8 +471,7 @@ class MemoryAnalyzer:
Returns:
Demangled name with suffix annotation
"""
suffix_match = re.search(r"(\$(?:isra|part|constprop)\$\d+)", original)
if suffix_match:
if suffix_match := _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original):
return f"{demangled} [{suffix_match.group(1)}]"
return demangled
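The strip/restore pair round-trips like this; a minimal sketch with a hypothetical mangled name:

import re

_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)")

def restore_symbol_suffix(original: str, demangled: str) -> str:
    # Re-attach the optimization marker that was stripped before demangling
    if suffix_match := _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original):
        return f"{demangled} [{suffix_match.group(1)}]"
    return demangled

print(restore_symbol_suffix("_ZN3foo3barEv$isra$0", "foo::bar()"))  # foo::bar() [$isra$0]
print(restore_symbol_suffix("_ZN3foo3barEv", "foo::bar()"))         # foo::bar()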
@@ -464,10 +482,10 @@ class MemoryAnalyzer:
def _categorize_esphome_core_symbol(self, demangled: str) -> str:
"""Categorize ESPHome core symbols into subcategories."""
# Special patterns that need to be checked separately
if any(pattern in demangled for pattern in ["vtable", "typeinfo", "thunk"]):
if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS):
return "C++ Runtime (vtables/RTTI)"
if demangled.startswith("std::"):
if demangled.startswith(_NAMESPACE_STD):
return "C++ STL"
# Check against patterns from const.py

View File

@@ -3,7 +3,13 @@
from collections import defaultdict
import sys
from . import MemoryAnalyzer
from . import (
_COMPONENT_API,
_COMPONENT_CORE,
_COMPONENT_PREFIX_ESPHOME,
_COMPONENT_PREFIX_EXTERNAL,
MemoryAnalyzer,
)
class MemoryAnalyzerCLI(MemoryAnalyzer):
@@ -83,7 +89,7 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
total_ram = sum(c.ram_total for _, c in components)
# Build report
lines = []
lines: list[str] = []
lines.append("=" * self.TABLE_WIDTH)
lines.append("Component Memory Analysis".center(self.TABLE_WIDTH))
@@ -144,7 +150,9 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
if self._esphome_core_symbols:
lines.append("")
lines.append("=" * self.TABLE_WIDTH)
lines.append("[esphome]core Detailed Analysis".center(self.TABLE_WIDTH))
lines.append(
f"{_COMPONENT_CORE} Detailed Analysis".center(self.TABLE_WIDTH)
)
lines.append("=" * self.TABLE_WIDTH)
lines.append("")
@@ -183,9 +191,9 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%"
)
# Top 10 largest core symbols
# Top 15 largest core symbols
lines.append("")
lines.append("Top 10 Largest [esphome]core Symbols:")
lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:")
sorted_core_symbols = sorted(
self._esphome_core_symbols, key=lambda x: x[2], reverse=True
)
@@ -199,10 +207,12 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
esphome_components = [
(name, mem)
for name, mem in components
if name.startswith("[esphome]") and name != "[esphome]core"
if name.startswith(_COMPONENT_PREFIX_ESPHOME) and name != _COMPONENT_CORE
]
external_components = [
(name, mem) for name, mem in components if name.startswith("[external]")
(name, mem)
for name, mem in components
if name.startswith(_COMPONENT_PREFIX_EXTERNAL)
]
top_esphome_components = sorted(
@@ -217,7 +227,7 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
# Check if API component exists and ensure it's included
api_component = None
for name, mem in components:
if name == "[esphome]api":
if name == _COMPONENT_API:
api_component = (name, mem)
break
@@ -371,15 +381,16 @@ def main():
idedata = None
for idedata_path in idedata_candidates:
if idedata_path.exists():
try:
with open(idedata_path, encoding="utf-8") as f:
raw_data = json.load(f)
idedata = IDEData(raw_data)
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
break
except (json.JSONDecodeError, OSError) as e:
print(f"Warning: Failed to load idedata: {e}", file=sys.stderr)
if not idedata_path.exists():
continue
try:
with open(idedata_path, encoding="utf-8") as f:
raw_data = json.load(f)
idedata = IDEData(raw_data)
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
break
except (json.JSONDecodeError, OSError) as e:
print(f"Warning: Failed to load idedata: {e}", file=sys.stderr)
if not idedata:
print(

View File

@@ -5,6 +5,11 @@ from pathlib import Path
from .const import SECTION_MAPPING
# Namespace constant mirrored from the parent module.
# Note: importing it from there would create a circular import at module level,
# so we define it locally here as well.
_NAMESPACE_ESPHOME = "esphome::"
# Get the list of actual ESPHome components by scanning the components directory
@cache
@@ -40,10 +45,10 @@ def get_component_class_patterns(component_name: str) -> list[str]:
component_upper = component_name.upper()
component_camel = component_name.replace("_", "").title()
return [
f"esphome::{component_upper}Component", # e.g., esphome::OTAComponent
f"esphome::ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent
f"esphome::{component_camel}Component", # e.g., esphome::OtaComponent
f"esphome::ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent
f"{_NAMESPACE_ESPHOME}{component_upper}Component", # e.g., esphome::OTAComponent
f"{_NAMESPACE_ESPHOME}ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent
f"{_NAMESPACE_ESPHOME}{component_camel}Component", # e.g., esphome::OtaComponent
f"{_NAMESPACE_ESPHOME}ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent
]

View File

@@ -1056,6 +1056,52 @@ async def sony_action(var, config, args):
cg.add(var.set_nbits(template_))
# Symphony
SymphonyData, SymphonyBinarySensor, SymphonyTrigger, SymphonyAction, SymphonyDumper = (
declare_protocol("Symphony")
)
SYMPHONY_SCHEMA = cv.Schema(
{
cv.Required(CONF_DATA): cv.hex_uint32_t,
cv.Required(CONF_NBITS): cv.int_range(min=1, max=32),
cv.Optional(CONF_COMMAND_REPEATS, default=2): cv.uint8_t,
}
)
@register_binary_sensor("symphony", SymphonyBinarySensor, SYMPHONY_SCHEMA)
def symphony_binary_sensor(var, config):
cg.add(
var.set_data(
cg.StructInitializer(
SymphonyData,
("data", config[CONF_DATA]),
("nbits", config[CONF_NBITS]),
)
)
)
@register_trigger("symphony", SymphonyTrigger, SymphonyData)
def symphony_trigger(var, config):
pass
@register_dumper("symphony", SymphonyDumper)
def symphony_dumper(var, config):
pass
@register_action("symphony", SymphonyAction, SYMPHONY_SCHEMA)
async def symphony_action(var, config, args):
template_ = await cg.templatable(config[CONF_DATA], args, cg.uint32)
cg.add(var.set_data(template_))
template_ = await cg.templatable(config[CONF_NBITS], args, cg.uint32)
cg.add(var.set_nbits(template_))
template_ = await cg.templatable(config[CONF_COMMAND_REPEATS], args, cg.uint8)
cg.add(var.set_repeats(template_))
# Raw
def validate_raw_alternating(value):
assert isinstance(value, list)

View File

@@ -0,0 +1,120 @@
#include "symphony_protocol.h"
#include "esphome/core/log.h"
namespace esphome {
namespace remote_base {
static const char *const TAG = "remote.symphony";
// Reference implementation and timing details:
// IRremoteESP8266 ir_Symphony.cpp
// https://github.com/crankyoldgit/IRremoteESP8266/blob/master/src/ir_Symphony.cpp
// The implementation below mirrors the constant bit-time mapping and
// footer-gap handling used there.
// Symphony protocol timing specifications (tuned to handset captures)
static const uint32_t BIT_ZERO_HIGH_US = 460; // short
static const uint32_t BIT_ZERO_LOW_US = 1260; // long
static const uint32_t BIT_ONE_HIGH_US = 1260; // long
static const uint32_t BIT_ONE_LOW_US = 460; // short
static const uint32_t CARRIER_FREQUENCY = 38000;
// IRremoteESP8266 reference: kSymphonyFooterGap = 4 * (mark + space)
static const uint32_t FOOTER_GAP_US = 4 * (BIT_ZERO_HIGH_US + BIT_ZERO_LOW_US);
// Typical inter-frame gap (~34.8 ms observed)
static const uint32_t INTER_FRAME_GAP_US = 34760;
void SymphonyProtocol::encode(RemoteTransmitData *dst, const SymphonyData &data) {
dst->set_carrier_frequency(CARRIER_FREQUENCY);
ESP_LOGD(TAG, "Sending Symphony: data=0x%0*X nbits=%u repeats=%u", (data.nbits + 3) / 4, (uint32_t) data.data,
data.nbits, data.repeats);
// Each bit produces a mark+space (2 entries). We fold the inter-frame/footer gap
// into the last bit's space of each frame to avoid over-length gaps.
dst->reserve(data.nbits * 2u * data.repeats);
for (uint8_t repeats = 0; repeats < data.repeats; repeats++) {
// Data bits (MSB first)
for (uint32_t mask = 1UL << (data.nbits - 1); mask != 0; mask >>= 1) {
const bool is_last_bit = (mask == 1);
const bool is_last_frame = (repeats == (data.repeats - 1));
if (is_last_bit) {
// Emit last bit's mark; replace its space with the proper gap
if (data.data & mask) {
dst->mark(BIT_ONE_HIGH_US);
} else {
dst->mark(BIT_ZERO_HIGH_US);
}
dst->space(is_last_frame ? FOOTER_GAP_US : INTER_FRAME_GAP_US);
} else {
if (data.data & mask) {
dst->item(BIT_ONE_HIGH_US, BIT_ONE_LOW_US);
} else {
dst->item(BIT_ZERO_HIGH_US, BIT_ZERO_LOW_US);
}
}
}
}
}
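For intuition, a small Python sketch of the same MSB-first encoding and gap folding (the 12-bit code 0xD81 and repeat count are illustrative, not taken from this commit):

# Timing constants mirrored from the C++ above (microseconds)
BIT_ZERO_HIGH_US, BIT_ZERO_LOW_US = 460, 1260
BIT_ONE_HIGH_US, BIT_ONE_LOW_US = 1260, 460
FOOTER_GAP_US = 4 * (BIT_ZERO_HIGH_US + BIT_ZERO_LOW_US)  # 6880
INTER_FRAME_GAP_US = 34760

def symphony_pulses(data: int, nbits: int, repeats: int) -> list[int]:
    """Alternating mark/space durations; the gap is folded into the last bit's space."""
    out: list[int] = []
    for frame in range(repeats):
        last_frame = frame == repeats - 1
        for i in range(nbits - 1, -1, -1):  # MSB first
            bit = (data >> i) & 1
            mark = BIT_ONE_HIGH_US if bit else BIT_ZERO_HIGH_US
            if i == 0:  # last bit: its space becomes the footer/inter-frame gap
                out += [mark, FOOTER_GAP_US if last_frame else INTER_FRAME_GAP_US]
            else:
                out += [mark, BIT_ONE_LOW_US if bit else BIT_ZERO_LOW_US]
    return out

print(symphony_pulses(0xD81, 12, 2)[:6])  # [1260, 460, 1260, 460, 460, 1260]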
optional<SymphonyData> SymphonyProtocol::decode(RemoteReceiveData src) {
auto is_valid_len = [](uint8_t nbits) -> bool { return nbits == 8 || nbits == 12 || nbits == 16; };
RemoteReceiveData s = src; // copy
SymphonyData out{0, 0, 1};
for (; out.nbits < 32; out.nbits++) {
if (s.expect_mark(BIT_ONE_HIGH_US)) {
if (!s.expect_space(BIT_ONE_LOW_US)) {
// Allow footer gap immediately after the last mark
if (s.peek_space_at_least(FOOTER_GAP_US)) {
uint8_t bits_with_this = out.nbits + 1;
if (is_valid_len(bits_with_this)) {
out.data = (out.data << 1UL) | 1UL;
out.nbits = bits_with_this;
return out;
}
}
return {};
}
// Successfully consumed a '1' bit (mark + space)
out.data = (out.data << 1UL) | 1UL;
continue;
} else if (s.expect_mark(BIT_ZERO_HIGH_US)) {
if (!s.expect_space(BIT_ZERO_LOW_US)) {
// Allow footer gap immediately after the last mark
if (s.peek_space_at_least(FOOTER_GAP_US)) {
uint8_t bits_with_this = out.nbits + 1;
if (is_valid_len(bits_with_this)) {
out.data = (out.data << 1UL) | 0UL;
out.nbits = bits_with_this;
return out;
}
}
return {};
}
// Successfully consumed a '0' bit (mark + space)
out.data = (out.data << 1UL) | 0UL;
continue;
} else {
// Completed a valid-length frame followed by a footer gap
if (is_valid_len(out.nbits) && s.peek_space_at_least(FOOTER_GAP_US)) {
return out;
}
return {};
}
}
if (is_valid_len(out.nbits) && s.peek_space_at_least(FOOTER_GAP_US)) {
return out;
}
return {};
}
void SymphonyProtocol::dump(const SymphonyData &data) {
const int32_t hex_width = (data.nbits + 3) / 4; // pad to nibble width
ESP_LOGI(TAG, "Received Symphony: data=0x%0*X, nbits=%d", hex_width, (uint32_t) data.data, data.nbits);
}
} // namespace remote_base
} // namespace esphome

View File

@@ -0,0 +1,44 @@
#pragma once
#include "esphome/core/component.h"
#include "remote_base.h"
#include <cinttypes>
namespace esphome {
namespace remote_base {
struct SymphonyData {
uint32_t data;
uint8_t nbits;
uint8_t repeats{1};
bool operator==(const SymphonyData &rhs) const { return data == rhs.data && nbits == rhs.nbits; }
};
class SymphonyProtocol : public RemoteProtocol<SymphonyData> {
public:
void encode(RemoteTransmitData *dst, const SymphonyData &data) override;
optional<SymphonyData> decode(RemoteReceiveData src) override;
void dump(const SymphonyData &data) override;
};
DECLARE_REMOTE_PROTOCOL(Symphony)
template<typename... Ts> class SymphonyAction : public RemoteTransmitterActionBase<Ts...> {
public:
TEMPLATABLE_VALUE(uint32_t, data)
TEMPLATABLE_VALUE(uint8_t, nbits)
TEMPLATABLE_VALUE(uint8_t, repeats)
void encode(RemoteTransmitData *dst, Ts... x) override {
SymphonyData data{};
data.data = this->data_.value(x...);
data.nbits = this->nbits_.value(x...);
data.repeats = this->repeats_.value(x...);
SymphonyProtocol().encode(dst, data);
}
};
} // namespace remote_base
} // namespace esphome

View File

@@ -378,19 +378,19 @@ class IDEData:
@property
def objdump_path(self) -> str:
# replace gcc at end with objdump
# Windows
if self.cc_path.endswith(".exe"):
return f"{self.cc_path[:-7]}objdump.exe"
return f"{self.cc_path[:-3]}objdump"
path = self.cc_path
return (
f"{path[:-7]}objdump.exe"
if path.endswith(".exe")
else f"{path[:-3]}objdump"
)
@property
def readelf_path(self) -> str:
# replace gcc at end with readelf
# Windows
if self.cc_path.endswith(".exe"):
return f"{self.cc_path[:-7]}readelf.exe"
return f"{self.cc_path[:-3]}readelf"
path = self.cc_path
return (
f"{path[:-7]}readelf.exe"
if path.endswith(".exe")
else f"{path[:-3]}readelf"
)
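The slice offsets simply drop the "gcc" / "gcc.exe" suffix (3 and 7 characters) before appending the tool name; a sketch with hypothetical toolchain paths:

def tool_path(cc_path: str, tool: str) -> str:
    # "...-gcc" -> "...-<tool>", "...-gcc.exe" -> "...-<tool>.exe"
    return (
        f"{cc_path[:-7]}{tool}.exe"
        if cc_path.endswith(".exe")
        else f"{cc_path[:-3]}{tool}"
    )

print(tool_path("/tc/bin/xtensa-esp32-elf-gcc", "objdump"))        # /tc/bin/xtensa-esp32-elf-objdump
print(tool_path("C:/tc/bin/xtensa-esp32-elf-gcc.exe", "readelf"))  # C:/tc/bin/xtensa-esp32-elf-readelf.exe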

View File

@@ -34,6 +34,8 @@ from typing import Any
# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))
from helpers import BASE_BUS_COMPONENTS
from esphome import yaml_util
from esphome.config_helpers import Extend, Remove
@@ -50,7 +52,14 @@ PACKAGE_DEPENDENCIES = {
# Bus types that can be defined directly in config files
# Components defining these directly cannot be grouped (they create unique bus IDs)
DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus")
DIRECT_BUS_TYPES = (
"i2c",
"spi",
"uart",
"modbus",
"remote_transmitter",
"remote_receiver",
)
# Signature for components with no bus requirements
# These components can be merged with any other group
@@ -60,16 +69,6 @@ NO_BUSES_SIGNATURE = "no_buses"
# Isolated components have unique signatures and cannot be merged with others
ISOLATED_SIGNATURE_PREFIX = "isolated_"
# Base bus components - these ARE the bus implementations and should not
# be flagged as needing migration since they are the platform/base components
BASE_BUS_COMPONENTS = {
"i2c",
"spi",
"uart",
"modbus",
"canbus",
}
# Components that must be tested in isolation (not grouped or batched with others)
# These have known build issues that prevent grouping
# NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py

View File

@@ -14,6 +14,8 @@ from pathlib import Path
import subprocess
import sys
from jinja2 import Environment, FileSystemLoader
# Add esphome to path for analyze_memory import
sys.path.insert(0, str(Path(__file__).parent.parent))
@@ -26,6 +28,22 @@ COMMENT_MARKER = "<!-- esphome-memory-impact-analysis -->"
OVERALL_CHANGE_THRESHOLD = 1.0 # Overall RAM/Flash changes
COMPONENT_CHANGE_THRESHOLD = 3.0 # Component breakdown changes
# Display limits for tables
MAX_COMPONENT_BREAKDOWN_ROWS = 20 # Maximum components to show in breakdown table
MAX_CHANGED_SYMBOLS_ROWS = 30 # Maximum changed symbols to show
MAX_NEW_SYMBOLS_ROWS = 15 # Maximum new symbols to show
MAX_REMOVED_SYMBOLS_ROWS = 15 # Maximum removed symbols to show
# Symbol display formatting
SYMBOL_DISPLAY_MAX_LENGTH = 100 # Max length before using <details> tag
SYMBOL_DISPLAY_TRUNCATE_LENGTH = 97 # Length to truncate in summary
# Component change noise threshold
COMPONENT_CHANGE_NOISE_THRESHOLD = 2 # Ignore component changes ≤ this many bytes
# Template directory
TEMPLATE_DIR = Path(__file__).parent / "templates"
def load_analysis_json(json_path: str) -> dict | None:
"""Load memory analysis results from JSON file.
@@ -111,43 +129,30 @@ def format_change(before: int, after: int, threshold: float | None = None) -> st
return f"{emoji} {delta_str} ({pct_str})"
def format_symbol_for_display(symbol: str) -> str:
"""Format a symbol name for display in markdown table.
Args:
symbol: Symbol name to format
Returns:
Formatted symbol with backticks or HTML details tag for long names
"""
if len(symbol) <= 100:
return f"`{symbol}`"
# Use HTML details for very long symbols (no backticks inside HTML)
return f"<details><summary><code>{symbol[:97]}...</code></summary><code>{symbol}</code></details>"
def create_symbol_changes_table(
def prepare_symbol_changes_data(
target_symbols: dict | None, pr_symbols: dict | None
) -> str:
"""Create a markdown table showing symbols that changed size.
) -> dict | None:
"""Prepare symbol changes data for template rendering.
Args:
target_symbols: Symbol name to size mapping for target branch
pr_symbols: Symbol name to size mapping for PR branch
Returns:
Formatted markdown table
Dictionary with changed, new, and removed symbols, or None if no changes
"""
if not target_symbols or not pr_symbols:
return ""
return None
# Find all symbols that exist in both branches or only in one
all_symbols = set(target_symbols.keys()) | set(pr_symbols.keys())
# Track changes
changed_symbols = []
new_symbols = []
removed_symbols = []
changed_symbols: list[
tuple[str, int, int, int]
] = [] # (symbol, target_size, pr_size, delta)
new_symbols: list[tuple[str, int]] = [] # (symbol, size)
removed_symbols: list[tuple[str, int]] = [] # (symbol, size)
for symbol in all_symbols:
target_size = target_symbols.get(symbol, 0)
@@ -165,114 +170,42 @@ def create_symbol_changes_table(
changed_symbols.append((symbol, target_size, pr_size, delta))
if not changed_symbols and not new_symbols and not removed_symbols:
return ""
return None
lines = [
"",
"<details>",
"<summary>🔍 Symbol-Level Changes (click to expand)</summary>",
"",
]
# Sort by size/delta
changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True)
new_symbols.sort(key=lambda x: x[1], reverse=True)
removed_symbols.sort(key=lambda x: x[1], reverse=True)
# Show changed symbols (sorted by absolute delta)
if changed_symbols:
changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True)
lines.extend(
[
"### Changed Symbols",
"",
"| Symbol | Target Size | PR Size | Change |",
"|--------|-------------|---------|--------|",
]
)
# Show top 30 changes
for symbol, target_size, pr_size, delta in changed_symbols[:30]:
target_str = format_bytes(target_size)
pr_str = format_bytes(pr_size)
change_str = format_change(target_size, pr_size) # Chart icons only
display_symbol = format_symbol_for_display(symbol)
lines.append(
f"| {display_symbol} | {target_str} | {pr_str} | {change_str} |"
)
if len(changed_symbols) > 30:
lines.append(
f"| ... | ... | ... | *({len(changed_symbols) - 30} more changed symbols not shown)* |"
)
lines.append("")
# Show new symbols
if new_symbols:
new_symbols.sort(key=lambda x: x[1], reverse=True)
lines.extend(
[
"### New Symbols (top 15)",
"",
"| Symbol | Size |",
"|--------|------|",
]
)
for symbol, size in new_symbols[:15]:
display_symbol = format_symbol_for_display(symbol)
lines.append(f"| {display_symbol} | {format_bytes(size)} |")
if len(new_symbols) > 15:
total_new_size = sum(s[1] for s in new_symbols)
lines.append(
f"| *{len(new_symbols) - 15} more new symbols...* | *Total: {format_bytes(total_new_size)}* |"
)
lines.append("")
# Show removed symbols
if removed_symbols:
removed_symbols.sort(key=lambda x: x[1], reverse=True)
lines.extend(
[
"### Removed Symbols (top 15)",
"",
"| Symbol | Size |",
"|--------|------|",
]
)
for symbol, size in removed_symbols[:15]:
display_symbol = format_symbol_for_display(symbol)
lines.append(f"| {display_symbol} | {format_bytes(size)} |")
if len(removed_symbols) > 15:
total_removed_size = sum(s[1] for s in removed_symbols)
lines.append(
f"| *{len(removed_symbols) - 15} more removed symbols...* | *Total: {format_bytes(total_removed_size)}* |"
)
lines.append("")
lines.extend(["</details>", ""])
return "\n".join(lines)
return {
"changed_symbols": changed_symbols,
"new_symbols": new_symbols,
"removed_symbols": removed_symbols,
}
def create_detailed_breakdown_table(
def prepare_component_breakdown_data(
target_analysis: dict | None, pr_analysis: dict | None
) -> str:
"""Create a markdown table showing detailed memory breakdown by component.
) -> list[tuple[str, int, int, int]] | None:
"""Prepare component breakdown data for template rendering.
Args:
target_analysis: Component memory breakdown for target branch
pr_analysis: Component memory breakdown for PR branch
Returns:
Formatted markdown table
List of tuples (component, target_flash, pr_flash, delta), or None if no changes
"""
if not target_analysis or not pr_analysis:
return ""
return None
# Combine all components from both analyses
all_components = set(target_analysis.keys()) | set(pr_analysis.keys())
# Filter to components that have changed (ignoring noise ≤2 bytes)
changed_components = []
# Filter to components that have changed (ignoring noise)
changed_components: list[
tuple[str, int, int, int]
] = [] # (comp, target_flash, pr_flash, delta)
for comp in all_components:
target_mem = target_analysis.get(comp, {})
pr_mem = pr_analysis.get(comp, {})
@@ -280,43 +213,18 @@ def create_detailed_breakdown_table(
target_flash = target_mem.get("flash_total", 0)
pr_flash = pr_mem.get("flash_total", 0)
# Only include if component has meaningful change (>2 bytes)
# Only include if component has meaningful change (above noise threshold)
delta = pr_flash - target_flash
if abs(delta) > 2:
if abs(delta) > COMPONENT_CHANGE_NOISE_THRESHOLD:
changed_components.append((comp, target_flash, pr_flash, delta))
if not changed_components:
return ""
return None
# Sort by absolute delta (largest changes first)
changed_components.sort(key=lambda x: abs(x[3]), reverse=True)
# Build table - limit to top 20 changes
lines = [
"",
"<details open>",
"<summary>📊 Component Memory Breakdown</summary>",
"",
"| Component | Target Flash | PR Flash | Change |",
"|-----------|--------------|----------|--------|",
]
for comp, target_flash, pr_flash, delta in changed_components[:20]:
target_str = format_bytes(target_flash)
pr_str = format_bytes(pr_flash)
change_str = format_change(
target_flash, pr_flash, threshold=COMPONENT_CHANGE_THRESHOLD
)
lines.append(f"| `{comp}` | {target_str} | {pr_str} | {change_str} |")
if len(changed_components) > 20:
lines.append(
f"| ... | ... | ... | *({len(changed_components) - 20} more components not shown)* |"
)
lines.extend(["", "</details>", ""])
return "\n".join(lines)
return changed_components
def create_comment_body(
@@ -332,7 +240,7 @@ def create_comment_body(
pr_symbols: dict | None = None,
target_cache_hit: bool = False,
) -> str:
"""Create the comment body with memory impact analysis.
"""Create the comment body with memory impact analysis using Jinja2 templates.
Args:
components: List of component names (merged config)
@@ -350,57 +258,87 @@ def create_comment_body(
Returns:
Formatted comment body
"""
ram_change = format_change(target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD)
flash_change = format_change(
target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD
# Set up Jinja2 environment
env = Environment(
loader=FileSystemLoader(TEMPLATE_DIR),
trim_blocks=True,
lstrip_blocks=True,
)
# Use provided analysis data if available
component_breakdown = ""
symbol_changes = ""
# Register custom filters
env.filters["format_bytes"] = format_bytes
env.filters["format_change"] = format_change
if target_analysis and pr_analysis:
component_breakdown = create_detailed_breakdown_table(
target_analysis, pr_analysis
)
if target_symbols and pr_symbols:
symbol_changes = create_symbol_changes_table(target_symbols, pr_symbols)
else:
print("No ELF files provided, skipping detailed analysis", file=sys.stderr)
# Prepare template context
context = {
"comment_marker": COMMENT_MARKER,
"platform": platform,
"target_ram": format_bytes(target_ram),
"pr_ram": format_bytes(pr_ram),
"target_flash": format_bytes(target_flash),
"pr_flash": format_bytes(pr_flash),
"ram_change": format_change(
target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD
),
"flash_change": format_change(
target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD
),
"target_cache_hit": target_cache_hit,
"component_change_threshold": COMPONENT_CHANGE_THRESHOLD,
}
# Format components list
if len(components) == 1:
components_str = f"`{components[0]}`"
config_note = "a representative test configuration"
context["components_str"] = f"`{components[0]}`"
context["config_note"] = "a representative test configuration"
else:
components_str = ", ".join(f"`{c}`" for c in sorted(components))
config_note = f"a merged configuration with {len(components)} components"
context["components_str"] = ", ".join(f"`{c}`" for c in sorted(components))
context["config_note"] = (
f"a merged configuration with {len(components)} components"
)
# Add cache info note if target was cached
cache_note = ""
if target_cache_hit:
cache_note = "\n\n> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI)."
# Prepare component breakdown if available
component_breakdown = ""
if target_analysis and pr_analysis:
changed_components = prepare_component_breakdown_data(
target_analysis, pr_analysis
)
if changed_components:
template = env.get_template("ci_memory_impact_component_breakdown.j2")
component_breakdown = template.render(
changed_components=changed_components,
format_bytes=format_bytes,
format_change=format_change,
component_change_threshold=COMPONENT_CHANGE_THRESHOLD,
max_rows=MAX_COMPONENT_BREAKDOWN_ROWS,
)
return f"""{COMMENT_MARKER}
## Memory Impact Analysis
# Prepare symbol changes if available
symbol_changes = ""
if target_symbols and pr_symbols:
symbol_data = prepare_symbol_changes_data(target_symbols, pr_symbols)
if symbol_data:
template = env.get_template("ci_memory_impact_symbol_changes.j2")
symbol_changes = template.render(
**symbol_data,
format_bytes=format_bytes,
format_change=format_change,
max_changed_rows=MAX_CHANGED_SYMBOLS_ROWS,
max_new_rows=MAX_NEW_SYMBOLS_ROWS,
max_removed_rows=MAX_REMOVED_SYMBOLS_ROWS,
symbol_max_length=SYMBOL_DISPLAY_MAX_LENGTH,
symbol_truncate_length=SYMBOL_DISPLAY_TRUNCATE_LENGTH,
)
**Components:** {components_str}
**Platform:** `{platform}`
if not target_analysis or not pr_analysis:
print("No ELF files provided, skipping detailed analysis", file=sys.stderr)
| Metric | Target Branch | This PR | Change |
|--------|--------------|---------|--------|
| **RAM** | {format_bytes(target_ram)} | {format_bytes(pr_ram)} | {ram_change} |
| **Flash** | {format_bytes(target_flash)} | {format_bytes(pr_flash)} | {flash_change} |
{component_breakdown}{symbol_changes}{cache_note}
context["component_breakdown"] = component_breakdown
context["symbol_changes"] = symbol_changes
---
> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation).
> **Dynamic memory (heap)** cannot be measured automatically.
> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues.
*This analysis runs automatically when components change. Memory usage is measured from {config_note}.*
"""
# Render main template
template = env.get_template("ci_memory_impact_comment_template.j2")
return template.render(**context)
def find_existing_comment(pr_number: str) -> str | None:
@@ -411,137 +349,133 @@ def find_existing_comment(pr_number: str) -> str | None:
Returns:
Comment numeric ID if found, None otherwise
Raises:
subprocess.CalledProcessError: If gh command fails
"""
try:
print(
f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr
)
print(f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr)
# Use gh api to get comments directly - this returns the numeric id field
result = subprocess.run(
[
"gh",
"api",
f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments",
"--jq",
".[] | {id, body}",
],
capture_output=True,
text=True,
check=True,
)
# Use gh api to get comments directly - this returns the numeric id field
result = subprocess.run(
[
"gh",
"api",
f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments",
"--jq",
".[] | {id, body}",
],
capture_output=True,
text=True,
check=True,
)
print(
f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}",
file=sys.stderr,
)
print(
f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}",
file=sys.stderr,
)
# Parse comments and look for our marker
comment_count = 0
for line in result.stdout.strip().split("\n"):
if not line:
continue
# Parse comments and look for our marker
comment_count = 0
for line in result.stdout.strip().split("\n"):
if not line:
continue
try:
comment = json.loads(line)
comment_count += 1
comment_id = comment.get("id")
try:
comment = json.loads(line)
comment_count += 1
comment_id = comment.get("id")
print(
f"DEBUG: Checking comment {comment_count}: id={comment_id}",
file=sys.stderr,
)
body = comment.get("body", "")
if COMMENT_MARKER in body:
print(
f"DEBUG: Checking comment {comment_count}: id={comment_id}",
f"DEBUG: Found existing comment with id={comment_id}",
file=sys.stderr,
)
# Return the numeric id
return str(comment_id)
print("DEBUG: Comment does not contain marker", file=sys.stderr)
except json.JSONDecodeError as e:
print(f"DEBUG: JSON decode error: {e}", file=sys.stderr)
continue
body = comment.get("body", "")
if COMMENT_MARKER in body:
print(
f"DEBUG: Found existing comment with id={comment_id}",
file=sys.stderr,
)
# Return the numeric id
return str(comment_id)
print("DEBUG: Comment does not contain marker", file=sys.stderr)
except json.JSONDecodeError as e:
print(f"DEBUG: JSON decode error: {e}", file=sys.stderr)
continue
print(
f"DEBUG: No existing comment found (checked {comment_count} comments)",
file=sys.stderr,
)
return None
except subprocess.CalledProcessError as e:
print(f"Error finding existing comment: {e}", file=sys.stderr)
if e.stderr:
print(f"stderr: {e.stderr.decode()}", file=sys.stderr)
return None
print(
f"DEBUG: No existing comment found (checked {comment_count} comments)",
file=sys.stderr,
)
return None
def post_or_update_comment(pr_number: str, comment_body: str) -> bool:
def update_existing_comment(comment_id: str, comment_body: str) -> None:
"""Update an existing comment.
Args:
comment_id: Comment ID to update
comment_body: New comment body text
Raises:
subprocess.CalledProcessError: If gh command fails
"""
print(f"DEBUG: Updating existing comment {comment_id}", file=sys.stderr)
result = subprocess.run(
[
"gh",
"api",
f"/repos/{{owner}}/{{repo}}/issues/comments/{comment_id}",
"-X",
"PATCH",
"-f",
f"body={comment_body}",
],
check=True,
capture_output=True,
text=True,
)
print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr)
def create_new_comment(pr_number: str, comment_body: str) -> None:
"""Create a new PR comment.
Args:
pr_number: PR number
comment_body: Comment body text
Raises:
subprocess.CalledProcessError: If gh command fails
"""
print(f"DEBUG: Posting new comment on PR #{pr_number}", file=sys.stderr)
result = subprocess.run(
["gh", "pr", "comment", pr_number, "--body", comment_body],
check=True,
capture_output=True,
text=True,
)
print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr)
def post_or_update_comment(pr_number: str, comment_body: str) -> None:
"""Post a new comment or update existing one.
Args:
pr_number: PR number
comment_body: Comment body text
Returns:
True if successful, False otherwise
Raises:
subprocess.CalledProcessError: If gh command fails
"""
# Look for existing comment
existing_comment_id = find_existing_comment(pr_number)
try:
if existing_comment_id and existing_comment_id != "None":
# Update existing comment
print(
f"DEBUG: Updating existing comment {existing_comment_id}",
file=sys.stderr,
)
result = subprocess.run(
[
"gh",
"api",
f"/repos/{{owner}}/{{repo}}/issues/comments/{existing_comment_id}",
"-X",
"PATCH",
"-f",
f"body={comment_body}",
],
check=True,
capture_output=True,
text=True,
)
print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr)
else:
# Post new comment
print(
f"DEBUG: Posting new comment (existing_comment_id={existing_comment_id})",
file=sys.stderr,
)
result = subprocess.run(
["gh", "pr", "comment", pr_number, "--body", comment_body],
check=True,
capture_output=True,
text=True,
)
print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr)
if existing_comment_id and existing_comment_id != "None":
update_existing_comment(existing_comment_id, comment_body)
else:
create_new_comment(pr_number, comment_body)
print("Comment posted/updated successfully", file=sys.stderr)
return True
except subprocess.CalledProcessError as e:
print(f"Error posting/updating comment: {e}", file=sys.stderr)
if e.stderr:
print(
f"stderr: {e.stderr.decode() if isinstance(e.stderr, bytes) else e.stderr}",
file=sys.stderr,
)
if e.stdout:
print(
f"stdout: {e.stdout.decode() if isinstance(e.stdout, bytes) else e.stdout}",
file=sys.stderr,
)
return False
print("Comment posted/updated successfully", file=sys.stderr)
def main() -> int:
@@ -627,9 +561,9 @@ def main() -> int:
)
# Post or update comment
success = post_or_update_comment(args.pr_number, comment_body)
post_or_update_comment(args.pr_number, comment_body)
return 0 if success else 1
return 0
if __name__ == "__main__":

View File

@@ -25,8 +25,15 @@ import sys
sys.path.insert(0, str(Path(__file__).parent.parent))
# pylint: disable=wrong-import-position
from esphome.analyze_memory import MemoryAnalyzer
from esphome.platformio_api import IDEData
from script.ci_helpers import write_github_output
# Regex patterns for extracting memory usage from PlatformIO output
_RAM_PATTERN = re.compile(r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes")
_FLASH_PATTERN = re.compile(r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes")
_BUILD_PATH_PATTERN = re.compile(r"Build path: (.+)")
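A quick check of what those patterns capture; the summary lines below are illustrative PlatformIO output in the format quoted in the docstring further down:

import re

_RAM_PATTERN = re.compile(r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes")
_FLASH_PATTERN = re.compile(r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes")

output = (
    "RAM:   [==        ]  13.6% (used 44540 bytes from 327680 bytes)\n"
    "Flash: [===       ]  34.0% (used 348511 bytes from 1023984 bytes)\n"
)
# Summed across matches because a merged config may produce several builds
print(sum(int(m) for m in _RAM_PATTERN.findall(output)))    # 44540
print(sum(int(m) for m in _FLASH_PATTERN.findall(output)))  # 348511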
def extract_from_compile_output(
output_text: str,
@@ -42,7 +49,7 @@ def extract_from_compile_output(
Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes)
Also extracts build directory from lines like:
INFO Deleting /path/to/build/.esphome/build/componenttestesp8266ard/.pioenvs
INFO Compiling app... Build path: /path/to/build
Args:
output_text: Compile output text (may contain multiple builds)
@@ -51,12 +58,8 @@ def extract_from_compile_output(
Tuple of (total_ram_bytes, total_flash_bytes, build_dir) or (None, None, None) if not found
"""
# Find all RAM and Flash matches (may be multiple builds)
ram_matches = re.findall(
r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text
)
flash_matches = re.findall(
r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text
)
ram_matches = _RAM_PATTERN.findall(output_text)
flash_matches = _FLASH_PATTERN.findall(output_text)
if not ram_matches or not flash_matches:
return None, None, None
@@ -69,7 +72,7 @@ def extract_from_compile_output(
# Look for: INFO Compiling app... Build path: /path/to/build
# Note: Multiple builds reuse the same build path (each overwrites the previous)
build_dir = None
if match := re.search(r"Build path: (.+)", output_text):
if match := _BUILD_PATH_PATTERN.search(output_text):
build_dir = match.group(1).strip()
return total_ram, total_flash, build_dir
@@ -84,9 +87,6 @@ def run_detailed_analysis(build_dir: str) -> dict | None:
Returns:
Dictionary with analysis results or None if analysis fails
"""
from esphome.analyze_memory import MemoryAnalyzer
from esphome.platformio_api import IDEData
build_path = Path(build_dir)
if not build_path.exists():
print(f"Build directory not found: {build_dir}", file=sys.stderr)
@@ -119,18 +119,19 @@ def run_detailed_analysis(build_dir: str) -> dict | None:
idedata = None
for idedata_path in idedata_candidates:
if idedata_path.exists():
try:
with open(idedata_path, encoding="utf-8") as f:
raw_data = json.load(f)
idedata = IDEData(raw_data)
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
break
except (json.JSONDecodeError, OSError) as e:
print(
f"Warning: Failed to load idedata from {idedata_path}: {e}",
file=sys.stderr,
)
if not idedata_path.exists():
continue
try:
with open(idedata_path, encoding="utf-8") as f:
raw_data = json.load(f)
idedata = IDEData(raw_data)
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
break
except (json.JSONDecodeError, OSError) as e:
print(
f"Warning: Failed to load idedata from {idedata_path}: {e}",
file=sys.stderr,
)
analyzer = MemoryAnalyzer(elf_path, idedata=idedata)
components = analyzer.analyze()
@@ -209,11 +210,7 @@ def main() -> int:
return 1
# Count how many builds were found
num_builds = len(
re.findall(
r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", compile_output
)
)
num_builds = len(_RAM_PATTERN.findall(compile_output))
if num_builds > 1:
print(

View File

@@ -13,9 +13,9 @@ what files have changed. It outputs JSON with the following structure:
"component_test_count": 5,
"memory_impact": {
"should_run": "true/false",
"component": "component_name",
"test_file": "test.esp32-idf.yaml",
"platform": "esp32-idf"
"components": ["component1", "component2", ...],
"platform": "esp32-idf",
"use_merged_config": "true"
}
}
@@ -26,7 +26,7 @@ The CI workflow uses this information to:
- Skip or run Python linters (ruff, flake8, pylint, pyupgrade)
- Determine which components to test individually
- Decide how to split component tests (if there are many)
- Run memory impact analysis when exactly one component changes
- Run memory impact analysis whenever there are changed components (merged config), and also for core-only changes
Usage:
python script/determine-jobs.py [-b BRANCH]
@@ -38,6 +38,7 @@ Options:
from __future__ import annotations
import argparse
from collections import Counter
from enum import StrEnum
from functools import cache
import json
@@ -48,11 +49,13 @@ import sys
from typing import Any
from helpers import (
BASE_BUS_COMPONENTS,
CPP_FILE_EXTENSIONS,
ESPHOME_COMPONENTS_PATH,
PYTHON_FILE_EXTENSIONS,
changed_files,
get_all_dependencies,
get_component_from_path,
get_component_test_files,
get_components_from_integration_fixtures,
parse_test_filename,
root_path,
@@ -142,12 +145,9 @@ def should_run_integration_tests(branch: str | None = None) -> bool:
# Check if any required components changed
for file in files:
if file.startswith(ESPHOME_COMPONENTS_PATH):
parts = file.split("/")
if len(parts) >= 3:
component = parts[2]
if component in all_required_components:
return True
component = get_component_from_path(file)
if component and component in all_required_components:
return True
return False
@@ -261,10 +261,7 @@ def _component_has_tests(component: str) -> bool:
Returns:
True if the component has test YAML files
"""
tests_dir = Path(root_path) / "tests" / "components" / component
if not tests_dir.exists():
return False
return any(tests_dir.glob("test.*.yaml"))
return bool(get_component_test_files(component))
def detect_memory_impact_config(
@@ -291,17 +288,15 @@ def detect_memory_impact_config(
files = changed_files(branch)
# Find all changed components (excluding core and base bus components)
changed_component_set = set()
changed_component_set: set[str] = set()
has_core_changes = False
for file in files:
if file.startswith(ESPHOME_COMPONENTS_PATH):
parts = file.split("/")
if len(parts) >= 3:
component = parts[2]
# Skip base bus components as they're used across many builds
if component not in ["i2c", "spi", "uart", "modbus", "canbus"]:
changed_component_set.add(component)
component = get_component_from_path(file)
if component:
# Skip base bus components as they're used across many builds
if component not in BASE_BUS_COMPONENTS:
changed_component_set.add(component)
elif file.startswith("esphome/"):
# Core ESPHome files changed (not component-specific)
has_core_changes = True
@@ -321,25 +316,24 @@ def detect_memory_impact_config(
return {"should_run": "false"}
# Find components that have tests and collect their supported platforms
components_with_tests = []
component_platforms_map = {} # Track which platforms each component supports
components_with_tests: list[str] = []
component_platforms_map: dict[
str, set[Platform]
] = {} # Track which platforms each component supports
for component in sorted(changed_component_set):
tests_dir = Path(root_path) / "tests" / "components" / component
if not tests_dir.exists():
continue
# Look for test files on preferred platforms
test_files = list(tests_dir.glob("test.*.yaml"))
test_files = get_component_test_files(component)
if not test_files:
continue
# Check if component has tests for any preferred platform
available_platforms = []
for test_file in test_files:
_, platform = parse_test_filename(test_file)
if platform != "all" and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE:
available_platforms.append(platform)
available_platforms = [
platform
for test_file in test_files
if (platform := parse_test_filename(test_file)[1]) != "all"
and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE
]
if not available_platforms:
continue
@@ -367,10 +361,10 @@ def detect_memory_impact_config(
else:
# No common platform - pick the most commonly supported platform
# This allows testing components individually even if they can't be merged
platform_counts = {}
for platforms in component_platforms_map.values():
for p in platforms:
platform_counts[p] = platform_counts.get(p, 0) + 1
# Count how many components support each platform
platform_counts = Counter(
p for platforms in component_platforms_map.values() for p in platforms
)
# Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE
platform = max(
platform_counts.keys(),

View File

@@ -29,6 +29,18 @@ YAML_FILE_EXTENSIONS = (".yaml", ".yml")
# Component path prefix
ESPHOME_COMPONENTS_PATH = "esphome/components/"
# Base bus components - these ARE the bus implementations and should not
# be flagged as needing migration since they are the platform/base components
BASE_BUS_COMPONENTS = {
"i2c",
"spi",
"uart",
"modbus",
"canbus",
"remote_transmitter",
"remote_receiver",
}
def parse_list_components_output(output: str) -> list[str]:
"""Parse the output from list-components.py script.
@@ -63,6 +75,48 @@ def parse_test_filename(test_file: Path) -> tuple[str, str]:
return parts[0], "all"
def get_component_from_path(file_path: str) -> str | None:
"""Extract component name from a file path.
Args:
file_path: Path to a file (e.g., "esphome/components/wifi/wifi.cpp")
Returns:
Component name if path is in components directory, None otherwise
"""
if not file_path.startswith(ESPHOME_COMPONENTS_PATH):
return None
parts = file_path.split("/")
if len(parts) >= 3:
return parts[2]
return None
def get_component_test_files(
component: str, *, all_variants: bool = False
) -> list[Path]:
"""Get test files for a component.
Args:
component: Component name (e.g., "wifi")
all_variants: If True, returns all test files including variants (test-*.yaml).
If False, returns only base test files (test.*.yaml).
Default is False.
Returns:
List of test file paths for the component, or empty list if none exist
"""
tests_dir = Path(root_path) / "tests" / "components" / component
if not tests_dir.exists():
return []
if all_variants:
# Match both test.*.yaml and test-*.yaml patterns
return list(tests_dir.glob("test[.-]*.yaml"))
# Match only test.*.yaml (base tests)
return list(tests_dir.glob("test.*.yaml"))
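What the two glob patterns select, illustrated with fnmatch on hypothetical file names (Path.glob and fnmatch agree for plain file names with no path separators):

from fnmatch import fnmatch

names = [
    "test.esp32-idf.yaml",          # base test
    "test.esp8266-ard.yaml",        # base test
    "test-special.esp32-idf.yaml",  # variant test
    "common.yaml",                  # not a test entry point
]
print([n for n in names if fnmatch(n, "test.*.yaml")])     # base tests only
print([n for n in names if fnmatch(n, "test[.-]*.yaml")])  # base tests plus variants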
def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str:
prefix = "".join(color) if isinstance(color, tuple) else color
suffix = colorama.Style.RESET_ALL if reset else ""
@@ -331,11 +385,9 @@ def _filter_changed_ci(files: list[str]) -> list[str]:
# because changes in one file can affect other files in the same component.
filtered_files = []
for f in files:
if f.startswith(ESPHOME_COMPONENTS_PATH):
# Check if file belongs to any of the changed components
parts = f.split("/")
if len(parts) >= 3 and parts[2] in component_set:
filtered_files.append(f)
component = get_component_from_path(f)
if component and component in component_set:
filtered_files.append(f)
return filtered_files

View File

@@ -4,7 +4,7 @@ from collections.abc import Callable
from pathlib import Path
import sys
from helpers import changed_files, git_ls_files
from helpers import changed_files, get_component_from_path, git_ls_files
from esphome.const import (
KEY_CORE,
@@ -30,11 +30,9 @@ def get_all_component_files() -> list[str]:
def extract_component_names_array_from_files_array(files):
components = []
for file in files:
file_parts = file.split("/")
if len(file_parts) >= 4:
component_name = file_parts[2]
if component_name not in components:
components.append(component_name)
component_name = get_component_from_path(file)
if component_name and component_name not in components:
components.append(component_name)
return components

View File

@@ -28,6 +28,7 @@ from script.analyze_component_buses import (
create_grouping_signature,
merge_compatible_bus_groups,
)
from script.helpers import get_component_test_files
# Weighting for batch creation
# Isolated components can't be grouped/merged, so they count as 10x
@@ -45,17 +46,12 @@ def has_test_files(component_name: str, tests_dir: Path) -> bool:
Args:
component_name: Name of the component
tests_dir: Path to tests/components directory
tests_dir: Path to tests/components directory (unused, kept for compatibility)
Returns:
True if the component has test.*.yaml files
"""
component_dir = tests_dir / component_name
if not component_dir.exists() or not component_dir.is_dir():
return False
# Check for test.*.yaml files
return any(component_dir.glob("test.*.yaml"))
return bool(get_component_test_files(component_name))
def create_intelligent_batches(

View File

@@ -0,0 +1,27 @@
{{ comment_marker }}
## Memory Impact Analysis
**Components:** {{ components_str }}
**Platform:** `{{ platform }}`
| Metric | Target Branch | This PR | Change |
|--------|--------------|---------|--------|
| **RAM** | {{ target_ram }} | {{ pr_ram }} | {{ ram_change }} |
| **Flash** | {{ target_flash }} | {{ pr_flash }} | {{ flash_change }} |
{% if component_breakdown %}
{{ component_breakdown }}
{% endif %}
{% if symbol_changes %}
{{ symbol_changes }}
{% endif %}
{%- if target_cache_hit %}
> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI).
{%- endif %}
---
> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation).
> **Dynamic memory (heap)** cannot be measured automatically.
> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues.
*This analysis runs automatically when components change. Memory usage is measured from {{ config_note }}.*

View File

@@ -0,0 +1,15 @@
<details open>
<summary>📊 Component Memory Breakdown</summary>
| Component | Target Flash | PR Flash | Change |
|-----------|--------------|----------|--------|
{% for comp, target_flash, pr_flash, delta in changed_components[:max_rows] -%}
{% set threshold = component_change_threshold if comp.startswith("[esphome]") else none -%}
| `{{ comp }}` | {{ target_flash|format_bytes }} | {{ pr_flash|format_bytes }} | {{ format_change(target_flash, pr_flash, threshold=threshold) }} |
{% endfor -%}
{% if changed_components|length > max_rows -%}
| ... | ... | ... | *({{ changed_components|length - max_rows }} more components not shown)* |
{% endif -%}
</details>

View File

@@ -0,0 +1,8 @@
{#- Macro for formatting symbol names in tables -#}
{%- macro format_symbol(symbol, max_length, truncate_length) -%}
{%- if symbol|length <= max_length -%}
`{{ symbol }}`
{%- else -%}
<details><summary><code>{{ symbol[:truncate_length] }}...</code></summary><code>{{ symbol }}</code></details>
{%- endif -%}
{%- endmacro -%}
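A minimal way to exercise the macro on its own (the sample symbol name is hypothetical), showing the backtick vs. <details> branches:

from jinja2 import Environment

demo = Environment().from_string(
    "{%- macro format_symbol(symbol, max_length, truncate_length) -%}"
    "{%- if symbol|length <= max_length -%}`{{ symbol }}`{%- else -%}"
    "<details><summary><code>{{ symbol[:truncate_length] }}...</code></summary>"
    "<code>{{ symbol }}</code></details>{%- endif -%}{%- endmacro -%}"
    "{{ format_symbol(sym, 100, 97) }}"
)
print(demo.render(sym="esphome::api::APIConnection::send_message()"))  # stays in backticks
print(demo.render(sym="N" * 150)[:40])  # long names fold into a <details> block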

View File

@@ -0,0 +1,51 @@
{%- from 'ci_memory_impact_macros.j2' import format_symbol -%}
<details>
<summary>🔍 Symbol-Level Changes (click to expand)</summary>
{% if changed_symbols %}
### Changed Symbols
| Symbol | Target Size | PR Size | Change |
|--------|-------------|---------|--------|
{% for symbol, target_size, pr_size, delta in changed_symbols[:max_changed_rows] -%}
| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ target_size|format_bytes }} | {{ pr_size|format_bytes }} | {{ format_change(target_size, pr_size) }} |
{% endfor -%}
{% if changed_symbols|length > max_changed_rows -%}
| ... | ... | ... | *({{ changed_symbols|length - max_changed_rows }} more changed symbols not shown)* |
{% endif -%}
{% endif %}
{% if new_symbols %}
### New Symbols (top {{ max_new_rows }})
| Symbol | Size |
|--------|------|
{% for symbol, size in new_symbols[:max_new_rows] -%}
| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} |
{% endfor -%}
{% if new_symbols|length > max_new_rows -%}
{% set total_new_size = new_symbols|sum(attribute=1) -%}
| *{{ new_symbols|length - max_new_rows }} more new symbols...* | *Total: {{ total_new_size|format_bytes }}* |
{% endif -%}
{% endif %}
{% if removed_symbols %}
### Removed Symbols (top {{ max_removed_rows }})
| Symbol | Size |
|--------|------|
{% for symbol, size in removed_symbols[:max_removed_rows] -%}
| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} |
{% endfor -%}
{% if removed_symbols|length > max_removed_rows -%}
{% set total_removed_size = removed_symbols|sum(attribute=1) -%}
| *{{ removed_symbols|length - max_removed_rows }} more removed symbols...* | *Total: {{ total_removed_size|format_bytes }}* |
{% endif -%}
{% endif %}
</details>

View File

@@ -39,6 +39,7 @@ from script.analyze_component_buses import (
merge_compatible_bus_groups,
uses_local_file_references,
)
from script.helpers import get_component_test_files
from script.merge_component_configs import merge_component_configs
@@ -100,10 +101,10 @@ def find_component_tests(
if not comp_dir.is_dir():
continue
# Find test files - either base only (test.*.yaml) or all (test[.-]*.yaml)
pattern = "test.*.yaml" if base_only else "test[.-]*.yaml"
for test_file in comp_dir.glob(pattern):
component_tests[comp_dir.name].append(test_file)
# Get test files using helper function
test_files = get_component_test_files(comp_dir.name, all_variants=not base_only)
if test_files:
component_tests[comp_dir.name] = test_files
return dict(component_tests)
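The replacement code delegates test discovery to `script.helpers.get_component_test_files`, whose implementation is not part of this diff. A hypothetical sketch, inferring the directory layout and glob patterns from the code being removed, might look like:

```python
# Hypothetical sketch of a helper with the call shape used above; the real
# script.helpers.get_component_test_files is not shown in this diff, so the
# tests/components/<name>/ layout and glob patterns here are inferred from
# the replaced code, not taken from the actual helper.
from pathlib import Path

TESTS_DIR = Path("tests/components")  # assumed location


def get_component_test_files(component: str, *, all_variants: bool = False) -> list[Path]:
    """Return test YAMLs for a component: base tests only, or every variant."""
    comp_dir = TESTS_DIR / component
    if not comp_dir.is_dir():
        return []
    # test.<platform>.yaml are the base tests; test-<variant>.yaml are extras.
    pattern = "test[.-]*.yaml" if all_variants else "test.*.yaml"
    return sorted(comp_dir.glob(pattern))
```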

View File

@@ -1,7 +1,3 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: heatpumpir
protocol: ballu
@@ -10,3 +6,4 @@ climate:
name: HeatpumpIR Climate
min_temperature: 18
max_temperature: 30
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: climate_ir_lg
name: LG Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: coolix
name: Coolix Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,3 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: heatpumpir
protocol: daikin
@@ -10,3 +6,4 @@ climate:
name: HeatpumpIR Climate
min_temperature: 18
max_temperature: 30
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,18 +1,3 @@
remote_transmitter:
pin: ${tx_pin}
carrier_duty_percent: 50%
id: tsvr
remote_receiver:
id: rcvr
pin:
number: ${rx_pin}
inverted: true
mode:
input: true
pullup: true
tolerance: 40%
climate:
- platform: daikin_arc
name: Daikin AC

View File

@@ -1,5 +1,5 @@
substitutions:
tx_pin: GPIO0
rx_pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
remote_receiver: !include ../../test_build_components/common/remote_receiver/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: daikin_brc
name: Daikin_brc Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: delonghi
name: Delonghi Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,14 +1,5 @@
remote_transmitter:
id: tx
pin: ${remote_transmitter_pin}
carrier_duty_percent: 100%
remote_receiver:
id: rcvr
pin: ${remote_receiver_pin}
climate:
- platform: emmeti
name: Emmeti
receiver_id: rcvr
transmitter_id: tx
transmitter_id: xmitr

View File

@@ -1,5 +1,5 @@
substitutions:
remote_transmitter_pin: GPIO33
remote_receiver_pin: GPIO32
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
remote_receiver: !include ../../test_build_components/common/remote_receiver/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,5 +1,5 @@
substitutions:
remote_transmitter_pin: GPIO0
remote_receiver_pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
remote_receiver: !include ../../test_build_components/common/remote_receiver/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: fujitsu_general
name: Fujitsu General Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,8 +1,5 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: gree
name: GREE
model: generic
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,3 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: heatpumpir
protocol: mitsubishi_heavy_zm
@@ -10,6 +6,7 @@ climate:
name: HeatpumpIR Climate Mitsubishi
min_temperature: 18
max_temperature: 30
transmitter_id: xmitr
- platform: heatpumpir
protocol: daikin
horizontal_default: mleft
@@ -17,6 +14,7 @@ climate:
name: HeatpumpIR Climate Daikin
min_temperature: 18
max_temperature: 30
transmitter_id: xmitr
- platform: heatpumpir
protocol: panasonic_altdke
horizontal_default: mright
@@ -24,3 +22,4 @@ climate:
name: HeatpumpIR Climate Panasonic
min_temperature: 18
max_temperature: 30
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO6
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/bk72xx-ard.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-ard.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: hitachi_ac344
name: Hitachi Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO6
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/bk72xx-ard.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: hitachi_ac424
name: Hitachi Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO6
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/bk72xx-ard.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -2,10 +2,6 @@ wifi:
ssid: MySSID
password: password1
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: midea
id: midea_unit
@@ -16,7 +12,7 @@ climate:
x.set_mode(CLIMATE_MODE_FAN_ONLY);
on_state:
- logger.log: State changed!
transmitter_id:
transmitter_id: xmitr
period: 1s
num_attempts: 5
timeout: 2s

View File

@@ -1,7 +1,5 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-ard.yaml
uart: !include ../../test_build_components/common/uart/esp32-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,5 @@
substitutions:
pin: GPIO15
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,8 +1,5 @@
remote_transmitter:
pin: 4
carrier_duty_percent: 50%
climate:
- platform: midea_ir
name: Midea IR
use_fahrenheit: true
transmitter_id: xmitr

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: 4
carrier_duty_percent: 50%
climate:
- platform: mitsubishi
name: Mitsubishi
transmitter_id: xmitr

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1 +1,4 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,12 +1,3 @@
remote_receiver:
id: rcvr
pin: 4
dump: all
remote_transmitter:
pin: 2
carrier_duty_percent: 50%
sensor:
- platform: template
id: noblex_ac_sensor

View File

@@ -1 +1,5 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
remote_receiver: !include ../../test_build_components/common/remote_receiver/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1 +1,5 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
remote_receiver: !include ../../test_build_components/common/remote_receiver/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1 +1,5 @@
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
remote_receiver: !include ../../test_build_components/common/remote_receiver/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -128,13 +128,10 @@ valve:
optimistic: true
has_position: true
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: climate_ir_lg
name: LG Climate
transmitter_id: xmitr
prometheus:
include_internal: true

View File

@@ -1,5 +1,7 @@
substitutions:
verify_ssl: "false"
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,8 +1,8 @@
substitutions:
verify_ssl: "false"
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,5 +1,7 @@
substitutions:
verify_ssl: "false"
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -143,6 +143,11 @@ on_sony:
- logger.log:
format: "on_sony: %lu %u"
args: ["long(x.data)", "x.nbits"]
on_symphony:
then:
- logger.log:
format: "on_symphony: 0x%lX %u"
args: ["long(x.data)", "x.nbits"]
on_toshiba_ac:
then:
- logger.log:

View File

@@ -53,6 +53,12 @@ button:
remote_transmitter.transmit_sony:
data: 0xABCDEF
nbits: 12
- platform: template
name: Symphony
on_press:
remote_transmitter.transmit_symphony:
data: 0xE88
nbits: 12
- platform: template
name: Panasonic
on_press:

View File

@@ -1,7 +1,3 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
sensor:
- platform: template
id: tcl112_sensor
@@ -13,3 +9,4 @@ climate:
supports_heat: true
supports_cool: true
sensor: tcl112_sensor
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: toshiba
name: Toshiba Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,7 +1,4 @@
remote_transmitter:
pin: ${pin}
carrier_duty_percent: 50%
climate:
- platform: whirlpool
name: Whirlpool Climate
transmitter_id: xmitr

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-c3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO2
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
substitutions:
pin: GPIO5
packages:
remote_transmitter: !include ../../test_build_components/common/remote_transmitter/esp8266-ard.yaml
<<: !include common.yaml

Some files were not shown because too many files have changed in this diff.