Mirror of https://github.com/esphome/esphome.git (synced 2025-10-25 21:23:53 +01:00)
Merge branch 'integration' into memory_api
@@ -112,7 +112,7 @@ async def to_code(config):
 
     cg.add_define("USE_IMPROV")
 
-    await improv_base.setup_improv_core(var, config)
+    await improv_base.setup_improv_core(var, config, "esp32_improv")
 
     cg.add(var.set_identify_duration(config[CONF_IDENTIFY_DURATION]))
     cg.add(var.set_authorized_duration(config[CONF_AUTHORIZED_DURATION]))

@@ -389,11 +389,13 @@ void ESP32ImprovComponent::check_wifi_connection_() {
   std::string url_strings[3];
   size_t url_count = 0;
 
+#ifdef USE_ESP32_IMPROV_NEXT_URL
   // Add next_url if configured (should be first per Improv BLE spec)
   std::string next_url = this->get_formatted_next_url_();
   if (!next_url.empty()) {
     url_strings[url_count++] = std::move(next_url);
   }
+#endif
 
   // Add default URLs for backward compatibility
   url_strings[url_count++] = ESPHOME_MY_LINK;

@@ -3,6 +3,8 @@ import re
 import esphome.codegen as cg
 import esphome.config_validation as cv
 from esphome.const import __version__
+from esphome.cpp_generator import MockObj
+from esphome.types import ConfigType
 
 CODEOWNERS = ["@esphome/core"]
 
@@ -35,7 +37,9 @@ def _process_next_url(url: str):
     return url
 
 
-async def setup_improv_core(var, config):
-    if CONF_NEXT_URL in config:
-        cg.add(var.set_next_url(_process_next_url(config[CONF_NEXT_URL])))
+async def setup_improv_core(var: MockObj, config: ConfigType, component: str):
+    if next_url := config.get(CONF_NEXT_URL):
+        cg.add(var.set_next_url(_process_next_url(next_url)))
+        cg.add_define(f"USE_{component.upper()}_NEXT_URL")
+
     cg.add_library("improv/Improv", "1.2.4")
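
Note: the new component argument lets each improv consumer gate its next_url support behind its own preprocessor define. A minimal standalone sketch of the name derivation (next_url_define is an illustrative helper, not part of the codebase):

    def next_url_define(component: str) -> str:
        # mirrors cg.add_define(f"USE_{component.upper()}_NEXT_URL") in the hunk above
        return f"USE_{component.upper()}_NEXT_URL"

    assert next_url_define("esp32_improv") == "USE_ESP32_IMPROV_NEXT_URL"
    assert next_url_define("improv_serial") == "USE_IMPROV_SERIAL_NEXT_URL"
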
@@ -2,10 +2,12 @@
 
 #include "esphome/components/network/util.h"
 #include "esphome/core/application.h"
+#include "esphome/core/defines.h"
 
 namespace esphome {
 namespace improv_base {
 
+#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
 static constexpr const char DEVICE_NAME_PLACEHOLDER[] = "{{device_name}}";
 static constexpr size_t DEVICE_NAME_PLACEHOLDER_LEN = sizeof(DEVICE_NAME_PLACEHOLDER) - 1;
 static constexpr const char IP_ADDRESS_PLACEHOLDER[] = "{{ip_address}}";
@@ -43,6 +45,7 @@ std::string ImprovBase::get_formatted_next_url_() {
 
   return formatted_url;
 }
+#endif
 
 }  // namespace improv_base
 }  // namespace esphome

@@ -1,17 +1,22 @@
 #pragma once
 
 #include <string>
+#include "esphome/core/defines.h"
 
 namespace esphome {
 namespace improv_base {
 
 class ImprovBase {
  public:
+#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
   void set_next_url(const std::string &next_url) { this->next_url_ = next_url; }
+#endif
 
  protected:
+#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
   std::string get_formatted_next_url_();
   std::string next_url_;
+#endif
 };
 
 }  // namespace improv_base

@@ -43,4 +43,4 @@ FINAL_VALIDATE_SCHEMA = validate_logger
 async def to_code(config):
     var = cg.new_Pvariable(config[CONF_ID])
     await cg.register_component(var, config)
-    await improv_base.setup_improv_core(var, config)
+    await improv_base.setup_improv_core(var, config, "improv_serial")

@@ -146,9 +146,11 @@ void ImprovSerialComponent::loop() {
 
 std::vector<uint8_t> ImprovSerialComponent::build_rpc_settings_response_(improv::Command command) {
   std::vector<std::string> urls;
+#ifdef USE_IMPROV_SERIAL_NEXT_URL
   if (!this->next_url_.empty()) {
     urls.push_back(this->get_formatted_next_url_());
   }
+#endif
 #ifdef USE_WEBSERVER
   for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) {
     if (ip.is_ip4()) {

@@ -62,7 +62,8 @@ optional<std::string> AppendFilter::new_value(std::string value) { return value
 optional<std::string> PrependFilter::new_value(std::string value) { return this->prefix_ + value; }
 
 // Substitute
-SubstituteFilter::SubstituteFilter(std::initializer_list<Substitution> substitutions) : substitutions_(substitutions) {}
+SubstituteFilter::SubstituteFilter(const std::initializer_list<Substitution> &substitutions)
+    : substitutions_(substitutions) {}
 
 optional<std::string> SubstituteFilter::new_value(std::string value) {
   std::size_t pos;
@@ -74,7 +75,7 @@ optional<std::string> SubstituteFilter::new_value(std::string value) {
 }
 
 // Map
-MapFilter::MapFilter(std::initializer_list<Substitution> mappings) : mappings_(mappings) {}
+MapFilter::MapFilter(const std::initializer_list<Substitution> &mappings) : mappings_(mappings) {}
 
 optional<std::string> MapFilter::new_value(std::string value) {
   for (const auto &mapping : this->mappings_) {

@@ -102,7 +102,7 @@ struct Substitution {
 /// A simple filter that replaces a substring with another substring
 class SubstituteFilter : public Filter {
  public:
-  explicit SubstituteFilter(std::initializer_list<Substitution> substitutions);
+  explicit SubstituteFilter(const std::initializer_list<Substitution> &substitutions);
   optional<std::string> new_value(std::string value) override;
 
  protected:
@@ -135,7 +135,7 @@ class SubstituteFilter : public Filter {
  */
 class MapFilter : public Filter {
  public:
-  explicit MapFilter(std::initializer_list<Substitution> mappings);
+  explicit MapFilter(const std::initializer_list<Substitution> &mappings);
   optional<std::string> new_value(std::string value) override;
 
  protected:

@@ -471,6 +471,7 @@ CONF_IMPORT_REACTIVE_ENERGY = "import_reactive_energy"
 CONF_INC_PIN = "inc_pin"
 CONF_INCLUDE_INTERNAL = "include_internal"
 CONF_INCLUDES = "includes"
+CONF_INCLUDES_C = "includes_c"
 CONF_INDEX = "index"
 CONF_INDOOR = "indoor"
 CONF_INFRARED = "infrared"

@@ -21,6 +21,7 @@ from esphome.const import (
     CONF_FRIENDLY_NAME,
     CONF_ID,
     CONF_INCLUDES,
+    CONF_INCLUDES_C,
     CONF_LIBRARIES,
     CONF_MIN_VERSION,
     CONF_NAME,
@@ -227,6 +228,7 @@ CONFIG_SCHEMA = cv.All(
                 }
             ),
             cv.Optional(CONF_INCLUDES, default=[]): cv.ensure_list(valid_include),
+            cv.Optional(CONF_INCLUDES_C, default=[]): cv.ensure_list(valid_include),
             cv.Optional(CONF_LIBRARIES, default=[]): cv.ensure_list(cv.string_strict),
             cv.Optional(CONF_NAME_ADD_MAC_SUFFIX, default=False): cv.boolean,
             cv.Optional(CONF_DEBUG_SCHEDULER, default=False): cv.boolean,
@@ -302,6 +304,17 @@ def _list_target_platforms():
     return target_platforms
 
 
+def _sort_includes_by_type(includes: list[str]) -> tuple[list[str], list[str]]:
+    system_includes = []
+    other_includes = []
+    for include in includes:
+        if include.startswith("<") and include.endswith(">"):
+            system_includes.append(include)
+        else:
+            other_includes.append(include)
+    return system_includes, other_includes
+
+
 def preload_core_config(config, result) -> str:
     with cv.prepend_path(CONF_ESPHOME):
         conf = PRELOAD_CONFIG_SCHEMA(config[CONF_ESPHOME])
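
Note: _sort_includes_by_type simply partitions the configured include strings by whether they look like angle-bracket system includes. A standalone sketch of the expected behaviour (the include names are made up for illustration):

    includes = ["<sys/time.h>", "my_helpers.h", "<stdint.h>"]
    system = [i for i in includes if i.startswith("<") and i.endswith(">")]
    other = [i for i in includes if not (i.startswith("<") and i.endswith(">"))]
    assert system == ["<sys/time.h>", "<stdint.h>"]
    assert other == ["my_helpers.h"]
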
@@ -339,7 +352,7 @@ def preload_core_config(config, result) -> str:
     return target_platforms[0]
 
 
-def include_file(path: Path, basename: Path):
+def include_file(path: Path, basename: Path, is_c_header: bool = False):
     parts = basename.parts
     dst = CORE.relative_src_path(*parts)
     copy_file_if_changed(path, dst)
@@ -347,6 +360,13 @@ def include_file(path: Path, basename: Path):
     ext = path.suffix
     if ext in [".h", ".hpp", ".tcc"]:
         # Header, add include statement
-        cg.add_global(cg.RawStatement(f'#include "{basename}"'))
+        if is_c_header:
+            # Wrap in extern "C" block for C headers
+            cg.add_global(
+                cg.RawStatement(f'extern "C" {{\n #include "{basename}"\n}}')
+            )
+        else:
+            # Regular include
+            cg.add_global(cg.RawStatement(f'#include "{basename}"'))
 
 
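
Note: for is_c_header=True, include_file wraps the generated include in an extern "C" block so the header is compiled with C linkage. A standalone sketch of the statement it emits (the header name is made up):

    def include_statement(basename: str, is_c_header: bool = False) -> str:
        if is_c_header:
            # same f-string shape as in the hunk above
            return f'extern "C" {{\n #include "{basename}"\n}}'
        return f'#include "{basename}"'

    print(include_statement("legacy_driver.h", is_c_header=True))
    # extern "C" {
    #  #include "legacy_driver.h"
    # }
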
@@ -377,7 +397,7 @@ async def add_arduino_global_workaround():
 
 
 @coroutine_with_priority(CoroPriority.FINAL)
-async def add_includes(includes: list[str]) -> None:
+async def add_includes(includes: list[str], is_c_header: bool = False) -> None:
     # Add includes at the very end, so that the included files can access global variables
     for include in includes:
         path = CORE.relative_config_path(include)
@@ -385,11 +405,11 @@ async def add_includes(includes: list[str]) -> None:
             # Directory, copy tree
             for p in walk_files(path):
                 basename = p.relative_to(path.parent)
-                include_file(p, basename)
+                include_file(p, basename, is_c_header)
         else:
             # Copy file
             basename = Path(path.name)
-            include_file(path, basename)
+            include_file(path, basename, is_c_header)
 
 
 @coroutine_with_priority(CoroPriority.FINAL)
@@ -494,19 +514,25 @@ async def to_code(config: ConfigType) -> None:
     CORE.add_job(add_arduino_global_workaround)
 
     if config[CONF_INCLUDES]:
-        # Get the <...> includes
-        system_includes = []
-        other_includes = []
-        for include in config[CONF_INCLUDES]:
-            if include.startswith("<") and include.endswith(">"):
-                system_includes.append(include)
-            else:
-                other_includes.append(include)
+        system_includes, other_includes = _sort_includes_by_type(config[CONF_INCLUDES])
         # <...> includes should be at the start
         for include in system_includes:
            cg.add_global(cg.RawStatement(f"#include {include}"), prepend=True)
         # Other includes should be at the end
-        CORE.add_job(add_includes, other_includes)
+        CORE.add_job(add_includes, other_includes, False)
+
+    if config[CONF_INCLUDES_C]:
+        system_includes, other_includes = _sort_includes_by_type(
+            config[CONF_INCLUDES_C]
+        )
+        # <...> includes should be at the start
+        for include in system_includes:
+            cg.add_global(
+                cg.RawStatement(f'extern "C" {{\n #include {include}\n}}'),
+                prepend=True,
+            )
+        # Other includes should be at the end
+        CORE.add_job(add_includes, other_includes, True)
 
     if project_conf := config.get(CONF_PROJECT):
         cg.add_define("ESPHOME_PROJECT_NAME", project_conf[CONF_NAME])

@@ -44,6 +44,7 @@
 #define USE_GRAPHICAL_DISPLAY_MENU
 #define USE_HOMEASSISTANT_TIME
 #define USE_HTTP_REQUEST_OTA_WATCHDOG_TIMEOUT 8000  // NOLINT
+#define USE_IMPROV_SERIAL_NEXT_URL
 #define USE_JSON
 #define USE_LIGHT
 #define USE_LOCK
@@ -186,6 +187,7 @@
 #define USE_ESP32_CAMERA_JPEG_ENCODER
 #define USE_I2C
 #define USE_IMPROV
+#define USE_ESP32_IMPROV_NEXT_URL
 #define USE_MICROPHONE
 #define USE_PSRAM
 #define USE_SOCKET_IMPL_BSD_SOCKETS

@@ -57,6 +57,7 @@ from helpers import (
     get_component_from_path,
     get_component_test_files,
     get_components_from_integration_fixtures,
+    git_ls_files,
     parse_test_filename,
     root_path,
 )
@@ -162,6 +163,26 @@ def should_run_integration_tests(branch: str | None = None) -> bool:
     return False
 
 
+@cache
+def _is_clang_tidy_full_scan() -> bool:
+    """Check if clang-tidy configuration changed (requires full scan).
+
+    Returns:
+        True if full scan is needed (hash changed), False otherwise.
+    """
+    try:
+        result = subprocess.run(
+            [os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"],
+            capture_output=True,
+            check=False,
+        )
+        # Exit 0 means hash changed (full scan needed)
+        return result.returncode == 0
+    except Exception:
+        # If hash check fails, run full scan to be safe
+        return True
+
+
 def should_run_clang_tidy(branch: str | None = None) -> bool:
     """Determine if clang-tidy should run based on changed files.
 
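
Note: because _is_clang_tidy_full_scan() is wrapped in @cache, the hash-check subprocess runs at most once per process and later callers reuse the result; this is also why the test fixture added further below calls cache_clear(). A minimal sketch of that behaviour with a stand-in function:

    from functools import cache

    calls = 0

    @cache
    def is_full_scan() -> bool:  # stand-in for _is_clang_tidy_full_scan
        global calls
        calls += 1  # pretend this is the expensive subprocess call
        return False

    is_full_scan()
    is_full_scan()
    assert calls == 1  # second call is served from the cache

    is_full_scan.cache_clear()  # what the autouse test fixture does before each test
    is_full_scan()
    assert calls == 2
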
@@ -198,17 +219,7 @@ def should_run_clang_tidy(branch: str | None = None) -> bool:
         True if clang-tidy should run, False otherwise.
     """
     # First check if clang-tidy configuration changed (full scan needed)
-    try:
-        result = subprocess.run(
-            [os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"],
-            capture_output=True,
-            check=False,
-        )
-        # Exit 0 means hash changed (full scan needed)
-        if result.returncode == 0:
-            return True
-    except Exception:
-        # If hash check fails, run clang-tidy to be safe
+    if _is_clang_tidy_full_scan():
         return True
 
     # Check if .clang-tidy.hash file itself was changed
@@ -586,13 +597,37 @@ def main() -> None:
     # Detect components for memory impact analysis (merged config)
     memory_impact = detect_memory_impact_config(args.branch)
 
+    # Determine clang-tidy mode based on actual files that will be checked
     if run_clang_tidy:
-        if changed_cpp_file_count < CLANG_TIDY_SPLIT_THRESHOLD:
+        is_full_scan = _is_clang_tidy_full_scan()
+
+        if is_full_scan:
+            # Full scan checks all files - always use split mode for efficiency
+            clang_tidy_mode = "split"
+            files_to_check_count = -1  # Sentinel value for "all files"
+        else:
+            # Targeted scan - calculate actual files that will be checked
+            # This accounts for component dependencies, not just directly changed files
+            if changed_components:
+                # Count C++ files in all changed components (including dependencies)
+                all_cpp_files = list(git_ls_files(["*.cpp"]).keys())
+                component_set = set(changed_components)
+                files_to_check_count = sum(
+                    1
+                    for f in all_cpp_files
+                    if get_component_from_path(f) in component_set
+                )
+            else:
+                # If no components changed, use the simple count of changed C++ files
+                files_to_check_count = changed_cpp_file_count
+
+            if files_to_check_count < CLANG_TIDY_SPLIT_THRESHOLD:
                 clang_tidy_mode = "nosplit"
             else:
                 clang_tidy_mode = "split"
     else:
         clang_tidy_mode = "disabled"
+        files_to_check_count = 0
 
     # Build output
     output: dict[str, Any] = {
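
Note: for a targeted scan the split/nosplit choice reduces to a threshold comparison on the number of files that will actually be checked. A standalone sketch (the threshold value of 65 is an assumption inferred from the 64 -> nosplit / 66 -> split test cases below):

    CLANG_TIDY_SPLIT_THRESHOLD = 65  # assumed value

    def clang_tidy_mode(files_to_check_count: int, run_clang_tidy: bool = True) -> str:
        if not run_clang_tidy:
            return "disabled"
        return "nosplit" if files_to_check_count < CLANG_TIDY_SPLIT_THRESHOLD else "split"

    assert clang_tidy_mode(5) == "nosplit"
    assert clang_tidy_mode(64) == "nosplit"
    assert clang_tidy_mode(66) == "split"
    assert clang_tidy_mode(100, run_clang_tidy=False) == "disabled"
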
@@ -966,11 +966,33 @@ def test_components(
     # Find all component tests
     all_tests = {}
     for pattern in component_patterns:
+        # Skip empty patterns (happens when components list is empty string)
+        if not pattern:
+            continue
         all_tests.update(find_component_tests(tests_dir, pattern, base_only))
 
+    # If no components found, build a reference configuration for baseline comparison
+    # Create a synthetic "empty" component test that will build just the base config
     if not all_tests:
         print(f"No components found matching: {component_patterns}")
-        return 1
+        print(
+            "Building reference configuration with no components for baseline comparison..."
+        )
+
+        # Create empty test files for each platform (or filtered platform)
+        reference_tests: list[Path] = []
+        for platform_name, base_file in platform_bases.items():
+            if platform_filter and not platform_name.startswith(platform_filter):
+                continue
+            # Create an empty test file named to match the platform
+            empty_test_file = build_dir / f"reference.{platform_name}.yaml"
+            empty_test_file.write_text(
+                "# Empty component test for baseline reference\n"
+            )
+            reference_tests.append(empty_test_file)
+
+        # Add to all_tests dict with component name "reference"
+        all_tests["reference"] = reference_tests
 
     print(f"Found {len(all_tests)} components to test")
 

@@ -71,6 +71,12 @@ def mock_changed_files() -> Generator[Mock, None, None]:
         yield mock
 
 
+@pytest.fixture(autouse=True)
+def clear_clang_tidy_cache() -> None:
+    """Clear the clang-tidy full scan cache before each test."""
+    determine_jobs._is_clang_tidy_full_scan.cache_clear()
+
+
 def test_main_all_tests_should_run(
     mock_should_run_integration_tests: Mock,
     mock_should_run_clang_tidy: Mock,
@@ -98,7 +104,10 @@ def test_main_all_tests_should_run(
     mock_subprocess_run.return_value = mock_result
 
     # Run main function with mocked argv
-    with patch("sys.argv", ["determine-jobs.py"]):
+    with (
+        patch("sys.argv", ["determine-jobs.py"]),
+        patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
+    ):
         determine_jobs.main()
 
     # Check output
@@ -224,7 +233,10 @@ def test_main_with_branch_argument(
     )
     mock_subprocess_run.return_value = mock_result
 
-    with patch("sys.argv", ["script.py", "-b", "main"]):
+    with (
+        patch("sys.argv", ["script.py", "-b", "main"]),
+        patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
+    ):
         determine_jobs.main()
 
     # Check that functions were called with branch
@@ -363,16 +375,6 @@ def test_should_run_clang_tidy_hash_check_exception() -> None:
     result = determine_jobs.should_run_clang_tidy()
     assert result is True  # Fail safe - run clang-tidy
-
-    # Even with C++ files, exception should trigger clang-tidy
-    with (
-        patch.object(
-            determine_jobs, "changed_files", return_value=["esphome/core.cpp"]
-        ),
-        patch("subprocess.run", side_effect=Exception("Hash check failed")),
-    ):
-        result = determine_jobs.should_run_clang_tidy()
-        assert result is True
 
 
 def test_should_run_clang_tidy_with_branch() -> None:
     """Test should_run_clang_tidy with branch argument."""
@@ -763,3 +765,120 @@ def test_detect_memory_impact_config_skips_base_bus_components(tmp_path: Path) -
     assert result["should_run"] == "true"
     assert result["components"] == ["wifi"]
     assert "i2c" not in result["components"]
+
+
+# Tests for clang-tidy split mode logic
+
+
+def test_clang_tidy_mode_full_scan(
+    mock_should_run_integration_tests: Mock,
+    mock_should_run_clang_tidy: Mock,
+    mock_should_run_clang_format: Mock,
+    mock_should_run_python_linters: Mock,
+    mock_subprocess_run: Mock,
+    mock_changed_files: Mock,
+    capsys: pytest.CaptureFixture[str],
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    """Test that full scan (hash changed) always uses split mode."""
+    monkeypatch.delenv("GITHUB_ACTIONS", raising=False)
+
+    mock_should_run_integration_tests.return_value = False
+    mock_should_run_clang_tidy.return_value = True
+    mock_should_run_clang_format.return_value = False
+    mock_should_run_python_linters.return_value = False
+
+    # Mock list-components.py output
+    mock_result = Mock()
+    mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []})
+    mock_subprocess_run.return_value = mock_result
+
+    # Mock full scan (hash changed)
+    with (
+        patch("sys.argv", ["determine-jobs.py"]),
+        patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=True),
+    ):
+        determine_jobs.main()
+
+    captured = capsys.readouterr()
+    output = json.loads(captured.out)
+
+    # Full scan should always use split mode
+    assert output["clang_tidy_mode"] == "split"
+
+
+@pytest.mark.parametrize(
+    ("component_count", "files_per_component", "expected_mode"),
+    [
+        # Small PR: 5 files in 1 component -> nosplit
+        (1, 5, "nosplit"),
+        # Medium PR: 30 files in 2 components -> nosplit
+        (2, 15, "nosplit"),
+        # Medium PR: 64 files total -> nosplit (just under threshold)
+        (2, 32, "nosplit"),
+        # Large PR: 65 files total -> split (at threshold)
+        (2, 33, "split"),  # 2 * 33 = 66 files
+        # Large PR: 100 files in 10 components -> split
+        (10, 10, "split"),
+    ],
+    ids=[
+        "1_comp_5_files_nosplit",
+        "2_comp_30_files_nosplit",
+        "2_comp_64_files_nosplit_under_threshold",
+        "2_comp_66_files_split_at_threshold",
+        "10_comp_100_files_split",
+    ],
+)
+def test_clang_tidy_mode_targeted_scan(
+    component_count: int,
+    files_per_component: int,
+    expected_mode: str,
+    mock_should_run_integration_tests: Mock,
+    mock_should_run_clang_tidy: Mock,
+    mock_should_run_clang_format: Mock,
+    mock_should_run_python_linters: Mock,
+    mock_subprocess_run: Mock,
+    mock_changed_files: Mock,
+    capsys: pytest.CaptureFixture[str],
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    """Test clang-tidy mode selection based on files_to_check count."""
+    monkeypatch.delenv("GITHUB_ACTIONS", raising=False)
+
+    mock_should_run_integration_tests.return_value = False
+    mock_should_run_clang_tidy.return_value = True
+    mock_should_run_clang_format.return_value = False
+    mock_should_run_python_linters.return_value = False
+
+    # Create component names
+    components = [f"comp{i}" for i in range(component_count)]
+
+    # Mock list-components.py output
+    mock_result = Mock()
+    mock_result.stdout = json.dumps(
+        {"directly_changed": components, "all_changed": components}
+    )
+    mock_subprocess_run.return_value = mock_result
+
+    # Mock git_ls_files to return files for each component
+    cpp_files = {
+        f"esphome/components/{comp}/file{i}.cpp": 0
+        for comp in components
+        for i in range(files_per_component)
+    }
+
+    # Create a mock that returns the cpp_files dict for any call
+    def mock_git_ls_files(patterns=None):
+        return cpp_files
+
+    with (
+        patch("sys.argv", ["determine-jobs.py"]),
+        patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
+        patch.object(determine_jobs, "git_ls_files", side_effect=mock_git_ls_files),
+    ):
+        determine_jobs.main()
+
+    captured = capsys.readouterr()
+    output = json.loads(captured.out)
+
+    assert output["clang_tidy_mode"] == expected_mode

@@ -517,6 +517,35 @@ def test_include_file_cpp(tmp_path: Path, mock_copy_file_if_changed: Mock) -> No
         mock_cg.add_global.assert_not_called()
 
 
+def test_include_file_with_c_header(
+    tmp_path: Path, mock_copy_file_if_changed: Mock
+) -> None:
+    """Test include_file wraps header in extern C block when is_c_header is True."""
+    src_file = tmp_path / "c_library.h"
+    src_file.write_text("// C library header")
+
+    CORE.build_path = tmp_path / "build"
+
+    with patch("esphome.core.config.cg") as mock_cg:
+        # Mock RawStatement to capture the text
+        mock_raw_statement = MagicMock()
+        mock_raw_statement.text = ""
+
+        def raw_statement_side_effect(text):
+            mock_raw_statement.text = text
+            return mock_raw_statement
+
+        mock_cg.RawStatement.side_effect = raw_statement_side_effect
+
+        config.include_file(src_file, Path("c_library.h"), is_c_header=True)
+
+        mock_copy_file_if_changed.assert_called_once()
+        mock_cg.add_global.assert_called_once()
+        # Check that include statement is wrapped in extern "C" block
+        assert 'extern "C"' in mock_raw_statement.text
+        assert '#include "c_library.h"' in mock_raw_statement.text
+
+
 def test_get_usable_cpu_count() -> None:
     """Test get_usable_cpu_count returns CPU count."""
     count = config.get_usable_cpu_count()