Mirror of https://github.com/esphome/esphome.git (synced 2025-09-23 13:42:27 +01:00)

Merge branch 'dev' into sha256_ota
@@ -772,7 +772,7 @@ def command_update_all(args: ArgsProtocol) -> int | None:
         safe_print(f"{half_line}{middle_text}{half_line}")

     for f in files:
-        safe_print(f"Updating {color(AnsiFore.CYAN, f)}")
+        safe_print(f"Updating {color(AnsiFore.CYAN, str(f))}")
         safe_print("-" * twidth)
         safe_print()
         if CORE.dashboard:
@@ -784,10 +784,10 @@ def command_update_all(args: ArgsProtocol) -> int | None:
                 "esphome", "run", f, "--no-logs", "--device", "OTA"
             )
             if rc == 0:
-                print_bar(f"[{color(AnsiFore.BOLD_GREEN, 'SUCCESS')}] {f}")
+                print_bar(f"[{color(AnsiFore.BOLD_GREEN, 'SUCCESS')}] {str(f)}")
                 success[f] = True
             else:
-                print_bar(f"[{color(AnsiFore.BOLD_RED, 'ERROR')}] {f}")
+                print_bar(f"[{color(AnsiFore.BOLD_RED, 'ERROR')}] {str(f)}")
                 success[f] = False

         safe_print()
@@ -798,9 +798,9 @@ def command_update_all(args: ArgsProtocol) -> int | None:
     failed = 0
     for f in files:
         if success[f]:
-            safe_print(f" - {f}: {color(AnsiFore.GREEN, 'SUCCESS')}")
+            safe_print(f" - {str(f)}: {color(AnsiFore.GREEN, 'SUCCESS')}")
         else:
-            safe_print(f" - {f}: {color(AnsiFore.BOLD_RED, 'FAILED')}")
+            safe_print(f" - {str(f)}: {color(AnsiFore.BOLD_RED, 'FAILED')}")
             failed += 1

     return failed

@@ -1273,7 +1273,12 @@ def run_esphome(argv):
     CORE.config_path = conf_path
     CORE.dashboard = args.dashboard

-    config = read_config(dict(args.substitution) if args.substitution else {})
+    # For logs command, skip updating external components
+    skip_external = args.command == "logs"
+    config = read_config(
+        dict(args.substitution) if args.substitution else {},
+        skip_external_update=skip_external,
+    )
    if config is None:
        return 2
    CORE.config = config
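The Python half of this change threads a single flag from the CLI entry point down to the git layer. A toy model of that flow, assuming simplified signatures (these are not the real function bodies, just the propagation logic):

    # Toy model of the skip_external_update path (names mirror the diff).
    NEVER_REFRESH = object()  # stand-in for git.NEVER_REFRESH

    def clone_or_update(refresh):
        # NEVER_REFRESH means: reuse the cached checkout, no network fetch.
        return "cached checkout" if refresh is NEVER_REFRESH else "fetched"

    def do_packages_pass(config, skip_update=False):
        return clone_or_update(NEVER_REFRESH if skip_update else config["refresh"])

    def read_config(config, skip_external_update=False):
        # In the real code this goes through load_config -> _load_config
        # -> validate_config before reaching the passes.
        return do_packages_pass(config, skip_update=skip_external_update)

    assert read_config({"refresh": "1d"}, skip_external_update=True) == "cached checkout"
    assert read_config({"refresh": "1d"}) == "fetched"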
@@ -31,6 +31,9 @@ void ESP32ImprovComponent::setup() {
 #endif
   global_ble_server->on(BLEServerEvt::EmptyEvt::ON_DISCONNECT,
                         [this](uint16_t conn_id) { this->set_error_(improv::ERROR_NONE); });
+
+  // Start with loop disabled - will be enabled by start() when needed
+  this->disable_loop();
 }

 void ESP32ImprovComponent::setup_characteristics() {
@@ -39,11 +39,13 @@ async def to_code(config):
     pass


-def _process_git_config(config: dict, refresh) -> str:
+def _process_git_config(config: dict, refresh, skip_update: bool = False) -> str:
+    # When skip_update is True, use NEVER_REFRESH to prevent updates
+    actual_refresh = git.NEVER_REFRESH if skip_update else refresh
     repo_dir, _ = git.clone_or_update(
         url=config[CONF_URL],
         ref=config.get(CONF_REF),
-        refresh=refresh,
+        refresh=actual_refresh,
         domain=DOMAIN,
         username=config.get(CONF_USERNAME),
         password=config.get(CONF_PASSWORD),
@@ -70,12 +72,12 @@ def _process_git_config(config: dict, refresh) -> str:
     return components_dir


-def _process_single_config(config: dict):
+def _process_single_config(config: dict, skip_update: bool = False):
     conf = config[CONF_SOURCE]
     if conf[CONF_TYPE] == TYPE_GIT:
         with cv.prepend_path([CONF_SOURCE]):
             components_dir = _process_git_config(
-                config[CONF_SOURCE], config[CONF_REFRESH]
+                config[CONF_SOURCE], config[CONF_REFRESH], skip_update
             )
     elif conf[CONF_TYPE] == TYPE_LOCAL:
         components_dir = Path(CORE.relative_config_path(conf[CONF_PATH]))
@@ -105,7 +107,7 @@ def _process_single_config(config: dict):
     loader.install_meta_finder(components_dir, allowed_components=allowed_components)


-def do_external_components_pass(config: dict) -> None:
+def do_external_components_pass(config: dict, skip_update: bool = False) -> None:
     conf = config.get(DOMAIN)
     if conf is None:
         return
@@ -113,4 +115,4 @@ def do_external_components_pass(config: dict) -> None:
     conf = CONFIG_SCHEMA(conf)
     for i, c in enumerate(conf):
         with cv.prepend_path(i):
-            _process_single_config(c)
+            _process_single_config(c, skip_update)
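For orientation, the config dict this pass receives (after YAML parsing) looks roughly like the sketch below; only git-type sources are affected by skip_update, since local sources never fetch. Shapes are illustrative, but the keys (source, type, url, path, refresh) all appear in the code above:

    # Illustrative shape of parsed external_components entries (not exhaustive):
    config = {
        "external_components": [
            {
                "source": {"type": "git", "url": "https://github.com/example/comps"},
                "refresh": "1d",  # replaced by NEVER_REFRESH when skip_update=True
            },
            {
                "source": {"type": "local", "path": "my_components"},  # never fetched
            },
        ]
    }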
@@ -8,52 +8,12 @@ namespace json {

 static const char *const TAG = "json";

-#ifdef USE_PSRAM
-// Build an allocator for the JSON Library using the RAMAllocator class
-// This is only compiled when PSRAM is enabled
-struct SpiRamAllocator : ArduinoJson::Allocator {
-  void *allocate(size_t size) override { return this->allocator_.allocate(size); }
-
-  void deallocate(void *pointer) override {
-    // ArduinoJson's Allocator interface doesn't provide the size parameter in deallocate.
-    // RAMAllocator::deallocate() requires the size, which we don't have access to here.
-    // RAMAllocator::deallocate implementation just calls free() regardless of whether
-    // the memory was allocated with heap_caps_malloc or malloc.
-    // This is safe because ESP-IDF's heap implementation internally tracks the memory region
-    // and routes free() to the appropriate heap.
-    free(pointer); // NOLINT(cppcoreguidelines-owning-memory,cppcoreguidelines-no-malloc)
-  }
-
-  void *reallocate(void *ptr, size_t new_size) override {
-    return this->allocator_.reallocate(static_cast<uint8_t *>(ptr), new_size);
-  }
-
- protected:
-  RAMAllocator<uint8_t> allocator_{RAMAllocator<uint8_t>(RAMAllocator<uint8_t>::NONE)};
-};
-#endif
-
 std::string build_json(const json_build_t &f) {
   // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
-#ifdef USE_PSRAM
-  auto doc_allocator = SpiRamAllocator();
-  JsonDocument json_document(&doc_allocator);
-#else
-  JsonDocument json_document;
-#endif
-  if (json_document.overflowed()) {
-    ESP_LOGE(TAG, "Could not allocate memory for JSON document!");
-    return "{}";
-  }
-  JsonObject root = json_document.to<JsonObject>();
+  JsonBuilder builder;
+  JsonObject root = builder.root();
   f(root);
-  if (json_document.overflowed()) {
-    ESP_LOGE(TAG, "Could not allocate memory for JSON document!");
-    return "{}";
-  }
-  std::string output;
-  serializeJson(json_document, output);
-  return output;
+  return builder.serialize();
   // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 }

@@ -84,5 +44,15 @@ bool parse_json(const std::string &data, const json_parse_t &f) {
   // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 }

+std::string JsonBuilder::serialize() {
+  if (doc_.overflowed()) {
+    ESP_LOGE(TAG, "JSON document overflow");
+    return "{}";
+  }
+  std::string output;
+  serializeJson(doc_, output);
+  return output;
+}
+
 } // namespace json
 } // namespace esphome
@@ -13,6 +13,31 @@
 namespace esphome {
 namespace json {

+#ifdef USE_PSRAM
+// Build an allocator for the JSON Library using the RAMAllocator class
+// This is only compiled when PSRAM is enabled
+struct SpiRamAllocator : ArduinoJson::Allocator {
+  void *allocate(size_t size) override { return allocator_.allocate(size); }
+
+  void deallocate(void *ptr) override {
+    // ArduinoJson's Allocator interface doesn't provide the size parameter in deallocate.
+    // RAMAllocator::deallocate() requires the size, which we don't have access to here.
+    // RAMAllocator::deallocate implementation just calls free() regardless of whether
+    // the memory was allocated with heap_caps_malloc or malloc.
+    // This is safe because ESP-IDF's heap implementation internally tracks the memory region
+    // and routes free() to the appropriate heap.
+    free(ptr); // NOLINT(cppcoreguidelines-owning-memory,cppcoreguidelines-no-malloc)
+  }
+
+  void *reallocate(void *ptr, size_t new_size) override {
+    return allocator_.reallocate(static_cast<uint8_t *>(ptr), new_size);
+  }
+
+ protected:
+  RAMAllocator<uint8_t> allocator_{RAMAllocator<uint8_t>::NONE};
+};
+#endif
+
 /// Callback function typedef for parsing JsonObjects.
 using json_parse_t = std::function<bool(JsonObject)>;

@@ -25,5 +50,29 @@ std::string build_json(const json_build_t &f);
 /// Parse a JSON string and run the provided json parse function if it's valid.
 bool parse_json(const std::string &data, const json_parse_t &f);

+/// Builder class for creating JSON documents without lambdas
+class JsonBuilder {
+ public:
+  JsonObject root() {
+    if (!root_created_) {
+      root_ = doc_.to<JsonObject>();
+      root_created_ = true;
+    }
+    return root_;
+  }
+
+  std::string serialize();
+
+ private:
+#ifdef USE_PSRAM
+  SpiRamAllocator allocator_;
+  JsonDocument doc_{&allocator_};
+#else
+  JsonDocument doc_;
+#endif
+  JsonObject root_;
+  bool root_created_{false};
+};
+
 } // namespace json
 } // namespace esphome
@@ -51,6 +51,7 @@ from esphome.const import (
     DEVICE_CLASS_OZONE,
     DEVICE_CLASS_PH,
     DEVICE_CLASS_PM1,
+    DEVICE_CLASS_PM4,
     DEVICE_CLASS_PM10,
     DEVICE_CLASS_PM25,
     DEVICE_CLASS_POWER,
@@ -116,6 +117,7 @@ DEVICE_CLASSES = [
     DEVICE_CLASS_PM1,
     DEVICE_CLASS_PM10,
     DEVICE_CLASS_PM25,
+    DEVICE_CLASS_PM4,
     DEVICE_CLASS_POWER,
     DEVICE_CLASS_POWER_FACTOR,
     DEVICE_CLASS_PRECIPITATION,
@@ -106,11 +106,13 @@ CONFIG_SCHEMA = cv.Any(
 )


-def _process_base_package(config: dict) -> dict:
+def _process_base_package(config: dict, skip_update: bool = False) -> dict:
+    # When skip_update is True, use NEVER_REFRESH to prevent updates
+    actual_refresh = git.NEVER_REFRESH if skip_update else config[CONF_REFRESH]
     repo_dir, revert = git.clone_or_update(
         url=config[CONF_URL],
         ref=config.get(CONF_REF),
-        refresh=config[CONF_REFRESH],
+        refresh=actual_refresh,
         domain=DOMAIN,
         username=config.get(CONF_USERNAME),
         password=config.get(CONF_PASSWORD),
@@ -180,16 +182,16 @@ def _process_base_package(config: dict) -> dict:
     return {"packages": packages}


-def _process_package(package_config, config):
+def _process_package(package_config, config, skip_update: bool = False):
     recursive_package = package_config
     if CONF_URL in package_config:
-        package_config = _process_base_package(package_config)
+        package_config = _process_base_package(package_config, skip_update)
     if isinstance(package_config, dict):
-        recursive_package = do_packages_pass(package_config)
+        recursive_package = do_packages_pass(package_config, skip_update)
     return merge_config(recursive_package, config)


-def do_packages_pass(config: dict):
+def do_packages_pass(config: dict, skip_update: bool = False):
     if CONF_PACKAGES not in config:
         return config
     packages = config[CONF_PACKAGES]
@@ -198,10 +200,10 @@ def do_packages_pass(config: dict):
     if isinstance(packages, dict):
         for package_name, package_config in reversed(packages.items()):
             with cv.prepend_path(package_name):
-                config = _process_package(package_config, config)
+                config = _process_package(package_config, config, skip_update)
     elif isinstance(packages, list):
         for package_config in reversed(packages):
-            config = _process_package(package_config, config)
+            config = _process_package(package_config, config, skip_update)
     else:
         raise cv.Invalid(
             f"Packages must be a key to value mapping or list, got {type(packages)} instead"
@@ -76,7 +76,8 @@ bool SensirionI2CDevice::write_command_(uint16_t command, CommandLen command_len
       temp[raw_idx++] = data[i] >> 8;
 #endif
       // Use MSB first since Sensirion devices use CRC-8 with MSB first
-      temp[raw_idx++] = crc8(&temp[raw_idx - 2], 2, 0xFF, CRC_POLYNOMIAL, true);
+      uint8_t crc = crc8(&temp[raw_idx - 2], 2, 0xFF, CRC_POLYNOMIAL, true);
+      temp[raw_idx++] = crc;
     }
   this->last_error_ = this->write(temp, raw_idx);
   return this->last_error_ == i2c::ERROR_OK;
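The split matters because the original statement both read raw_idx (in &temp[raw_idx - 2]) and incremented it (raw_idx++) in a single expression, which is easy to misread and a classic source of sequencing confusion. For reference, a minimal Python model of the CRC-8 variant the comment describes (polynomial 0x31, init 0xFF, MSB first, no final XOR, as documented for Sensirion sensors):

    def crc8_msb(data: bytes, init: int = 0xFF, poly: int = 0x31) -> int:
        """CRC-8 as used by Sensirion sensors: poly 0x31, init 0xFF, MSB first."""
        crc = init
        for byte in data:
            crc ^= byte
            for _ in range(8):
                crc = ((crc << 1) ^ poly) & 0xFF if crc & 0x80 else (crc << 1) & 0xFF
        return crc

    # Worked example from the SHT3x datasheet: CRC of 0xBE 0xEF is 0x92.
    assert crc8_msb(bytes([0xBE, 0xEF])) == 0x92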
@@ -74,6 +74,7 @@ from esphome.const import (
     DEVICE_CLASS_OZONE,
     DEVICE_CLASS_PH,
     DEVICE_CLASS_PM1,
+    DEVICE_CLASS_PM4,
     DEVICE_CLASS_PM10,
     DEVICE_CLASS_PM25,
     DEVICE_CLASS_POWER,
@@ -143,6 +144,7 @@ DEVICE_CLASSES = [
     DEVICE_CLASS_PM1,
     DEVICE_CLASS_PM10,
     DEVICE_CLASS_PM25,
+    DEVICE_CLASS_PM4,
     DEVICE_CLASS_POWER,
     DEVICE_CLASS_POWER_FACTOR,
     DEVICE_CLASS_PRECIPITATION,
@@ -228,10 +228,11 @@ void DeferredUpdateEventSourceList::on_client_connect_(WebServer *ws, DeferredUp

 #ifdef USE_WEBSERVER_SORTING
   for (auto &group : ws->sorting_groups_) {
-    message = json::build_json([group](JsonObject root) {
-      root["name"] = group.second.name;
-      root["sorting_weight"] = group.second.weight;
-    });
+    json::JsonBuilder builder;
+    JsonObject root = builder.root();
+    root["name"] = group.second.name;
+    root["sorting_weight"] = group.second.weight;
+    message = builder.serialize();

     // up to 31 groups should be able to be queued initially without defer
     source->try_send_nodefer(message.c_str(), "sorting_group");
@@ -265,17 +266,20 @@ void WebServer::set_js_include(const char *js_include) { this->js_include_ = js_
 #endif

 std::string WebServer::get_config_json() {
-  return json::build_json([this](JsonObject root) {
-    root["title"] = App.get_friendly_name().empty() ? App.get_name() : App.get_friendly_name();
-    root["comment"] = App.get_comment();
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  root["title"] = App.get_friendly_name().empty() ? App.get_name() : App.get_friendly_name();
+  root["comment"] = App.get_comment();
 #if defined(USE_WEBSERVER_OTA_DISABLED) || !defined(USE_WEBSERVER_OTA)
-    root["ota"] = false; // Note: USE_WEBSERVER_OTA_DISABLED only affects web_server, not captive_portal
+  root["ota"] = false; // Note: USE_WEBSERVER_OTA_DISABLED only affects web_server, not captive_portal
 #else
-    root["ota"] = true;
+  root["ota"] = true;
 #endif
-    root["log"] = this->expose_log_;
-    root["lang"] = "en";
-  });
+  root["log"] = this->expose_log_;
+  root["lang"] = "en";
+
+  return builder.serialize();
 }

 void WebServer::setup() {
@@ -435,22 +439,26 @@ std::string WebServer::sensor_all_json_generator(WebServer *web_server, void *so
   return web_server->sensor_json((sensor::Sensor *) (source), ((sensor::Sensor *) (source))->state, DETAIL_ALL);
 }
 std::string WebServer::sensor_json(sensor::Sensor *obj, float value, JsonDetail start_config) {
-  return json::build_json([this, obj, value, start_config](JsonObject root) {
-    std::string state;
-    if (std::isnan(value)) {
-      state = "NA";
-    } else {
-      state = value_accuracy_to_string(value, obj->get_accuracy_decimals());
-      if (!obj->get_unit_of_measurement().empty())
-        state += " " + obj->get_unit_of_measurement();
-    }
-    set_json_icon_state_value(root, obj, "sensor-" + obj->get_object_id(), state, value, start_config);
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-      if (!obj->get_unit_of_measurement().empty())
-        root["uom"] = obj->get_unit_of_measurement();
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  // Build JSON directly inline
+  std::string state;
+  if (std::isnan(value)) {
+    state = "NA";
+  } else {
+    state = value_accuracy_to_string(value, obj->get_accuracy_decimals());
+    if (!obj->get_unit_of_measurement().empty())
+      state += " " + obj->get_unit_of_measurement();
+  }
+  set_json_icon_state_value(root, obj, "sensor-" + obj->get_object_id(), state, value, start_config);
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+    if (!obj->get_unit_of_measurement().empty())
+      root["uom"] = obj->get_unit_of_measurement();
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -483,12 +491,15 @@ std::string WebServer::text_sensor_all_json_generator(WebServer *web_server, voi
 }
 std::string WebServer::text_sensor_json(text_sensor::TextSensor *obj, const std::string &value,
                                         JsonDetail start_config) {
-  return json::build_json([this, obj, value, start_config](JsonObject root) {
-    set_json_icon_state_value(root, obj, "text_sensor-" + obj->get_object_id(), value, value, start_config);
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_icon_state_value(root, obj, "text_sensor-" + obj->get_object_id(), value, value, start_config);
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -553,13 +564,16 @@ std::string WebServer::switch_all_json_generator(WebServer *web_server, void *so
   return web_server->switch_json((switch_::Switch *) (source), ((switch_::Switch *) (source))->state, DETAIL_ALL);
 }
 std::string WebServer::switch_json(switch_::Switch *obj, bool value, JsonDetail start_config) {
-  return json::build_json([this, obj, value, start_config](JsonObject root) {
-    set_json_icon_state_value(root, obj, "switch-" + obj->get_object_id(), value ? "ON" : "OFF", value, start_config);
-    if (start_config == DETAIL_ALL) {
-      root["assumed_state"] = obj->assumed_state();
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_icon_state_value(root, obj, "switch-" + obj->get_object_id(), value ? "ON" : "OFF", value, start_config);
+  if (start_config == DETAIL_ALL) {
+    root["assumed_state"] = obj->assumed_state();
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -590,12 +604,15 @@ std::string WebServer::button_all_json_generator(WebServer *web_server, void *so
   return web_server->button_json((button::Button *) (source), DETAIL_ALL);
 }
 std::string WebServer::button_json(button::Button *obj, JsonDetail start_config) {
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_id(root, obj, "button-" + obj->get_object_id(), start_config);
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_id(root, obj, "button-" + obj->get_object_id(), start_config);
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -627,13 +644,16 @@ std::string WebServer::binary_sensor_all_json_generator(WebServer *web_server, v
                                         ((binary_sensor::BinarySensor *) (source))->state, DETAIL_ALL);
 }
 std::string WebServer::binary_sensor_json(binary_sensor::BinarySensor *obj, bool value, JsonDetail start_config) {
-  return json::build_json([this, obj, value, start_config](JsonObject root) {
-    set_json_icon_state_value(root, obj, "binary_sensor-" + obj->get_object_id(), value ? "ON" : "OFF", value,
-                              start_config);
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_icon_state_value(root, obj, "binary_sensor-" + obj->get_object_id(), value ? "ON" : "OFF", value,
+                            start_config);
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -694,20 +714,23 @@ std::string WebServer::fan_all_json_generator(WebServer *web_server, void *sourc
   return web_server->fan_json((fan::Fan *) (source), DETAIL_ALL);
 }
 std::string WebServer::fan_json(fan::Fan *obj, JsonDetail start_config) {
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_icon_state_value(root, obj, "fan-" + obj->get_object_id(), obj->state ? "ON" : "OFF", obj->state,
-                              start_config);
-    const auto traits = obj->get_traits();
-    if (traits.supports_speed()) {
-      root["speed_level"] = obj->speed;
-      root["speed_count"] = traits.supported_speed_count();
-    }
-    if (obj->get_traits().supports_oscillation())
-      root["oscillation"] = obj->oscillating;
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_icon_state_value(root, obj, "fan-" + obj->get_object_id(), obj->state ? "ON" : "OFF", obj->state,
+                            start_config);
+  const auto traits = obj->get_traits();
+  if (traits.supports_speed()) {
+    root["speed_level"] = obj->speed;
+    root["speed_count"] = traits.supported_speed_count();
+  }
+  if (obj->get_traits().supports_oscillation())
+    root["oscillation"] = obj->oscillating;
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -767,20 +790,23 @@ std::string WebServer::light_all_json_generator(WebServer *web_server, void *sou
   return web_server->light_json((light::LightState *) (source), DETAIL_ALL);
 }
 std::string WebServer::light_json(light::LightState *obj, JsonDetail start_config) {
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_id(root, obj, "light-" + obj->get_object_id(), start_config);
-    root["state"] = obj->remote_values.is_on() ? "ON" : "OFF";
-
-    light::LightJSONSchema::dump_json(*obj, root);
-    if (start_config == DETAIL_ALL) {
-      JsonArray opt = root["effects"].to<JsonArray>();
-      opt.add("None");
-      for (auto const &option : obj->get_effects()) {
-        opt.add(option->get_name());
-      }
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_id(root, obj, "light-" + obj->get_object_id(), start_config);
+  root["state"] = obj->remote_values.is_on() ? "ON" : "OFF";
+
+  light::LightJSONSchema::dump_json(*obj, root);
+  if (start_config == DETAIL_ALL) {
+    JsonArray opt = root["effects"].to<JsonArray>();
+    opt.add("None");
+    for (auto const &option : obj->get_effects()) {
+      opt.add(option->get_name());
+    }
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -839,19 +865,22 @@ std::string WebServer::cover_all_json_generator(WebServer *web_server, void *sou
   return web_server->cover_json((cover::Cover *) (source), DETAIL_ALL);
 }
 std::string WebServer::cover_json(cover::Cover *obj, JsonDetail start_config) {
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_icon_state_value(root, obj, "cover-" + obj->get_object_id(), obj->is_fully_closed() ? "CLOSED" : "OPEN",
-                              obj->position, start_config);
-    root["current_operation"] = cover::cover_operation_to_str(obj->current_operation);
-
-    if (obj->get_traits().get_supports_position())
-      root["position"] = obj->position;
-    if (obj->get_traits().get_supports_tilt())
-      root["tilt"] = obj->tilt;
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_icon_state_value(root, obj, "cover-" + obj->get_object_id(), obj->is_fully_closed() ? "CLOSED" : "OPEN",
+                            obj->position, start_config);
+  root["current_operation"] = cover::cover_operation_to_str(obj->current_operation);
+
+  if (obj->get_traits().get_supports_position())
+    root["position"] = obj->position;
+  if (obj->get_traits().get_supports_tilt())
+    root["tilt"] = obj->tilt;
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -894,31 +923,33 @@ std::string WebServer::number_all_json_generator(WebServer *web_server, void *so
   return web_server->number_json((number::Number *) (source), ((number::Number *) (source))->state, DETAIL_ALL);
 }
 std::string WebServer::number_json(number::Number *obj, float value, JsonDetail start_config) {
-  return json::build_json([this, obj, value, start_config](JsonObject root) {
-    set_json_id(root, obj, "number-" + obj->get_object_id(), start_config);
-    if (start_config == DETAIL_ALL) {
-      root["min_value"] =
-          value_accuracy_to_string(obj->traits.get_min_value(), step_to_accuracy_decimals(obj->traits.get_step()));
-      root["max_value"] =
-          value_accuracy_to_string(obj->traits.get_max_value(), step_to_accuracy_decimals(obj->traits.get_step()));
-      root["step"] =
-          value_accuracy_to_string(obj->traits.get_step(), step_to_accuracy_decimals(obj->traits.get_step()));
-      root["mode"] = (int) obj->traits.get_mode();
-      if (!obj->traits.get_unit_of_measurement().empty())
-        root["uom"] = obj->traits.get_unit_of_measurement();
-      this->add_sorting_info_(root, obj);
-    }
-    if (std::isnan(value)) {
-      root["value"] = "\"NaN\"";
-      root["state"] = "NA";
-    } else {
-      root["value"] = value_accuracy_to_string(value, step_to_accuracy_decimals(obj->traits.get_step()));
-      std::string state = value_accuracy_to_string(value, step_to_accuracy_decimals(obj->traits.get_step()));
-      if (!obj->traits.get_unit_of_measurement().empty())
-        state += " " + obj->traits.get_unit_of_measurement();
-      root["state"] = state;
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_id(root, obj, "number-" + obj->get_object_id(), start_config);
+  if (start_config == DETAIL_ALL) {
+    root["min_value"] =
+        value_accuracy_to_string(obj->traits.get_min_value(), step_to_accuracy_decimals(obj->traits.get_step()));
+    root["max_value"] =
+        value_accuracy_to_string(obj->traits.get_max_value(), step_to_accuracy_decimals(obj->traits.get_step()));
+    root["step"] = value_accuracy_to_string(obj->traits.get_step(), step_to_accuracy_decimals(obj->traits.get_step()));
+    root["mode"] = (int) obj->traits.get_mode();
+    if (!obj->traits.get_unit_of_measurement().empty())
+      root["uom"] = obj->traits.get_unit_of_measurement();
+    this->add_sorting_info_(root, obj);
+  }
+  if (std::isnan(value)) {
+    root["value"] = "\"NaN\"";
+    root["state"] = "NA";
+  } else {
+    root["value"] = value_accuracy_to_string(value, step_to_accuracy_decimals(obj->traits.get_step()));
+    std::string state = value_accuracy_to_string(value, step_to_accuracy_decimals(obj->traits.get_step()));
+    if (!obj->traits.get_unit_of_measurement().empty())
+      state += " " + obj->traits.get_unit_of_measurement();
+    root["state"] = state;
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -966,15 +997,18 @@ std::string WebServer::date_all_json_generator(WebServer *web_server, void *sour
   return web_server->date_json((datetime::DateEntity *) (source), DETAIL_ALL);
 }
 std::string WebServer::date_json(datetime::DateEntity *obj, JsonDetail start_config) {
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_id(root, obj, "date-" + obj->get_object_id(), start_config);
-    std::string value = str_sprintf("%d-%02d-%02d", obj->year, obj->month, obj->day);
-    root["value"] = value;
-    root["state"] = value;
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_id(root, obj, "date-" + obj->get_object_id(), start_config);
+  std::string value = str_sprintf("%d-%02d-%02d", obj->year, obj->month, obj->day);
+  root["value"] = value;
+  root["state"] = value;
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif // USE_DATETIME_DATE
@@ -1021,15 +1055,18 @@ std::string WebServer::time_all_json_generator(WebServer *web_server, void *sour
   return web_server->time_json((datetime::TimeEntity *) (source), DETAIL_ALL);
 }
 std::string WebServer::time_json(datetime::TimeEntity *obj, JsonDetail start_config) {
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_id(root, obj, "time-" + obj->get_object_id(), start_config);
-    std::string value = str_sprintf("%02d:%02d:%02d", obj->hour, obj->minute, obj->second);
-    root["value"] = value;
-    root["state"] = value;
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_id(root, obj, "time-" + obj->get_object_id(), start_config);
+  std::string value = str_sprintf("%02d:%02d:%02d", obj->hour, obj->minute, obj->second);
+  root["value"] = value;
+  root["state"] = value;
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif // USE_DATETIME_TIME
@@ -1076,16 +1113,19 @@ std::string WebServer::datetime_all_json_generator(WebServer *web_server, void *
   return web_server->datetime_json((datetime::DateTimeEntity *) (source), DETAIL_ALL);
 }
 std::string WebServer::datetime_json(datetime::DateTimeEntity *obj, JsonDetail start_config) {
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_id(root, obj, "datetime-" + obj->get_object_id(), start_config);
-    std::string value = str_sprintf("%d-%02d-%02d %02d:%02d:%02d", obj->year, obj->month, obj->day, obj->hour,
-                                    obj->minute, obj->second);
-    root["value"] = value;
-    root["state"] = value;
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_id(root, obj, "datetime-" + obj->get_object_id(), start_config);
+  std::string value =
+      str_sprintf("%d-%02d-%02d %02d:%02d:%02d", obj->year, obj->month, obj->day, obj->hour, obj->minute, obj->second);
+  root["value"] = value;
+  root["state"] = value;
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif // USE_DATETIME_DATETIME
@@ -1128,22 +1168,25 @@ std::string WebServer::text_all_json_generator(WebServer *web_server, void *sour
   return web_server->text_json((text::Text *) (source), ((text::Text *) (source))->state, DETAIL_ALL);
 }
 std::string WebServer::text_json(text::Text *obj, const std::string &value, JsonDetail start_config) {
-  return json::build_json([this, obj, value, start_config](JsonObject root) {
-    set_json_id(root, obj, "text-" + obj->get_object_id(), start_config);
-    root["min_length"] = obj->traits.get_min_length();
-    root["max_length"] = obj->traits.get_max_length();
-    root["pattern"] = obj->traits.get_pattern();
-    if (obj->traits.get_mode() == text::TextMode::TEXT_MODE_PASSWORD) {
-      root["state"] = "********";
-    } else {
-      root["state"] = value;
-    }
-    root["value"] = value;
-    if (start_config == DETAIL_ALL) {
-      root["mode"] = (int) obj->traits.get_mode();
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_id(root, obj, "text-" + obj->get_object_id(), start_config);
+  root["min_length"] = obj->traits.get_min_length();
+  root["max_length"] = obj->traits.get_max_length();
+  root["pattern"] = obj->traits.get_pattern();
+  if (obj->traits.get_mode() == text::TextMode::TEXT_MODE_PASSWORD) {
+    root["state"] = "********";
+  } else {
+    root["state"] = value;
+  }
+  root["value"] = value;
+  if (start_config == DETAIL_ALL) {
+    root["mode"] = (int) obj->traits.get_mode();
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -1186,16 +1229,19 @@ std::string WebServer::select_all_json_generator(WebServer *web_server, void *so
   return web_server->select_json((select::Select *) (source), ((select::Select *) (source))->state, DETAIL_ALL);
 }
 std::string WebServer::select_json(select::Select *obj, const std::string &value, JsonDetail start_config) {
-  return json::build_json([this, obj, value, start_config](JsonObject root) {
-    set_json_icon_state_value(root, obj, "select-" + obj->get_object_id(), value, value, start_config);
-    if (start_config == DETAIL_ALL) {
-      JsonArray opt = root["option"].to<JsonArray>();
-      for (auto &option : obj->traits.get_options()) {
-        opt.add(option);
-      }
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_icon_state_value(root, obj, "select-" + obj->get_object_id(), value, value, start_config);
+  if (start_config == DETAIL_ALL) {
+    JsonArray opt = root["option"].to<JsonArray>();
+    for (auto &option : obj->traits.get_options()) {
+      opt.add(option);
+    }
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -1244,98 +1290,102 @@ void WebServer::handle_climate_request(AsyncWebServerRequest *request, const Url
   request->send(404);
 }
 std::string WebServer::climate_state_json_generator(WebServer *web_server, void *source) {
   // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   return web_server->climate_json((climate::Climate *) (source), DETAIL_STATE);
 }
 std::string WebServer::climate_all_json_generator(WebServer *web_server, void *source) {
   // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   return web_server->climate_json((climate::Climate *) (source), DETAIL_ALL);
 }
 std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_config) {
   // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_id(root, obj, "climate-" + obj->get_object_id(), start_config);
-    const auto traits = obj->get_traits();
-    int8_t target_accuracy = traits.get_target_temperature_accuracy_decimals();
-    int8_t current_accuracy = traits.get_current_temperature_accuracy_decimals();
-    char buf[16];
-    if (start_config == DETAIL_ALL) {
-      JsonArray opt = root["modes"].to<JsonArray>();
-      for (climate::ClimateMode m : traits.get_supported_modes())
-        opt.add(PSTR_LOCAL(climate::climate_mode_to_string(m)));
-      if (!traits.get_supported_custom_fan_modes().empty()) {
-        JsonArray opt = root["fan_modes"].to<JsonArray>();
-        for (climate::ClimateFanMode m : traits.get_supported_fan_modes())
-          opt.add(PSTR_LOCAL(climate::climate_fan_mode_to_string(m)));
-      }
-      if (!traits.get_supported_custom_fan_modes().empty()) {
-        JsonArray opt = root["custom_fan_modes"].to<JsonArray>();
-        for (auto const &custom_fan_mode : traits.get_supported_custom_fan_modes())
-          opt.add(custom_fan_mode);
-      }
-      if (traits.get_supports_swing_modes()) {
-        JsonArray opt = root["swing_modes"].to<JsonArray>();
-        for (auto swing_mode : traits.get_supported_swing_modes())
-          opt.add(PSTR_LOCAL(climate::climate_swing_mode_to_string(swing_mode)));
-      }
-      if (traits.get_supports_presets() && obj->preset.has_value()) {
-        JsonArray opt = root["presets"].to<JsonArray>();
-        for (climate::ClimatePreset m : traits.get_supported_presets())
-          opt.add(PSTR_LOCAL(climate::climate_preset_to_string(m)));
-      }
-      if (!traits.get_supported_custom_presets().empty() && obj->custom_preset.has_value()) {
-        JsonArray opt = root["custom_presets"].to<JsonArray>();
-        for (auto const &custom_preset : traits.get_supported_custom_presets())
-          opt.add(custom_preset);
-      }
-      this->add_sorting_info_(root, obj);
-    }
-    bool has_state = false;
-    root["mode"] = PSTR_LOCAL(climate_mode_to_string(obj->mode));
-    root["max_temp"] = value_accuracy_to_string(traits.get_visual_max_temperature(), target_accuracy);
-    root["min_temp"] = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
-    root["step"] = traits.get_visual_target_temperature_step();
-    if (traits.get_supports_action()) {
-      root["action"] = PSTR_LOCAL(climate_action_to_string(obj->action));
-      root["state"] = root["action"];
-      has_state = true;
-    }
-    if (traits.get_supports_fan_modes() && obj->fan_mode.has_value()) {
-      root["fan_mode"] = PSTR_LOCAL(climate_fan_mode_to_string(obj->fan_mode.value()));
-    }
-    if (!traits.get_supported_custom_fan_modes().empty() && obj->custom_fan_mode.has_value()) {
-      root["custom_fan_mode"] = obj->custom_fan_mode.value().c_str();
-    }
-    if (traits.get_supports_presets() && obj->preset.has_value()) {
-      root["preset"] = PSTR_LOCAL(climate_preset_to_string(obj->preset.value()));
-    }
-    if (!traits.get_supported_custom_presets().empty() && obj->custom_preset.has_value()) {
-      root["custom_preset"] = obj->custom_preset.value().c_str();
-    }
-    if (traits.get_supports_swing_modes()) {
-      root["swing_mode"] = PSTR_LOCAL(climate_swing_mode_to_string(obj->swing_mode));
-    }
-    if (traits.get_supports_current_temperature()) {
-      if (!std::isnan(obj->current_temperature)) {
-        root["current_temperature"] = value_accuracy_to_string(obj->current_temperature, current_accuracy);
-      } else {
-        root["current_temperature"] = "NA";
-      }
-    }
-    if (traits.get_supports_two_point_target_temperature()) {
-      root["target_temperature_low"] = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
-      root["target_temperature_high"] = value_accuracy_to_string(obj->target_temperature_high, target_accuracy);
-      if (!has_state) {
-        root["state"] = value_accuracy_to_string((obj->target_temperature_high + obj->target_temperature_low) / 2.0f,
-                                                 target_accuracy);
-      }
-    } else {
-      root["target_temperature"] = value_accuracy_to_string(obj->target_temperature, target_accuracy);
-      if (!has_state)
-        root["state"] = root["target_temperature"];
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+  set_json_id(root, obj, "climate-" + obj->get_object_id(), start_config);
+  const auto traits = obj->get_traits();
+  int8_t target_accuracy = traits.get_target_temperature_accuracy_decimals();
+  int8_t current_accuracy = traits.get_current_temperature_accuracy_decimals();
+  char buf[16];
+
+  if (start_config == DETAIL_ALL) {
+    JsonArray opt = root["modes"].to<JsonArray>();
+    for (climate::ClimateMode m : traits.get_supported_modes())
+      opt.add(PSTR_LOCAL(climate::climate_mode_to_string(m)));
+    if (!traits.get_supported_custom_fan_modes().empty()) {
+      JsonArray opt = root["fan_modes"].to<JsonArray>();
+      for (climate::ClimateFanMode m : traits.get_supported_fan_modes())
+        opt.add(PSTR_LOCAL(climate::climate_fan_mode_to_string(m)));
+    }
+    if (!traits.get_supported_custom_fan_modes().empty()) {
+      JsonArray opt = root["custom_fan_modes"].to<JsonArray>();
+      for (auto const &custom_fan_mode : traits.get_supported_custom_fan_modes())
+        opt.add(custom_fan_mode);
+    }
+    if (traits.get_supports_swing_modes()) {
+      JsonArray opt = root["swing_modes"].to<JsonArray>();
+      for (auto swing_mode : traits.get_supported_swing_modes())
+        opt.add(PSTR_LOCAL(climate::climate_swing_mode_to_string(swing_mode)));
+    }
+    if (traits.get_supports_presets() && obj->preset.has_value()) {
+      JsonArray opt = root["presets"].to<JsonArray>();
+      for (climate::ClimatePreset m : traits.get_supported_presets())
+        opt.add(PSTR_LOCAL(climate::climate_preset_to_string(m)));
+    }
+    if (!traits.get_supported_custom_presets().empty() && obj->custom_preset.has_value()) {
+      JsonArray opt = root["custom_presets"].to<JsonArray>();
+      for (auto const &custom_preset : traits.get_supported_custom_presets())
+        opt.add(custom_preset);
+    }
+    this->add_sorting_info_(root, obj);
+  }
+
+  bool has_state = false;
+  root["mode"] = PSTR_LOCAL(climate_mode_to_string(obj->mode));
+  root["max_temp"] = value_accuracy_to_string(traits.get_visual_max_temperature(), target_accuracy);
+  root["min_temp"] = value_accuracy_to_string(traits.get_visual_min_temperature(), target_accuracy);
+  root["step"] = traits.get_visual_target_temperature_step();
+  if (traits.get_supports_action()) {
+    root["action"] = PSTR_LOCAL(climate_action_to_string(obj->action));
+    root["state"] = root["action"];
+    has_state = true;
+  }
+  if (traits.get_supports_fan_modes() && obj->fan_mode.has_value()) {
+    root["fan_mode"] = PSTR_LOCAL(climate_fan_mode_to_string(obj->fan_mode.value()));
+  }
+  if (!traits.get_supported_custom_fan_modes().empty() && obj->custom_fan_mode.has_value()) {
+    root["custom_fan_mode"] = obj->custom_fan_mode.value().c_str();
+  }
+  if (traits.get_supports_presets() && obj->preset.has_value()) {
+    root["preset"] = PSTR_LOCAL(climate_preset_to_string(obj->preset.value()));
+  }
+  if (!traits.get_supported_custom_presets().empty() && obj->custom_preset.has_value()) {
+    root["custom_preset"] = obj->custom_preset.value().c_str();
+  }
+  if (traits.get_supports_swing_modes()) {
+    root["swing_mode"] = PSTR_LOCAL(climate_swing_mode_to_string(obj->swing_mode));
+  }
+  if (traits.get_supports_current_temperature()) {
+    if (!std::isnan(obj->current_temperature)) {
+      root["current_temperature"] = value_accuracy_to_string(obj->current_temperature, current_accuracy);
+    } else {
+      root["current_temperature"] = "NA";
+    }
+  }
+  if (traits.get_supports_two_point_target_temperature()) {
+    root["target_temperature_low"] = value_accuracy_to_string(obj->target_temperature_low, target_accuracy);
+    root["target_temperature_high"] = value_accuracy_to_string(obj->target_temperature_high, target_accuracy);
+    if (!has_state) {
+      root["state"] = value_accuracy_to_string((obj->target_temperature_high + obj->target_temperature_low) / 2.0f,
+                                               target_accuracy);
+    }
+  } else {
+    root["target_temperature"] = value_accuracy_to_string(obj->target_temperature, target_accuracy);
+    if (!has_state)
+      root["state"] = root["target_temperature"];
+  }
+
+  return builder.serialize();
   // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 }
 #endif
@@ -1401,13 +1451,16 @@ std::string WebServer::lock_all_json_generator(WebServer *web_server, void *sour
   return web_server->lock_json((lock::Lock *) (source), ((lock::Lock *) (source))->state, DETAIL_ALL);
 }
 std::string WebServer::lock_json(lock::Lock *obj, lock::LockState value, JsonDetail start_config) {
-  return json::build_json([this, obj, value, start_config](JsonObject root) {
-    set_json_icon_state_value(root, obj, "lock-" + obj->get_object_id(), lock::lock_state_to_string(value), value,
-                              start_config);
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_icon_state_value(root, obj, "lock-" + obj->get_object_id(), lock::lock_state_to_string(value), value,
+                            start_config);
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -1464,17 +1517,20 @@ std::string WebServer::valve_all_json_generator(WebServer *web_server, void *sou
   return web_server->valve_json((valve::Valve *) (source), DETAIL_ALL);
 }
 std::string WebServer::valve_json(valve::Valve *obj, JsonDetail start_config) {
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_icon_state_value(root, obj, "valve-" + obj->get_object_id(), obj->is_fully_closed() ? "CLOSED" : "OPEN",
-                              obj->position, start_config);
-    root["current_operation"] = valve::valve_operation_to_str(obj->current_operation);
-
-    if (obj->get_traits().get_supports_position())
-      root["position"] = obj->position;
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_icon_state_value(root, obj, "valve-" + obj->get_object_id(), obj->is_fully_closed() ? "CLOSED" : "OPEN",
+                            obj->position, start_config);
+  root["current_operation"] = valve::valve_operation_to_str(obj->current_operation);
+
+  if (obj->get_traits().get_supports_position())
+    root["position"] = obj->position;
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -1533,14 +1589,17 @@ std::string WebServer::alarm_control_panel_all_json_generator(WebServer *web_ser
 std::string WebServer::alarm_control_panel_json(alarm_control_panel::AlarmControlPanel *obj,
                                                 alarm_control_panel::AlarmControlPanelState value,
                                                 JsonDetail start_config) {
-  return json::build_json([this, obj, value, start_config](JsonObject root) {
-    char buf[16];
-    set_json_icon_state_value(root, obj, "alarm-control-panel-" + obj->get_object_id(),
-                              PSTR_LOCAL(alarm_control_panel_state_to_string(value)), value, start_config);
-    if (start_config == DETAIL_ALL) {
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  char buf[16];
+  set_json_icon_state_value(root, obj, "alarm-control-panel-" + obj->get_object_id(),
+                            PSTR_LOCAL(alarm_control_panel_state_to_string(value)), value, start_config);
+  if (start_config == DETAIL_ALL) {
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -1577,20 +1636,23 @@ std::string WebServer::event_all_json_generator(WebServer *web_server, void *sou
   return web_server->event_json(event, get_event_type(event), DETAIL_ALL);
 }
 std::string WebServer::event_json(event::Event *obj, const std::string &event_type, JsonDetail start_config) {
-  return json::build_json([this, obj, event_type, start_config](JsonObject root) {
-    set_json_id(root, obj, "event-" + obj->get_object_id(), start_config);
-    if (!event_type.empty()) {
-      root["event_type"] = event_type;
-    }
-    if (start_config == DETAIL_ALL) {
-      JsonArray event_types = root["event_types"].to<JsonArray>();
-      for (auto const &event_type : obj->get_event_types()) {
-        event_types.add(event_type);
-      }
-      root["device_class"] = obj->get_device_class();
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_id(root, obj, "event-" + obj->get_object_id(), start_config);
+  if (!event_type.empty()) {
+    root["event_type"] = event_type;
+  }
+  if (start_config == DETAIL_ALL) {
+    JsonArray event_types = root["event_types"].to<JsonArray>();
+    for (auto const &event_type : obj->get_event_types()) {
+      event_types.add(event_type);
+    }
+    root["device_class"] = obj->get_device_class();
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
 }
 #endif
@@ -1637,25 +1699,30 @@ void WebServer::handle_update_request(AsyncWebServerRequest *request, const UrlM
   request->send(404);
 }
 std::string WebServer::update_state_json_generator(WebServer *web_server, void *source) {
   // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   return web_server->update_json((update::UpdateEntity *) (source), DETAIL_STATE);
 }
 std::string WebServer::update_all_json_generator(WebServer *web_server, void *source) {
   // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
   return web_server->update_json((update::UpdateEntity *) (source), DETAIL_STATE);
 }
 std::string WebServer::update_json(update::UpdateEntity *obj, JsonDetail start_config) {
   // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
-  return json::build_json([this, obj, start_config](JsonObject root) {
-    set_json_id(root, obj, "update-" + obj->get_object_id(), start_config);
-    root["value"] = obj->update_info.latest_version;
-    root["state"] = update_state_to_string(obj->state);
-    if (start_config == DETAIL_ALL) {
-      root["current_version"] = obj->update_info.current_version;
-      root["title"] = obj->update_info.title;
-      root["summary"] = obj->update_info.summary;
-      root["release_url"] = obj->update_info.release_url;
-      this->add_sorting_info_(root, obj);
-    }
-  });
+  json::JsonBuilder builder;
+  JsonObject root = builder.root();
+
+  set_json_id(root, obj, "update-" + obj->get_object_id(), start_config);
+  root["value"] = obj->update_info.latest_version;
+  root["state"] = update_state_to_string(obj->state);
+  if (start_config == DETAIL_ALL) {
+    root["current_version"] = obj->update_info.current_version;
+    root["title"] = obj->update_info.title;
+    root["summary"] = obj->update_info.summary;
+    root["release_url"] = obj->update_info.release_url;
+    this->add_sorting_info_(root, obj);
+  }
+
+  return builder.serialize();
   // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
 }
 #endif
@@ -317,8 +317,8 @@ AsyncEventSource::~AsyncEventSource() {
 }

 void AsyncEventSource::handleRequest(AsyncWebServerRequest *request) {
-  auto *rsp =  // NOLINT(cppcoreguidelines-owning-memory)
-      new AsyncEventSourceResponse(request, this, this->web_server_);
+  // NOLINTNEXTLINE(cppcoreguidelines-owning-memory,clang-analyzer-cplusplus.NewDeleteLeaks)
+  auto *rsp = new AsyncEventSourceResponse(request, this, this->web_server_);
   if (this->on_connect_) {
     this->on_connect_(rsp);
   }
@@ -392,10 +392,11 @@ AsyncEventSourceResponse::AsyncEventSourceResponse(const AsyncWebServerRequest *
 #ifdef USE_WEBSERVER_SORTING
   for (auto &group : ws->sorting_groups_) {
     // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
-    message = json::build_json([group](JsonObject root) {
-      root["name"] = group.second.name;
-      root["sorting_weight"] = group.second.weight;
-    });
+    json::JsonBuilder builder;
+    JsonObject root = builder.root();
+    root["name"] = group.second.name;
+    root["sorting_weight"] = group.second.weight;
+    message = builder.serialize();
     // NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)

     // a (very) large number of these should be able to be queued initially without defer
@@ -846,7 +846,9 @@ class PinUseValidationCheck(ConfigValidationStep):
|
||||
|
||||
|
||||
def validate_config(
|
||||
config: dict[str, Any], command_line_substitutions: dict[str, Any]
|
||||
config: dict[str, Any],
|
||||
command_line_substitutions: dict[str, Any],
|
||||
skip_external_update: bool = False,
|
||||
) -> Config:
|
||||
result = Config()
|
||||
|
||||
@@ -859,7 +861,7 @@ def validate_config(
|
||||
|
||||
result.add_output_path([CONF_PACKAGES], CONF_PACKAGES)
|
||||
try:
|
||||
config = do_packages_pass(config)
|
||||
config = do_packages_pass(config, skip_update=skip_external_update)
|
||||
except vol.Invalid as err:
|
||||
result.update(config)
|
||||
result.add_error(err)
|
||||
@@ -896,7 +898,7 @@ def validate_config(
|
||||
|
||||
result.add_output_path([CONF_EXTERNAL_COMPONENTS], CONF_EXTERNAL_COMPONENTS)
|
||||
try:
|
||||
do_external_components_pass(config)
|
||||
do_external_components_pass(config, skip_update=skip_external_update)
|
||||
except vol.Invalid as err:
|
||||
result.update(config)
|
||||
result.add_error(err)
|
||||
@@ -1020,7 +1022,9 @@ class InvalidYAMLError(EsphomeError):
|
||||
self.base_exc = base_exc
|
||||
|
||||
|
||||
def _load_config(command_line_substitutions: dict[str, Any]) -> Config:
|
||||
def _load_config(
|
||||
command_line_substitutions: dict[str, Any], skip_external_update: bool = False
|
||||
) -> Config:
|
||||
"""Load the configuration file."""
|
||||
try:
|
||||
config = yaml_util.load_yaml(CORE.config_path)
|
||||
@@ -1028,7 +1032,7 @@ def _load_config(command_line_substitutions: dict[str, Any]) -> Config:
|
||||
raise InvalidYAMLError(e) from e
|
||||
|
||||
try:
|
||||
return validate_config(config, command_line_substitutions)
|
||||
return validate_config(config, command_line_substitutions, skip_external_update)
|
||||
except EsphomeError:
|
||||
raise
|
||||
except Exception:
|
||||
@@ -1036,9 +1040,11 @@ def _load_config(command_line_substitutions: dict[str, Any]) -> Config:
|
||||
raise
|
||||
|
||||
|
||||
def load_config(command_line_substitutions: dict[str, Any]) -> Config:
|
||||
def load_config(
|
||||
command_line_substitutions: dict[str, Any], skip_external_update: bool = False
|
||||
) -> Config:
|
||||
try:
|
||||
return _load_config(command_line_substitutions)
|
||||
return _load_config(command_line_substitutions, skip_external_update)
|
||||
except vol.Invalid as err:
|
||||
raise EsphomeError(f"Error while parsing config: {err}") from err
|
||||
|
||||
@@ -1178,10 +1184,10 @@ def strip_default_ids(config):
    return config


def read_config(command_line_substitutions):
def read_config(command_line_substitutions, skip_external_update=False):
    _LOGGER.info("Reading configuration %s...", CORE.config_path)
    try:
        res = load_config(command_line_substitutions)
        res = load_config(command_line_substitutions, skip_external_update)
    except EsphomeError as err:
        _LOGGER.error("Error while reading config: %s", err)
        return None

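Reviewer note: taken together, the hunks above thread a single flag from read_config() down to the two config passes. A condensed, illustrative trace (only the function names come from this diff; the call sites in between are compressed):

# Illustrative only: how skip_external_update propagates when set.
config = read_config({}, skip_external_update=True)
# read_config() -> load_config() -> _load_config() -> validate_config(),
# which then calls:
#   do_packages_pass(config, skip_update=True)
#   do_external_components_pass(config, skip_update=True)
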
@@ -1269,6 +1269,7 @@ DEVICE_CLASS_PLUG = "plug"
DEVICE_CLASS_PM1 = "pm1"
DEVICE_CLASS_PM10 = "pm10"
DEVICE_CLASS_PM25 = "pm25"
DEVICE_CLASS_PM4 = "pm4"
DEVICE_CLASS_POWER = "power"
DEVICE_CLASS_POWER_FACTOR = "power_factor"
DEVICE_CLASS_PRECIPITATION = "precipitation"

@@ -13,6 +13,9 @@ from esphome.core import CORE, TimePeriodSeconds

_LOGGER = logging.getLogger(__name__)

# Special value to indicate never refresh
NEVER_REFRESH = TimePeriodSeconds(seconds=-1)


def run_git_command(cmd, cwd=None) -> str:
    _LOGGER.debug("Running git command: %s", " ".join(cmd))

@@ -85,6 +88,11 @@ def clone_or_update(

    else:
        # Check refresh needed
        # Skip refresh if NEVER_REFRESH is specified
        if refresh == NEVER_REFRESH:
            _LOGGER.debug("Skipping update for %s (refresh disabled)", key)
            return repo_dir, None

        file_timestamp = Path(repo_dir / ".git" / "FETCH_HEAD")
        # On first clone, FETCH_HEAD does not exist
        if not file_timestamp.exists():

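Reviewer note: NEVER_REFRESH works as a sentinel because no user-facing refresh option can produce a negative duration. A minimal sketch of the comparison semantics, assuming TimePeriodSeconds compares by value:

from esphome.core import TimePeriodSeconds
from esphome import git

assert git.NEVER_REFRESH == TimePeriodSeconds(seconds=-1)
assert git.NEVER_REFRESH != TimePeriodSeconds(days=1)  # a real refresh interval
assert git.NEVER_REFRESH != None  # noqa: E711 -- None still means "always update"
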
@@ -301,6 +301,11 @@ def clean_cmake_cache():
def clean_build():
    import shutil

    # Allow skipping cache cleaning for integration tests
    if os.environ.get("ESPHOME_SKIP_CLEAN_BUILD"):
        _LOGGER.warning("Skipping build cleaning (ESPHOME_SKIP_CLEAN_BUILD set)")
        return

    pioenvs = CORE.relative_pioenvs_path()
    if pioenvs.is_dir():
        _LOGGER.info("Deleting %s", pioenvs)

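Reviewer note: because the escape hatch is a plain environment variable, any harness can opt in before triggering a build; the integration conftest below does exactly this through its env dict. A one-line sketch:

import os

os.environ["ESPHOME_SKIP_CLEAN_BUILD"] = "1"  # clean_build() now warns and returns early
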
@@ -6,6 +6,7 @@ from collections.abc import Callable, Generator
from pathlib import Path
import sys
from typing import Any
from unittest import mock

import pytest

@@ -17,6 +18,7 @@ from esphome.const import (
    PlatformFramework,
)
from esphome.types import ConfigType
from esphome.util import OrderedDict

# Add package root to python path
here = Path(__file__).parent

@@ -135,3 +137,29 @@ def generate_main() -> Generator[Callable[[str | Path], str]]:
        return CORE.cpp_main_section

    yield generator


@pytest.fixture
def mock_clone_or_update() -> Generator[Any]:
    """Mock git.clone_or_update for testing."""
    with mock.patch("esphome.git.clone_or_update") as mock_func:
        # Default return value
        mock_func.return_value = (Path("/tmp/test"), None)
        yield mock_func


@pytest.fixture
def mock_load_yaml() -> Generator[Any]:
    """Mock yaml_util.load_yaml for testing."""

    with mock.patch("esphome.yaml_util.load_yaml") as mock_func:
        # Default return value
        mock_func.return_value = OrderedDict({"sensor": []})
        yield mock_func


@pytest.fixture
def mock_install_meta_finder() -> Generator[Any]:
    """Mock loader.install_meta_finder for testing."""
    with mock.patch("esphome.loader.install_meta_finder") as mock_func:
        yield mock_func

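Reviewer note: pytest injects these fixtures by parameter name, so a test only has to declare them. A hypothetical sketch (test name and arguments are illustrative):

def test_uses_patched_git(mock_clone_or_update) -> None:
    from esphome import git

    # The patch is already active: the real clone never runs.
    result = git.clone_or_update(
        url="https://example.invalid", ref=None, refresh=None, domain="demo"
    )
    assert result == mock_clone_or_update.return_value
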
134
tests/component_tests/external_components/test_init.py
Normal file
@@ -0,0 +1,134 @@
"""Tests for the external_components skip_update functionality."""

from pathlib import Path
from typing import Any
from unittest.mock import MagicMock

from esphome.components.external_components import do_external_components_pass
from esphome.const import (
    CONF_EXTERNAL_COMPONENTS,
    CONF_REFRESH,
    CONF_SOURCE,
    CONF_URL,
    TYPE_GIT,
)


def test_external_components_skip_update_true(
    tmp_path: Path, mock_clone_or_update: MagicMock, mock_install_meta_finder: MagicMock
) -> None:
    """Test that external components don't update when skip_update=True."""
    # Create a components directory structure
    components_dir = tmp_path / "components"
    components_dir.mkdir()

    # Create a test component
    test_component_dir = components_dir / "test_component"
    test_component_dir.mkdir()
    (test_component_dir / "__init__.py").write_text("# Test component")

    # Set up mock to return our tmp_path
    mock_clone_or_update.return_value = (tmp_path, None)

    config: dict[str, Any] = {
        CONF_EXTERNAL_COMPONENTS: [
            {
                CONF_SOURCE: {
                    "type": TYPE_GIT,
                    CONF_URL: "https://github.com/test/components",
                },
                CONF_REFRESH: "1d",
                "components": "all",
            }
        ]
    }

    # Call with skip_update=True
    do_external_components_pass(config, skip_update=True)

    # Verify clone_or_update was called with NEVER_REFRESH
    mock_clone_or_update.assert_called_once()
    call_args = mock_clone_or_update.call_args
    from esphome import git

    assert call_args.kwargs["refresh"] == git.NEVER_REFRESH


def test_external_components_skip_update_false(
    tmp_path: Path, mock_clone_or_update: MagicMock, mock_install_meta_finder: MagicMock
) -> None:
    """Test that external components update when skip_update=False."""
    # Create a components directory structure
    components_dir = tmp_path / "components"
    components_dir.mkdir()

    # Create a test component
    test_component_dir = components_dir / "test_component"
    test_component_dir.mkdir()
    (test_component_dir / "__init__.py").write_text("# Test component")

    # Set up mock to return our tmp_path
    mock_clone_or_update.return_value = (tmp_path, None)

    config: dict[str, Any] = {
        CONF_EXTERNAL_COMPONENTS: [
            {
                CONF_SOURCE: {
                    "type": TYPE_GIT,
                    CONF_URL: "https://github.com/test/components",
                },
                CONF_REFRESH: "1d",
                "components": "all",
            }
        ]
    }

    # Call with skip_update=False
    do_external_components_pass(config, skip_update=False)

    # Verify clone_or_update was called with actual refresh value
    mock_clone_or_update.assert_called_once()
    call_args = mock_clone_or_update.call_args
    from esphome.core import TimePeriodSeconds

    assert call_args.kwargs["refresh"] == TimePeriodSeconds(days=1)


def test_external_components_default_no_skip(
    tmp_path: Path, mock_clone_or_update: MagicMock, mock_install_meta_finder: MagicMock
) -> None:
    """Test that external components update by default when skip_update not specified."""
    # Create a components directory structure
    components_dir = tmp_path / "components"
    components_dir.mkdir()

    # Create a test component
    test_component_dir = components_dir / "test_component"
    test_component_dir.mkdir()
    (test_component_dir / "__init__.py").write_text("# Test component")

    # Set up mock to return our tmp_path
    mock_clone_or_update.return_value = (tmp_path, None)

    config: dict[str, Any] = {
        CONF_EXTERNAL_COMPONENTS: [
            {
                CONF_SOURCE: {
                    "type": TYPE_GIT,
                    CONF_URL: "https://github.com/test/components",
                },
                CONF_REFRESH: "1d",
                "components": "all",
            }
        ]
    }

    # Call without skip_update parameter
    do_external_components_pass(config)

    # Verify clone_or_update was called with actual refresh value
    mock_clone_or_update.assert_called_once()
    call_args = mock_clone_or_update.call_args
    from esphome.core import TimePeriodSeconds

    assert call_args.kwargs["refresh"] == TimePeriodSeconds(days=1)

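Reviewer note: the refresh assertions rely on unittest.mock recording keyword arguments; call_args.kwargs is a plain dict for the most recent call. A minimal standalone illustration:

from unittest.mock import MagicMock

m = MagicMock()
m(url="https://example.invalid", refresh="1d")
assert m.call_args.kwargs["refresh"] == "1d"
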
114
tests/component_tests/packages/test_init.py
Normal file
@@ -0,0 +1,114 @@
"""Tests for the packages component skip_update functionality."""

from pathlib import Path
from typing import Any
from unittest.mock import MagicMock

from esphome.components.packages import do_packages_pass
from esphome.const import CONF_FILES, CONF_PACKAGES, CONF_REFRESH, CONF_URL
from esphome.util import OrderedDict


def test_packages_skip_update_true(
    tmp_path: Path, mock_clone_or_update: MagicMock, mock_load_yaml: MagicMock
) -> None:
    """Test that packages don't update when skip_update=True."""
    # Set up mock to return our tmp_path
    mock_clone_or_update.return_value = (tmp_path, None)

    # Create the test yaml file
    test_file = tmp_path / "test.yaml"
    test_file.write_text("sensor: []")

    # Set mock_load_yaml to return some valid config
    mock_load_yaml.return_value = OrderedDict({"sensor": []})

    config: dict[str, Any] = {
        CONF_PACKAGES: {
            "test_package": {
                CONF_URL: "https://github.com/test/repo",
                CONF_FILES: ["test.yaml"],
                CONF_REFRESH: "1d",
            }
        }
    }

    # Call with skip_update=True
    do_packages_pass(config, skip_update=True)

    # Verify clone_or_update was called with NEVER_REFRESH
    mock_clone_or_update.assert_called_once()
    call_args = mock_clone_or_update.call_args
    from esphome import git

    assert call_args.kwargs["refresh"] == git.NEVER_REFRESH


def test_packages_skip_update_false(
    tmp_path: Path, mock_clone_or_update: MagicMock, mock_load_yaml: MagicMock
) -> None:
    """Test that packages update when skip_update=False."""
    # Set up mock to return our tmp_path
    mock_clone_or_update.return_value = (tmp_path, None)

    # Create the test yaml file
    test_file = tmp_path / "test.yaml"
    test_file.write_text("sensor: []")

    # Set mock_load_yaml to return some valid config
    mock_load_yaml.return_value = OrderedDict({"sensor": []})

    config: dict[str, Any] = {
        CONF_PACKAGES: {
            "test_package": {
                CONF_URL: "https://github.com/test/repo",
                CONF_FILES: ["test.yaml"],
                CONF_REFRESH: "1d",
            }
        }
    }

    # Call with skip_update=False (default)
    do_packages_pass(config, skip_update=False)

    # Verify clone_or_update was called with actual refresh value
    mock_clone_or_update.assert_called_once()
    call_args = mock_clone_or_update.call_args
    from esphome.core import TimePeriodSeconds

    assert call_args.kwargs["refresh"] == TimePeriodSeconds(days=1)


def test_packages_default_no_skip(
    tmp_path: Path, mock_clone_or_update: MagicMock, mock_load_yaml: MagicMock
) -> None:
    """Test that packages update by default when skip_update not specified."""
    # Set up mock to return our tmp_path
    mock_clone_or_update.return_value = (tmp_path, None)

    # Create the test yaml file
    test_file = tmp_path / "test.yaml"
    test_file.write_text("sensor: []")

    # Set mock_load_yaml to return some valid config
    mock_load_yaml.return_value = OrderedDict({"sensor": []})

    config: dict[str, Any] = {
        CONF_PACKAGES: {
            "test_package": {
                CONF_URL: "https://github.com/test/repo",
                CONF_FILES: ["test.yaml"],
                CONF_REFRESH: "1d",
            }
        }
    }

    # Call without skip_update parameter
    do_packages_pass(config)

    # Verify clone_or_update was called with actual refresh value
    mock_clone_or_update.assert_called_once()
    call_args = mock_clone_or_update.call_args
    from esphome.core import TimePeriodSeconds

    assert call_args.kwargs["refresh"] == TimePeriodSeconds(days=1)

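Reviewer note: the config dicts these tests build correspond roughly to a remote-package block in a device YAML (values taken from the test config above; the YAML shape is the packages component's usual one):

packages:
  test_package:
    url: https://github.com/test/repo
    files: [test.yaml]
    refresh: 1d
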
@@ -58,6 +58,8 @@ def _get_platformio_env(cache_dir: Path) -> dict[str, str]:
    env["PLATFORMIO_CORE_DIR"] = str(cache_dir)
    env["PLATFORMIO_CACHE_DIR"] = str(cache_dir / ".cache")
    env["PLATFORMIO_LIBDEPS_DIR"] = str(cache_dir / "libdeps")
    # Prevent cache cleaning during integration tests
    env["ESPHOME_SKIP_CLEAN_BUILD"] = "1"
    return env


@@ -68,6 +70,11 @@ def shared_platformio_cache() -> Generator[Path]:
    test_cache_dir = Path.home() / ".esphome-integration-tests"
    cache_dir = test_cache_dir / "platformio"

    # Create the temp directory that PlatformIO uses to avoid race conditions
    # This ensures it exists and won't be deleted by parallel processes
    platformio_tmp_dir = cache_dir / ".cache" / "tmp"
    platformio_tmp_dir.mkdir(parents=True, exist_ok=True)

    # Use a lock file in the home directory to ensure only one process initializes the cache
    # This is needed when running with pytest-xdist
    # The lock file must be in a directory that already exists to avoid race conditions

@@ -83,17 +90,11 @@ def shared_platformio_cache() -> Generator[Path]:
        test_cache_dir.mkdir(exist_ok=True)

        with tempfile.TemporaryDirectory() as tmpdir:
            # Create a basic host config
            # Use the cache_init fixture for initialization
            init_dir = Path(tmpdir)
            fixture_path = Path(__file__).parent / "fixtures" / "cache_init.yaml"
            config_path = init_dir / "cache_init.yaml"
            config_path.write_text("""esphome:
  name: cache-init
host:
api:
  encryption:
    key: "IIevImVI42I0FGos5nLqFK91jrJehrgidI0ArwMLr8w="
logger:
""")
            config_path.write_text(fixture_path.read_text())

            # Run compilation to populate the cache
            # We must succeed here to avoid race conditions where multiple

@@ -346,7 +347,8 @@ async def wait_and_connect_api_client(
    noise_psk: str | None = None,
    client_info: str = "integration-test",
    timeout: float = API_CONNECTION_TIMEOUT,
) -> AsyncGenerator[APIClient]:
    return_disconnect_event: bool = False,
) -> AsyncGenerator[APIClient | tuple[APIClient, asyncio.Event]]:
    """Wait for API to be available and connect."""
    client = APIClient(
        address=address,

@@ -359,14 +361,17 @@ async def wait_and_connect_api_client(
    # Create a future to signal when connected
    loop = asyncio.get_running_loop()
    connected_future: asyncio.Future[None] = loop.create_future()
    disconnect_event = asyncio.Event()

    async def on_connect() -> None:
        """Called when successfully connected."""
        disconnect_event.clear()  # Clear the disconnect event on new connection
        if not connected_future.done():
            connected_future.set_result(None)

    async def on_disconnect(expected_disconnect: bool) -> None:
        """Called when disconnected."""
        disconnect_event.set()
        if not connected_future.done() and not expected_disconnect:
            connected_future.set_exception(
                APIConnectionError("Disconnected before fully connected")

@@ -397,7 +402,10 @@ async def wait_and_connect_api_client(
        except TimeoutError:
            raise TimeoutError(f"Failed to connect to API after {timeout} seconds")

        yield client
        if return_disconnect_event:
            yield client, disconnect_event
        else:
            yield client
    finally:
        # Stop reconnect logic and disconnect
        await reconnect_logic.stop()

@@ -430,6 +438,33 @@ async def api_client_connected(
    yield _connect_client


@pytest_asyncio.fixture
async def api_client_connected_with_disconnect(
    unused_tcp_port: int,
) -> AsyncGenerator:
    """Factory for creating connected API client context managers with disconnect event."""

    def _connect_client_with_disconnect(
        address: str = LOCALHOST,
        port: int | None = None,
        password: str = "",
        noise_psk: str | None = None,
        client_info: str = "integration-test",
        timeout: float = API_CONNECTION_TIMEOUT,
    ):
        return wait_and_connect_api_client(
            address=address,
            port=port if port is not None else unused_tcp_port,
            password=password,
            noise_psk=noise_psk,
            client_info=client_info,
            timeout=timeout,
            return_disconnect_event=True,
        )

    yield _connect_client_with_disconnect


async def _read_stream_lines(
    stream: asyncio.StreamReader,
    lines: list[str],

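Reviewer note: a test consumes the new factory as an async context manager and unpacks the pair; the oversized-payload tests below all follow this shape. Condensed sketch:

async with api_client_connected_with_disconnect() as (client, disconnect_event):
    await client.device_info()  # normal traffic
    # ... provoke a server-side disconnect ...
    await asyncio.wait_for(disconnect_event.wait(), timeout=5.0)
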
10
tests/integration/fixtures/cache_init.yaml
Normal file
@@ -0,0 +1,10 @@
esphome:
  name: cache-init

host:

api:
  encryption:
    key: "IIevImVI42I0FGos5nLqFK91jrJehrgidI0ArwMLr8w="

logger:

@@ -0,0 +1,11 @@
esphome:
  name: oversized-noise

host:

api:
  encryption:
    key: N4Yle5YirwZhPiHHsdZLdOA73ndj/84veVaLhTvxCuU=

logger:
  level: VERY_VERBOSE

11
tests/integration/fixtures/oversized_payload_noise.yaml
Normal file
@@ -0,0 +1,11 @@
esphome:
  name: oversized-noise

host:

api:
  encryption:
    key: N4Yle5YirwZhPiHHsdZLdOA73ndj/84veVaLhTvxCuU=

logger:
  level: VERY_VERBOSE

@@ -0,0 +1,9 @@
esphome:
  name: oversized-plaintext

host:

api:

logger:
  level: VERY_VERBOSE

@@ -0,0 +1,11 @@
esphome:
  name: oversized-noise

host:

api:
  encryption:
    key: N4Yle5YirwZhPiHHsdZLdOA73ndj/84veVaLhTvxCuU=

logger:
  level: VERY_VERBOSE

@@ -0,0 +1,9 @@
esphome:
  name: oversized-protobuf-plaintext

host:

api:

logger:
  level: VERY_VERBOSE

335
tests/integration/test_oversized_payloads.py
Normal file
@@ -0,0 +1,335 @@
"""Integration tests for oversized payloads and headers that should cause disconnection."""

from __future__ import annotations

import asyncio

import pytest

from .types import APIClientConnectedWithDisconnectFactory, RunCompiledFunction


@pytest.mark.asyncio
async def test_oversized_payload_plaintext(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected_with_disconnect: APIClientConnectedWithDisconnectFactory,
) -> None:
    """Test that oversized payloads (>100KiB) from client cause disconnection without crashing."""
    process_exited = False
    helper_log_found = False

    def check_logs(line: str) -> None:
        nonlocal process_exited, helper_log_found
        # Check for signs that the process exited/crashed
        if "Segmentation fault" in line or "core dumped" in line:
            process_exited = True
        # Check for HELPER_LOG message about message size exceeding maximum
        if (
            "[VV]" in line
            and "Bad packet: message size" in line
            and "exceeds maximum" in line
        ):
            helper_log_found = True

    async with run_compiled(yaml_config, line_callback=check_logs):
        async with api_client_connected_with_disconnect() as (client, disconnect_event):
            # Verify basic connection works first
            device_info = await client.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-plaintext"

            # Create an oversized payload (>100KiB)
            oversized_data = b"X" * (100 * 1024 + 1)  # 100KiB + 1 byte

            # Access the internal connection to send raw data
            frame_helper = client._connection._frame_helper
            # Create a message with oversized payload
            # Using message type 1 (DeviceInfoRequest) as an example
            message_type = 1
            frame_helper.write_packets([(message_type, oversized_data)], True)

            # Wait for the connection to be closed by ESPHome
            await asyncio.wait_for(disconnect_event.wait(), timeout=5.0)

        # After disconnection, verify process didn't crash
        assert not process_exited, "ESPHome process should not crash"
        # Verify we saw the expected HELPER_LOG message
        assert helper_log_found, (
            "Expected to see HELPER_LOG about message size exceeding maximum"
        )

        # Try to reconnect to verify the process is still running
        async with api_client_connected_with_disconnect() as (client2, _):
            device_info = await client2.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-plaintext"


@pytest.mark.asyncio
async def test_oversized_protobuf_message_id_plaintext(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected_with_disconnect: APIClientConnectedWithDisconnectFactory,
) -> None:
    """Test that protobuf messages with ID > UINT16_MAX cause disconnection without crashing.

    This tests the message type limit - message IDs must fit in a uint16_t (0-65535).
    """
    process_exited = False
    helper_log_found = False

    def check_logs(line: str) -> None:
        nonlocal process_exited, helper_log_found
        # Check for signs that the process exited/crashed
        if "Segmentation fault" in line or "core dumped" in line:
            process_exited = True
        # Check for HELPER_LOG message about message type exceeding maximum
        if (
            "[VV]" in line
            and "Bad packet: message type" in line
            and "exceeds maximum" in line
        ):
            helper_log_found = True

    async with run_compiled(yaml_config, line_callback=check_logs):
        async with api_client_connected_with_disconnect() as (client, disconnect_event):
            # Verify basic connection works first
            device_info = await client.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-protobuf-plaintext"

            # Access the internal connection to send raw message with large ID
            frame_helper = client._connection._frame_helper
            # Message ID that exceeds uint16_t limit (> 65535)
            large_message_id = 65536  # 2^16, exceeds UINT16_MAX
            # Small payload for the test
            payload = b"test"

            # This should cause disconnection due to oversized varint
            frame_helper.write_packets([(large_message_id, payload)], True)

            # Wait for the connection to be closed by ESPHome
            await asyncio.wait_for(disconnect_event.wait(), timeout=5.0)

        # After disconnection, verify process didn't crash
        assert not process_exited, "ESPHome process should not crash"
        # Verify we saw the expected HELPER_LOG message
        assert helper_log_found, (
            "Expected to see HELPER_LOG about message type exceeding maximum"
        )

        # Try to reconnect to verify the process is still running
        async with api_client_connected_with_disconnect() as (client2, _):
            device_info = await client2.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-protobuf-plaintext"


@pytest.mark.asyncio
async def test_oversized_payload_noise(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected_with_disconnect: APIClientConnectedWithDisconnectFactory,
) -> None:
    """Test that oversized payloads (>100KiB) from client cause disconnection without crashing with noise encryption."""
    noise_key = "N4Yle5YirwZhPiHHsdZLdOA73ndj/84veVaLhTvxCuU="
    process_exited = False
    cipherstate_failed = False

    def check_logs(line: str) -> None:
        nonlocal process_exited, cipherstate_failed
        # Check for signs that the process exited/crashed
        if "Segmentation fault" in line or "core dumped" in line:
            process_exited = True
        # Check for the expected warning about decryption failure
        if (
            "[W][api.connection" in line
            and "Reading failed CIPHERSTATE_DECRYPT_FAILED" in line
        ):
            cipherstate_failed = True

    async with run_compiled(yaml_config, line_callback=check_logs):
        async with api_client_connected_with_disconnect(noise_psk=noise_key) as (
            client,
            disconnect_event,
        ):
            # Verify basic connection works first
            device_info = await client.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-noise"

            # Create an oversized payload (>100KiB)
            oversized_data = b"Y" * (100 * 1024 + 1)  # 100KiB + 1 byte

            # Access the internal connection to send raw data
            frame_helper = client._connection._frame_helper
            # For noise connections, we still send through write_packets
            # but the frame helper will handle encryption
            # Using message type 1 (DeviceInfoRequest) as an example
            message_type = 1
            frame_helper.write_packets([(message_type, oversized_data)], True)

            # Wait for the connection to be closed by ESPHome
            await asyncio.wait_for(disconnect_event.wait(), timeout=5.0)

        # After disconnection, verify process didn't crash
        assert not process_exited, "ESPHome process should not crash"
        # Verify we saw the expected warning message
        assert cipherstate_failed, (
            "Expected to see warning about CIPHERSTATE_DECRYPT_FAILED"
        )

        # Try to reconnect to verify the process is still running
        async with api_client_connected_with_disconnect(noise_psk=noise_key) as (
            client2,
            _,
        ):
            device_info = await client2.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-noise"


@pytest.mark.asyncio
async def test_oversized_protobuf_message_id_noise(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected_with_disconnect: APIClientConnectedWithDisconnectFactory,
) -> None:
    """Test that the noise protocol handles unknown message types correctly.

    With noise encryption, message types are stored as uint16_t (2 bytes) after decryption.
    Unknown message types should be ignored without disconnecting, as ESPHome needs to
    read the full message to maintain encryption stream continuity.
    """
    noise_key = "N4Yle5YirwZhPiHHsdZLdOA73ndj/84veVaLhTvxCuU="
    process_exited = False

    def check_logs(line: str) -> None:
        nonlocal process_exited
        # Check for signs that the process exited/crashed
        if "Segmentation fault" in line or "core dumped" in line:
            process_exited = True

    async with run_compiled(yaml_config, line_callback=check_logs):
        async with api_client_connected_with_disconnect(noise_psk=noise_key) as (
            client,
            disconnect_event,
        ):
            # Verify basic connection works first
            device_info = await client.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-noise"

            # With noise, message types are uint16_t, so we test with an unknown but valid value
            frame_helper = client._connection._frame_helper

            # Test with an unknown message type (65535 is not used by ESPHome)
            unknown_message_id = 65535  # Valid uint16_t but unknown to ESPHome
            payload = b"test"

            # Send the unknown message type - ESPHome should read and ignore it
            frame_helper.write_packets([(unknown_message_id, payload)], True)

            # Give ESPHome a moment to process (but expect no disconnection)
            # The connection should stay alive as ESPHome ignores unknown message types
            with pytest.raises(asyncio.TimeoutError):
                await asyncio.wait_for(disconnect_event.wait(), timeout=0.5)

            # Connection should still be alive - unknown types are ignored, not fatal
            assert client._connection.is_connected, (
                "Connection should remain open for unknown message types"
            )

            # Verify we can still communicate by sending a valid request
            device_info2 = await client.device_info()
            assert device_info2 is not None
            assert device_info2.name == "oversized-noise"

        # After test, verify process didn't crash
        assert not process_exited, "ESPHome process should not crash"

        # Verify we can still reconnect
        async with api_client_connected_with_disconnect(noise_psk=noise_key) as (
            client2,
            _,
        ):
            device_info = await client2.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-noise"


@pytest.mark.asyncio
async def test_noise_corrupt_encrypted_frame(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected_with_disconnect: APIClientConnectedWithDisconnectFactory,
) -> None:
    """Test that noise protocol properly handles corrupt encrypted frames.

    Send a frame with valid size but corrupt encrypted content (garbage bytes).
    This should fail decryption and cause disconnection.
    """
    noise_key = "N4Yle5YirwZhPiHHsdZLdOA73ndj/84veVaLhTvxCuU="
    process_exited = False
    cipherstate_failed = False

    def check_logs(line: str) -> None:
        nonlocal process_exited, cipherstate_failed
        # Check for signs that the process exited/crashed
        if "Segmentation fault" in line or "core dumped" in line:
            process_exited = True
        # Check for the expected warning about decryption failure
        if (
            "[W][api.connection" in line
            and "Reading failed CIPHERSTATE_DECRYPT_FAILED" in line
        ):
            cipherstate_failed = True

    async with run_compiled(yaml_config, line_callback=check_logs):
        async with api_client_connected_with_disconnect(noise_psk=noise_key) as (
            client,
            disconnect_event,
        ):
            # Verify basic connection works first
            device_info = await client.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-noise"

            # Get the socket to send raw corrupt data
            socket = client._connection._socket

            # Send a corrupt noise frame directly to the socket
            # Format: [indicator=0x01][size_high][size_low][garbage_encrypted_data]
            # Size of 32 bytes (reasonable size for a noise frame with MAC)
            corrupt_frame = bytes(
                [
                    0x01,  # Noise indicator
                    0x00,  # Size high byte
                    0x20,  # Size low byte (32 bytes)
                ]
            ) + bytes(32)  # 32 bytes of zeros (invalid encrypted data)

            # Send the corrupt frame
            socket.sendall(corrupt_frame)

            # Wait for ESPHome to disconnect due to decryption failure
            await asyncio.wait_for(disconnect_event.wait(), timeout=5.0)

        # After disconnection, verify process didn't crash
        assert not process_exited, (
            "ESPHome process should not crash on corrupt encrypted frames"
        )
        # Verify we saw the expected warning message
        assert cipherstate_failed, (
            "Expected to see warning about CIPHERSTATE_DECRYPT_FAILED"
        )

        # Verify we can still reconnect after handling the corrupt frame
        async with api_client_connected_with_disconnect(noise_psk=noise_key) as (
            client2,
            _,
        ):
            device_info = await client2.device_info()
            assert device_info is not None
            assert device_info.name == "oversized-noise"

@@ -54,3 +54,17 @@ class APIClientConnectedFactory(Protocol):
        client_info: str = "integration-test",
        timeout: float = 30,
    ) -> AbstractAsyncContextManager[APIClient]: ...


class APIClientConnectedWithDisconnectFactory(Protocol):
    """Protocol for connected API client factory that returns disconnect event."""

    def __call__(  # noqa: E704
        self,
        address: str = "localhost",
        port: int | None = None,
        password: str = "",
        noise_psk: str | None = None,
        client_info: str = "integration-test",
        timeout: float = 30,
    ) -> AbstractAsyncContextManager[tuple[APIClient, asyncio.Event]]: ...

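Reviewer note: typing.Protocol checks these factories structurally, so the conftest function never has to inherit from the Protocol class. Minimal illustration of the mechanism (names are hypothetical):

from typing import Protocol

class Greeter(Protocol):
    def __call__(self, name: str = "world") -> str: ...

def greet(name: str = "world") -> str:
    return f"hello {name}"

g: Greeter = greet  # accepted: the signatures match structurally
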
@@ -87,3 +87,17 @@ def mock_run_external_command() -> Generator[Mock, None, None]:
    """Mock run_external_command for platformio_api."""
    with patch("esphome.platformio_api.run_external_command") as mock:
        yield mock


@pytest.fixture
def mock_run_git_command() -> Generator[Mock, None, None]:
    """Mock run_git_command for git module."""
    with patch("esphome.git.run_git_command") as mock:
        yield mock


@pytest.fixture
def mock_get_idedata() -> Generator[Mock, None, None]:
    """Mock get_idedata for platformio_api."""
    with patch("esphome.platformio_api.get_idedata") as mock:
        yield mock

246
tests/unit_tests/test_git.py
Normal file
@@ -0,0 +1,246 @@
"""Tests for git.py module."""

from datetime import datetime, timedelta
import hashlib
import os
from pathlib import Path
from unittest.mock import Mock

from esphome import git
from esphome.core import CORE, TimePeriodSeconds


def test_clone_or_update_with_never_refresh(
    tmp_path: Path, mock_run_git_command: Mock
) -> None:
    """Test that NEVER_REFRESH skips updates for existing repos."""
    # Set up CORE.config_path so data_dir uses tmp_path
    CORE.config_path = tmp_path / "test.yaml"

    # Compute the expected repo directory path
    url = "https://github.com/test/repo"
    ref = None
    key = f"{url}@{ref}"
    domain = "test"

    # Compute hash-based directory name (matching _compute_destination_path logic)
    h = hashlib.new("sha256")
    h.update(key.encode())
    repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]

    # Create the git repo directory structure
    repo_dir.mkdir(parents=True)
    git_dir = repo_dir / ".git"
    git_dir.mkdir()

    # Create FETCH_HEAD file with current timestamp
    fetch_head = git_dir / "FETCH_HEAD"
    fetch_head.write_text("test")

    # Call with NEVER_REFRESH
    result_dir, revert = git.clone_or_update(
        url=url,
        ref=ref,
        refresh=git.NEVER_REFRESH,
        domain=domain,
    )

    # Should NOT call git commands since NEVER_REFRESH and repo exists
    mock_run_git_command.assert_not_called()
    assert result_dir == repo_dir
    assert revert is None


def test_clone_or_update_with_refresh_updates_old_repo(
    tmp_path: Path, mock_run_git_command: Mock
) -> None:
    """Test that refresh triggers update for old repos."""
    # Set up CORE.config_path so data_dir uses tmp_path
    CORE.config_path = tmp_path / "test.yaml"

    # Compute the expected repo directory path
    url = "https://github.com/test/repo"
    ref = None
    key = f"{url}@{ref}"
    domain = "test"

    # Compute hash-based directory name (matching _compute_destination_path logic)
    h = hashlib.new("sha256")
    h.update(key.encode())
    repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]

    # Create the git repo directory structure
    repo_dir.mkdir(parents=True)
    git_dir = repo_dir / ".git"
    git_dir.mkdir()

    # Create FETCH_HEAD file with old timestamp (2 days ago)
    fetch_head = git_dir / "FETCH_HEAD"
    fetch_head.write_text("test")
    old_time = datetime.now() - timedelta(days=2)
    fetch_head.touch()  # Create the file
    # Set modification time to 2 days ago
    os.utime(fetch_head, (old_time.timestamp(), old_time.timestamp()))

    # Mock git command responses
    mock_run_git_command.return_value = "abc123"  # SHA for rev-parse

    # Call with refresh=1d (1 day)
    refresh = TimePeriodSeconds(days=1)
    result_dir, revert = git.clone_or_update(
        url=url,
        ref=ref,
        refresh=refresh,
        domain=domain,
    )

    # Should call git fetch and update commands since repo is older than refresh
    assert mock_run_git_command.called
    # Check for fetch command
    fetch_calls = [
        call
        for call in mock_run_git_command.call_args_list
        if len(call[0]) > 0 and "fetch" in call[0][0]
    ]
    assert len(fetch_calls) > 0


def test_clone_or_update_with_refresh_skips_fresh_repo(
    tmp_path: Path, mock_run_git_command: Mock
) -> None:
    """Test that refresh doesn't update fresh repos."""
    # Set up CORE.config_path so data_dir uses tmp_path
    CORE.config_path = tmp_path / "test.yaml"

    # Compute the expected repo directory path
    url = "https://github.com/test/repo"
    ref = None
    key = f"{url}@{ref}"
    domain = "test"

    # Compute hash-based directory name (matching _compute_destination_path logic)
    h = hashlib.new("sha256")
    h.update(key.encode())
    repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]

    # Create the git repo directory structure
    repo_dir.mkdir(parents=True)
    git_dir = repo_dir / ".git"
    git_dir.mkdir()

    # Create FETCH_HEAD file with recent timestamp (1 hour ago)
    fetch_head = git_dir / "FETCH_HEAD"
    fetch_head.write_text("test")
    recent_time = datetime.now() - timedelta(hours=1)
    fetch_head.touch()  # Create the file
    # Set modification time to 1 hour ago
    os.utime(fetch_head, (recent_time.timestamp(), recent_time.timestamp()))

    # Call with refresh=1d (1 day)
    refresh = TimePeriodSeconds(days=1)
    result_dir, revert = git.clone_or_update(
        url=url,
        ref=ref,
        refresh=refresh,
        domain=domain,
    )

    # Should NOT call git fetch since repo is fresh
    mock_run_git_command.assert_not_called()
    assert result_dir == repo_dir
    assert revert is None


def test_clone_or_update_clones_missing_repo(
    tmp_path: Path, mock_run_git_command: Mock
) -> None:
    """Test that missing repos are cloned regardless of refresh setting."""
    # Set up CORE.config_path so data_dir uses tmp_path
    CORE.config_path = tmp_path / "test.yaml"

    # Compute the expected repo directory path
    url = "https://github.com/test/repo"
    ref = None
    key = f"{url}@{ref}"
    domain = "test"

    # Compute hash-based directory name (matching _compute_destination_path logic)
    h = hashlib.new("sha256")
    h.update(key.encode())
    repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]

    # Create base directory but NOT the repo itself
    base_dir = tmp_path / ".esphome" / domain
    base_dir.mkdir(parents=True)
    # repo_dir should NOT exist
    assert not repo_dir.exists()

    # Test with NEVER_REFRESH - should still clone since repo doesn't exist
    result_dir, revert = git.clone_or_update(
        url=url,
        ref=ref,
        refresh=git.NEVER_REFRESH,
        domain=domain,
    )

    # Should call git clone
    assert mock_run_git_command.called
    clone_calls = [
        call
        for call in mock_run_git_command.call_args_list
        if len(call[0]) > 0 and "clone" in call[0][0]
    ]
    assert len(clone_calls) > 0


def test_clone_or_update_with_none_refresh_always_updates(
    tmp_path: Path, mock_run_git_command: Mock
) -> None:
    """Test that refresh=None always updates existing repos."""
    # Set up CORE.config_path so data_dir uses tmp_path
    CORE.config_path = tmp_path / "test.yaml"

    # Compute the expected repo directory path
    url = "https://github.com/test/repo"
    ref = None
    key = f"{url}@{ref}"
    domain = "test"

    # Compute hash-based directory name (matching _compute_destination_path logic)
    h = hashlib.new("sha256")
    h.update(key.encode())
    repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]

    # Create the git repo directory structure
    repo_dir.mkdir(parents=True)
    git_dir = repo_dir / ".git"
    git_dir.mkdir()

    # Create FETCH_HEAD file with very recent timestamp (1 second ago)
    fetch_head = git_dir / "FETCH_HEAD"
    fetch_head.write_text("test")
    recent_time = datetime.now() - timedelta(seconds=1)
    fetch_head.touch()  # Create the file
    # Set modification time to 1 second ago
    os.utime(fetch_head, (recent_time.timestamp(), recent_time.timestamp()))

    # Mock git command responses
    mock_run_git_command.return_value = "abc123"  # SHA for rev-parse

    # Call with refresh=None (default behavior)
    result_dir, revert = git.clone_or_update(
        url=url,
        ref=ref,
        refresh=None,
        domain=domain,
    )

    # Should call git fetch and update commands since refresh=None means always update
    assert mock_run_git_command.called
    # Check for fetch command
    fetch_calls = [
        call
        for call in mock_run_git_command.call_args_list
        if len(call[0]) > 0 and "fetch" in call[0][0]
    ]
    assert len(fetch_calls) > 0

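Reviewer note: all of these tests recompute the cache directory the same way, mirroring (per their comments) git._compute_destination_path. A condensed sketch of the computation as the tests assume it; the helper itself is not shown in this diff:

import hashlib
from pathlib import Path

def expected_repo_dir(base: Path, domain: str, url: str, ref: str | None) -> Path:
    key = f"{url}@{ref}"
    h = hashlib.new("sha256")
    h.update(key.encode())
    return base / ".esphome" / domain / h.hexdigest()[:8]
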
@@ -5,16 +5,19 @@ from __future__ import annotations
from collections.abc import Generator
from dataclasses import dataclass
from pathlib import Path
import re
from typing import Any
from unittest.mock import MagicMock, Mock, patch

import pytest
from pytest import CaptureFixture

from esphome import platformio_api
from esphome.__main__ import (
    Purpose,
    choose_upload_log_host,
    command_rename,
    command_update_all,
    command_wizard,
    get_port_type,
    has_ip_address,

@@ -26,7 +29,9 @@ from esphome.__main__ import (
    mqtt_get_ip,
    show_logs,
    upload_program,
    upload_using_esptool,
)
from esphome.components.esp32.const import KEY_ESP32, KEY_VARIANT, VARIANT_ESP32
from esphome.const import (
    CONF_API,
    CONF_BROKER,

@@ -55,6 +60,17 @@ from esphome.const import (
from esphome.core import CORE, EsphomeError


def strip_ansi_codes(text: str) -> str:
    """Remove ANSI escape codes from text.

    This helps make test assertions cleaner by removing color codes and other
    terminal formatting that can make tests brittle.
    """
    # Pattern to match ANSI escape sequences
    ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
    return ansi_escape.sub("", text)

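Reviewer note: strip_ansi_codes() is what lets the assertions further down match colored output literally. For example:

assert strip_ansi_codes("\x1b[32mSUCCESS\x1b[0m") == "SUCCESS"
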
@dataclass
class MockSerialPort:
    """Mock serial port for testing.

@@ -207,6 +223,14 @@ def mock_run_external_process() -> Generator[Mock]:
    yield mock


@pytest.fixture
def mock_run_external_command() -> Generator[Mock]:
    """Mock run_external_command for testing."""
    with patch("esphome.__main__.run_external_command") as mock:
        mock.return_value = 0  # Default to success
        yield mock


def test_choose_upload_log_host_with_string_default() -> None:
    """Test with a single string default device."""
    setup_core()

@@ -805,6 +829,122 @@ def test_upload_program_serial_esp8266_with_file(
    )


def test_upload_using_esptool_path_conversion(
    tmp_path: Path,
    mock_run_external_command: Mock,
    mock_get_idedata: Mock,
) -> None:
    """Test upload_using_esptool properly converts Path objects to strings for esptool.

    This test ensures that img.path (Path object) is converted to string before
    passing to esptool, preventing AttributeError.
    """
    setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test")

    # Set up ESP32-specific data required by get_esp32_variant()
    CORE.data[KEY_ESP32] = {KEY_VARIANT: VARIANT_ESP32}

    # Create mock IDEData with Path objects
    mock_idedata = MagicMock(spec=platformio_api.IDEData)
    mock_idedata.firmware_bin_path = tmp_path / "firmware.bin"
    mock_idedata.extra_flash_images = [
        platformio_api.FlashImage(path=tmp_path / "bootloader.bin", offset="0x1000"),
        platformio_api.FlashImage(path=tmp_path / "partitions.bin", offset="0x8000"),
    ]

    mock_get_idedata.return_value = mock_idedata

    # Create the actual firmware files so they exist
    (tmp_path / "firmware.bin").touch()
    (tmp_path / "bootloader.bin").touch()
    (tmp_path / "partitions.bin").touch()

    config = {CONF_ESPHOME: {"platformio_options": {}}}

    # Call upload_using_esptool without custom file argument
    result = upload_using_esptool(config, "/dev/ttyUSB0", None, None)

    assert result == 0

    # Verify that run_external_command was called
    assert mock_run_external_command.call_count == 1

    # Get the actual call arguments
    call_args = mock_run_external_command.call_args[0]

    # The first argument should be esptool.main function,
    # followed by the command arguments
    assert len(call_args) > 1

    # Find the indices of the flash image arguments
    # They should come after "write-flash" and "-z"
    cmd_list = list(call_args[1:])  # Skip the esptool.main function

    # Verify all paths are strings, not Path objects
    # The firmware and flash images should be at specific positions
    write_flash_idx = cmd_list.index("write-flash")

    # After write-flash we have: -z, --flash-size, detect, then offset/path pairs
    # Check firmware at offset 0x10000 (ESP32)
    firmware_offset_idx = write_flash_idx + 4
    assert cmd_list[firmware_offset_idx] == "0x10000"
    firmware_path = cmd_list[firmware_offset_idx + 1]
    assert isinstance(firmware_path, str)
    assert firmware_path.endswith("firmware.bin")

    # Check bootloader
    bootloader_offset_idx = firmware_offset_idx + 2
    assert cmd_list[bootloader_offset_idx] == "0x1000"
    bootloader_path = cmd_list[bootloader_offset_idx + 1]
    assert isinstance(bootloader_path, str)
    assert bootloader_path.endswith("bootloader.bin")

    # Check partitions
    partitions_offset_idx = bootloader_offset_idx + 2
    assert cmd_list[partitions_offset_idx] == "0x8000"
    partitions_path = cmd_list[partitions_offset_idx + 1]
    assert isinstance(partitions_path, str)
    assert partitions_path.endswith("partitions.bin")


def test_upload_using_esptool_with_file_path(
    tmp_path: Path,
    mock_run_external_command: Mock,
) -> None:
    """Test upload_using_esptool with a custom file that's a Path object."""
    setup_core(platform=PLATFORM_ESP8266, tmp_path=tmp_path, name="test")

    # Create a test firmware file
    firmware_file = tmp_path / "custom_firmware.bin"
    firmware_file.touch()

    config = {CONF_ESPHOME: {"platformio_options": {}}}

    # Call with a Path object as the file argument (though usually it's a string)
    result = upload_using_esptool(config, "/dev/ttyUSB0", str(firmware_file), None)

    assert result == 0

    # Verify that run_external_command was called
    mock_run_external_command.assert_called_once()

    # Get the actual call arguments
    call_args = mock_run_external_command.call_args[0]
    cmd_list = list(call_args[1:])  # Skip the esptool.main function

    # Find the firmware path in the command
    write_flash_idx = cmd_list.index("write-flash")

    # For custom file, it should be at offset 0x0
    firmware_offset_idx = write_flash_idx + 4
    assert cmd_list[firmware_offset_idx] == "0x0"
    firmware_path = cmd_list[firmware_offset_idx + 1]

    # Verify it's a string, not a Path object
    assert isinstance(firmware_path, str)
    assert firmware_path.endswith("custom_firmware.bin")


@pytest.mark.parametrize(
    "platform,device",
    [

@@ -1545,3 +1685,171 @@ esp32:

    captured = capfd.readouterr()
    assert "Rename failed" in captured.out


def test_command_update_all_path_string_conversion(
    tmp_path: Path,
    mock_run_external_process: Mock,
    capfd: CaptureFixture[str],
) -> None:
    """Test that command_update_all properly converts Path objects to strings in output."""
    yaml1 = tmp_path / "device1.yaml"
    yaml1.write_text("""
esphome:
  name: device1

esp32:
  board: nodemcu-32s
""")

    yaml2 = tmp_path / "device2.yaml"
    yaml2.write_text("""
esphome:
  name: device2

esp8266:
  board: nodemcuv2
""")

    setup_core(tmp_path=tmp_path)
    mock_run_external_process.return_value = 0

    assert command_update_all(MockArgs(configuration=[str(tmp_path)])) == 0

    captured = capfd.readouterr()
    clean_output = strip_ansi_codes(captured.out)

    # Check that Path objects were properly converted to strings
    # The output should contain file paths without causing TypeError
    assert "device1.yaml" in clean_output
    assert "device2.yaml" in clean_output
    assert "SUCCESS" in clean_output
    assert "SUMMARY" in clean_output

    # Verify run_external_process was called for each file
    assert mock_run_external_process.call_count == 2


def test_command_update_all_with_failures(
    tmp_path: Path,
    mock_run_external_process: Mock,
    capfd: CaptureFixture[str],
) -> None:
    """Test command_update_all handles mixed success/failure cases properly."""
    yaml1 = tmp_path / "success_device.yaml"
    yaml1.write_text("""
esphome:
  name: success_device

esp32:
  board: nodemcu-32s
""")

    yaml2 = tmp_path / "failed_device.yaml"
    yaml2.write_text("""
esphome:
  name: failed_device

esp8266:
  board: nodemcuv2
""")

    setup_core(tmp_path=tmp_path)

    # Mock mixed results - first succeeds, second fails
    mock_run_external_process.side_effect = [0, 1]

    # Should return 1 (failure) since one device failed
    assert command_update_all(MockArgs(configuration=[str(tmp_path)])) == 1

    captured = capfd.readouterr()
    clean_output = strip_ansi_codes(captured.out)

    # Check that both success and failure are properly displayed
    assert "SUCCESS" in clean_output
    assert "ERROR" in clean_output or "FAILED" in clean_output
    assert "SUMMARY" in clean_output

    # Files are processed in alphabetical order, so we need to check which one succeeded/failed
    # The mock_run_external_process.side_effect = [0, 1] applies to files in alphabetical order
    # So "failed_device.yaml" gets 0 (success) and "success_device.yaml" gets 1 (failure)
    assert "failed_device.yaml: SUCCESS" in clean_output
    assert "success_device.yaml: FAILED" in clean_output


def test_command_update_all_empty_directory(
    tmp_path: Path,
    mock_run_external_process: Mock,
    capfd: CaptureFixture[str],
) -> None:
    """Test command_update_all with an empty directory (no YAML files)."""
    setup_core(tmp_path=tmp_path)

    assert command_update_all(MockArgs(configuration=[str(tmp_path)])) == 0
    mock_run_external_process.assert_not_called()

    captured = capfd.readouterr()
    clean_output = strip_ansi_codes(captured.out)

    assert "SUMMARY" in clean_output


def test_command_update_all_single_file(
    tmp_path: Path,
    mock_run_external_process: Mock,
    capfd: CaptureFixture[str],
) -> None:
    """Test command_update_all with a single YAML file specified."""
    yaml_file = tmp_path / "single_device.yaml"
    yaml_file.write_text("""
esphome:
  name: single_device

esp32:
  board: nodemcu-32s
""")

    setup_core(tmp_path=tmp_path)
    mock_run_external_process.return_value = 0

    assert command_update_all(MockArgs(configuration=[str(yaml_file)])) == 0

    captured = capfd.readouterr()
    clean_output = strip_ansi_codes(captured.out)

    assert "single_device.yaml" in clean_output
    assert "SUCCESS" in clean_output
    mock_run_external_process.assert_called_once()


def test_command_update_all_path_formatting_in_color_calls(
    tmp_path: Path,
    mock_run_external_process: Mock,
    capfd: CaptureFixture[str],
) -> None:
    """Test that Path objects are properly converted when passed to color() function."""
    yaml_file = tmp_path / "test-device_123.yaml"
    yaml_file.write_text("""
esphome:
  name: test-device_123

esp32:
  board: nodemcu-32s
""")

    setup_core(tmp_path=tmp_path)
    mock_run_external_process.return_value = 0

    assert command_update_all(MockArgs(configuration=[str(tmp_path)])) == 0

    captured = capfd.readouterr()
    clean_output = strip_ansi_codes(captured.out)

    assert "test-device_123.yaml" in clean_output
    assert "Updating" in clean_output
    assert "SUCCESS" in clean_output
    assert "SUMMARY" in clean_output

    # Should not have any Python error messages
    assert "TypeError" not in clean_output
    assert "can only concatenate str" not in clean_output
Block a user