Merge remote-tracking branch 'upstream/dev' into zwave_proxy
.github/workflows/ci.yml (vendored)
@@ -105,6 +105,7 @@ jobs:
           script/ci-custom.py
           script/build_codeowners.py --check
           script/build_language_schema.py --check
+          script/generate-esp32-boards.py --check

   pytest:
     name: Run pytest
@@ -113,7 +113,7 @@ void ADE7880::update() {
   if (this->channel_a_ != nullptr) {
     auto *chan = this->channel_a_;
     this->update_sensor_from_s24zp_register16_(chan->current, AIRMS, [](float val) { return val / 100000.0f; });
-    this->update_sensor_from_s24zp_register16_(chan->voltage, BVRMS, [](float val) { return val / 10000.0f; });
+    this->update_sensor_from_s24zp_register16_(chan->voltage, AVRMS, [](float val) { return val / 10000.0f; });
     this->update_sensor_from_s24zp_register16_(chan->active_power, AWATT, [](float val) { return val / 100.0f; });
     this->update_sensor_from_s24zp_register16_(chan->apparent_power, AVA, [](float val) { return val / 100.0f; });
     this->update_sensor_from_s16_register16_(chan->power_factor, APF,
@@ -2,6 +2,7 @@ import esphome.codegen as cg
 from esphome.components import i2c, sensor
 import esphome.config_validation as cv
 from esphome.const import (
+    CONF_CLEAR,
     CONF_GAIN,
     CONF_ID,
     DEVICE_CLASS_ILLUMINANCE,
@@ -29,7 +30,6 @@ CONF_F5 = "f5"
 CONF_F6 = "f6"
 CONF_F7 = "f7"
 CONF_F8 = "f8"
-CONF_CLEAR = "clear"
 CONF_NIR = "nir"

 UNIT_COUNTS = "#"
@@ -1504,6 +1504,10 @@ BOARDS = {
         "name": "BPI-Bit",
         "variant": VARIANT_ESP32,
     },
+    "bpi-centi-s3": {
+        "name": "BPI-Centi-S3",
+        "variant": VARIANT_ESP32S3,
+    },
     "bpi_leaf_s3": {
         "name": "BPI-Leaf-S3",
         "variant": VARIANT_ESP32S3,
@@ -1664,10 +1668,46 @@ BOARDS = {
         "name": "Espressif ESP32-S3-DevKitC-1-N8 (8 MB QD, No PSRAM)",
         "variant": VARIANT_ESP32S3,
     },
+    "esp32-s3-devkitc-1-n32r8v": {
+        "name": "Espressif ESP32-S3-DevKitC-1-N32R8V (32 MB Flash Octal, 8 MB PSRAM Octal)",
+        "variant": VARIANT_ESP32S3,
+    },
+    "esp32-s3-devkitc1-n16r16": {
+        "name": "Espressif ESP32-S3-DevKitC-1-N16R16V (16 MB Flash Quad, 16 MB PSRAM Octal)",
+        "variant": VARIANT_ESP32S3,
+    },
+    "esp32-s3-devkitc1-n16r2": {
+        "name": "Espressif ESP32-S3-DevKitC-1-N16R2 (16 MB Flash Quad, 2 MB PSRAM Quad)",
+        "variant": VARIANT_ESP32S3,
+    },
+    "esp32-s3-devkitc1-n16r8": {
+        "name": "Espressif ESP32-S3-DevKitC-1-N16R8V (16 MB Flash Quad, 8 MB PSRAM Octal)",
+        "variant": VARIANT_ESP32S3,
+    },
+    "esp32-s3-devkitc1-n4r2": {
+        "name": "Espressif ESP32-S3-DevKitC-1-N4R2 (4 MB Flash Quad, 2 MB PSRAM Quad)",
+        "variant": VARIANT_ESP32S3,
+    },
+    "esp32-s3-devkitc1-n4r8": {
+        "name": "Espressif ESP32-S3-DevKitC-1-N4R8 (4 MB Flash Quad, 8 MB PSRAM Octal)",
+        "variant": VARIANT_ESP32S3,
+    },
+    "esp32-s3-devkitc1-n8r2": {
+        "name": "Espressif ESP32-S3-DevKitC-1-N8R2 (8 MB Flash Quad, 2 MB PSRAM quad)",
+        "variant": VARIANT_ESP32S3,
+    },
+    "esp32-s3-devkitc1-n8r8": {
+        "name": "Espressif ESP32-S3-DevKitC-1-N8R8 (8 MB Flash Quad, 8 MB PSRAM Octal)",
+        "variant": VARIANT_ESP32S3,
+    },
     "esp32-s3-devkitm-1": {
         "name": "Espressif ESP32-S3-DevKitM-1",
         "variant": VARIANT_ESP32S3,
     },
+    "esp32-s3-fh4r2": {
+        "name": "Espressif ESP32-S3-FH4R2 (4 MB QD, 2MB PSRAM)",
+        "variant": VARIANT_ESP32S3,
+    },
     "esp32-solo1": {
         "name": "Espressif Generic ESP32-solo1 4M Flash",
         "variant": VARIANT_ESP32,
@@ -1764,6 +1804,10 @@ BOARDS = {
         "name": "Franzininho WiFi MSC",
         "variant": VARIANT_ESP32S2,
     },
+    "freenove-esp32-s3-n8r8": {
+        "name": "Freenove ESP32-S3 WROOM N8R8 (8MB Flash / 8MB PSRAM)",
+        "variant": VARIANT_ESP32S3,
+    },
     "freenove_esp32_s3_wroom": {
         "name": "Freenove ESP32-S3 WROOM N8R8 (8MB Flash / 8MB PSRAM)",
         "variant": VARIANT_ESP32S3,
@@ -1964,6 +2008,10 @@ BOARDS = {
         "name": "M5Stack AtomS3",
         "variant": VARIANT_ESP32S3,
     },
+    "m5stack-atoms3u": {
+        "name": "M5Stack AtomS3U",
+        "variant": VARIANT_ESP32S3,
+    },
     "m5stack-core-esp32": {
         "name": "M5Stack Core ESP32",
         "variant": VARIANT_ESP32,
@@ -2084,6 +2132,10 @@ BOARDS = {
         "name": "Ai-Thinker NodeMCU-32S2 (ESP-12K)",
         "variant": VARIANT_ESP32S2,
     },
+    "nologo_esp32c3_super_mini": {
+        "name": "Nologo ESP32C3 SuperMini",
+        "variant": VARIANT_ESP32C3,
+    },
     "nscreen-32": {
         "name": "YeaCreate NSCREEN-32",
         "variant": VARIANT_ESP32,
@@ -2192,6 +2244,10 @@ BOARDS = {
         "name": "SparkFun LoRa Gateway 1-Channel",
         "variant": VARIANT_ESP32,
     },
+    "sparkfun_pro_micro_esp32c3": {
+        "name": "SparkFun Pro Micro ESP32-C3",
+        "variant": VARIANT_ESP32C3,
+    },
     "sparkfun_qwiic_pocket_esp32c6": {
         "name": "SparkFun ESP32-C6 Qwiic Pocket",
         "variant": VARIANT_ESP32C6,
@@ -2256,6 +2312,14 @@ BOARDS = {
         "name": "Turta IoT Node",
         "variant": VARIANT_ESP32,
     },
+    "um_bling": {
+        "name": "Unexpected Maker BLING!",
+        "variant": VARIANT_ESP32S3,
+    },
+    "um_edges3_d": {
+        "name": "Unexpected Maker EDGES3[D]",
+        "variant": VARIANT_ESP32S3,
+    },
     "um_feathers2": {
         "name": "Unexpected Maker FeatherS2",
         "variant": VARIANT_ESP32S2,
@@ -2268,10 +2332,18 @@ BOARDS = {
         "name": "Unexpected Maker FeatherS3",
         "variant": VARIANT_ESP32S3,
     },
+    "um_feathers3_neo": {
+        "name": "Unexpected Maker FeatherS3 Neo",
+        "variant": VARIANT_ESP32S3,
+    },
     "um_nanos3": {
         "name": "Unexpected Maker NanoS3",
         "variant": VARIANT_ESP32S3,
     },
+    "um_omgs3": {
+        "name": "Unexpected Maker OMGS3",
+        "variant": VARIANT_ESP32S3,
+    },
     "um_pros3": {
         "name": "Unexpected Maker PROS3",
         "variant": VARIANT_ESP32S3,
@@ -2280,6 +2352,14 @@ BOARDS = {
         "name": "Unexpected Maker RMP",
         "variant": VARIANT_ESP32S2,
     },
+    "um_squixl": {
+        "name": "Unexpected Maker SQUiXL",
+        "variant": VARIANT_ESP32S3,
+    },
+    "um_tinyc6": {
+        "name": "Unexpected Maker TinyC6",
+        "variant": VARIANT_ESP32C6,
+    },
     "um_tinys2": {
         "name": "Unexpected Maker TinyS2",
         "variant": VARIANT_ESP32S2,
@@ -2401,3 +2481,4 @@ BOARDS = {
         "variant": VARIANT_ESP32S3,
     },
 }
+# DO NOT ADD ANYTHING BELOW THIS LINE
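
Note (illustration, not part of the diff): every boards.py hunk above adds entries of the same shape — a PlatformIO board id mapping to a display name and a variant constant. A minimal sketch of that structure and a typical lookup, with plain strings standing in for esphome's VARIANT_* constants:

    # Sketch only: the variant values are stand-in strings, not esphome's real constants.
    BOARDS = {
        "um_tinyc6": {"name": "Unexpected Maker TinyC6", "variant": "ESP32C6"},
        "esp32-s3-fh4r2": {"name": "Espressif ESP32-S3-FH4R2 (4 MB QD, 2MB PSRAM)", "variant": "ESP32S3"},
    }

    # Typical lookup: map a configured board id to its chip variant.
    print(BOARDS["esp32-s3-fh4r2"]["variant"])  # -> ESP32S3
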
@@ -77,6 +77,13 @@ ETHERNET_TYPES = {
     "DM9051": EthernetType.ETHERNET_TYPE_DM9051,
 }

+# PHY types that need compile-time defines for conditional compilation
+_PHY_TYPE_TO_DEFINE = {
+    "KSZ8081": "USE_ETHERNET_KSZ8081",
+    "KSZ8081RNA": "USE_ETHERNET_KSZ8081",
+    # Add other PHY types here only if they need conditional compilation
+}
+
 SPI_ETHERNET_TYPES = ["W5500", "DM9051"]
 SPI_ETHERNET_DEFAULT_POLLING_INTERVAL = TimePeriodMilliseconds(milliseconds=10)

@@ -345,6 +352,10 @@ async def to_code(config):
     if CONF_MANUAL_IP in config:
         cg.add(var.set_manual_ip(manual_ip(config[CONF_MANUAL_IP])))

+    # Add compile-time define for PHY types with specific code
+    if phy_define := _PHY_TYPE_TO_DEFINE.get(config[CONF_TYPE]):
+        cg.add_define(phy_define)
+
     cg.add_define("USE_ETHERNET")

     # Disable WiFi when using Ethernet to save memory
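
Note (illustration, not part of the diff): the to_code() hunk pairs with the _PHY_TYPE_TO_DEFINE table added above, so a compile-time define is only emitted for PHY types that actually need conditional compilation. A self-contained sketch of the same lookup pattern; add_define below is a stand-in for esphome's cg.add_define:

    _PHY_TYPE_TO_DEFINE = {
        "KSZ8081": "USE_ETHERNET_KSZ8081",
        "KSZ8081RNA": "USE_ETHERNET_KSZ8081",
    }

    emitted: list[str] = []

    def add_define(name: str) -> None:
        # Stand-in for cg.add_define(); just records the define.
        emitted.append(name)

    def apply_phy_define(phy_type: str) -> None:
        # Only PHY types listed in the table produce a define.
        if phy_define := _PHY_TYPE_TO_DEFINE.get(phy_type):
            add_define(phy_define)

    apply_phy_define("KSZ8081RNA")  # emits USE_ETHERNET_KSZ8081
    apply_phy_define("LAN8720")     # emits nothing
    assert emitted == ["USE_ETHERNET_KSZ8081"]
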
@@ -229,10 +229,12 @@ void EthernetComponent::setup() {
   ESPHL_ERROR_CHECK(err, "ETH driver install error");

 #ifndef USE_ETHERNET_SPI
+#ifdef USE_ETHERNET_KSZ8081
   if (this->type_ == ETHERNET_TYPE_KSZ8081RNA && this->clk_mode_ == EMAC_CLK_OUT) {
     // KSZ8081RNA default is incorrect. It expects a 25MHz clock instead of the 50MHz we provide.
     this->ksz8081_set_clock_reference_(mac);
   }
+#endif  // USE_ETHERNET_KSZ8081

   for (const auto &phy_register : this->phy_registers_) {
     this->write_phy_register_(mac, phy_register);
@@ -721,6 +723,7 @@ bool EthernetComponent::powerdown() {

 #ifndef USE_ETHERNET_SPI

+#ifdef USE_ETHERNET_KSZ8081
 constexpr uint8_t KSZ80XX_PC2R_REG_ADDR = 0x1F;

 void EthernetComponent::ksz8081_set_clock_reference_(esp_eth_mac_t *mac) {
@@ -749,6 +752,7 @@ void EthernetComponent::ksz8081_set_clock_reference_(esp_eth_mac_t *mac) {
     ESP_LOGVV(TAG, "KSZ8081 PHY Control 2: %s", format_hex_pretty((u_int8_t *) &phy_control_2, 2).c_str());
   }
 }
+#endif  // USE_ETHERNET_KSZ8081

 void EthernetComponent::write_phy_register_(esp_eth_mac_t *mac, PHYRegister register_data) {
   esp_err_t err;
@@ -104,8 +104,10 @@ class EthernetComponent : public Component {
   void start_connect_();
   void finish_connect_();
   void dump_connect_params_();
+#ifdef USE_ETHERNET_KSZ8081
   /// @brief Set `RMII Reference Clock Select` bit for KSZ8081.
   void ksz8081_set_clock_reference_(esp_eth_mac_t *mac);
+#endif
   /// @brief Set arbitratry PHY registers from config.
   void write_phy_register_(esp_eth_mac_t *mac, PHYRegister register_data);

@@ -8,7 +8,9 @@ namespace json {

 static const char *const TAG = "json";

+#ifdef USE_PSRAM
 // Build an allocator for the JSON Library using the RAMAllocator class
+// This is only compiled when PSRAM is enabled
 struct SpiRamAllocator : ArduinoJson::Allocator {
   void *allocate(size_t size) override { return this->allocator_.allocate(size); }

@@ -29,11 +31,16 @@ struct SpiRamAllocator : ArduinoJson::Allocator {
  protected:
   RAMAllocator<uint8_t> allocator_{RAMAllocator<uint8_t>(RAMAllocator<uint8_t>::NONE)};
 };
+#endif

 std::string build_json(const json_build_t &f) {
   // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
+#ifdef USE_PSRAM
   auto doc_allocator = SpiRamAllocator();
   JsonDocument json_document(&doc_allocator);
+#else
+  JsonDocument json_document;
+#endif
   if (json_document.overflowed()) {
     ESP_LOGE(TAG, "Could not allocate memory for JSON document!");
     return "{}";
@@ -52,8 +59,12 @@ std::string build_json(const json_build_t &f) {

 bool parse_json(const std::string &data, const json_parse_t &f) {
   // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
+#ifdef USE_PSRAM
   auto doc_allocator = SpiRamAllocator();
   JsonDocument json_document(&doc_allocator);
+#else
+  JsonDocument json_document;
+#endif
   if (json_document.overflowed()) {
     ESP_LOGE(TAG, "Could not allocate memory for JSON document!");
     return false;
@@ -491,7 +491,7 @@ bool MQTTClientComponent::publish(const std::string &topic, const std::string &p

 bool MQTTClientComponent::publish(const std::string &topic, const char *payload, size_t payload_length, uint8_t qos,
                                   bool retain) {
-  return publish({.topic = topic, .payload = payload, .qos = qos, .retain = retain});
+  return publish({.topic = topic, .payload = std::string(payload, payload_length), .qos = qos, .retain = retain});
 }

 bool MQTTClientComponent::publish(const MQTTMessage &message) {
@@ -28,12 +28,12 @@ bool Select::has_option(const std::string &option) const { return this->index_of
 bool Select::has_index(size_t index) const { return index < this->size(); }

 size_t Select::size() const {
-  auto options = traits.get_options();
+  const auto &options = traits.get_options();
   return options.size();
 }

 optional<size_t> Select::index_of(const std::string &option) const {
-  auto options = traits.get_options();
+  const auto &options = traits.get_options();
   auto it = std::find(options.begin(), options.end(), option);
   if (it == options.end()) {
     return {};
@@ -51,7 +51,7 @@ optional<size_t> Select::active_index() const {

 optional<std::string> Select::at(size_t index) const {
   if (this->has_index(index)) {
-    auto options = traits.get_options();
+    const auto &options = traits.get_options();
     return options.at(index);
   } else {
     return {};
@@ -45,7 +45,7 @@ void SelectCall::perform() {
   auto *parent = this->parent_;
   const auto *name = parent->get_name().c_str();
   const auto &traits = parent->traits;
-  auto options = traits.get_options();
+  const auto &options = traits.get_options();

   if (this->operation_ == SELECT_OP_NONE) {
     ESP_LOGW(TAG, "'%s' - SelectCall performed without selecting an operation", name);
@@ -186,6 +186,7 @@ CONF_CHARACTERISTIC_UUID = "characteristic_uuid"
 CONF_CHECK = "check"
 CONF_CHIPSET = "chipset"
 CONF_CLEAN_SESSION = "clean_session"
+CONF_CLEAR = "clear"
 CONF_CLEAR_IMPEDANCE = "clear_impedance"
 CONF_CLIENT_CERTIFICATE = "client_certificate"
 CONF_CLIENT_CERTIFICATE_KEY = "client_certificate_key"
@@ -176,6 +176,7 @@
 #ifdef USE_ARDUINO
 #define USE_ARDUINO_VERSION_CODE VERSION_CODE(3, 2, 1)
 #define USE_ETHERNET
+#define USE_ETHERNET_KSZ8081
 #endif

 #ifdef USE_ESP_IDF
@@ -1038,12 +1038,9 @@ class ArchiveRequestHandler(BaseHandler):
         shutil.move(config_file, os.path.join(archive_path, configuration))

         storage_json = StorageJSON.load(storage_path)
-        if storage_json is not None:
+        if storage_json is not None and storage_json.build_path:
             # Delete build folder (if exists)
-            name = storage_json.name
-            build_folder = os.path.join(settings.config_dir, name)
-            if build_folder is not None:
-                shutil.rmtree(build_folder, os.path.join(archive_path, name))
+            shutil.rmtree(storage_json.build_path, ignore_errors=True)


 class UnArchiveRequestHandler(BaseHandler):
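
Note (illustration, not part of the diff): the archive-handler change stops rebuilding the build-folder path by hand and instead removes the build_path recorded in storage_json, guarded so that a missing entry or unset path is a no-op. Roughly:

    import shutil

    def remove_build_folder(storage_json) -> None:
        # storage_json is any object exposing a build_path attribute (sketch only).
        if storage_json is not None and storage_json.build_path:
            # ignore_errors=True makes an already-missing folder a no-op.
            shutil.rmtree(storage_json.build_path, ignore_errors=True)
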
@@ -1,6 +1,7 @@
 import os
 import random
 import string
+from typing import Literal, NotRequired, TypedDict, Unpack
 import unicodedata

 import voluptuous as vol
@@ -103,11 +104,25 @@ HARDWARE_BASE_CONFIGS = {
 }


-def sanitize_double_quotes(value):
+def sanitize_double_quotes(value: str) -> str:
     return value.replace("\\", "\\\\").replace('"', '\\"')


-def wizard_file(**kwargs):
+class WizardFileKwargs(TypedDict):
+    """Keyword arguments for wizard_file function."""
+
+    name: str
+    platform: Literal["ESP8266", "ESP32", "RP2040", "BK72XX", "LN882X", "RTL87XX"]
+    board: str
+    ssid: NotRequired[str]
+    psk: NotRequired[str]
+    password: NotRequired[str]
+    ota_password: NotRequired[str]
+    api_encryption_key: NotRequired[str]
+    friendly_name: NotRequired[str]
+
+
+def wizard_file(**kwargs: Unpack[WizardFileKwargs]) -> str:
     letters = string.ascii_letters + string.digits
     ap_name_base = kwargs["name"].replace("_", " ").title()
     ap_name = f"{ap_name_base} Fallback Hotspot"
@@ -180,7 +195,25 @@ captive_portal:
     return config


-def wizard_write(path, **kwargs):
+class WizardWriteKwargs(TypedDict):
+    """Keyword arguments for wizard_write function."""
+
+    name: str
+    type: Literal["basic", "empty", "upload"]
+    # Required for "basic" type
+    board: NotRequired[str]
+    platform: NotRequired[str]
+    ssid: NotRequired[str]
+    psk: NotRequired[str]
+    password: NotRequired[str]
+    ota_password: NotRequired[str]
+    api_encryption_key: NotRequired[str]
+    friendly_name: NotRequired[str]
+    # Required for "upload" type
+    file_text: NotRequired[str]
+
+
+def wizard_write(path: str, **kwargs: Unpack[WizardWriteKwargs]) -> bool:
     from esphome.components.bk72xx import boards as bk72xx_boards
     from esphome.components.esp32 import boards as esp32_boards
     from esphome.components.esp8266 import boards as esp8266_boards
@@ -237,14 +270,14 @@ def wizard_write(path, **kwargs):

 if get_bool_env(ENV_QUICKWIZARD):

-    def sleep(time):
+    def sleep(time: float) -> None:
         pass

 else:
     from time import sleep


-def safe_print_step(step, big):
+def safe_print_step(step: int, big: str) -> None:
     safe_print()
     safe_print()
     safe_print(f"============= STEP {step} =============")
@@ -253,14 +286,14 @@ def safe_print_step(step, big):
     sleep(0.25)


-def default_input(text, default):
+def default_input(text: str, default: str) -> str:
     safe_print()
     safe_print(f"Press ENTER for default ({default})")
     return safe_input(text.format(default)) or default


 # From https://stackoverflow.com/a/518232/8924614
-def strip_accents(value):
+def strip_accents(value: str) -> str:
     return "".join(
         c
         for c in unicodedata.normalize("NFD", str(value))
@@ -268,7 +301,7 @@ def strip_accents(value):
     )


-def wizard(path):
+def wizard(path: str) -> int:
     from esphome.components.bk72xx import boards as bk72xx_boards
     from esphome.components.esp32 import boards as esp32_boards
     from esphome.components.esp8266 import boards as esp8266_boards
@@ -509,6 +542,7 @@ def wizard(path):
         ssid=ssid,
         psk=psk,
         password=password,
+        type="basic",
     ):
         return 1

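
Note (illustration, not part of the diff): the wizard hunks type **kwargs with a TypedDict plus Unpack (PEP 692), so a type checker can validate keyword names and value types at call sites. A trimmed, self-contained sketch of the pattern (requires Python 3.11+, or typing_extensions on older versions; NodeKwargs and describe_node are hypothetical names for the example):

    from typing import Literal, NotRequired, TypedDict, Unpack

    class NodeKwargs(TypedDict):
        # Field set trimmed for the example; the real WizardFileKwargs has more keys.
        name: str
        platform: Literal["ESP8266", "ESP32"]
        ssid: NotRequired[str]

    def describe_node(**kwargs: Unpack[NodeKwargs]) -> str:
        # A checker flags unknown keywords or wrong value types here.
        ap_name = kwargs["name"].replace("_", " ").title() + " Fallback Hotspot"
        return f"{ap_name} ({kwargs['platform']})"

    print(describe_node(name="my_node", platform="ESP32"))
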
@@ -315,6 +315,19 @@ def clean_build():
         _LOGGER.info("Deleting %s", dependencies_lock)
         os.remove(dependencies_lock)

+    # Clean PlatformIO cache to resolve CMake compiler detection issues
+    # This helps when toolchain paths change or get corrupted
+    try:
+        from platformio.project.helpers import get_project_cache_dir
+    except ImportError:
+        # PlatformIO is not available, skip cache cleaning
+        pass
+    else:
+        cache_dir = get_project_cache_dir()
+        if cache_dir and cache_dir.strip() and os.path.isdir(cache_dir):
+            _LOGGER.info("Deleting PlatformIO cache %s", cache_dir)
+            shutil.rmtree(cache_dir)
+

 GITIGNORE_CONTENT = """# Gitignore settings for ESPHome
 # This is an example and may include too much for your use-case.
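
Note (illustration, not part of the diff): the clean_build() addition uses a try/except ImportError guard so the PlatformIO cache is only touched when PlatformIO is importable, and validates the reported cache directory before deleting it. A stripped-down standalone version of that pattern:

    import logging
    import os
    import shutil

    _LOGGER = logging.getLogger(__name__)

    def clean_platformio_cache() -> None:
        try:
            from platformio.project.helpers import get_project_cache_dir
        except ImportError:
            # PlatformIO is not installed; nothing to clean.
            return
        cache_dir = get_project_cache_dir()
        # Guard against empty or whitespace-only results before deleting anything.
        if cache_dir and cache_dir.strip() and os.path.isdir(cache_dir):
            _LOGGER.info("Deleting PlatformIO cache %s", cache_dir)
            shutil.rmtree(cache_dir)
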
@@ -7,7 +7,7 @@ pre-commit
 # Unit tests
 pytest==8.4.2
 pytest-cov==7.0.0
-pytest-mock==3.15.0
+pytest-mock==3.15.1
 pytest-asyncio==1.2.0
 pytest-xdist==3.8.0
 asyncmock==0.4.2
@@ -1,14 +1,18 @@
 #!/usr/bin/env python3

+import argparse
 import json
-import os
+from pathlib import Path
 import subprocess
+import sys
 import tempfile

 from esphome.components.esp32 import ESP_IDF_PLATFORM_VERSION as ver
+from esphome.helpers import write_file_if_changed

 version_str = f"{ver.major}.{ver.minor:02d}.{ver.patch:02d}"
-print(f"ESP32 Platform Version: {version_str}")
+root = Path(__file__).parent.parent
+boards_file_path = root / "esphome" / "components" / "esp32" / "boards.py"


 def get_boards():
@@ -17,6 +21,9 @@ def get_boards():
             [
                 "git",
                 "clone",
+                "-q",
+                "-c",
+                "advice.detachedHead=false",
                 "--depth",
                 "1",
                 "--branch",
@@ -26,16 +33,14 @@ def get_boards():
             ],
             check=True,
         )
-        boards_file = os.path.join(tempdir, "boards")
+        boards_directory = Path(tempdir) / "boards"
         boards = {}
-        for fname in os.listdir(boards_file):
-            if not fname.endswith(".json"):
-                continue
-            with open(os.path.join(boards_file, fname), encoding="utf-8") as f:
+        for fname in boards_directory.glob("*.json"):
+            with fname.open(encoding="utf-8") as f:
                 board_info = json.load(f)
             mcu = board_info["build"]["mcu"]
             name = board_info["name"]
-            board = fname[:-5]
+            board = fname.stem
             variant = mcu.upper()
             boards[board] = {
                 "name": name,
@@ -47,33 +52,47 @@ def get_boards():
 TEMPLATE = """    "%s": {
         "name": "%s",
         "variant": %s,
-    },
-"""
+    },"""


-def main():
+def main(check: bool):
     boards = get_boards()
     # open boards.py, delete existing BOARDS variable and write the new boards dict
-    boards_file_path = os.path.join(
-        os.path.dirname(__file__), "..", "esphome", "components", "esp32", "boards.py"
-    )
-    with open(boards_file_path, encoding="UTF-8") as f:
-        lines = f.readlines()
+    existing_content = boards_file_path.read_text(encoding="UTF-8")

-    with open(boards_file_path, "w", encoding="UTF-8") as f:
-        for line in lines:
-            if line.startswith("BOARDS = {"):
-                f.write("BOARDS = {\n")
-                f.writelines(
-                    TEMPLATE % (board, info["name"], info["variant"])
-                    for board, info in sorted(boards.items())
-                )
-                f.write("}\n")
-                break
-
-            f.write(line)
+    parts: list[str] = []
+    for line in existing_content.splitlines():
+        if line == "BOARDS = {":
+            parts.append(line)
+            parts.extend(
+                TEMPLATE % (board, info["name"], info["variant"])
+                for board, info in sorted(boards.items())
+            )
+            parts.append("}")
+            parts.append("# DO NOT ADD ANYTHING BELOW THIS LINE")
+            break
+
+        parts.append(line)
+
+    parts.append("")
+    content = "\n".join(parts)
+
+    if check:
+        if existing_content != content:
+            print("boards.py file is not up to date.")
+            print("Please run `script/generate-esp32-boards.py`")
+            sys.exit(1)
+        print("boards.py file is up to date")
+    elif write_file_if_changed(boards_file_path, content):
+        print("ESP32 boards updated successfully.")


 if __name__ == "__main__":
-    main()
-    print("ESP32 boards updated successfully.")
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--check",
+        help="Check if the boards.py file is up to date.",
+        action="store_true",
+    )
+    args = parser.parse_args()
+    main(args.check)
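
Note (illustration, not part of the diff): the rewritten script builds the new boards.py content in memory and either verifies it (--check, as the new CI step invokes) or writes it only when it changed. A generic, self-contained sketch of that check-or-write flow; generate() and the target path are placeholders, and write_file_if_changed is simplified:

    import argparse
    import sys
    from pathlib import Path

    def generate() -> str:
        # Placeholder for the real generator (get_boards() plus template rendering).
        return "BOARDS = {\n}\n"

    def main(check: bool, target: Path) -> None:
        new_content = generate()
        existing = target.read_text(encoding="UTF-8") if target.exists() else ""
        if check:
            if existing != new_content:
                print(f"{target} is not up to date.")
                sys.exit(1)
            print(f"{target} is up to date")
        elif existing != new_content:
            # Simplified write_file_if_changed(): only touch the file when needed.
            target.write_text(new_content, encoding="UTF-8")
            print(f"{target} updated.")

    if __name__ == "__main__":
        parser = argparse.ArgumentParser()
        parser.add_argument("--check", action="store_true")
        parser.add_argument("--target", type=Path, default=Path("boards.py"))
        args = parser.parse_args()
        main(args.check, args.target)
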
@@ -589,7 +589,7 @@ async def test_archive_request_handler_post(
     mock_ext_storage_path: MagicMock,
     tmp_path: Path,
 ) -> None:
-    """Test ArchiveRequestHandler.post method."""
+    """Test ArchiveRequestHandler.post method without storage_json."""

     # Set up temp directories
     config_dir = Path(get_fixture_path("conf"))
@@ -616,6 +616,97 @@ async def test_archive_request_handler_post(
     ).read_text() == "esphome:\n name: test_archive\n"


+@pytest.mark.asyncio
+async def test_archive_handler_with_build_folder(
+    dashboard: DashboardTestHelper,
+    mock_archive_storage_path: MagicMock,
+    mock_ext_storage_path: MagicMock,
+    mock_dashboard_settings: MagicMock,
+    mock_storage_json: MagicMock,
+    tmp_path: Path,
+) -> None:
+    """Test ArchiveRequestHandler.post with storage_json and build folder."""
+    config_dir = tmp_path / "config"
+    config_dir.mkdir()
+    archive_dir = tmp_path / "archive"
+    archive_dir.mkdir()
+    build_dir = tmp_path / "build"
+    build_dir.mkdir()
+
+    configuration = "test_device.yaml"
+    test_config = config_dir / configuration
+    test_config.write_text("esphome:\n name: test_device\n")
+
+    build_folder = build_dir / "test_device"
+    build_folder.mkdir()
+    (build_folder / "firmware.bin").write_text("binary content")
+    (build_folder / ".pioenvs").mkdir()
+
+    mock_dashboard_settings.config_dir = str(config_dir)
+    mock_dashboard_settings.rel_path.return_value = str(test_config)
+    mock_archive_storage_path.return_value = str(archive_dir)
+
+    mock_storage = MagicMock()
+    mock_storage.name = "test_device"
+    mock_storage.build_path = str(build_folder)
+    mock_storage_json.load.return_value = mock_storage
+
+    response = await dashboard.fetch(
+        "/archive",
+        method="POST",
+        body=f"configuration={configuration}",
+        headers={"Content-Type": "application/x-www-form-urlencoded"},
+    )
+    assert response.code == 200
+
+    assert not test_config.exists()
+    assert (archive_dir / configuration).exists()
+
+    assert not build_folder.exists()
+    assert not (archive_dir / "test_device").exists()
+
+
+@pytest.mark.asyncio
+async def test_archive_handler_no_build_folder(
+    dashboard: DashboardTestHelper,
+    mock_archive_storage_path: MagicMock,
+    mock_ext_storage_path: MagicMock,
+    mock_dashboard_settings: MagicMock,
+    mock_storage_json: MagicMock,
+    tmp_path: Path,
+) -> None:
+    """Test ArchiveRequestHandler.post with storage_json but no build folder."""
+    config_dir = tmp_path / "config"
+    config_dir.mkdir()
+    archive_dir = tmp_path / "archive"
+    archive_dir.mkdir()
+
+    configuration = "test_device.yaml"
+    test_config = config_dir / configuration
+    test_config.write_text("esphome:\n name: test_device\n")
+
+    mock_dashboard_settings.config_dir = str(config_dir)
+    mock_dashboard_settings.rel_path.return_value = str(test_config)
+    mock_archive_storage_path.return_value = str(archive_dir)
+
+    mock_storage = MagicMock()
+    mock_storage.name = "test_device"
+    mock_storage.build_path = None
+    mock_storage_json.load.return_value = mock_storage
+
+    response = await dashboard.fetch(
+        "/archive",
+        method="POST",
+        body=f"configuration={configuration}",
+        headers={"Content-Type": "application/x-www-form-urlencoded"},
+    )
+    assert response.code == 200
+
+    assert not test_config.exists()
+    assert (archive_dir / configuration).exists()
+    assert not (archive_dir / "test_device").exists()
+
+
 @pytest.mark.skipif(os.name == "nt", reason="Unix sockets are not supported on Windows")
 @pytest.mark.usefixtures("mock_trash_storage_path", "mock_archive_storage_path")
 def test_start_web_server_with_unix_socket(tmp_path: Path) -> None:
@@ -384,6 +384,9 @@ def test_preload_core_config_basic(setup_core: Path) -> None:
     assert platform == "esp32"
     assert KEY_CORE in CORE.data
     assert CONF_BUILD_PATH in config[CONF_ESPHOME]
+    # Verify default build path is "build/<device_name>"
+    build_path = config[CONF_ESPHOME][CONF_BUILD_PATH]
+    assert build_path.endswith(os.path.join("build", "test_device"))


 def test_preload_core_config_with_build_path(setup_core: Path) -> None:
@@ -418,6 +421,12 @@ def test_preload_core_config_env_build_path(setup_core: Path) -> None:

     assert CONF_BUILD_PATH in config[CONF_ESPHOME]
     assert "test_device" in config[CONF_ESPHOME][CONF_BUILD_PATH]
+    # Verify it uses the env var path with device name appended
+    build_path = config[CONF_ESPHOME][CONF_BUILD_PATH]
+    expected_path = os.path.join("/env/build", "test_device")
+    assert build_path == expected_path or build_path == expected_path.replace(
+        "/", os.sep
+    )
     assert platform == "rp2040"

@@ -1,9 +1,12 @@
 """Tests for the wizard.py file."""

 import os
+from pathlib import Path
+from typing import Any
 from unittest.mock import MagicMock

 import pytest
+from pytest import MonkeyPatch

 from esphome.components.bk72xx.boards import BK72XX_BOARD_PINS
 from esphome.components.esp32.boards import ESP32_BOARD_PINS
@@ -15,7 +18,7 @@ import esphome.wizard as wz


 @pytest.fixture
-def default_config():
+def default_config() -> dict[str, Any]:
     return {
         "type": "basic",
         "name": "test-name",
@@ -28,7 +31,7 @@ def default_config():


 @pytest.fixture
-def wizard_answers():
+def wizard_answers() -> list[str]:
     return [
         "test-node",  # Name of the node
         "ESP8266",  # platform
@@ -53,7 +56,9 @@ def test_sanitize_quotes_replaces_with_escaped_char():
     assert output_str == '\\"key\\": \\"value\\"'


-def test_config_file_fallback_ap_includes_descriptive_name(default_config):
+def test_config_file_fallback_ap_includes_descriptive_name(
+    default_config: dict[str, Any],
+):
     """
     The fallback AP should include the node and a descriptive name
     """
@@ -67,7 +72,9 @@ def test_config_file_fallback_ap_includes_descriptive_name(default_config):
     assert 'ssid: "Test Node Fallback Hotspot"' in config


-def test_config_file_fallback_ap_name_less_than_32_chars(default_config):
+def test_config_file_fallback_ap_name_less_than_32_chars(
+    default_config: dict[str, Any],
+):
     """
     The fallback AP name must be less than 32 chars.
     Since it is composed of the node name and "Fallback Hotspot" this can be too long and needs truncating
@@ -82,7 +89,7 @@ def test_config_file_fallback_ap_name_less_than_32_chars(default_config):
     assert 'ssid: "A Very Long Name For This Node"' in config


-def test_config_file_should_include_ota(default_config):
+def test_config_file_should_include_ota(default_config: dict[str, Any]):
     """
     The Over-The-Air update should be enabled by default
     """
@@ -95,7 +102,9 @@ def test_config_file_should_include_ota(default_config):
     assert "ota:" in config


-def test_config_file_should_include_ota_when_password_set(default_config):
+def test_config_file_should_include_ota_when_password_set(
+    default_config: dict[str, Any],
+):
     """
     The Over-The-Air update should be enabled when a password is set
     """
@@ -109,7 +118,9 @@ def test_config_file_should_include_ota_when_password_set(default_config):
     assert "ota:" in config


-def test_wizard_write_sets_platform(default_config, tmp_path, monkeypatch):
+def test_wizard_write_sets_platform(
+    default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
+):
     """
     If the platform is not explicitly set, use "ESP8266" if the board is one of the ESP8266 boards
     """
@@ -126,7 +137,7 @@ def test_wizard_write_sets_platform(default_config, tmp_path, monkeypatch):
     assert "esp8266:" in generated_config


-def test_wizard_empty_config(tmp_path, monkeypatch):
+def test_wizard_empty_config(tmp_path: Path, monkeypatch: MonkeyPatch):
     """
     The wizard should be able to create an empty configuration
     """
@@ -146,7 +157,7 @@ def test_wizard_empty_config(tmp_path, monkeypatch):
     assert generated_config == ""


-def test_wizard_upload_config(tmp_path, monkeypatch):
+def test_wizard_upload_config(tmp_path: Path, monkeypatch: MonkeyPatch):
     """
     The wizard should be able to import an base64 encoded configuration
     """
@@ -168,7 +179,7 @@ def test_wizard_upload_config(tmp_path, monkeypatch):


 def test_wizard_write_defaults_platform_from_board_esp8266(
-    default_config, tmp_path, monkeypatch
+    default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
 ):
     """
     If the platform is not explicitly set, use "ESP8266" if the board is one of the ESP8266 boards
@@ -189,7 +200,7 @@ def test_wizard_write_defaults_platform_from_board_esp8266(


 def test_wizard_write_defaults_platform_from_board_esp32(
-    default_config, tmp_path, monkeypatch
+    default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
 ):
     """
     If the platform is not explicitly set, use "ESP32" if the board is one of the ESP32 boards
@@ -210,7 +221,7 @@ def test_wizard_write_defaults_platform_from_board_esp32(


 def test_wizard_write_defaults_platform_from_board_bk72xx(
-    default_config, tmp_path, monkeypatch
+    default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
 ):
     """
     If the platform is not explicitly set, use "BK72XX" if the board is one of BK72XX boards
@@ -231,7 +242,7 @@ def test_wizard_write_defaults_platform_from_board_bk72xx(


 def test_wizard_write_defaults_platform_from_board_ln882x(
-    default_config, tmp_path, monkeypatch
+    default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
 ):
     """
     If the platform is not explicitly set, use "LN882X" if the board is one of LN882X boards
@@ -252,7 +263,7 @@ def test_wizard_write_defaults_platform_from_board_ln882x(


 def test_wizard_write_defaults_platform_from_board_rtl87xx(
-    default_config, tmp_path, monkeypatch
+    default_config: dict[str, Any], tmp_path: Path, monkeypatch: MonkeyPatch
 ):
     """
     If the platform is not explicitly set, use "RTL87XX" if the board is one of RTL87XX boards
@@ -272,7 +283,7 @@ def test_wizard_write_defaults_platform_from_board_rtl87xx(
     assert "rtl87xx:" in generated_config


-def test_safe_print_step_prints_step_number_and_description(monkeypatch):
+def test_safe_print_step_prints_step_number_and_description(monkeypatch: MonkeyPatch):
     """
     The safe_print_step function prints the step number and the passed description
     """
@@ -296,7 +307,7 @@ def test_safe_print_step_prints_step_number_and_description(monkeypatch):
     assert any(f"STEP {step_num}" in arg for arg in all_args)


-def test_default_input_uses_default_if_no_input_supplied(monkeypatch):
+def test_default_input_uses_default_if_no_input_supplied(monkeypatch: MonkeyPatch):
     """
     The default_input() function should return the supplied default value if the user doesn't enter anything
     """
@@ -312,7 +323,7 @@ def test_default_input_uses_default_if_no_input_supplied(monkeypatch):
     assert retval == default_string


-def test_default_input_uses_user_supplied_value(monkeypatch):
+def test_default_input_uses_user_supplied_value(monkeypatch: MonkeyPatch):
     """
     The default_input() function should return the value that the user enters
     """
@@ -376,7 +387,9 @@ def test_wizard_rejects_existing_files(tmpdir):
     assert retval == 2


-def test_wizard_accepts_default_answers_esp8266(tmpdir, monkeypatch, wizard_answers):
+def test_wizard_accepts_default_answers_esp8266(
+    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
+):
     """
     The wizard should accept the given default answers for esp8266
     """
@@ -396,7 +409,9 @@ def test_wizard_accepts_default_answers_esp8266(tmpdir, monkeypatch, wizard_answ
     assert retval == 0


-def test_wizard_accepts_default_answers_esp32(tmpdir, monkeypatch, wizard_answers):
+def test_wizard_accepts_default_answers_esp32(
+    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
+):
     """
     The wizard should accept the given default answers for esp32
     """
@@ -418,7 +433,9 @@ def test_wizard_accepts_default_answers_esp32(tmpdir, monkeypatch, wizard_answer
     assert retval == 0


-def test_wizard_offers_better_node_name(tmpdir, monkeypatch, wizard_answers):
+def test_wizard_offers_better_node_name(
+    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
+):
     """
     When the node name does not conform, a better alternative is offered
     * Removes special chars
@@ -449,7 +466,9 @@ def test_wizard_offers_better_node_name(tmpdir, monkeypatch, wizard_answers):
     assert wz.default_input.call_args.args[1] == expected_name


-def test_wizard_requires_correct_platform(tmpdir, monkeypatch, wizard_answers):
+def test_wizard_requires_correct_platform(
+    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
+):
     """
     When the platform is not either esp32 or esp8266, the wizard should reject it
     """
@@ -471,7 +490,9 @@ def test_wizard_requires_correct_platform(tmpdir, monkeypatch, wizard_answers):
     assert retval == 0


-def test_wizard_requires_correct_board(tmpdir, monkeypatch, wizard_answers):
+def test_wizard_requires_correct_board(
+    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
+):
     """
     When the board is not a valid esp8266 board, the wizard should reject it
     """
@@ -493,7 +514,9 @@ def test_wizard_requires_correct_board(tmpdir, monkeypatch, wizard_answers):
     assert retval == 0


-def test_wizard_requires_valid_ssid(tmpdir, monkeypatch, wizard_answers):
+def test_wizard_requires_valid_ssid(
+    tmpdir, monkeypatch: MonkeyPatch, wizard_answers: list[str]
+):
     """
     When the board is not a valid esp8266 board, the wizard should reject it
     """
@@ -515,7 +538,9 @@ def test_wizard_requires_valid_ssid(tmpdir, monkeypatch, wizard_answers):
     assert retval == 0


-def test_wizard_write_protects_existing_config(tmpdir, default_config, monkeypatch):
+def test_wizard_write_protects_existing_config(
+    tmpdir, default_config: dict[str, Any], monkeypatch: MonkeyPatch
+):
     """
     The wizard_write function should not overwrite existing config files and return False
     """
@@ -349,6 +349,14 @@ def test_clean_build(
     dependencies_lock = tmp_path / "dependencies.lock"
     dependencies_lock.write_text("lock file")

+    # Create PlatformIO cache directory
+    platformio_cache_dir = tmp_path / ".platformio" / ".cache"
+    platformio_cache_dir.mkdir(parents=True)
+    (platformio_cache_dir / "downloads").mkdir()
+    (platformio_cache_dir / "http").mkdir()
+    (platformio_cache_dir / "tmp").mkdir()
+    (platformio_cache_dir / "downloads" / "package.tar.gz").write_text("package")
+
     # Setup mocks
     mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
     mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
@@ -358,21 +366,30 @@ def test_clean_build(
     assert pioenvs_dir.exists()
     assert piolibdeps_dir.exists()
     assert dependencies_lock.exists()
+    assert platformio_cache_dir.exists()

-    # Call the function
-    with caplog.at_level("INFO"):
-        clean_build()
+    # Mock PlatformIO's get_project_cache_dir
+    with patch(
+        "platformio.project.helpers.get_project_cache_dir"
+    ) as mock_get_cache_dir:
+        mock_get_cache_dir.return_value = str(platformio_cache_dir)
+
+        # Call the function
+        with caplog.at_level("INFO"):
+            clean_build()

     # Verify all were removed
     assert not pioenvs_dir.exists()
     assert not piolibdeps_dir.exists()
     assert not dependencies_lock.exists()
+    assert not platformio_cache_dir.exists()

     # Verify logging
     assert "Deleting" in caplog.text
     assert ".pioenvs" in caplog.text
     assert ".piolibdeps" in caplog.text
     assert "dependencies.lock" in caplog.text
+    assert "PlatformIO cache" in caplog.text


 @patch("esphome.writer.CORE")
@@ -446,6 +463,86 @@ def test_clean_build_nothing_exists(
     assert not dependencies_lock.exists()


+@patch("esphome.writer.CORE")
+def test_clean_build_platformio_not_available(
+    mock_core: MagicMock,
+    tmp_path: Path,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test clean_build when PlatformIO is not available."""
+    # Create directory structure and files
+    pioenvs_dir = tmp_path / ".pioenvs"
+    pioenvs_dir.mkdir()
+
+    piolibdeps_dir = tmp_path / ".piolibdeps"
+    piolibdeps_dir.mkdir()
+
+    dependencies_lock = tmp_path / "dependencies.lock"
+    dependencies_lock.write_text("lock file")
+
+    # Setup mocks
+    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
+    mock_core.relative_piolibdeps_path.return_value = str(piolibdeps_dir)
+    mock_core.relative_build_path.return_value = str(dependencies_lock)
+
+    # Verify all exist before
+    assert pioenvs_dir.exists()
+    assert piolibdeps_dir.exists()
+    assert dependencies_lock.exists()
+
+    # Mock import error for platformio
+    with (
+        patch.dict("sys.modules", {"platformio.project.helpers": None}),
+        caplog.at_level("INFO"),
+    ):
+        # Call the function
+        clean_build()
+
+    # Verify standard paths were removed but no cache cleaning attempted
+    assert not pioenvs_dir.exists()
+    assert not piolibdeps_dir.exists()
+    assert not dependencies_lock.exists()
+
+    # Verify no cache logging
+    assert "PlatformIO cache" not in caplog.text
+
+
+@patch("esphome.writer.CORE")
+def test_clean_build_empty_cache_dir(
+    mock_core: MagicMock,
+    tmp_path: Path,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test clean_build when get_project_cache_dir returns empty/whitespace."""
+    # Create directory structure and files
+    pioenvs_dir = tmp_path / ".pioenvs"
+    pioenvs_dir.mkdir()
+
+    # Setup mocks
+    mock_core.relative_pioenvs_path.return_value = str(pioenvs_dir)
+    mock_core.relative_piolibdeps_path.return_value = str(tmp_path / ".piolibdeps")
+    mock_core.relative_build_path.return_value = str(tmp_path / "dependencies.lock")
+
+    # Verify pioenvs exists before
+    assert pioenvs_dir.exists()
+
+    # Mock PlatformIO's get_project_cache_dir to return whitespace
+    with patch(
+        "platformio.project.helpers.get_project_cache_dir"
+    ) as mock_get_cache_dir:
+        mock_get_cache_dir.return_value = " "  # Whitespace only
+
+        # Call the function
+        with caplog.at_level("INFO"):
+            clean_build()
+
+    # Verify pioenvs was removed
+    assert not pioenvs_dir.exists()
+
+    # Verify no cache cleaning was attempted due to empty string
+    assert "PlatformIO cache" not in caplog.text
+
+
 @patch("esphome.writer.CORE")
 def test_write_gitignore_creates_new_file(
     mock_core: MagicMock,