Mirror of https://github.com/esphome/esphome.git (synced 2025-04-15 23:30:28 +01:00)
Commit dccad040f9
.github/workflows/ci.yml (vendored)
@@ -398,6 +398,7 @@ jobs:
     runs-on: ubuntu-latest
     needs:
       - common
+    if: github.event_name == 'pull_request'
     outputs:
       matrix: ${{ steps.set-matrix.outputs.matrix }}
     steps:
@@ -406,10 +407,14 @@ jobs:
         with:
           # Fetch enough history so `git merge-base refs/remotes/origin/dev HEAD` works.
           fetch-depth: 500
-      - name: Fetch dev branch
+      - name: Get target branch
+        id: target-branch
         run: |
-          git -c protocol.version=2 fetch --no-tags --prune --no-recurse-submodules --depth=1 origin +refs/heads/dev*:refs/remotes/origin/dev* +refs/tags/dev*:refs/tags/dev*
-          git merge-base refs/remotes/origin/dev HEAD
+          echo "branch=${{ github.event.pull_request.base.ref }}" >> $GITHUB_OUTPUT
+      - name: Fetch ${{ steps.target-branch.outputs.branch }} branch
+        run: |
+          git -c protocol.version=2 fetch --no-tags --prune --no-recurse-submodules --depth=1 origin +refs/heads/${{ steps.target-branch.outputs.branch }}:refs/remotes/origin/${{ steps.target-branch.outputs.branch }}
+          git merge-base refs/remotes/origin/${{ steps.target-branch.outputs.branch }} HEAD
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -419,7 +424,7 @@ jobs:
         id: set-matrix
         run: |
           . venv/bin/activate
-          echo "matrix=$(script/list-components.py --changed | jq -R -s -c 'split("\n")[:-1]')" >> $GITHUB_OUTPUT
+          echo "matrix=$(script/list-components.py --changed --branch ${{ steps.target-branch.outputs.branch }} | jq -R -s -c 'split("\n")[:-1]')" >> $GITHUB_OUTPUT
 
   test-build-components:
     name: Component test ${{ matrix.file }}
@@ -427,7 +432,7 @@ jobs:
     needs:
       - common
       - list-components
-    if: ${{ needs.list-components.outputs.matrix != '[]' && needs.list-components.outputs.matrix != '' }}
+    if: ${{ github.event_name == 'pull_request' && needs.list-components.outputs.matrix != '[]' && needs.list-components.outputs.matrix != '' }}
     strategy:
       fail-fast: false
       max-parallel: 2

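Note (illustration, not part of the commit): the matrix step above shells out to script/list-components.py and packs its line-per-component output into a JSON array via jq. A rough local equivalent in Python, assuming an esphome checkout with that script present and an activated venv:

# Hypothetical helper mirroring the "set-matrix" CI step: run
# list-components.py against a target branch and emit the same compact
# JSON array that `jq -R -s -c 'split("\n")[:-1]'` produces.
import json
import subprocess

def build_matrix(branch: str = "dev") -> str:
    out = subprocess.run(
        ["script/list-components.py", "--changed", "--branch", branch],
        capture_output=True, text=True, check=True,
    ).stdout
    # Drop the trailing empty element created by the final newline.
    return json.dumps(out.split("\n")[:-1], separators=(",", ":"))

if __name__ == "__main__":
    print(build_matrix())
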
@@ -2,8 +2,10 @@ import base64
 import secrets
 from pathlib import Path
 from typing import Optional
+import re
 
 import requests
+from ruamel.yaml import YAML
 
 import esphome.codegen as cg
 import esphome.config_validation as cv
@@ -11,7 +13,6 @@ import esphome.final_validate as fv
 from esphome import git
 from esphome.components.packages import validate_source_shorthand
 from esphome.const import CONF_REF, CONF_WIFI, CONF_ESPHOME, CONF_PROJECT
-from esphome.wizard import wizard_file
 from esphome.yaml_util import dump
 
 dashboard_import_ns = cg.esphome_ns.namespace("dashboard_import")
@@ -94,75 +95,74 @@ def import_config(
     if p.exists():
         raise FileExistsError
 
-    if project_name == "esphome.web":
-        if "esp32c3" in import_url:
-            board = "esp32-c3-devkitm-1"
-            platform = "ESP32"
-        elif "esp32s2" in import_url:
-            board = "esp32-s2-saola-1"
-            platform = "ESP32"
-        elif "esp32s3" in import_url:
-            board = "esp32-s3-devkitc-1"
-            platform = "ESP32"
-        elif "esp32" in import_url:
-            board = "esp32dev"
-            platform = "ESP32"
-        elif "esp8266" in import_url:
-            board = "esp01_1m"
-            platform = "ESP8266"
-        elif "pico-w" in import_url:
-            board = "pico-w"
-            platform = "RP2040"
-
-        kwargs = {
-            "name": name,
-            "friendly_name": friendly_name,
-            "platform": platform,
-            "board": board,
-            "ssid": "!secret wifi_ssid",
-            "psk": "!secret wifi_password",
+    git_file = git.GitFile.from_shorthand(import_url)
+
+    if git_file.query and "full_config" in git_file.query:
+        url = git_file.raw_url
+        try:
+            req = requests.get(url, timeout=30)
+            req.raise_for_status()
+        except requests.exceptions.RequestException as e:
+            raise ValueError(f"Error while fetching {url}: {e}") from e
+
+        contents = req.text
+        yaml = YAML()
+        loaded_yaml = yaml.load(contents)
+        if (
+            "name_add_mac_suffix" in loaded_yaml["esphome"]
+            and loaded_yaml["esphome"]["name_add_mac_suffix"]
+        ):
+            loaded_yaml["esphome"]["name_add_mac_suffix"] = False
+            name_val = loaded_yaml["esphome"]["name"]
+            sub_pattern = re.compile(r"\$\{?([a-zA-Z-_]+)\}?")
+            if match := sub_pattern.match(name_val):
+                name_sub = match.group(1)
+                if name_sub in loaded_yaml["substitutions"]:
+                    loaded_yaml["substitutions"][name_sub] = name
+                else:
+                    raise ValueError(
+                        f"Name substitution {name_sub} not found in substitutions"
+                    )
+            else:
+                loaded_yaml["esphome"]["name"] = name
+            if friendly_name is not None:
+                friendly_name_val = loaded_yaml["esphome"]["friendly_name"]
+                if match := sub_pattern.match(friendly_name_val):
+                    friendly_name_sub = match.group(1)
+                    if friendly_name_sub in loaded_yaml["substitutions"]:
+                        loaded_yaml["substitutions"][friendly_name_sub] = friendly_name
+                    else:
+                        raise ValueError(
+                            f"Friendly name substitution {friendly_name_sub} not found in substitutions"
+                        )
+                else:
+                    loaded_yaml["esphome"]["friendly_name"] = friendly_name
+
+            with p.open("w", encoding="utf8") as f:
+                yaml.dump(loaded_yaml, f)
+        else:
+            with p.open("w", encoding="utf8") as f:
+                f.write(contents)
+
+    else:
+        substitutions = {"name": name}
+        esphome_core = {"name": "${name}", "name_add_mac_suffix": False}
+        if friendly_name:
+            substitutions["friendly_name"] = friendly_name
+            esphome_core["friendly_name"] = "${friendly_name}"
+        config = {
+            "substitutions": substitutions,
+            "packages": {project_name: import_url},
+            "esphome": esphome_core,
         }
         if encryption:
             noise_psk = secrets.token_bytes(32)
             key = base64.b64encode(noise_psk).decode()
-            kwargs["api_encryption_key"] = key
+            config["api"] = {"encryption": {"key": key}}
 
-        p.write_text(
-            wizard_file(**kwargs),
-            encoding="utf8",
-        )
-    else:
-        git_file = git.GitFile.from_shorthand(import_url)
-
-        if git_file.query and "full_config" in git_file.query:
-            url = git_file.raw_url
-            try:
-                req = requests.get(url, timeout=30)
-                req.raise_for_status()
-            except requests.exceptions.RequestException as e:
-                raise ValueError(f"Error while fetching {url}: {e}") from e
-
-            p.write_text(req.text, encoding="utf8")
-
-        else:
-            substitutions = {"name": name}
-            esphome_core = {"name": "${name}", "name_add_mac_suffix": False}
-            if friendly_name:
-                substitutions["friendly_name"] = friendly_name
-                esphome_core["friendly_name"] = "${friendly_name}"
-            config = {
-                "substitutions": substitutions,
-                "packages": {project_name: import_url},
-                "esphome": esphome_core,
-            }
-            if encryption:
-                noise_psk = secrets.token_bytes(32)
-                key = base64.b64encode(noise_psk).decode()
-                config["api"] = {"encryption": {"key": key}}
-
-            output = dump(config)
-
-            if network == CONF_WIFI:
-                output += WIFI_CONFIG
-
-            p.write_text(output, encoding="utf8")
+        output = dump(config)
+
+        if network == CONF_WIFI:
+            output += WIFI_CONFIG
+
+        p.write_text(output, encoding="utf8")

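Note (illustration, not part of the commit): the full_config branch above rewrites either esphome.name directly or, when the name is written as a substitution reference, the substitution it points at. A standalone sketch of that matching logic using the same regex; the sample dict is invented for the example:

import re

# Matches "$devicename" or "${devicename}" style references.
sub_pattern = re.compile(r"\$\{?([a-zA-Z-_]+)\}?")

def apply_name(loaded_yaml: dict, new_name: str) -> None:
    name_val = loaded_yaml["esphome"]["name"]
    if match := sub_pattern.match(name_val):
        name_sub = match.group(1)
        if name_sub in loaded_yaml["substitutions"]:
            # The device name is indirected through a substitution: update that.
            loaded_yaml["substitutions"][name_sub] = new_name
        else:
            raise ValueError(f"Name substitution {name_sub} not found in substitutions")
    else:
        # Plain literal name: replace it directly.
        loaded_yaml["esphome"]["name"] = new_name

cfg = {"substitutions": {"devicename": "old"}, "esphome": {"name": "${devicename}"}}
apply_name(cfg, "kitchen-sensor")
assert cfg["substitutions"]["devicename"] == "kitchen-sensor"
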
@@ -93,11 +93,18 @@ int MicroWakeWord::read_microphone_() {
     return 0;
   }
 
-  size_t bytes_written = this->ring_buffer_->write((void *) this->input_buffer_, bytes_read);
-  if (bytes_written != bytes_read) {
-    ESP_LOGW(TAG, "Failed to write some data to ring buffer (written=%d, expected=%d)", bytes_written, bytes_read);
+  size_t bytes_free = this->ring_buffer_->free();
+
+  if (bytes_free < bytes_read) {
+    ESP_LOGW(TAG,
+             "Not enough free bytes in ring buffer to store incoming audio data (free bytes=%d, incoming bytes=%d). "
+             "Resetting the ring buffer. Wake word detection accuracy will be reduced.",
+             bytes_free, bytes_read);
+
+    this->ring_buffer_->reset();
   }
-  return bytes_written;
+
+  return this->ring_buffer_->write((void *) this->input_buffer_, bytes_read);
 }
 
 void MicroWakeWord::loop() {
@@ -206,12 +213,6 @@ bool MicroWakeWord::initialize_models() {
     return false;
   }
 
-  this->preprocessor_stride_buffer_ = audio_samples_allocator.allocate(HISTORY_SAMPLES_TO_KEEP);
-  if (this->preprocessor_stride_buffer_ == nullptr) {
-    ESP_LOGE(TAG, "Could not allocate the audio preprocessor's stride buffer.");
-    return false;
-  }
-
   this->preprocessor_model_ = tflite::GetModel(G_AUDIO_PREPROCESSOR_INT8_TFLITE);
   if (this->preprocessor_model_->version() != TFLITE_SCHEMA_VERSION) {
     ESP_LOGE(TAG, "Wake word's audio preprocessor model's schema is not supported");
@@ -225,7 +226,7 @@ bool MicroWakeWord::initialize_models() {
   }
 
   static tflite::MicroMutableOpResolver<18> preprocessor_op_resolver;
-  static tflite::MicroMutableOpResolver<14> streaming_op_resolver;
+  static tflite::MicroMutableOpResolver<17> streaming_op_resolver;
 
   if (!this->register_preprocessor_ops_(preprocessor_op_resolver))
     return false;
@@ -329,7 +330,6 @@ bool MicroWakeWord::detect_wake_word_() {
   }
 
   // Perform inference
-  uint32_t streaming_size = micros();
   float streaming_prob = this->perform_streaming_inference_();
 
   // Add the most recent probability to the sliding window
@@ -357,6 +357,9 @@ bool MicroWakeWord::detect_wake_word_() {
     for (auto &prob : this->recent_streaming_probabilities_) {
       prob = 0;
     }
+
+    ESP_LOGD(TAG, "Wake word sliding average probability is %.3f and most recent probability is %.3f",
+             sliding_window_average, streaming_prob);
     return true;
   }
 
@@ -371,23 +374,6 @@ void MicroWakeWord::set_sliding_window_average_size(size_t size) {
 bool MicroWakeWord::slice_available_() {
   size_t available = this->ring_buffer_->available();
 
-  size_t free = this->ring_buffer_->free();
-
-  if (free < NEW_SAMPLES_TO_GET * sizeof(int16_t)) {
-    // If the ring buffer is within one audio slice of being full, then wake word detection will have issues.
-    // If this is constantly occuring, then some possibilities why are
-    // 1) there are too many other slow components configured
-    // 2) the ESP32 isn't fast enough; e.g., an ESP32 is much slower than an ESP32-S3 at inferences.
-    // 3) the model is too large
-    // 4) the model uses operations that are not optimized
-    ESP_LOGW(TAG,
-             "Audio buffer is nearly full. Wake word detection may be less accurate and have slower reponse times. "
-#if !defined(USE_ESP32_VARIANT_ESP32S3)
-             "microWakeWord is designed for the ESP32-S3. The current platform is too slow for this model."
-#endif
-    );
-  }
-
   return available > (NEW_SAMPLES_TO_GET * sizeof(int16_t));
 }
 
@@ -396,13 +382,12 @@ bool MicroWakeWord::stride_audio_samples_(int16_t **audio_samples) {
     return false;
   }
 
-  // Copy 320 bytes (160 samples over 10 ms) into preprocessor_audio_buffer_ from history in
-  // preprocessor_stride_buffer_
-  memcpy((void *) (this->preprocessor_audio_buffer_), (void *) (this->preprocessor_stride_buffer_),
+  // Copy the last 320 bytes (160 samples over 10 ms) from the audio buffer to the start of the audio buffer
+  memcpy((void *) (this->preprocessor_audio_buffer_), (void *) (this->preprocessor_audio_buffer_ + NEW_SAMPLES_TO_GET),
          HISTORY_SAMPLES_TO_KEEP * sizeof(int16_t));
 
-  // Copy 640 bytes (320 samples over 20 ms) from the ring buffer
-  // The first 320 bytes (160 samples over 10 ms) will be from history
+  // Copy 640 bytes (320 samples over 20 ms) from the ring buffer into the audio buffer offset 320 bytes (160 samples
+  // over 10 ms)
   size_t bytes_read = this->ring_buffer_->read((void *) (this->preprocessor_audio_buffer_ + HISTORY_SAMPLES_TO_KEEP),
                                                NEW_SAMPLES_TO_GET * sizeof(int16_t), pdMS_TO_TICKS(200));
 
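Side note (illustration only; the sample rate and sample width are assumptions inferred from the comments, not taken from the header): the byte counts in the rewritten comments follow from 16 kHz, 16-bit mono audio.

# 10 ms of history and 20 ms of new audio at an assumed 16 kHz, 2 bytes per int16 sample.
SAMPLE_RATE = 16000
BYTES_PER_SAMPLE = 2

history_samples = SAMPLE_RATE * 10 // 1000   # 160 samples kept from the previous window
new_samples = SAMPLE_RATE * 20 // 1000       # 320 freshly read samples

assert history_samples * BYTES_PER_SAMPLE == 320
assert new_samples * BYTES_PER_SAMPLE == 640
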
@@ -415,11 +400,6 @@ bool MicroWakeWord::stride_audio_samples_(int16_t **audio_samples) {
     return false;
   }
 
-  // Copy the last 320 bytes (160 samples over 10 ms) from the audio buffer into history stride buffer for the next
-  // iteration
-  memcpy((void *) (this->preprocessor_stride_buffer_), (void *) (this->preprocessor_audio_buffer_ + NEW_SAMPLES_TO_GET),
-         HISTORY_SAMPLES_TO_KEEP * sizeof(int16_t));
-
   *audio_samples = this->preprocessor_audio_buffer_;
   return true;
 }
@@ -480,7 +460,7 @@ bool MicroWakeWord::register_preprocessor_ops_(tflite::MicroMutableOpResolver<18
   return true;
 }
 
-bool MicroWakeWord::register_streaming_ops_(tflite::MicroMutableOpResolver<14> &op_resolver) {
+bool MicroWakeWord::register_streaming_ops_(tflite::MicroMutableOpResolver<17> &op_resolver) {
   if (op_resolver.AddCallOnce() != kTfLiteOk)
     return false;
   if (op_resolver.AddVarHandle() != kTfLiteOk)
@@ -509,6 +489,12 @@ bool MicroWakeWord::register_streaming_ops_(tflite::MicroMutableOpResolver<14> &
     return false;
   if (op_resolver.AddQuantize() != kTfLiteOk)
     return false;
+  if (op_resolver.AddDepthwiseConv2D() != kTfLiteOk)
+    return false;
+  if (op_resolver.AddAveragePool2D() != kTfLiteOk)
+    return false;
+  if (op_resolver.AddMaxPool2D() != kTfLiteOk)
+    return false;
 
   return true;
 }

@@ -128,7 +128,6 @@ class MicroWakeWord : public Component {
 
   // Stores audio fed into feature generator preprocessor
   int16_t *preprocessor_audio_buffer_;
-  int16_t *preprocessor_stride_buffer_;
 
   bool detected_{false};
 
@@ -181,7 +180,7 @@ class MicroWakeWord : public Component {
   bool register_preprocessor_ops_(tflite::MicroMutableOpResolver<18> &op_resolver);
 
   /// @brief Returns true if successfully registered the streaming model's TensorFlow operations
-  bool register_streaming_ops_(tflite::MicroMutableOpResolver<14> &op_resolver);
+  bool register_streaming_ops_(tflite::MicroMutableOpResolver<17> &op_resolver);
 };
 
 template<typename... Ts> class StartAction : public Action<Ts...>, public Parented<MicroWakeWord> {

@@ -6,6 +6,9 @@ from esphome.components.esp32 import add_idf_sdkconfig_option
 from esphome.const import (
     CONF_ENABLE_IPV6,
     CONF_MIN_IPV6_ADDR_COUNT,
+    PLATFORM_ESP32,
+    PLATFORM_ESP8266,
+    PLATFORM_RP2040,
 )
 
 CODEOWNERS = ["@esphome/core"]
@@ -16,25 +19,30 @@ IPAddress = network_ns.class_("IPAddress")
 
 CONFIG_SCHEMA = cv.Schema(
     {
-        cv.Optional(CONF_ENABLE_IPV6, default=False): cv.boolean,
+        cv.SplitDefault(CONF_ENABLE_IPV6): cv.All(
+            cv.boolean, cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266, PLATFORM_RP2040])
+        ),
         cv.Optional(CONF_MIN_IPV6_ADDR_COUNT, default=0): cv.positive_int,
     }
 )
 
 
 async def to_code(config):
-    cg.add_define("USE_NETWORK_IPV6", config[CONF_ENABLE_IPV6])
-    cg.add_define("USE_NETWORK_MIN_IPV6_ADDR_COUNT", config[CONF_MIN_IPV6_ADDR_COUNT])
-    if CORE.using_esp_idf:
-        add_idf_sdkconfig_option("CONFIG_LWIP_IPV6", config[CONF_ENABLE_IPV6])
-        add_idf_sdkconfig_option(
-            "CONFIG_LWIP_IPV6_AUTOCONFIG", config[CONF_ENABLE_IPV6]
+    if CONF_ENABLE_IPV6 in config:
+        cg.add_define("USE_NETWORK_IPV6", config[CONF_ENABLE_IPV6])
+        cg.add_define(
+            "USE_NETWORK_MIN_IPV6_ADDR_COUNT", config[CONF_MIN_IPV6_ADDR_COUNT]
         )
-    else:
-        if config[CONF_ENABLE_IPV6]:
-            cg.add_build_flag("-DCONFIG_LWIP_IPV6")
-            cg.add_build_flag("-DCONFIG_LWIP_IPV6_AUTOCONFIG")
-            if CORE.is_rp2040:
-                cg.add_build_flag("-DPIO_FRAMEWORK_ARDUINO_ENABLE_IPV6")
-            if CORE.is_esp8266:
-                cg.add_build_flag("-DPIO_FRAMEWORK_ARDUINO_LWIP2_IPV6_LOW_MEMORY")
+        if CORE.using_esp_idf:
+            add_idf_sdkconfig_option("CONFIG_LWIP_IPV6", config[CONF_ENABLE_IPV6])
+            add_idf_sdkconfig_option(
+                "CONFIG_LWIP_IPV6_AUTOCONFIG", config[CONF_ENABLE_IPV6]
+            )
+        else:
+            if config[CONF_ENABLE_IPV6]:
+                cg.add_build_flag("-DCONFIG_LWIP_IPV6")
+                cg.add_build_flag("-DCONFIG_LWIP_IPV6_AUTOCONFIG")
+                if CORE.is_rp2040:
+                    cg.add_build_flag("-DPIO_FRAMEWORK_ARDUINO_ENABLE_IPV6")
+                if CORE.is_esp8266:
+                    cg.add_build_flag("-DPIO_FRAMEWORK_ARDUINO_LWIP2_IPV6_LOW_MEMORY")

@@ -14,13 +14,12 @@ from esphome.const import (
     CONF_PM_4_0,
     CONF_STORE_BASELINE,
     CONF_TEMPERATURE,
+    DEVICE_CLASS_AQI,
     DEVICE_CLASS_HUMIDITY,
-    DEVICE_CLASS_NITROUS_OXIDE,
     DEVICE_CLASS_PM1,
     DEVICE_CLASS_PM10,
     DEVICE_CLASS_PM25,
     DEVICE_CLASS_TEMPERATURE,
-    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
     ICON_CHEMICAL_WEAPON,
     ICON_RADIATOR,
     ICON_THERMOMETER,
@@ -132,13 +131,13 @@ CONFIG_SCHEMA = (
             cv.Optional(CONF_VOC): sensor.sensor_schema(
                 icon=ICON_RADIATOR,
                 accuracy_decimals=0,
-                device_class=DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
+                device_class=DEVICE_CLASS_AQI,
                 state_class=STATE_CLASS_MEASUREMENT,
             ).extend(GAS_SENSOR),
             cv.Optional(CONF_NOX): sensor.sensor_schema(
                 icon=ICON_RADIATOR,
                 accuracy_decimals=0,
-                device_class=DEVICE_CLASS_NITROUS_OXIDE,
+                device_class=DEVICE_CLASS_AQI,
                 state_class=STATE_CLASS_MEASUREMENT,
             ).extend(GAS_SENSOR),
             cv.Optional(CONF_STORE_BASELINE, default=True): cv.boolean,

@@ -54,9 +54,9 @@ void SenseAirComponent::update() {
   this->status_clear_warning();
   const uint8_t length = response[2];
   const uint16_t status = (uint16_t(response[3]) << 8) | response[4];
-  const uint16_t ppm = (uint16_t(response[length + 1]) << 8) | response[length + 2];
+  const int16_t ppm = int16_t((response[length + 1] << 8) | response[length + 2]);
 
-  ESP_LOGD(TAG, "SenseAir Received CO₂=%uppm Status=0x%02X", ppm, status);
+  ESP_LOGD(TAG, "SenseAir Received CO₂=%dppm Status=0x%02X", ppm, status);
   if (this->co2_sensor_ != nullptr)
     this->co2_sensor_->publish_state(ppm);
 }

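Note (illustration, not part of the commit; the byte values are invented): treating the reading as a signed 16-bit value means raw words above 0x7FFF now come out negative instead of as implausibly large ppm numbers.

# Combine the two payload bytes and reinterpret them as a signed 16-bit
# (two's-complement) integer, mirroring the int16_t cast in the diff.
def to_signed_ppm(high: int, low: int) -> int:
    value = (high << 8) | low
    return value - 0x10000 if value & 0x8000 else value

assert to_signed_ppm(0x01, 0xF4) == 500    # ordinary reading
assert to_signed_ppm(0xFF, 0xF6) == -10    # wraps around to a negative value
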
@@ -61,9 +61,11 @@ void UponorSmatrixComponent::loop() {
 
   // Send packets during bus silence
   if ((now - this->last_rx_ > 300) && (now - this->last_poll_start_ < 9500) && (now - this->last_tx_ > 200)) {
+#ifdef USE_TIME
     // Only build time packet when bus is silent and queue is empty to make sure we can send it right away
     if (this->send_time_requested_ && this->tx_queue_.empty() && this->do_send_time_())
       this->send_time_requested_ = false;
+#endif
     // Send the next packet in the queue
     if (!this->tx_queue_.empty()) {
       auto packet = std::move(this->tx_queue_.front());

@@ -4,6 +4,8 @@
 #include "esphome/core/component.h"
 #include "esphome/core/helpers.h"
 
+#include "esphome/core/defines.h"
+
 #ifdef USE_TIME
 #include "esphome/components/time/real_time_clock.h"
 #include "esphome/core/time.h"

@@ -1,6 +1,6 @@
 """Constants used by esphome."""
 
-__version__ = "2024.3.0b2"
+__version__ = "2024.3.0b3"
 
 ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
 VALID_SUBSTITUTIONS_CHARACTERS = (

@@ -5,6 +5,7 @@
 #include "esphome/core/helpers.h"
 #include "esphome/core/log.h"
 #include <utility>
+#include <cinttypes>
 
 namespace esphome {
 
@@ -211,8 +212,8 @@ WarnIfComponentBlockingGuard::~WarnIfComponentBlockingGuard() {
   uint32_t now = millis();
   if (now - started_ > 50) {
     const char *src = component_ == nullptr ? "<null>" : component_->get_component_source();
-    ESP_LOGW(TAG, "Component %s took a long time for an operation (%.2f s).", src, (now - started_) / 1e3f);
-    ESP_LOGW(TAG, "Components should block for at most 20-30ms.");
+    ESP_LOGW(TAG, "Component %s took a long time for an operation (%" PRIu32 " ms).", src, (now - started_));
+    ESP_LOGW(TAG, "Components should block for at most 30 ms.");
     ;
   }
 }

@@ -516,7 +516,8 @@ class ImportRequestHandler(BaseHandler):
             self.set_status(500)
             self.write("File already exists")
             return
-        except ValueError:
+        except ValueError as e:
+            _LOGGER.error(e)
             self.set_status(422)
             self.write("Invalid package url")
             return

@@ -16,6 +16,7 @@ esphome-dashboard==20231107.0
 aioesphomeapi==23.1.1
 zeroconf==0.131.0
 python-magic==0.4.27
+ruamel.yaml==0.18.6 # dashboard_import
 
 # esp-idf requires this, but doesn't bundle it by default
 # https://github.com/espressif/esp-idf/blob/220590d599e134d7a5e7f1e683cc4550349ffbf8/requirements.txt#L24

@@ -70,11 +70,11 @@ def splitlines_no_ends(string):
     return [s.strip() for s in string.splitlines()]
 
 
-def changed_files():
+def changed_files(branch="dev"):
     check_remotes = ["upstream", "origin"]
     check_remotes.extend(splitlines_no_ends(get_output("git", "remote")))
     for remote in check_remotes:
-        command = ["git", "merge-base", f"refs/remotes/{remote}/dev", "HEAD"]
+        command = ["git", "merge-base", f"refs/remotes/{remote}/{branch}", "HEAD"]
         try:
             merge_base = splitlines_no_ends(get_output(*command))[0]
             break

@@ -120,13 +120,22 @@ def main():
     parser.add_argument(
         "-c", "--changed", action="store_true", help="Only run on changed files"
     )
+    parser.add_argument(
+        "-b", "--branch", help="Branch to compare changed files against"
+    )
     args = parser.parse_args()
 
+    if args.branch and not args.changed:
+        parser.error("--branch requires --changed")
+
     files = git_ls_files()
     files = filter(filter_component_files, files)
 
     if args.changed:
-        changed = changed_files()
+        if args.branch:
+            changed = changed_files(args.branch)
+        else:
+            changed = changed_files()
         files = [f for f in files if f in changed]
 
     components = extract_component_names_array_from_files_array(files)
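Usage sketch (illustration only; assumes it is run from the repository's script/ directory so helpers is importable): together with the new --branch flag, which the argument parser rejects unless --changed is also given, the helper can now diff against any base branch rather than the hard-coded dev.

# Hypothetical caller: list files changed relative to the beta branch,
# the same comparison CI now performs for pull requests targeting beta.
from helpers import changed_files

for path in changed_files(branch="beta"):
    print(path)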