Mirror of https://github.com/esphome/esphome.git (synced 2025-11-02 08:01:50 +00:00)
Compare commits
56 Commits
Compare: 2024.6.3 ... (jesserockz)

SHA1: dfb98b523f, 17204baac0, 1e05bcaa61, 18690d51f5, 2aacf14e96, 9c5507ab46, 0a9703bff9, 67bd5db6d6, 6c11f0bd51, e7556271e7, 8045b889d3, 6f074d3692, b09781afa5, 96f1a146a6, 775e03cfd9, 80e5e19956, 8f16268572, ba11f2ab0c, 8567877f07, 310f850ee4, 896cdab22d, ed6462fa00, 65b05af014, c18056bdda, 65a79acfb9, 18d331d284, 7d642147c1, 4c313bc198, a78b2d0128, f6848fe24d, a59c9b4f77, c30913ccde, 41f810f828, d604c8ae64, 67d8c7c691, 015cd42a2e, 51c5d1714c, 1ff302b341, 6b89763ad6, 253303f3a9, d49f2cbec8, 290816be11, 2fc43fa9c7, 5adadeaa07, 761aae6f89, b29e1acab8, 49d4260cfe, c4c46c206f, 8453d9a70d, 68dbf35b09, 1a242f94db, df52bc3493, 2044c7e4d4, b401b5eca8, 67f41a0c72, bd7e8fbf86
.github/actions/build-image/action.yaml (vendored, 4 lines changed)

@@ -46,7 +46,7 @@ runs:
     - name: Build and push to ghcr by digest
       id: build-ghcr
-      uses: docker/build-push-action@v6.0.1
+      uses: docker/build-push-action@v6.1.0
       with:
         context: .
         file: ./docker/Dockerfile
@@ -69,7 +69,7 @@ runs:
     - name: Build and push to dockerhub by digest
       id: build-dockerhub
-      uses: docker/build-push-action@v6.0.1
+      uses: docker/build-push-action@v6.1.0
       with:
         context: .
         file: ./docker/Dockerfile
.github/workflows/ci-api-proto.yml (vendored, 2 lines changed)

@@ -21,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Set up Python
         uses: actions/setup-python@v5.1.0
         with:
.github/workflows/ci-docker.yml (vendored, 2 lines changed)

@@ -40,7 +40,7 @@ jobs:
         arch: [amd64, armv7, aarch64]
         build_type: ["ha-addon", "docker", "lint"]
     steps:
-      - uses: actions/checkout@v4.1.6
+      - uses: actions/checkout@v4.1.7
       - name: Set up Python
         uses: actions/setup-python@v5.1.0
        with:
.github/workflows/ci.yml (vendored, 39 lines changed)

@@ -34,7 +34,7 @@ jobs:
       cache-key: ${{ steps.cache-key.outputs.key }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Generate cache-key
         id: cache-key
         run: echo key="${{ hashFiles('requirements.txt', 'requirements_optional.txt', 'requirements_test.txt') }}" >> $GITHUB_OUTPUT
@@ -66,7 +66,7 @@ jobs:
       - common
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -87,7 +87,7 @@ jobs:
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
@@ -108,7 +108,7 @@ jobs:
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
@@ -129,7 +129,7 @@ jobs:
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
@@ -150,7 +150,7 @@ jobs:
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
@@ -199,7 +199,7 @@ jobs:
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
@@ -229,7 +229,7 @@ jobs:
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
@@ -254,7 +254,7 @@ jobs:
       matrix: ${{ steps.set-matrix.outputs.matrix }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Find all YAML test files
         id: set-matrix
         run: echo "matrix=$(ls tests/test*.yaml | jq -R -s -c 'split("\n")[:-1]')" >> $GITHUB_OUTPUT
@@ -271,7 +271,7 @@ jobs:
         file: ${{ fromJson(needs.compile-tests-list.outputs.matrix) }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -303,7 +303,7 @@ jobs:
         file: ${{ fromJson(needs.compile-tests-list.outputs.matrix) }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -358,7 +358,7 @@ jobs:
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -387,6 +387,13 @@ jobs:
           echo "::add-matcher::.github/workflows/matchers/gcc.json"
           echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"

+      - name: Run 'pio run --list-targets -e esp32-idf-tidy'
+        if: matrix.name == 'Run script/clang-tidy for ESP32 IDF'
+        run: |
+          . venv/bin/activate
+          mkdir -p .temp
+          pio run --list-targets -e esp32-idf-tidy
+
       - name: Run clang-tidy
         run: |
           . venv/bin/activate
@@ -410,7 +417,7 @@ jobs:
       count: ${{ steps.list-components.outputs.count }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
         with:
           # Fetch enough history so `git merge-base refs/remotes/origin/dev HEAD` works.
           fetch-depth: 500
@@ -458,7 +465,7 @@ jobs:
         run: sudo apt-get install libsodium-dev libsdl2-dev

       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -484,7 +491,7 @@ jobs:
       matrix: ${{ steps.split.outputs.components }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Split components into 20 groups
         id: split
         run: |
@@ -512,7 +519,7 @@ jobs:
         run: sudo apt-get install libsodium-dev libsdl2-dev

       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
.github/workflows/release.yml (vendored, 8 lines changed)

@@ -19,7 +19,7 @@ jobs:
       tag: ${{ steps.tag.outputs.tag }}
       branch_build: ${{ steps.tag.outputs.branch_build }}
     steps:
-      - uses: actions/checkout@v4.1.6
+      - uses: actions/checkout@v4.1.7
       - name: Get tag
         id: tag
         # yamllint disable rule:line-length
@@ -51,7 +51,7 @@ jobs:
       contents: read
       id-token: write
     steps:
-      - uses: actions/checkout@v4.1.6
+      - uses: actions/checkout@v4.1.7
       - name: Set up Python
         uses: actions/setup-python@v5.1.0
         with:
@@ -83,7 +83,7 @@ jobs:
          - linux/arm/v7
          - linux/arm64
     steps:
-      - uses: actions/checkout@v4.1.6
+      - uses: actions/checkout@v4.1.7
       - name: Set up Python
         uses: actions/setup-python@v5.1.0
         with:
@@ -174,7 +174,7 @@ jobs:
       - ghcr
       - dockerhub
     steps:
-      - uses: actions/checkout@v4.1.6
+      - uses: actions/checkout@v4.1.7

       - name: Download digests
         uses: actions/download-artifact@v4.1.7
.github/workflows/sync-device-classes.yml (vendored, 6 lines changed)

@@ -13,10 +13,10 @@ jobs:
     if: github.repository == 'esphome/esphome'
     steps:
       - name: Checkout
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7

       - name: Checkout Home Assistant
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
         with:
           repository: home-assistant/core
           path: lib/home-assistant
@@ -36,7 +36,7 @@ jobs:
           python ./script/sync-device_class.py

       - name: Commit changes
-        uses: peter-evans/create-pull-request@v6.0.5
+        uses: peter-evans/create-pull-request@v6.1.0
         with:
           commit-message: "Synchronise Device Classes from Home Assistant"
           committer: esphomebot <esphome@nabucasa.com>
.github/workflows/yaml-lint.yml (vendored, 2 lines changed)

@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.6
+        uses: actions/checkout@v4.1.7
       - name: Run yamllint
         uses: frenck/action-yamllint@v1.5.0
         with:
@@ -60,7 +60,6 @@ from esphome.cpp_types import (  # noqa
     std_ns,
     std_shared_ptr,
     std_string,
-    std_string_ref,
     std_vector,
     uint8,
     uint16,
@@ -129,13 +129,13 @@ class Cover : public EntityBase, public EntityBase_DeviceClass {
    *
    * This is a legacy method and may be removed later, please use `.make_call()` instead.
    */
-  ESPDEPRECATED("open() is deprecated, use make_call().set_command_open() instead.", "2021.9")
+  ESPDEPRECATED("open() is deprecated, use make_call().set_command_open().perform() instead.", "2021.9")
   void open();
   /** Close the cover.
    *
    * This is a legacy method and may be removed later, please use `.make_call()` instead.
    */
-  ESPDEPRECATED("close() is deprecated, use make_call().set_command_close() instead.", "2021.9")
+  ESPDEPRECATED("close() is deprecated, use make_call().set_command_close().perform() instead.", "2021.9")
   void close();
   /** Stop the cover.
    *
@@ -145,21 +145,24 @@ bool DallasTemperatureSensor::check_scratch_pad_() {
 float DallasTemperatureSensor::get_temp_c_() {
   int16_t temp = (this->scratch_pad_[1] << 8) | this->scratch_pad_[0];
   if ((this->address_ & 0xff) == DALLAS_MODEL_DS18S20) {
-    return (temp >> 1) + (this->scratch_pad_[7] - this->scratch_pad_[6]) / float(this->scratch_pad_[7]) - 0.25;
-  }
-  switch (this->resolution_) {
-    case 9:
-      temp &= 0xfff8;
-      break;
-    case 10:
-      temp &= 0xfffc;
-      break;
-    case 11:
-      temp &= 0xfffe;
-      break;
-    case 12:
-    default:
-      break;
+    if (this->scratch_pad_[7] != 0x10)
+      ESP_LOGE(TAG, "unexpected COUNT_PER_C value: %u", this->scratch_pad_[7]);
+    temp = ((temp & 0xfff7) << 3) + (0x10 - this->scratch_pad_[6]) - 4;
+  } else {
+    switch (this->resolution_) {
+      case 9:
+        temp &= 0xfff8;
+        break;
+      case 10:
+        temp &= 0xfffc;
+        break;
+      case 11:
+        temp &= 0xfffe;
+        break;
+      case 12:
+      default:
+        break;
+    }
   }

   return temp / 16.0f;
@@ -37,18 +37,14 @@ void DS1307Component::read_time() {
     ESP_LOGW(TAG, "RTC halted, not syncing to system clock.");
     return;
   }
-  ESPTime rtc_time{
-      .second = uint8_t(ds1307_.reg.second + 10 * ds1307_.reg.second_10),
-      .minute = uint8_t(ds1307_.reg.minute + 10u * ds1307_.reg.minute_10),
-      .hour = uint8_t(ds1307_.reg.hour + 10u * ds1307_.reg.hour_10),
-      .day_of_week = uint8_t(ds1307_.reg.weekday),
-      .day_of_month = uint8_t(ds1307_.reg.day + 10u * ds1307_.reg.day_10),
-      .day_of_year = 1,  // ignored by recalc_timestamp_utc(false)
-      .month = uint8_t(ds1307_.reg.month + 10u * ds1307_.reg.month_10),
-      .year = uint16_t(ds1307_.reg.year + 10u * ds1307_.reg.year_10 + 2000),
-      .is_dst = false,  // not used
-      .timestamp = 0    // overwritten by recalc_timestamp_utc(false)
-  };
+  ESPTime rtc_time{.second = uint8_t(ds1307_.reg.second + 10 * ds1307_.reg.second_10),
+                   .minute = uint8_t(ds1307_.reg.minute + 10u * ds1307_.reg.minute_10),
+                   .hour = uint8_t(ds1307_.reg.hour + 10u * ds1307_.reg.hour_10),
+                   .day_of_week = uint8_t(ds1307_.reg.weekday),
+                   .day_of_month = uint8_t(ds1307_.reg.day + 10u * ds1307_.reg.day_10),
+                   .day_of_year = 1,  // ignored by recalc_timestamp_utc(false)
+                   .month = uint8_t(ds1307_.reg.month + 10u * ds1307_.reg.month_10),
+                   .year = uint16_t(ds1307_.reg.year + 10u * ds1307_.reg.year_10 + 2000)};
   rtc_time.recalc_timestamp_utc(false);
   if (!rtc_time.is_valid()) {
     ESP_LOGE(TAG, "Invalid RTC time, not syncing to system clock.");
@@ -17,7 +17,7 @@ from esphome.const import (
     CONF_VSYNC_PIN,
 )
 from esphome.core import CORE
-from esphome.components.esp32 import add_idf_sdkconfig_option
+from esphome.components.esp32 import add_idf_component
 from esphome.cpp_helpers import setup_entity

 DEPENDENCIES = ["esp32"]
@@ -290,8 +290,11 @@ async def to_code(config):
     cg.add_define("USE_ESP32_CAMERA")

     if CORE.using_esp_idf:
-        cg.add_library("espressif/esp32-camera", "1.0.0")
-        add_idf_sdkconfig_option("CONFIG_RTCIO_SUPPORT_RTC_GPIO_DESC", True)
+        add_idf_component(
+            name="esp32-camera",
+            repo="https://github.com/espressif/esp32-camera.git",
+            ref="v2.0.9",
+        )

     for conf in config.get(CONF_ON_STREAM_START, []):
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
@@ -1,14 +1,10 @@
 import esphome.codegen as cg
 import esphome.config_validation as cv
-import esphome.final_validate as fv
 from esphome.components.ota import BASE_OTA_SCHEMA, ota_to_code, OTAComponent
 from esphome.const import (
-    CONF_ESPHOME,
     CONF_ID,
     CONF_NUM_ATTEMPTS,
-    CONF_OTA,
     CONF_PASSWORD,
-    CONF_PLATFORM,
     CONF_PORT,
     CONF_REBOOT_TIMEOUT,
     CONF_SAFE_MODE,
@@ -25,19 +21,6 @@ esphome = cg.esphome_ns.namespace("esphome")
 ESPHomeOTAComponent = esphome.class_("ESPHomeOTAComponent", OTAComponent)


-def ota_esphome_final_validate(config):
-    fconf = fv.full_config.get()[CONF_OTA]
-    used_ports = []
-    for ota_conf in fconf:
-        if ota_conf.get(CONF_PLATFORM) == CONF_ESPHOME:
-            if (plat_port := ota_conf.get(CONF_PORT)) not in used_ports:
-                used_ports.append(plat_port)
-            else:
-                raise cv.Invalid(
-                    f"Only one instance of the {CONF_ESPHOME} {CONF_OTA} {CONF_PLATFORM} is allowed per port. Note that this error may result from OTA specified in packages"
-                )
-
-
 CONFIG_SCHEMA = (
     cv.Schema(
         {
@@ -67,8 +50,6 @@ CONFIG_SCHEMA = (
     .extend(cv.COMPONENT_SCHEMA)
 )

-FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate
-

 @coroutine_with_priority(52.0)
 async def to_code(config):
@@ -631,7 +631,7 @@ void EthernetComponent::write_phy_register_(esp_eth_mac_t *mac, PHYRegister regi
   ESPHL_ERROR_CHECK(err, "Writing PHY Register failed");

   if (this->type_ == ETHERNET_TYPE_RTL8201 && register_data.page) {
-    ESP_LOGD(TAG, "Select PHY Register Page 0x%02" PRIX32, 0x0);
+    ESP_LOGD(TAG, "Select PHY Register Page 0x00");
     err = mac->write_phy_reg(mac, this->phy_addr_, eth_phy_psr_reg_addr, 0x0);
     ESPHL_ERROR_CHECK(err, "Select PHY Register Page 0 failed");
   }
@@ -94,15 +94,13 @@ bool HOT IRAM_ATTR GPIOOneWireBus::read_bit_() {

   // measure from start value directly, to get best accurate timing no matter
   // how long pin_mode/delayMicroseconds took
-  uint32_t now = micros();
-  if (now - start < 12)
-    delayMicroseconds(12 - (now - start));
+  delayMicroseconds(12 - (micros() - start));

   // sample bus to read bit from peer
   bool r = pin_.digital_read();

   // read slot is at least 60µs; get as close to 60µs to spend less time with interrupts locked
-  now = micros();
+  uint32_t now = micros();
   if (now - start < 60)
     delayMicroseconds(60 - (now - start));
@@ -257,7 +257,7 @@ async def http_request_action_to_code(config, action_id, template_arg, args):
         trigger,
         [
             (cg.std_shared_ptr.template(HttpContainer), "response"),
-            (cg.std_string_ref, "body"),
+            (cg.std_string, "body"),
         ],
         conf,
     )
@@ -43,10 +43,10 @@ class HttpContainer : public Parented<HttpRequestComponent> {
   bool secure_{false};
 };

-class HttpRequestResponseTrigger : public Trigger<std::shared_ptr<HttpContainer>, std::string &> {
+class HttpRequestResponseTrigger : public Trigger<std::shared_ptr<HttpContainer>, std::string> {
  public:
-  void process(std::shared_ptr<HttpContainer> container, std::string &response_body) {
-    this->trigger(std::move(container), response_body);
+  void process(std::shared_ptr<HttpContainer> container, std::string response_body) {
+    this->trigger(std::move(container), std::move(response_body));
   }
 };
@@ -149,21 +149,11 @@ template<typename... Ts> class HttpRequestSendAction : public Action<Ts...> {
         }
         response_body.reserve(read_index);
         response_body.assign((char *) buf, read_index);
         allocator.deallocate(buf, max_length);
       }
     }

-    if (this->response_triggers_.size() == 1) {
-      // if there is only one trigger, no need to copy the response body
-      this->response_triggers_[0]->process(container, response_body);
-    } else {
-      for (auto *trigger : this->response_triggers_) {
-        // with multiple triggers, pass a copy of the response body to each
-        // one so that modifications made in one trigger are not visible to
-        // the others
-        auto response_body_copy = std::string(response_body);
-        trigger->process(container, response_body_copy);
-      }
+    for (auto *trigger : this->response_triggers_) {
+      trigger->process(container, response_body);
     }
     container->end();
   }
@@ -312,6 +312,7 @@ async def to_code(config):
         esp32.add_idf_component(
             name="esp-tflite-micro",
             repo="https://github.com/espressif/esp-tflite-micro",
+            ref="v1.3.1",
         )

     cg.add_build_flag("-DTF_LITE_STATIC_MEMORY")
@@ -293,4 +293,4 @@ async def to_code(config):
     if CONF_HUMIDITY_SETPOINT in config:
         sens = await sensor.new_sensor(config[CONF_HUMIDITY_SETPOINT])
         cg.add(var.set_humidity_setpoint_sensor(sens))
-    cg.add_library("dudanov/MideaUART", "1.1.9")
+    cg.add_library("dudanov/MideaUART", "1.1.8")
@@ -116,8 +116,7 @@ void ModbusController::on_modbus_read_registers(uint8_t function_code, uint16_t
         ESP_LOGD(TAG, "Matched register. Address: 0x%02X. Value type: %zu. Register count: %u. Value: %0.1f.",
                  server_register->address, static_cast<uint8_t>(server_register->value_type),
                  server_register->register_count, value);
-        std::vector<uint16_t> payload = float_to_payload(value, server_register->value_type);
-        sixteen_bit_response.insert(sixteen_bit_response.end(), payload.cbegin(), payload.cend());
+        number_to_payload(sixteen_bit_response, value, server_register->value_type);
         current_address += server_register->register_count;
         found = true;
         break;
@@ -15,7 +15,7 @@ void ModbusTextSensor::parse_and_publish(const std::vector<uint8_t> &data) {
   std::ostringstream output;
   uint8_t items_left = this->response_bytes;
   uint8_t index = this->offset;
-  char buffer[5];
+  char buffer[4];
   while ((items_left > 0) && index < data.size()) {
     uint8_t b = data[index];
     switch (this->encode_) {
@@ -26,6 +26,7 @@ from esphome.const import (
     DEVICE_CLASS_BATTERY,
     DEVICE_CLASS_CARBON_DIOXIDE,
     DEVICE_CLASS_CARBON_MONOXIDE,
+    DEVICE_CLASS_CONDUCTIVITY,
     DEVICE_CLASS_CURRENT,
     DEVICE_CLASS_DATA_RATE,
     DEVICE_CLASS_DATA_SIZE,
@@ -82,6 +83,7 @@ DEVICE_CLASSES = [
     DEVICE_CLASS_BATTERY,
     DEVICE_CLASS_CARBON_DIOXIDE,
     DEVICE_CLASS_CARBON_MONOXIDE,
+    DEVICE_CLASS_CONDUCTIVITY,
     DEVICE_CLASS_CURRENT,
     DEVICE_CLASS_DATA_RATE,
     DEVICE_CLASS_DATA_SIZE,
@@ -42,6 +42,14 @@ COLOR_ORDERS = {
 }
 DATA_PIN_SCHEMA = pins.internal_gpio_output_pin_schema

+
+def validate_dimension(value):
+    value = cv.positive_int(value)
+    if value % 2 != 0:
+        raise cv.Invalid("Width/height/offset must be divisible by 2")
+    return value
+
+
 CONFIG_SCHEMA = cv.All(
     display.FULL_DISPLAY_SCHEMA.extend(
         cv.Schema(
@@ -52,10 +60,14 @@ CONFIG_SCHEMA = cv.All(
                     cv.dimensions,
                     cv.Schema(
                         {
-                            cv.Required(CONF_WIDTH): cv.int_,
-                            cv.Required(CONF_HEIGHT): cv.int_,
-                            cv.Optional(CONF_OFFSET_HEIGHT, default=0): cv.int_,
-                            cv.Optional(CONF_OFFSET_WIDTH, default=0): cv.int_,
+                            cv.Required(CONF_WIDTH): validate_dimension,
+                            cv.Required(CONF_HEIGHT): validate_dimension,
+                            cv.Optional(
+                                CONF_OFFSET_HEIGHT, default=0
+                            ): validate_dimension,
+                            cv.Optional(
+                                CONF_OFFSET_WIDTH, default=0
+                            ): validate_dimension,
                         }
                     ),
                 ),
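In config terms, the stricter schema means every value in the display's `dimensions` block must now be even; a minimal illustrative sketch (the model name, id and concrete numbers are assumptions, not taken from this diff):

```yaml
display:
  - platform: qspi_amoled
    model: RM690B0          # assumed model name
    id: my_display
    dimensions:
      width: 450            # must be divisible by 2
      height: 600           # must be divisible by 2
      offset_width: 16      # must be divisible by 2
      offset_height: 0
```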
@@ -25,7 +25,23 @@ void QspiAmoLed::setup() {
 }

 void QspiAmoLed::update() {
+  if (!this->setup_complete_) {
+    return;
+  }
   this->do_update_();
+  // Start addresses and widths/heights must be divisible by 2 (CASET/RASET restriction in datasheet)
+  if (this->x_low_ % 2 == 1) {
+    this->x_low_--;
+  }
+  if (this->x_high_ % 2 == 0) {
+    this->x_high_++;
+  }
+  if (this->y_low_ % 2 == 1) {
+    this->y_low_--;
+  }
+  if (this->y_high_ % 2 == 0) {
+    this->y_high_++;
+  }
   int w = this->x_high_ - this->x_low_ + 1;
   int h = this->y_high_ - this->y_low_ + 1;
   this->draw_pixels_at(this->x_low_, this->y_low_, w, h, this->buffer_, this->color_mode_, display::COLOR_BITNESS_565,
@@ -65,13 +65,10 @@ class QspiAmoLed : public display::DisplayBuffer,

   void set_reset_pin(GPIOPin *reset_pin) { this->reset_pin_ = reset_pin; }
   void set_enable_pin(GPIOPin *enable_pin) { this->enable_pin_ = enable_pin; }
   void set_width(uint16_t width) { this->width_ = width; }
   void set_dimensions(uint16_t width, uint16_t height) {
     this->width_ = width;
     this->height_ = height;
   }
   int get_width() override { return this->width_; }
   int get_height() override { return this->height_; }
   void set_invert_colors(bool invert_colors) {
     this->invert_colors_ = invert_colors;
     this->reset_params_();
@@ -8,10 +8,10 @@ static const char *const TAG = "remote.dooya";

 static const uint32_t HEADER_HIGH_US = 5000;
 static const uint32_t HEADER_LOW_US = 1500;
-static const uint32_t BIT_ZERO_HIGH_US = 750;
-static const uint32_t BIT_ZERO_LOW_US = 350;
-static const uint32_t BIT_ONE_HIGH_US = 350;
-static const uint32_t BIT_ONE_LOW_US = 750;
+static const uint32_t BIT_ZERO_HIGH_US = 350;
+static const uint32_t BIT_ZERO_LOW_US = 750;
+static const uint32_t BIT_ONE_HIGH_US = 750;
+static const uint32_t BIT_ONE_LOW_US = 350;

 void DooyaProtocol::encode(RemoteTransmitData *dst, const DooyaData &data) {
   dst->set_carrier_frequency(0);
@@ -56,20 +56,21 @@ CONFIG_SCHEMA = cv.All(

 @coroutine_with_priority(50.0)
 async def to_code(config):
-    if not config[CONF_DISABLED]:
-        var = cg.new_Pvariable(config[CONF_ID])
-        await cg.register_component(var, config)
+    if config[CONF_DISABLED]:
+        return

-        for conf in config.get(CONF_ON_SAFE_MODE, []):
-            trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
-            await automation.build_automation(trigger, [], conf)
+    var = cg.new_Pvariable(config[CONF_ID])
+    await cg.register_component(var, config)

-        condition = var.should_enter_safe_mode(
-            config[CONF_NUM_ATTEMPTS],
-            config[CONF_REBOOT_TIMEOUT],
-            config[CONF_BOOT_IS_GOOD_AFTER],
-        )
-        cg.add(RawExpression(f"if ({condition}) return"))
+    for conf in config.get(CONF_ON_SAFE_MODE, []):
+        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
+        await automation.build_automation(trigger, [], conf)

+    condition = var.should_enter_safe_mode(
+        config[CONF_NUM_ATTEMPTS],
+        config[CONF_REBOOT_TIMEOUT],
+        config[CONF_BOOT_IS_GOOD_AFTER],
+    )
+    cg.add(RawExpression(f"if ({condition}) return"))
     CORE.data[CONF_SAFE_MODE] = {}
     CORE.data[CONF_SAFE_MODE][KEY_PAST_SAFE_MODE] = True
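For reference, a minimal safe_mode YAML sketch that exercises the options this to_code() reads (the concrete values and the logger action are illustrative assumptions, not part of this diff):

```yaml
safe_mode:
  disabled: false
  num_attempts: 10
  reboot_timeout: 5min
  boot_is_good_after: 1min
  on_safe_mode:
    - logger.log: "Device entered safe mode"
```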
@@ -88,7 +88,7 @@ def validate_parameter_name(value):
     raise cv.Invalid(f"Script's parameter name cannot be {CONF_ID}")


-ALLOWED_PARAM_TYPE_CHARSET = set("abcdefghijklmnopqrstuvwxyz0123456789_:*&[]")
+ALLOWED_PARAM_TYPE_CHARSET = set("abcdefghijklmnopqrstuvwxyz0123456789_:*&[]<>")


 def validate_parameter_type(value):
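Allowing `<` and `>` in the charset lets script parameters declare templated C++ types; a small illustrative sketch (the script id, parameter name and lambda body are assumptions):

```yaml
script:
  - id: log_sizes
    parameters:
      sizes: std::vector<int>
    then:
      - lambda: |-
          ESP_LOGI("script", "received %d sizes", (int) sizes.size());
```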
@@ -2,7 +2,7 @@ import subprocess

 import esphome.codegen as cg
 import esphome.config_validation as cv
-from esphome.components import display
+from esphome.components import display, key_provider
 from esphome.const import (
     CONF_ID,
     CONF_DIMENSIONS,
@@ -12,8 +12,10 @@ from esphome.const import (
     PLATFORM_HOST,
 )

+AUTO_LOAD = ["key_provider"]
+
 sdl_ns = cg.esphome_ns.namespace("sdl")
-Sdl = sdl_ns.class_("Sdl", display.Display, cg.Component)
+Sdl = sdl_ns.class_("Sdl", display.Display, key_provider.KeyProvider)

 CONF_SDL_OPTIONS = "sdl_options"
@@ -84,6 +84,10 @@ void Sdl::loop() {
       }
       break;

+    case SDL_KEYUP:
+      this->send_key_(e.key.keysym.sym);
+      break;
+
     default:
       ESP_LOGV(TAG, "Event %d", e.type);
       break;
@@ -1,10 +1,11 @@
 #pragma once

 #ifdef USE_HOST
+#include "esphome/components/display/display.h"
+#include "esphome/components/key_provider/key_provider.h"
+#include "esphome/core/application.h"
 #include "esphome/core/component.h"
 #include "esphome/core/log.h"
-#include "esphome/core/application.h"
-#include "esphome/components/display/display.h"
 #define SDL_MAIN_HANDLED
 #include "SDL.h"
@@ -13,7 +14,7 @@ namespace sdl {

 constexpr static const char *const TAG = "sdl";

-class Sdl : public display::Display {
+class Sdl : public display::Display, public key_provider::KeyProvider {
  public:
   display::DisplayType get_display_type() override { return display::DISPLAY_TYPE_COLOR; }
   void update() override;
@@ -43,6 +43,7 @@ from esphome.const import (
     DEVICE_CLASS_BATTERY,
     DEVICE_CLASS_CARBON_DIOXIDE,
     DEVICE_CLASS_CARBON_MONOXIDE,
+    DEVICE_CLASS_CONDUCTIVITY,
     DEVICE_CLASS_CURRENT,
     DEVICE_CLASS_DATA_RATE,
     DEVICE_CLASS_DATA_SIZE,
@@ -103,6 +104,7 @@ DEVICE_CLASSES = [
     DEVICE_CLASS_BATTERY,
     DEVICE_CLASS_CARBON_DIOXIDE,
     DEVICE_CLASS_CARBON_MONOXIDE,
+    DEVICE_CLASS_CONDUCTIVITY,
     DEVICE_CLASS_CURRENT,
     DEVICE_CLASS_DATA_RATE,
     DEVICE_CLASS_DATA_SIZE,
@@ -258,6 +258,7 @@ KEY_UART_DEVICES = "uart_devices"

 def final_validate_device_schema(
     name: str,
     *,
     uart_bus: str = CONF_UART_ID,
     baud_rate: Optional[int] = None,
     require_tx: bool = False,
     require_rx: bool = False,
@@ -268,7 +269,7 @@ def final_validate_device_schema(
     def validate_baud_rate(value):
         if value != baud_rate:
             raise cv.Invalid(
-                f"Component {name} requires baud rate {baud_rate} for the uart bus"
+                f"Component {name} requires baud rate {baud_rate} for the uart referenced by {uart_bus}"
             )
         return value
@@ -287,21 +288,21 @@ def final_validate_device_schema(
     def validate_data_bits(value):
         if value != data_bits:
             raise cv.Invalid(
-                f"Component {name} requires {data_bits} data bits for the uart bus"
+                f"Component {name} requires {data_bits} data bits for the uart referenced by {uart_bus}"
             )
         return value

     def validate_parity(value):
         if value != parity:
             raise cv.Invalid(
-                f"Component {name} requires parity {parity} for the uart bus"
+                f"Component {name} requires parity {parity} for the uart referenced by {uart_bus}"
             )
         return value

     def validate_stop_bits(value):
         if value != stop_bits:
             raise cv.Invalid(
-                f"Component {name} requires {stop_bits} stop bits for the uart bus"
+                f"Component {name} requires {stop_bits} stop bits for the uart referenced by {uart_bus}"
             )
         return value
@@ -316,14 +317,14 @@ def final_validate_device_schema(
         hub_schema[
             cv.Required(
                 CONF_TX_PIN,
-                msg=f"Component {name} requires this uart bus to declare a tx_pin",
+                msg=f"Component {name} requires uart referenced by {uart_bus} to declare a tx_pin",
             )
         ] = validate_pin(CONF_TX_PIN, device)
     if require_rx and uart_id_type_str in NATIVE_UART_CLASSES:
         hub_schema[
             cv.Required(
                 CONF_RX_PIN,
-                msg=f"Component {name} requires this uart bus to declare a rx_pin",
+                msg=f"Component {name} requires uart referenced by {uart_bus} to declare a rx_pin",
             )
         ] = validate_pin(CONF_RX_PIN, device)
     if baud_rate is not None:
@@ -337,7 +338,7 @@ def final_validate_device_schema(
         return cv.Schema(hub_schema, extra=cv.ALLOW_EXTRA)(hub_config)

     return cv.Schema(
-        {cv.Required(CONF_UART_ID): fv.id_declaration_match_schema(validate_hub)},
+        {cv.Required(uart_bus): fv.id_declaration_match_schema(validate_hub)},
         extra=cv.ALLOW_EXTRA,
     )
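A hedged sketch of the configuration this validates: a component whose schema is built with final_validate_device_schema() is checked against the uart bus its uart_id points at (the modbus platform, pin numbers and baud rate below are illustrative assumptions):

```yaml
uart:
  - id: my_uart
    tx_pin: GPIO17
    rx_pin: GPIO16
    baud_rate: 9600

modbus:
  - id: my_modbus
    uart_id: my_uart   # final validation runs against this bus
```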
@@ -69,7 +69,7 @@ async def setup_update_core_(var, config):
         await mqtt.register_mqtt_component(mqtt_, config)

     if web_server_id_config := config.get(CONF_WEB_SERVER_ID):
        web_server_ = await cg.get_variable(web_server_id_config)
        web_server_ = cg.get_variable(web_server_id_config)
         web_server.add_entity_to_sorting_list(web_server_, var, config)
@@ -243,7 +243,7 @@ ErrorCode VEML7700Component::configure_() {
   }

   PSMRegister psm{0};
-  psm.PSM = PSM::PSM_MODE_1;
+  psm.PSM = PSMMode::PSM_MODE_1;
   psm.PSM_EN = false;
   ESP_LOGV(TAG, "Setting PSM to 0x%04X", psm.raw);
   err = this->write_register((uint8_t) CommandRegisters::PWR_SAVING, psm.raw_bytes, VEML_REG_SIZE);
@@ -24,7 +24,7 @@ enum class CommandRegisters : uint8_t {
   ALS_INT = 0x06  // R: ALS INT trigger event
 };

-enum Gain : uint8_t {
+enum Gain : uint16_t {
   X_1 = 0,
   X_2 = 1,
   X_1_8 = 2,
@@ -32,7 +32,7 @@ enum Gain : uint8_t {
 };
 const uint8_t GAINS_COUNT = 4;

-enum IntegrationTime : uint8_t {
+enum IntegrationTime : uint16_t {
   INTEGRATION_TIME_25MS = 0b1100,
   INTEGRATION_TIME_50MS = 0b1000,
   INTEGRATION_TIME_100MS = 0b0000,
@@ -42,14 +42,14 @@ enum IntegrationTime : uint8_t {
 };
 const uint8_t INTEGRATION_TIMES_COUNT = 6;

-enum Persistence : uint8_t {
+enum Persistence : uint16_t {
   PERSISTENCE_1 = 0,
   PERSISTENCE_2 = 1,
   PERSISTENCE_4 = 2,
   PERSISTENCE_8 = 3,
 };

-enum PSM : uint8_t {
+enum PSMMode : uint16_t {
   PSM_MODE_1 = 0,
   PSM_MODE_2 = 1,
   PSM_MODE_3 = 2,
@@ -92,7 +92,7 @@ union PSMRegister {
   uint8_t raw_bytes[2];
   struct {
     bool PSM_EN : 1;
-    uint8_t PSM : 2;
+    PSMMode PSM : 2;
     uint16_t reserved : 13;
   } __attribute__((packed));
 };
@@ -799,7 +799,7 @@ void VoiceAssistant::on_audio(const api::VoiceAssistantAudio &msg) {
     this->speaker_buffer_index_ += msg.data.length();
     this->speaker_buffer_size_ += msg.data.length();
     this->speaker_bytes_received_ += msg.data.length();
-    ESP_LOGV(TAG, "Received audio: %" PRId32 " bytes from API", msg.data.length());
+    ESP_LOGV(TAG, "Received audio: %u bytes from API", msg.data.length());
   } else {
     ESP_LOGE(TAG, "Cannot receive audio, buffer is full");
   }
@@ -27,7 +27,13 @@ CONFIG_SCHEMA = cv.All(
             cv.Optional(CONF_INITIAL_VALUE, default=1.0): cv.float_range(
                 min=0.01, max=1.0
             ),
-            cv.Optional(CONF_STEP_DELAY, default=1): cv.int_range(min=1, max=100),
+            cv.Optional(CONF_STEP_DELAY, default="1us"): cv.All(
+                cv.positive_time_period_microseconds,
+                cv.Range(
+                    min=cv.TimePeriod(microseconds=1),
+                    max=cv.TimePeriod(microseconds=100),
+                ),
+            ),
         }
     )
 )
@@ -1,6 +1,6 @@
 """Constants used by esphome."""

-__version__ = "2024.6.3"
+__version__ = "2024.7.0-dev"

 ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
 VALID_SUBSTITUTIONS_CHARACTERS = (
@@ -1070,6 +1070,7 @@ DEVICE_CLASS_BUTTON = "button"
 DEVICE_CLASS_CARBON_DIOXIDE = "carbon_dioxide"
 DEVICE_CLASS_CARBON_MONOXIDE = "carbon_monoxide"
 DEVICE_CLASS_COLD = "cold"
+DEVICE_CLASS_CONDUCTIVITY = "conductivity"
 DEVICE_CLASS_CONNECTIVITY = "connectivity"
 DEVICE_CLASS_CURRENT = "current"
 DEVICE_CLASS_CURTAIN = "curtain"
@@ -10,7 +10,6 @@ int_ = global_ns.namespace("int")
 std_ns = global_ns.namespace("std")
 std_shared_ptr = std_ns.class_("shared_ptr")
 std_string = std_ns.class_("string")
-std_string_ref = std_ns.namespace("string &")
 std_vector = std_ns.class_("vector")
 uint8 = global_ns.namespace("uint8_t")
 uint16 = global_ns.namespace("uint16_t")
esphome/idf_component.yml (new file, 13 lines)

@@ -0,0 +1,13 @@
+dependencies:
+  esp-tflite-micro:
+    git: https://github.com/espressif/esp-tflite-micro.git
+    version: v1.3.1
+  esp32_camera:
+    git: https://github.com/espressif/esp32-camera.git
+    version: v2.0.9
+  mdns:
+    git: https://github.com/espressif/esp-protocols.git
+    version: mdns-v1.2.5
+    path: components/mdns
+    rules:
+      - if: "idf_version >=5.0"
@@ -64,7 +64,7 @@ lib_deps =
     freekode/TM1651@1.0.1 ; tm1651
     glmnet/Dsmr@0.7 ; dsmr
     rweather/Crypto@0.4.0 ; dsmr
-    dudanov/MideaUART@1.1.9 ; midea
+    dudanov/MideaUART@1.1.8 ; midea
     tonia/HeatpumpIR@1.0.23 ; heatpumpir
 build_flags =
     ${common.build_flags}
@@ -142,7 +142,6 @@ platform_packages =
 framework = espidf
 lib_deps =
     ${common:idf.lib_deps}
-    espressif/esp32-camera@1.0.0 ; esp32_camera
     droscy/esp_wireguard@0.4.1 ; wireguard
 build_flags =
     ${common:idf.build_flags}
@@ -101,8 +101,10 @@ def clang_options(idedata):
     # add library include directories using -isystem to suppress their errors
     for directory in sorted(set(idedata["includes"]["build"])):
         # skip our own directories, we add those later
-        if not directory.startswith(f"{root_path}/") or directory.startswith(
-            f"{root_path}/.pio/"
+        if (
+            not directory.startswith(f"{root_path}/")
+            or directory.startswith(f"{root_path}/.pio/")
+            or directory.startswith(f"{root_path}/managed_components/")
         ):
             cmd.extend(["-isystem", directory])
@@ -10,6 +10,7 @@ from homeassistant.components.event import EventDeviceClass
 from homeassistant.components.number import NumberDeviceClass
 from homeassistant.components.sensor import SensorDeviceClass
 from homeassistant.components.switch import SwitchDeviceClass
+from homeassistant.components.update import UpdateDeviceClass
 from homeassistant.components.valve import ValveDeviceClass

 # pylint: enable=import-error
@@ -27,6 +28,7 @@ DOMAINS = {
     "number": NumberDeviceClass,
     "sensor": SensorDeviceClass,
     "switch": SwitchDeviceClass,
+    "update": UpdateDeviceClass,
     "valve": ValveDeviceClass,
 }
@@ -24,8 +24,8 @@ fi

 start_esphome() {
   # create dynamic yaml file in `build` folder.
-  # `./tests/test_build_components/build/[target_component].[test_name].[target_platform].yaml`
-  component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform.yaml"
+  # `./tests/test_build_components/build/[target_component].[test_name].[target_platform_with_version].yaml`
+  component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform_with_version.yaml"

   cp $target_platform_file $component_test_file
   if [[ "$OSTYPE" == "darwin"* ]]; then
@@ -36,7 +36,7 @@ start_esphome() {
   fi

   # Start esphome process
-  echo "> [$target_component] [$test_name] [$target_platform]"
+  echo "> [$target_component] [$test_name] [$target_platform_with_version]"
   set -x
   # TODO: Validate escape of Command line substitution value
   python -m esphome -s component_name $target_component -s component_dir ../../components/$target_component -s test_name $test_name -s target_platform $target_platform $esphome_command $component_test_file
@@ -76,16 +76,17 @@ for f in ./tests/components/$target_component/*.*.yaml; do
     # 2. `./tests/test_build_components/build_components_base.[target_platform]-ard.yaml`
     target_platform_file="./tests/test_build_components/build_components_base.$target_platform.yaml"
     if ! [ -f "$target_platform_file" ]; then
-      # Try find arduino test framework as platform.
-      target_platform_ard="$target_platform-ard"
-      target_platform_file="./tests/test_build_components/build_components_base.$target_platform_ard.yaml"
-      if ! [ -f "$target_platform_file" ]; then
-        echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml, ./tests/build_components_base.$target_platform_ard.yaml] for component test [$f] found."
-        exit 1
-      fi
-      target_platform=$target_platform_ard
+      echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml] for component test [$f] found."
+      exit 1
     fi

-    start_esphome
+    for target_platform_file in ./tests/test_build_components/build_components_base.$target_platform*.yaml; do
+      # trim off "./tests/test_build_components/build_components_base." prefix
+      target_platform_with_version=${target_platform_file:52}
+      # ...now remove suffix starting with "." leaving just the test target hardware and software platform (possibly with version)
+      # For example: "esp32-s3-idf-50"
+      target_platform_with_version=${target_platform_with_version%.*}
+      start_esphome
+    done
   fi
 done
@@ -1,9 +1,7 @@
 uart:
   - id: uart_a01nyub
-    tx_pin:
-      number: 4
-    rx_pin:
-      number: 5
+    tx_pin: ${tx_pin}
+    rx_pin: ${rx_pin}
     baud_rate: 9600

 sensor:
@@ -1,13 +1,5 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 4
-    rx_pin:
-      number: 5
-    baud_rate: 9600
+substitutions:
+  tx_pin: GPIO4
+  rx_pin: GPIO5

-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
+<<: !include common.yaml
@@ -1,13 +1,5 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 17
-    rx_pin:
-      number: 16
-    baud_rate: 9600
+substitutions:
+  tx_pin: GPIO17
+  rx_pin: GPIO16

-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
+<<: !include common.yaml
@@ -1,13 +0,0 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 17
-    rx_pin:
-      number: 16
-    baud_rate: 9600
-
-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
@@ -1,13 +0,0 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 4
-    rx_pin:
-      number: 5
-    baud_rate: 9600
-
-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
@@ -1,13 +0,0 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 4
-    rx_pin:
-      number: 5
-    baud_rate: 9600
-
-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
Some files were not shown because too many files have changed in this diff.