mirror of https://github.com/esphome/esphome.git
synced 2025-11-02 16:11:53 +00:00

Compare commits
56 commits: 2024.6.6 ... jesserockz
Commits in this comparison:

dfb98b523f, 17204baac0, 1e05bcaa61, 18690d51f5, 2aacf14e96, 9c5507ab46,
0a9703bff9, 67bd5db6d6, 6c11f0bd51, e7556271e7, 8045b889d3, 6f074d3692,
b09781afa5, 96f1a146a6, 775e03cfd9, 80e5e19956, 8f16268572, ba11f2ab0c,
8567877f07, 310f850ee4, 896cdab22d, ed6462fa00, 65b05af014, c18056bdda,
65a79acfb9, 18d331d284, 7d642147c1, 4c313bc198, a78b2d0128, f6848fe24d,
a59c9b4f77, c30913ccde, 41f810f828, d604c8ae64, 67d8c7c691, 015cd42a2e,
51c5d1714c, 1ff302b341, 6b89763ad6, 253303f3a9, d49f2cbec8, 290816be11,
2fc43fa9c7, 5adadeaa07, 761aae6f89, b29e1acab8, 49d4260cfe, c4c46c206f,
8453d9a70d, 68dbf35b09, 1a242f94db, df52bc3493, 2044c7e4d4, b401b5eca8,
67f41a0c72, bd7e8fbf86
.github/actions/build-image/action.yaml (vendored, 4 changed lines)

@@ -46,7 +46,7 @@ runs:
      - name: Build and push to ghcr by digest
        id: build-ghcr
-       uses: docker/build-push-action@v6.0.1
+       uses: docker/build-push-action@v6.1.0
        with:
          context: .
          file: ./docker/Dockerfile

@@ -69,7 +69,7 @@ runs:
      - name: Build and push to dockerhub by digest
        id: build-dockerhub
-       uses: docker/build-push-action@v6.0.1
+       uses: docker/build-push-action@v6.1.0
        with:
          context: .
          file: ./docker/Dockerfile
.github/workflows/ci-api-proto.yml (vendored, 2 changed lines)

@@ -21,7 +21,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Set up Python
        uses: actions/setup-python@v5.1.0
        with:
.github/workflows/ci-docker.yml (vendored, 2 changed lines)

@@ -40,7 +40,7 @@ jobs:
        arch: [amd64, armv7, aarch64]
        build_type: ["ha-addon", "docker", "lint"]
    steps:
-     - uses: actions/checkout@v4.1.6
+     - uses: actions/checkout@v4.1.7
      - name: Set up Python
        uses: actions/setup-python@v5.1.0
        with:
.github/workflows/ci.yml (vendored, 39 changed lines)

@@ -34,7 +34,7 @@ jobs:
      cache-key: ${{ steps.cache-key.outputs.key }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Generate cache-key
        id: cache-key
        run: echo key="${{ hashFiles('requirements.txt', 'requirements_optional.txt', 'requirements_test.txt') }}" >> $GITHUB_OUTPUT

@@ -66,7 +66,7 @@ jobs:
      - common
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -87,7 +87,7 @@ jobs:
      - common
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -108,7 +108,7 @@ jobs:
      - common
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -129,7 +129,7 @@ jobs:
      - common
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -150,7 +150,7 @@ jobs:
      - common
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -199,7 +199,7 @@ jobs:
      - common
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -229,7 +229,7 @@ jobs:
      - common
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -254,7 +254,7 @@ jobs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Find all YAML test files
        id: set-matrix
        run: echo "matrix=$(ls tests/test*.yaml | jq -R -s -c 'split("\n")[:-1]')" >> $GITHUB_OUTPUT

@@ -271,7 +271,7 @@ jobs:
        file: ${{ fromJson(needs.compile-tests-list.outputs.matrix) }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -303,7 +303,7 @@ jobs:
        file: ${{ fromJson(needs.compile-tests-list.outputs.matrix) }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -358,7 +358,7 @@ jobs:

    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -387,6 +387,13 @@ jobs:
          echo "::add-matcher::.github/workflows/matchers/gcc.json"
          echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"

+     - name: Run 'pio run --list-targets -e esp32-idf-tidy'
+       if: matrix.name == 'Run script/clang-tidy for ESP32 IDF'
+       run: |
+         . venv/bin/activate
+         mkdir -p .temp
+         pio run --list-targets -e esp32-idf-tidy
+
      - name: Run clang-tidy
        run: |
          . venv/bin/activate

@@ -410,7 +417,7 @@ jobs:
      count: ${{ steps.list-components.outputs.count }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
        with:
          # Fetch enough history so `git merge-base refs/remotes/origin/dev HEAD` works.
          fetch-depth: 500

@@ -458,7 +465,7 @@ jobs:
        run: sudo apt-get install libsodium-dev libsdl2-dev

      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:

@@ -484,7 +491,7 @@ jobs:
      matrix: ${{ steps.split.outputs.components }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Split components into 20 groups
        id: split
        run: |

@@ -512,7 +519,7 @@ jobs:
        run: sudo apt-get install libsodium-dev libsdl2-dev

      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
.github/workflows/release.yml (vendored, 8 changed lines)

@@ -19,7 +19,7 @@ jobs:
      tag: ${{ steps.tag.outputs.tag }}
      branch_build: ${{ steps.tag.outputs.branch_build }}
    steps:
-     - uses: actions/checkout@v4.1.6
+     - uses: actions/checkout@v4.1.7
      - name: Get tag
        id: tag
        # yamllint disable rule:line-length

@@ -51,7 +51,7 @@ jobs:
      contents: read
      id-token: write
    steps:
-     - uses: actions/checkout@v4.1.6
+     - uses: actions/checkout@v4.1.7
      - name: Set up Python
        uses: actions/setup-python@v5.1.0
        with:

@@ -83,7 +83,7 @@ jobs:
          - linux/arm/v7
          - linux/arm64
    steps:
-     - uses: actions/checkout@v4.1.6
+     - uses: actions/checkout@v4.1.7
      - name: Set up Python
        uses: actions/setup-python@v5.1.0
        with:

@@ -174,7 +174,7 @@ jobs:
          - ghcr
          - dockerhub
    steps:
-     - uses: actions/checkout@v4.1.6
+     - uses: actions/checkout@v4.1.7

      - name: Download digests
        uses: actions/download-artifact@v4.1.7
.github/workflows/sync-device-classes.yml (vendored, 6 changed lines)

@@ -13,10 +13,10 @@ jobs:
    if: github.repository == 'esphome/esphome'
    steps:
      - name: Checkout
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7

      - name: Checkout Home Assistant
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
        with:
          repository: home-assistant/core
          path: lib/home-assistant

@@ -36,7 +36,7 @@ jobs:
          python ./script/sync-device_class.py

      - name: Commit changes
-       uses: peter-evans/create-pull-request@v6.0.5
+       uses: peter-evans/create-pull-request@v6.1.0
        with:
          commit-message: "Synchronise Device Classes from Home Assistant"
          committer: esphomebot <esphome@nabucasa.com>
.github/workflows/yaml-lint.yml (vendored, 2 changed lines)

@@ -18,7 +18,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.1.6
+       uses: actions/checkout@v4.1.7
      - name: Run yamllint
        uses: frenck/action-yamllint@v1.5.0
        with:
@@ -129,13 +129,13 @@ class Cover : public EntityBase, public EntityBase_DeviceClass {
   *
   * This is a legacy method and may be removed later, please use `.make_call()` instead.
   */
-  ESPDEPRECATED("open() is deprecated, use make_call().set_command_open() instead.", "2021.9")
+  ESPDEPRECATED("open() is deprecated, use make_call().set_command_open().perform() instead.", "2021.9")
  void open();
  /** Close the cover.
   *
   * This is a legacy method and may be removed later, please use `.make_call()` instead.
   */
-  ESPDEPRECATED("close() is deprecated, use make_call().set_command_close() instead.", "2021.9")
+  ESPDEPRECATED("close() is deprecated, use make_call().set_command_close().perform() instead.", "2021.9")
  void close();
  /** Stop the cover.
   *
@@ -17,7 +17,7 @@ from esphome.const import (
    CONF_VSYNC_PIN,
)
from esphome.core import CORE
-from esphome.components.esp32 import add_idf_sdkconfig_option
+from esphome.components.esp32 import add_idf_component
from esphome.cpp_helpers import setup_entity

DEPENDENCIES = ["esp32"]

@@ -290,8 +290,11 @@ async def to_code(config):
    cg.add_define("USE_ESP32_CAMERA")

    if CORE.using_esp_idf:
-       cg.add_library("espressif/esp32-camera", "1.0.0")
-       add_idf_sdkconfig_option("CONFIG_RTCIO_SUPPORT_RTC_GPIO_DESC", True)
+       add_idf_component(
+           name="esp32-camera",
+           repo="https://github.com/espressif/esp32-camera.git",
+           ref="v2.0.9",
+       )

    for conf in config.get(CONF_ON_STREAM_START, []):
        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
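For context, `add_idf_component` registers an ESP-IDF managed component for the build instead of a PlatformIO lib_dep. A minimal sketch of a hypothetical component's `to_code` gating that call on the ESP-IDF framework; only the import path and the keyword arguments (`name`, `repo`, `ref`) visible in the diff above are assumed, everything else is illustrative:

```python
# Hypothetical component sketch; add_idf_component and its keyword arguments
# come from the diff above, the define and component itself are made up.
import esphome.codegen as cg
from esphome.components.esp32 import add_idf_component
from esphome.core import CORE

DEPENDENCIES = ["esp32"]


async def to_code(config):
    cg.add_define("USE_MY_CAMERA_FEATURE")  # placeholder define
    if CORE.using_esp_idf:
        # Pull the esp32-camera managed component when building with esp-idf.
        add_idf_component(
            name="esp32-camera",
            repo="https://github.com/espressif/esp32-camera.git",
            ref="v2.0.9",
        )
```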
@@ -631,7 +631,7 @@ void EthernetComponent::write_phy_register_(esp_eth_mac_t *mac, PHYRegister regi
  ESPHL_ERROR_CHECK(err, "Writing PHY Register failed");

  if (this->type_ == ETHERNET_TYPE_RTL8201 && register_data.page) {
-   ESP_LOGD(TAG, "Select PHY Register Page 0x%02" PRIX32, 0x0);
+   ESP_LOGD(TAG, "Select PHY Register Page 0x00");
    err = mac->write_phy_reg(mac, this->phy_addr_, eth_phy_psr_reg_addr, 0x0);
    ESPHL_ERROR_CHECK(err, "Select PHY Register Page 0 failed");
  }
@@ -9,6 +9,10 @@ static const char *const TAG = "gpio.one_wire";

void GPIOOneWireBus::setup() {
  ESP_LOGCONFIG(TAG, "Setting up 1-wire bus...");
  this->t_pin_->setup();
  // clear bus with 480µs high, otherwise initial reset in search might fail
  this->t_pin_->pin_mode(gpio::FLAG_INPUT | gpio::FLAG_PULLUP);
  delayMicroseconds(480);
  this->search();
}
@@ -90,7 +90,7 @@ std::shared_ptr<HttpContainer> HttpRequestIDF::start(std::string url, std::strin
  int write_left = body_len;
  int write_index = 0;
  const char *buf = body.c_str();
- while (body_len > 0) {
+ while (write_left > 0) {
    int written = esp_http_client_write(client, buf + write_index, write_left);
    if (written < 0) {
      err = ESP_FAIL;
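The fix above makes the outer loop track `write_left` (bytes still to send), which is decremented after each chunk, rather than the unchanging `body_len`, so partial writes terminate correctly. A standalone sketch of that partial-write pattern in Python (purely illustrative, not ESPHome code):

```python
# Keep writing until write_left reaches zero, advancing the offset by however
# many bytes the transport accepted on each call.
def write_all(send, buf: bytes) -> int:
    write_left = len(buf)
    write_index = 0
    while write_left > 0:
        written = send(buf[write_index:write_index + write_left])
        if written < 0:
            raise IOError("write failed")
        write_index += written
        write_left -= written
    return write_index


# Example with an in-memory "transport" that accepts at most 4 bytes per call.
if __name__ == "__main__":
    out = bytearray()

    def send(chunk: bytes) -> int:
        taken = chunk[:4]
        out.extend(taken)
        return len(taken)

    assert write_all(send, b"hello world") == 11
    assert bytes(out) == b"hello world"
```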
@@ -46,7 +46,7 @@ void WatchdogManager::set_timeout_(uint32_t timeout_ms) {
  };
  esp_task_wdt_reconfigure(&wdt_config);
#else
- esp_task_wdt_init(timeout_ms, true);
+ esp_task_wdt_init(timeout_ms / 1000, true);
#endif  // ESP_IDF_VERSION_MAJOR
#endif  // USE_ESP32
@@ -312,6 +312,7 @@ async def to_code(config):
        esp32.add_idf_component(
            name="esp-tflite-micro",
            repo="https://github.com/espressif/esp-tflite-micro",
+           ref="v1.3.1",
        )

    cg.add_build_flag("-DTF_LITE_STATIC_MEMORY")
@@ -26,6 +26,7 @@ from esphome.const import (
    DEVICE_CLASS_BATTERY,
    DEVICE_CLASS_CARBON_DIOXIDE,
    DEVICE_CLASS_CARBON_MONOXIDE,
+   DEVICE_CLASS_CONDUCTIVITY,
    DEVICE_CLASS_CURRENT,
    DEVICE_CLASS_DATA_RATE,
    DEVICE_CLASS_DATA_SIZE,

@@ -82,6 +83,7 @@ DEVICE_CLASSES = [
    DEVICE_CLASS_BATTERY,
    DEVICE_CLASS_CARBON_DIOXIDE,
    DEVICE_CLASS_CARBON_MONOXIDE,
+   DEVICE_CLASS_CONDUCTIVITY,
    DEVICE_CLASS_CURRENT,
    DEVICE_CLASS_DATA_RATE,
    DEVICE_CLASS_DATA_SIZE,
@@ -42,6 +42,14 @@ COLOR_ORDERS = {
}
DATA_PIN_SCHEMA = pins.internal_gpio_output_pin_schema


+def validate_dimension(value):
+    value = cv.positive_int(value)
+    if value % 2 != 0:
+        raise cv.Invalid("Width/height/offset must be divisible by 2")
+    return value
+
+
CONFIG_SCHEMA = cv.All(
    display.FULL_DISPLAY_SCHEMA.extend(
        cv.Schema(

@@ -52,10 +60,14 @@ CONFIG_SCHEMA = cv.All(
                cv.dimensions,
                cv.Schema(
                    {
-                       cv.Required(CONF_WIDTH): cv.int_,
-                       cv.Required(CONF_HEIGHT): cv.int_,
-                       cv.Optional(CONF_OFFSET_HEIGHT, default=0): cv.int_,
-                       cv.Optional(CONF_OFFSET_WIDTH, default=0): cv.int_,
+                       cv.Required(CONF_WIDTH): validate_dimension,
+                       cv.Required(CONF_HEIGHT): validate_dimension,
+                       cv.Optional(
+                           CONF_OFFSET_HEIGHT, default=0
+                       ): validate_dimension,
+                       cv.Optional(
+                           CONF_OFFSET_WIDTH, default=0
+                       ): validate_dimension,
                    }
                ),
            ),
@@ -25,7 +25,23 @@ void QspiAmoLed::setup() {
}

void QspiAmoLed::update() {
  if (!this->setup_complete_) {
    return;
  }
  this->do_update_();
+ // Start addresses and widths/heights must be divisible by 2 (CASET/RASET restriction in datasheet)
+ if (this->x_low_ % 2 == 1) {
+   this->x_low_--;
+ }
+ if (this->x_high_ % 2 == 0) {
+   this->x_high_++;
+ }
+ if (this->y_low_ % 2 == 1) {
+   this->y_low_--;
+ }
+ if (this->y_high_ % 2 == 0) {
+   this->y_high_++;
+ }
  int w = this->x_high_ - this->x_low_ + 1;
  int h = this->y_high_ - this->y_low_ + 1;
  this->draw_pixels_at(this->x_low_, this->y_low_, w, h, this->buffer_, this->color_mode_, display::COLOR_BITNESS_565,
@@ -65,13 +65,10 @@ class QspiAmoLed : public display::DisplayBuffer,

  void set_reset_pin(GPIOPin *reset_pin) { this->reset_pin_ = reset_pin; }
  void set_enable_pin(GPIOPin *enable_pin) { this->enable_pin_ = enable_pin; }
  void set_width(uint16_t width) { this->width_ = width; }
  void set_dimensions(uint16_t width, uint16_t height) {
    this->width_ = width;
    this->height_ = height;
  }
  int get_width() override { return this->width_; }
  int get_height() override { return this->height_; }
  void set_invert_colors(bool invert_colors) {
    this->invert_colors_ = invert_colors;
    this->reset_params_();
@@ -8,10 +8,10 @@ static const char *const TAG = "remote.dooya";

static const uint32_t HEADER_HIGH_US = 5000;
static const uint32_t HEADER_LOW_US = 1500;
-static const uint32_t BIT_ZERO_HIGH_US = 750;
-static const uint32_t BIT_ZERO_LOW_US = 350;
-static const uint32_t BIT_ONE_HIGH_US = 350;
-static const uint32_t BIT_ONE_LOW_US = 750;
+static const uint32_t BIT_ZERO_HIGH_US = 350;
+static const uint32_t BIT_ZERO_LOW_US = 750;
+static const uint32_t BIT_ONE_HIGH_US = 750;
+static const uint32_t BIT_ONE_LOW_US = 350;

void DooyaProtocol::encode(RemoteTransmitData *dst, const DooyaData &data) {
  dst->set_carrier_frequency(0);
@@ -88,7 +88,7 @@ def validate_parameter_name(value):
    raise cv.Invalid(f"Script's parameter name cannot be {CONF_ID}")


-ALLOWED_PARAM_TYPE_CHARSET = set("abcdefghijklmnopqrstuvwxyz0123456789_:*&[]")
+ALLOWED_PARAM_TYPE_CHARSET = set("abcdefghijklmnopqrstuvwxyz0123456789_:*&[]<>")


def validate_parameter_type(value):
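Adding `<` and `>` to the allowed character set lets script parameter types name C++ templates such as `std::vector<int>`. A standalone sketch of a charset check, assuming only the constant shown above (the helper function is hypothetical, not ESPHome's actual validator):

```python
# Standalone illustration of the charset above; the check function is made up.
ALLOWED_PARAM_TYPE_CHARSET = set("abcdefghijklmnopqrstuvwxyz0123456789_:*&[]<>")


def type_is_allowed(type_str: str) -> bool:
    # Every character of the type string must appear in the allowed set.
    return set(type_str) <= ALLOWED_PARAM_TYPE_CHARSET


assert type_is_allowed("std::vector<int>")        # accepted now that "<>" is allowed
assert not type_is_allowed("std::map<int, int>")  # comma and space are still rejected
```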
@@ -2,7 +2,7 @@ import subprocess

import esphome.codegen as cg
import esphome.config_validation as cv
-from esphome.components import display
+from esphome.components import display, key_provider
from esphome.const import (
    CONF_ID,
    CONF_DIMENSIONS,

@@ -12,8 +12,10 @@ from esphome.const import (
    PLATFORM_HOST,
)

+AUTO_LOAD = ["key_provider"]
+
sdl_ns = cg.esphome_ns.namespace("sdl")
-Sdl = sdl_ns.class_("Sdl", display.Display, cg.Component)
+Sdl = sdl_ns.class_("Sdl", display.Display, key_provider.KeyProvider)

CONF_SDL_OPTIONS = "sdl_options"
@@ -84,6 +84,10 @@ void Sdl::loop() {
      }
      break;

+   case SDL_KEYUP:
+     this->send_key_(e.key.keysym.sym);
+     break;
+
    default:
      ESP_LOGV(TAG, "Event %d", e.type);
      break;
@@ -1,10 +1,11 @@
#pragma once

#ifdef USE_HOST
+#include "esphome/components/display/display.h"
+#include "esphome/components/key_provider/key_provider.h"
+#include "esphome/core/application.h"
#include "esphome/core/component.h"
#include "esphome/core/log.h"
-#include "esphome/core/application.h"
-#include "esphome/components/display/display.h"
#define SDL_MAIN_HANDLED
#include "SDL.h"

@@ -13,7 +14,7 @@ namespace sdl {

constexpr static const char *const TAG = "sdl";

-class Sdl : public display::Display {
+class Sdl : public display::Display, public key_provider::KeyProvider {
 public:
  display::DisplayType get_display_type() override { return display::DISPLAY_TYPE_COLOR; }
  void update() override;
@@ -43,6 +43,7 @@ from esphome.const import (
    DEVICE_CLASS_BATTERY,
    DEVICE_CLASS_CARBON_DIOXIDE,
    DEVICE_CLASS_CARBON_MONOXIDE,
+   DEVICE_CLASS_CONDUCTIVITY,
    DEVICE_CLASS_CURRENT,
    DEVICE_CLASS_DATA_RATE,
    DEVICE_CLASS_DATA_SIZE,

@@ -103,6 +104,7 @@ DEVICE_CLASSES = [
    DEVICE_CLASS_BATTERY,
    DEVICE_CLASS_CARBON_DIOXIDE,
    DEVICE_CLASS_CARBON_MONOXIDE,
+   DEVICE_CLASS_CONDUCTIVITY,
    DEVICE_CLASS_CURRENT,
    DEVICE_CLASS_DATA_RATE,
    DEVICE_CLASS_DATA_SIZE,
@@ -258,6 +258,7 @@ KEY_UART_DEVICES = "uart_devices"

def final_validate_device_schema(
    name: str,
    *,
+   uart_bus: str = CONF_UART_ID,
    baud_rate: Optional[int] = None,
    require_tx: bool = False,
    require_rx: bool = False,

@@ -268,7 +269,7 @@ def final_validate_device_schema(
    def validate_baud_rate(value):
        if value != baud_rate:
            raise cv.Invalid(
-               f"Component {name} requires baud rate {baud_rate} for the uart bus"
+               f"Component {name} requires baud rate {baud_rate} for the uart referenced by {uart_bus}"
            )
        return value

@@ -287,21 +288,21 @@ def final_validate_device_schema(
    def validate_data_bits(value):
        if value != data_bits:
            raise cv.Invalid(
-               f"Component {name} requires {data_bits} data bits for the uart bus"
+               f"Component {name} requires {data_bits} data bits for the uart referenced by {uart_bus}"
            )
        return value

    def validate_parity(value):
        if value != parity:
            raise cv.Invalid(
-               f"Component {name} requires parity {parity} for the uart bus"
+               f"Component {name} requires parity {parity} for the uart referenced by {uart_bus}"
            )
        return value

    def validate_stop_bits(value):
        if value != stop_bits:
            raise cv.Invalid(
-               f"Component {name} requires {stop_bits} stop bits for the uart bus"
+               f"Component {name} requires {stop_bits} stop bits for the uart referenced by {uart_bus}"
            )
        return value

@@ -316,14 +317,14 @@ def final_validate_device_schema(
        hub_schema[
            cv.Required(
                CONF_TX_PIN,
-               msg=f"Component {name} requires this uart bus to declare a tx_pin",
+               msg=f"Component {name} requires uart referenced by {uart_bus} to declare a tx_pin",
            )
        ] = validate_pin(CONF_TX_PIN, device)
    if require_rx and uart_id_type_str in NATIVE_UART_CLASSES:
        hub_schema[
            cv.Required(
                CONF_RX_PIN,
-               msg=f"Component {name} requires this uart bus to declare a rx_pin",
+               msg=f"Component {name} requires uart referenced by {uart_bus} to declare a rx_pin",
            )
        ] = validate_pin(CONF_RX_PIN, device)
    if baud_rate is not None:

@@ -337,7 +338,7 @@ def final_validate_device_schema(
        return cv.Schema(hub_schema, extra=cv.ALLOW_EXTRA)(hub_config)

    return cv.Schema(
-       {cv.Required(CONF_UART_ID): fv.id_declaration_match_schema(validate_hub)},
+       {cv.Required(uart_bus): fv.id_declaration_match_schema(validate_hub)},
        extra=cv.ALLOW_EXTRA,
    )
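The new `uart_bus` keyword lets a component point final validation at a UART id key other than the default `uart_id`, and the error messages now name that key. A sketch of a hypothetical component using it; only `final_validate_device_schema` and the keyword arguments shown above are assumed, while the component name and the `modem_uart` key are made up for illustration:

```python
# Hypothetical component config sketch (not an actual ESPHome component).
import esphome.config_validation as cv
from esphome.components import uart

CONF_MODEM_UART = "modem_uart"

CONFIG_SCHEMA = cv.Schema(
    {
        # This component references its UART under a custom key.
        cv.Required(CONF_MODEM_UART): cv.use_id(uart.UARTComponent),
    }
).extend(cv.COMPONENT_SCHEMA)

# Validate the UART referenced by "modem_uart" rather than the default uart_id.
FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema(
    "my_modem",
    uart_bus=CONF_MODEM_UART,
    baud_rate=115200,
    require_tx=True,
    require_rx=True,
)
```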
@@ -4,11 +4,13 @@ import esphome.config_validation as cv
import esphome.codegen as cg
from esphome.const import (
    CONF_DEVICE_CLASS,
+   CONF_ENTITY_CATEGORY,
    CONF_ID,
    CONF_MQTT_ID,
    CONF_WEB_SERVER_ID,
    DEVICE_CLASS_EMPTY,
    DEVICE_CLASS_FIRMWARE,
+   ENTITY_CATEGORY_CONFIG,
)
from esphome.core import CORE, coroutine_with_priority
from esphome.cpp_helpers import setup_entity

@@ -41,6 +43,9 @@ UPDATE_SCHEMA = (
        cv.Optional(CONF_ON_UPDATE_AVAILABLE): automation.validate_automation(
            single=True
        ),
+       cv.Optional(
+           CONF_ENTITY_CATEGORY, default=ENTITY_CATEGORY_CONFIG
+       ): cv.entity_category,
    }
    )
)
@@ -1,9 +1,35 @@
#include "update_entity.h"

#include "esphome/core/log.h"

namespace esphome {
namespace update {

static const char *const TAG = "update";

void UpdateEntity::publish_state() {
  ESP_LOGD(TAG, "'%s' - Publishing:", this->name_.c_str());
  ESP_LOGD(TAG, "  Current Version: %s", this->update_info_.current_version.c_str());

  if (!this->update_info_.md5.empty()) {
    ESP_LOGD(TAG, "  Latest Version: %s", this->update_info_.latest_version.c_str());
  }
  if (!this->update_info_.firmware_url.empty()) {
    ESP_LOGD(TAG, "  Firmware URL: %s", this->update_info_.firmware_url.c_str());
  }

  ESP_LOGD(TAG, "  Title: %s", this->update_info_.title.c_str());
  if (!this->update_info_.summary.empty()) {
    ESP_LOGD(TAG, "  Summary: %s", this->update_info_.summary.c_str());
  }
  if (!this->update_info_.release_url.empty()) {
    ESP_LOGD(TAG, "  Release URL: %s", this->update_info_.release_url.c_str());
  }

  if (this->update_info_.has_progress) {
    ESP_LOGD(TAG, "  Progress: %.0f%%", this->update_info_.progress);
  }

  this->has_state_ = true;
  this->state_callback_.call();
}
@@ -243,7 +243,7 @@ ErrorCode VEML7700Component::configure_() {
  }

  PSMRegister psm{0};
- psm.PSM = PSM::PSM_MODE_1;
+ psm.PSM = PSMMode::PSM_MODE_1;
  psm.PSM_EN = false;
  ESP_LOGV(TAG, "Setting PSM to 0x%04X", psm.raw);
  err = this->write_register((uint8_t) CommandRegisters::PWR_SAVING, psm.raw_bytes, VEML_REG_SIZE);
@@ -24,7 +24,7 @@ enum class CommandRegisters : uint8_t {
  ALS_INT = 0x06  // R: ALS INT trigger event
};

-enum Gain : uint8_t {
+enum Gain : uint16_t {
  X_1 = 0,
  X_2 = 1,
  X_1_8 = 2,

@@ -32,7 +32,7 @@ enum Gain : uint8_t {
};
const uint8_t GAINS_COUNT = 4;

-enum IntegrationTime : uint8_t {
+enum IntegrationTime : uint16_t {
  INTEGRATION_TIME_25MS = 0b1100,
  INTEGRATION_TIME_50MS = 0b1000,
  INTEGRATION_TIME_100MS = 0b0000,

@@ -42,14 +42,14 @@ enum IntegrationTime : uint8_t {
};
const uint8_t INTEGRATION_TIMES_COUNT = 6;

-enum Persistence : uint8_t {
+enum Persistence : uint16_t {
  PERSISTENCE_1 = 0,
  PERSISTENCE_2 = 1,
  PERSISTENCE_4 = 2,
  PERSISTENCE_8 = 3,
};

-enum PSM : uint8_t {
+enum PSMMode : uint16_t {
  PSM_MODE_1 = 0,
  PSM_MODE_2 = 1,
  PSM_MODE_3 = 2,

@@ -92,7 +92,7 @@ union PSMRegister {
  uint8_t raw_bytes[2];
  struct {
    bool PSM_EN : 1;
-   uint8_t PSM : 2;
+   PSMMode PSM : 2;
    uint16_t reserved : 13;
  } __attribute__((packed));
};
@@ -799,7 +799,7 @@ void VoiceAssistant::on_audio(const api::VoiceAssistantAudio &msg) {
    this->speaker_buffer_index_ += msg.data.length();
    this->speaker_buffer_size_ += msg.data.length();
    this->speaker_bytes_received_ += msg.data.length();
-   ESP_LOGV(TAG, "Received audio: %" PRId32 " bytes from API", msg.data.length());
+   ESP_LOGV(TAG, "Received audio: %u bytes from API", msg.data.length());
  } else {
    ESP_LOGE(TAG, "Cannot receive audio, buffer is full");
  }
@@ -27,7 +27,13 @@ CONFIG_SCHEMA = cv.All(
            cv.Optional(CONF_INITIAL_VALUE, default=1.0): cv.float_range(
                min=0.01, max=1.0
            ),
-           cv.Optional(CONF_STEP_DELAY, default=1): cv.int_range(min=1, max=100),
+           cv.Optional(CONF_STEP_DELAY, default="1us"): cv.All(
+               cv.positive_time_period_microseconds,
+               cv.Range(
+                   min=cv.TimePeriod(microseconds=1),
+                   max=cv.TimePeriod(microseconds=100),
+               ),
+           ),
        }
    )
)
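With this change the option is parsed as a time period instead of a bare integer, so configurations can state the unit explicitly (for example `5us`). A small sketch of the same validator pattern, assuming only the `esphome.config_validation` helpers that appear in the diff above:

```python
# Sketch of the time-period validator pattern used above.
import esphome.config_validation as cv

STEP_DELAY_VALIDATOR = cv.All(
    cv.positive_time_period_microseconds,
    cv.Range(
        min=cv.TimePeriod(microseconds=1),
        max=cv.TimePeriod(microseconds=100),
    ),
)

# "5us" normalizes to a TimePeriod; values outside 1-100 µs raise cv.Invalid.
print(STEP_DELAY_VALIDATOR("5us"))
```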
@@ -58,17 +58,21 @@ def merge_config(full_old, full_new):
        ids = {
            v_id: i
            for i, v in enumerate(res)
-           if (v_id := v.get(CONF_ID)) and isinstance(v_id, str)
+           if isinstance(v, dict)
+           and (v_id := v.get(CONF_ID))
+           and isinstance(v_id, str)
        }
        extend_ids = {
            v_id.value: i
            for i, v in enumerate(res)
-           if (v_id := v.get(CONF_ID)) and isinstance(v_id, Extend)
+           if isinstance(v, dict)
+           and (v_id := v.get(CONF_ID))
+           and isinstance(v_id, Extend)
        }

        ids_to_delete = []
        for v in new:
-           if new_id := v.get(CONF_ID):
+           if isinstance(v, dict) and (new_id := v.get(CONF_ID)):
                if isinstance(new_id, Extend):
                    new_id = new_id.value
                if new_id in ids:
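The added `isinstance(v, dict)` guards matter because list entries in a merged config are not always mappings; calling `.get()` on, say, a plain string raises `AttributeError`. A standalone illustration with a hypothetical helper (not the actual `merge_config`):

```python
# Hypothetical helper: list items that are not dicts (e.g. plain strings) must
# be skipped before calling .get(), otherwise building the index would crash.
CONF_ID = "id"


def index_by_id(items):
    return {
        v_id: i
        for i, v in enumerate(items)
        if isinstance(v, dict)
        and (v_id := v.get(CONF_ID))
        and isinstance(v_id, str)
    }


items = [{"id": "uart_a", "baud_rate": 9600}, "just_a_string", {"name": "no id"}]
assert index_by_id(items) == {"uart_a": 0}
```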
@@ -1,6 +1,6 @@
"""Constants used by esphome."""

-__version__ = "2024.6.1"
+__version__ = "2024.7.0-dev"

ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
VALID_SUBSTITUTIONS_CHARACTERS = (

@@ -1070,6 +1070,7 @@ DEVICE_CLASS_BUTTON = "button"
DEVICE_CLASS_CARBON_DIOXIDE = "carbon_dioxide"
DEVICE_CLASS_CARBON_MONOXIDE = "carbon_monoxide"
DEVICE_CLASS_COLD = "cold"
+DEVICE_CLASS_CONDUCTIVITY = "conductivity"
DEVICE_CLASS_CONNECTIVITY = "connectivity"
DEVICE_CLASS_CURRENT = "current"
DEVICE_CLASS_CURTAIN = "curtain"
esphome/idf_component.yml (new file, 13 lines)

@@ -0,0 +1,13 @@
+dependencies:
+  esp-tflite-micro:
+    git: https://github.com/espressif/esp-tflite-micro.git
+    version: v1.3.1
+  esp32_camera:
+    git: https://github.com/espressif/esp32-camera.git
+    version: v2.0.9
+  mdns:
+    git: https://github.com/espressif/esp-protocols.git
+    version: mdns-v1.2.5
+    path: components/mdns
+    rules:
+      - if: "idf_version >=5.0"
@@ -142,7 +142,6 @@ platform_packages =
framework = espidf
lib_deps =
    ${common:idf.lib_deps}
-   espressif/esp32-camera@1.0.0  ; esp32_camera
    droscy/esp_wireguard@0.4.1  ; wireguard
build_flags =
    ${common:idf.build_flags}
@@ -101,8 +101,10 @@ def clang_options(idedata):
    # add library include directories using -isystem to suppress their errors
    for directory in sorted(set(idedata["includes"]["build"])):
        # skip our own directories, we add those later
-       if not directory.startswith(f"{root_path}/") or directory.startswith(
-           f"{root_path}/.pio/"
+       if (
+           not directory.startswith(f"{root_path}/")
+           or directory.startswith(f"{root_path}/.pio/")
+           or directory.startswith(f"{root_path}/managed_components/")
        ):
            cmd.extend(["-isystem", directory])
@@ -10,6 +10,7 @@ from homeassistant.components.event import EventDeviceClass
from homeassistant.components.number import NumberDeviceClass
from homeassistant.components.sensor import SensorDeviceClass
from homeassistant.components.switch import SwitchDeviceClass
+from homeassistant.components.update import UpdateDeviceClass
from homeassistant.components.valve import ValveDeviceClass

# pylint: enable=import-error

@@ -27,6 +28,7 @@ DOMAINS = {
    "number": NumberDeviceClass,
    "sensor": SensorDeviceClass,
    "switch": SwitchDeviceClass,
+   "update": UpdateDeviceClass,
    "valve": ValveDeviceClass,
}
@@ -24,8 +24,8 @@ fi

start_esphome() {
  # create dynamic yaml file in `build` folder.
- # `./tests/test_build_components/build/[target_component].[test_name].[target_platform].yaml`
- component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform.yaml"
+ # `./tests/test_build_components/build/[target_component].[test_name].[target_platform_with_version].yaml`
+ component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform_with_version.yaml"

  cp $target_platform_file $component_test_file
  if [[ "$OSTYPE" == "darwin"* ]]; then

@@ -36,7 +36,7 @@ start_esphome() {
  fi

  # Start esphome process
- echo "> [$target_component] [$test_name] [$target_platform]"
+ echo "> [$target_component] [$test_name] [$target_platform_with_version]"
  set -x
  # TODO: Validate escape of Command line substitution value
  python -m esphome -s component_name $target_component -s component_dir ../../components/$target_component -s test_name $test_name -s target_platform $target_platform $esphome_command $component_test_file

@@ -76,16 +76,17 @@ for f in ./tests/components/$target_component/*.*.yaml; do
    # 2. `./tests/test_build_components/build_components_base.[target_platform]-ard.yaml`
    target_platform_file="./tests/test_build_components/build_components_base.$target_platform.yaml"
    if ! [ -f "$target_platform_file" ]; then
-     # Try find arduino test framework as platform.
-     target_platform_ard="$target_platform-ard"
-     target_platform_file="./tests/test_build_components/build_components_base.$target_platform_ard.yaml"
-     if ! [ -f "$target_platform_file" ]; then
-       echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml, ./tests/build_components_base.$target_platform_ard.yaml] for component test [$f] found."
-       exit 1
-     fi
-     target_platform=$target_platform_ard
+     echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml] for component test [$f] found."
+     exit 1
    fi

-   start_esphome
+   for target_platform_file in ./tests/test_build_components/build_components_base.$target_platform*.yaml; do
+     # trim off "./tests/test_build_components/build_components_base." prefix
+     target_platform_with_version=${target_platform_file:52}
+     # ...now remove suffix starting with "." leaving just the test target hardware and software platform (possibly with version)
+     # For example: "esp32-s3-idf-50"
+     target_platform_with_version=${target_platform_with_version%.*}
+     start_esphome
+   done
  fi
done
@@ -1,9 +1,7 @@
uart:
  - id: uart_a01nyub
-   tx_pin:
-     number: 4
-   rx_pin:
-     number: 5
+   tx_pin: ${tx_pin}
+   rx_pin: ${rx_pin}
    baud_rate: 9600

sensor:
@@ -1,13 +1,5 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 4
-    rx_pin:
-      number: 5
-    baud_rate: 9600
+substitutions:
+  tx_pin: GPIO4
+  rx_pin: GPIO5

-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
+<<: !include common.yaml
@@ -1,13 +1,5 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 17
-    rx_pin:
-      number: 16
-    baud_rate: 9600
+substitutions:
+  tx_pin: GPIO17
+  rx_pin: GPIO16

-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
+<<: !include common.yaml
@@ -1,13 +0,0 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 17
-    rx_pin:
-      number: 16
-    baud_rate: 9600
-
-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
@@ -1,13 +0,0 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 4
-    rx_pin:
-      number: 5
-    baud_rate: 9600
-
-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
@@ -1,13 +0,0 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin:
-      number: 4
-    rx_pin:
-      number: 5
-    baud_rate: 9600
-
-sensor:
-  - platform: a01nyub
-    id: a01nyub_sensor
-    name: a01nyub Distance
-    uart_id: uart_a01nyub
Some files were not shown because too many files have changed in this diff.