mirror of https://github.com/esphome/esphome.git synced 2025-11-03 00:21:56 +00:00

Compare commits


78 Commits

Author SHA1 Message Date
Jesse Hills
11cc5aef62 Include Arduino header when using Arduino
Co-authored-by: clydebarrow <2366188+clydebarrow@users.noreply.github.com>
2025-05-05 19:49:11 +12:00
Clyde Stubbs
3b8a5db97c [syslog] Implement logging via syslog (#8637)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-05-05 16:48:13 +12:00
Clyde Stubbs
b8d83d0765 [debug] Show source of last software reboot (#8595) 2025-05-04 23:31:37 -05:00
Clyde Stubbs
e7a2b395fd [uart] Add packet_transport platform (#8214)
Co-authored-by: Faidon Liambotis <paravoid@debian.org>
Co-authored-by: clydeps <U5yx99dok9>
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-05-05 16:15:46 +12:00
Clyde Stubbs
ad99d7fb45 [image] Support the other Pictogrammers icon sets memory: and mdil: (#8676) 2025-05-05 15:31:16 +12:00
Clyde Stubbs
0b032e5c19 [lvgl] Add refresh action to re-evaluate initial widget properties (#8675) 2025-05-05 15:26:16 +12:00
Clyde Stubbs
c7523ace78 [lvgl] Fix image property processing (#8691) 2025-05-05 12:31:22 +12:00
Clyde Stubbs
2a6827e1d2 [lvgl] Allow padding to be negative (#8671) 2025-05-05 12:30:11 +12:00
Clyde Stubbs
125aff79ec [as3935_i2c] Remove redundant includes (#8677) 2025-05-05 12:28:00 +12:00
Clyde Stubbs
a31d8ec309 [packages] Allow list instead of dict for packages (#8688) 2025-05-05 12:26:59 +12:00
Clyde Stubbs
3ed03edfec [display] Fix Rect::inside (#8679) 2025-05-05 12:04:33 +12:00
Clyde Stubbs
4dc6cbe2d7 [esp32_ble_server] Add appearance advertising field (#8672) 2025-05-05 12:02:33 +12:00
Clyde Stubbs
524cd4b4e3 [packet_transport] Extract packet encoding functionality (#8187) 2025-05-05 09:29:17 +12:00
Thomas Rupprecht
84ebbf0762 [climate_ir_lg] use this-> (#8687) 2025-05-05 09:21:57 +12:00
Thomas Rupprecht
670ad7192c unify lowercase x in hexadecimal values (#8686) 2025-05-05 08:47:57 +12:00
Pat Satyshur
bc6ee20270 Add CONF_CONTINUOUS to const.py (#8682) 2025-05-03 22:44:54 -05:00
Thomas Rupprecht
e869a3aec3 [climate] Fix typo and use `this->` (#8678) 2025-05-03 22:41:52 -05:00
Jani
8aff6d2fdd Add GDEY0583T81 support (#8668) 2025-05-03 13:02:35 +10:00
Michał Obrembski
8d33c6de36 Added Banking support to tca9555, fixed input bug (#8003)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-05-03 10:54:27 +12:00
DJTerentjev
f4b5f32cb4 Update const.py (#8665) 2025-05-01 20:43:58 -05:00
Kevin Ahrendt
2eb9582d0f [micro_wake_word] Clarify spectrogram features calculation (#8669) 2025-05-01 14:04:23 -05:00
Kevin Ahrendt
db97440b04 [microphone] Add software mute and fix wrong type for automations (#8667) 2025-05-01 14:02:33 -05:00
Kevin Ahrendt
ced7ae1d7a [debug] add missing header (#8666) 2025-05-01 08:50:32 -04:00
Trent Houliston
d6699fa3c0 Check for missed pulse_meter ISRs in the main loop (#6126) 2025-05-01 12:29:12 +00:00
functionpointer
836e5ffa43 [mlx90393] Add verification for register contents (#8279)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-05-02 00:01:02 +12:00
Kevin Ahrendt
c7f597bc75 [voice_assistant] voice assistant can configure enabled wake words (#8657) 2025-05-01 11:11:09 +00:00
Clyde Stubbs
e215fafebe [esp32, debug] Add `cpu_frequency` config option and debug sensor (#8542) 2025-05-01 03:28:07 -05:00
Ralf Habacker
da9c755f67 Add to_ntc_resistance|temperature sensor filter (esphome/feature-requests#2967) (#7898)
Co-authored-by: Clyde Stubbs <2366188+clydebarrow@users.noreply.github.com>
2025-05-01 07:53:12 +00:00
Clyde Stubbs
087ff865a7 [binary_sensor] initial state refactor (#8648)
Co-authored-by: Zsombor Welker <flaktack@welker.hu>
2025-05-01 15:58:35 +12:00
scaiper
8cd62c0308 support self-signed cert in mqtt (#8650) 2025-05-01 15:57:52 +12:00
rwrozelle
f5241ff777 Fix CONFIG_LWIP_TCP_RCV_SCALE and CONFIG_TCP_WND_DEFAULT (#8425) 2025-05-01 15:55:30 +12:00
Clyde Stubbs
1aa2b79311 [i2c] Allow buffers in PSRAM (#8640) 2025-05-01 03:54:56 +00:00
Benjamin Pearce
2dca2d5f85 Daikin IR Climate Remote Target Temperature and Fan Modes (#7946)
Co-authored-by: Benjamin Pearce <gitlab@bcpearce.com>
2025-05-01 15:52:51 +12:00
lastradanet
f03b42ced5 Adding timing budget support for vl53l0x (#7991)
Co-authored-by: Brian Davis <bdavis@mimecast.com>
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-05-01 15:17:27 +12:00
Jesse Hills
0f8a0af244 [defines] Fix USE_MICRO_WAKE_WORD position (#8663) 2025-04-30 21:32:23 -05:00
Keith Burzinski
62646f5f32 [remote_base] Fix compile error on IDF (#8664) 2025-04-30 21:30:36 -05:00
uae007
71f81d2f18 Component pca9685 - phase_begin always set to zero (#8379)
Co-authored-by: Clyde Stubbs <2366188+clydebarrow@users.noreply.github.com>
2025-05-01 02:27:59 +00:00
nworbneb
4ec8414050 [alarm_control_panel] Allow sensor to trigger when alarm disarmed (#7746) 2025-05-01 14:27:14 +12:00
Anton Sergunov
807925fd38 Fix second scrolling run issue (#8347) 2025-05-01 14:03:35 +12:00
Pat Satyshur
b597565165 Add a function to return the I2C address from an I2CDevice object (#8454)
Co-authored-by: Djordje Mandic <6750655+DjordjeMandic@users.noreply.github.com>
2025-05-01 13:14:29 +12:00
Jannik
9a9b91b180 Fix HLW8012 sensor not returning values if change_mode_every is set to never (#8456) 2025-05-01 13:12:51 +12:00
Simon
9dcf295df8 [gree] Add support for YAG remotes (#7418) 2025-05-01 13:12:17 +12:00
Andrew J.Swan
e8a3de2642 Bump FastLed version to 3.9.16 (#8402) 2025-05-01 13:07:55 +12:00
Ben Winslow
d2b4dba51f Fix typo preventing tt21100 from autosetting the touchscreen res. (#8662) 2025-05-01 12:55:36 +12:00
Kevin Ahrendt
bf527b0331 [microphone] Bugfix: protect against starting mic if already started (#8656) 2025-05-01 12:45:33 +12:00
Kevin Ahrendt
cdc77506de [micro_wake_word] add new VPE features (#8655) 2025-05-01 12:22:48 +12:00
Stanislav Meduna
6de6a0c82c Only warn if the component blocked for a longer time than the last time (#8064) 2025-05-01 11:57:01 +12:00
Kevin Ahrendt
20062576a3 [i2s_audio] Move microphone reads into a task (#8651)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-04-30 21:50:56 +12:00
Clyde Stubbs
07ba9fdf8f [canbus] Add callback for use by other components (#8578)
Co-authored-by: clydeps <U5yx99dok9>
2025-04-30 21:10:54 +12:00
Jesse Hills
caa255f5d1 [media_player] Fix actions with id as value (#8654) 2025-04-30 20:08:46 +12:00
StriboYar
c0be2c14f3 [debug] Fix compile errors when using the ESP32-C2 (#7474)
Co-authored-by: Keith Burzinski <kbx81x@gmail.com>
2025-04-30 06:15:56 +00:00
Kevin Ahrendt
9f629dcaa2 [i2s_audio, microphone, micro_wake_word, voice_assistant] Use microphone source to process incoming audio (#8645)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-04-30 10:27:03 +12:00
Thomas Rupprecht
0fe6c65ba3 [adc] sort variants and add links to reference implementations (#8327) 2025-04-29 15:08:08 -05:00
Thomas Rupprecht
c756bb3b3e [pmsa003i] code improvements (#8485) 2025-04-29 14:29:04 -05:00
Jesse Hills
ecb91b0101 [bluetooth_proxy] Allow changing active/passive via api (#8649) 2025-04-29 12:43:55 +00:00
cvwillegen
5f9a509bdc Add code to send/receive GoBox infrared control messages. (#7554)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-04-29 20:21:05 +12:00
Jesse Hills
dc6dd9fe0d Merge branch 'release' into dev 2025-04-29 14:21:09 +12:00
Jesse Hills
5baa034d0d Merge pull request #8647 from esphome/bump-2025.4.1
2025.4.1
2025-04-29 14:20:26 +12:00
Thomas Rupprecht
b8ba26787e [pmsx003] Refactor Imports, Extract Constants, Improve Data Handling & Logging (#8344) 2025-04-28 19:24:48 -05:00
Kevin Ahrendt
844569e96b [audio, microphone] Add MicrophoneSource helper class (#8641)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-04-29 00:05:07 +00:00
Jesse Hills
43580739ac Ensure new const file stays in order (#8642) 2025-04-28 18:58:13 -05:00
aanban
c9f7ab6948 add beo4_protocol to remote_base component (#8307) 2025-04-29 11:50:40 +12:00
Jesse Hills
7900660bb8 Bump version to 2025.4.1 2025-04-29 11:46:20 +12:00
Steffen Banhardt
f096567ac7 Update ens160_base.cpp – fix wrong double negative (#8639) 2025-04-29 11:46:19 +12:00
Clyde Stubbs
5bfb5ccc34 [core] Fix setting of log level/verbose (#8600) 2025-04-29 11:46:19 +12:00
Jesse Hills
1c60038111 [watchdog] Fix for variants with single core (#8602) 2025-04-29 11:46:19 +12:00
Clyde Stubbs
b940db6549 [online_image] Fix printf format; comment fixes (#8607) 2025-04-29 11:46:19 +12:00
J. Nick Koston
aa6e172e14 Fix BLE connection loop caused by timeout and pending disconnect race (#8597) 2025-04-29 11:46:19 +12:00
Clyde Stubbs
86033b6612 [lvgl] Ensure pages are created on the correct display (#8596) 2025-04-29 11:46:19 +12:00
Jesse Hills
59b4a1f554 Fix psram below idf 5 (#8584) 2025-04-29 11:46:19 +12:00
Jesse Hills
b5bdfb3089 [http_request] Fix request headers (#8644)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-04-29 11:45:41 +12:00
Jesse Hills
a31a5e74bd [const] Move CONF_GAIN_FACTOR to const.py (#8646) 2025-04-29 11:35:38 +12:00
Jesse Hills
629481a526 [esp32_ble] Remove explicit and now incorrect ble override for esp32-c6 (#8643) 2025-04-29 10:46:39 +12:00
Steffen Banhardt
3291a11824 Update ens160_base.cpp – fix wrong double negative (#8639) 2025-04-29 07:18:46 +12:00
baal86
d2ee2d3b23 Fix support for ESP32-H2 in deep_sleep (#8290) 2025-04-28 00:21:24 -05:00
Nate Clark
253e3ec6f6 [mdns] Support templatable config options for MDNS extra services (#8606) 2025-04-28 16:27:39 +12:00
Ben Winslow
fdc4ec8a57 [touchscreen] Clear interrupt flag before reading touch data. (#8632) 2025-04-28 14:29:47 +12:00
Lucas Hartmann
1da0dff8b1 Take advantage of clipping to speed image drawing. (#8630) 2025-04-28 14:18:47 +12:00
220 changed files with 6126 additions and 2633 deletions

View File

@@ -278,7 +278,7 @@ esphome/components/mdns/* @esphome/core
esphome/components/media_player/* @jesserockz
esphome/components/micro_wake_word/* @jesserockz @kahrendt
esphome/components/micronova/* @jorre05
esphome/components/microphone/* @jesserockz
esphome/components/microphone/* @jesserockz @kahrendt
esphome/components/mics_4514/* @jesserockz
esphome/components/midea/* @dudanov
esphome/components/midea_ir/* @dudanov
@@ -319,6 +319,7 @@ esphome/components/online_image/* @clydebarrow @guillempages
esphome/components/opentherm/* @olegtarasov
esphome/components/ota/* @esphome/core
esphome/components/output/* @esphome/core
esphome/components/packet_transport/* @clydebarrow
esphome/components/pca6416a/* @Mat931
esphome/components/pca9554/* @clydebarrow @hwstar
esphome/components/pcf85063/* @brogon
@@ -328,6 +329,7 @@ esphome/components/pipsolar/* @andreashergert1984
esphome/components/pm1006/* @habbie
esphome/components/pm2005/* @andrewjswan
esphome/components/pmsa003i/* @sjtrny
esphome/components/pmsx003/* @ximex
esphome/components/pmwcs3/* @SeByDocKy
esphome/components/pn532/* @OttoWinter @jesserockz
esphome/components/pn532_i2c/* @OttoWinter @jesserockz
@@ -427,6 +429,7 @@ esphome/components/sun/* @OttoWinter
esphome/components/sun_gtil2/* @Mat931
esphome/components/switch/* @esphome/core
esphome/components/switch/binary_sensor/* @ssieb
esphome/components/syslog/* @clydebarrow
esphome/components/t6615/* @tylermenezes
esphome/components/tc74/* @sethgirvan
esphome/components/tca9548a/* @andreashergert1984
@@ -466,6 +469,7 @@ esphome/components/tuya/switch/* @jesserockz
esphome/components/tuya/text_sensor/* @dentra
esphome/components/uart/* @esphome/core
esphome/components/uart/button/* @ssieb
esphome/components/uart/packet_transport/* @clydebarrow
esphome/components/udp/* @clydebarrow
esphome/components/ufire_ec/* @pvizeli
esphome/components/ufire_ise/* @pvizeli

View File

@@ -47,9 +47,10 @@ SAMPLING_MODES = {
adc1_channel_t = cg.global_ns.enum("adc1_channel_t")
adc2_channel_t = cg.global_ns.enum("adc2_channel_t")
# From https://github.com/espressif/esp-idf/blob/master/components/driver/include/driver/adc_common.h
# pin to adc1 channel mapping
# https://github.com/espressif/esp-idf/blob/v4.4.8/components/driver/include/driver/adc.h
ESP32_VARIANT_ADC1_PIN_TO_CHANNEL = {
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32/include/soc/adc_channel.h
VARIANT_ESP32: {
36: adc1_channel_t.ADC1_CHANNEL_0,
37: adc1_channel_t.ADC1_CHANNEL_1,
@@ -60,6 +61,41 @@ ESP32_VARIANT_ADC1_PIN_TO_CHANNEL = {
34: adc1_channel_t.ADC1_CHANNEL_6,
35: adc1_channel_t.ADC1_CHANNEL_7,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32c2/include/soc/adc_channel.h
VARIANT_ESP32C2: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32c3/include/soc/adc_channel.h
VARIANT_ESP32C3: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32c6/include/soc/adc_channel.h
VARIANT_ESP32C6: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
5: adc1_channel_t.ADC1_CHANNEL_5,
6: adc1_channel_t.ADC1_CHANNEL_6,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32h2/include/soc/adc_channel.h
VARIANT_ESP32H2: {
1: adc1_channel_t.ADC1_CHANNEL_0,
2: adc1_channel_t.ADC1_CHANNEL_1,
3: adc1_channel_t.ADC1_CHANNEL_2,
4: adc1_channel_t.ADC1_CHANNEL_3,
5: adc1_channel_t.ADC1_CHANNEL_4,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32s2/include/soc/adc_channel.h
VARIANT_ESP32S2: {
1: adc1_channel_t.ADC1_CHANNEL_0,
2: adc1_channel_t.ADC1_CHANNEL_1,
@@ -72,6 +108,7 @@ ESP32_VARIANT_ADC1_PIN_TO_CHANNEL = {
9: adc1_channel_t.ADC1_CHANNEL_8,
10: adc1_channel_t.ADC1_CHANNEL_9,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32s3/include/soc/adc_channel.h
VARIANT_ESP32S3: {
1: adc1_channel_t.ADC1_CHANNEL_0,
2: adc1_channel_t.ADC1_CHANNEL_1,
@@ -84,40 +121,12 @@ ESP32_VARIANT_ADC1_PIN_TO_CHANNEL = {
9: adc1_channel_t.ADC1_CHANNEL_8,
10: adc1_channel_t.ADC1_CHANNEL_9,
},
VARIANT_ESP32C3: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
},
VARIANT_ESP32C2: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
},
VARIANT_ESP32C6: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
5: adc1_channel_t.ADC1_CHANNEL_5,
6: adc1_channel_t.ADC1_CHANNEL_6,
},
VARIANT_ESP32H2: {
1: adc1_channel_t.ADC1_CHANNEL_0,
2: adc1_channel_t.ADC1_CHANNEL_1,
3: adc1_channel_t.ADC1_CHANNEL_2,
4: adc1_channel_t.ADC1_CHANNEL_3,
5: adc1_channel_t.ADC1_CHANNEL_4,
},
}
# pin to adc2 channel mapping
# https://github.com/espressif/esp-idf/blob/v4.4.8/components/driver/include/driver/adc.h
ESP32_VARIANT_ADC2_PIN_TO_CHANNEL = {
# TODO: add other variants
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32/include/soc/adc_channel.h
VARIANT_ESP32: {
4: adc2_channel_t.ADC2_CHANNEL_0,
0: adc2_channel_t.ADC2_CHANNEL_1,
@@ -130,6 +139,19 @@ ESP32_VARIANT_ADC2_PIN_TO_CHANNEL = {
25: adc2_channel_t.ADC2_CHANNEL_8,
26: adc2_channel_t.ADC2_CHANNEL_9,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32c2/include/soc/adc_channel.h
VARIANT_ESP32C2: {
5: adc2_channel_t.ADC2_CHANNEL_0,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32c3/include/soc/adc_channel.h
VARIANT_ESP32C3: {
5: adc2_channel_t.ADC2_CHANNEL_0,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32c6/include/soc/adc_channel.h
VARIANT_ESP32C6: {}, # no ADC2
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32h2/include/soc/adc_channel.h
VARIANT_ESP32H2: {}, # no ADC2
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32s2/include/soc/adc_channel.h
VARIANT_ESP32S2: {
11: adc2_channel_t.ADC2_CHANNEL_0,
12: adc2_channel_t.ADC2_CHANNEL_1,
@@ -142,6 +164,7 @@ ESP32_VARIANT_ADC2_PIN_TO_CHANNEL = {
19: adc2_channel_t.ADC2_CHANNEL_8,
20: adc2_channel_t.ADC2_CHANNEL_9,
},
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32s3/include/soc/adc_channel.h
VARIANT_ESP32S3: {
11: adc2_channel_t.ADC2_CHANNEL_0,
12: adc2_channel_t.ADC2_CHANNEL_1,
@@ -154,12 +177,6 @@ ESP32_VARIANT_ADC2_PIN_TO_CHANNEL = {
19: adc2_channel_t.ADC2_CHANNEL_8,
20: adc2_channel_t.ADC2_CHANNEL_9,
},
VARIANT_ESP32C3: {
5: adc2_channel_t.ADC2_CHANNEL_0,
},
VARIANT_ESP32C2: {},
VARIANT_ESP32C6: {},
VARIANT_ESP32H2: {},
}

View File

@@ -61,6 +61,7 @@ service APIConnection {
rpc bluetooth_gatt_notify(BluetoothGATTNotifyRequest) returns (void) {}
rpc subscribe_bluetooth_connections_free(SubscribeBluetoothConnectionsFreeRequest) returns (BluetoothConnectionsFreeResponse) {}
rpc unsubscribe_bluetooth_le_advertisements(UnsubscribeBluetoothLEAdvertisementsRequest) returns (void) {}
rpc bluetooth_scanner_set_mode(BluetoothScannerSetModeRequest) returns (void) {}
rpc subscribe_voice_assistant(SubscribeVoiceAssistantRequest) returns (void) {}
rpc voice_assistant_get_configuration(VoiceAssistantConfigurationRequest) returns (VoiceAssistantConfigurationResponse) {}
@@ -1472,6 +1473,37 @@ message BluetoothDeviceClearCacheResponse {
int32 error = 3;
}
enum BluetoothScannerState {
BLUETOOTH_SCANNER_STATE_IDLE = 0;
BLUETOOTH_SCANNER_STATE_STARTING = 1;
BLUETOOTH_SCANNER_STATE_RUNNING = 2;
BLUETOOTH_SCANNER_STATE_FAILED = 3;
BLUETOOTH_SCANNER_STATE_STOPPING = 4;
BLUETOOTH_SCANNER_STATE_STOPPED = 5;
}
enum BluetoothScannerMode {
BLUETOOTH_SCANNER_MODE_PASSIVE = 0;
BLUETOOTH_SCANNER_MODE_ACTIVE = 1;
}
message BluetoothScannerStateResponse {
option(id) = 126;
option(source) = SOURCE_SERVER;
option(ifdef) = "USE_BLUETOOTH_PROXY";
BluetoothScannerState state = 1;
BluetoothScannerMode mode = 2;
}
message BluetoothScannerSetModeRequest {
option(id) = 127;
option(source) = SOURCE_CLIENT;
option(ifdef) = "USE_BLUETOOTH_PROXY";
BluetoothScannerMode mode = 1;
}
// ==================== PUSH TO TALK ====================
enum VoiceAssistantSubscribeFlag {
VOICE_ASSISTANT_SUBSCRIBE_NONE = 0;

View File

@@ -1475,6 +1475,11 @@ BluetoothConnectionsFreeResponse APIConnection::subscribe_bluetooth_connections_
resp.limit = bluetooth_proxy::global_bluetooth_proxy->get_bluetooth_connections_limit();
return resp;
}
void APIConnection::bluetooth_scanner_set_mode(const BluetoothScannerSetModeRequest &msg) {
bluetooth_proxy::global_bluetooth_proxy->bluetooth_scanner_set_mode(
msg.mode == enums::BluetoothScannerMode::BLUETOOTH_SCANNER_MODE_ACTIVE);
}
#endif
#ifdef USE_VOICE_ASSISTANT

View File

@@ -221,6 +221,7 @@ class APIConnection : public APIServerConnection {
void bluetooth_gatt_notify(const BluetoothGATTNotifyRequest &msg) override;
BluetoothConnectionsFreeResponse subscribe_bluetooth_connections_free(
const SubscribeBluetoothConnectionsFreeRequest &msg) override;
void bluetooth_scanner_set_mode(const BluetoothScannerSetModeRequest &msg) override;
#endif
#ifdef USE_HOMEASSISTANT_TIME

View File

@@ -422,6 +422,38 @@ const char *proto_enum_to_string<enums::BluetoothDeviceRequestType>(enums::Bluet
}
#endif
#ifdef HAS_PROTO_MESSAGE_DUMP
template<> const char *proto_enum_to_string<enums::BluetoothScannerState>(enums::BluetoothScannerState value) {
switch (value) {
case enums::BLUETOOTH_SCANNER_STATE_IDLE:
return "BLUETOOTH_SCANNER_STATE_IDLE";
case enums::BLUETOOTH_SCANNER_STATE_STARTING:
return "BLUETOOTH_SCANNER_STATE_STARTING";
case enums::BLUETOOTH_SCANNER_STATE_RUNNING:
return "BLUETOOTH_SCANNER_STATE_RUNNING";
case enums::BLUETOOTH_SCANNER_STATE_FAILED:
return "BLUETOOTH_SCANNER_STATE_FAILED";
case enums::BLUETOOTH_SCANNER_STATE_STOPPING:
return "BLUETOOTH_SCANNER_STATE_STOPPING";
case enums::BLUETOOTH_SCANNER_STATE_STOPPED:
return "BLUETOOTH_SCANNER_STATE_STOPPED";
default:
return "UNKNOWN";
}
}
#endif
#ifdef HAS_PROTO_MESSAGE_DUMP
template<> const char *proto_enum_to_string<enums::BluetoothScannerMode>(enums::BluetoothScannerMode value) {
switch (value) {
case enums::BLUETOOTH_SCANNER_MODE_PASSIVE:
return "BLUETOOTH_SCANNER_MODE_PASSIVE";
case enums::BLUETOOTH_SCANNER_MODE_ACTIVE:
return "BLUETOOTH_SCANNER_MODE_ACTIVE";
default:
return "UNKNOWN";
}
}
#endif
#ifdef HAS_PROTO_MESSAGE_DUMP
template<>
const char *proto_enum_to_string<enums::VoiceAssistantSubscribeFlag>(enums::VoiceAssistantSubscribeFlag value) {
switch (value) {
@@ -6775,6 +6807,61 @@ void BluetoothDeviceClearCacheResponse::dump_to(std::string &out) const {
out.append("}");
}
#endif
bool BluetoothScannerStateResponse::decode_varint(uint32_t field_id, ProtoVarInt value) {
switch (field_id) {
case 1: {
this->state = value.as_enum<enums::BluetoothScannerState>();
return true;
}
case 2: {
this->mode = value.as_enum<enums::BluetoothScannerMode>();
return true;
}
default:
return false;
}
}
void BluetoothScannerStateResponse::encode(ProtoWriteBuffer buffer) const {
buffer.encode_enum<enums::BluetoothScannerState>(1, this->state);
buffer.encode_enum<enums::BluetoothScannerMode>(2, this->mode);
}
#ifdef HAS_PROTO_MESSAGE_DUMP
void BluetoothScannerStateResponse::dump_to(std::string &out) const {
__attribute__((unused)) char buffer[64];
out.append("BluetoothScannerStateResponse {\n");
out.append(" state: ");
out.append(proto_enum_to_string<enums::BluetoothScannerState>(this->state));
out.append("\n");
out.append(" mode: ");
out.append(proto_enum_to_string<enums::BluetoothScannerMode>(this->mode));
out.append("\n");
out.append("}");
}
#endif
bool BluetoothScannerSetModeRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
switch (field_id) {
case 1: {
this->mode = value.as_enum<enums::BluetoothScannerMode>();
return true;
}
default:
return false;
}
}
void BluetoothScannerSetModeRequest::encode(ProtoWriteBuffer buffer) const {
buffer.encode_enum<enums::BluetoothScannerMode>(1, this->mode);
}
#ifdef HAS_PROTO_MESSAGE_DUMP
void BluetoothScannerSetModeRequest::dump_to(std::string &out) const {
__attribute__((unused)) char buffer[64];
out.append("BluetoothScannerSetModeRequest {\n");
out.append(" mode: ");
out.append(proto_enum_to_string<enums::BluetoothScannerMode>(this->mode));
out.append("\n");
out.append("}");
}
#endif
bool SubscribeVoiceAssistantRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
switch (field_id) {
case 1: {

View File

@@ -169,6 +169,18 @@ enum BluetoothDeviceRequestType : uint32_t {
BLUETOOTH_DEVICE_REQUEST_TYPE_CONNECT_V3_WITHOUT_CACHE = 5,
BLUETOOTH_DEVICE_REQUEST_TYPE_CLEAR_CACHE = 6,
};
enum BluetoothScannerState : uint32_t {
BLUETOOTH_SCANNER_STATE_IDLE = 0,
BLUETOOTH_SCANNER_STATE_STARTING = 1,
BLUETOOTH_SCANNER_STATE_RUNNING = 2,
BLUETOOTH_SCANNER_STATE_FAILED = 3,
BLUETOOTH_SCANNER_STATE_STOPPING = 4,
BLUETOOTH_SCANNER_STATE_STOPPED = 5,
};
enum BluetoothScannerMode : uint32_t {
BLUETOOTH_SCANNER_MODE_PASSIVE = 0,
BLUETOOTH_SCANNER_MODE_ACTIVE = 1,
};
enum VoiceAssistantSubscribeFlag : uint32_t {
VOICE_ASSISTANT_SUBSCRIBE_NONE = 0,
VOICE_ASSISTANT_SUBSCRIBE_API_AUDIO = 1,
@@ -1742,6 +1754,29 @@ class BluetoothDeviceClearCacheResponse : public ProtoMessage {
protected:
bool decode_varint(uint32_t field_id, ProtoVarInt value) override;
};
class BluetoothScannerStateResponse : public ProtoMessage {
public:
enums::BluetoothScannerState state{};
enums::BluetoothScannerMode mode{};
void encode(ProtoWriteBuffer buffer) const override;
#ifdef HAS_PROTO_MESSAGE_DUMP
void dump_to(std::string &out) const override;
#endif
protected:
bool decode_varint(uint32_t field_id, ProtoVarInt value) override;
};
class BluetoothScannerSetModeRequest : public ProtoMessage {
public:
enums::BluetoothScannerMode mode{};
void encode(ProtoWriteBuffer buffer) const override;
#ifdef HAS_PROTO_MESSAGE_DUMP
void dump_to(std::string &out) const override;
#endif
protected:
bool decode_varint(uint32_t field_id, ProtoVarInt value) override;
};
class SubscribeVoiceAssistantRequest : public ProtoMessage {
public:
bool subscribe{false};

View File

@@ -472,6 +472,16 @@ bool APIServerConnectionBase::send_bluetooth_device_clear_cache_response(const B
return this->send_message_<BluetoothDeviceClearCacheResponse>(msg, 88);
}
#endif
#ifdef USE_BLUETOOTH_PROXY
bool APIServerConnectionBase::send_bluetooth_scanner_state_response(const BluetoothScannerStateResponse &msg) {
#ifdef HAS_PROTO_MESSAGE_DUMP
ESP_LOGVV(TAG, "send_bluetooth_scanner_state_response: %s", msg.dump().c_str());
#endif
return this->send_message_<BluetoothScannerStateResponse>(msg, 126);
}
#endif
#ifdef USE_BLUETOOTH_PROXY
#endif
#ifdef USE_VOICE_ASSISTANT
#endif
#ifdef USE_VOICE_ASSISTANT
@@ -1212,6 +1222,17 @@ bool APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
ESP_LOGVV(TAG, "on_noise_encryption_set_key_request: %s", msg.dump().c_str());
#endif
this->on_noise_encryption_set_key_request(msg);
#endif
break;
}
case 127: {
#ifdef USE_BLUETOOTH_PROXY
BluetoothScannerSetModeRequest msg;
msg.decode(msg_data, msg_size);
#ifdef HAS_PROTO_MESSAGE_DUMP
ESP_LOGVV(TAG, "on_bluetooth_scanner_set_mode_request: %s", msg.dump().c_str());
#endif
this->on_bluetooth_scanner_set_mode_request(msg);
#endif
break;
}
@@ -1705,6 +1726,19 @@ void APIServerConnection::on_unsubscribe_bluetooth_le_advertisements_request(
this->unsubscribe_bluetooth_le_advertisements(msg);
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_bluetooth_scanner_set_mode_request(const BluetoothScannerSetModeRequest &msg) {
if (!this->is_connection_setup()) {
this->on_no_setup_connection();
return;
}
if (!this->is_authenticated()) {
this->on_unauthenticated_access();
return;
}
this->bluetooth_scanner_set_mode(msg);
}
#endif
#ifdef USE_VOICE_ASSISTANT
void APIServerConnection::on_subscribe_voice_assistant_request(const SubscribeVoiceAssistantRequest &msg) {
if (!this->is_connection_setup()) {

View File

@@ -234,6 +234,12 @@ class APIServerConnectionBase : public ProtoService {
#ifdef USE_BLUETOOTH_PROXY
bool send_bluetooth_device_clear_cache_response(const BluetoothDeviceClearCacheResponse &msg);
#endif
#ifdef USE_BLUETOOTH_PROXY
bool send_bluetooth_scanner_state_response(const BluetoothScannerStateResponse &msg);
#endif
#ifdef USE_BLUETOOTH_PROXY
virtual void on_bluetooth_scanner_set_mode_request(const BluetoothScannerSetModeRequest &value){};
#endif
#ifdef USE_VOICE_ASSISTANT
virtual void on_subscribe_voice_assistant_request(const SubscribeVoiceAssistantRequest &value){};
#endif
@@ -440,6 +446,9 @@ class APIServerConnection : public APIServerConnectionBase {
#ifdef USE_BLUETOOTH_PROXY
virtual void unsubscribe_bluetooth_le_advertisements(const UnsubscribeBluetoothLEAdvertisementsRequest &msg) = 0;
#endif
#ifdef USE_BLUETOOTH_PROXY
virtual void bluetooth_scanner_set_mode(const BluetoothScannerSetModeRequest &msg) = 0;
#endif
#ifdef USE_VOICE_ASSISTANT
virtual void subscribe_voice_assistant(const SubscribeVoiceAssistantRequest &msg) = 0;
#endif
@@ -551,6 +560,9 @@ class APIServerConnection : public APIServerConnectionBase {
void on_unsubscribe_bluetooth_le_advertisements_request(
const UnsubscribeBluetoothLEAdvertisementsRequest &msg) override;
#endif
#ifdef USE_BLUETOOTH_PROXY
void on_bluetooth_scanner_set_mode_request(const BluetoothScannerSetModeRequest &msg) override;
#endif
#ifdef USE_VOICE_ASSISTANT
void on_subscribe_voice_assistant_request(const SubscribeVoiceAssistantRequest &msg) override;
#endif

View File

@@ -29,9 +29,8 @@ async def async_run_logs(config: dict[str, Any], address: str) -> None:
port: int = int(conf[CONF_PORT])
password: str = conf[CONF_PASSWORD]
noise_psk: str | None = None
if encryption_config := conf.get(CONF_ENCRYPTION):
if key := encryption_config.get(CONF_KEY):
noise_psk = key
if CONF_ENCRYPTION in conf:
noise_psk = conf[CONF_ENCRYPTION][CONF_KEY]
_LOGGER.info("Starting log output from %s using esphome API", address)
cli = APIClient(
address,

View File

@@ -1,10 +1,7 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/components/as3935/as3935.h"
#include "esphome/components/i2c/i2c.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/binary_sensor/binary_sensor.h"
namespace esphome {
namespace as3935_i2c {

View File

@@ -7,7 +7,7 @@
namespace esphome {
namespace as7341 {
static const uint8_t AS7341_CHIP_ID = 0X09;
static const uint8_t AS7341_CHIP_ID = 0x09;
static const uint8_t AS7341_CONFIG = 0x70;
static const uint8_t AS7341_LED = 0x74;

View File

@@ -48,6 +48,12 @@ def set_stream_limits(
min_sample_rate: int = _UNDEF,
max_sample_rate: int = _UNDEF,
):
"""Sets the limits for the audio stream that audio component can handle
When the component sinks audio (e.g., a speaker), these indicate the limits to the audio it can receive.
When the component sources audio (e.g., a microphone), these indicate the limits to the audio it can send.
"""
def set_limits_in_config(config):
if min_bits_per_sample is not _UNDEF:
config[CONF_MIN_BITS_PER_SAMPLE] = min_bits_per_sample
@@ -69,43 +75,87 @@ def final_validate_audio_schema(
name: str,
*,
audio_device: str,
bits_per_sample: int,
channels: int,
sample_rate: int,
bits_per_sample: int = _UNDEF,
channels: int = _UNDEF,
sample_rate: int = _UNDEF,
enabled_channels: list[int] = _UNDEF,
audio_device_issue: bool = False,
):
"""Validates audio compatibility when passed between different components.
The component derived from ``AUDIO_COMPONENT_SCHEMA`` should call ``set_stream_limits`` in a validator to specify its compatible settings
- If audio_device_issue is True, then the error message indicates the user should adjust the AUDIO_COMPONENT_SCHEMA derived component's configuration to match the values passed to this function
- If audio_device_issue is False, then the error message indicates the user should adjust the configuration of the component calling this function, as it falls out of the valid stream limits
Args:
name (str): Friendly name of the component calling this function with an audio component to validate
audio_device (str): The configuration parameter name that contains the ID of an AUDIO_COMPONENT_SCHEMA derived component to validate against
bits_per_sample (int, optional): The desired bits per sample
channels (int, optional): The desired number of channels
sample_rate (int, optional): The desired sample rate
enabled_channels (list[int], optional): The desired enabled channels
audio_device_issue (bool, optional): Format the error message to indicate the problem is in the configuration for the ``audio_device`` component. Defaults to False.
"""
def validate_audio_compatiblity(audio_config):
audio_schema = {}
try:
cv.int_range(
min=audio_config.get(CONF_MIN_BITS_PER_SAMPLE),
max=audio_config.get(CONF_MAX_BITS_PER_SAMPLE),
)(bits_per_sample)
except cv.Invalid as exc:
raise cv.Invalid(
f"Invalid configuration for the {name} component. The {CONF_BITS_PER_SAMPLE} {str(exc)}"
) from exc
if bits_per_sample is not _UNDEF:
try:
cv.int_range(
min=audio_config.get(CONF_MIN_BITS_PER_SAMPLE),
max=audio_config.get(CONF_MAX_BITS_PER_SAMPLE),
)(bits_per_sample)
except cv.Invalid as exc:
if audio_device_issue:
error_string = f"Invalid configuration for the specified {audio_device}. The {name} component requires {bits_per_sample} bits per sample."
else:
error_string = f"Invalid configuration for the {name} component. The {CONF_BITS_PER_SAMPLE} {str(exc)}"
raise cv.Invalid(error_string) from exc
try:
cv.int_range(
min=audio_config.get(CONF_MIN_CHANNELS),
max=audio_config.get(CONF_MAX_CHANNELS),
)(channels)
except cv.Invalid as exc:
raise cv.Invalid(
f"Invalid configuration for the {name} component. The {CONF_NUM_CHANNELS} {str(exc)}"
) from exc
if channels is not _UNDEF:
try:
cv.int_range(
min=audio_config.get(CONF_MIN_CHANNELS),
max=audio_config.get(CONF_MAX_CHANNELS),
)(channels)
except cv.Invalid as exc:
if audio_device_issue:
error_string = f"Invalid configuration for the specified {audio_device}. The {name} component requires {channels} channels."
else:
error_string = f"Invalid configuration for the {name} component. The {CONF_NUM_CHANNELS} {str(exc)}"
raise cv.Invalid(error_string) from exc
try:
cv.int_range(
min=audio_config.get(CONF_MIN_SAMPLE_RATE),
max=audio_config.get(CONF_MAX_SAMPLE_RATE),
)(sample_rate)
return cv.Schema(audio_schema, extra=cv.ALLOW_EXTRA)(audio_config)
except cv.Invalid as exc:
raise cv.Invalid(
f"Invalid configuration for the {name} component. The {CONF_SAMPLE_RATE} {str(exc)}"
) from exc
if sample_rate is not _UNDEF:
try:
cv.int_range(
min=audio_config.get(CONF_MIN_SAMPLE_RATE),
max=audio_config.get(CONF_MAX_SAMPLE_RATE),
)(sample_rate)
except cv.Invalid as exc:
if audio_device_issue:
error_string = f"Invalid configuration for the specified {audio_device}. The {name} component requires a {sample_rate} sample rate."
else:
error_string = f"Invalid configuration for the {name} component. The {CONF_SAMPLE_RATE} {str(exc)}"
raise cv.Invalid(error_string) from exc
if enabled_channels is not _UNDEF:
for channel in enabled_channels:
try:
# Channels are 0-indexed
cv.int_range(
min=0,
max=audio_config.get(CONF_MAX_CHANNELS) - 1,
)(channel)
except cv.Invalid as exc:
if audio_device_issue:
error_string = f"Invalid configuration for the specified {audio_device}. The {name} component requires channel {channel}."
else:
error_string = f"Invalid configuration for the {name} component. Enabled channel {channel} {str(exc)}"
raise cv.Invalid(error_string) from exc
return cv.Schema(audio_schema, extra=cv.ALLOW_EXTRA)(audio_config)
return cv.Schema(
{

View File

@@ -4,6 +4,8 @@
#include "esphome/core/hal.h"
#include <cstring>
namespace esphome {
namespace audio {

View File

@@ -6,6 +6,7 @@
#include "audio_transfer_buffer.h"
#include "esphome/core/defines.h"
#include "esphome/core/helpers.h"
#include "esphome/core/ring_buffer.h"
#ifdef USE_SPEAKER

View File

@@ -15,21 +15,17 @@ void BinarySensor::publish_state(bool state) {
if (!this->publish_dedup_.next(state))
return;
if (this->filter_list_ == nullptr) {
this->send_state_internal(state, false);
this->send_state_internal(state);
} else {
this->filter_list_->input(state, false);
this->filter_list_->input(state);
}
}
void BinarySensor::publish_initial_state(bool state) {
if (!this->publish_dedup_.next(state))
return;
if (this->filter_list_ == nullptr) {
this->send_state_internal(state, true);
} else {
this->filter_list_->input(state, true);
}
this->has_state_ = false;
this->publish_state(state);
}
void BinarySensor::send_state_internal(bool state, bool is_initial) {
void BinarySensor::send_state_internal(bool state) {
bool is_initial = !this->has_state_;
if (is_initial) {
ESP_LOGD(TAG, "'%s': Sending initial state %s", this->get_name().c_str(), ONOFF(state));
} else {

View File

@@ -67,7 +67,7 @@ class BinarySensor : public EntityBase, public EntityBase_DeviceClass {
// ========== INTERNAL METHODS ==========
// (In most use cases you won't need these)
void send_state_internal(bool state, bool is_initial);
void send_state_internal(bool state);
/// Return whether this binary sensor has outputted a state.
virtual bool has_state() const;

View File

@@ -9,37 +9,37 @@ namespace binary_sensor {
static const char *const TAG = "sensor.filter";
void Filter::output(bool value, bool is_initial) {
void Filter::output(bool value) {
if (!this->dedup_.next(value))
return;
if (this->next_ == nullptr) {
this->parent_->send_state_internal(value, is_initial);
this->parent_->send_state_internal(value);
} else {
this->next_->input(value, is_initial);
this->next_->input(value);
}
}
void Filter::input(bool value, bool is_initial) {
auto b = this->new_value(value, is_initial);
void Filter::input(bool value) {
auto b = this->new_value(value);
if (b.has_value()) {
this->output(*b, is_initial);
this->output(*b);
}
}
optional<bool> DelayedOnOffFilter::new_value(bool value, bool is_initial) {
optional<bool> DelayedOnOffFilter::new_value(bool value) {
if (value) {
this->set_timeout("ON_OFF", this->on_delay_.value(), [this, is_initial]() { this->output(true, is_initial); });
this->set_timeout("ON_OFF", this->on_delay_.value(), [this]() { this->output(true); });
} else {
this->set_timeout("ON_OFF", this->off_delay_.value(), [this, is_initial]() { this->output(false, is_initial); });
this->set_timeout("ON_OFF", this->off_delay_.value(), [this]() { this->output(false); });
}
return {};
}
float DelayedOnOffFilter::get_setup_priority() const { return setup_priority::HARDWARE; }
optional<bool> DelayedOnFilter::new_value(bool value, bool is_initial) {
optional<bool> DelayedOnFilter::new_value(bool value) {
if (value) {
this->set_timeout("ON", this->delay_.value(), [this, is_initial]() { this->output(true, is_initial); });
this->set_timeout("ON", this->delay_.value(), [this]() { this->output(true); });
return {};
} else {
this->cancel_timeout("ON");
@@ -49,9 +49,9 @@ optional<bool> DelayedOnFilter::new_value(bool value, bool is_initial) {
float DelayedOnFilter::get_setup_priority() const { return setup_priority::HARDWARE; }
optional<bool> DelayedOffFilter::new_value(bool value, bool is_initial) {
optional<bool> DelayedOffFilter::new_value(bool value) {
if (!value) {
this->set_timeout("OFF", this->delay_.value(), [this, is_initial]() { this->output(false, is_initial); });
this->set_timeout("OFF", this->delay_.value(), [this]() { this->output(false); });
return {};
} else {
this->cancel_timeout("OFF");
@@ -61,11 +61,11 @@ optional<bool> DelayedOffFilter::new_value(bool value, bool is_initial) {
float DelayedOffFilter::get_setup_priority() const { return setup_priority::HARDWARE; }
optional<bool> InvertFilter::new_value(bool value, bool is_initial) { return !value; }
optional<bool> InvertFilter::new_value(bool value) { return !value; }
AutorepeatFilter::AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings) : timings_(std::move(timings)) {}
optional<bool> AutorepeatFilter::new_value(bool value, bool is_initial) {
optional<bool> AutorepeatFilter::new_value(bool value) {
if (value) {
// Ignore if already running
if (this->active_timing_ != 0)
@@ -101,7 +101,7 @@ void AutorepeatFilter::next_timing_() {
void AutorepeatFilter::next_value_(bool val) {
const AutorepeatFilterTiming &timing = this->timings_[this->active_timing_ - 2];
this->output(val, false); // This is at least the second one so not initial
this->output(val);
this->set_timeout("ON_OFF", val ? timing.time_on : timing.time_off, [this, val]() { this->next_value_(!val); });
}
@@ -109,18 +109,18 @@ float AutorepeatFilter::get_setup_priority() const { return setup_priority::HARD
LambdaFilter::LambdaFilter(std::function<optional<bool>(bool)> f) : f_(std::move(f)) {}
optional<bool> LambdaFilter::new_value(bool value, bool is_initial) { return this->f_(value); }
optional<bool> LambdaFilter::new_value(bool value) { return this->f_(value); }
optional<bool> SettleFilter::new_value(bool value, bool is_initial) {
optional<bool> SettleFilter::new_value(bool value) {
if (!this->steady_) {
this->set_timeout("SETTLE", this->delay_.value(), [this, value, is_initial]() {
this->set_timeout("SETTLE", this->delay_.value(), [this, value]() {
this->steady_ = true;
this->output(value, is_initial);
this->output(value);
});
return {};
} else {
this->steady_ = false;
this->output(value, is_initial);
this->output(value);
this->set_timeout("SETTLE", this->delay_.value(), [this]() { this->steady_ = true; });
return value;
}

View File

@@ -14,11 +14,11 @@ class BinarySensor;
class Filter {
public:
virtual optional<bool> new_value(bool value, bool is_initial) = 0;
virtual optional<bool> new_value(bool value) = 0;
void input(bool value, bool is_initial);
void input(bool value);
void output(bool value, bool is_initial);
void output(bool value);
protected:
friend BinarySensor;
@@ -30,7 +30,7 @@ class Filter {
class DelayedOnOffFilter : public Filter, public Component {
public:
optional<bool> new_value(bool value, bool is_initial) override;
optional<bool> new_value(bool value) override;
float get_setup_priority() const override;
@@ -44,7 +44,7 @@ class DelayedOnOffFilter : public Filter, public Component {
class DelayedOnFilter : public Filter, public Component {
public:
optional<bool> new_value(bool value, bool is_initial) override;
optional<bool> new_value(bool value) override;
float get_setup_priority() const override;
@@ -56,7 +56,7 @@ class DelayedOnFilter : public Filter, public Component {
class DelayedOffFilter : public Filter, public Component {
public:
optional<bool> new_value(bool value, bool is_initial) override;
optional<bool> new_value(bool value) override;
float get_setup_priority() const override;
@@ -68,7 +68,7 @@ class DelayedOffFilter : public Filter, public Component {
class InvertFilter : public Filter {
public:
optional<bool> new_value(bool value, bool is_initial) override;
optional<bool> new_value(bool value) override;
};
struct AutorepeatFilterTiming {
@@ -86,7 +86,7 @@ class AutorepeatFilter : public Filter, public Component {
public:
explicit AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings);
optional<bool> new_value(bool value, bool is_initial) override;
optional<bool> new_value(bool value) override;
float get_setup_priority() const override;
@@ -102,7 +102,7 @@ class LambdaFilter : public Filter {
public:
explicit LambdaFilter(std::function<optional<bool>(bool)> f);
optional<bool> new_value(bool value, bool is_initial) override;
optional<bool> new_value(bool value) override;
protected:
std::function<optional<bool>(bool)> f_;
@@ -110,7 +110,7 @@ class LambdaFilter : public Filter {
class SettleFilter : public Filter, public Component {
public:
optional<bool> new_value(bool value, bool is_initial) override;
optional<bool> new_value(bool value) override;
float get_setup_priority() const override;
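
The filter API above now derives the initial-state flag internally (send_state_internal checks has_state_) instead of threading is_initial through every filter, so custom filters only override the single-argument new_value. A minimal sketch against the new signature; the class name, namespace and include path below are illustrative assumptions, not part of this change set:

#include "esphome/components/binary_sensor/filter.h"

namespace esphome {
namespace my_filters {  // hypothetical namespace, illustration only

// Forwards rising edges and drops falling edges.
class RisingOnlyFilter : public binary_sensor::Filter {
 public:
  optional<bool> new_value(bool value) override {
    if (value)
      return true;  // pass ON states down the filter chain
    return {};      // an empty optional swallows the value
  }
};

}  // namespace my_filters
}  // namespace esphome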

View File

@@ -45,7 +45,7 @@ static const uint8_t BL0906_WRITE_COMMAND = 0xCA;
static const uint8_t BL0906_V_RMS = 0x16;
// Total power
static const uint8_t BL0906_WATT_SUM = 0X2C;
static const uint8_t BL0906_WATT_SUM = 0x2C;
// Current1~6
static const uint8_t BL0906_I_1_RMS = 0x0D; // current_1
@@ -56,29 +56,29 @@ static const uint8_t BL0906_I_5_RMS = 0x13;
static const uint8_t BL0906_I_6_RMS = 0x14; // current_6
// Power1~6
static const uint8_t BL0906_WATT_1 = 0X23; // power_1
static const uint8_t BL0906_WATT_2 = 0X24;
static const uint8_t BL0906_WATT_3 = 0X25;
static const uint8_t BL0906_WATT_4 = 0X26;
static const uint8_t BL0906_WATT_5 = 0X29;
static const uint8_t BL0906_WATT_6 = 0X2A; // power_6
static const uint8_t BL0906_WATT_1 = 0x23; // power_1
static const uint8_t BL0906_WATT_2 = 0x24;
static const uint8_t BL0906_WATT_3 = 0x25;
static const uint8_t BL0906_WATT_4 = 0x26;
static const uint8_t BL0906_WATT_5 = 0x29;
static const uint8_t BL0906_WATT_6 = 0x2A; // power_6
// Active pulse count, unsigned
static const uint8_t BL0906_CF_1_CNT = 0X30; // Channel_1
static const uint8_t BL0906_CF_2_CNT = 0X31;
static const uint8_t BL0906_CF_3_CNT = 0X32;
static const uint8_t BL0906_CF_4_CNT = 0X33;
static const uint8_t BL0906_CF_5_CNT = 0X36;
static const uint8_t BL0906_CF_6_CNT = 0X37; // Channel_6
static const uint8_t BL0906_CF_1_CNT = 0x30; // Channel_1
static const uint8_t BL0906_CF_2_CNT = 0x31;
static const uint8_t BL0906_CF_3_CNT = 0x32;
static const uint8_t BL0906_CF_4_CNT = 0x33;
static const uint8_t BL0906_CF_5_CNT = 0x36;
static const uint8_t BL0906_CF_6_CNT = 0x37; // Channel_6
// Total active pulse count, unsigned
static const uint8_t BL0906_CF_SUM_CNT = 0X39;
static const uint8_t BL0906_CF_SUM_CNT = 0x39;
// Voltage frequency cycle
static const uint8_t BL0906_FREQUENCY = 0X4E;
static const uint8_t BL0906_FREQUENCY = 0x4E;
// Internal temperature
static const uint8_t BL0906_TEMPERATURE = 0X5E;
static const uint8_t BL0906_TEMPERATURE = 0x5E;
// Calibration register
// RMS gain adjustment register

View File

@@ -25,6 +25,22 @@ std::vector<uint64_t> get_128bit_uuid_vec(esp_bt_uuid_t uuid_source) {
BluetoothProxy::BluetoothProxy() { global_bluetooth_proxy = this; }
void BluetoothProxy::setup() {
this->parent_->add_scanner_state_callback([this](esp32_ble_tracker::ScannerState state) {
if (this->api_connection_ != nullptr) {
this->send_bluetooth_scanner_state_(state);
}
});
}
void BluetoothProxy::send_bluetooth_scanner_state_(esp32_ble_tracker::ScannerState state) {
api::BluetoothScannerStateResponse resp;
resp.state = static_cast<api::enums::BluetoothScannerState>(state);
resp.mode = this->parent_->get_scan_active() ? api::enums::BluetoothScannerMode::BLUETOOTH_SCANNER_MODE_ACTIVE
: api::enums::BluetoothScannerMode::BLUETOOTH_SCANNER_MODE_PASSIVE;
this->api_connection_->send_bluetooth_scanner_state_response(resp);
}
bool BluetoothProxy::parse_device(const esp32_ble_tracker::ESPBTDevice &device) {
if (!api::global_api_server->is_connected() || this->api_connection_ == nullptr || this->raw_advertisements_)
return false;
@@ -453,6 +469,8 @@ void BluetoothProxy::subscribe_api_connection(api::APIConnection *api_connection
this->api_connection_ = api_connection;
this->raw_advertisements_ = flags & BluetoothProxySubscriptionFlag::SUBSCRIPTION_RAW_ADVERTISEMENTS;
this->parent_->recalculate_advertisement_parser_types();
this->send_bluetooth_scanner_state_(this->parent_->get_scanner_state());
}
void BluetoothProxy::unsubscribe_api_connection(api::APIConnection *api_connection) {
@@ -525,6 +543,17 @@ void BluetoothProxy::send_device_unpairing(uint64_t address, bool success, esp_e
this->api_connection_->send_bluetooth_device_unpairing_response(call);
}
void BluetoothProxy::bluetooth_scanner_set_mode(bool active) {
if (this->parent_->get_scan_active() == active) {
return;
}
ESP_LOGD(TAG, "Setting scanner mode to %s", active ? "active" : "passive");
this->parent_->set_scan_active(active);
this->parent_->stop_scan();
this->parent_->set_scan_continuous(
true); // Set this to true to automatically start scanning again when it has cleaned up.
}
BluetoothProxy *global_bluetooth_proxy = nullptr; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
} // namespace bluetooth_proxy

View File

@@ -41,6 +41,7 @@ enum BluetoothProxyFeature : uint32_t {
FEATURE_PAIRING = 1 << 3,
FEATURE_CACHE_CLEARING = 1 << 4,
FEATURE_RAW_ADVERTISEMENTS = 1 << 5,
FEATURE_STATE_AND_MODE = 1 << 6,
};
enum BluetoothProxySubscriptionFlag : uint32_t {
@@ -53,6 +54,7 @@ class BluetoothProxy : public esp32_ble_tracker::ESPBTDeviceListener, public Com
bool parse_device(const esp32_ble_tracker::ESPBTDevice &device) override;
bool parse_devices(esp_ble_gap_cb_param_t::ble_scan_result_evt_param *advertisements, size_t count) override;
void dump_config() override;
void setup() override;
void loop() override;
esp32_ble_tracker::AdvertisementParserType get_advertisement_parser_type() override;
@@ -84,6 +86,8 @@ class BluetoothProxy : public esp32_ble_tracker::ESPBTDeviceListener, public Com
void send_device_unpairing(uint64_t address, bool success, esp_err_t error = ESP_OK);
void send_device_clear_cache(uint64_t address, bool success, esp_err_t error = ESP_OK);
void bluetooth_scanner_set_mode(bool active);
static void uint64_to_bd_addr(uint64_t address, esp_bd_addr_t bd_addr) {
bd_addr[0] = (address >> 40) & 0xff;
bd_addr[1] = (address >> 32) & 0xff;
@@ -107,6 +111,7 @@ class BluetoothProxy : public esp32_ble_tracker::ESPBTDeviceListener, public Com
uint32_t flags = 0;
flags |= BluetoothProxyFeature::FEATURE_PASSIVE_SCAN;
flags |= BluetoothProxyFeature::FEATURE_RAW_ADVERTISEMENTS;
flags |= BluetoothProxyFeature::FEATURE_STATE_AND_MODE;
if (this->active_) {
flags |= BluetoothProxyFeature::FEATURE_ACTIVE_CONNECTIONS;
flags |= BluetoothProxyFeature::FEATURE_REMOTE_CACHING;
@@ -124,6 +129,7 @@ class BluetoothProxy : public esp32_ble_tracker::ESPBTDeviceListener, public Com
protected:
void send_api_packet_(const esp32_ble_tracker::ESPBTDevice &device);
void send_bluetooth_scanner_state_(esp32_ble_tracker::ScannerState state);
BluetoothConnection *get_connection_(uint64_t address, bool reserve);

View File

@@ -86,6 +86,9 @@ void Canbus::loop() {
data.push_back(can_message.data[i]);
}
this->callback_manager_(can_message.can_id, can_message.use_extended_id, can_message.remote_transmission_request,
data);
// fire all triggers
for (auto *trigger : this->triggers_) {
if ((trigger->can_id_ == (can_message.can_id & trigger->can_id_mask_)) &&

View File

@@ -81,6 +81,20 @@ class Canbus : public Component {
void set_bitrate(CanSpeed bit_rate) { this->bit_rate_ = bit_rate; }
void add_trigger(CanbusTrigger *trigger);
/**
* Add a callback to be called when a CAN message is received. All received messages
* are passed to the callback without filtering.
*
* The callback function receives:
* - can_id of the received data
* - extended_id True if the can_id is an extended id
* - rtr If this is a remote transmission request
* - data The message data
*/
void add_callback(
std::function<void(uint32_t can_id, bool extended_id, bool rtr, const std::vector<uint8_t> &data)> callback) {
this->callback_manager_.add(std::move(callback));
}
protected:
template<typename... Ts> friend class CanbusSendAction;
@@ -88,6 +102,8 @@ class Canbus : public Component {
uint32_t can_id_;
bool use_extended_id_;
CanSpeed bit_rate_;
CallbackManager<void(uint32_t can_id, bool extended_id, bool rtr, const std::vector<uint8_t> &data)>
callback_manager_{};
virtual bool setup_internal();
virtual Error send_message(struct CanFrame *frame);
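
The callback hook above delivers every received frame without filtering, unlike the existing triggers, which match on can_id and mask. A rough usage sketch from a consuming component; the listener class, namespace and include paths are illustrative assumptions, though the lambda signature matches the one declared in add_callback:

#include <cinttypes>
#include <vector>

#include "esphome/components/canbus/canbus.h"
#include "esphome/core/component.h"
#include "esphome/core/log.h"

namespace esphome {
namespace can_listener {  // hypothetical component, illustration only

static const char *const TAG = "can_listener";

class CanListener : public Component {
 public:
  void set_canbus(canbus::Canbus *canbus) { this->canbus_ = canbus; }

  void setup() override {
    // Register for all frames; the bus applies no can_id filtering here.
    this->canbus_->add_callback([](uint32_t can_id, bool extended_id, bool rtr,
                                   const std::vector<uint8_t> &data) {
      ESP_LOGD(TAG, "Frame 0x%08" PRIX32 " (%s, rtr=%s), %u bytes", can_id,
               extended_id ? "extended" : "standard", YESNO(rtr),
               static_cast<unsigned>(data.size()));
    });
  }

 protected:
  canbus::Canbus *canbus_{nullptr};
};

}  // namespace can_listener
}  // namespace esphome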

View File

@@ -20,7 +20,7 @@ enum ClimateMode : uint8_t {
CLIMATE_MODE_FAN_ONLY = 4,
/// The climate device is set to dry/humidity mode
CLIMATE_MODE_DRY = 5,
/** The climate device is adjusting the temperatre dynamically.
/** The climate device is adjusting the temperature dynamically.
* For example, the target temperature can be adjusted based on a schedule, or learned behavior.
* The target temperature can't be adjusted when in this mode.
*/

View File

@@ -40,24 +40,24 @@ namespace climate {
*/
class ClimateTraits {
public:
bool get_supports_current_temperature() const { return supports_current_temperature_; }
bool get_supports_current_temperature() const { return this->supports_current_temperature_; }
void set_supports_current_temperature(bool supports_current_temperature) {
supports_current_temperature_ = supports_current_temperature;
this->supports_current_temperature_ = supports_current_temperature;
}
bool get_supports_current_humidity() const { return supports_current_humidity_; }
bool get_supports_current_humidity() const { return this->supports_current_humidity_; }
void set_supports_current_humidity(bool supports_current_humidity) {
supports_current_humidity_ = supports_current_humidity;
this->supports_current_humidity_ = supports_current_humidity;
}
bool get_supports_two_point_target_temperature() const { return supports_two_point_target_temperature_; }
bool get_supports_two_point_target_temperature() const { return this->supports_two_point_target_temperature_; }
void set_supports_two_point_target_temperature(bool supports_two_point_target_temperature) {
supports_two_point_target_temperature_ = supports_two_point_target_temperature;
this->supports_two_point_target_temperature_ = supports_two_point_target_temperature;
}
bool get_supports_target_humidity() const { return supports_target_humidity_; }
bool get_supports_target_humidity() const { return this->supports_target_humidity_; }
void set_supports_target_humidity(bool supports_target_humidity) {
supports_target_humidity_ = supports_target_humidity;
this->supports_target_humidity_ = supports_target_humidity;
}
void set_supported_modes(std::set<ClimateMode> modes) { supported_modes_ = std::move(modes); }
void add_supported_mode(ClimateMode mode) { supported_modes_.insert(mode); }
void set_supported_modes(std::set<ClimateMode> modes) { this->supported_modes_ = std::move(modes); }
void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
void set_supports_auto_mode(bool supports_auto_mode) { set_mode_support_(CLIMATE_MODE_AUTO, supports_auto_mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
@@ -72,15 +72,15 @@ class ClimateTraits {
}
ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
void set_supports_dry_mode(bool supports_dry_mode) { set_mode_support_(CLIMATE_MODE_DRY, supports_dry_mode); }
bool supports_mode(ClimateMode mode) const { return supported_modes_.count(mode); }
const std::set<ClimateMode> &get_supported_modes() const { return supported_modes_; }
bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); }
const std::set<ClimateMode> &get_supported_modes() const { return this->supported_modes_; }
void set_supports_action(bool supports_action) { supports_action_ = supports_action; }
bool get_supports_action() const { return supports_action_; }
void set_supports_action(bool supports_action) { this->supports_action_ = supports_action; }
bool get_supports_action() const { return this->supports_action_; }
void set_supported_fan_modes(std::set<ClimateFanMode> modes) { supported_fan_modes_ = std::move(modes); }
void add_supported_fan_mode(ClimateFanMode mode) { supported_fan_modes_.insert(mode); }
void add_supported_custom_fan_mode(const std::string &mode) { supported_custom_fan_modes_.insert(mode); }
void set_supported_fan_modes(std::set<ClimateFanMode> modes) { this->supported_fan_modes_ = std::move(modes); }
void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); }
void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_on(bool supported) { set_fan_mode_support_(CLIMATE_FAN_ON, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
@@ -99,35 +99,37 @@ class ClimateTraits {
void set_supports_fan_mode_focus(bool supported) { set_fan_mode_support_(CLIMATE_FAN_FOCUS, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
void set_supports_fan_mode_diffuse(bool supported) { set_fan_mode_support_(CLIMATE_FAN_DIFFUSE, supported); }
bool supports_fan_mode(ClimateFanMode fan_mode) const { return supported_fan_modes_.count(fan_mode); }
bool get_supports_fan_modes() const { return !supported_fan_modes_.empty() || !supported_custom_fan_modes_.empty(); }
const std::set<ClimateFanMode> &get_supported_fan_modes() const { return supported_fan_modes_; }
bool supports_fan_mode(ClimateFanMode fan_mode) const { return this->supported_fan_modes_.count(fan_mode); }
bool get_supports_fan_modes() const {
return !this->supported_fan_modes_.empty() || !this->supported_custom_fan_modes_.empty();
}
const std::set<ClimateFanMode> &get_supported_fan_modes() const { return this->supported_fan_modes_; }
void set_supported_custom_fan_modes(std::set<std::string> supported_custom_fan_modes) {
supported_custom_fan_modes_ = std::move(supported_custom_fan_modes);
this->supported_custom_fan_modes_ = std::move(supported_custom_fan_modes);
}
const std::set<std::string> &get_supported_custom_fan_modes() const { return supported_custom_fan_modes_; }
const std::set<std::string> &get_supported_custom_fan_modes() const { return this->supported_custom_fan_modes_; }
bool supports_custom_fan_mode(const std::string &custom_fan_mode) const {
return supported_custom_fan_modes_.count(custom_fan_mode);
return this->supported_custom_fan_modes_.count(custom_fan_mode);
}
void set_supported_presets(std::set<ClimatePreset> presets) { supported_presets_ = std::move(presets); }
void add_supported_preset(ClimatePreset preset) { supported_presets_.insert(preset); }
void add_supported_custom_preset(const std::string &preset) { supported_custom_presets_.insert(preset); }
bool supports_preset(ClimatePreset preset) const { return supported_presets_.count(preset); }
bool get_supports_presets() const { return !supported_presets_.empty(); }
const std::set<climate::ClimatePreset> &get_supported_presets() const { return supported_presets_; }
void set_supported_presets(std::set<ClimatePreset> presets) { this->supported_presets_ = std::move(presets); }
void add_supported_preset(ClimatePreset preset) { this->supported_presets_.insert(preset); }
void add_supported_custom_preset(const std::string &preset) { this->supported_custom_presets_.insert(preset); }
bool supports_preset(ClimatePreset preset) const { return this->supported_presets_.count(preset); }
bool get_supports_presets() const { return !this->supported_presets_.empty(); }
const std::set<climate::ClimatePreset> &get_supported_presets() const { return this->supported_presets_; }
void set_supported_custom_presets(std::set<std::string> supported_custom_presets) {
supported_custom_presets_ = std::move(supported_custom_presets);
this->supported_custom_presets_ = std::move(supported_custom_presets);
}
const std::set<std::string> &get_supported_custom_presets() const { return supported_custom_presets_; }
const std::set<std::string> &get_supported_custom_presets() const { return this->supported_custom_presets_; }
bool supports_custom_preset(const std::string &custom_preset) const {
return supported_custom_presets_.count(custom_preset);
return this->supported_custom_presets_.count(custom_preset);
}
void set_supported_swing_modes(std::set<ClimateSwingMode> modes) { supported_swing_modes_ = std::move(modes); }
void add_supported_swing_mode(ClimateSwingMode mode) { supported_swing_modes_.insert(mode); }
void set_supported_swing_modes(std::set<ClimateSwingMode> modes) { this->supported_swing_modes_ = std::move(modes); }
void add_supported_swing_mode(ClimateSwingMode mode) { this->supported_swing_modes_.insert(mode); }
ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
void set_supports_swing_mode_off(bool supported) { set_swing_mode_support_(CLIMATE_SWING_OFF, supported); }
ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
@@ -138,54 +140,58 @@ class ClimateTraits {
void set_supports_swing_mode_horizontal(bool supported) {
set_swing_mode_support_(CLIMATE_SWING_HORIZONTAL, supported);
}
bool supports_swing_mode(ClimateSwingMode swing_mode) const { return supported_swing_modes_.count(swing_mode); }
bool get_supports_swing_modes() const { return !supported_swing_modes_.empty(); }
const std::set<ClimateSwingMode> &get_supported_swing_modes() const { return supported_swing_modes_; }
bool supports_swing_mode(ClimateSwingMode swing_mode) const { return this->supported_swing_modes_.count(swing_mode); }
bool get_supports_swing_modes() const { return !this->supported_swing_modes_.empty(); }
const std::set<ClimateSwingMode> &get_supported_swing_modes() const { return this->supported_swing_modes_; }
float get_visual_min_temperature() const { return visual_min_temperature_; }
void set_visual_min_temperature(float visual_min_temperature) { visual_min_temperature_ = visual_min_temperature; }
float get_visual_max_temperature() const { return visual_max_temperature_; }
void set_visual_max_temperature(float visual_max_temperature) { visual_max_temperature_ = visual_max_temperature; }
float get_visual_target_temperature_step() const { return visual_target_temperature_step_; }
float get_visual_current_temperature_step() const { return visual_current_temperature_step_; }
float get_visual_min_temperature() const { return this->visual_min_temperature_; }
void set_visual_min_temperature(float visual_min_temperature) {
this->visual_min_temperature_ = visual_min_temperature;
}
float get_visual_max_temperature() const { return this->visual_max_temperature_; }
void set_visual_max_temperature(float visual_max_temperature) {
this->visual_max_temperature_ = visual_max_temperature;
}
float get_visual_target_temperature_step() const { return this->visual_target_temperature_step_; }
float get_visual_current_temperature_step() const { return this->visual_current_temperature_step_; }
void set_visual_target_temperature_step(float temperature_step) {
visual_target_temperature_step_ = temperature_step;
this->visual_target_temperature_step_ = temperature_step;
}
void set_visual_current_temperature_step(float temperature_step) {
visual_current_temperature_step_ = temperature_step;
this->visual_current_temperature_step_ = temperature_step;
}
void set_visual_temperature_step(float temperature_step) {
visual_target_temperature_step_ = temperature_step;
visual_current_temperature_step_ = temperature_step;
this->visual_target_temperature_step_ = temperature_step;
this->visual_current_temperature_step_ = temperature_step;
}
int8_t get_target_temperature_accuracy_decimals() const;
int8_t get_current_temperature_accuracy_decimals() const;
float get_visual_min_humidity() const { return visual_min_humidity_; }
void set_visual_min_humidity(float visual_min_humidity) { visual_min_humidity_ = visual_min_humidity; }
float get_visual_max_humidity() const { return visual_max_humidity_; }
void set_visual_max_humidity(float visual_max_humidity) { visual_max_humidity_ = visual_max_humidity; }
float get_visual_min_humidity() const { return this->visual_min_humidity_; }
void set_visual_min_humidity(float visual_min_humidity) { this->visual_min_humidity_ = visual_min_humidity; }
float get_visual_max_humidity() const { return this->visual_max_humidity_; }
void set_visual_max_humidity(float visual_max_humidity) { this->visual_max_humidity_ = visual_max_humidity; }
protected:
void set_mode_support_(climate::ClimateMode mode, bool supported) {
if (supported) {
supported_modes_.insert(mode);
this->supported_modes_.insert(mode);
} else {
supported_modes_.erase(mode);
this->supported_modes_.erase(mode);
}
}
void set_fan_mode_support_(climate::ClimateFanMode mode, bool supported) {
if (supported) {
supported_fan_modes_.insert(mode);
this->supported_fan_modes_.insert(mode);
} else {
supported_fan_modes_.erase(mode);
this->supported_fan_modes_.erase(mode);
}
}
void set_swing_mode_support_(climate::ClimateSwingMode mode, bool supported) {
if (supported) {
supported_swing_modes_.insert(mode);
this->supported_swing_modes_.insert(mode);
} else {
supported_swing_modes_.erase(mode);
this->supported_swing_modes_.erase(mode);
}
}
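The deprecation messages in this hunk all steer platforms toward the bulk setters. As a rough sketch only, assuming the usual traits() override of an ESPHome climate platform (MyClimate and the chosen values are illustrative, not taken from this diff; the setter names are the ones visible above), the replacement pattern looks like:

climate::ClimateTraits MyClimate::traits() {
  climate::ClimateTraits traits;
  // Bulk setters replace the deprecated per-mode set_supports_fan_mode_*() calls
  traits.set_supported_fan_modes({climate::CLIMATE_FAN_AUTO, climate::CLIMATE_FAN_LOW, climate::CLIMATE_FAN_HIGH});
  traits.add_supported_custom_fan_mode("Turbo");  // hypothetical custom fan mode name
  traits.set_supported_swing_modes({climate::CLIMATE_SWING_OFF, climate::CLIMATE_SWING_VERTICAL});
  traits.set_visual_min_temperature(16.0f);
  traits.set_visual_max_temperature(30.0f);
  traits.set_visual_temperature_step(0.5f);
  return traits;
}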


@@ -32,7 +32,7 @@ const uint32_t FAN_MAX = 0x40;
// Temperature
const uint8_t TEMP_RANGE = TEMP_MAX - TEMP_MIN + 1;
const uint32_t TEMP_MASK = 0XF00;
const uint32_t TEMP_MASK = 0xF00;
const uint32_t TEMP_SHIFT = 8;
const uint16_t BITS = 28;
@@ -43,11 +43,11 @@ void LgIrClimate::transmit_state() {
// ESP_LOGD(TAG, "climate_lg_ir mode_before_ code: 0x%02X", modeBefore_);
// Set command
if (send_swing_cmd_) {
send_swing_cmd_ = false;
if (this->send_swing_cmd_) {
this->send_swing_cmd_ = false;
remote_state |= COMMAND_SWING;
} else {
bool climate_is_off = (mode_before_ == climate::CLIMATE_MODE_OFF);
bool climate_is_off = (this->mode_before_ == climate::CLIMATE_MODE_OFF);
switch (this->mode) {
case climate::CLIMATE_MODE_COOL:
remote_state |= climate_is_off ? COMMAND_ON_COOL : COMMAND_COOL;
@@ -71,7 +71,7 @@ void LgIrClimate::transmit_state() {
}
}
mode_before_ = this->mode;
this->mode_before_ = this->mode;
ESP_LOGD(TAG, "climate_lg_ir mode code: 0x%02X", this->mode);
@@ -102,7 +102,7 @@ void LgIrClimate::transmit_state() {
remote_state |= ((temp - 15) << TEMP_SHIFT);
}
transmit_(remote_state);
this->transmit_(remote_state);
this->publish_state();
}
@@ -187,7 +187,7 @@ bool LgIrClimate::on_receive(remote_base::RemoteReceiveData data) {
}
void LgIrClimate::transmit_(uint32_t value) {
calc_checksum_(value);
this->calc_checksum_(value);
ESP_LOGD(TAG, "Sending climate_lg_ir code: 0x%02" PRIX32, value);
auto transmit = this->transmitter_->transmit();


@@ -21,7 +21,7 @@ class LgIrClimate : public climate_ir::ClimateIR {
/// Override control to change settings of the climate device.
void control(const climate::ClimateCall &call) override {
send_swing_cmd_ = call.get_swing_mode().has_value();
this->send_swing_cmd_ = call.get_swing_mode().has_value();
// swing resets after unit powered off
if (call.get_mode().has_value() && *call.get_mode() == climate::CLIMATE_MODE_OFF)
this->swing_mode = climate::CLIMATE_SWING_OFF;


@@ -65,7 +65,7 @@ void DaikinClimate::transmit_state() {
transmit.perform();
}
uint8_t DaikinClimate::operation_mode_() {
uint8_t DaikinClimate::operation_mode_() const {
uint8_t operating_mode = DAIKIN_MODE_ON;
switch (this->mode) {
case climate::CLIMATE_MODE_COOL:
@@ -92,9 +92,12 @@ uint8_t DaikinClimate::operation_mode_() {
return operating_mode;
}
uint16_t DaikinClimate::fan_speed_() {
uint16_t DaikinClimate::fan_speed_() const {
uint16_t fan_speed;
switch (this->fan_mode.value()) {
case climate::CLIMATE_FAN_QUIET:
fan_speed = DAIKIN_FAN_SILENT << 8;
break;
case climate::CLIMATE_FAN_LOW:
fan_speed = DAIKIN_FAN_1 << 8;
break;
@@ -126,12 +129,11 @@ uint16_t DaikinClimate::fan_speed_() {
return fan_speed;
}
uint8_t DaikinClimate::temperature_() {
uint8_t DaikinClimate::temperature_() const {
// Force special temperatures depending on the mode
switch (this->mode) {
case climate::CLIMATE_MODE_FAN_ONLY:
return 0x32;
case climate::CLIMATE_MODE_HEAT_COOL:
case climate::CLIMATE_MODE_DRY:
return 0xc0;
default:
@@ -148,19 +150,25 @@ bool DaikinClimate::parse_state_frame_(const uint8_t frame[]) {
if (frame[DAIKIN_STATE_FRAME_SIZE - 1] != checksum)
return false;
uint8_t mode = frame[5];
// Temperature is given in degrees Celsius * 2
// Only update the temperature for modes that use it
uint8_t temperature = frame[6];
if (mode & DAIKIN_MODE_ON) {
switch (mode & 0xF0) {
case DAIKIN_MODE_COOL:
this->mode = climate::CLIMATE_MODE_COOL;
this->target_temperature = static_cast<float>(temperature * 0.5f);
break;
case DAIKIN_MODE_DRY:
this->mode = climate::CLIMATE_MODE_DRY;
break;
case DAIKIN_MODE_HEAT:
this->mode = climate::CLIMATE_MODE_HEAT;
this->target_temperature = static_cast<float>(temperature * 0.5f);
break;
case DAIKIN_MODE_AUTO:
this->mode = climate::CLIMATE_MODE_HEAT_COOL;
this->target_temperature = static_cast<float>(temperature * 0.5f);
break;
case DAIKIN_MODE_FAN:
this->mode = climate::CLIMATE_MODE_FAN_ONLY;
@@ -169,10 +177,6 @@ bool DaikinClimate::parse_state_frame_(const uint8_t frame[]) {
} else {
this->mode = climate::CLIMATE_MODE_OFF;
}
uint8_t temperature = frame[6];
if (!(temperature & 0xC0)) {
this->target_temperature = temperature >> 1;
}
uint8_t fan_mode = frame[8];
uint8_t swing_mode = frame[9];
if (fan_mode & 0xF && swing_mode & 0xF) {
@@ -187,7 +191,6 @@ bool DaikinClimate::parse_state_frame_(const uint8_t frame[]) {
switch (fan_mode & 0xF0) {
case DAIKIN_FAN_1:
case DAIKIN_FAN_2:
case DAIKIN_FAN_SILENT:
this->fan_mode = climate::CLIMATE_FAN_LOW;
break;
case DAIKIN_FAN_3:
@@ -200,6 +203,9 @@ bool DaikinClimate::parse_state_frame_(const uint8_t frame[]) {
case DAIKIN_FAN_AUTO:
this->fan_mode = climate::CLIMATE_FAN_AUTO;
break;
case DAIKIN_FAN_SILENT:
this->fan_mode = climate::CLIMATE_FAN_QUIET;
break;
}
this->publish_state();
return true;


@@ -44,17 +44,17 @@ class DaikinClimate : public climate_ir::ClimateIR {
public:
DaikinClimate()
: climate_ir::ClimateIR(DAIKIN_TEMP_MIN, DAIKIN_TEMP_MAX, 1.0f, true, true,
{climate::CLIMATE_FAN_AUTO, climate::CLIMATE_FAN_LOW, climate::CLIMATE_FAN_MEDIUM,
climate::CLIMATE_FAN_HIGH},
{climate::CLIMATE_FAN_QUIET, climate::CLIMATE_FAN_AUTO, climate::CLIMATE_FAN_LOW,
climate::CLIMATE_FAN_MEDIUM, climate::CLIMATE_FAN_HIGH},
{climate::CLIMATE_SWING_OFF, climate::CLIMATE_SWING_VERTICAL,
climate::CLIMATE_SWING_HORIZONTAL, climate::CLIMATE_SWING_BOTH}) {}
protected:
// Transmit via IR the state of this climate controller.
void transmit_state() override;
uint8_t operation_mode_();
uint16_t fan_speed_();
uint8_t temperature_();
uint8_t operation_mode_() const;
uint16_t fan_speed_() const;
uint8_t temperature_() const;
// Handle received IR Buffer
bool on_receive(remote_base::RemoteReceiveData data) override;
bool parse_state_frame_(const uint8_t frame[]);


@@ -1,6 +1,7 @@
#include "debug_component.h"
#include <algorithm>
#include "esphome/core/application.h"
#include "esphome/core/log.h"
#include "esphome/core/hal.h"
#include "esphome/core/helpers.h"
@@ -25,6 +26,7 @@ void DebugComponent::dump_config() {
#ifdef USE_SENSOR
LOG_SENSOR(" ", "Free space on heap", this->free_sensor_);
LOG_SENSOR(" ", "Largest free heap block", this->block_sensor_);
LOG_SENSOR(" ", "CPU frequency", this->cpu_frequency_sensor_);
#if defined(USE_ESP8266) && USE_ARDUINO_VERSION_CODE >= VERSION_CODE(2, 5, 2)
LOG_SENSOR(" ", "Heap fragmentation", this->fragmentation_sensor_);
#endif // defined(USE_ESP8266) && USE_ARDUINO_VERSION_CODE >= VERSION_CODE(2, 5, 2)
@@ -86,6 +88,9 @@ void DebugComponent::update() {
this->loop_time_sensor_->publish_state(this->max_loop_time_);
this->max_loop_time_ = 0;
}
if (this->cpu_frequency_sensor_ != nullptr) {
this->cpu_frequency_sensor_->publish_state(arch_get_cpu_freq_hz());
}
#endif // USE_SENSOR
update_platform_();


@@ -34,8 +34,12 @@ class DebugComponent : public PollingComponent {
#endif
void set_loop_time_sensor(sensor::Sensor *loop_time_sensor) { loop_time_sensor_ = loop_time_sensor; }
#ifdef USE_ESP32
void on_shutdown() override;
void set_psram_sensor(sensor::Sensor *psram_sensor) { this->psram_sensor_ = psram_sensor; }
#endif // USE_ESP32
void set_cpu_frequency_sensor(sensor::Sensor *cpu_frequency_sensor) {
this->cpu_frequency_sensor_ = cpu_frequency_sensor;
}
#endif // USE_SENSOR
protected:
uint32_t free_heap_{};
@@ -53,6 +57,7 @@ class DebugComponent : public PollingComponent {
#ifdef USE_ESP32
sensor::Sensor *psram_sensor_{nullptr};
#endif // USE_ESP32
sensor::Sensor *cpu_frequency_sensor_{nullptr};
#endif // USE_SENSOR
#ifdef USE_ESP32
@@ -75,6 +80,7 @@ class DebugComponent : public PollingComponent {
#endif // USE_TEXT_SENSOR
std::string get_reset_reason_();
std::string get_wakeup_cause_();
uint32_t get_free_heap_();
void get_device_info_(std::string &device_info);
void update_platform_();


@@ -1,25 +1,18 @@
#include "debug_component.h"
#ifdef USE_ESP32
#include "esphome/core/application.h"
#include "esphome/core/log.h"
#include "esphome/core/hal.h"
#include <esp_sleep.h>
#include <esp_heap_caps.h>
#include <esp_system.h>
#include <esp_chip_info.h>
#include <esp_partition.h>
#if defined(USE_ESP32_VARIANT_ESP32)
#include <esp32/rom/rtc.h>
#elif defined(USE_ESP32_VARIANT_ESP32C3)
#include <esp32c3/rom/rtc.h>
#elif defined(USE_ESP32_VARIANT_ESP32C6)
#include <esp32c6/rom/rtc.h>
#elif defined(USE_ESP32_VARIANT_ESP32S2)
#include <esp32s2/rom/rtc.h>
#elif defined(USE_ESP32_VARIANT_ESP32S3)
#include <esp32s3/rom/rtc.h>
#elif defined(USE_ESP32_VARIANT_ESP32H2)
#include <esp32h2/rom/rtc.h>
#endif
#include <map>
#ifdef USE_ARDUINO
#include <Esp.h>
#endif
@@ -29,6 +22,90 @@ namespace debug {
static const char *const TAG = "debug";
// indexed by the values returned by esp_reset_reason
static const char *const RESET_REASONS[] = {
"unknown source",
"power-on event",
"external pin",
"software via esp_restart",
"exception/panic",
"interrupt watchdog",
"task watchdog",
"other watchdogs",
"exiting deep sleep mode",
"brownout",
"SDIO",
"USB peripheral",
"JTAG",
"efuse error",
"power glitch detected",
"CPU lock up",
};
static const char *const REBOOT_KEY = "reboot_source";
static const size_t REBOOT_MAX_LEN = 24;
// on shutdown, store the source of the reboot request
void DebugComponent::on_shutdown() {
auto *component = App.get_current_component();
char buffer[REBOOT_MAX_LEN]{};
auto pref = global_preferences->make_preference(REBOOT_MAX_LEN, fnv1_hash(REBOOT_KEY + App.get_name()));
if (component != nullptr) {
strncpy(buffer, component->get_component_source(), REBOOT_MAX_LEN - 1);
}
ESP_LOGD(TAG, "Storing reboot source: %s", buffer);
pref.save(&buffer);
global_preferences->sync();
}
std::string DebugComponent::get_reset_reason_() {
std::string reset_reason;
unsigned reason = esp_reset_reason();
if (reason < sizeof(RESET_REASONS) / sizeof(RESET_REASONS[0])) {
reset_reason = RESET_REASONS[reason];
if (reason == ESP_RST_SW) {
auto pref = global_preferences->make_preference(REBOOT_MAX_LEN, fnv1_hash(REBOOT_KEY + App.get_name()));
char buffer[REBOOT_MAX_LEN]{};
if (pref.load(&buffer)) {
reset_reason = "Reboot request from " + std::string(buffer);
}
}
} else {
reset_reason = "unknown source";
}
ESP_LOGD(TAG, "Reset Reason: %s", reset_reason.c_str());
return reset_reason;
}
static const char *const WAKEUP_CAUSES[] = {
"undefined",
"undefined",
"external signal using RTC_IO",
"external signal using RTC_CNTL",
"timer",
"touchpad",
"ULP program",
"GPIO",
"UART",
"WIFI",
"COCPU int",
"COCPU crash",
"BT",
};
std::string DebugComponent::get_wakeup_cause_() {
const char *wake_reason;
unsigned reason = esp_sleep_get_wakeup_cause();
if (reason < sizeof(WAKEUP_CAUSES) / sizeof(WAKEUP_CAUSES[0])) {
wake_reason = WAKEUP_CAUSES[reason];
} else {
wake_reason = "unknown source";
}
ESP_LOGD(TAG, "Wakeup Reason: %s", wake_reason);
return wake_reason;
}
void DebugComponent::log_partition_info_() {
ESP_LOGCONFIG(TAG, "Partition table:");
ESP_LOGCONFIG(TAG, " %-12s %-4s %-8s %-10s %-10s", "Name", "Type", "Subtype", "Address", "Size");
@@ -42,171 +119,16 @@ void DebugComponent::log_partition_info_() {
esp_partition_iterator_release(it);
}
std::string DebugComponent::get_reset_reason_() {
std::string reset_reason;
switch (esp_reset_reason()) {
case ESP_RST_POWERON:
reset_reason = "Reset due to power-on event";
break;
case ESP_RST_EXT:
reset_reason = "Reset by external pin";
break;
case ESP_RST_SW:
reset_reason = "Software reset via esp_restart";
break;
case ESP_RST_PANIC:
reset_reason = "Software reset due to exception/panic";
break;
case ESP_RST_INT_WDT:
reset_reason = "Reset (software or hardware) due to interrupt watchdog";
break;
case ESP_RST_TASK_WDT:
reset_reason = "Reset due to task watchdog";
break;
case ESP_RST_WDT:
reset_reason = "Reset due to other watchdogs";
break;
case ESP_RST_DEEPSLEEP:
reset_reason = "Reset after exiting deep sleep mode";
break;
case ESP_RST_BROWNOUT:
reset_reason = "Brownout reset (software or hardware)";
break;
case ESP_RST_SDIO:
reset_reason = "Reset over SDIO";
break;
#ifdef USE_ESP32_VARIANT_ESP32
#if (ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 1, 4))
case ESP_RST_USB:
reset_reason = "Reset by USB peripheral";
break;
case ESP_RST_JTAG:
reset_reason = "Reset by JTAG";
break;
case ESP_RST_EFUSE:
reset_reason = "Reset due to efuse error";
break;
case ESP_RST_PWR_GLITCH:
reset_reason = "Reset due to power glitch detected";
break;
case ESP_RST_CPU_LOCKUP:
reset_reason = "Reset due to CPU lock up (double exception)";
break;
#endif // ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 1, 4)
#endif // USE_ESP32_VARIANT_ESP32
default: // Includes ESP_RST_UNKNOWN
switch (rtc_get_reset_reason(0)) {
case POWERON_RESET:
reset_reason = "Power On Reset";
break;
#if defined(USE_ESP32_VARIANT_ESP32)
case SW_RESET:
#elif defined(USE_ESP32_VARIANT_ESP32C3) || defined(USE_ESP32_VARIANT_ESP32S2) || \
defined(USE_ESP32_VARIANT_ESP32S3) || defined(USE_ESP32_VARIANT_ESP32C6)
case RTC_SW_SYS_RESET:
#endif
reset_reason = "Software Reset Digital Core";
break;
#if defined(USE_ESP32_VARIANT_ESP32)
case OWDT_RESET:
reset_reason = "Watch Dog Reset Digital Core";
break;
#endif
case DEEPSLEEP_RESET:
reset_reason = "Deep Sleep Reset Digital Core";
break;
#if defined(USE_ESP32_VARIANT_ESP32)
case SDIO_RESET:
reset_reason = "SLC Module Reset Digital Core";
break;
#endif
case TG0WDT_SYS_RESET:
reset_reason = "Timer Group 0 Watch Dog Reset Digital Core";
break;
case TG1WDT_SYS_RESET:
reset_reason = "Timer Group 1 Watch Dog Reset Digital Core";
break;
case RTCWDT_SYS_RESET:
reset_reason = "RTC Watch Dog Reset Digital Core";
break;
#if !defined(USE_ESP32_VARIANT_ESP32C6) && !defined(USE_ESP32_VARIANT_ESP32H2)
case INTRUSION_RESET:
reset_reason = "Intrusion Reset CPU";
break;
#endif
#if defined(USE_ESP32_VARIANT_ESP32)
case TGWDT_CPU_RESET:
reset_reason = "Timer Group Reset CPU";
break;
#elif defined(USE_ESP32_VARIANT_ESP32C3) || defined(USE_ESP32_VARIANT_ESP32S2) || \
defined(USE_ESP32_VARIANT_ESP32S3) || defined(USE_ESP32_VARIANT_ESP32C6)
case TG0WDT_CPU_RESET:
reset_reason = "Timer Group 0 Reset CPU";
break;
#endif
#if defined(USE_ESP32_VARIANT_ESP32)
case SW_CPU_RESET:
#elif defined(USE_ESP32_VARIANT_ESP32C3) || defined(USE_ESP32_VARIANT_ESP32S2) || \
defined(USE_ESP32_VARIANT_ESP32S3) || defined(USE_ESP32_VARIANT_ESP32C6)
case RTC_SW_CPU_RESET:
#endif
reset_reason = "Software Reset CPU";
break;
case RTCWDT_CPU_RESET:
reset_reason = "RTC Watch Dog Reset CPU";
break;
#if defined(USE_ESP32_VARIANT_ESP32)
case EXT_CPU_RESET:
reset_reason = "External CPU Reset";
break;
#endif
case RTCWDT_BROWN_OUT_RESET:
reset_reason = "Voltage Unstable Reset";
break;
case RTCWDT_RTC_RESET:
reset_reason = "RTC Watch Dog Reset Digital Core And RTC Module";
break;
#if defined(USE_ESP32_VARIANT_ESP32C3) || defined(USE_ESP32_VARIANT_ESP32S2) || defined(USE_ESP32_VARIANT_ESP32S3) || \
defined(USE_ESP32_VARIANT_ESP32C6)
case TG1WDT_CPU_RESET:
reset_reason = "Timer Group 1 Reset CPU";
break;
case SUPER_WDT_RESET:
reset_reason = "Super Watchdog Reset Digital Core And RTC Module";
break;
case EFUSE_RESET:
reset_reason = "eFuse Reset Digital Core";
break;
#endif
#if defined(USE_ESP32_VARIANT_ESP32C3) || defined(USE_ESP32_VARIANT_ESP32S2) || defined(USE_ESP32_VARIANT_ESP32S3)
case GLITCH_RTC_RESET:
reset_reason = "Glitch Reset Digital Core And RTC Module";
break;
#endif
#if defined(USE_ESP32_VARIANT_ESP32C3) || defined(USE_ESP32_VARIANT_ESP32S3) || defined(USE_ESP32_VARIANT_ESP32C6)
case USB_UART_CHIP_RESET:
reset_reason = "USB UART Reset Digital Core";
break;
case USB_JTAG_CHIP_RESET:
reset_reason = "USB JTAG Reset Digital Core";
break;
#endif
#if defined(USE_ESP32_VARIANT_ESP32C3) || defined(USE_ESP32_VARIANT_ESP32S3)
case POWER_GLITCH_RESET:
reset_reason = "Power Glitch Reset Digital Core And RTC Module";
break;
#endif
default:
reset_reason = "Unknown Reset Reason";
}
break;
}
ESP_LOGD(TAG, "Reset Reason: %s", reset_reason.c_str());
return reset_reason;
}
uint32_t DebugComponent::get_free_heap_() { return heap_caps_get_free_size(MALLOC_CAP_INTERNAL); }
static const std::map<int, const char *> CHIP_FEATURES = {
{CHIP_FEATURE_BLE, "BLE"},
{CHIP_FEATURE_BT, "BT"},
{CHIP_FEATURE_EMB_FLASH, "EMB Flash"},
{CHIP_FEATURE_EMB_PSRAM, "EMB PSRAM"},
{CHIP_FEATURE_WIFI_BGN, "2.4GHz WiFi"},
};
void DebugComponent::get_device_info_(std::string &device_info) {
#if defined(USE_ARDUINO)
const char *flash_mode;
@@ -242,44 +164,16 @@ void DebugComponent::get_device_info_(std::string &device_info) {
esp_chip_info_t info;
esp_chip_info(&info);
const char *model;
#if defined(USE_ESP32_VARIANT_ESP32)
model = "ESP32";
#elif defined(USE_ESP32_VARIANT_ESP32C3)
model = "ESP32-C3";
#elif defined(USE_ESP32_VARIANT_ESP32C6)
model = "ESP32-C6";
#elif defined(USE_ESP32_VARIANT_ESP32S2)
model = "ESP32-S2";
#elif defined(USE_ESP32_VARIANT_ESP32S3)
model = "ESP32-S3";
#elif defined(USE_ESP32_VARIANT_ESP32H2)
model = "ESP32-H2";
#else
model = "UNKNOWN";
#endif
const char *model = ESPHOME_VARIANT;
std::string features;
if (info.features & CHIP_FEATURE_EMB_FLASH) {
features += "EMB_FLASH,";
info.features &= ~CHIP_FEATURE_EMB_FLASH;
for (auto feature : CHIP_FEATURES) {
if (info.features & feature.first) {
features += feature.second;
features += ", ";
info.features &= ~feature.first;
}
}
if (info.features & CHIP_FEATURE_WIFI_BGN) {
features += "WIFI_BGN,";
info.features &= ~CHIP_FEATURE_WIFI_BGN;
}
if (info.features & CHIP_FEATURE_BLE) {
features += "BLE,";
info.features &= ~CHIP_FEATURE_BLE;
}
if (info.features & CHIP_FEATURE_BT) {
features += "BT,";
info.features &= ~CHIP_FEATURE_BT;
}
if (info.features & CHIP_FEATURE_EMB_PSRAM) {
features += "EMB_PSRAM,";
info.features &= ~CHIP_FEATURE_EMB_PSRAM;
}
if (info.features)
if (info.features != 0)
features += "Other:" + format_hex(info.features);
ESP_LOGD(TAG, "Chip: Model=%s, Features=%s Cores=%u, Revision=%u", model, features.c_str(), info.cores,
info.revision);
@@ -289,6 +183,8 @@ void DebugComponent::get_device_info_(std::string &device_info) {
device_info += features;
device_info += " Cores:" + to_string(info.cores);
device_info += " Revision:" + to_string(info.revision);
device_info += str_sprintf("|CPU Frequency: %" PRIu32 " MHz", arch_get_cpu_freq_hz() / 1000000);
ESP_LOGD(TAG, "CPU Frequency: %" PRIu32 " MHz", arch_get_cpu_freq_hz() / 1000000);
// Framework detection
device_info += "|Framework: ";
@@ -315,48 +211,7 @@ void DebugComponent::get_device_info_(std::string &device_info) {
device_info += "|Reset: ";
device_info += get_reset_reason_();
const char *wakeup_reason;
switch (rtc_get_wakeup_cause()) {
case NO_SLEEP:
wakeup_reason = "No Sleep";
break;
case EXT_EVENT0_TRIG:
wakeup_reason = "External Event 0";
break;
case EXT_EVENT1_TRIG:
wakeup_reason = "External Event 1";
break;
case GPIO_TRIG:
wakeup_reason = "GPIO";
break;
case TIMER_EXPIRE:
wakeup_reason = "Wakeup Timer";
break;
case SDIO_TRIG:
wakeup_reason = "SDIO";
break;
case MAC_TRIG:
wakeup_reason = "MAC";
break;
case UART0_TRIG:
wakeup_reason = "UART0";
break;
case UART1_TRIG:
wakeup_reason = "UART1";
break;
case TOUCH_TRIG:
wakeup_reason = "Touch";
break;
case SAR_TRIG:
wakeup_reason = "SAR";
break;
case BT_TRIG:
wakeup_reason = "BT";
break;
default:
wakeup_reason = "Unknown";
}
ESP_LOGD(TAG, "Wakeup Reason: %s", wakeup_reason);
std::string wakeup_reason = this->get_wakeup_cause_();
device_info += "|Wakeup: ";
device_info += wakeup_reason;
}


@@ -1,5 +1,6 @@
import esphome.codegen as cg
from esphome.components import sensor
from esphome.components.esp32 import CONF_CPU_FREQUENCY
import esphome.config_validation as cv
from esphome.const import (
CONF_BLOCK,
@@ -10,6 +11,7 @@ from esphome.const import (
ICON_COUNTER,
ICON_TIMER,
UNIT_BYTES,
UNIT_HERTZ,
UNIT_MILLISECOND,
UNIT_PERCENT,
)
@@ -60,6 +62,14 @@ CONFIG_SCHEMA = {
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
),
),
cv.Optional(CONF_CPU_FREQUENCY): cv.All(
sensor.sensor_schema(
unit_of_measurement=UNIT_HERTZ,
icon="mdi:speedometer",
accuracy_decimals=0,
entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
),
),
}
@@ -85,3 +95,7 @@ async def to_code(config):
if psram_conf := config.get(CONF_PSRAM):
sens = await sensor.new_sensor(psram_conf)
cg.add(debug_component.set_psram_sensor(sens))
if cpu_freq_conf := config.get(CONF_CPU_FREQUENCY):
sens = await sensor.new_sensor(cpu_freq_conf)
cg.add(debug_component.set_cpu_frequency_sensor(sens))


@@ -31,9 +31,12 @@ void DeepSleepComponent::set_wakeup_pin_mode(WakeupPinMode wakeup_pin_mode) {
#if !defined(USE_ESP32_VARIANT_ESP32C3) && !defined(USE_ESP32_VARIANT_ESP32C6)
void DeepSleepComponent::set_ext1_wakeup(Ext1Wakeup ext1_wakeup) { this->ext1_wakeup_ = ext1_wakeup; }
#if !defined(USE_ESP32_VARIANT_ESP32H2)
void DeepSleepComponent::set_touch_wakeup(bool touch_wakeup) { this->touch_wakeup_ = touch_wakeup; }
#endif
#endif
void DeepSleepComponent::set_run_duration(WakeupCauseToRunDuration wakeup_cause_to_run_duration) {
wakeup_cause_to_run_duration_ = wakeup_cause_to_run_duration;
}
@@ -65,7 +68,7 @@ bool DeepSleepComponent::prepare_to_sleep_() {
}
void DeepSleepComponent::deep_sleep_() {
#if !defined(USE_ESP32_VARIANT_ESP32C3) && !defined(USE_ESP32_VARIANT_ESP32C6)
#if !defined(USE_ESP32_VARIANT_ESP32C3) && !defined(USE_ESP32_VARIANT_ESP32C6) && !defined(USE_ESP32_VARIANT_ESP32H2)
if (this->sleep_duration_.has_value())
esp_sleep_enable_timer_wakeup(*this->sleep_duration_);
if (this->wakeup_pin_ != nullptr) {
@@ -84,6 +87,15 @@ void DeepSleepComponent::deep_sleep_() {
esp_sleep_pd_config(ESP_PD_DOMAIN_RTC_PERIPH, ESP_PD_OPTION_ON);
}
#endif
#if defined(USE_ESP32_VARIANT_ESP32H2)
if (this->sleep_duration_.has_value())
esp_sleep_enable_timer_wakeup(*this->sleep_duration_);
if (this->ext1_wakeup_.has_value()) {
esp_sleep_enable_ext1_wakeup(this->ext1_wakeup_->mask, this->ext1_wakeup_->wakeup_mode);
}
#endif
#if defined(USE_ESP32_VARIANT_ESP32C3) || defined(USE_ESP32_VARIANT_ESP32C6)
if (this->sleep_duration_.has_value())
esp_sleep_enable_timer_wakeup(*this->sleep_duration_);


@@ -69,21 +69,16 @@ bool Rect::inside(int16_t test_x, int16_t test_y, bool absolute) const { // NOL
return true;
}
if (absolute) {
return ((test_x >= this->x) && (test_x <= this->x2()) && (test_y >= this->y) && (test_y <= this->y2()));
} else {
return ((test_x >= 0) && (test_x <= this->w) && (test_y >= 0) && (test_y <= this->h));
return test_x >= this->x && test_x < this->x2() && test_y >= this->y && test_y < this->y2();
}
return test_x >= 0 && test_x < this->w && test_y >= 0 && test_y < this->h;
}
bool Rect::inside(Rect rect, bool absolute) const {
bool Rect::inside(Rect rect) const {
if (!this->is_set() || !rect.is_set()) {
return true;
}
if (absolute) {
return ((rect.x <= this->x2()) && (rect.x2() >= this->x) && (rect.y <= this->y2()) && (rect.y2() >= this->y));
} else {
return ((rect.x <= this->w) && (rect.w >= 0) && (rect.y <= this->h) && (rect.h >= 0));
}
return this->x2() >= rect.x && this->x <= rect.x2() && this->y2() >= rect.y && this->y <= rect.y2();
}
void Rect::info(const std::string &prefix) {


@@ -26,7 +26,7 @@ class Rect {
void extend(Rect rect);
void shrink(Rect rect);
bool inside(Rect rect, bool absolute = true) const;
bool inside(Rect rect) const;
bool inside(int16_t test_x, int16_t test_y, bool absolute = true) const;
bool equal(Rect rect) const;
void info(const std::string &prefix = "rect info:");


@@ -187,7 +187,7 @@ void ENS160Component::update() {
}
return;
case INVALID_OUTPUT:
ESP_LOGE(TAG, "ENS160 Invalid Status - No Invalid Output");
ESP_LOGE(TAG, "ENS160 Invalid Status - No valid output");
this->status_set_warning();
return;
}


@@ -1,4 +1,5 @@
from dataclasses import dataclass
import itertools
import logging
import os
from pathlib import Path
@@ -37,6 +38,7 @@ from esphome.const import (
__version__,
)
from esphome.core import CORE, HexInt, TimePeriod
from esphome.cpp_generator import RawExpression
import esphome.final_validate as fv
from esphome.helpers import copy_file_if_changed, mkdir_p, write_file_if_changed
@@ -54,6 +56,12 @@ from .const import ( # noqa
KEY_SUBMODULES,
KEY_VARIANT,
VARIANT_ESP32,
VARIANT_ESP32C2,
VARIANT_ESP32C3,
VARIANT_ESP32C6,
VARIANT_ESP32H2,
VARIANT_ESP32S2,
VARIANT_ESP32S3,
VARIANT_FRIENDLY,
VARIANTS,
)
@@ -70,7 +78,43 @@ CONF_RELEASE = "release"
CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES = "enable_idf_experimental_features"
def get_cpu_frequencies(*frequencies):
return [str(x) + "MHZ" for x in frequencies]
CPU_FREQUENCIES = {
VARIANT_ESP32: get_cpu_frequencies(80, 160, 240),
VARIANT_ESP32S2: get_cpu_frequencies(80, 160, 240),
VARIANT_ESP32S3: get_cpu_frequencies(80, 160, 240),
VARIANT_ESP32C2: get_cpu_frequencies(80, 120),
VARIANT_ESP32C3: get_cpu_frequencies(80, 160),
VARIANT_ESP32C6: get_cpu_frequencies(80, 120, 160),
VARIANT_ESP32H2: get_cpu_frequencies(16, 32, 48, 64, 96),
}
# Make sure a newly added variant is not missed here.
assert all(v in CPU_FREQUENCIES for v in VARIANTS)
FULL_CPU_FREQUENCIES = set(itertools.chain.from_iterable(CPU_FREQUENCIES.values()))
def set_core_data(config):
cpu_frequency = config.get(CONF_CPU_FREQUENCY, None)
variant = config[CONF_VARIANT]
# if not specified in config, use 160MHz if supported, otherwise the fastest available
if cpu_frequency is None:
choices = CPU_FREQUENCIES[variant]
if "160MHZ" in choices:
cpu_frequency = "160MHZ"
else:
cpu_frequency = choices[-1]
config[CONF_CPU_FREQUENCY] = cpu_frequency
elif cpu_frequency not in CPU_FREQUENCIES[variant]:
raise cv.Invalid(
f"Invalid CPU frequency '{cpu_frequency}' for {config[CONF_VARIANT]}",
path=[CONF_CPU_FREQUENCY],
)
CORE.data[KEY_ESP32] = {}
CORE.data[KEY_CORE][KEY_TARGET_PLATFORM] = PLATFORM_ESP32
conf = config[CONF_FRAMEWORK]
@@ -83,6 +127,7 @@ def set_core_data(config):
CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION] = cv.Version.parse(
config[CONF_FRAMEWORK][CONF_VERSION]
)
CORE.data[KEY_ESP32][KEY_BOARD] = config[CONF_BOARD]
CORE.data[KEY_ESP32][KEY_VARIANT] = config[CONF_VARIANT]
CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES] = {}
@@ -553,11 +598,15 @@ FLASH_SIZES = [
]
CONF_FLASH_SIZE = "flash_size"
CONF_CPU_FREQUENCY = "cpu_frequency"
CONF_PARTITIONS = "partitions"
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.Required(CONF_BOARD): cv.string_strict,
cv.Optional(CONF_CPU_FREQUENCY): cv.one_of(
*FULL_CPU_FREQUENCIES, upper=True
),
cv.Optional(CONF_FLASH_SIZE, default="4MB"): cv.one_of(
*FLASH_SIZES, upper=True
),
@@ -598,6 +647,7 @@ async def to_code(config):
os.path.join(os.path.dirname(__file__), "post_build.py.script"),
)
freq = config[CONF_CPU_FREQUENCY][:-3]
if conf[CONF_TYPE] == FRAMEWORK_ESP_IDF:
cg.add_platformio_option("framework", "espidf")
cg.add_build_flag("-DUSE_ESP_IDF")
@@ -631,6 +681,9 @@ async def to_code(config):
add_idf_sdkconfig_option("CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU0", False)
add_idf_sdkconfig_option("CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU1", False)
# Set default CPU frequency
add_idf_sdkconfig_option(f"CONFIG_ESP_DEFAULT_CPU_FREQ_MHZ_{freq}", True)
cg.add_platformio_option("board_build.partitions", "partitions.csv")
if CONF_PARTITIONS in config:
add_extra_build_file(
@@ -696,6 +749,7 @@ async def to_code(config):
f"VERSION_CODE({framework_ver.major}, {framework_ver.minor}, {framework_ver.patch})"
),
)
cg.add(RawExpression(f"setCpuFrequencyMhz({freq})"))
APP_PARTITION_SIZES = {


@@ -13,11 +13,13 @@
#include <hal/cpu_hal.h>
#ifdef USE_ARDUINO
#include <esp32-hal.h>
#endif
#include <Esp.h>
#else
#include <esp_clk_tree.h>
void setup();
void loop();
#endif
namespace esphome {
@@ -59,9 +61,13 @@ uint32_t arch_get_cpu_cycle_count() { return esp_cpu_get_cycle_count(); }
uint32_t arch_get_cpu_cycle_count() { return cpu_hal_get_cycle_count(); }
#endif
uint32_t arch_get_cpu_freq_hz() {
rtc_cpu_freq_config_t config;
rtc_clk_cpu_freq_get_config(&config);
return config.freq_mhz * 1000000U;
uint32_t freq = 0;
#ifdef USE_ESP_IDF
esp_clk_tree_src_get_freq_hz(SOC_MOD_CLK_CPU, ESP_CLK_TREE_SRC_FREQ_PRECISION_CACHED, &freq);
#elif defined(USE_ARDUINO)
freq = ESP.getCpuFreqMHz() * 1000000;
#endif
return freq;
}
#ifdef USE_ESP_IDF


@@ -2,10 +2,6 @@
#include "ble.h"
#ifdef USE_ESP32_VARIANT_ESP32C6
#include "const_esp32c6.h"
#endif // USE_ESP32_VARIANT_ESP32C6
#include "esphome/core/application.h"
#include "esphome/core/log.h"
@@ -114,6 +110,7 @@ void ESP32BLE::advertising_init_() {
this->advertising_->set_scan_response(true);
this->advertising_->set_min_preferred_interval(0x06);
this->advertising_->set_appearance(this->appearance_);
}
bool ESP32BLE::ble_setup_() {
@@ -127,11 +124,7 @@ bool ESP32BLE::ble_setup_() {
if (esp_bt_controller_get_status() != ESP_BT_CONTROLLER_STATUS_ENABLED) {
// start bt controller
if (esp_bt_controller_get_status() == ESP_BT_CONTROLLER_STATUS_IDLE) {
#ifdef USE_ESP32_VARIANT_ESP32C6
esp_bt_controller_config_t cfg = BT_CONTROLLER_CONFIG;
#else
esp_bt_controller_config_t cfg = BT_CONTROLLER_INIT_CONFIG_DEFAULT();
#endif
err = esp_bt_controller_init(&cfg);
if (err != ESP_OK) {
ESP_LOGE(TAG, "esp_bt_controller_init failed: %s", esp_err_to_name(err));


@@ -95,6 +95,7 @@ class ESP32BLE : public Component {
void advertising_start();
void advertising_set_service_data(const std::vector<uint8_t> &data);
void advertising_set_manufacturer_data(const std::vector<uint8_t> &data);
void advertising_set_appearance(uint16_t appearance) { this->appearance_ = appearance; }
void advertising_add_service_uuid(ESPBTUUID uuid);
void advertising_remove_service_uuid(ESPBTUUID uuid);
void advertising_register_raw_advertisement_callback(std::function<void(bool)> &&callback);
@@ -128,11 +129,12 @@ class ESP32BLE : public Component {
BLEComponentState state_{BLE_COMPONENT_STATE_OFF};
Queue<BLEEvent> ble_events_;
BLEAdvertising *advertising_;
BLEAdvertising *advertising_{};
esp_ble_io_cap_t io_cap_{ESP_IO_CAP_NONE};
uint32_t advertising_cycle_time_;
bool enable_on_boot_;
uint32_t advertising_cycle_time_{};
bool enable_on_boot_{};
optional<std::string> name_;
uint16_t appearance_{0};
};
// NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables)


@@ -32,6 +32,7 @@ class BLEAdvertising {
void set_scan_response(bool scan_response) { this->scan_response_ = scan_response; }
void set_min_preferred_interval(uint16_t interval) { this->advertising_data_.min_interval = interval; }
void set_manufacturer_data(const std::vector<uint8_t> &data);
void set_appearance(uint16_t appearance) { this->advertising_data_.appearance = appearance; }
void set_service_data(const std::vector<uint8_t> &data);
void register_raw_advertisement_callback(std::function<void(bool)> &&callback);


@@ -1,74 +0,0 @@
#pragma once
#ifdef USE_ESP32_VARIANT_ESP32C6
#include <esp_bt.h>
namespace esphome {
namespace esp32_ble {
static const esp_bt_controller_config_t BT_CONTROLLER_CONFIG = {
.config_version = CONFIG_VERSION,
.ble_ll_resolv_list_size = CONFIG_BT_LE_LL_RESOLV_LIST_SIZE,
.ble_hci_evt_hi_buf_count = DEFAULT_BT_LE_HCI_EVT_HI_BUF_COUNT,
.ble_hci_evt_lo_buf_count = DEFAULT_BT_LE_HCI_EVT_LO_BUF_COUNT,
.ble_ll_sync_list_cnt = DEFAULT_BT_LE_MAX_PERIODIC_ADVERTISER_LIST,
.ble_ll_sync_cnt = DEFAULT_BT_LE_MAX_PERIODIC_SYNCS,
.ble_ll_rsp_dup_list_count = CONFIG_BT_LE_LL_DUP_SCAN_LIST_COUNT,
.ble_ll_adv_dup_list_count = CONFIG_BT_LE_LL_DUP_SCAN_LIST_COUNT,
.ble_ll_tx_pwr_dbm = BLE_LL_TX_PWR_DBM_N,
.rtc_freq = RTC_FREQ_N,
.ble_ll_sca = CONFIG_BT_LE_LL_SCA,
.ble_ll_scan_phy_number = BLE_LL_SCAN_PHY_NUMBER_N,
.ble_ll_conn_def_auth_pyld_tmo = BLE_LL_CONN_DEF_AUTH_PYLD_TMO_N,
.ble_ll_jitter_usecs = BLE_LL_JITTER_USECS_N,
.ble_ll_sched_max_adv_pdu_usecs = BLE_LL_SCHED_MAX_ADV_PDU_USECS_N,
.ble_ll_sched_direct_adv_max_usecs = BLE_LL_SCHED_DIRECT_ADV_MAX_USECS_N,
.ble_ll_sched_adv_max_usecs = BLE_LL_SCHED_ADV_MAX_USECS_N,
.ble_scan_rsp_data_max_len = DEFAULT_BT_LE_SCAN_RSP_DATA_MAX_LEN_N,
.ble_ll_cfg_num_hci_cmd_pkts = BLE_LL_CFG_NUM_HCI_CMD_PKTS_N,
.ble_ll_ctrl_proc_timeout_ms = BLE_LL_CTRL_PROC_TIMEOUT_MS_N,
.nimble_max_connections = DEFAULT_BT_LE_MAX_CONNECTIONS,
.ble_whitelist_size = DEFAULT_BT_NIMBLE_WHITELIST_SIZE, // NOLINT
.ble_acl_buf_size = DEFAULT_BT_LE_ACL_BUF_SIZE,
.ble_acl_buf_count = DEFAULT_BT_LE_ACL_BUF_COUNT,
.ble_hci_evt_buf_size = DEFAULT_BT_LE_HCI_EVT_BUF_SIZE,
.ble_multi_adv_instances = DEFAULT_BT_LE_MAX_EXT_ADV_INSTANCES,
.ble_ext_adv_max_size = DEFAULT_BT_LE_EXT_ADV_MAX_SIZE,
.controller_task_stack_size = NIMBLE_LL_STACK_SIZE,
.controller_task_prio = ESP_TASK_BT_CONTROLLER_PRIO,
.controller_run_cpu = 0,
.enable_qa_test = RUN_QA_TEST,
.enable_bqb_test = RUN_BQB_TEST,
#if ESP_IDF_VERSION < ESP_IDF_VERSION_VAL(5, 3, 1)
// The following fields have been removed since ESP IDF version 5.3.1, see commit:
// https://github.com/espressif/esp-idf/commit/e761c1de8f9c0777829d597b4d5a33bb070a30a8
.enable_uart_hci = HCI_UART_EN,
.ble_hci_uart_port = DEFAULT_BT_LE_HCI_UART_PORT,
.ble_hci_uart_baud = DEFAULT_BT_LE_HCI_UART_BAUD,
.ble_hci_uart_data_bits = DEFAULT_BT_LE_HCI_UART_DATA_BITS,
.ble_hci_uart_stop_bits = DEFAULT_BT_LE_HCI_UART_STOP_BITS,
.ble_hci_uart_flow_ctrl = DEFAULT_BT_LE_HCI_UART_FLOW_CTRL,
.ble_hci_uart_uart_parity = DEFAULT_BT_LE_HCI_UART_PARITY,
#endif
.enable_tx_cca = DEFAULT_BT_LE_TX_CCA_ENABLED,
.cca_rssi_thresh = 256 - DEFAULT_BT_LE_CCA_RSSI_THRESH,
.sleep_en = NIMBLE_SLEEP_ENABLE,
.coex_phy_coded_tx_rx_time_limit = DEFAULT_BT_LE_COEX_PHY_CODED_TX_RX_TLIM_EFF,
.dis_scan_backoff = NIMBLE_DISABLE_SCAN_BACKOFF,
.ble_scan_classify_filter_enable = 1,
.main_xtal_freq = CONFIG_XTAL_FREQ,
.version_num = (uint8_t) efuse_hal_chip_revision(),
.cpu_freq_mhz = CONFIG_ESP_DEFAULT_CPU_FREQ_MHZ,
.ignore_wl_for_direct_adv = 0,
.enable_pcl = DEFAULT_BT_LE_POWER_CONTROL_ENABLED,
#if ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 1, 3)
.csa2_select = DEFAULT_BT_LE_50_FEATURE_SUPPORT,
#endif
.config_magic = CONFIG_MAGIC,
};
} // namespace esp32_ble
} // namespace esphome
#endif // USE_ESP32_VARIANT_ESP32C6


@@ -32,6 +32,7 @@ DEPENDENCIES = ["esp32"]
DOMAIN = "esp32_ble_server"
CONF_ADVERTISE = "advertise"
CONF_APPEARANCE = "appearance"
CONF_BROADCAST = "broadcast"
CONF_CHARACTERISTICS = "characteristics"
CONF_DESCRIPTION = "description"
@@ -421,6 +422,7 @@ CONFIG_SCHEMA = cv.Schema(
cv.GenerateID(): cv.declare_id(BLEServer),
cv.GenerateID(esp32_ble.CONF_BLE_ID): cv.use_id(esp32_ble.ESP32BLE),
cv.Optional(CONF_MANUFACTURER): value_schema("string", templatable=False),
cv.Optional(CONF_APPEARANCE, default=0): cv.uint16_t,
cv.Optional(CONF_MODEL): value_schema("string", templatable=False),
cv.Optional(CONF_FIRMWARE_VERSION): value_schema("string", templatable=False),
cv.Optional(CONF_MANUFACTURER_DATA): cv.Schema([cv.uint8_t]),
@@ -531,6 +533,7 @@ async def to_code(config):
cg.add(parent.register_gatts_event_handler(var))
cg.add(parent.register_ble_status_event_handler(var))
cg.add(var.set_parent(parent))
cg.add(parent.advertising_set_appearance(config[CONF_APPEARANCE]))
if CONF_MANUFACTURER_DATA in config:
cg.add(var.set_manufacturer_data(config[CONF_MANUFACTURER_DATA]))
for service_config in config[CONF_SERVICES]:


@@ -17,6 +17,7 @@ from esphome.components.esp32_ble import (
import esphome.config_validation as cv
from esphome.const import (
CONF_ACTIVE,
CONF_CONTINUOUS,
CONF_DURATION,
CONF_ID,
CONF_INTERVAL,
@@ -42,7 +43,6 @@ CONF_MAX_CONNECTIONS = "max_connections"
CONF_ESP32_BLE_ID = "esp32_ble_id"
CONF_SCAN_PARAMETERS = "scan_parameters"
CONF_WINDOW = "window"
CONF_CONTINUOUS = "continuous"
CONF_ON_SCAN_END = "on_scan_end"
DEFAULT_MAX_CONNECTIONS = 3


@@ -245,7 +245,7 @@ void ESP32BLETracker::stop_scan_() {
return;
}
this->cancel_timeout("scan");
this->scanner_state_ = ScannerState::STOPPING;
this->set_scanner_state_(ScannerState::STOPPING);
esp_err_t err = esp_ble_gap_stop_scanning();
if (err != ESP_OK) {
ESP_LOGE(TAG, "esp_ble_gap_stop_scanning failed: %d", err);
@@ -272,7 +272,7 @@ void ESP32BLETracker::start_scan_(bool first) {
}
return;
}
this->scanner_state_ = ScannerState::STARTING;
this->set_scanner_state_(ScannerState::STARTING);
ESP_LOGD(TAG, "Starting scan, set scanner state to STARTING.");
if (!first) {
for (auto *listener : this->listeners_)
@@ -315,7 +315,7 @@ void ESP32BLETracker::end_of_scan_() {
for (auto *listener : this->listeners_)
listener->on_scan_end();
this->scanner_state_ = ScannerState::IDLE;
this->set_scanner_state_(ScannerState::IDLE);
}
void ESP32BLETracker::register_client(ESPBTClient *client) {
@@ -398,9 +398,9 @@ void ESP32BLETracker::gap_scan_start_complete_(const esp_ble_gap_cb_param_t::ble
}
if (param.status == ESP_BT_STATUS_SUCCESS) {
this->scan_start_fail_count_ = 0;
this->scanner_state_ = ScannerState::RUNNING;
this->set_scanner_state_(ScannerState::RUNNING);
} else {
this->scanner_state_ = ScannerState::FAILED;
this->set_scanner_state_(ScannerState::FAILED);
if (this->scan_start_fail_count_ != std::numeric_limits<uint8_t>::max()) {
this->scan_start_fail_count_++;
}
@@ -422,7 +422,7 @@ void ESP32BLETracker::gap_scan_stop_complete_(const esp_ble_gap_cb_param_t::ble_
ESP_LOGE(TAG, "Scan was stopped when stop complete.");
}
}
this->scanner_state_ = ScannerState::STOPPED;
this->set_scanner_state_(ScannerState::STOPPED);
}
void ESP32BLETracker::gap_scan_result_(const esp_ble_gap_cb_param_t::ble_scan_result_evt_param &param) {
@@ -449,7 +449,7 @@ void ESP32BLETracker::gap_scan_result_(const esp_ble_gap_cb_param_t::ble_scan_re
ESP_LOGE(TAG, "Scan was stopped when scan completed.");
}
}
this->scanner_state_ = ScannerState::STOPPED;
this->set_scanner_state_(ScannerState::STOPPED);
}
}
@@ -460,6 +460,11 @@ void ESP32BLETracker::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_i
}
}
void ESP32BLETracker::set_scanner_state_(ScannerState state) {
this->scanner_state_ = state;
this->scanner_state_callbacks_.call(state);
}
ESPBLEiBeacon::ESPBLEiBeacon(const uint8_t *data) { memcpy(&this->beacon_data_, data, sizeof(beacon_data_)); }
optional<ESPBLEiBeacon> ESPBLEiBeacon::from_manufacturer_data(const ServiceData &data) {
if (!data.uuid.contains(0x4C, 0x00))


@@ -218,6 +218,7 @@ class ESP32BLETracker : public Component,
void set_scan_interval(uint32_t scan_interval) { scan_interval_ = scan_interval; }
void set_scan_window(uint32_t scan_window) { scan_window_ = scan_window; }
void set_scan_active(bool scan_active) { scan_active_ = scan_active; }
bool get_scan_active() const { return scan_active_; }
void set_scan_continuous(bool scan_continuous) { scan_continuous_ = scan_continuous; }
/// Setup the FreeRTOS task and the Bluetooth stack.
@@ -241,6 +242,11 @@ class ESP32BLETracker : public Component,
void gap_event_handler(esp_gap_ble_cb_event_t event, esp_ble_gap_cb_param_t *param) override;
void ble_before_disabled_event_handler() override;
void add_scanner_state_callback(std::function<void(ScannerState)> &&callback) {
this->scanner_state_callbacks_.add(std::move(callback));
}
ScannerState get_scanner_state() const { return this->scanner_state_; }
protected:
void stop_scan_();
/// Start a single scan by setting up the parameters and doing some esp-idf calls.
@@ -255,6 +261,8 @@ class ESP32BLETracker : public Component,
void gap_scan_start_complete_(const esp_ble_gap_cb_param_t::ble_scan_start_cmpl_evt_param &param);
/// Called when a `ESP_GAP_BLE_SCAN_STOP_COMPLETE_EVT` event is received.
void gap_scan_stop_complete_(const esp_ble_gap_cb_param_t::ble_scan_stop_cmpl_evt_param &param);
/// Called to set the scanner state. Also invokes the registered callbacks so listeners know when the state changes.
void set_scanner_state_(ScannerState state);
int app_id_{0};
@@ -273,6 +281,7 @@ class ESP32BLETracker : public Component,
bool scan_continuous_;
bool scan_active_;
ScannerState scanner_state_{ScannerState::IDLE};
CallbackManager<void(ScannerState)> scanner_state_callbacks_;
bool ble_was_disabled_{true};
bool raw_advertisements_{false};
bool parse_advertisements_{false};
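The add_scanner_state_callback()/set_scanner_state_() pair added here lets other components observe scanner transitions. A minimal, non-authoritative sketch of a consumer (the watcher class, log tag and the esp32_ble_tracker namespace qualification are assumptions; only add_scanner_state_callback() and ScannerState come from the class above):

void MyBleWatcher::setup() {
  // tracker_ is assumed to be a pointer to the ESP32BLETracker instance
  this->tracker_->add_scanner_state_callback([](esp32_ble_tracker::ScannerState state) {
    if (state == esp32_ble_tracker::ScannerState::FAILED) {
      ESP_LOGW("my_ble_watcher", "BLE scanner failed to start");
    }
  });
}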


@@ -40,9 +40,6 @@ async def new_fastled_light(config):
if CONF_MAX_REFRESH_RATE in config:
cg.add(var.set_max_refresh_rate(config[CONF_MAX_REFRESH_RATE]))
cg.add_library("fastled/FastLED", "3.9.16")
await light.register_light(var, config)
# https://github.com/FastLED/FastLED/blob/master/library.json
# 3.3.3 has an issue on ESP32 with RMT and fastled_clockless:
# https://github.com/esphome/issues/issues/1375
cg.add_library("fastled/FastLED", "3.3.2")
return var


@@ -34,7 +34,7 @@ void FastLEDLightOutput::write_state(light::LightState *state) {
this->mark_shown_();
ESP_LOGVV(TAG, "Writing RGB values to bus...");
this->controller_->showLeds();
this->controller_->showLeds(this->state_parent_->current_values.get_brightness() * 255);
}
} // namespace fastled_base


@@ -8,30 +8,45 @@ namespace esphome {
namespace gpio_expander {
/// @brief A class to cache the read state of a GPIO expander.
/// This class caches reads between GPIO pins that are on the same bank.
/// This means that reading a whole port (e.g. 8 pins) needs only one
/// I2C/SPI read per main loop call. It assumes that one bit in the register identifies one GPIO pin.
/// A minimal usage sketch follows the class definition below.
/// Template parameters:
/// T - Type that represents the internal register. Can be uint8_t or uint16_t. Adjust to
/// match the size of your internal GPIO bank register.
/// N - Number of pins
template<typename T, T N> class CachedGpioExpander {
public:
bool digital_read(T pin) {
if (!this->read_cache_invalidated_[pin]) {
this->read_cache_invalidated_[pin] = true;
return this->digital_read_cache(pin);
uint8_t bank = pin / (sizeof(T) * BITS_PER_BYTE);
if (this->read_cache_invalidated_[bank]) {
this->read_cache_invalidated_[bank] = false;
if (!this->digital_read_hw(pin))
return false;
}
return this->digital_read_hw(pin);
return this->digital_read_cache(pin);
}
void digital_write(T pin, bool value) { this->digital_write_hw(pin, value); }
protected:
/// @brief Call the component's low-level function to read the GPIO bank state from the device
virtual bool digital_read_hw(T pin) = 0;
/// @brief Read the pin state from the component's internal cache.
virtual bool digital_read_cache(T pin) = 0;
/// @brief Call the component's low-level function to write the GPIO state to the device
virtual void digital_write_hw(T pin, bool value) = 0;
const uint8_t cache_byte_size_ = N / (sizeof(T) * BITS_PER_BYTE);
/// @brief Invalidate the cache. This function should be called in the component's loop().
void reset_pin_cache_() {
for (T i = 0; i < N; i++) {
this->read_cache_invalidated_[i] = false;
for (T i = 0; i < this->cache_byte_size_; i++) {
this->read_cache_invalidated_[i] = true;
}
}
std::array<bool, N> read_cache_invalidated_{};
static const uint8_t BITS_PER_BYTE = 8;
std::array<bool, N / (sizeof(T) * BITS_PER_BYTE)> read_cache_invalidated_{};
};
} // namespace gpio_expander
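A minimal sketch of a driver built on this cache, under stated assumptions: the hypothetical 16-pin device, its member names and the commented-out bus call are invented; only CachedGpioExpander and its three pure-virtual hooks come from the code above, and a real driver would also inherit esphome::Component so that loop() is actually called.

class My16PinExpander : public gpio_expander::CachedGpioExpander<uint8_t, 16> {
 public:
  void loop() { this->reset_pin_cache_(); }  // invalidate both banks once per main loop

 protected:
  bool digital_read_hw(uint8_t pin) override {
    // One bus transfer refreshes the whole cached input register; which bank `pin`
    // belongs to is already resolved by digital_read() in the base class.
    // return this->read_bytes(INPUT_REG, reinterpret_cast<uint8_t *>(&this->input_state_), 2);
    return true;  // return false to report a failed bus read
  }
  bool digital_read_cache(uint8_t pin) override { return (this->input_state_ >> pin) & 1; }
  void digital_write_hw(uint8_t pin, bool value) override {
    // Update the cached output register and write it to the device (device specific, omitted).
  }

  uint16_t input_state_{0};  // cached input banks, one bit per pin
};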


@@ -5,6 +5,7 @@ import esphome.config_validation as cv
from esphome.const import (
CONF_BORDER,
CONF_COLOR,
CONF_CONTINUOUS,
CONF_DIRECTION,
CONF_DURATION,
CONF_HEIGHT,
@@ -61,8 +62,6 @@ VALUE_POSITION_TYPE = {
"BELOW": ValuePositionType.VALUE_POSITION_TYPE_BELOW,
}
CONF_CONTINUOUS = "continuous"
GRAPH_TRACE_SCHEMA = cv.Schema(
{
cv.GenerateID(): cv.declare_id(GraphTrace),


@@ -18,6 +18,7 @@ MODELS = {
"yac": Model.GREE_YAC,
"yac1fb9": Model.GREE_YAC1FB9,
"yx1ff": Model.GREE_YX1FF,
"yag": Model.GREE_YAG,
}
CONFIG_SCHEMA = climate_ir.CLIMATE_IR_WITH_RECEIVER_SCHEMA.extend(


@@ -22,13 +22,21 @@ void GreeClimate::transmit_state() {
remote_state[0] = this->fan_speed_() | this->operation_mode_();
remote_state[1] = this->temperature_();
if (this->model_ == GREE_YAN || this->model_ == GREE_YX1FF) {
if (this->model_ == GREE_YAN || this->model_ == GREE_YX1FF || this->model_ == GREE_YAG) {
remote_state[2] = 0x60;
remote_state[3] = 0x50;
remote_state[4] = this->vertical_swing_();
}
if (this->model_ == GREE_YAC) {
if (this->model_ == GREE_YAG) {
remote_state[5] = 0x40;
if (this->vertical_swing_() == GREE_VDIR_SWING || this->horizontal_swing_() == GREE_HDIR_SWING) {
remote_state[0] |= (1 << 6);
}
}
if (this->model_ == GREE_YAC || this->model_ == GREE_YAG) {
remote_state[4] |= (this->horizontal_swing_() << 4);
}
@@ -57,6 +65,12 @@ void GreeClimate::transmit_state() {
// Calculate the checksum
if (this->model_ == GREE_YAN || this->model_ == GREE_YX1FF) {
remote_state[7] = ((remote_state[0] << 4) + (remote_state[1] << 4) + 0xC0);
} else if (this->model_ == GREE_YAG) {
remote_state[7] =
((((remote_state[0] & 0x0F) + (remote_state[1] & 0x0F) + (remote_state[2] & 0x0F) + (remote_state[3] & 0x0F) +
((remote_state[4] & 0xF0) >> 4) + ((remote_state[5] & 0xF0) >> 4) + ((remote_state[6] & 0xF0) >> 4) + 0x0A) &
0x0F)
<< 4);
} else {
remote_state[7] =
((((remote_state[0] & 0x0F) + (remote_state[1] & 0x0F) + (remote_state[2] & 0x0F) + (remote_state[3] & 0x0F) +


@@ -58,7 +58,7 @@ const uint8_t GREE_VDIR_MIDDLE = 0x04;
const uint8_t GREE_VDIR_MDOWN = 0x05;
const uint8_t GREE_VDIR_DOWN = 0x06;
// Only available on YAC
// Only available on YAC/YAG
// Horizontal air directions. Note that these cannot be set on all heat pumps
const uint8_t GREE_HDIR_AUTO = 0x00;
const uint8_t GREE_HDIR_MANUAL = 0x00;
@@ -78,7 +78,7 @@ const uint8_t GREE_PRESET_SLEEP = 0x01;
const uint8_t GREE_PRESET_SLEEP_BIT = 0x80;
// Model codes
enum Model { GREE_GENERIC, GREE_YAN, GREE_YAA, GREE_YAC, GREE_YAC1FB9, GREE_YX1FF };
enum Model { GREE_GENERIC, GREE_YAN, GREE_YAA, GREE_YAC, GREE_YAC1FB9, GREE_YX1FF, GREE_YAG };
class GreeClimate : public climate_ir::ClimateIR {
public:


@@ -69,7 +69,7 @@ void HLW8012Component::update() {
float power = cf_hz * this->power_multiplier_;
if (this->change_mode_at_ != 0) {
if (this->change_mode_at_ != 0 || this->change_mode_every_ == 0) {
// Only read cf1 after one cycle. Apparently it's quite unstable after being changed.
if (this->current_mode_) {
float current = cf1_hz * this->current_multiplier_;


@@ -8,7 +8,7 @@
namespace esphome {
namespace hm3301 {
static const uint8_t SELECT_COMM_CMD = 0X88;
static const uint8_t SELECT_COMM_CMD = 0x88;
class HM3301Component : public PollingComponent, public i2c::I2CDevice {
public:


@@ -295,8 +295,8 @@ async def http_request_action_to_code(config, action_id, template_arg, args):
for key in json_:
template_ = await cg.templatable(json_[key], args, cg.std_string)
cg.add(var.add_json(key, template_))
for key in config.get(CONF_REQUEST_HEADERS, []):
template_ = await cg.templatable(key, args, cg.std_string)
for key, value in config.get(CONF_REQUEST_HEADERS, {}).items():
template_ = await cg.templatable(value, args, cg.const_char_ptr)
cg.add(var.add_request_header(key, template_))
for value in config.get(CONF_COLLECT_HEADERS, []):


@@ -139,6 +139,10 @@ class I2CDevice {
/// @param address of the device
void set_i2c_address(uint8_t address) { address_ = address; }
/// @brief Returns the I2C address of the object.
/// @return the I2C address
uint8_t get_i2c_address() const { return this->address_; }
/// @brief we store the pointer to the I2CBus to use
/// @param bus pointer to the I2CBus object
void set_i2c_bus(I2CBus *bus) { bus_ = bus; }


@@ -67,7 +67,7 @@ void IDFI2CBus::setup() {
ESP_LOGV(TAG, "i2c_timeout set to %" PRIu32 " ticks (%" PRIu32 " us)", timeout_ * 80, timeout_);
}
}
err = i2c_driver_install(port_, I2C_MODE_MASTER, 0, 0, ESP_INTR_FLAG_IRAM);
err = i2c_driver_install(port_, I2C_MODE_MASTER, 0, 0, 0);
if (err != ESP_OK) {
ESP_LOGW(TAG, "i2c_driver_install failed: %s", esp_err_to_name(err));
this->mark_failed();


@@ -39,6 +39,7 @@ CONF_SECONDARY = "secondary"
CONF_USE_APLL = "use_apll"
CONF_BITS_PER_CHANNEL = "bits_per_channel"
CONF_MCLK_MULTIPLE = "mclk_multiple"
CONF_MONO = "mono"
CONF_LEFT = "left"
CONF_RIGHT = "right"
@@ -122,8 +123,25 @@ I2S_SLOT_BIT_WIDTH = {
32: i2s_slot_bit_width_t.I2S_SLOT_BIT_WIDTH_32BIT,
}
i2s_mclk_multiple_t = cg.global_ns.enum("i2s_mclk_multiple_t")
I2S_MCLK_MULTIPLE = {
128: i2s_mclk_multiple_t.I2S_MCLK_MULTIPLE_128,
256: i2s_mclk_multiple_t.I2S_MCLK_MULTIPLE_256,
384: i2s_mclk_multiple_t.I2S_MCLK_MULTIPLE_384,
512: i2s_mclk_multiple_t.I2S_MCLK_MULTIPLE_512,
}
_validate_bits = cv.float_with_unit("bits", "bit")
def validate_mclk_divisible_by_3(config):
if config[CONF_BITS_PER_SAMPLE] == 24 and config[CONF_MCLK_MULTIPLE] % 3 != 0:
raise cv.Invalid(
f"{CONF_MCLK_MULTIPLE} must be divisible by 3 when bits per sample is 24"
)
return config
_use_legacy_driver = None
@@ -155,6 +173,7 @@ def i2s_audio_component_schema(
cv.Any(cv.float_with_unit("bits", "bit"), "default"),
cv.one_of(*I2S_BITS_PER_CHANNEL),
),
cv.Optional(CONF_MCLK_MULTIPLE, default=256): cv.one_of(*I2S_MCLK_MULTIPLE),
}
)
@@ -182,11 +201,10 @@ async def register_i2s_audio_component(var, config):
slot_mask = CONF_BOTH
cg.add(var.set_slot_mode(I2S_SLOT_MODE[slot_mode]))
cg.add(var.set_std_slot_mask(I2S_STD_SLOT_MASK[slot_mask]))
cg.add(
var.set_slot_bit_width(I2S_SLOT_BIT_WIDTH[config[CONF_BITS_PER_CHANNEL]])
)
cg.add(var.set_slot_bit_width(I2S_SLOT_BIT_WIDTH[config[CONF_BITS_PER_SAMPLE]]))
cg.add(var.set_sample_rate(config[CONF_SAMPLE_RATE]))
cg.add(var.set_use_apll(config[CONF_USE_APLL]))
cg.add(var.set_mclk_multiple(I2S_MCLK_MULTIPLE[config[CONF_MCLK_MULTIPLE]]))
def validate_use_legacy(value):


@@ -31,6 +31,7 @@ class I2SAudioBase : public Parented<I2SAudioComponent> {
#endif
void set_sample_rate(uint32_t sample_rate) { this->sample_rate_ = sample_rate; }
void set_use_apll(uint32_t use_apll) { this->use_apll_ = use_apll; }
void set_mclk_multiple(i2s_mclk_multiple_t mclk_multiple) { this->mclk_multiple_ = mclk_multiple; }
protected:
#ifdef USE_I2S_LEGACY
@@ -46,6 +47,7 @@ class I2SAudioBase : public Parented<I2SAudioComponent> {
#endif
uint32_t sample_rate_;
bool use_apll_;
i2s_mclk_multiple_t mclk_multiple_;
};
class I2SAudioIn : public I2SAudioBase {};


@@ -1,13 +1,20 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components import esp32, microphone
from esphome.components import audio, esp32, microphone
from esphome.components.adc import ESP32_VARIANT_ADC1_PIN_TO_CHANNEL, validate_adc_pin
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_NUMBER
from esphome.const import (
CONF_BITS_PER_SAMPLE,
CONF_CHANNEL,
CONF_ID,
CONF_NUM_CHANNELS,
CONF_NUMBER,
CONF_SAMPLE_RATE,
)
from .. import (
CONF_CHANNEL,
CONF_I2S_DIN_PIN,
CONF_LEFT,
CONF_MONO,
CONF_RIGHT,
I2SAudioIn,
@@ -15,6 +22,7 @@ from .. import (
i2s_audio_ns,
register_i2s_audio_component,
use_legacy,
validate_mclk_divisible_by_3,
)
CODEOWNERS = ["@jesserockz"]
@@ -32,7 +40,7 @@ INTERNAL_ADC_VARIANTS = [esp32.const.VARIANT_ESP32]
PDM_VARIANTS = [esp32.const.VARIANT_ESP32, esp32.const.VARIANT_ESP32S3]
def validate_esp32_variant(config):
def _validate_esp32_variant(config):
variant = esp32.get_esp32_variant()
if config[CONF_ADC_TYPE] == "external":
if config[CONF_PDM]:
@@ -46,12 +54,34 @@ def validate_esp32_variant(config):
raise NotImplementedError
def validate_channel(config):
def _validate_channel(config):
if config[CONF_CHANNEL] == CONF_MONO:
raise cv.Invalid(f"I2S microphone does not support {CONF_MONO}.")
return config
def _set_num_channels_from_config(config):
if config[CONF_CHANNEL] in (CONF_LEFT, CONF_RIGHT):
config[CONF_NUM_CHANNELS] = 1
else:
config[CONF_NUM_CHANNELS] = 2
return config
def _set_stream_limits(config):
audio.set_stream_limits(
min_bits_per_sample=config.get(CONF_BITS_PER_SAMPLE),
max_bits_per_sample=config.get(CONF_BITS_PER_SAMPLE),
min_channels=config.get(CONF_NUM_CHANNELS),
max_channels=config.get(CONF_NUM_CHANNELS),
min_sample_rate=config.get(CONF_SAMPLE_RATE),
max_sample_rate=config.get(CONF_SAMPLE_RATE),
)(config)
return config
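# Rough illustration (not the real audio helpers): left/right pins the stream to one channel,
# any other accepted value to two, and equal min/max limits pin the format completely.
def _pinned(limits: dict) -> dict:
    return {name: lo for name, (lo, hi) in limits.items() if lo == hi}
# _pinned({"channels": (1, 1), "bits_per_sample": (16, 16), "sample_rate": (16000, 16000)})
# == {"channels": 1, "bits_per_sample": 16, "sample_rate": 16000}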
BASE_SCHEMA = microphone.MICROPHONE_SCHEMA.extend(
i2s_audio_component_schema(
I2SAudioMicrophone,
@@ -79,8 +109,11 @@ CONFIG_SCHEMA = cv.All(
},
key=CONF_ADC_TYPE,
),
validate_esp32_variant,
validate_channel,
_validate_esp32_variant,
_validate_channel,
_set_num_channels_from_config,
_set_stream_limits,
validate_mclk_divisible_by_3,
)

View File

@@ -15,10 +15,25 @@
namespace esphome {
namespace i2s_audio {
static const size_t BUFFER_SIZE = 512;
static const UBaseType_t MAX_LISTENERS = 16;
static const uint32_t READ_DURATION_MS = 16;
static const size_t TASK_STACK_SIZE = 4096;
static const ssize_t TASK_PRIORITY = 23;
static const char *const TAG = "i2s_audio.microphone";
enum MicrophoneEventGroupBits : uint32_t {
COMMAND_STOP = (1 << 0), // stops the microphone task
TASK_STARTING = (1 << 10),
TASK_RUNNING = (1 << 11),
TASK_STOPPING = (1 << 12),
TASK_STOPPED = (1 << 13),
ALL_BITS = 0x00FFFFFF, // All valid FreeRTOS event group bits
};
void I2SAudioMicrophone::setup() {
ESP_LOGCONFIG(TAG, "Setting up I2S Audio Microphone...");
#ifdef USE_I2S_LEGACY
@@ -41,21 +56,64 @@ void I2SAudioMicrophone::setup() {
}
}
}
this->active_listeners_semaphore_ = xSemaphoreCreateCounting(MAX_LISTENERS, MAX_LISTENERS);
if (this->active_listeners_semaphore_ == nullptr) {
ESP_LOGE(TAG, "Failed to create semaphore");
this->mark_failed();
return;
}
this->event_group_ = xEventGroupCreate();
if (this->event_group_ == nullptr) {
ESP_LOGE(TAG, "Failed to create event group");
this->mark_failed();
return;
}
}
void I2SAudioMicrophone::start() {
if (this->is_failed())
return;
if (this->state_ == microphone::STATE_RUNNING)
return; // Already running
this->state_ = microphone::STATE_STARTING;
xSemaphoreTake(this->active_listeners_semaphore_, 0);
}
void I2SAudioMicrophone::start_() {
bool I2SAudioMicrophone::start_driver_() {
if (!this->parent_->try_lock()) {
return; // Waiting for another i2s to return lock
return false; // Waiting for another i2s to return lock
}
esp_err_t err;
uint8_t channel_count = 1;
#ifdef USE_I2S_LEGACY
uint8_t bits_per_sample = this->bits_per_sample_;
if (this->channel_ == I2S_CHANNEL_FMT_RIGHT_LEFT) {
channel_count = 2;
}
#else
if (this->slot_bit_width_ == I2S_SLOT_BIT_WIDTH_AUTO) {
this->slot_bit_width_ = I2S_SLOT_BIT_WIDTH_16BIT;
}
uint8_t bits_per_sample = this->slot_bit_width_;
if (this->slot_mode_ == I2S_SLOT_MODE_STEREO) {
channel_count = 2;
}
#endif
#ifdef USE_ESP32_VARIANT_ESP32
// ESP32 reads audio aligned to a multiple of 2 bytes. For example, if configured for 24 bits per sample, then it will
// produce 32 bits per sample, where the actual data is in the most significant bits. Other ESP32 variants produce 24
// bits per sample in this situation.
if (bits_per_sample < 16) {
bits_per_sample = 16;
} else if ((bits_per_sample > 16) && (bits_per_sample <= 32)) {
bits_per_sample = 32;
}
#endif
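// Worked example: a 24-bit configuration on the original ESP32 therefore reports
// bits_per_sample = 32 to the audio stream info, with each sample left-justified in a
// 32-bit container; newer variants keep the native 24 bits.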
#ifdef USE_I2S_LEGACY
i2s_driver_config_t config = {
.mode = (i2s_mode_t) (this->i2s_mode_ | I2S_MODE_RX),
@@ -65,11 +123,11 @@ void I2SAudioMicrophone::start_() {
.communication_format = I2S_COMM_FORMAT_STAND_I2S,
.intr_alloc_flags = ESP_INTR_FLAG_LEVEL1,
.dma_buf_count = 4,
.dma_buf_len = 256,
.dma_buf_len = 240, // Must be divisible by 3 to support 24 bits per sample on old driver and newer variants
.use_apll = this->use_apll_,
.tx_desc_auto_clear = false,
.fixed_mclk = 0,
.mclk_multiple = I2S_MCLK_MULTIPLE_256,
.mclk_multiple = this->mclk_multiple_,
.bits_per_chan = this->bits_per_channel_,
};
@@ -80,20 +138,20 @@ void I2SAudioMicrophone::start_() {
if (err != ESP_OK) {
ESP_LOGW(TAG, "Error installing I2S driver: %s", esp_err_to_name(err));
this->status_set_error();
return;
return false;
}
err = i2s_set_adc_mode(ADC_UNIT_1, this->adc_channel_);
if (err != ESP_OK) {
ESP_LOGW(TAG, "Error setting ADC mode: %s", esp_err_to_name(err));
this->status_set_error();
return;
return false;
}
err = i2s_adc_enable(this->parent_->get_port());
if (err != ESP_OK) {
ESP_LOGW(TAG, "Error enabling ADC: %s", esp_err_to_name(err));
this->status_set_error();
return;
return false;
}
} else
@@ -106,7 +164,7 @@ void I2SAudioMicrophone::start_() {
if (err != ESP_OK) {
ESP_LOGW(TAG, "Error installing I2S driver: %s", esp_err_to_name(err));
this->status_set_error();
return;
return false;
}
i2s_pin_config_t pin_config = this->parent_->get_pin_config();
@@ -116,7 +174,7 @@ void I2SAudioMicrophone::start_() {
if (err != ESP_OK) {
ESP_LOGW(TAG, "Error setting I2S pin: %s", esp_err_to_name(err));
this->status_set_error();
return;
return false;
}
}
#else
@@ -132,7 +190,7 @@ void I2SAudioMicrophone::start_() {
if (err != ESP_OK) {
ESP_LOGW(TAG, "Error creating new I2S channel: %s", esp_err_to_name(err));
this->status_set_error();
return;
return false;
}
i2s_clock_src_t clk_src = I2S_CLK_SRC_DEFAULT;
@@ -144,10 +202,12 @@ void I2SAudioMicrophone::start_() {
i2s_std_gpio_config_t pin_config = this->parent_->get_pin_config();
#if SOC_I2S_SUPPORTS_PDM_RX
if (this->pdm_) {
bits_per_sample = 16; // PDM mics are always 16 bits per sample with the IDF 5 driver
i2s_pdm_rx_clk_config_t clk_cfg = {
.sample_rate_hz = this->sample_rate_,
.clk_src = clk_src,
.mclk_multiple = I2S_MCLK_MULTIPLE_256,
.mclk_multiple = this->mclk_multiple_,
.dn_sample_mode = I2S_PDM_DSR_8S,
};
@@ -185,15 +245,10 @@ void I2SAudioMicrophone::start_() {
i2s_std_clk_config_t clk_cfg = {
.sample_rate_hz = this->sample_rate_,
.clk_src = clk_src,
.mclk_multiple = I2S_MCLK_MULTIPLE_256,
.mclk_multiple = this->mclk_multiple_,
};
i2s_data_bit_width_t data_bit_width;
if (this->slot_bit_width_ != I2S_SLOT_BIT_WIDTH_8BIT) {
data_bit_width = I2S_DATA_BIT_WIDTH_16BIT;
} else {
data_bit_width = I2S_DATA_BIT_WIDTH_8BIT;
}
i2s_std_slot_config_t std_slot_cfg = I2S_STD_PHILIPS_SLOT_DEFAULT_CONFIG(data_bit_width, this->slot_mode_);
i2s_std_slot_config_t std_slot_cfg =
I2S_STD_PHILIPS_SLOT_DEFAULT_CONFIG((i2s_data_bit_width_t) this->slot_bit_width_, this->slot_mode_);
std_slot_cfg.slot_bit_width = this->slot_bit_width_;
std_slot_cfg.slot_mask = this->std_slot_mask_;
@@ -210,7 +265,7 @@ void I2SAudioMicrophone::start_() {
if (err != ESP_OK) {
ESP_LOGW(TAG, "Error initializing I2S channel: %s", esp_err_to_name(err));
this->status_set_error();
return;
return false;
}
/* Before reading data, start the RX channel first */
@@ -218,26 +273,25 @@ void I2SAudioMicrophone::start_() {
if (err != ESP_OK) {
ESP_LOGW(TAG, "Error enabling I2S Microphone: %s", esp_err_to_name(err));
this->status_set_error();
return;
return false;
}
#endif
this->state_ = microphone::STATE_RUNNING;
this->high_freq_.start();
this->audio_stream_info_ = audio::AudioStreamInfo(bits_per_sample, channel_count, this->sample_rate_);
this->status_clear_error();
return true;
}
void I2SAudioMicrophone::stop() {
if (this->state_ == microphone::STATE_STOPPED || this->is_failed())
return;
if (this->state_ == microphone::STATE_STARTING) {
this->state_ = microphone::STATE_STOPPED;
return;
}
this->state_ = microphone::STATE_STOPPING;
xSemaphoreGive(this->active_listeners_semaphore_);
}
void I2SAudioMicrophone::stop_() {
void I2SAudioMicrophone::stop_driver_() {
esp_err_t err;
#ifdef USE_I2S_LEGACY
#if SOC_I2S_SUPPORTS_ADC
@@ -279,12 +333,52 @@ void I2SAudioMicrophone::stop_() {
}
#endif
this->parent_->unlock();
this->state_ = microphone::STATE_STOPPED;
this->high_freq_.stop();
this->status_clear_error();
}
size_t I2SAudioMicrophone::read(int16_t *buf, size_t len, TickType_t ticks_to_wait) {
void I2SAudioMicrophone::mic_task(void *params) {
I2SAudioMicrophone *this_microphone = (I2SAudioMicrophone *) params;
xEventGroupSetBits(this_microphone->event_group_, MicrophoneEventGroupBits::TASK_STARTING);
uint8_t start_counter = 0;
bool started = this_microphone->start_driver_();
while (!started && start_counter < 10) {
// Attempt to load the driver again in 100 ms. Doesn't slow down the main loop since it's in a task.

vTaskDelay(pdMS_TO_TICKS(100));
++start_counter;
started = this_microphone->start_driver_();
}
if (started) {
xEventGroupSetBits(this_microphone->event_group_, MicrophoneEventGroupBits::TASK_RUNNING);
const size_t bytes_to_read = this_microphone->audio_stream_info_.ms_to_bytes(READ_DURATION_MS);
std::vector<uint8_t> samples;
samples.reserve(bytes_to_read);
while (!(xEventGroupGetBits(this_microphone->event_group_) & COMMAND_STOP)) {
if (this_microphone->data_callbacks_.size() > 0) {
samples.resize(bytes_to_read);
size_t bytes_read = this_microphone->read_(samples.data(), bytes_to_read, 2 * pdMS_TO_TICKS(READ_DURATION_MS));
samples.resize(bytes_read);
this_microphone->data_callbacks_.call(samples);
} else {
delay(READ_DURATION_MS);
}
}
}
xEventGroupSetBits(this_microphone->event_group_, MicrophoneEventGroupBits::TASK_STOPPING);
this_microphone->stop_driver_();
xEventGroupSetBits(this_microphone->event_group_, MicrophoneEventGroupBits::TASK_STOPPED);
while (true) {
// Continuously delay until the loop method deletes the task
delay(10);
}
}
size_t I2SAudioMicrophone::read_(uint8_t *buf, size_t len, TickType_t ticks_to_wait) {
size_t bytes_read = 0;
#ifdef USE_I2S_LEGACY
esp_err_t err = i2s_read(this->parent_->get_port(), buf, len, &bytes_read, ticks_to_wait);
@@ -303,38 +397,7 @@ size_t I2SAudioMicrophone::read(int16_t *buf, size_t len, TickType_t ticks_to_wa
return 0;
}
this->status_clear_warning();
// ESP-IDF I2S implementation right-extends 8-bit data to 16 bits,
// and 24-bit data to 32 bits.
#ifdef USE_I2S_LEGACY
switch (this->bits_per_sample_) {
case I2S_BITS_PER_SAMPLE_8BIT:
case I2S_BITS_PER_SAMPLE_16BIT:
return bytes_read;
case I2S_BITS_PER_SAMPLE_24BIT:
case I2S_BITS_PER_SAMPLE_32BIT: {
size_t samples_read = bytes_read / sizeof(int32_t);
for (size_t i = 0; i < samples_read; i++) {
int32_t temp = reinterpret_cast<int32_t *>(buf)[i] >> 14;
buf[i] = clamp<int16_t>(temp, INT16_MIN, INT16_MAX);
}
return samples_read * sizeof(int16_t);
}
default:
ESP_LOGE(TAG, "Unsupported bits per sample: %d", this->bits_per_sample_);
return 0;
}
#else
#ifndef USE_ESP32_VARIANT_ESP32
// For newer ESP32 variants 8 bit data needs to be extended to 16 bit.
if (this->slot_bit_width_ == I2S_SLOT_BIT_WIDTH_8BIT) {
size_t samples_read = bytes_read / sizeof(int8_t);
for (size_t i = samples_read - 1; i >= 0; i--) {
int16_t temp = static_cast<int16_t>(reinterpret_cast<int8_t *>(buf)[i]) << 8;
buf[i] = temp;
}
return samples_read * sizeof(int16_t);
}
#else
#if defined(USE_ESP32_VARIANT_ESP32) and not defined(USE_I2S_LEGACY)
// For ESP32 8/16 bit standard mono mode samples need to be switched.
if (this->slot_mode_ == I2S_SLOT_MODE_MONO && this->slot_bit_width_ <= 16 && !this->pdm_) {
size_t samples_read = bytes_read / sizeof(int16_t);
@@ -346,31 +409,62 @@ size_t I2SAudioMicrophone::read(int16_t *buf, size_t len, TickType_t ticks_to_wa
}
#endif
return bytes_read;
#endif
}
void I2SAudioMicrophone::read_() {
std::vector<int16_t> samples;
samples.resize(BUFFER_SIZE);
size_t bytes_read = this->read(samples.data(), BUFFER_SIZE * sizeof(int16_t), 0);
samples.resize(bytes_read / sizeof(int16_t));
this->data_callbacks_.call(samples);
}
void I2SAudioMicrophone::loop() {
uint32_t event_group_bits = xEventGroupGetBits(this->event_group_);
if (event_group_bits & MicrophoneEventGroupBits::TASK_STARTING) {
ESP_LOGD(TAG, "Task has started, attempting to setup I2S audio driver");
xEventGroupClearBits(this->event_group_, MicrophoneEventGroupBits::TASK_STARTING);
}
if (event_group_bits & MicrophoneEventGroupBits::TASK_RUNNING) {
ESP_LOGD(TAG, "Task is running and reading data");
xEventGroupClearBits(this->event_group_, MicrophoneEventGroupBits::TASK_RUNNING);
this->state_ = microphone::STATE_RUNNING;
}
if (event_group_bits & MicrophoneEventGroupBits::TASK_STOPPING) {
ESP_LOGD(TAG, "Task is stopping, attempting to unload the I2S audio driver");
xEventGroupClearBits(this->event_group_, MicrophoneEventGroupBits::TASK_STOPPING);
}
if ((event_group_bits & MicrophoneEventGroupBits::TASK_STOPPED)) {
ESP_LOGD(TAG, "Task is finished, freeing resources");
vTaskDelete(this->task_handle_);
this->task_handle_ = nullptr;
xEventGroupClearBits(this->event_group_, ALL_BITS);
this->state_ = microphone::STATE_STOPPED;
}
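// The counting semaphore tracks active listeners: start() takes a slot and stop() gives it
// back, so a count below MAX_LISTENERS means at least one listener is requesting audio.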
if ((uxSemaphoreGetCount(this->active_listeners_semaphore_) < MAX_LISTENERS) &&
(this->state_ == microphone::STATE_STOPPED)) {
this->state_ = microphone::STATE_STARTING;
}
if ((uxSemaphoreGetCount(this->active_listeners_semaphore_) == MAX_LISTENERS) &&
(this->state_ == microphone::STATE_RUNNING)) {
this->state_ = microphone::STATE_STOPPING;
}
switch (this->state_) {
case microphone::STATE_STOPPED:
break;
case microphone::STATE_STARTING:
this->start_();
break;
case microphone::STATE_RUNNING:
if (this->data_callbacks_.size() > 0) {
this->read_();
if ((this->task_handle_ == nullptr) && !this->status_has_error()) {
xTaskCreate(I2SAudioMicrophone::mic_task, "mic_task", TASK_STACK_SIZE, (void *) this, TASK_PRIORITY,
&this->task_handle_);
if (this->task_handle_ == nullptr) {
this->status_momentary_error("Task failed to start, attempting again in 1 second", 1000);
}
}
break;
case microphone::STATE_RUNNING:
break;
case microphone::STATE_STOPPING:
this->stop_();
xEventGroupSetBits(this->event_group_, MicrophoneEventGroupBits::COMMAND_STOP);
break;
case microphone::STATE_STOPPED:
break;
}
}

View File

@@ -7,6 +7,9 @@
#include "esphome/components/microphone/microphone.h"
#include "esphome/core/component.h"
#include <freertos/event_groups.h>
#include <freertos/semphr.h>
namespace esphome {
namespace i2s_audio {
@@ -25,9 +28,6 @@ class I2SAudioMicrophone : public I2SAudioIn, public microphone::Microphone, pub
void set_pdm(bool pdm) { this->pdm_ = pdm; }
size_t read(int16_t *buf, size_t len, TickType_t ticks_to_wait);
size_t read(int16_t *buf, size_t len) override { return this->read(buf, len, pdMS_TO_TICKS(100)); }
#ifdef USE_I2S_LEGACY
#if SOC_I2S_SUPPORTS_ADC
void set_adc_channel(adc1_channel_t channel) {
@@ -38,9 +38,17 @@ class I2SAudioMicrophone : public I2SAudioIn, public microphone::Microphone, pub
#endif
protected:
void start_();
void stop_();
void read_();
bool start_driver_();
void stop_driver_();
size_t read_(uint8_t *buf, size_t len, TickType_t ticks_to_wait);
static void mic_task(void *params);
SemaphoreHandle_t active_listeners_semaphore_{nullptr};
EventGroupHandle_t event_group_{nullptr};
TaskHandle_t task_handle_{nullptr};
#ifdef USE_I2S_LEGACY
int8_t din_pin_{I2S_PIN_NO_CHANGE};
@@ -53,8 +61,6 @@ class I2SAudioMicrophone : public I2SAudioIn, public microphone::Microphone, pub
i2s_chan_handle_t rx_handle_;
#endif
bool pdm_{false};
HighFrequencyLoopRequester high_freq_;
};
} // namespace i2s_audio

View File

@@ -27,6 +27,7 @@ from .. import (
i2s_audio_ns,
register_i2s_audio_component,
use_legacy,
validate_mclk_divisible_by_3,
)
AUTO_LOAD = ["audio"]
@@ -155,6 +156,7 @@ CONFIG_SCHEMA = cv.All(
_validate_esp32_variant,
_set_num_channels_from_config,
_set_stream_limits,
validate_mclk_divisible_by_3,
)

View File

@@ -545,7 +545,7 @@ esp_err_t I2SAudioSpeaker::start_i2s_driver_(audio::AudioStreamInfo &audio_strea
.use_apll = this->use_apll_,
.tx_desc_auto_clear = true,
.fixed_mclk = I2S_PIN_NO_CHANGE,
.mclk_multiple = I2S_MCLK_MULTIPLE_256,
.mclk_multiple = this->mclk_multiple_,
.bits_per_chan = this->bits_per_channel_,
#if SOC_I2S_SUPPORTS_TDM
.chan_mask = (i2s_channel_t) (I2S_TDM_ACTIVE_CH0 | I2S_TDM_ACTIVE_CH1),
@@ -614,7 +614,7 @@ esp_err_t I2SAudioSpeaker::start_i2s_driver_(audio::AudioStreamInfo &audio_strea
i2s_std_clk_config_t clk_cfg = {
.sample_rate_hz = audio_stream_info.get_sample_rate(),
.clk_src = clk_src,
.mclk_multiple = I2S_MCLK_MULTIPLE_256,
.mclk_multiple = this->mclk_multiple_,
};
i2s_slot_mode_t slot_mode = this->slot_mode_;

View File

@@ -388,7 +388,7 @@ static const uint8_t PROGMEM INITCMD_GC9D01N[] = {
0x8D, 1, 0xFF,
0x8E, 1, 0xFF,
0x8F, 1, 0xFF,
0X3A, 1, 0x05, // COLMOD: Pixel Format Set (3Ah) MCU interface, 16 bits / pixel
0x3A, 1, 0x05, // COLMOD: Pixel Format Set (3Ah) MCU interface, 16 bits / pixel
0xEC, 1, 0x01, // Inversion (ECh) DINV=1+2H1V column for Dual Gate (BFh=0)
// According to datasheet Inversion (ECh) value 0x01 isn't valid, but Lilygo uses it everywhere
0x74, 7, 0x02, 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00,

View File

@@ -286,9 +286,18 @@ CONF_TRANSPARENCY = "transparency"
IMAGE_DOWNLOAD_TIMEOUT = 30 # seconds
SOURCE_LOCAL = "local"
SOURCE_MDI = "mdi"
SOURCE_WEB = "web"
SOURCE_MDI = "mdi"
SOURCE_MDIL = "mdil"
SOURCE_MEMORY = "memory"
MDI_SOURCES = {
SOURCE_MDI: "https://raw.githubusercontent.com/Templarian/MaterialDesign/master/svg/",
SOURCE_MDIL: "https://raw.githubusercontent.com/Pictogrammers/MaterialDesignLight/refs/heads/master/svg/",
SOURCE_MEMORY: "https://raw.githubusercontent.com/Pictogrammers/Memory/refs/heads/main/src/svg/",
}
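# e.g. the shorthands "mdi:home", "mdil:home" and "memory:home" (assuming the icon exists in
# each set) resolve to the matching base URL above plus "home.svg".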
Image_ = image_ns.class_("Image")
INSTANCE_TYPE = Image_
@@ -313,12 +322,12 @@ def download_file(url, path):
return str(path)
def download_mdi(value):
def download_gh_svg(value, source):
mdi_id = value[CONF_ICON] if isinstance(value, dict) else value
base_dir = external_files.compute_local_file_dir(DOMAIN) / "mdi"
base_dir = external_files.compute_local_file_dir(DOMAIN) / source
path = base_dir / f"{mdi_id}.svg"
url = f"https://raw.githubusercontent.com/Templarian/MaterialDesign/master/svg/{mdi_id}.svg"
url = MDI_SOURCES[source] + mdi_id + ".svg"
return download_file(url, path)
@@ -353,12 +362,12 @@ def validate_cairosvg_installed():
def validate_file_shorthand(value):
value = cv.string_strict(value)
if value.startswith("mdi:"):
match = re.search(r"mdi:([a-zA-Z0-9\-]+)", value)
parts = value.strip().split(":")
if len(parts) == 2 and parts[0] in MDI_SOURCES:
match = re.match(r"[a-zA-Z0-9\-]+", parts[1])
if match is None:
raise cv.Invalid("Could not parse mdi icon name.")
icon = match.group(1)
return download_mdi(icon)
raise cv.Invalid(f"Could not parse mdi icon name from '{value}'.")
return download_gh_svg(parts[1], parts[0])
if value.startswith("http://") or value.startswith("https://"):
return download_image(value)
@@ -374,12 +383,20 @@ LOCAL_SCHEMA = cv.All(
local_path,
)
MDI_SCHEMA = cv.All(
{
cv.Required(CONF_ICON): cv.string,
},
download_mdi,
)
def mdi_schema(source):
def validate_mdi(value):
return download_gh_svg(value, source)
return cv.All(
cv.Schema(
{
cv.Required(CONF_ICON): cv.string,
}
),
validate_mdi,
)
WEB_SCHEMA = cv.All(
{
@@ -388,12 +405,13 @@ WEB_SCHEMA = cv.All(
download_image,
)
TYPED_FILE_SCHEMA = cv.typed_schema(
{
SOURCE_LOCAL: LOCAL_SCHEMA,
SOURCE_MDI: MDI_SCHEMA,
SOURCE_WEB: WEB_SCHEMA,
},
}
| {source: mdi_schema(source) for source in MDI_SOURCES},
key=CONF_SOURCE,
)

View File

@@ -6,10 +6,27 @@ namespace esphome {
namespace image {
void Image::draw(int x, int y, display::Display *display, Color color_on, Color color_off) {
int img_x0 = 0;
int img_y0 = 0;
int w = width_;
int h = height_;
auto clipping = display->get_clipping();
if (clipping.is_set()) {
if (clipping.x > x)
img_x0 += clipping.x - x;
if (clipping.y > y)
img_y0 += clipping.y - y;
if (w > clipping.x2() - x)
w = clipping.x2() - x;
if (h > clipping.y2() - y)
h = clipping.y2() - y;
}
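// e.g. with clipping.x = 0 and clipping.x2() = 50, a 64x64 image drawn at x = -10 starts at
// img_x0 = 10 and has w clamped to 50 - (-10) = 60, so only on-screen pixels are drawn.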
switch (type_) {
case IMAGE_TYPE_BINARY: {
for (int img_x = 0; img_x < width_; img_x++) {
for (int img_y = 0; img_y < height_; img_y++) {
for (int img_x = img_x0; img_x < w; img_x++) {
for (int img_y = img_y0; img_y < h; img_y++) {
if (this->get_binary_pixel_(img_x, img_y)) {
display->draw_pixel_at(x + img_x, y + img_y, color_on);
} else if (!this->transparency_) {
@@ -20,8 +37,8 @@ void Image::draw(int x, int y, display::Display *display, Color color_on, Color
break;
}
case IMAGE_TYPE_GRAYSCALE:
for (int img_x = 0; img_x < width_; img_x++) {
for (int img_y = 0; img_y < height_; img_y++) {
for (int img_x = img_x0; img_x < w; img_x++) {
for (int img_y = img_y0; img_y < h; img_y++) {
const uint32_t pos = (img_x + img_y * this->width_);
const uint8_t gray = progmem_read_byte(this->data_start_ + pos);
Color color = Color(gray, gray, gray, 0xFF);
@@ -47,8 +64,8 @@ void Image::draw(int x, int y, display::Display *display, Color color_on, Color
}
break;
case IMAGE_TYPE_RGB565:
for (int img_x = 0; img_x < width_; img_x++) {
for (int img_y = 0; img_y < height_; img_y++) {
for (int img_x = img_x0; img_x < w; img_x++) {
for (int img_y = img_y0; img_y < h; img_y++) {
auto color = this->get_rgb565_pixel_(img_x, img_y);
if (color.w >= 0x80) {
display->draw_pixel_at(x + img_x, y + img_y, color);
@@ -57,8 +74,8 @@ void Image::draw(int x, int y, display::Display *display, Color color_on, Color
}
break;
case IMAGE_TYPE_RGB:
for (int img_x = 0; img_x < width_; img_x++) {
for (int img_y = 0; img_y < height_; img_y++) {
for (int img_x = img_x0; img_x < w; img_x++) {
for (int img_y = img_y0; img_y < h; img_y++) {
auto color = this->get_rgb_pixel_(img_x, img_y);
if (color.w >= 0x80) {
display->draw_pixel_at(x + img_x, y + img_y, color);

View File

@@ -129,7 +129,7 @@ enum PeriodicDataStructure : uint8_t {
LIGHT_SENSOR = 37,
OUT_PIN_SENSOR = 38,
};
enum PeriodicDataValue : uint8_t { HEAD = 0XAA, END = 0x55, CHECK = 0x00 };
enum PeriodicDataValue : uint8_t { HEAD = 0xAA, END = 0x55, CHECK = 0x00 };
enum AckDataStructure : uint8_t { COMMAND = 6, COMMAND_STATUS = 7 };

View File

@@ -105,7 +105,7 @@ enum PeriodicDataStructure : uint8_t {
TARGET_RESOLUTION = 10,
};
enum PeriodicDataValue : uint8_t { HEAD = 0XAA, END = 0x55, CHECK = 0x00 };
enum PeriodicDataValue : uint8_t { HEAD = 0xAA, END = 0x55, CHECK = 0x00 };
enum AckDataStructure : uint8_t { COMMAND = 6, COMMAND_STATUS = 7 };

View File

@@ -18,13 +18,13 @@ from esphome.const import (
CONF_TRIGGER_ID,
CONF_TYPE,
)
from esphome.core import CORE, ID
from esphome.core import CORE, ID, Lambda
from esphome.cpp_generator import MockObj
from esphome.final_validate import full_config
from esphome.helpers import write_file_if_changed
from . import defines as df, helpers, lv_validation as lvalid
from .automation import disp_update, focused_widgets, update_to_code
from .automation import disp_update, focused_widgets, refreshed_widgets, update_to_code
from .defines import add_define
from .encoders import (
ENCODERS_CONFIG,
@@ -240,6 +240,13 @@ def final_validation(configs):
"A non adjustable arc may not be focused",
path,
)
for w in refreshed_widgets:
path = global_config.get_path_for_id(w)
widget_conf = global_config.get_config_for_path(path[:-1])
if not any(isinstance(v, Lambda) for v in widget_conf.values()):
raise cv.Invalid(
f"Widget '{w}' does not have any templated properties to refresh",
)
async def to_code(configs):

View File

@@ -35,7 +35,13 @@ from .lvcode import (
lv_obj,
lvgl_comp,
)
from .schemas import DISP_BG_SCHEMA, LIST_ACTION_SCHEMA, LVGL_SCHEMA, base_update_schema
from .schemas import (
ALL_STYLES,
DISP_BG_SCHEMA,
LIST_ACTION_SCHEMA,
LVGL_SCHEMA,
base_update_schema,
)
from .types import (
LV_STATE,
LvglAction,
@@ -57,6 +63,7 @@ from .widgets import (
# Record widgets that are used in a focused action here
focused_widgets = set()
refreshed_widgets = set()
async def action_to_code(
@@ -361,3 +368,45 @@ async def obj_update_to_code(config, action_id, template_arg, args):
return await action_to_code(
widgets, do_update, action_id, template_arg, args, config
)
def validate_refresh_config(config):
for w in config:
refreshed_widgets.add(w[CONF_ID])
return config
@automation.register_action(
"lvgl.widget.refresh",
ObjUpdateAction,
cv.All(
cv.ensure_list(
cv.maybe_simple_value(
{
cv.Required(CONF_ID): cv.use_id(lv_obj_t),
},
key=CONF_ID,
)
),
validate_refresh_config,
),
)
async def obj_refresh_to_code(config, action_id, template_arg, args):
widget = await get_widgets(config)
async def do_refresh(widget: Widget):
# only update style properties that might have changed, i.e. are templated
config = {k: v for k, v in widget.config.items() if isinstance(v, Lambda)}
await set_obj_properties(widget, config)
# Pass all widget-specific options here, even untemplated ones, but only if at least one of
# them is templated. First filter out the common style properties.
config = {k: v for k, v in widget.config.items() if k not in ALL_STYLES}
if any(isinstance(v, Lambda) for v in config.values()):
await widget.type.to_code(widget, config)
if (
widget.type.w_type.value_property is not None
and widget.type.w_type.value_property in config
):
lv.event_send(widget.obj, UPDATE_EVENT, nullptr)
return await action_to_code(widget, do_refresh, action_id, template_arg, args)
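# Rough illustration of the filtering above (config values are hypothetical): only properties
# supplied as lambdas are re-evaluated by lvgl.widget.refresh; static values are left alone.
#   config = {"text": Lambda("..."), "width": 100}
#   {k: v for k, v in config.items() if isinstance(v, Lambda)}  # -> keeps only "text"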

View File

@@ -16,7 +16,7 @@ from esphome.const import (
)
from esphome.core import CORE, ID, Lambda
from esphome.cpp_generator import MockObj
from esphome.cpp_types import ESPTime, uint32
from esphome.cpp_types import ESPTime, int32, uint32
from esphome.helpers import cpp_string_escape
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
@@ -263,6 +263,15 @@ def pixels_validator(value):
pixels = LValidator(pixels_validator, uint32, retmapper=literal)
def padding_validator(value):
if isinstance(value, str) and value.lower().endswith("px"):
value = value[:-2]
return cv.int_(value)
padding = LValidator(padding_validator, int32, retmapper=literal)
def zoom_validator(value):
value = cv.float_range(0.1, 10.0)(value)
return value

View File

@@ -156,13 +156,13 @@ STYLE_PROPS = {
"opa_layered": lvalid.opacity,
"outline_color": lvalid.lv_color,
"outline_opa": lvalid.opacity,
"outline_pad": lvalid.pixels,
"outline_pad": lvalid.padding,
"outline_width": lvalid.pixels,
"pad_all": lvalid.pixels,
"pad_bottom": lvalid.pixels,
"pad_left": lvalid.pixels,
"pad_right": lvalid.pixels,
"pad_top": lvalid.pixels,
"pad_all": lvalid.padding,
"pad_bottom": lvalid.padding,
"pad_left": lvalid.padding,
"pad_right": lvalid.padding,
"pad_top": lvalid.padding,
"shadow_color": lvalid.lv_color,
"shadow_ofs_x": lvalid.lv_int,
"shadow_ofs_y": lvalid.lv_int,
@@ -226,8 +226,8 @@ FULL_STYLE_SCHEMA = STYLE_SCHEMA.extend(
{
cv.Optional(df.CONF_GRID_CELL_X_ALIGN): grid_alignments,
cv.Optional(df.CONF_GRID_CELL_Y_ALIGN): grid_alignments,
cv.Optional(df.CONF_PAD_ROW): lvalid.pixels,
cv.Optional(df.CONF_PAD_COLUMN): lvalid.pixels,
cv.Optional(df.CONF_PAD_ROW): lvalid.padding,
cv.Optional(df.CONF_PAD_COLUMN): lvalid.padding,
}
)
@@ -370,8 +370,8 @@ LAYOUT_SCHEMA = {
cv.Required(df.CONF_GRID_COLUMNS): [grid_spec],
cv.Optional(df.CONF_GRID_COLUMN_ALIGN): grid_alignments,
cv.Optional(df.CONF_GRID_ROW_ALIGN): grid_alignments,
cv.Optional(df.CONF_PAD_ROW): lvalid.pixels,
cv.Optional(df.CONF_PAD_COLUMN): lvalid.pixels,
cv.Optional(df.CONF_PAD_ROW): lvalid.padding,
cv.Optional(df.CONF_PAD_COLUMN): lvalid.padding,
},
df.TYPE_FLEX: {
cv.Optional(
@@ -380,8 +380,8 @@ LAYOUT_SCHEMA = {
cv.Optional(df.CONF_FLEX_ALIGN_MAIN, default="start"): flex_alignments,
cv.Optional(df.CONF_FLEX_ALIGN_CROSS, default="start"): flex_alignments,
cv.Optional(df.CONF_FLEX_ALIGN_TRACK, default="start"): flex_alignments,
cv.Optional(df.CONF_PAD_ROW): lvalid.pixels,
cv.Optional(df.CONF_PAD_COLUMN): lvalid.pixels,
cv.Optional(df.CONF_PAD_ROW): lvalid.padding,
cv.Optional(df.CONF_PAD_COLUMN): lvalid.padding,
},
},
lower=True,
@@ -427,8 +427,8 @@ ALL_STYLES = {
**STYLE_PROPS,
**GRID_CELL_SCHEMA,
**FLEX_OBJ_SCHEMA,
cv.Optional(df.CONF_PAD_ROW): lvalid.pixels,
cv.Optional(df.CONF_PAD_COLUMN): lvalid.pixels,
cv.Optional(df.CONF_PAD_ROW): lvalid.padding,
cv.Optional(df.CONF_PAD_COLUMN): lvalid.padding,
}

View File

@@ -19,7 +19,7 @@ from ..defines import (
CONF_SELECTED,
)
from ..helpers import lvgl_components_required
from ..lv_validation import key_code, lv_bool, pixels
from ..lv_validation import key_code, lv_bool, padding
from ..lvcode import lv, lv_add, lv_expr
from ..schemas import automation_schema
from ..types import (
@@ -59,8 +59,8 @@ BUTTONMATRIX_BUTTON_SCHEMA = cv.Schema(
BUTTONMATRIX_SCHEMA = cv.Schema(
{
cv.Optional(CONF_ONE_CHECKED, default=False): lv_bool,
cv.Optional(CONF_PAD_ROW): pixels,
cv.Optional(CONF_PAD_COLUMN): pixels,
cv.Optional(CONF_PAD_ROW): padding,
cv.Optional(CONF_PAD_COLUMN): padding,
cv.GenerateID(CONF_BUTTON_TEXT_LIST_ID): cv.declare_id(char_ptr),
cv.Required(CONF_ROWS): cv.ensure_list(
cv.Schema(

View File

@@ -2,7 +2,7 @@ from esphome.config_validation import Optional
from esphome.const import CONF_TEXT
from ..defines import CONF_INDICATOR, CONF_MAIN, CONF_PAD_COLUMN
from ..lv_validation import lv_text, pixels
from ..lv_validation import lv_text, padding
from ..lvcode import lv
from ..schemas import TEXT_SCHEMA
from ..types import LvBoolean
@@ -19,7 +19,7 @@ class CheckboxType(WidgetType):
(CONF_MAIN, CONF_INDICATOR),
TEXT_SCHEMA.extend(
{
Optional(CONF_PAD_COLUMN): pixels,
Optional(CONF_PAD_COLUMN): padding,
}
),
)

View File

@@ -10,7 +10,7 @@ from ..defines import (
CONF_ZOOM,
LvConstant,
)
from ..lv_validation import angle, lv_bool, lv_image, size, zoom
from ..lv_validation import lv_angle, lv_bool, lv_image, size, zoom
from ..lvcode import lv
from ..types import lv_img_t
from . import Widget, WidgetType
@@ -22,7 +22,7 @@ BASE_IMG_SCHEMA = cv.Schema(
{
cv.Optional(CONF_PIVOT_X): size,
cv.Optional(CONF_PIVOT_Y): size,
cv.Optional(CONF_ANGLE): angle,
cv.Optional(CONF_ANGLE): lv_angle,
cv.Optional(CONF_ZOOM): zoom,
cv.Optional(CONF_OFFSET_X): size,
cv.Optional(CONF_OFFSET_Y): size,
@@ -66,17 +66,19 @@ class ImgType(WidgetType):
if (pivot_x := config.get(CONF_PIVOT_X)) and (
pivot_y := config.get(CONF_PIVOT_Y)
):
lv.img_set_pivot(w.obj, pivot_x, pivot_y)
lv.img_set_pivot(
w.obj, await size.process(pivot_x), await size.process(pivot_y)
)
if (cf_angle := config.get(CONF_ANGLE)) is not None:
lv.img_set_angle(w.obj, cf_angle)
lv.img_set_angle(w.obj, await lv_angle.process(cf_angle))
if (img_zoom := config.get(CONF_ZOOM)) is not None:
lv.img_set_zoom(w.obj, img_zoom)
lv.img_set_zoom(w.obj, await zoom.process(img_zoom))
if (offset := config.get(CONF_OFFSET_X)) is not None:
lv.img_set_offset_x(w.obj, offset)
lv.img_set_offset_x(w.obj, await size.process(offset))
if (offset := config.get(CONF_OFFSET_Y)) is not None:
lv.img_set_offset_y(w.obj, offset)
lv.img_set_offset_y(w.obj, await size.process(offset))
if CONF_ANTIALIAS in config:
lv.img_set_antialias(w.obj, config[CONF_ANTIALIAS])
lv.img_set_antialias(w.obj, await lv_bool.process(config[CONF_ANTIALIAS]))
if mode := config.get(CONF_MODE):
await w.set_property("size_mode", mode)

View File

@@ -4,6 +4,8 @@
#include "esphome/core/hal.h"
#include "max7219font.h"
#include <algorithm>
namespace esphome {
namespace max7219digit {
@@ -61,45 +63,42 @@ void MAX7219Component::dump_config() {
}
void MAX7219Component::loop() {
uint32_t now = millis();
const uint32_t now = millis();
const uint32_t millis_since_last_scroll = now - this->last_scroll_;
const size_t first_line_size = this->max_displaybuffer_[0].size();
// check if the buffer size has changed by 3 or more since the last update
if ((this->max_displaybuffer_[0].size() >= this->old_buffer_size_ + 3) ||
(this->max_displaybuffer_[0].size() <= this->old_buffer_size_ - 3)) {
if ((first_line_size >= this->old_buffer_size_ + 3) || (first_line_size <= this->old_buffer_size_ - 3)) {
ESP_LOGV(TAG, "Buffer size changed %d to %d", this->old_buffer_size_, first_line_size);
this->stepsleft_ = 0;
this->display();
this->old_buffer_size_ = this->max_displaybuffer_[0].size();
this->old_buffer_size_ = first_line_size;
}
// Reset the counter back to 0 when the full string has been displayed.
if (this->stepsleft_ > this->max_displaybuffer_[0].size())
this->stepsleft_ = 0;
// Return if there is no need to scroll or scroll is off
if (!this->scroll_ || (this->max_displaybuffer_[0].size() <= (size_t) get_width_internal())) {
if (!this->scroll_ || (first_line_size <= (size_t) get_width_internal())) {
ESP_LOGVV(TAG, "Return if there is no need to scroll or scroll is off.");
this->display();
return;
}
if ((this->stepsleft_ == 0) && (now - this->last_scroll_ < this->scroll_delay_)) {
if ((this->stepsleft_ == 0) && (millis_since_last_scroll < this->scroll_delay_)) {
ESP_LOGVV(TAG, "At first step. Waiting for scroll delay");
this->display();
return;
}
// Dwell time at end of string in case of stop at end
if (this->scroll_mode_ == ScrollMode::STOP) {
if (this->stepsleft_ >= this->max_displaybuffer_[0].size() - (size_t) get_width_internal() + 1) {
if (now - this->last_scroll_ >= this->scroll_dwell_) {
this->stepsleft_ = 0;
this->last_scroll_ = now;
this->display();
if (this->stepsleft_ + get_width_internal() == first_line_size + 1) {
if (millis_since_last_scroll < this->scroll_dwell_) {
ESP_LOGVV(TAG, "Dwell time at end of string in case of stop at end. Step %d, since last scroll %d, dwell %d.",
this->stepsleft_, millis_since_last_scroll, this->scroll_dwell_);
return;
}
return;
ESP_LOGV(TAG, "Dwell time passed. Continue scrolling.");
}
}
// Actual call to scroll left action
if (now - this->last_scroll_ >= this->scroll_speed_) {
if (millis_since_last_scroll >= this->scroll_speed_) {
ESP_LOGVV(TAG, "Call to scroll left action");
this->last_scroll_ = now;
this->scroll_left();
this->display();
@@ -227,19 +226,20 @@ void MAX7219Component::scroll(bool on_off) { this->set_scroll(on_off); }
void MAX7219Component::scroll_left() {
for (int chip_line = 0; chip_line < this->num_chip_lines_; chip_line++) {
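// std::rotate moves the first `steps` columns to the back, e.g. {A,B,C,D,E} rotated by 2
// becomes {C,D,E,A,B}, which scrolls the rendered line left without reallocating.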
auto scroll = [&](std::vector<uint8_t> &line, uint16_t steps) {
std::rotate(line.begin(), std::next(line.begin(), steps), line.end());
};
if (this->update_) {
this->max_displaybuffer_[chip_line].push_back(this->bckgrnd_);
for (uint16_t i = 0; i < this->stepsleft_; i++) {
this->max_displaybuffer_[chip_line].push_back(this->max_displaybuffer_[chip_line].front());
this->max_displaybuffer_[chip_line].erase(this->max_displaybuffer_[chip_line].begin());
}
scroll(this->max_displaybuffer_[chip_line],
(this->stepsleft_ + 1) % (this->max_displaybuffer_[chip_line].size()));
} else {
this->max_displaybuffer_[chip_line].push_back(this->max_displaybuffer_[chip_line].front());
this->max_displaybuffer_[chip_line].erase(this->max_displaybuffer_[chip_line].begin());
scroll(this->max_displaybuffer_[chip_line], 1);
}
}
this->update_ = false;
this->stepsleft_++;
this->stepsleft_ %= this->max_displaybuffer_[0].size();
}
void MAX7219Component::send_char(uint8_t chip, uint8_t data) {

View File

@@ -35,8 +35,8 @@ SERVICE_SCHEMA = cv.Schema(
{
cv.Required(CONF_SERVICE): cv.string,
cv.Required(CONF_PROTOCOL): cv.string,
cv.Optional(CONF_PORT, default=0): cv.Any(0, cv.port),
cv.Optional(CONF_TXT, default={}): {cv.string: cv.string},
cv.Optional(CONF_PORT, default=0): cv.templatable(cv.Any(0, cv.port)),
cv.Optional(CONF_TXT, default={}): {cv.string: cv.templatable(cv.string)},
}
)
@@ -102,12 +102,18 @@ async def to_code(config):
for service in config[CONF_SERVICES]:
txt = [
mdns_txt_record(txt_key, txt_value)
cg.StructInitializer(
MDNSTXTRecord,
("key", txt_key),
("value", await cg.templatable(txt_value, [], cg.std_string)),
)
for txt_key, txt_value in service[CONF_TXT].items()
]
exp = mdns_service(
service[CONF_SERVICE], service[CONF_PROTOCOL], service[CONF_PORT], txt
service[CONF_SERVICE],
service[CONF_PROTOCOL],
await cg.templatable(service[CONF_PORT], [], cg.uint16),
txt,
)
cg.add(var.add_extra_service(exp))

View File

@@ -121,9 +121,11 @@ void MDNSComponent::dump_config() {
ESP_LOGCONFIG(TAG, " Hostname: %s", this->hostname_.c_str());
ESP_LOGV(TAG, " Services:");
for (const auto &service : this->services_) {
ESP_LOGV(TAG, " - %s, %s, %d", service.service_type.c_str(), service.proto.c_str(), service.port);
ESP_LOGV(TAG, " - %s, %s, %d", service.service_type.c_str(), service.proto.c_str(),
const_cast<TemplatableValue<uint16_t> &>(service.port).value());
for (const auto &record : service.txt_records) {
ESP_LOGV(TAG, " TXT: %s = %s", record.key.c_str(), record.value.c_str());
ESP_LOGV(TAG, " TXT: %s = %s", record.key.c_str(),
const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
}
}
}

View File

@@ -3,6 +3,7 @@
#ifdef USE_MDNS
#include <string>
#include <vector>
#include "esphome/core/automation.h"
#include "esphome/core/component.h"
namespace esphome {
@@ -10,7 +11,7 @@ namespace mdns {
struct MDNSTXTRecord {
std::string key;
std::string value;
TemplatableValue<std::string> value;
};
struct MDNSService {
@@ -20,7 +21,7 @@ struct MDNSService {
// second label indicating protocol _including_ underscore character prefix
// as defined in RFC6763 Section 7, like "_tcp" or "_udp"
std::string proto;
uint16_t port;
TemplatableValue<uint16_t> port;
std::vector<MDNSTXTRecord> txt_records;
};

View File

@@ -31,11 +31,12 @@ void MDNSComponent::setup() {
mdns_txt_item_t it{};
// dup strings to ensure the pointer is valid even after the record loop
it.key = strdup(record.key.c_str());
it.value = strdup(record.value.c_str());
it.value = strdup(const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
txt_records.push_back(it);
}
err = mdns_service_add(nullptr, service.service_type.c_str(), service.proto.c_str(), service.port,
txt_records.data(), txt_records.size());
uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
err = mdns_service_add(nullptr, service.service_type.c_str(), service.proto.c_str(), port, txt_records.data(),
txt_records.size());
// free records
for (const auto &it : txt_records) {

View File

@@ -29,9 +29,11 @@ void MDNSComponent::setup() {
while (*service_type == '_') {
service_type++;
}
MDNS.addService(service_type, proto, service.port);
uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
MDNS.addService(service_type, proto, port);
for (const auto &record : service.txt_records) {
MDNS.addServiceTxt(service_type, proto, record.key.c_str(), record.value.c_str());
MDNS.addServiceTxt(service_type, proto, record.key.c_str(),
const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
}
}
}

View File

@@ -29,9 +29,11 @@ void MDNSComponent::setup() {
while (*service_type == '_') {
service_type++;
}
MDNS.addService(service_type, proto, service.port);
uint16_t port_ = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
MDNS.addService(service_type, proto, port_);
for (const auto &record : service.txt_records) {
MDNS.addServiceTxt(service_type, proto, record.key.c_str(), record.value.c_str());
MDNS.addServiceTxt(service_type, proto, record.key.c_str(),
const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
}
}
}

View File

@@ -29,9 +29,11 @@ void MDNSComponent::setup() {
while (*service_type == '_') {
service_type++;
}
MDNS.addService(service_type, proto, service.port);
uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
MDNS.addService(service_type, proto, port);
for (const auto &record : service.txt_records) {
MDNS.addServiceTxt(service_type, proto, record.key.c_str(), record.value.c_str());
MDNS.addServiceTxt(service_type, proto, record.key.c_str(),
const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
}
}
}

View File

@@ -134,11 +134,13 @@ MEDIA_PLAYER_SCHEMA = cv.ENTITY_BASE_SCHEMA.extend(
)
MEDIA_PLAYER_ACTION_SCHEMA = cv.Schema(
{
cv.GenerateID(): cv.use_id(MediaPlayer),
cv.Optional(CONF_ANNOUNCEMENT, default=False): cv.templatable(cv.boolean),
}
MEDIA_PLAYER_ACTION_SCHEMA = automation.maybe_simple_id(
cv.Schema(
{
cv.GenerateID(): cv.use_id(MediaPlayer),
cv.Optional(CONF_ANNOUNCEMENT, default=False): cv.templatable(cv.boolean),
}
)
)
MEDIA_PLAYER_CONDITION_SCHEMA = automation.maybe_simple_id(

View File

@@ -12,6 +12,7 @@ import esphome.config_validation as cv
from esphome.const import (
CONF_FILE,
CONF_ID,
CONF_INTERNAL,
CONF_MICROPHONE,
CONF_MODEL,
CONF_PASSWORD,
@@ -40,6 +41,7 @@ CONF_ON_WAKE_WORD_DETECTED = "on_wake_word_detected"
CONF_PROBABILITY_CUTOFF = "probability_cutoff"
CONF_SLIDING_WINDOW_AVERAGE_SIZE = "sliding_window_average_size"
CONF_SLIDING_WINDOW_SIZE = "sliding_window_size"
CONF_STOP_AFTER_DETECTION = "stop_after_detection"
CONF_TENSOR_ARENA_SIZE = "tensor_arena_size"
CONF_VAD = "vad"
@@ -49,13 +51,20 @@ micro_wake_word_ns = cg.esphome_ns.namespace("micro_wake_word")
MicroWakeWord = micro_wake_word_ns.class_("MicroWakeWord", cg.Component)
DisableModelAction = micro_wake_word_ns.class_("DisableModelAction", automation.Action)
EnableModelAction = micro_wake_word_ns.class_("EnableModelAction", automation.Action)
StartAction = micro_wake_word_ns.class_("StartAction", automation.Action)
StopAction = micro_wake_word_ns.class_("StopAction", automation.Action)
ModelIsEnabledCondition = micro_wake_word_ns.class_(
"ModelIsEnabledCondition", automation.Condition
)
IsRunningCondition = micro_wake_word_ns.class_(
"IsRunningCondition", automation.Condition
)
WakeWordModel = micro_wake_word_ns.class_("WakeWordModel")
def _validate_json_filename(value):
value = cv.string(value)
@@ -169,9 +178,10 @@ def _convert_manifest_v1_to_v2(v1_manifest):
# Original Inception-based V1 manifest models require a minimum of 45672 bytes
v2_manifest[KEY_MICRO][CONF_TENSOR_ARENA_SIZE] = 45672
# Original Inception-based V1 manifest models use a 20 ms feature step size
v2_manifest[KEY_MICRO][CONF_FEATURE_STEP_SIZE] = 20
# Original Inception-based V1 manifest models were trained only on TTS English samples
v2_manifest[KEY_TRAINED_LANGUAGES] = ["en"]
return v2_manifest
@@ -296,14 +306,16 @@ MODEL_SOURCE_SCHEMA = cv.Any(
MODEL_SCHEMA = cv.Schema(
{
cv.GenerateID(CONF_ID): cv.declare_id(WakeWordModel),
cv.Optional(CONF_MODEL): MODEL_SOURCE_SCHEMA,
cv.Optional(CONF_PROBABILITY_CUTOFF): cv.percentage,
cv.Optional(CONF_SLIDING_WINDOW_SIZE): cv.positive_int,
cv.Optional(CONF_INTERNAL, default=False): cv.boolean,
cv.GenerateID(CONF_RAW_DATA_ID): cv.declare_id(cg.uint8),
}
)
# Provide a default VAD model that could be overridden
# Provides a default VAD model that could be overridden
VAD_MODEL_SCHEMA = MODEL_SCHEMA.extend(
cv.Schema(
{
@@ -328,7 +340,14 @@ CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(MicroWakeWord),
cv.GenerateID(CONF_MICROPHONE): cv.use_id(microphone.Microphone),
cv.Optional(
CONF_MICROPHONE, default={}
): microphone.microphone_source_schema(
min_bits_per_sample=16,
max_bits_per_sample=16,
min_channels=1,
max_channels=1,
),
cv.Required(CONF_MODELS): cv.ensure_list(
cv.maybe_simple_value(MODEL_SCHEMA, key=CONF_MODEL)
),
@@ -336,6 +355,7 @@ CONFIG_SCHEMA = cv.All(
single=True
),
cv.Optional(CONF_VAD): _maybe_empty_vad_schema,
cv.Optional(CONF_STOP_AFTER_DETECTION, default=True): cv.boolean,
cv.Optional(CONF_MODEL): cv.invalid(
f"The {CONF_MODEL} parameter has moved to be a list element under the {CONF_MODELS} parameter."
),
@@ -404,39 +424,42 @@ def _feature_step_size_validate(config):
raise cv.Invalid("Cannot load models with different features step sizes.")
FINAL_VALIDATE_SCHEMA = _feature_step_size_validate
FINAL_VALIDATE_SCHEMA = cv.All(
cv.Schema(
{
cv.Required(
CONF_MICROPHONE
): microphone.final_validate_microphone_source_schema(
"micro_wake_word", sample_rate=16000
),
},
extra=cv.ALLOW_EXTRA,
),
_feature_step_size_validate,
)
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)
mic = await cg.get_variable(config[CONF_MICROPHONE])
cg.add(var.set_microphone(mic))
mic_source = await microphone.microphone_source_to_code(config[CONF_MICROPHONE])
cg.add(var.set_microphone_source(mic_source))
cg.add_define("USE_MICRO_WAKE_WORD")
cg.add_define("USE_OTA_STATE_CALLBACK")
esp32.add_idf_component(
name="esp-tflite-micro",
repo="https://github.com/espressif/esp-tflite-micro",
ref="v1.3.1",
)
# add esp-nn dependency for tflite-micro to work around https://github.com/espressif/esp-nn/issues/17
# ...remove after switching to IDF 5.1.4+
esp32.add_idf_component(
name="esp-nn",
repo="https://github.com/espressif/esp-nn",
ref="v1.1.0",
ref="v1.3.3.1",
)
cg.add_build_flag("-DTF_LITE_STATIC_MEMORY")
cg.add_build_flag("-DTF_LITE_DISABLE_X86_NEON")
cg.add_build_flag("-DESP_NN")
if on_wake_word_detection_config := config.get(CONF_ON_WAKE_WORD_DETECTED):
await automation.build_automation(
var.get_wake_word_detected_trigger(),
[(cg.std_string, "wake_word")],
on_wake_word_detection_config,
)
cg.add_library("kahrendt/ESPMicroSpeechFeatures", "1.1.0")
if vad_model := config.get(CONF_VAD):
cg.add_define("USE_MICRO_WAKE_WORD_VAD")
@@ -444,7 +467,7 @@ async def to_code(config):
# Use the general model loading code for the VAD codegen
config[CONF_MODELS].append(vad_model)
for model_parameters in config[CONF_MODELS]:
for i, model_parameters in enumerate(config[CONF_MODELS]):
model_config = model_parameters.get(CONF_MODEL)
data = []
manifest, data = _model_config_to_manifest_data(model_config)
@@ -455,6 +478,8 @@ async def to_code(config):
probability_cutoff = model_parameters.get(
CONF_PROBABILITY_CUTOFF, manifest[KEY_MICRO][CONF_PROBABILITY_CUTOFF]
)
quantized_probability_cutoff = int(probability_cutoff * 255)
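# e.g. a probability_cutoff of 0.97 becomes int(0.97 * 255) == 247, matching the
# model's quantized uint8 output scale.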
sliding_window_size = model_parameters.get(
CONF_SLIDING_WINDOW_SIZE,
manifest[KEY_MICRO][CONF_SLIDING_WINDOW_SIZE],
@@ -464,24 +489,40 @@ async def to_code(config):
cg.add(
var.add_vad_model(
prog_arr,
probability_cutoff,
quantized_probability_cutoff,
sliding_window_size,
manifest[KEY_MICRO][CONF_TENSOR_ARENA_SIZE],
)
)
else:
cg.add(
var.add_wake_word_model(
prog_arr,
probability_cutoff,
sliding_window_size,
manifest[KEY_WAKE_WORD],
manifest[KEY_MICRO][CONF_TENSOR_ARENA_SIZE],
)
# Only enable the first wake word by default. After the first boot, the enabled state is saved to and restored from flash.
default_enabled = i == 0
wake_word_model = cg.new_Pvariable(
model_parameters[CONF_ID],
str(model_parameters[CONF_ID]),
prog_arr,
quantized_probability_cutoff,
sliding_window_size,
manifest[KEY_WAKE_WORD],
manifest[KEY_MICRO][CONF_TENSOR_ARENA_SIZE],
default_enabled,
model_parameters[CONF_INTERNAL],
)
for lang in manifest[KEY_TRAINED_LANGUAGES]:
cg.add(wake_word_model.add_trained_language(lang))
cg.add(var.add_wake_word_model(wake_word_model))
cg.add(var.set_features_step_size(manifest[KEY_MICRO][CONF_FEATURE_STEP_SIZE]))
cg.add_library("kahrendt/ESPMicroSpeechFeatures", "1.1.0")
cg.add(var.set_stop_after_detection(config[CONF_STOP_AFTER_DETECTION]))
if on_wake_word_detection_config := config.get(CONF_ON_WAKE_WORD_DETECTED):
await automation.build_automation(
var.get_wake_word_detected_trigger(),
[(cg.std_string, "wake_word")],
on_wake_word_detection_config,
)
MICRO_WAKE_WORD_ACTION_SCHEMA = cv.Schema({cv.GenerateID(): cv.use_id(MicroWakeWord)})
@@ -496,3 +537,30 @@ async def micro_wake_word_action_to_code(config, action_id, template_arg, args):
var = cg.new_Pvariable(action_id, template_arg)
await cg.register_parented(var, config[CONF_ID])
return var
MICRO_WAKE_WORLD_MODEL_ACTION_SCHEMA = automation.maybe_simple_id(
{
cv.Required(CONF_ID): cv.use_id(WakeWordModel),
}
)
@register_action(
"micro_wake_word.enable_model",
EnableModelAction,
MICRO_WAKE_WORLD_MODEL_ACTION_SCHEMA,
)
@register_action(
"micro_wake_word.disable_model",
DisableModelAction,
MICRO_WAKE_WORLD_MODEL_ACTION_SCHEMA,
)
@register_condition(
"micro_wake_word.model_is_enabled",
ModelIsEnabledCondition,
MICRO_WAKE_WORLD_MODEL_ACTION_SCHEMA,
)
async def model_action(config, action_id, template_arg, args):
parent = await cg.get_variable(config[CONF_ID])
return cg.new_Pvariable(action_id, template_arg, parent)

View File

@@ -0,0 +1,54 @@
#pragma once
#include "micro_wake_word.h"
#include "streaming_model.h"
#ifdef USE_ESP_IDF
namespace esphome {
namespace micro_wake_word {
template<typename... Ts> class StartAction : public Action<Ts...>, public Parented<MicroWakeWord> {
public:
void play(Ts... x) override { this->parent_->start(); }
};
template<typename... Ts> class StopAction : public Action<Ts...>, public Parented<MicroWakeWord> {
public:
void play(Ts... x) override { this->parent_->stop(); }
};
template<typename... Ts> class IsRunningCondition : public Condition<Ts...>, public Parented<MicroWakeWord> {
public:
bool check(Ts... x) override { return this->parent_->is_running(); }
};
template<typename... Ts> class EnableModelAction : public Action<Ts...> {
public:
explicit EnableModelAction(WakeWordModel *wake_word_model) : wake_word_model_(wake_word_model) {}
void play(Ts... x) override { this->wake_word_model_->enable(); }
protected:
WakeWordModel *wake_word_model_;
};
template<typename... Ts> class DisableModelAction : public Action<Ts...> {
public:
explicit DisableModelAction(WakeWordModel *wake_word_model) : wake_word_model_(wake_word_model) {}
void play(Ts... x) override { this->wake_word_model_->disable(); }
protected:
WakeWordModel *wake_word_model_;
};
template<typename... Ts> class ModelIsEnabledCondition : public Condition<Ts...> {
public:
explicit ModelIsEnabledCondition(WakeWordModel *wake_word_model) : wake_word_model_(wake_word_model) {}
bool check(Ts... x) override { return this->wake_word_model_->is_enabled(); }
protected:
WakeWordModel *wake_word_model_;
};
} // namespace micro_wake_word
} // namespace esphome
#endif

View File

@@ -1,5 +1,4 @@
#include "micro_wake_word.h"
#include "streaming_model.h"
#ifdef USE_ESP_IDF
@@ -7,41 +6,57 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#include <frontend.h>
#include <frontend_util.h>
#include "esphome/components/audio/audio_transfer_buffer.h"
#include <tensorflow/lite/core/c/common.h>
#include <tensorflow/lite/micro/micro_interpreter.h>
#include <tensorflow/lite/micro/micro_mutable_op_resolver.h>
#include <cmath>
#ifdef USE_OTA
#include "esphome/components/ota/ota_backend.h"
#endif
namespace esphome {
namespace micro_wake_word {
static const char *const TAG = "micro_wake_word";
static const size_t SAMPLE_RATE_HZ = 16000; // 16 kHz
static const size_t BUFFER_LENGTH = 64; // 0.064 seconds
static const size_t BUFFER_SIZE = SAMPLE_RATE_HZ / 1000 * BUFFER_LENGTH;
static const size_t INPUT_BUFFER_SIZE = 16 * SAMPLE_RATE_HZ / 1000; // 16ms * 16kHz / 1000ms
static const ssize_t DETECTION_QUEUE_LENGTH = 5;
static const size_t DATA_TIMEOUT_MS = 50;
static const uint32_t RING_BUFFER_DURATION_MS = 120;
static const uint32_t RING_BUFFER_SAMPLES = RING_BUFFER_DURATION_MS * (AUDIO_SAMPLE_FREQUENCY / 1000);
static const size_t RING_BUFFER_SIZE = RING_BUFFER_SAMPLES * sizeof(int16_t);
static const uint32_t INFERENCE_TASK_STACK_SIZE = 3072;
static const UBaseType_t INFERENCE_TASK_PRIORITY = 3;
enum EventGroupBits : uint32_t {
COMMAND_STOP = (1 << 0), // Signals the inference task should stop
TASK_STARTING = (1 << 3),
TASK_RUNNING = (1 << 4),
TASK_STOPPING = (1 << 5),
TASK_STOPPED = (1 << 6),
ERROR_MEMORY = (1 << 9),
ERROR_INFERENCE = (1 << 10),
WARNING_FULL_RING_BUFFER = (1 << 13),
ERROR_BITS = ERROR_MEMORY | ERROR_INFERENCE,
ALL_BITS = 0xfffff, // 24 total bits available in an event group
};
float MicroWakeWord::get_setup_priority() const { return setup_priority::AFTER_CONNECTION; }
static const LogString *micro_wake_word_state_to_string(State state) {
switch (state) {
case State::IDLE:
return LOG_STR("IDLE");
case State::START_MICROPHONE:
return LOG_STR("START_MICROPHONE");
case State::STARTING_MICROPHONE:
return LOG_STR("STARTING_MICROPHONE");
case State::STARTING:
return LOG_STR("STARTING");
case State::DETECTING_WAKE_WORD:
return LOG_STR("DETECTING_WAKE_WORD");
case State::STOP_MICROPHONE:
return LOG_STR("STOP_MICROPHONE");
case State::STOPPING_MICROPHONE:
return LOG_STR("STOPPING_MICROPHONE");
case State::STOPPING:
return LOG_STR("STOPPING");
case State::STOPPED:
return LOG_STR("STOPPED");
default:
return LOG_STR("UNKNOWN");
}
@@ -51,7 +66,7 @@ void MicroWakeWord::dump_config() {
ESP_LOGCONFIG(TAG, "microWakeWord:");
ESP_LOGCONFIG(TAG, " models:");
for (auto &model : this->wake_word_models_) {
model.log_model_config();
model->log_model_config();
}
#ifdef USE_MICRO_WAKE_WORD_VAD
this->vad_model_->log_model_config();
@@ -61,108 +76,266 @@ void MicroWakeWord::dump_config() {
void MicroWakeWord::setup() {
ESP_LOGCONFIG(TAG, "Setting up microWakeWord...");
this->microphone_->add_data_callback([this](const std::vector<int16_t> &data) {
if (this->state_ != State::DETECTING_WAKE_WORD) {
return;
}
std::shared_ptr<RingBuffer> temp_ring_buffer = this->ring_buffer_;
if (this->ring_buffer_.use_count() == 2) {
// mWW still owns the ring buffer and temp_ring_buffer does as well, proceed to copy audio into ring buffer
this->frontend_config_.window.size_ms = FEATURE_DURATION_MS;
this->frontend_config_.window.step_size_ms = this->features_step_size_;
this->frontend_config_.filterbank.num_channels = PREPROCESSOR_FEATURE_SIZE;
this->frontend_config_.filterbank.lower_band_limit = FILTERBANK_LOWER_BAND_LIMIT;
this->frontend_config_.filterbank.upper_band_limit = FILTERBANK_UPPER_BAND_LIMIT;
this->frontend_config_.noise_reduction.smoothing_bits = NOISE_REDUCTION_SMOOTHING_BITS;
this->frontend_config_.noise_reduction.even_smoothing = NOISE_REDUCTION_EVEN_SMOOTHING;
this->frontend_config_.noise_reduction.odd_smoothing = NOISE_REDUCTION_ODD_SMOOTHING;
this->frontend_config_.noise_reduction.min_signal_remaining = NOISE_REDUCTION_MIN_SIGNAL_REMAINING;
this->frontend_config_.pcan_gain_control.enable_pcan = PCAN_GAIN_CONTROL_ENABLE_PCAN;
this->frontend_config_.pcan_gain_control.strength = PCAN_GAIN_CONTROL_STRENGTH;
this->frontend_config_.pcan_gain_control.offset = PCAN_GAIN_CONTROL_OFFSET;
this->frontend_config_.pcan_gain_control.gain_bits = PCAN_GAIN_CONTROL_GAIN_BITS;
this->frontend_config_.log_scale.enable_log = LOG_SCALE_ENABLE_LOG;
this->frontend_config_.log_scale.scale_shift = LOG_SCALE_SCALE_SHIFT;
size_t bytes_free = temp_ring_buffer->free();
if (bytes_free < data.size() * sizeof(int16_t)) {
ESP_LOGW(
TAG,
"Not enough free bytes in ring buffer to store incoming audio data (free bytes=%d, incoming bytes=%d). "
"Resetting the ring buffer. Wake word detection accuracy will be reduced.",
bytes_free, data.size());
temp_ring_buffer->reset();
}
temp_ring_buffer->write((void *) data.data(), data.size() * sizeof(int16_t));
}
});
if (!this->register_streaming_ops_(this->streaming_op_resolver_)) {
this->event_group_ = xEventGroupCreate();
if (this->event_group_ == nullptr) {
ESP_LOGE(TAG, "Failed to create event group");
this->mark_failed();
return;
}
this->detection_queue_ = xQueueCreate(DETECTION_QUEUE_LENGTH, sizeof(DetectionEvent));
if (this->detection_queue_ == nullptr) {
ESP_LOGE(TAG, "Failed to create detection event queue");
this->mark_failed();
return;
}
this->microphone_source_->add_data_callback([this](const std::vector<uint8_t> &data) {
if (this->state_ == State::STOPPED) {
return;
}
std::shared_ptr<RingBuffer> temp_ring_buffer = this->ring_buffer_.lock();
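// lock() only yields a valid pointer while the ring buffer still exists; use_count() > 1
// additionally confirms the inference task still holds its own reference, so writing is safe.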
if (this->ring_buffer_.use_count() > 1) {
size_t bytes_free = temp_ring_buffer->free();
if (bytes_free < data.size()) {
xEventGroupSetBits(this->event_group_, EventGroupBits::WARNING_FULL_RING_BUFFER);
temp_ring_buffer->reset();
}
temp_ring_buffer->write((void *) data.data(), data.size());
}
});
#ifdef USE_OTA
ota::get_global_ota_callback()->add_on_state_callback(
[this](ota::OTAState state, float progress, uint8_t error, ota::OTAComponent *comp) {
if (state == ota::OTA_STARTED) {
this->suspend_task_();
} else if (state == ota::OTA_ERROR) {
this->resume_task_();
}
});
#endif
ESP_LOGCONFIG(TAG, "Micro Wake Word initialized");
this->frontend_config_.window.size_ms = FEATURE_DURATION_MS;
this->frontend_config_.window.step_size_ms = this->features_step_size_;
this->frontend_config_.filterbank.num_channels = PREPROCESSOR_FEATURE_SIZE;
this->frontend_config_.filterbank.lower_band_limit = 125.0;
this->frontend_config_.filterbank.upper_band_limit = 7500.0;
this->frontend_config_.noise_reduction.smoothing_bits = 10;
this->frontend_config_.noise_reduction.even_smoothing = 0.025;
this->frontend_config_.noise_reduction.odd_smoothing = 0.06;
this->frontend_config_.noise_reduction.min_signal_remaining = 0.05;
this->frontend_config_.pcan_gain_control.enable_pcan = 1;
this->frontend_config_.pcan_gain_control.strength = 0.95;
this->frontend_config_.pcan_gain_control.offset = 80.0;
this->frontend_config_.pcan_gain_control.gain_bits = 21;
this->frontend_config_.log_scale.enable_log = 1;
this->frontend_config_.log_scale.scale_shift = 6;
}
void MicroWakeWord::add_wake_word_model(const uint8_t *model_start, float probability_cutoff,
size_t sliding_window_average_size, const std::string &wake_word,
size_t tensor_arena_size) {
this->wake_word_models_.emplace_back(model_start, probability_cutoff, sliding_window_average_size, wake_word,
tensor_arena_size);
void MicroWakeWord::inference_task(void *params) {
MicroWakeWord *this_mww = (MicroWakeWord *) params;
xEventGroupSetBits(this_mww->event_group_, EventGroupBits::TASK_STARTING);
{ // Ensures any C++ objects fall out of scope to deallocate before deleting the task
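// e.g. a 20 ms feature step at 16 kHz means reading 320 new samples (640 bytes) per iteration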
const size_t new_samples_to_read = this_mww->features_step_size_ * (AUDIO_SAMPLE_FREQUENCY / 1000);
std::unique_ptr<audio::AudioSourceTransferBuffer> audio_buffer;
int8_t features_buffer[PREPROCESSOR_FEATURE_SIZE];
if (!(xEventGroupGetBits(this_mww->event_group_) & ERROR_BITS)) {
// Allocate audio transfer buffer
audio_buffer = audio::AudioSourceTransferBuffer::create(new_samples_to_read * sizeof(int16_t));
if (audio_buffer == nullptr) {
xEventGroupSetBits(this_mww->event_group_, EventGroupBits::ERROR_MEMORY);
}
}
if (!(xEventGroupGetBits(this_mww->event_group_) & ERROR_BITS)) {
// Allocate ring buffer
std::shared_ptr<RingBuffer> temp_ring_buffer = RingBuffer::create(RING_BUFFER_SIZE);
if (temp_ring_buffer.use_count() == 0) {
xEventGroupSetBits(this_mww->event_group_, EventGroupBits::ERROR_MEMORY);
}
audio_buffer->set_source(temp_ring_buffer);
this_mww->ring_buffer_ = temp_ring_buffer;
}
if (!(xEventGroupGetBits(this_mww->event_group_) & ERROR_BITS)) {
this_mww->microphone_source_->start();
xEventGroupSetBits(this_mww->event_group_, EventGroupBits::TASK_RUNNING);
while (!(xEventGroupGetBits(this_mww->event_group_) & COMMAND_STOP)) {
audio_buffer->transfer_data_from_source(pdMS_TO_TICKS(DATA_TIMEOUT_MS));
if (audio_buffer->available() < new_samples_to_read * sizeof(int16_t)) {
// Insufficient data to generate new spectrogram features, read more next iteration
continue;
}
// Generate new spectrogram features
size_t processed_samples = this_mww->generate_features_(
(int16_t *) audio_buffer->get_buffer_start(), audio_buffer->available() / sizeof(int16_t), features_buffer);
audio_buffer->decrease_buffer_length(processed_samples * sizeof(int16_t));
// Run inference using the new spectrogram features
if (!this_mww->update_model_probabilities_(features_buffer)) {
xEventGroupSetBits(this_mww->event_group_, EventGroupBits::ERROR_INFERENCE);
break;
}
// Process each model's probabilities and possibly send a Detection Event to the queue
this_mww->process_probabilities_();
}
}
}
xEventGroupSetBits(this_mww->event_group_, EventGroupBits::TASK_STOPPING);
this_mww->unload_models_();
this_mww->microphone_source_->stop();
FrontendFreeStateContents(&this_mww->frontend_state_);
xEventGroupSetBits(this_mww->event_group_, EventGroupBits::TASK_STOPPED);
while (true) {
// Continuously delay until the main loop deletes the task
delay(10);
}
}
std::vector<WakeWordModel *> MicroWakeWord::get_wake_words() {
std::vector<WakeWordModel *> external_wake_word_models;
for (auto *model : this->wake_word_models_) {
if (!model->get_internal_only()) {
external_wake_word_models.push_back(model);
}
}
return external_wake_word_models;
}
void MicroWakeWord::add_wake_word_model(WakeWordModel *model) { this->wake_word_models_.push_back(model); }
#ifdef USE_MICRO_WAKE_WORD_VAD
void MicroWakeWord::add_vad_model(const uint8_t *model_start, float probability_cutoff, size_t sliding_window_size,
void MicroWakeWord::add_vad_model(const uint8_t *model_start, uint8_t probability_cutoff, size_t sliding_window_size,
size_t tensor_arena_size) {
this->vad_model_ = make_unique<VADModel>(model_start, probability_cutoff, sliding_window_size, tensor_arena_size);
}
#endif
void MicroWakeWord::suspend_task_() {
if (this->inference_task_handle_ != nullptr) {
vTaskSuspend(this->inference_task_handle_);
}
}
void MicroWakeWord::resume_task_() {
if (this->inference_task_handle_ != nullptr) {
vTaskResume(this->inference_task_handle_);
}
}
void MicroWakeWord::loop() {
uint32_t event_group_bits = xEventGroupGetBits(this->event_group_);
if (event_group_bits & EventGroupBits::ERROR_MEMORY) {
xEventGroupClearBits(this->event_group_, EventGroupBits::ERROR_MEMORY);
ESP_LOGE(TAG, "Encountered an error allocating buffers");
}
if (event_group_bits & EventGroupBits::ERROR_INFERENCE) {
xEventGroupClearBits(this->event_group_, EventGroupBits::ERROR_INFERENCE);
ESP_LOGE(TAG, "Encountered an error while performing an inference");
}
if (event_group_bits & EventGroupBits::WARNING_FULL_RING_BUFFER) {
xEventGroupClearBits(this->event_group_, EventGroupBits::WARNING_FULL_RING_BUFFER);
ESP_LOGW(TAG, "Not enough free bytes in ring buffer to store incoming audio data. Resetting the ring buffer. Wake "
"word detection accuracy will temporarily be reduced.");
}
if (event_group_bits & EventGroupBits::TASK_STARTING) {
ESP_LOGD(TAG, "Inference task has started, attempting to allocate memory for buffers");
xEventGroupClearBits(this->event_group_, EventGroupBits::TASK_STARTING);
}
if (event_group_bits & EventGroupBits::TASK_RUNNING) {
ESP_LOGD(TAG, "Inference task is running");
xEventGroupClearBits(this->event_group_, EventGroupBits::TASK_RUNNING);
this->set_state_(State::DETECTING_WAKE_WORD);
}
if (event_group_bits & EventGroupBits::TASK_STOPPING) {
ESP_LOGD(TAG, "Inference task is stopping, deallocating buffers");
xEventGroupClearBits(this->event_group_, EventGroupBits::TASK_STOPPING);
}
if ((event_group_bits & EventGroupBits::TASK_STOPPED)) {
ESP_LOGD(TAG, "Inference task is finished, freeing task resources");
vTaskDelete(this->inference_task_handle_);
this->inference_task_handle_ = nullptr;
xEventGroupClearBits(this->event_group_, ALL_BITS);
xQueueReset(this->detection_queue_);
this->set_state_(State::STOPPED);
}
if ((this->pending_start_) && (this->state_ == State::STOPPED)) {
this->set_state_(State::STARTING);
this->pending_start_ = false;
}
if ((this->pending_stop_) && (this->state_ == State::DETECTING_WAKE_WORD)) {
this->set_state_(State::STOPPING);
this->pending_stop_ = false;
}
switch (this->state_) {
case State::IDLE:
break;
case State::START_MICROPHONE:
ESP_LOGD(TAG, "Starting Microphone");
this->microphone_->start();
this->set_state_(State::STARTING_MICROPHONE);
break;
case State::STARTING_MICROPHONE:
if (this->microphone_->is_running()) {
this->set_state_(State::DETECTING_WAKE_WORD);
}
break;
case State::DETECTING_WAKE_WORD:
while (this->has_enough_samples_()) {
this->update_model_probabilities_();
if (this->detect_wake_words_()) {
ESP_LOGD(TAG, "Wake Word '%s' Detected", (this->detected_wake_word_).c_str());
this->detected_ = true;
this->set_state_(State::STOP_MICROPHONE);
case State::STARTING:
if ((this->inference_task_handle_ == nullptr) && !this->status_has_error()) {
// Setup preprocessor feature generator. If done in the task, it would lock the task to its initial core, as it
// uses floating point operations.
if (!FrontendPopulateState(&this->frontend_config_, &this->frontend_state_, AUDIO_SAMPLE_FREQUENCY)) {
this->status_momentary_error(
"Failed to allocate buffers for spectrogram feature processor, attempting again in 1 second", 1000);
return;
}
xTaskCreate(MicroWakeWord::inference_task, "mww", INFERENCE_TASK_STACK_SIZE, (void *) this,
INFERENCE_TASK_PRIORITY, &this->inference_task_handle_);
if (this->inference_task_handle_ == nullptr) {
FrontendFreeStateContents(&this->frontend_state_); // Deallocate frontend state
this->status_momentary_error("Task failed to start, attempting again in 1 second", 1000);
}
}
break;
case State::STOP_MICROPHONE:
ESP_LOGD(TAG, "Stopping Microphone");
this->microphone_->stop();
this->set_state_(State::STOPPING_MICROPHONE);
this->unload_models_();
this->deallocate_buffers_();
break;
case State::STOPPING_MICROPHONE:
if (this->microphone_->is_stopped()) {
this->set_state_(State::IDLE);
if (this->detected_) {
this->wake_word_detected_trigger_->trigger(this->detected_wake_word_);
this->detected_ = false;
this->detected_wake_word_ = "";
case State::DETECTING_WAKE_WORD: {
DetectionEvent detection_event;
while (xQueueReceive(this->detection_queue_, &detection_event, 0)) {
if (detection_event.blocked_by_vad) {
ESP_LOGD(TAG, "Wake word model predicts '%s', but VAD model doesn't.", detection_event.wake_word->c_str());
} else {
constexpr float uint8_to_float_divisor =
255.0f; // Converting a quantized uint8 probability to floating point
ESP_LOGD(TAG, "Detected '%s' with sliding average probability is %.2f and max probability is %.2f",
detection_event.wake_word->c_str(), (detection_event.average_probability / uint8_to_float_divisor),
(detection_event.max_probability / uint8_to_float_divisor));
this->wake_word_detected_trigger_->trigger(*detection_event.wake_word);
if (this->stop_after_detection_) {
this->stop();
}
}
}
break;
}
case State::STOPPING:
xEventGroupSetBits(this->event_group_, EventGroupBits::COMMAND_STOP);
break;
case State::STOPPED:
break;
}
}
@@ -177,199 +350,40 @@ void MicroWakeWord::start() {
return;
}
if (this->state_ != State::IDLE) {
ESP_LOGW(TAG, "Wake word is already running");
if (this->is_running()) {
ESP_LOGW(TAG, "Wake word detection is already running");
return;
}
if (!this->load_models_() || !this->allocate_buffers_()) {
ESP_LOGE(TAG, "Failed to load the wake word model(s) or allocate buffers");
this->status_set_error();
} else {
this->status_clear_error();
}
ESP_LOGD(TAG, "Starting wake word detection");
if (this->status_has_error()) {
ESP_LOGW(TAG, "Wake word component has an error. Please check logs");
return;
}
this->reset_states_();
this->set_state_(State::START_MICROPHONE);
this->pending_start_ = true;
this->pending_stop_ = false;
}
void MicroWakeWord::stop() {
if (this->state_ == State::IDLE) {
ESP_LOGW(TAG, "Wake word is already stopped");
if (this->state_ == STOPPED)
return;
}
if (this->state_ == State::STOPPING_MICROPHONE) {
ESP_LOGW(TAG, "Wake word is already stopping");
return;
}
this->set_state_(State::STOP_MICROPHONE);
ESP_LOGD(TAG, "Stopping wake word detection");
this->pending_start_ = false;
this->pending_stop_ = true;
}
void MicroWakeWord::set_state_(State state) {
ESP_LOGD(TAG, "State changed from %s to %s", LOG_STR_ARG(micro_wake_word_state_to_string(this->state_)),
LOG_STR_ARG(micro_wake_word_state_to_string(state)));
this->state_ = state;
if (this->state_ != state) {
ESP_LOGD(TAG, "State changed from %s to %s", LOG_STR_ARG(micro_wake_word_state_to_string(this->state_)),
LOG_STR_ARG(micro_wake_word_state_to_string(state)));
this->state_ = state;
}
}
bool MicroWakeWord::allocate_buffers_() {
ExternalRAMAllocator<int16_t> audio_samples_allocator(ExternalRAMAllocator<int16_t>::ALLOW_FAILURE);
if (this->input_buffer_ == nullptr) {
this->input_buffer_ = audio_samples_allocator.allocate(INPUT_BUFFER_SIZE * sizeof(int16_t));
if (this->input_buffer_ == nullptr) {
ESP_LOGE(TAG, "Could not allocate input buffer");
return false;
}
}
if (this->preprocessor_audio_buffer_ == nullptr) {
this->preprocessor_audio_buffer_ = audio_samples_allocator.allocate(this->new_samples_to_get_());
if (this->preprocessor_audio_buffer_ == nullptr) {
ESP_LOGE(TAG, "Could not allocate the audio preprocessor's buffer.");
return false;
}
}
if (this->ring_buffer_.use_count() == 0) {
this->ring_buffer_ = RingBuffer::create(BUFFER_SIZE * sizeof(int16_t));
if (this->ring_buffer_.use_count() == 0) {
ESP_LOGE(TAG, "Could not allocate ring buffer");
return false;
}
}
return true;
}
void MicroWakeWord::deallocate_buffers_() {
ExternalRAMAllocator<int16_t> audio_samples_allocator(ExternalRAMAllocator<int16_t>::ALLOW_FAILURE);
if (this->input_buffer_ != nullptr) {
audio_samples_allocator.deallocate(this->input_buffer_, INPUT_BUFFER_SIZE * sizeof(int16_t));
this->input_buffer_ = nullptr;
}
if (this->preprocessor_audio_buffer_ != nullptr) {
audio_samples_allocator.deallocate(this->preprocessor_audio_buffer_, this->new_samples_to_get_());
this->preprocessor_audio_buffer_ = nullptr;
}
this->ring_buffer_.reset();
}
bool MicroWakeWord::load_models_() {
// Setup preprocessor feature generator
if (!FrontendPopulateState(&this->frontend_config_, &this->frontend_state_, AUDIO_SAMPLE_FREQUENCY)) {
ESP_LOGD(TAG, "Failed to populate frontend state");
FrontendFreeStateContents(&this->frontend_state_);
return false;
}
// Setup streaming models
for (auto &model : this->wake_word_models_) {
if (!model.load_model(this->streaming_op_resolver_)) {
ESP_LOGE(TAG, "Failed to initialize a wake word model.");
return false;
}
}
#ifdef USE_MICRO_WAKE_WORD_VAD
if (!this->vad_model_->load_model(this->streaming_op_resolver_)) {
ESP_LOGE(TAG, "Failed to initialize VAD model.");
return false;
}
#endif
return true;
}
void MicroWakeWord::unload_models_() {
FrontendFreeStateContents(&this->frontend_state_);
for (auto &model : this->wake_word_models_) {
model.unload_model();
}
#ifdef USE_MICRO_WAKE_WORD_VAD
this->vad_model_->unload_model();
#endif
}
void MicroWakeWord::update_model_probabilities_() {
int8_t audio_features[PREPROCESSOR_FEATURE_SIZE];
if (!this->generate_features_for_window_(audio_features)) {
return;
}
// Increase the counter since the last positive detection
this->ignore_windows_ = std::min(this->ignore_windows_ + 1, 0);
for (auto &model : this->wake_word_models_) {
// Perform inference
model.perform_streaming_inference(audio_features);
}
#ifdef USE_MICRO_WAKE_WORD_VAD
this->vad_model_->perform_streaming_inference(audio_features);
#endif
}
bool MicroWakeWord::detect_wake_words_() {
// Verify we have processed samples since the last positive detection
if (this->ignore_windows_ < 0) {
return false;
}
#ifdef USE_MICRO_WAKE_WORD_VAD
bool vad_state = this->vad_model_->determine_detected();
#endif
for (auto &model : this->wake_word_models_) {
if (model.determine_detected()) {
#ifdef USE_MICRO_WAKE_WORD_VAD
if (vad_state) {
#endif
this->detected_wake_word_ = model.get_wake_word();
return true;
#ifdef USE_MICRO_WAKE_WORD_VAD
} else {
ESP_LOGD(TAG, "Wake word model predicts %s, but VAD model doesn't.", model.get_wake_word().c_str());
}
#endif
}
}
return false;
}
bool MicroWakeWord::has_enough_samples_() {
return this->ring_buffer_->available() >=
(this->features_step_size_ * (AUDIO_SAMPLE_FREQUENCY / 1000)) * sizeof(int16_t);
}
bool MicroWakeWord::generate_features_for_window_(int8_t features[PREPROCESSOR_FEATURE_SIZE]) {
// Ensure we have enough new audio samples in the ring buffer for a full window
if (!this->has_enough_samples_()) {
return false;
}
size_t bytes_read = this->ring_buffer_->read((void *) (this->preprocessor_audio_buffer_),
this->new_samples_to_get_() * sizeof(int16_t), pdMS_TO_TICKS(200));
if (bytes_read == 0) {
ESP_LOGE(TAG, "Could not read data from Ring Buffer");
} else if (bytes_read < this->new_samples_to_get_() * sizeof(int16_t)) {
ESP_LOGD(TAG, "Partial Read of Data by Model");
ESP_LOGD(TAG, "Could only read %d bytes when required %d bytes ", bytes_read,
(int) (this->new_samples_to_get_() * sizeof(int16_t)));
return false;
}
size_t num_samples_read;
struct FrontendOutput frontend_output = FrontendProcessSamples(
&this->frontend_state_, this->preprocessor_audio_buffer_, this->new_samples_to_get_(), &num_samples_read);
size_t MicroWakeWord::generate_features_(int16_t *audio_buffer, size_t samples_available,
int8_t features_buffer[PREPROCESSOR_FEATURE_SIZE]) {
size_t processed_samples = 0;
struct FrontendOutput frontend_output =
FrontendProcessSamples(&this->frontend_state_, audio_buffer, samples_available, &processed_samples);
for (size_t i = 0; i < frontend_output.size; ++i) {
// These scaling values are set to match the TFLite audio frontend int8 output.
@@ -379,8 +393,8 @@ bool MicroWakeWord::generate_features_for_window_(int8_t features[PREPROCESSOR_F
// for historical reasons, to match up with the output of other feature
// generators.
// The process is then further complicated when we quantize the model. This
// means we have to scale the 0.0 to 26.0 real values to the -128 to 127
// signed integer numbers.
// means we have to scale the 0.0 to 26.0 real values to the -128 (INT8_MIN)
// to 127 (INT8_MAX) signed integer numbers.
// All this means that to get matching values from our integer feature
// output into the tensor input, we have to perform:
// input = (((feature / 25.6) / 26.0) * 256) - 128
@@ -389,74 +403,63 @@ bool MicroWakeWord::generate_features_for_window_(int8_t features[PREPROCESSOR_F
constexpr int32_t value_scale = 256;
constexpr int32_t value_div = 666; // 666 = 25.6 * 26.0 after rounding
int32_t value = ((frontend_output.values[i] * value_scale) + (value_div / 2)) / value_div;
value -= 128;
if (value < -128) {
value = -128;
}
if (value > 127) {
value = 127;
}
features[i] = value;
value += INT8_MIN; // Adds INT8_MIN (-128), i.e. subtracts 128
features_buffer[i] = static_cast<int8_t>(clamp<int32_t>(value, INT8_MIN, INT8_MAX));
}
return true;
return processed_samples;
}
void MicroWakeWord::reset_states_() {
ESP_LOGD(TAG, "Resetting buffers and probabilities");
this->ring_buffer_->reset();
this->ignore_windows_ = -MIN_SLICES_BEFORE_DETECTION;
void MicroWakeWord::process_probabilities_() {
#ifdef USE_MICRO_WAKE_WORD_VAD
DetectionEvent vad_state = this->vad_model_->determine_detected();
this->vad_state_ = vad_state.detected; // atomic write, so thread safe
#endif
for (auto &model : this->wake_word_models_) {
model.reset_probabilities();
if (model->get_unprocessed_probability_status()) {
// Only detect wake words if there is a new probability since the last check
DetectionEvent wake_word_state = model->determine_detected();
if (wake_word_state.detected) {
#ifdef USE_MICRO_WAKE_WORD_VAD
if (vad_state.detected) {
#endif
xQueueSend(this->detection_queue_, &wake_word_state, portMAX_DELAY);
model->reset_probabilities();
#ifdef USE_MICRO_WAKE_WORD_VAD
} else {
wake_word_state.blocked_by_vad = true;
xQueueSend(this->detection_queue_, &wake_word_state, portMAX_DELAY);
}
#endif
}
}
}
}
void MicroWakeWord::unload_models_() {
for (auto &model : this->wake_word_models_) {
model->unload_model();
}
#ifdef USE_MICRO_WAKE_WORD_VAD
this->vad_model_->reset_probabilities();
this->vad_model_->unload_model();
#endif
}
bool MicroWakeWord::register_streaming_ops_(tflite::MicroMutableOpResolver<20> &op_resolver) {
if (op_resolver.AddCallOnce() != kTfLiteOk)
return false;
if (op_resolver.AddVarHandle() != kTfLiteOk)
return false;
if (op_resolver.AddReshape() != kTfLiteOk)
return false;
if (op_resolver.AddReadVariable() != kTfLiteOk)
return false;
if (op_resolver.AddStridedSlice() != kTfLiteOk)
return false;
if (op_resolver.AddConcatenation() != kTfLiteOk)
return false;
if (op_resolver.AddAssignVariable() != kTfLiteOk)
return false;
if (op_resolver.AddConv2D() != kTfLiteOk)
return false;
if (op_resolver.AddMul() != kTfLiteOk)
return false;
if (op_resolver.AddAdd() != kTfLiteOk)
return false;
if (op_resolver.AddMean() != kTfLiteOk)
return false;
if (op_resolver.AddFullyConnected() != kTfLiteOk)
return false;
if (op_resolver.AddLogistic() != kTfLiteOk)
return false;
if (op_resolver.AddQuantize() != kTfLiteOk)
return false;
if (op_resolver.AddDepthwiseConv2D() != kTfLiteOk)
return false;
if (op_resolver.AddAveragePool2D() != kTfLiteOk)
return false;
if (op_resolver.AddMaxPool2D() != kTfLiteOk)
return false;
if (op_resolver.AddPad() != kTfLiteOk)
return false;
if (op_resolver.AddPack() != kTfLiteOk)
return false;
if (op_resolver.AddSplitV() != kTfLiteOk)
return false;
bool MicroWakeWord::update_model_probabilities_(const int8_t audio_features[PREPROCESSOR_FEATURE_SIZE]) {
bool success = true;
return true;
for (auto &model : this->wake_word_models_) {
// Perform inference
success = success & model->perform_streaming_inference(audio_features);
}
#ifdef USE_MICRO_WAKE_WORD_VAD
success = success & this->vad_model_->perform_streaming_inference(audio_features);
#endif
return success;
}
} // namespace micro_wake_word

@@ -5,33 +5,27 @@
#include "preprocessor_settings.h"
#include "streaming_model.h"
#include "esphome/components/microphone/microphone_source.h"
#include "esphome/core/automation.h"
#include "esphome/core/component.h"
#include "esphome/core/ring_buffer.h"
#include "esphome/components/microphone/microphone.h"
#include <freertos/event_groups.h>
#include <frontend.h>
#include <frontend_util.h>
#include <tensorflow/lite/core/c/common.h>
#include <tensorflow/lite/micro/micro_interpreter.h>
#include <tensorflow/lite/micro/micro_mutable_op_resolver.h>
namespace esphome {
namespace micro_wake_word {
enum State {
IDLE,
START_MICROPHONE,
STARTING_MICROPHONE,
STARTING,
DETECTING_WAKE_WORD,
STOP_MICROPHONE,
STOPPING_MICROPHONE,
STOPPING,
STOPPED,
};
// The number of audio slices to process before accepting a positive detection
static const uint8_t MIN_SLICES_BEFORE_DETECTION = 74;
class MicroWakeWord : public Component {
public:
void setup() override;
@@ -42,124 +36,95 @@ class MicroWakeWord : public Component {
void start();
void stop();
bool is_running() const { return this->state_ != State::IDLE; }
bool is_running() const { return this->state_ != State::STOPPED; }
void set_features_step_size(uint8_t step_size) { this->features_step_size_ = step_size; }
void set_microphone(microphone::Microphone *microphone) { this->microphone_ = microphone; }
void set_microphone_source(microphone::MicrophoneSource *microphone_source) {
this->microphone_source_ = microphone_source;
}
void set_stop_after_detection(bool stop_after_detection) { this->stop_after_detection_ = stop_after_detection; }
Trigger<std::string> *get_wake_word_detected_trigger() const { return this->wake_word_detected_trigger_; }
void add_wake_word_model(const uint8_t *model_start, float probability_cutoff, size_t sliding_window_average_size,
const std::string &wake_word, size_t tensor_arena_size);
void add_wake_word_model(WakeWordModel *model);
#ifdef USE_MICRO_WAKE_WORD_VAD
void add_vad_model(const uint8_t *model_start, float probability_cutoff, size_t sliding_window_size,
void add_vad_model(const uint8_t *model_start, uint8_t probability_cutoff, size_t sliding_window_size,
size_t tensor_arena_size);
// Intended for the voice assistant component to fetch VAD status
bool get_vad_state() { return this->vad_state_; }
#endif
// Intended for the voice assistant component to access which wake words are available
// Since these are pointers to the WakeWordModel objects, the voice assistant component can enable or disable them
std::vector<WakeWordModel *> get_wake_words();
protected:
microphone::Microphone *microphone_{nullptr};
microphone::MicrophoneSource *microphone_source_{nullptr};
Trigger<std::string> *wake_word_detected_trigger_ = new Trigger<std::string>();
State state_{State::IDLE};
State state_{State::STOPPED};
std::shared_ptr<RingBuffer> ring_buffer_;
std::vector<WakeWordModel> wake_word_models_;
std::weak_ptr<RingBuffer> ring_buffer_;
std::vector<WakeWordModel *> wake_word_models_;
#ifdef USE_MICRO_WAKE_WORD_VAD
std::unique_ptr<VADModel> vad_model_;
bool vad_state_{false};
#endif
tflite::MicroMutableOpResolver<20> streaming_op_resolver_;
bool pending_start_{false};
bool pending_stop_{false};
bool stop_after_detection_;
uint8_t features_step_size_;
// Audio frontend handles generating spectrogram features
struct FrontendConfig frontend_config_;
struct FrontendState frontend_state_;
// When the wake word detection first starts, we ignore this many audio
// feature slices before accepting a positive detection
int16_t ignore_windows_{-MIN_SLICES_BEFORE_DETECTION};
// Handles managing the stop/state of the inference task
EventGroupHandle_t event_group_;
uint8_t features_step_size_;
// Used to send messages about the models' states to the main loop
QueueHandle_t detection_queue_;
// Stores audio read from the microphone before being added to the ring buffer.
int16_t *input_buffer_{nullptr};
// Stores audio to be fed into the audio frontend for generating features.
int16_t *preprocessor_audio_buffer_{nullptr};
static void inference_task(void *params);
TaskHandle_t inference_task_handle_{nullptr};
bool detected_{false};
std::string detected_wake_word_{""};
/// @brief Suspends the inference task
void suspend_task_();
/// @brief Resumes the inference task
void resume_task_();
void set_state_(State state);
/// @brief Tests if there are enough samples in the ring buffer to generate new features.
/// @return True if enough samples, false otherwise.
bool has_enough_samples_();
/// @brief Generates spectrogram features from an input buffer of audio samples
/// @param audio_buffer (int16_t *) Buffer containing input audio samples
/// @param samples_available (size_t) Number of samples available in the input buffer
/// @param features_buffer (int8_t *) Buffer to store generated features
/// @return (size_t) Number of samples processed from the input buffer
size_t generate_features_(int16_t *audio_buffer, size_t samples_available,
int8_t features_buffer[PREPROCESSOR_FEATURE_SIZE]);
/// @brief Allocates memory for input_buffer_, preprocessor_audio_buffer_, and ring_buffer_
/// @return True if successful, false otherwise
bool allocate_buffers_();
/// @brief Processes any new probabilities for each model. If any wake word is detected, it will send a DetectionEvent
/// to the detection_queue_.
void process_probabilities_();
/// @brief Frees memory allocated for input_buffer_ and preprocessor_audio_buffer_
void deallocate_buffers_();
/// @brief Loads streaming models and prepares the feature generation frontend
/// @return True if successful, false otherwise
bool load_models_();
/// @brief Deletes each model's TFLite interpreters and frees tensor arena memory. Frees memory used by the feature
/// generation frontend.
/// @brief Deletes each model's TFLite interpreters and frees tensor arena memory.
void unload_models_();
/** Performs inference with each configured model
*
* If enough audio samples are available, it will generate one slice of new features.
* It then loops through and performs inference with each of the loaded models.
*/
void update_model_probabilities_();
/** Checks every model's recent probabilities to determine if the wake word has been predicted
*
* Verifies the models have processed enough new samples for accurate predictions.
* Sets detected_wake_word_ to the wake word, if one is detected.
* @return True if a wake word is predicted, false otherwise
*/
bool detect_wake_words_();
/** Generates features for a window of audio samples
*
* Reads samples from the ring buffer and feeds them into the preprocessor frontend.
* Adapted from TFLite microspeech frontend.
* @param features int8_t array to store the audio features
* @return True if successful, false otherwise.
*/
bool generate_features_for_window_(int8_t features[PREPROCESSOR_FEATURE_SIZE]);
/// @brief Resets the ring buffer, ignore_windows_, and sliding window probabilities
void reset_states_();
/// @brief Returns true if successfully registered the streaming model's TensorFlow operations
bool register_streaming_ops_(tflite::MicroMutableOpResolver<20> &op_resolver);
/// @brief Runs an inference with each model using the new spectrogram features
/// @param audio_features (int8_t *) Buffer containing new spectrogram features
/// @return True if successful, false if any errors were encountered
bool update_model_probabilities_(const int8_t audio_features[PREPROCESSOR_FEATURE_SIZE]);
inline uint16_t new_samples_to_get_() { return (this->features_step_size_ * (AUDIO_SAMPLE_FREQUENCY / 1000)); }
};
template<typename... Ts> class StartAction : public Action<Ts...>, public Parented<MicroWakeWord> {
public:
void play(Ts... x) override { this->parent_->start(); }
};
template<typename... Ts> class StopAction : public Action<Ts...>, public Parented<MicroWakeWord> {
public:
void play(Ts... x) override { this->parent_->stop(); }
};
template<typename... Ts> class IsRunningCondition : public Condition<Ts...>, public Parented<MicroWakeWord> {
public:
bool check(Ts... x) override { return this->parent_->is_running(); }
};
} // namespace micro_wake_word
} // namespace esphome

@@ -7,6 +7,10 @@
namespace esphome {
namespace micro_wake_word {
// Settings for controlling the spectrogram feature generation by the preprocessor.
// These must match the settings used when training a particular model.
// All microWakeWord models have been trained with these specific parameters.
// The number of features the audio preprocessor generates per slice
static const uint8_t PREPROCESSOR_FEATURE_SIZE = 40;
// Duration of each slice used as input into the preprocessor
@@ -14,6 +18,21 @@ static const uint8_t FEATURE_DURATION_MS = 30;
// Audio sample frequency in hertz
static const uint16_t AUDIO_SAMPLE_FREQUENCY = 16000;
static const float FILTERBANK_LOWER_BAND_LIMIT = 125.0;
static const float FILTERBANK_UPPER_BAND_LIMIT = 7500.0;
static const uint8_t NOISE_REDUCTION_SMOOTHING_BITS = 10;
static const float NOISE_REDUCTION_EVEN_SMOOTHING = 0.025;
static const float NOISE_REDUCTION_ODD_SMOOTHING = 0.06;
static const float NOISE_REDUCTION_MIN_SIGNAL_REMAINING = 0.05;
static const bool PCAN_GAIN_CONTROL_ENABLE_PCAN = true;
static const float PCAN_GAIN_CONTROL_STRENGTH = 0.95;
static const float PCAN_GAIN_CONTROL_OFFSET = 80.0;
static const uint8_t PCAN_GAIN_CONTROL_GAIN_BITS = 21;
static const bool LOG_SCALE_ENABLE_LOG = true;
static const uint8_t LOG_SCALE_SCALE_SHIFT = 6;
} // namespace micro_wake_word
} // namespace esphome

@@ -1,8 +1,7 @@
#ifdef USE_ESP_IDF
#include "streaming_model.h"
#include "esphome/core/hal.h"
#ifdef USE_ESP_IDF
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
@@ -13,18 +12,18 @@ namespace micro_wake_word {
void WakeWordModel::log_model_config() {
ESP_LOGCONFIG(TAG, " - Wake Word: %s", this->wake_word_.c_str());
ESP_LOGCONFIG(TAG, " Probability cutoff: %.3f", this->probability_cutoff_);
ESP_LOGCONFIG(TAG, " Probability cutoff: %.2f", this->probability_cutoff_ / 255.0f);
ESP_LOGCONFIG(TAG, " Sliding window size: %d", this->sliding_window_size_);
}
void VADModel::log_model_config() {
ESP_LOGCONFIG(TAG, " - VAD Model");
ESP_LOGCONFIG(TAG, " Probability cutoff: %.3f", this->probability_cutoff_);
ESP_LOGCONFIG(TAG, " Probability cutoff: %.2f", this->probability_cutoff_ / 255.0f);
ESP_LOGCONFIG(TAG, " Sliding window size: %d", this->sliding_window_size_);
}
bool StreamingModel::load_model(tflite::MicroMutableOpResolver<20> &op_resolver) {
ExternalRAMAllocator<uint8_t> arena_allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
bool StreamingModel::load_model_() {
RAMAllocator<uint8_t> arena_allocator(RAMAllocator<uint8_t>::ALLOW_FAILURE);
if (this->tensor_arena_ == nullptr) {
this->tensor_arena_ = arena_allocator.allocate(this->tensor_arena_size_);
@@ -51,8 +50,9 @@ bool StreamingModel::load_model(tflite::MicroMutableOpResolver<20> &op_resolver)
}
if (this->interpreter_ == nullptr) {
this->interpreter_ = make_unique<tflite::MicroInterpreter>(
tflite::GetModel(this->model_start_), op_resolver, this->tensor_arena_, this->tensor_arena_size_, this->mrv_);
this->interpreter_ =
make_unique<tflite::MicroInterpreter>(tflite::GetModel(this->model_start_), this->streaming_op_resolver_,
this->tensor_arena_, this->tensor_arena_size_, this->mrv_);
if (this->interpreter_->AllocateTensors() != kTfLiteOk) {
ESP_LOGE(TAG, "Failed to allocate tensors for the streaming model");
return false;
@@ -84,34 +84,55 @@ bool StreamingModel::load_model(tflite::MicroMutableOpResolver<20> &op_resolver)
}
}
this->loaded_ = true;
this->reset_probabilities();
return true;
}
void StreamingModel::unload_model() {
this->interpreter_.reset();
ExternalRAMAllocator<uint8_t> arena_allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
RAMAllocator<uint8_t> arena_allocator(RAMAllocator<uint8_t>::ALLOW_FAILURE);
arena_allocator.deallocate(this->tensor_arena_, this->tensor_arena_size_);
this->tensor_arena_ = nullptr;
arena_allocator.deallocate(this->var_arena_, STREAMING_MODEL_VARIABLE_ARENA_SIZE);
this->var_arena_ = nullptr;
if (this->tensor_arena_ != nullptr) {
arena_allocator.deallocate(this->tensor_arena_, this->tensor_arena_size_);
this->tensor_arena_ = nullptr;
}
if (this->var_arena_ != nullptr) {
arena_allocator.deallocate(this->var_arena_, STREAMING_MODEL_VARIABLE_ARENA_SIZE);
this->var_arena_ = nullptr;
}
this->loaded_ = false;
}
bool StreamingModel::perform_streaming_inference(const int8_t features[PREPROCESSOR_FEATURE_SIZE]) {
if (this->interpreter_ != nullptr) {
if (this->enabled_ && !this->loaded_) {
// Model is enabled but isn't loaded
if (!this->load_model_()) {
return false;
}
}
if (!this->enabled_ && this->loaded_) {
// Model is disabled but still loaded
this->unload_model();
return true;
}
if (this->loaded_) {
TfLiteTensor *input = this->interpreter_->input(0);
uint8_t stride = this->interpreter_->input(0)->dims->data[1];
this->current_stride_step_ = this->current_stride_step_ % stride;
std::memmove(
(int8_t *) (tflite::GetTensorData<int8_t>(input)) + PREPROCESSOR_FEATURE_SIZE * this->current_stride_step_,
features, PREPROCESSOR_FEATURE_SIZE);
++this->current_stride_step_;
uint8_t stride = this->interpreter_->input(0)->dims->data[1];
if (this->current_stride_step_ >= stride) {
this->current_stride_step_ = 0;
TfLiteStatus invoke_status = this->interpreter_->Invoke();
if (invoke_status != kTfLiteOk) {
ESP_LOGW(TAG, "Streaming interpreter invoke failed");
@@ -124,65 +145,159 @@ bool StreamingModel::perform_streaming_inference(const int8_t features[PREPROCES
if (this->last_n_index_ == this->sliding_window_size_)
this->last_n_index_ = 0;
this->recent_streaming_probabilities_[this->last_n_index_] = output->data.uint8[0]; // probability;
this->unprocessed_probability_status_ = true;
}
return true;
this->ignore_windows_ = std::min(this->ignore_windows_ + 1, 0);
}
ESP_LOGE(TAG, "Streaming interpreter is not initialized.");
return false;
return true;
}
void StreamingModel::reset_probabilities() {
for (auto &prob : this->recent_streaming_probabilities_) {
prob = 0;
}
this->ignore_windows_ = -MIN_SLICES_BEFORE_DETECTION;
}
WakeWordModel::WakeWordModel(const uint8_t *model_start, float probability_cutoff, size_t sliding_window_average_size,
const std::string &wake_word, size_t tensor_arena_size) {
WakeWordModel::WakeWordModel(const std::string &id, const uint8_t *model_start, uint8_t probability_cutoff,
size_t sliding_window_average_size, const std::string &wake_word, size_t tensor_arena_size,
bool default_enabled, bool internal_only) {
this->id_ = id;
this->model_start_ = model_start;
this->probability_cutoff_ = probability_cutoff;
this->sliding_window_size_ = sliding_window_average_size;
this->recent_streaming_probabilities_.resize(sliding_window_average_size, 0);
this->wake_word_ = wake_word;
this->tensor_arena_size_ = tensor_arena_size;
this->register_streaming_ops_(this->streaming_op_resolver_);
this->current_stride_step_ = 0;
this->internal_only_ = internal_only;
this->pref_ = global_preferences->make_preference<bool>(fnv1_hash(id));
bool enabled;
if (this->pref_.load(&enabled)) {
// Use the enabled state loaded from flash
this->enabled_ = enabled;
} else {
// If no state saved, then use the default
this->enabled_ = default_enabled;
}
};
bool WakeWordModel::determine_detected() {
void WakeWordModel::enable() {
this->enabled_ = true;
if (!this->internal_only_) {
this->pref_.save(&this->enabled_);
}
}
void WakeWordModel::disable() {
this->enabled_ = false;
if (!this->internal_only_) {
this->pref_.save(&this->enabled_);
}
}
DetectionEvent WakeWordModel::determine_detected() {
DetectionEvent detection_event;
detection_event.wake_word = &this->wake_word_;
detection_event.max_probability = 0;
detection_event.average_probability = 0;
if ((this->ignore_windows_ < 0) || !this->enabled_) {
detection_event.detected = false;
return detection_event;
}
uint32_t sum = 0;
for (auto &prob : this->recent_streaming_probabilities_) {
detection_event.max_probability = std::max(detection_event.max_probability, prob);
sum += prob;
}
float sliding_window_average = static_cast<float>(sum) / static_cast<float>(255 * this->sliding_window_size_);
detection_event.average_probability = sum / this->sliding_window_size_;
detection_event.detected = sum > this->probability_cutoff_ * this->sliding_window_size_;
// Detect the wake word if the sliding window average is above the cutoff
if (sliding_window_average > this->probability_cutoff_) {
ESP_LOGD(TAG, "The '%s' model sliding average probability is %.3f and most recent probability is %.3f",
this->wake_word_.c_str(), sliding_window_average,
this->recent_streaming_probabilities_[this->last_n_index_] / (255.0));
return true;
}
return false;
this->unprocessed_probability_status_ = false;
return detection_event;
}
VADModel::VADModel(const uint8_t *model_start, float probability_cutoff, size_t sliding_window_size,
VADModel::VADModel(const uint8_t *model_start, uint8_t probability_cutoff, size_t sliding_window_size,
size_t tensor_arena_size) {
this->model_start_ = model_start;
this->probability_cutoff_ = probability_cutoff;
this->sliding_window_size_ = sliding_window_size;
this->recent_streaming_probabilities_.resize(sliding_window_size, 0);
this->tensor_arena_size_ = tensor_arena_size;
};
this->register_streaming_ops_(this->streaming_op_resolver_);
}
DetectionEvent VADModel::determine_detected() {
DetectionEvent detection_event;
detection_event.max_probability = 0;
detection_event.average_probability = 0;
if (!this->enabled_) {
// We disabled the VAD model for some reason... so we shouldn't block wake words from being detected
detection_event.detected = true;
return detection_event;
}
bool VADModel::determine_detected() {
uint32_t sum = 0;
for (auto &prob : this->recent_streaming_probabilities_) {
detection_event.max_probability = std::max(detection_event.max_probability, prob);
sum += prob;
}
float sliding_window_average = static_cast<float>(sum) / static_cast<float>(255 * this->sliding_window_size_);
detection_event.average_probability = sum / this->sliding_window_size_;
detection_event.detected = sum > (this->probability_cutoff_ * this->sliding_window_size_);
return sliding_window_average > this->probability_cutoff_;
return detection_event;
}
bool StreamingModel::register_streaming_ops_(tflite::MicroMutableOpResolver<20> &op_resolver) {
if (op_resolver.AddCallOnce() != kTfLiteOk)
return false;
if (op_resolver.AddVarHandle() != kTfLiteOk)
return false;
if (op_resolver.AddReshape() != kTfLiteOk)
return false;
if (op_resolver.AddReadVariable() != kTfLiteOk)
return false;
if (op_resolver.AddStridedSlice() != kTfLiteOk)
return false;
if (op_resolver.AddConcatenation() != kTfLiteOk)
return false;
if (op_resolver.AddAssignVariable() != kTfLiteOk)
return false;
if (op_resolver.AddConv2D() != kTfLiteOk)
return false;
if (op_resolver.AddMul() != kTfLiteOk)
return false;
if (op_resolver.AddAdd() != kTfLiteOk)
return false;
if (op_resolver.AddMean() != kTfLiteOk)
return false;
if (op_resolver.AddFullyConnected() != kTfLiteOk)
return false;
if (op_resolver.AddLogistic() != kTfLiteOk)
return false;
if (op_resolver.AddQuantize() != kTfLiteOk)
return false;
if (op_resolver.AddDepthwiseConv2D() != kTfLiteOk)
return false;
if (op_resolver.AddAveragePool2D() != kTfLiteOk)
return false;
if (op_resolver.AddMaxPool2D() != kTfLiteOk)
return false;
if (op_resolver.AddPad() != kTfLiteOk)
return false;
if (op_resolver.AddPack() != kTfLiteOk)
return false;
if (op_resolver.AddSplitV() != kTfLiteOk)
return false;
return true;
}
} // namespace micro_wake_word

@@ -4,6 +4,8 @@
#include "preprocessor_settings.h"
#include "esphome/core/preferences.h"
#include <tensorflow/lite/core/c/common.h>
#include <tensorflow/lite/micro/micro_interpreter.h>
#include <tensorflow/lite/micro/micro_mutable_op_resolver.h>
@@ -11,30 +13,63 @@
namespace esphome {
namespace micro_wake_word {
static const uint8_t MIN_SLICES_BEFORE_DETECTION = 100;
static const uint32_t STREAMING_MODEL_VARIABLE_ARENA_SIZE = 1024;
struct DetectionEvent {
std::string *wake_word;
bool detected;
bool partially_detected; // Set if the most recent probability exceeds the threshold, but the sliding window average
// hasn't yet
uint8_t max_probability;
uint8_t average_probability;
bool blocked_by_vad = false;
};
class StreamingModel {
public:
virtual void log_model_config() = 0;
virtual bool determine_detected() = 0;
virtual DetectionEvent determine_detected() = 0;
// Performs inference on the given features.
// - If the model is enabled but not loaded, it will load it
// - If the model is disabled but loaded, it will unload it
// Returns true if successful or false if there is an error
bool perform_streaming_inference(const int8_t features[PREPROCESSOR_FEATURE_SIZE]);
/// @brief Sets all recent_streaming_probabilities to 0
/// @brief Sets all recent_streaming_probabilities to 0 and resets the ignore window count
void reset_probabilities();
/// @brief Allocates tensor and variable arenas and sets up the model interpreter
/// @param op_resolver MicroMutableOpResolver object that must exist until the model is unloaded
/// @return True if successful, false otherwise
bool load_model(tflite::MicroMutableOpResolver<20> &op_resolver);
/// @brief Destroys the TFLite interpreter and frees the tensor and variable arenas' memory
void unload_model();
protected:
uint8_t current_stride_step_{0};
/// @brief Enable the model. The next perform_streaming_inference call will load it.
virtual void enable() { this->enabled_ = true; }
float probability_cutoff_;
/// @brief Disable the model. The next perform_streaming_inference call will unload it.
virtual void disable() { this->enabled_ = false; }
/// @brief Return true if the model is enabled.
bool is_enabled() { return this->enabled_; }
bool get_unprocessed_probability_status() { return this->unprocessed_probability_status_; }
protected:
/// @brief Allocates tensor and variable arenas and sets up the model interpreter
/// @return True if successful, false otherwise
bool load_model_();
/// @brief Returns true if successfully registered the streaming model's TensorFlow operations
bool register_streaming_ops_(tflite::MicroMutableOpResolver<20> &op_resolver);
tflite::MicroMutableOpResolver<20> streaming_op_resolver_;
bool loaded_{false};
bool enabled_{true};
bool unprocessed_probability_status_{false};
uint8_t current_stride_step_{0};
int16_t ignore_windows_{-MIN_SLICES_BEFORE_DETECTION};
uint8_t probability_cutoff_; // Quantized probability cutoff mapping 0.0 - 1.0 to 0 - 255
size_t sliding_window_size_;
size_t last_n_index_{0};
size_t tensor_arena_size_;
@@ -50,32 +85,62 @@ class StreamingModel {
class WakeWordModel final : public StreamingModel {
public:
WakeWordModel(const uint8_t *model_start, float probability_cutoff, size_t sliding_window_average_size,
const std::string &wake_word, size_t tensor_arena_size);
/// @brief Constructs a wake word model object
/// @param id (std::string) identifier for this model
/// @param model_start (const uint8_t *) pointer to the start of the model's TFLite FlatBuffer
/// @param probability_cutoff (uint8_t) probability cutoff for accepting that the wake word has been said
/// @param sliding_window_average_size (size_t) the length of the sliding window used to compute the rolling mean
/// probability
/// @param wake_word (std::string) Friendly name of the wake word
/// @param tensor_arena_size (size_t) Size in bytes for allocating the tensor arena
/// @param default_enabled (bool) If true, it will be enabled by default on first boot
/// @param internal_only (bool) If true, the model will not be exposed to Home Assistant as an available model
WakeWordModel(const std::string &id, const uint8_t *model_start, uint8_t probability_cutoff,
size_t sliding_window_average_size, const std::string &wake_word, size_t tensor_arena_size,
bool default_enabled, bool internal_only);
void log_model_config() override;
/// @brief Checks for the wake word by comparing the mean probability in the sliding window with the probability
/// cutoff
/// @return True if wake word is detected, false otherwise
bool determine_detected() override;
DetectionEvent determine_detected() override;
const std::string &get_id() const { return this->id_; }
const std::string &get_wake_word() const { return this->wake_word_; }
void add_trained_language(const std::string &language) { this->trained_languages_.push_back(language); }
const std::vector<std::string> &get_trained_languages() const { return this->trained_languages_; }
/// @brief Enable the model and save to flash. The next perform_streaming_inference call will load it.
void enable() override;
/// @brief Disable the model and save to flash. The next perform_streaming_inference call will unload it.
void disable() override;
bool get_internal_only() { return this->internal_only_; }
protected:
std::string id_;
std::string wake_word_;
std::vector<std::string> trained_languages_;
bool internal_only_;
ESPPreferenceObject pref_;
};
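As a rough illustration of how the constructor and accessors above fit together, the following hedged sketch builds one model; the identifiers, cutoff, window size, and arena size are all hypothetical and not taken from a generated configuration.

// Hypothetical construction sketch (all names and numbers are illustrative).
inline WakeWordModel *make_example_model(const uint8_t *model_start) {
  auto *model = new WakeWordModel("okay_nabu_model_id", model_start, /*probability_cutoff=*/217,
                                  /*sliding_window_average_size=*/10, "Okay Nabu", /*tensor_arena_size=*/45000,
                                  /*default_enabled=*/true, /*internal_only=*/false);
  model->add_trained_language("en");
  return model;  // typically handed to MicroWakeWord::add_wake_word_model(model)
}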
class VADModel final : public StreamingModel {
public:
VADModel(const uint8_t *model_start, float probability_cutoff, size_t sliding_window_size, size_t tensor_arena_size);
VADModel(const uint8_t *model_start, uint8_t probability_cutoff, size_t sliding_window_size,
size_t tensor_arena_size);
void log_model_config() override;
/// @brief Checks for voice activity by comparing the max probability in the sliding window with the probability
/// cutoff
/// @return True if voice activity is detected, false otherwise
bool determine_detected() override;
DetectionEvent determine_detected() override;
};
} // namespace micro_wake_word

@@ -1,12 +1,21 @@
from esphome import automation
from esphome.automation import maybe_simple_id
import esphome.codegen as cg
from esphome.components import audio
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_TRIGGER_ID
from esphome.const import (
CONF_BITS_PER_SAMPLE,
CONF_CHANNELS,
CONF_GAIN_FACTOR,
CONF_ID,
CONF_MICROPHONE,
CONF_TRIGGER_ID,
)
from esphome.core import CORE
from esphome.coroutine import coroutine_with_priority
CODEOWNERS = ["@jesserockz"]
AUTO_LOAD = ["audio"]
CODEOWNERS = ["@jesserockz", "@kahrendt"]
IS_PLATFORM_COMPONENT = True
@@ -15,6 +24,7 @@ CONF_ON_DATA = "on_data"
microphone_ns = cg.esphome_ns.namespace("microphone")
Microphone = microphone_ns.class_("Microphone")
MicrophoneSource = microphone_ns.class_("MicrophoneSource")
CaptureAction = microphone_ns.class_(
"CaptureAction", automation.Action, cg.Parented.template(Microphone)
@@ -22,16 +32,23 @@ CaptureAction = microphone_ns.class_(
StopCaptureAction = microphone_ns.class_(
"StopCaptureAction", automation.Action, cg.Parented.template(Microphone)
)
MuteAction = microphone_ns.class_(
"MuteAction", automation.Action, cg.Parented.template(Microphone)
)
UnmuteAction = microphone_ns.class_(
"UnmuteAction", automation.Action, cg.Parented.template(Microphone)
)
DataTrigger = microphone_ns.class_(
"DataTrigger",
automation.Trigger.template(cg.std_vector.template(cg.int16).operator("ref")),
automation.Trigger.template(cg.std_vector.template(cg.uint8).operator("ref")),
)
IsCapturingCondition = microphone_ns.class_(
"IsCapturingCondition", automation.Condition
)
IsMutedCondition = microphone_ns.class_("IsMutedCondition", automation.Condition)
async def setup_microphone_core_(var, config):
@@ -39,7 +56,7 @@ async def setup_microphone_core_(var, config):
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
await automation.build_automation(
trigger,
[(cg.std_vector.template(cg.int16).operator("ref").operator("const"), "x")],
[(cg.std_vector.template(cg.uint8).operator("ref").operator("const"), "x")],
conf,
)
@@ -50,7 +67,7 @@ async def register_microphone(var, config):
await setup_microphone_core_(var, config)
MICROPHONE_SCHEMA = cv.Schema(
MICROPHONE_SCHEMA = cv.Schema.extend(audio.AUDIO_COMPONENT_SCHEMA).extend(
{
cv.Optional(CONF_ON_DATA): automation.validate_automation(
{
@@ -64,7 +81,104 @@ MICROPHONE_SCHEMA = cv.Schema(
MICROPHONE_ACTION_SCHEMA = maybe_simple_id({cv.GenerateID(): cv.use_id(Microphone)})
async def media_player_action(config, action_id, template_arg, args):
def microphone_source_schema(
min_bits_per_sample: int = 16,
max_bits_per_sample: int = 16,
min_channels: int = 1,
max_channels: int = 1,
):
"""Schema for a microphone source
Components requesting microphone data should use this schema instead of accessing a microphone directly.
Args:
min_bits_per_sample (int, optional): Minimum number of bits per sample the requesting component supports. Defaults to 16.
max_bits_per_sample (int, optional): Maximum number of bits per sample the requesting component supports. Defaults to 16.
min_channels (int, optional): Minimum number of channels the requesting component supports. Defaults to 1.
max_channels (int, optional): Maximum number of channels the requesting component supports. Defaults to 1.
"""
def _validate_unique_channels(config):
if len(config) != len(set(config)):
raise cv.Invalid("Channels must be unique")
return config
return cv.All(
automation.maybe_conf(
CONF_MICROPHONE,
{
cv.GenerateID(CONF_ID): cv.declare_id(MicrophoneSource),
cv.GenerateID(CONF_MICROPHONE): cv.use_id(Microphone),
cv.Optional(CONF_BITS_PER_SAMPLE, default=16): cv.int_range(
min_bits_per_sample, max_bits_per_sample
),
cv.Optional(CONF_CHANNELS, default="0"): cv.All(
cv.ensure_list(cv.int_range(0, 7)),
cv.Length(min=min_channels, max=max_channels),
_validate_unique_channels,
),
cv.Optional(CONF_GAIN_FACTOR, default="1"): cv.int_range(1, 64),
},
),
)
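A rough sketch of how a consuming component's config schema might use this helper; the component namespace, class, and option layout below are hypothetical and only meant to show the intended call shape.

# Hypothetical consumer of microphone_source_schema(); all names here are illustrative.
_my_ns = cg.esphome_ns.namespace("my_audio_component")
_MyAudioComponent = _my_ns.class_("MyAudioComponent", cg.Component)
_EXAMPLE_CONFIG_SCHEMA = cv.Schema(
    {
        cv.GenerateID(): cv.declare_id(_MyAudioComponent),
        cv.Required(CONF_MICROPHONE): microphone_source_schema(
            min_bits_per_sample=16,
            max_bits_per_sample=16,
            min_channels=1,
            max_channels=1,
        ),
    }
).extend(cv.COMPONENT_SCHEMA)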
_UNDEF = object()
def final_validate_microphone_source_schema(
component_name: str, sample_rate: int = _UNDEF
):
"""Validates that the microphone source can provide audio in the correct format. In particular it validates the sample rate and the enabled channels.
Note that:
- MicrophoneSource class automatically handles converting bits per sample, so no need to validate
- microphone_source_schema already validates that channels are unique and specifies the max number of channels the component supports
Args:
component_name (str): The name of the component requesting mic audio
sample_rate (int, optional): The sample rate the component requesting mic audio requires
"""
def _validate_audio_compatibility(config):
if sample_rate is not _UNDEF:
# Issues require changing the microphone configuration
# - Verifies sample rates match
audio.final_validate_audio_schema(
component_name,
audio_device=CONF_MICROPHONE,
sample_rate=sample_rate,
audio_device_issue=True,
)(config)
# Issues require changing the MicrophoneSource configuration
# - Verifies that each of the enabled channels are available
audio.final_validate_audio_schema(
component_name,
audio_device=CONF_MICROPHONE,
enabled_channels=config[CONF_CHANNELS],
audio_device_issue=False,
)(config)
return config
return _validate_audio_compatibility
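For example, a hedged sketch of registering this validator from a consuming component; the component name and sample rate are illustrative (wake-word style components use 16 kHz audio, matching AUDIO_SAMPLE_FREQUENCY above).

# Hypothetical usage; in a consuming component this would typically be assigned to FINAL_VALIDATE_SCHEMA.
_example_final_validate = final_validate_microphone_source_schema(
    "my_audio_component", sample_rate=16000
)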
async def microphone_source_to_code(config):
mic = await cg.get_variable(config[CONF_MICROPHONE])
mic_source = cg.new_Pvariable(
config[CONF_ID],
mic,
config[CONF_BITS_PER_SAMPLE],
config[CONF_GAIN_FACTOR],
)
for channel in config[CONF_CHANNELS]:
cg.add(mic_source.add_channel(channel))
return mic_source
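A consuming component's code generation could then wire the microphone source into its C++ object. The sketch below is illustrative only: the function name is hypothetical and the setter mirrors set_microphone_source from the C++ header shown earlier.

# Hypothetical code-generation step for a consuming component (names are illustrative).
async def _example_to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)
    mic_source = await microphone_source_to_code(config[CONF_MICROPHONE])
    cg.add(var.set_microphone_source(mic_source))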
async def microphone_action(config, action_id, template_arg, args):
var = cg.new_Pvariable(action_id, template_arg)
await cg.register_parented(var, config[CONF_ID])
return var
@@ -72,15 +186,25 @@ async def media_player_action(config, action_id, template_arg, args):
automation.register_action(
"microphone.capture", CaptureAction, MICROPHONE_ACTION_SCHEMA
)(media_player_action)
)(microphone_action)
automation.register_action(
"microphone.stop_capture", StopCaptureAction, MICROPHONE_ACTION_SCHEMA
)(media_player_action)
)(microphone_action)
automation.register_action("microphone.mute", MuteAction, MICROPHONE_ACTION_SCHEMA)(
microphone_action
)
automation.register_action("microphone.unmute", UnmuteAction, MICROPHONE_ACTION_SCHEMA)(
microphone_action
)
automation.register_condition(
"microphone.is_capturing", IsCapturingCondition, MICROPHONE_ACTION_SCHEMA
)(media_player_action)
)(microphone_action)
automation.register_condition(
"microphone.is_muted", IsMutedCondition, MICROPHONE_ACTION_SCHEMA
)(microphone_action)
@coroutine_with_priority(100.0)

@@ -16,10 +16,17 @@ template<typename... Ts> class StopCaptureAction : public Action<Ts...>, public
void play(Ts... x) override { this->parent_->stop(); }
};
class DataTrigger : public Trigger<const std::vector<int16_t> &> {
template<typename... Ts> class MuteAction : public Action<Ts...>, public Parented<Microphone> {
void play(Ts... x) override { this->parent_->set_mute_state(true); }
};
template<typename... Ts> class UnmuteAction : public Action<Ts...>, public Parented<Microphone> {
void play(Ts... x) override { this->parent_->set_mute_state(false); }
};
class DataTrigger : public Trigger<const std::vector<uint8_t> &> {
public:
explicit DataTrigger(Microphone *mic) {
mic->add_data_callback([this](const std::vector<int16_t> &data) { this->trigger(data); });
mic->add_data_callback([this](const std::vector<uint8_t> &data) { this->trigger(data); });
}
};
@@ -28,5 +35,10 @@ template<typename... Ts> class IsCapturingCondition : public Condition<Ts...>, p
bool check(Ts... x) override { return this->parent_->is_running(); }
};
template<typename... Ts> class IsMutedCondition : public Condition<Ts...>, public Parented<Microphone> {
public:
bool check(Ts... x) override { return this->parent_->get_mute_state(); }
};
} // namespace microphone
} // namespace esphome

@@ -0,0 +1,21 @@
#include "microphone.h"
namespace esphome {
namespace microphone {
void Microphone::add_data_callback(std::function<void(const std::vector<uint8_t> &)> &&data_callback) {
std::function<void(const std::vector<uint8_t> &)> mute_handled_callback =
[this, data_callback](const std::vector<uint8_t> &data) { data_callback(this->silence_audio_(data)); };
this->data_callbacks_.add(std::move(mute_handled_callback));
}
std::vector<uint8_t> Microphone::silence_audio_(std::vector<uint8_t> data) {
if (this->mute_state_) {
std::memset((void *) data.data(), 0, data.size());
}
return data;
}
} // namespace microphone
} // namespace esphome

@@ -1,5 +1,7 @@
#pragma once
#include "esphome/components/audio/audio.h"
#include <cstddef>
#include <cstdint>
#include <functional>
@@ -20,18 +22,25 @@ class Microphone {
public:
virtual void start() = 0;
virtual void stop() = 0;
void add_data_callback(std::function<void(const std::vector<int16_t> &)> &&data_callback) {
this->data_callbacks_.add(std::move(data_callback));
}
virtual size_t read(int16_t *buf, size_t len) = 0;
void add_data_callback(std::function<void(const std::vector<uint8_t> &)> &&data_callback);
bool is_running() const { return this->state_ == STATE_RUNNING; }
bool is_stopped() const { return this->state_ == STATE_STOPPED; }
protected:
State state_{STATE_STOPPED};
void set_mute_state(bool is_muted) { this->mute_state_ = is_muted; }
bool get_mute_state() { return this->mute_state_; }
CallbackManager<void(const std::vector<int16_t> &)> data_callbacks_{};
audio::AudioStreamInfo get_audio_stream_info() { return this->audio_stream_info_; }
protected:
std::vector<uint8_t> silence_audio_(std::vector<uint8_t> data);
State state_{STATE_STOPPED};
bool mute_state_{false};
audio::AudioStreamInfo audio_stream_info_;
CallbackManager<void(const std::vector<uint8_t> &)> data_callbacks_{};
};
} // namespace microphone

@@ -0,0 +1,98 @@
#include "microphone_source.h"
namespace esphome {
namespace microphone {
void MicrophoneSource::add_data_callback(std::function<void(const std::vector<uint8_t> &)> &&data_callback) {
std::function<void(const std::vector<uint8_t> &)> filtered_callback =
[this, data_callback](const std::vector<uint8_t> &data) {
if (this->enabled_) {
data_callback(this->process_audio_(data));
}
};
this->mic_->add_data_callback(std::move(filtered_callback));
}
void MicrophoneSource::start() {
if (!this->enabled_) {
this->enabled_ = true;
this->mic_->start();
}
}
void MicrophoneSource::stop() {
if (this->enabled_) {
this->enabled_ = false;
this->mic_->stop();
}
}
std::vector<uint8_t> MicrophoneSource::process_audio_(const std::vector<uint8_t> &data) {
// Bit depth conversions are obtained by truncating bits or padding with zeros - no dithering is applied.
const size_t source_bytes_per_sample = this->mic_->get_audio_stream_info().samples_to_bytes(1);
const size_t source_channels = this->mic_->get_audio_stream_info().get_channels();
const size_t source_bytes_per_frame = this->mic_->get_audio_stream_info().frames_to_bytes(1);
const uint32_t total_frames = this->mic_->get_audio_stream_info().bytes_to_frames(data.size());
const size_t target_bytes_per_sample = (this->bits_per_sample_ + 7) / 8;
const size_t target_bytes_per_frame = target_bytes_per_sample * this->channels_.count();
std::vector<uint8_t> filtered_data;
filtered_data.reserve(target_bytes_per_frame * total_frames);
const int32_t target_min_value = -(1 << (8 * target_bytes_per_sample - 1));
const int32_t target_max_value = (1 << (8 * target_bytes_per_sample - 1)) - 1;
for (size_t frame_index = 0; frame_index < total_frames; ++frame_index) {
for (size_t channel_index = 0; channel_index < source_channels; ++channel_index) {
if (this->channels_.test(channel_index)) {
// Channel's current sample is included in the target mask. Convert bits per sample, if necessary.
size_t sample_index = frame_index * source_bytes_per_frame + channel_index * source_bytes_per_sample;
int32_t sample = 0;
// Copy the data into the most significant bits of the sample variable to ensure the sign bit is correct
uint8_t bit_offset = (4 - source_bytes_per_sample) * 8;
for (int i = 0; i < source_bytes_per_sample; ++i) {
sample |= data[sample_index + i] << bit_offset;
bit_offset += 8;
}
// Shift data back to the least significant bits
if (source_bytes_per_sample >= target_bytes_per_sample) {
// Keep source bytes per sample of data so that the gain multiplication uses all significant bits instead of
// shifting to the target bytes per sample immediately, potentially losing information.
sample >>= (4 - source_bytes_per_sample) * 8; // ``source_bytes_per_sample`` bytes of valid data
} else {
// Keep padded zeros to match the target bytes per sample
sample >>= (4 - target_bytes_per_sample) * 8; // ``target_bytes_per_sample`` bytes of valid data
}
// Apply gain using multiplication
sample *= this->gain_factor_;
// Match target output bytes by shifting out the least significant bits
if (source_bytes_per_sample > target_bytes_per_sample) {
sample >>= 8 * (source_bytes_per_sample -
target_bytes_per_sample); // ``target_bytes_per_sample`` bytes of valid data
}
// Clamp ``sample`` to the target bytes per sample range in case gain multiplication overflows
sample = clamp<int32_t>(sample, target_min_value, target_max_value);
// Copy ``target_bytes_per_sample`` bytes to the output buffer.
for (int i = 0; i < target_bytes_per_sample; ++i) {
filtered_data.push_back(static_cast<uint8_t>(sample));
sample >>= 8;
}
}
}
}
return filtered_data;
}
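To make the per-sample arithmetic above concrete, here is a minimal standalone sketch (not part of the component; the function name and values are illustrative) for the 16-bit in, 16-bit out case with a gain factor applied.

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Sketch: sign-extend one little-endian 16-bit sample, apply gain, and clamp, mirroring the loop above.
static int16_t scale_sample_16bit(uint8_t lo, uint8_t hi, int32_t gain_factor) {
  // Assemble the bytes in the most significant bits so the sign bit lands in bit 31, then shift back down.
  int32_t sample = static_cast<int32_t>((static_cast<uint32_t>(lo) << 16) | (static_cast<uint32_t>(hi) << 24));
  sample >>= 16;          // two bytes of valid, sign-extended data
  sample *= gain_factor;  // apply gain
  sample = std::min<int32_t>(std::max<int32_t>(sample, INT16_MIN), INT16_MAX);
  return static_cast<int16_t>(sample);
}

int main() {
  // 0xF000 little-endian is -4096 as int16; a gain factor of 16 saturates to INT16_MIN.
  std::printf("%d\n", scale_sample_16bit(0x00, 0xF0, 16));  // prints -32768
}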
} // namespace microphone
} // namespace esphome
