1
0
mirror of https://github.com/esphome/esphome.git synced 2025-10-30 14:43:51 +00:00

Merge branch 'min_filter_ring_buffer' into integration

This commit is contained in:
J. Nick Koston
2025-10-15 22:13:26 -10:00
105 changed files with 3136 additions and 688 deletions

View File

@@ -1,5 +1,5 @@
substitutions:
irq0_pin: GPIO13
irq0_pin: GPIO0
irq1_pin: GPIO15
reset_pin: GPIO16

View File

@@ -4,10 +4,13 @@ sensor:
irq_pin: ${irq_pin}
voltage:
name: ADE7953 Voltage
id: ade7953_i2c_voltage
current_a:
name: ADE7953 Current A
id: ade7953_i2c_current_a
current_b:
name: ADE7953 Current B
id: ade7953_i2c_current_b
power_factor_a:
name: ADE7953 Power Factor A
power_factor_b:

View File

@@ -4,13 +4,13 @@ sensor:
irq_pin: ${irq_pin}
voltage:
name: ADE7953 Voltage
id: ade7953_voltage
id: ade7953_spi_voltage
current_a:
name: ADE7953 Current A
id: ade7953_current_a
id: ade7953_spi_current_a
current_b:
name: ADE7953 Current B
id: ade7953_current_b
id: ade7953_spi_current_b
power_factor_a:
name: ADE7953 Power Factor A
power_factor_b:

View File

@@ -1,13 +1,16 @@
as3935_i2c:
id: as3935_i2c_id
i2c_id: i2c_bus
irq_pin: ${irq_pin}
binary_sensor:
- platform: as3935
as3935_id: as3935_i2c_id
name: Storm Alert
sensor:
- platform: as3935
as3935_id: as3935_i2c_id
lightning_energy:
name: Lightning Energy
distance:

View File

@@ -1,13 +1,16 @@
as3935_spi:
id: as3935_spi_id
cs_pin: ${cs_pin}
irq_pin: ${irq_pin}
binary_sensor:
- platform: as3935
as3935_id: as3935_spi_id
name: Storm Alert
sensor:
- platform: as3935
as3935_id: as3935_spi_id
lightning_energy:
name: Lightning Energy
distance:

View File

@@ -1,7 +1,7 @@
display:
- platform: ssd1306_i2c
i2c_id: i2c_bus
id: ssd1306_display
id: ssd1306_i2c_display
model: SSD1306_128X64
reset_pin: 19
pages:
@@ -13,6 +13,6 @@ touchscreen:
- platform: axs15231
i2c_id: i2c_bus
id: axs15231_touchscreen
display: ssd1306_display
display: ssd1306_i2c_display
interrupt_pin: 20
reset_pin: 18

View File

@@ -3,12 +3,12 @@ sensor:
i2c_id: i2c_bus
address: 0x76
temperature:
id: bme280_temperature
id: bme280_i2c_temperature
name: BME280 Temperature
humidity:
id: bme280_humidity
id: bme280_i2c_humidity
name: BME280 Humidity
pressure:
id: bme280_pressure
id: bme280_i2c_pressure
name: BME280 Pressure
update_interval: 15s

View File

@@ -2,12 +2,12 @@ sensor:
- platform: bme280_spi
cs_pin: ${cs_pin}
temperature:
id: bme280_temperature
id: bme280_spi_temperature
name: BME280 Temperature
humidity:
id: bme280_humidity
id: bme280_spi_humidity
name: BME280 Humidity
pressure:
id: bme280_pressure
id: bme280_spi_pressure
name: BME280 Pressure
update_interval: 15s

View File

@@ -3,10 +3,10 @@ sensor:
i2c_id: i2c_bus
address: 0x77
temperature:
id: bmp280_temperature
id: bmp280_i2c_temperature
name: Outside Temperature
pressure:
name: Outside Pressure
id: bmp280_pressure
id: bmp280_i2c_pressure
iir_filter: 16x
update_interval: 15s

View File

@@ -2,10 +2,10 @@ sensor:
- platform: bmp280_spi
cs_pin: ${cs_pin}
temperature:
id: bmp280_temperature
id: bmp280_spi_temperature
name: Outside Temperature
pressure:
name: Outside Pressure
id: bmp280_pressure
id: bmp280_spi_pressure
iir_filter: 16x
update_interval: 15s

View File

@@ -3,8 +3,10 @@ sensor:
i2c_id: i2c_bus
address: 0x77
temperature:
id: bmp3xx_i2c_temperature
name: BMP Temperature
oversampling: 16x
pressure:
id: bmp3xx_i2c_pressure
name: BMP Pressure
iir_filter: 2X

View File

@@ -2,8 +2,10 @@ sensor:
- platform: bmp3xx_spi
cs_pin: ${cs_pin}
temperature:
id: bmp3xx_spi_temperature
name: BMP Temperature
oversampling: 16x
pressure:
id: bmp3xx_spi_pressure
name: BMP Pressure
iir_filter: 2X

View File

@@ -1,4 +1,4 @@
packages:
camera: !include ../../test_build_components/common/camera/esp32-idf.yaml
i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
packages:
camera: !include ../../test_build_components/common/camera/esp32-idf.yaml
i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -4,6 +4,7 @@ packages:
display:
- platform: ili9xxx
spi_id: spi_bus
id: ili9xxx_display
model: GC9A01A
invert_colors: True
@@ -16,5 +17,6 @@ display:
touchscreen:
- platform: chsc6x
i2c_id: i2c_bus
display: ili9xxx_display
interrupt_pin: 20

View File

@@ -1,7 +1,7 @@
display:
- platform: ssd1306_i2c
i2c_id: i2c_bus
id: ssd1306_display
id: ssd1306_i2c_display
model: SSD1306_128X64
reset_pin: ${display_reset_pin}
pages:
@@ -15,7 +15,7 @@ touchscreen:
id: ektf2232_touchscreen
interrupt_pin: ${interrupt_pin}
reset_pin: ${touch_reset_pin}
display: ssd1306_display
display: ssd1306_i2c_display
on_touch:
- logger.log:
format: Touch at (%d, %d)

View File

@@ -3,8 +3,11 @@ sensor:
i2c_id: i2c_bus
address: 0x53
eco2:
id: ens160_i2c_eco2
name: "ENS160 eCO2"
tvoc:
id: ens160_i2c_tvoc
name: "ENS160 Total Volatile Organic Compounds"
aqi:
id: ens160_i2c_aqi
name: "ENS160 Air Quality Index"

View File

@@ -2,8 +2,11 @@ sensor:
- platform: ens160_spi
cs_pin: ${cs_pin}
eco2:
id: ens160_spi_eco2
name: "ENS160 eCO2"
tvoc:
id: ens160_spi_tvoc
name: "ENS160 Total Volatile Organic Compounds"
aqi:
id: ens160_spi_aqi
name: "ENS160 Air Quality Index"

View File

@@ -1,4 +1,4 @@
packages:
camera: !include ../../test_build_components/common/camera/esp32-idf.yaml
i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,4 @@
packages:
camera: !include ../../test_build_components/common/camera/esp32-idf.yaml
i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -49,6 +49,7 @@ font:
display:
- platform: ssd1306_i2c
i2c_id: i2c_bus
id: ssd1306_display
model: SSD1306_128X64
reset_pin: ${display_reset_pin}

View File

@@ -1,5 +1,5 @@
substitutions:
interrupt_pin: GPIO12
interrupt_pin: GPIO0
reset_pin: GPIO16
packages:

View File

@@ -11,6 +11,7 @@ graph:
display:
- platform: ssd1306_i2c
i2c_id: i2c_bus
id: ssd1306_display
model: SSD1306_128X64
reset_pin: ${reset_pin}

View File

@@ -1,6 +1,6 @@
display:
- platform: ssd1306_i2c
id: ssd1306_display
id: ssd1306_i2c_display
model: SSD1306_128X64
reset_pin: ${reset_pin}
pages:
@@ -36,7 +36,7 @@ switch:
graphical_display_menu:
id: test_graphical_display_menu
display: ssd1306_display
display: ssd1306_i2c_display
font: roboto
active: false
mode: rotary

View File

@@ -1,7 +1,7 @@
display:
- platform: ssd1306_i2c
i2c_id: i2c_bus
id: ssd1306_display
id: ssd1306_i2c_display
model: SSD1306_128X64
reset_pin: ${display_reset_pin}
pages:
@@ -13,7 +13,7 @@ touchscreen:
- platform: gt911
i2c_id: i2c_bus
id: gt911_touchscreen
display: ssd1306_display
display: ssd1306_i2c_display
interrupt_pin: ${interrupt_pin}
reset_pin: ${reset_pin}

View File

@@ -1,5 +1,5 @@
substitutions:
clk_pin: GPIO4
dout_pin: GPIO5
clk_pin: GPIO0
dout_pin: GPIO2
<<: !include common.yaml

View File

@@ -7,9 +7,21 @@ sensor:
max_current: 40 A
adc_range: 1
temperature_coefficient: 50
shunt_voltage: "INA2xx Shunt Voltage"
bus_voltage: "INA2xx Bus Voltage"
current: "INA2xx Current"
power: "INA2xx Power"
energy: "INA2xx Energy"
charge: "INA2xx Charge"
shunt_voltage:
id: ina2xx_i2c_shunt_voltage
name: "INA2xx Shunt Voltage"
bus_voltage:
id: ina2xx_i2c_bus_voltage
name: "INA2xx Bus Voltage"
current:
id: ina2xx_i2c_current
name: "INA2xx Current"
power:
id: ina2xx_i2c_power
name: "INA2xx Power"
energy:
id: ina2xx_i2c_energy
name: "INA2xx Energy"
charge:
id: ina2xx_i2c_charge
name: "INA2xx Charge"

View File

@@ -6,9 +6,21 @@ sensor:
max_current: 40 A
adc_range: 1
temperature_coefficient: 50
shunt_voltage: "INA2xx Shunt Voltage"
bus_voltage: "INA2xx Bus Voltage"
current: "INA2xx Current"
power: "INA2xx Power"
energy: "INA2xx Energy"
charge: "INA2xx Charge"
shunt_voltage:
id: ina2xx_spi_shunt_voltage
name: "INA2xx Shunt Voltage"
bus_voltage:
id: ina2xx_spi_bus_voltage
name: "INA2xx Bus Voltage"
current:
id: ina2xx_spi_current
name: "INA2xx Current"
power:
id: ina2xx_spi_power
name: "INA2xx Power"
energy:
id: ina2xx_spi_energy
name: "INA2xx Energy"
charge:
id: ina2xx_spi_charge
name: "INA2xx Charge"

View File

@@ -1,7 +1,7 @@
display:
- platform: ssd1306_i2c
i2c_id: i2c_bus
id: ssd1306_display
id: ssd1306_i2c_display
model: SSD1306_128X64
reset_pin: ${reset_pin}
pages:
@@ -14,7 +14,7 @@ touchscreen:
i2c_id: i2c_bus
id: lilygo_touchscreen
interrupt_pin: ${interrupt_pin}
display: ssd1306_display
display: ssd1306_i2c_display
on_touch:
- logger.log:
format: Touch at (%d, %d)

View File

@@ -1,9 +1,9 @@
pn532_i2c:
i2c_id: i2c_bus
id: pn532_nfcc
id: pn532_nfcc_i2c
binary_sensor:
- platform: pn532
pn532_id: pn532_nfcc
pn532_id: pn532_nfcc_i2c
name: PN532 NFC Tag
uid: 74-10-37-94

View File

@@ -1,9 +1,9 @@
pn532_spi:
id: pn532_nfcc
id: pn532_nfcc_spi
cs_pin: ${cs_pin}
binary_sensor:
- platform: pn532
pn532_id: pn532_nfcc
pn532_id: pn532_nfcc_spi
name: PN532 NFC Tag
uid: 74-10-37-94

View File

@@ -1,23 +1,23 @@
esphome:
on_boot:
then:
- tag.set_clean_mode: nfcc_pn7160
- tag.set_format_mode: nfcc_pn7160
- tag.set_read_mode: nfcc_pn7160
- tag.set_clean_mode: nfcc_pn7160_i2c
- tag.set_format_mode: nfcc_pn7160_i2c
- tag.set_read_mode: nfcc_pn7160_i2c
- tag.set_write_message:
message: https://www.home-assistant.io/tag/pulse
include_android_app_record: false
- tag.set_write_mode: nfcc_pn7160
- tag.set_write_mode: nfcc_pn7160_i2c
- tag.set_emulation_message:
message: https://www.home-assistant.io/tag/pulse
include_android_app_record: false
- tag.emulation_off: nfcc_pn7160
- tag.emulation_on: nfcc_pn7160
- tag.polling_off: nfcc_pn7160
- tag.polling_on: nfcc_pn7160
- tag.emulation_off: nfcc_pn7160_i2c
- tag.emulation_on: nfcc_pn7160_i2c
- tag.polling_off: nfcc_pn7160_i2c
- tag.polling_on: nfcc_pn7160_i2c
pn7150_i2c:
id: nfcc_pn7160
id: nfcc_pn7160_i2c
i2c_id: i2c_bus
irq_pin: ${irq_pin}
ven_pin: ${ven_pin}

View File

@@ -1,23 +1,23 @@
esphome:
on_boot:
then:
- tag.set_clean_mode: nfcc_pn7160
- tag.set_format_mode: nfcc_pn7160
- tag.set_read_mode: nfcc_pn7160
- tag.set_clean_mode: nfcc_pn7160_spi
- tag.set_format_mode: nfcc_pn7160_spi
- tag.set_read_mode: nfcc_pn7160_spi
- tag.set_write_message:
message: https://www.home-assistant.io/tag/pulse
include_android_app_record: false
- tag.set_write_mode: nfcc_pn7160
- tag.set_write_mode: nfcc_pn7160_spi
- tag.set_emulation_message:
message: https://www.home-assistant.io/tag/pulse
include_android_app_record: false
- tag.emulation_off: nfcc_pn7160
- tag.emulation_on: nfcc_pn7160
- tag.polling_off: nfcc_pn7160
- tag.polling_on: nfcc_pn7160
- tag.emulation_off: nfcc_pn7160_spi
- tag.emulation_on: nfcc_pn7160_spi
- tag.polling_off: nfcc_pn7160_spi
- tag.polling_on: nfcc_pn7160_spi
pn7160_spi:
id: nfcc_pn7160
id: nfcc_pn7160_spi
cs_pin: ${cs_pin}
irq_pin: ${irq_pin}
ven_pin: ${ven_pin}

View File

@@ -1,5 +1,5 @@
rc522_i2c:
- id: rc522_nfcc
- id: rc522_nfcc_i2c
i2c_id: i2c_bus
update_interval: 1s
on_tag:
@@ -8,6 +8,6 @@ rc522_i2c:
binary_sensor:
- platform: rc522
rc522_id: rc522_nfcc
rc522_id: rc522_nfcc_i2c
name: RC522 NFC Tag
uid: 74-10-37-94

View File

@@ -1,9 +1,9 @@
rc522_spi:
id: rc522_nfcc
id: rc522_nfcc_spi
cs_pin: ${cs_pin}
binary_sensor:
- platform: rc522
rc522_id: rc522_nfcc
name: PN532 NFC Tag
rc522_id: rc522_nfcc_spi
name: RC522 NFC Tag
uid: 74-10-37-94

View File

@@ -1,7 +1,7 @@
substitutions:
tx_pin: GPIO0
rx_pin: GPIO2
flow_control_pin: GPIO4
flow_control_pin: GPIO15
packages:
modbus: !include ../../test_build_components/common/modbus/esp8266-ard.yaml

View File

@@ -2,8 +2,8 @@ packages:
spi: !include ../../test_build_components/common/spi/esp8266-ard.yaml
substitutions:
clock_pin: GPIO5
data_pin: GPIO4
clock_pin: GPIO15
data_pin: GPIO16
latch_pin1: GPIO2
oe_pin1: GPIO0
latch_pin2: GPIO3

View File

@@ -4,7 +4,7 @@ display:
model: SSD1306_128X64
reset_pin: ${reset_pin}
address: 0x3C
id: display1
id: ssd1306_i2c_display
contrast: 60%
pages:
- id: ssd1306_i2c_page1

View File

@@ -1,5 +1,6 @@
display:
- platform: ssd1306_spi
id: ssd1306_spi_display
model: SSD1306 128x64
cs_pin: ${cs_pin}
dc_pin: ${dc_pin}

View File

@@ -4,7 +4,7 @@ display:
model: SSD1327_128x128
reset_pin: ${reset_pin}
address: 0x3C
id: display1
id: ssd1327_i2c_display
pages:
- id: ssd1327_i2c_page1
lambda: |-

View File

@@ -1,5 +1,6 @@
display:
- platform: ssd1327_spi
id: ssd1327_spi_display
model: SSD1327 128x128
cs_pin: ${cs_pin}
dc_pin: ${dc_pin}

View File

@@ -3,7 +3,7 @@ display:
i2c_id: i2c_bus
reset_pin: ${reset_pin}
address: 0x3C
id: display1
id: st7567_i2c_display
pages:
- id: st7567_i2c_page1
lambda: |-

View File

@@ -1,5 +1,6 @@
display:
- platform: st7567_spi
id: st7567_spi_display
cs_pin: ${cs_pin}
dc_pin: ${dc_pin}
reset_pin: ${reset_pin}

View File

@@ -6,7 +6,8 @@ udp:
addresses: ["239.0.60.53"]
time:
platform: host
- platform: host
id: host_time
syslog:
port: 514

View File

@@ -1,7 +1,7 @@
display:
- platform: ssd1306_i2c
i2c_id: i2c_bus
id: ssd1306_display
id: ssd1306_i2c_display
model: SSD1306_128X64
reset_pin: ${disp_reset_pin}
pages:
@@ -13,7 +13,7 @@ touchscreen:
- platform: tt21100
i2c_id: i2c_bus
id: tt21100_touchscreen
display: ssd1306_display
display: ssd1306_i2c_display
interrupt_pin: ${interrupt_pin}
reset_pin: ${reset_pin}

View File

@@ -1,4 +1,5 @@
packages:
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -4,5 +4,6 @@ substitutions:
packages:
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml
<<: !include common.yaml

View File

@@ -1,20 +1,20 @@
wk2132_spi:
- id: wk2132_spi_id
- id: wk2132_spi_bridge
cs_pin: ${cs_pin}
crystal: 11059200
data_rate: 1MHz
uart:
- id: wk2132_spi_id0
- id: wk2132_spi_uart0
channel: 0
baud_rate: 115200
stop_bits: 1
parity: none
- id: wk2132_spi_id1
- id: wk2132_spi_uart1
channel: 1
baud_rate: 9600
# Ensures a sensor doesn't break validation
sensor:
- platform: a02yyuw
uart_id: wk2132_spi_id1
uart_id: wk2132_spi_uart1
id: distance_sensor

View File

@@ -3,5 +3,6 @@ substitutions:
packages:
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -6,5 +6,6 @@ substitutions:
packages:
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,5 @@
packages:
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -4,5 +4,6 @@ substitutions:
packages:
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
<<: !include common.yaml

View File

@@ -3,5 +3,6 @@ substitutions:
packages:
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -6,5 +6,6 @@ substitutions:
packages:
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,5 @@
packages:
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -4,5 +4,6 @@ substitutions:
packages:
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
<<: !include common.yaml

View File

@@ -1,28 +1,28 @@
wk2204_spi:
- id: wk2204_spi_id
- id: wk2204_spi_bridge
cs_pin: ${cs_pin}
crystal: 11059200
data_rate: 1MHz
uart:
- id: wk2204_spi_id0
- id: wk2204_spi_uart0
channel: 0
baud_rate: 115200
stop_bits: 1
parity: none
- id: wk2204_spi_id1
- id: wk2204_spi_uart1
channel: 1
baud_rate: 921600
- id: wk2204_spi_id2
- id: wk2204_spi_uart2
channel: 2
baud_rate: 115200
stop_bits: 1
parity: none
- id: wk2204_spi_id3
- id: wk2204_spi_uart3
channel: 3
baud_rate: 9600
# Ensures a sensor doesn't break validation
sensor:
- platform: a02yyuw
uart_id: wk2204_spi_id3
uart_id: wk2204_spi_uart3
id: distance_sensor

View File

@@ -3,5 +3,6 @@ substitutions:
packages:
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -6,5 +6,6 @@ substitutions:
packages:
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +1,5 @@
packages:
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -4,5 +4,6 @@ substitutions:
packages:
i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
<<: !include common.yaml

View File

@@ -3,5 +3,6 @@ substitutions:
packages:
spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -6,5 +6,6 @@ substitutions:
packages:
spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml
uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml
<<: !include common.yaml

View File

@@ -0,0 +1,58 @@
esphome:
name: test-batch-window-filters
host:
api:
batch_delay: 0ms # Disable batching to receive all state updates
logger:
level: DEBUG
# Template sensor that we'll use to publish values
sensor:
- platform: template
name: "Source Sensor"
id: source_sensor
accuracy_decimals: 2
# Batch window filters (window_size == send_every) - use streaming filters
- platform: copy
source_id: source_sensor
name: "Min Sensor"
id: min_sensor
filters:
- min:
window_size: 5
send_every: 5
send_first_at: 1
- platform: copy
source_id: source_sensor
name: "Max Sensor"
id: max_sensor
filters:
- max:
window_size: 5
send_every: 5
send_first_at: 1
- platform: copy
source_id: source_sensor
name: "Moving Avg Sensor"
id: moving_avg_sensor
filters:
- sliding_window_moving_average:
window_size: 5
send_every: 5
send_first_at: 1
# Button to trigger publishing test values
button:
- platform: template
name: "Publish Values Button"
id: publish_button
on_press:
- lambda: |-
// Publish 10 values: 1.0, 2.0, ..., 10.0
for (int i = 1; i <= 10; i++) {
id(source_sensor).publish_state(float(i));
}

View File

@@ -0,0 +1,84 @@
esphome:
name: test-nan-handling
host:
api:
batch_delay: 0ms # Disable batching to receive all state updates
logger:
level: DEBUG
sensor:
- platform: template
name: "Source NaN Sensor"
id: source_nan_sensor
accuracy_decimals: 2
- platform: copy
source_id: source_nan_sensor
name: "Min NaN Sensor"
id: min_nan_sensor
filters:
- min:
window_size: 5
send_every: 5
send_first_at: 1
- platform: copy
source_id: source_nan_sensor
name: "Max NaN Sensor"
id: max_nan_sensor
filters:
- max:
window_size: 5
send_every: 5
send_first_at: 1
script:
- id: publish_nan_values_script
then:
- sensor.template.publish:
id: source_nan_sensor
state: 10.0
- delay: 20ms
- sensor.template.publish:
id: source_nan_sensor
state: !lambda 'return NAN;'
- delay: 20ms
- sensor.template.publish:
id: source_nan_sensor
state: 5.0
- delay: 20ms
- sensor.template.publish:
id: source_nan_sensor
state: !lambda 'return NAN;'
- delay: 20ms
- sensor.template.publish:
id: source_nan_sensor
state: 15.0
- delay: 20ms
- sensor.template.publish:
id: source_nan_sensor
state: 8.0
- delay: 20ms
- sensor.template.publish:
id: source_nan_sensor
state: !lambda 'return NAN;'
- delay: 20ms
- sensor.template.publish:
id: source_nan_sensor
state: 12.0
- delay: 20ms
- sensor.template.publish:
id: source_nan_sensor
state: 3.0
- delay: 20ms
- sensor.template.publish:
id: source_nan_sensor
state: !lambda 'return NAN;'
button:
- platform: template
name: "Publish NaN Values Button"
id: publish_nan_button
on_press:
- script.execute: publish_nan_values_script

View File

@@ -0,0 +1,115 @@
esphome:
name: test-sliding-window-filters
host:
api:
batch_delay: 0ms # Disable batching to receive all state updates
logger:
level: DEBUG
# Template sensor that we'll use to publish values
sensor:
- platform: template
name: "Source Sensor"
id: source_sensor
accuracy_decimals: 2
# ACTUAL sliding window filters (window_size != send_every) - use ring buffers
# Window of 5, send every 2 values
- platform: copy
source_id: source_sensor
name: "Sliding Min Sensor"
id: sliding_min_sensor
filters:
- min:
window_size: 5
send_every: 2
send_first_at: 1
- platform: copy
source_id: source_sensor
name: "Sliding Max Sensor"
id: sliding_max_sensor
filters:
- max:
window_size: 5
send_every: 2
send_first_at: 1
- platform: copy
source_id: source_sensor
name: "Sliding Median Sensor"
id: sliding_median_sensor
filters:
- median:
window_size: 5
send_every: 2
send_first_at: 1
- platform: copy
source_id: source_sensor
name: "Sliding Moving Avg Sensor"
id: sliding_moving_avg_sensor
filters:
- sliding_window_moving_average:
window_size: 5
send_every: 2
send_first_at: 1
# Button to trigger publishing test values
script:
- id: publish_values_script
then:
# Publish 10 values: 1.0, 2.0, ..., 10.0
# With window_size=5, send_every=2, send_first_at=1:
# - Output at position 1: window=[1], min=1, max=1, median=1, avg=1
# - Output at position 3: window=[1,2,3], min=1, max=3, median=2, avg=2
# - Output at position 5: window=[1,2,3,4,5], min=1, max=5, median=3, avg=3
# - Output at position 7: window=[3,4,5,6,7], min=3, max=7, median=5, avg=5
# - Output at position 9: window=[5,6,7,8,9], min=5, max=9, median=7, avg=7
- sensor.template.publish:
id: source_sensor
state: 1.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 2.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 3.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 4.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 5.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 6.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 7.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 8.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 9.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 10.0
button:
- platform: template
name: "Publish Values Button"
id: publish_button
on_press:
- script.execute: publish_values_script

View File

@@ -0,0 +1,72 @@
esphome:
name: test-ring-buffer-wraparound
host:
api:
batch_delay: 0ms # Disable batching to receive all state updates
logger:
level: DEBUG
sensor:
- platform: template
name: "Source Wraparound Sensor"
id: source_wraparound
accuracy_decimals: 2
- platform: copy
source_id: source_wraparound
name: "Wraparound Min Sensor"
id: wraparound_min_sensor
filters:
- min:
window_size: 3
send_every: 3
send_first_at: 1
script:
- id: publish_wraparound_script
then:
# Publish 9 values to test ring buffer wraparound
# Values: 10, 20, 30, 5, 25, 15, 40, 35, 20
- sensor.template.publish:
id: source_wraparound
state: 10.0
- delay: 20ms
- sensor.template.publish:
id: source_wraparound
state: 20.0
- delay: 20ms
- sensor.template.publish:
id: source_wraparound
state: 30.0
- delay: 20ms
- sensor.template.publish:
id: source_wraparound
state: 5.0
- delay: 20ms
- sensor.template.publish:
id: source_wraparound
state: 25.0
- delay: 20ms
- sensor.template.publish:
id: source_wraparound
state: 15.0
- delay: 20ms
- sensor.template.publish:
id: source_wraparound
state: 40.0
- delay: 20ms
- sensor.template.publish:
id: source_wraparound
state: 35.0
- delay: 20ms
- sensor.template.publish:
id: source_wraparound
state: 20.0
button:
- platform: template
name: "Publish Wraparound Button"
id: publish_wraparound_button
on_press:
- script.execute: publish_wraparound_script

View File

@@ -0,0 +1,123 @@
esphome:
name: test-sliding-window-filters
host:
api:
batch_delay: 0ms # Disable batching to receive all state updates
logger:
level: DEBUG
# Template sensor that we'll use to publish values
sensor:
- platform: template
name: "Source Sensor"
id: source_sensor
accuracy_decimals: 2
# Min filter sensor
- platform: copy
source_id: source_sensor
name: "Min Sensor"
id: min_sensor
filters:
- min:
window_size: 5
send_every: 5
send_first_at: 1
# Max filter sensor
- platform: copy
source_id: source_sensor
name: "Max Sensor"
id: max_sensor
filters:
- max:
window_size: 5
send_every: 5
send_first_at: 1
# Median filter sensor
- platform: copy
source_id: source_sensor
name: "Median Sensor"
id: median_sensor
filters:
- median:
window_size: 5
send_every: 5
send_first_at: 1
# Quantile filter sensor (90th percentile)
- platform: copy
source_id: source_sensor
name: "Quantile Sensor"
id: quantile_sensor
filters:
- quantile:
window_size: 5
send_every: 5
send_first_at: 1
quantile: 0.9
# Moving average filter sensor
- platform: copy
source_id: source_sensor
name: "Moving Avg Sensor"
id: moving_avg_sensor
filters:
- sliding_window_moving_average:
window_size: 5
send_every: 5
send_first_at: 1
# Script to publish values with delays
script:
- id: publish_values_script
then:
- sensor.template.publish:
id: source_sensor
state: 1.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 2.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 3.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 4.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 5.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 6.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 7.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 8.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 9.0
- delay: 20ms
- sensor.template.publish:
id: source_sensor
state: 10.0
# Button to trigger publishing test values
button:
- platform: template
name: "Publish Values Button"
id: publish_button
on_press:
- script.execute: publish_values_script

View File

@@ -0,0 +1,163 @@
"""Test sensor ring buffer filter functionality (window_size != send_every)."""
from __future__ import annotations
import asyncio
from aioesphomeapi import EntityInfo, EntityState, SensorState
import pytest
from .types import APIClientConnectedFactory, RunCompiledFunction
def build_key_to_sensor_mapping(
entities: list[EntityInfo], sensor_names: list[str]
) -> dict[int, str]:
"""Build a mapping from entity keys to sensor names.
Args:
entities: List of entity info objects from the API
sensor_names: List of sensor names to search for in object_ids
Returns:
Dictionary mapping entity keys to sensor names
"""
key_to_sensor: dict[int, str] = {}
for entity in entities:
obj_id = entity.object_id.lower()
for sensor_name in sensor_names:
if sensor_name in obj_id:
key_to_sensor[entity.key] = sensor_name
break
return key_to_sensor
@pytest.mark.asyncio
async def test_sensor_filters_ring_buffer(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test that ring buffer filters (window_size != send_every) work correctly.

    The companion YAML publishes values 1.0..10.0 through four copy sensors
    whose filters use window_size=5, send_every=2, send_first_at=1, so each
    filtered sensor is expected to emit exactly 5 outputs (at input positions
    1, 3, 5, 7 and 9).
    """
    loop = asyncio.get_running_loop()
    # Track state changes for each sensor, in arrival order.
    sensor_states: dict[str, list[float]] = {
        "sliding_min": [],
        "sliding_max": [],
        "sliding_median": [],
        "sliding_moving_avg": [],
    }
    # Futures to track when we receive expected values.
    all_updates_received = loop.create_future()

    def on_state(state: EntityState) -> None:
        """Track sensor state updates."""
        if not isinstance(state, SensorState):
            return
        # Skip NaN values (initial states).
        if state.missing_state:
            return
        # Get the sensor name from the key mapping.
        # NOTE: key_to_sensor is a closure over the variable assigned below,
        # before subscribe_states() is called, so it is always bound here.
        sensor_name = key_to_sensor.get(state.key)
        if not sensor_name or sensor_name not in sensor_states:
            return
        sensor_states[sensor_name].append(state.state)
        # Check if we've received enough updates from all sensors.
        # With send_every=2, send_first_at=1, we expect 5 outputs per sensor.
        if (
            len(sensor_states["sliding_min"]) >= 5
            and len(sensor_states["sliding_max"]) >= 5
            and len(sensor_states["sliding_median"]) >= 5
            and len(sensor_states["sliding_moving_avg"]) >= 5
            and not all_updates_received.done()
        ):
            all_updates_received.set_result(True)

    async with (
        run_compiled(yaml_config),
        api_client_connected() as client,
    ):
        # Get entities first to build key mapping.
        entities, services = await client.list_entities_services()
        # Build key-to-sensor mapping.
        key_to_sensor = build_key_to_sensor_mapping(
            entities,
            [
                "sliding_min",
                "sliding_max",
                "sliding_median",
                "sliding_moving_avg",
            ],
        )
        # Subscribe to state changes AFTER building mapping, so on_state
        # never observes an unbound key_to_sensor.
        client.subscribe_states(on_state)
        # Find the publish button that triggers the scripted value sequence.
        publish_button = next(
            (e for e in entities if "publish_values_button" in e.object_id.lower()),
            None,
        )
        assert publish_button is not None, "Publish Values Button not found"
        # Press the button to publish test values.
        client.button_command(publish_button.key)
        # Wait for all sensors to receive their values.
        try:
            await asyncio.wait_for(all_updates_received, timeout=10.0)
        except TimeoutError:
            # Provide detailed failure info showing what actually arrived.
            pytest.fail(
                f"Timeout waiting for updates. Received states:\n"
                f"  min: {sensor_states['sliding_min']}\n"
                f"  max: {sensor_states['sliding_max']}\n"
                f"  median: {sensor_states['sliding_median']}\n"
                f"  moving_avg: {sensor_states['sliding_moving_avg']}"
            )
        # Verify we got 5 outputs per sensor (positions 1, 3, 5, 7, 9).
        assert len(sensor_states["sliding_min"]) == 5, (
            f"Min sensor should have 5 values, got {len(sensor_states['sliding_min'])}: {sensor_states['sliding_min']}"
        )
        assert len(sensor_states["sliding_max"]) == 5
        assert len(sensor_states["sliding_median"]) == 5
        assert len(sensor_states["sliding_moving_avg"]) == 5
        # Verify the values at each output position.
        # Position 1: window=[1]
        assert abs(sensor_states["sliding_min"][0] - 1.0) < 0.01
        assert abs(sensor_states["sliding_max"][0] - 1.0) < 0.01
        assert abs(sensor_states["sliding_median"][0] - 1.0) < 0.01
        assert abs(sensor_states["sliding_moving_avg"][0] - 1.0) < 0.01
        # Position 3: window=[1,2,3]
        assert abs(sensor_states["sliding_min"][1] - 1.0) < 0.01
        assert abs(sensor_states["sliding_max"][1] - 3.0) < 0.01
        assert abs(sensor_states["sliding_median"][1] - 2.0) < 0.01
        assert abs(sensor_states["sliding_moving_avg"][1] - 2.0) < 0.01
        # Position 5: window=[1,2,3,4,5]
        assert abs(sensor_states["sliding_min"][2] - 1.0) < 0.01
        assert abs(sensor_states["sliding_max"][2] - 5.0) < 0.01
        assert abs(sensor_states["sliding_median"][2] - 3.0) < 0.01
        assert abs(sensor_states["sliding_moving_avg"][2] - 3.0) < 0.01
        # Position 7: window=[3,4,5,6,7] (ring buffer wrapped)
        assert abs(sensor_states["sliding_min"][3] - 3.0) < 0.01
        assert abs(sensor_states["sliding_max"][3] - 7.0) < 0.01
        assert abs(sensor_states["sliding_median"][3] - 5.0) < 0.01
        assert abs(sensor_states["sliding_moving_avg"][3] - 5.0) < 0.01
        # Position 9: window=[5,6,7,8,9] (ring buffer wrapped)
        assert abs(sensor_states["sliding_min"][4] - 5.0) < 0.01
        assert abs(sensor_states["sliding_max"][4] - 9.0) < 0.01
        assert abs(sensor_states["sliding_median"][4] - 7.0) < 0.01
        assert abs(sensor_states["sliding_moving_avg"][4] - 7.0) < 0.01

View File

@@ -0,0 +1,387 @@
"""Test sensor sliding window filter functionality."""
from __future__ import annotations
import asyncio
from aioesphomeapi import EntityInfo, EntityState, SensorState
import pytest
from .types import APIClientConnectedFactory, RunCompiledFunction
def build_key_to_sensor_mapping(
    entities: list[EntityInfo], sensor_names: list[str]
) -> dict[int, str]:
    """Map API entity keys to logical sensor names.
    Each entity's object_id (lowercased) is scanned for the first matching
    name in *sensor_names*; entities with no match are omitted.
    Args:
        entities: Entity info objects returned by the API
        sensor_names: Substrings to look for inside each object_id
    Returns:
        Dictionary mapping entity keys to sensor names
    """
    mapping: dict[int, str] = {}
    for info in entities:
        object_id = info.object_id.lower()
        # First name (in sensor_names order) contained in the object_id wins.
        match = next((name for name in sensor_names if name in object_id), None)
        if match is not None:
            mapping[info.key] = match
    return mapping
@pytest.mark.asyncio
async def test_sensor_filters_sliding_window(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test that sliding window filters (min, max, median, quantile, moving_average) work correctly."""
    loop = asyncio.get_running_loop()
    # Track state changes for each sensor
    sensor_states: dict[str, list[float]] = {
        "min_sensor": [],
        "max_sensor": [],
        "median_sensor": [],
        "quantile_sensor": [],
        "moving_avg_sensor": [],
    }
    # Futures to track when we receive expected values
    min_received = loop.create_future()
    max_received = loop.create_future()
    median_received = loop.create_future()
    quantile_received = loop.create_future()
    moving_avg_received = loop.create_future()
    # NOTE: on_state closes over key_to_sensor, which is assigned inside the
    # async-with block below BEFORE subscribe_states registers this callback.
    def on_state(state: EntityState) -> None:
        """Track sensor state updates."""
        if not isinstance(state, SensorState):
            return
        # Skip NaN values (initial states)
        if state.missing_state:
            return
        # Get the sensor name from the key mapping
        sensor_name = key_to_sensor.get(state.key)
        if not sensor_name or sensor_name not in sensor_states:
            return
        sensor_states[sensor_name].append(state.state)
        # Check if we received the expected final value.
        # Outputs occur at value 1 and value 6 (send_every=5); at value 6 the
        # sliding window holds [2, 3, 4, 5, 6], which these targets are based on.
        if (
            sensor_name == "min_sensor"
            and abs(state.state - 2.0) < 0.01
            and not min_received.done()
        ):
            min_received.set_result(True)
        elif (
            sensor_name == "max_sensor"
            and abs(state.state - 6.0) < 0.01
            and not max_received.done()
        ):
            max_received.set_result(True)
        elif (
            sensor_name == "median_sensor"
            and abs(state.state - 4.0) < 0.01
            and not median_received.done()
        ):
            # Median of [2, 3, 4, 5, 6] = 4
            median_received.set_result(True)
        elif (
            sensor_name == "quantile_sensor"
            and abs(state.state - 6.0) < 0.01
            and not quantile_received.done()
        ):
            # 90th percentile of [2, 3, 4, 5, 6] = 6
            quantile_received.set_result(True)
        elif (
            sensor_name == "moving_avg_sensor"
            and abs(state.state - 4.0) < 0.01
            and not moving_avg_received.done()
        ):
            # Average of [2, 3, 4, 5, 6] = 4
            moving_avg_received.set_result(True)
    async with (
        run_compiled(yaml_config),
        api_client_connected() as client,
    ):
        # Get entities first to build key mapping
        entities, services = await client.list_entities_services()
        # Build key-to-sensor mapping
        key_to_sensor = build_key_to_sensor_mapping(
            entities,
            [
                "min_sensor",
                "max_sensor",
                "median_sensor",
                "quantile_sensor",
                "moving_avg_sensor",
            ],
        )
        # Subscribe to state changes AFTER building mapping
        client.subscribe_states(on_state)
        # Find the publish button
        publish_button = next(
            (e for e in entities if "publish_values_button" in e.object_id.lower()),
            None,
        )
        assert publish_button is not None, "Publish Values Button not found"
        # Press the button to publish test values
        client.button_command(publish_button.key)
        # Wait for all sensors to receive their final values
        try:
            await asyncio.wait_for(
                asyncio.gather(
                    min_received,
                    max_received,
                    median_received,
                    quantile_received,
                    moving_avg_received,
                ),
                timeout=10.0,
            )
        except TimeoutError:
            # Provide detailed failure info
            pytest.fail(
                f"Timeout waiting for expected values. Received states:\n"
                f" min: {sensor_states['min_sensor']}\n"
                f" max: {sensor_states['max_sensor']}\n"
                f" median: {sensor_states['median_sensor']}\n"
                f" quantile: {sensor_states['quantile_sensor']}\n"
                f" moving_avg: {sensor_states['moving_avg_sensor']}"
            )
        # Verify we got the expected values
        # With batch_delay: 0ms, we should receive all outputs
        # Filters output at positions 1 and 6 (send_every: 5)
        assert len(sensor_states["min_sensor"]) == 2, (
            f"Min sensor should have 2 values, got {len(sensor_states['min_sensor'])}: {sensor_states['min_sensor']}"
        )
        assert len(sensor_states["max_sensor"]) == 2, (
            f"Max sensor should have 2 values, got {len(sensor_states['max_sensor'])}: {sensor_states['max_sensor']}"
        )
        assert len(sensor_states["median_sensor"]) == 2
        assert len(sensor_states["quantile_sensor"]) == 2
        assert len(sensor_states["moving_avg_sensor"]) == 2
        # Verify the first output (after 1 value: [1])
        assert abs(sensor_states["min_sensor"][0] - 1.0) < 0.01, (
            f"First min should be 1.0, got {sensor_states['min_sensor'][0]}"
        )
        assert abs(sensor_states["max_sensor"][0] - 1.0) < 0.01, (
            f"First max should be 1.0, got {sensor_states['max_sensor'][0]}"
        )
        assert abs(sensor_states["median_sensor"][0] - 1.0) < 0.01, (
            f"First median should be 1.0, got {sensor_states['median_sensor'][0]}"
        )
        assert abs(sensor_states["moving_avg_sensor"][0] - 1.0) < 0.01, (
            f"First moving avg should be 1.0, got {sensor_states['moving_avg_sensor'][0]}"
        )
        # Verify the second output (after 6 values, window has [2, 3, 4, 5, 6])
        assert abs(sensor_states["min_sensor"][1] - 2.0) < 0.01, (
            f"Second min should be 2.0, got {sensor_states['min_sensor'][1]}"
        )
        assert abs(sensor_states["max_sensor"][1] - 6.0) < 0.01, (
            f"Second max should be 6.0, got {sensor_states['max_sensor'][1]}"
        )
        assert abs(sensor_states["median_sensor"][1] - 4.0) < 0.01, (
            f"Second median should be 4.0, got {sensor_states['median_sensor'][1]}"
        )
        assert abs(sensor_states["moving_avg_sensor"][1] - 4.0) < 0.01, (
            f"Second moving avg should be 4.0, got {sensor_states['moving_avg_sensor'][1]}"
        )
@pytest.mark.asyncio
async def test_sensor_filters_nan_handling(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test that sliding window filters handle NaN values correctly."""
    loop = asyncio.get_running_loop()
    # Track states
    min_states: list[float] = []
    max_states: list[float] = []
    # Future to track completion
    filters_completed = loop.create_future()
    # NOTE: on_state closes over key_to_sensor, which is assigned below
    # before subscribe_states registers this callback.
    def on_state(state: EntityState) -> None:
        """Track sensor state updates."""
        if not isinstance(state, SensorState):
            return
        # Skip NaN values (initial states)
        if state.missing_state:
            return
        sensor_name = key_to_sensor.get(state.key)
        if sensor_name == "min_nan":
            min_states.append(state.state)
        elif sensor_name == "max_nan":
            max_states.append(state.state)
        # Check if both have received their final values
        # With batch_delay: 0ms, we should receive 2 outputs each
        if (
            len(min_states) >= 2
            and len(max_states) >= 2
            and not filters_completed.done()
        ):
            filters_completed.set_result(True)
    async with (
        run_compiled(yaml_config),
        api_client_connected() as client,
    ):
        # Get entities first to build key mapping
        entities, services = await client.list_entities_services()
        # Build key-to-sensor mapping
        key_to_sensor = build_key_to_sensor_mapping(entities, ["min_nan", "max_nan"])
        # Subscribe to state changes AFTER building mapping
        client.subscribe_states(on_state)
        # Find the publish button
        publish_button = next(
            (e for e in entities if "publish_nan_values_button" in e.object_id.lower()),
            None,
        )
        assert publish_button is not None, "Publish NaN Values Button not found"
        # Press the button
        client.button_command(publish_button.key)
        # Wait for filters to process
        try:
            await asyncio.wait_for(filters_completed, timeout=10.0)
        except TimeoutError:
            pytest.fail(
                f"Timeout waiting for NaN handling. Received:\n"
                f" min_states: {min_states}\n"
                f" max_states: {max_states}"
            )
        # Verify NaN values were ignored
        # With batch_delay: 0ms, we should receive both outputs (at positions 1 and 6)
        # Position 1: window=[10], min=10, max=10
        # Position 6: window=[NaN, 5, NaN, 15, 8], ignoring NaN -> [5, 15, 8], min=5, max=15
        assert len(min_states) == 2, (
            f"Should have 2 min states, got {len(min_states)}: {min_states}"
        )
        assert len(max_states) == 2, (
            f"Should have 2 max states, got {len(max_states)}: {max_states}"
        )
        # First output
        assert abs(min_states[0] - 10.0) < 0.01, (
            f"First min should be 10.0, got {min_states[0]}"
        )
        assert abs(max_states[0] - 10.0) < 0.01, (
            f"First max should be 10.0, got {max_states[0]}"
        )
        # Second output - verify NaN values were ignored
        assert abs(min_states[1] - 5.0) < 0.01, (
            f"Second min should ignore NaN and return 5.0, got {min_states[1]}"
        )
        assert abs(max_states[1] - 15.0) < 0.01, (
            f"Second max should ignore NaN and return 15.0, got {max_states[1]}"
        )
@pytest.mark.asyncio
async def test_sensor_filters_ring_buffer_wraparound(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test that ring buffer correctly wraps around when window fills up."""
    loop = asyncio.get_running_loop()
    min_states: list[float] = []
    test_completed = loop.create_future()
    # NOTE: on_state closes over key_to_sensor, assigned below before
    # subscribe_states registers this callback.
    def on_state(state: EntityState) -> None:
        """Track min sensor states."""
        if not isinstance(state, SensorState):
            return
        # Skip NaN values (initial states)
        if state.missing_state:
            return
        sensor_name = key_to_sensor.get(state.key)
        if sensor_name == "wraparound_min":
            min_states.append(state.state)
        # With batch_delay: 0ms, we should receive all 3 outputs
        if len(min_states) >= 3 and not test_completed.done():
            test_completed.set_result(True)
    async with (
        run_compiled(yaml_config),
        api_client_connected() as client,
    ):
        # Get entities first to build key mapping
        entities, services = await client.list_entities_services()
        # Build key-to-sensor mapping
        key_to_sensor = build_key_to_sensor_mapping(entities, ["wraparound_min"])
        # Subscribe to state changes AFTER building mapping
        client.subscribe_states(on_state)
        # Find the publish button
        publish_button = next(
            (e for e in entities if "publish_wraparound_button" in e.object_id.lower()),
            None,
        )
        assert publish_button is not None, "Publish Wraparound Button not found"
        # Press the button
        # Will publish: 10, 20, 30, 5, 25, 15, 40, 35, 20
        client.button_command(publish_button.key)
        # Wait for completion
        try:
            await asyncio.wait_for(test_completed, timeout=10.0)
        except TimeoutError:
            pytest.fail(f"Timeout waiting for wraparound test. Received: {min_states}")
        # Verify outputs
        # With window_size=3, send_every=3, we get outputs at positions 1, 4, 7
        # Position 1: window=[10], min=10
        # Position 4: window=[20, 30, 5], min=5
        # Position 7: window=[15, 40, 35], min=15
        # With batch_delay: 0ms, we should receive all 3 outputs
        assert len(min_states) == 3, (
            f"Should have 3 states, got {len(min_states)}: {min_states}"
        )
        assert abs(min_states[0] - 10.0) < 0.01, (
            f"First min should be 10.0, got {min_states[0]}"
        )
        assert abs(min_states[1] - 5.0) < 0.01, (
            f"Second min should be 5.0, got {min_states[1]}"
        )
        assert abs(min_states[2] - 15.0) < 0.01, (
            f"Third min should be 15.0, got {min_states[2]}"
        )

View File

@@ -3,9 +3,13 @@ esphome:
friendly_name: $component_name
esp32:
board: nodemcu-32s
# Use board with 8MB flash for testing large component groups
board: esp32-pico-devkitm-2
framework:
type: esp-idf
# Use custom partition table with larger app partitions (3MB each)
# Default IDF partitions only allow 1.75MB which is too small for grouped tests
partitions: ../partitions_testing.csv
logger:
level: VERY_VERBOSE

View File

@@ -1,3 +1,10 @@
# I2C bus for camera sensor
i2c:
- id: i2c_camera_bus
sda: 25
scl: 23
frequency: 400kHz
esp32_camera:
name: ESP32 Camera
data_pins:
@@ -15,9 +22,7 @@ esp32_camera:
external_clock:
pin: 27
frequency: 20MHz
i2c_pins:
sda: 25
scl: 23
i2c_id: i2c_camera_bus
reset_pin: 15
power_down_pin: 1
resolution: 640x480

View File

@@ -0,0 +1,11 @@
# Common configuration for 2-channel UART bridge/expander chips
# Used by components like wk2132 that create 2 UART channels
# Defines standardized UART IDs: uart_id_0, uart_id_1
substitutions:
# These will be overridden by component-specific values
uart_bridge_address: "0x70"
# Note: The actual UART instances are created by the bridge component
# This package just ensures all bridge components use the same ID naming convention
# so they can be grouped together without conflicts

View File

@@ -0,0 +1,11 @@
# Common configuration for 2-channel UART bridge/expander chips
# Used by components like wk2132 that create 2 UART channels
# Defines standardized UART IDs: uart_id_0, uart_id_1
substitutions:
# These will be overridden by component-specific values
uart_bridge_address: "0x70"
# Note: The actual UART instances are created by the bridge component
# This package just ensures all bridge components use the same ID naming convention
# so they can be grouped together without conflicts

View File

@@ -0,0 +1,11 @@
# Common configuration for 4-channel UART bridge/expander chips
# Used by components like wk2168, wk2204, wk2212 that create 4 UART channels
# Defines standardized UART IDs: uart_id_0, uart_id_1, uart_id_2, uart_id_3
substitutions:
# These will be overridden by component-specific values
uart_bridge_address: "0x70"
# Note: The actual UART instances are created by the bridge component
# This package just ensures all bridge components use the same ID naming convention
# so they can be grouped together without conflicts

View File

@@ -0,0 +1,11 @@
# Common configuration for 4-channel UART bridge/expander chips
# Used by components like wk2168, wk2204, wk2212 that create 4 UART channels
# Defines standardized UART IDs: uart_id_0, uart_id_1, uart_id_2, uart_id_3
substitutions:
# These will be overridden by component-specific values
uart_bridge_address: "0x70"
# Note: The actual UART instances are created by the bridge component
# This package just ensures all bridge components use the same ID naming convention
# so they can be grouped together without conflicts

View File

@@ -0,0 +1,10 @@
# ESP-IDF Partition Table for ESPHome Component Testing
# Single app partition to maximize space for large component group testing
# Fits in 4MB flash
# Name, Type, SubType, Offset, Size, Flags
nvs, data, nvs, 0x9000, 0x4000,
otadata, data, ota, , 0x2000,
phy_init, data, phy, , 0x1000,
factory, app, factory, 0x10000, 0x300000,
nvs_key, data, nvs_keys,, 0x1000,
coredump, data, coredump,, 0xEB000,
1 # ESP-IDF Partition Table for ESPHome Component Testing
2 # Single app partition to maximize space for large component group testing
3 # Fits in 4MB flash
4 # Name, Type, SubType, Offset, Size, Flags
5 nvs, data, nvs, 0x9000, 0x4000,
6 otadata, data, ota, , 0x2000,
7 phy_init, data, phy, , 0x1000,
8 factory, app, factory, 0x10000, 0x300000,
9 nvs_key, data, nvs_keys,, 0x1000,
10 coredump, data, coredump,, 0xEB000,

View File

@@ -96,6 +96,13 @@ def mock_run_git_command() -> Generator[Mock, None, None]:
yield mock
@pytest.fixture
def mock_subprocess_run() -> Generator[Mock, None, None]:
"""Mock subprocess.run for testing."""
with patch("subprocess.run") as mock:
yield mock
@pytest.fixture
def mock_get_idedata() -> Generator[Mock, None, None]:
"""Mock get_idedata for platformio_api."""

View File

@@ -1,16 +1,204 @@
"""Tests for git.py module."""
from datetime import datetime, timedelta
import hashlib
import os
from pathlib import Path
from typing import Any
from unittest.mock import Mock
import pytest
from esphome import git
import esphome.config_validation as cv
from esphome.core import CORE, TimePeriodSeconds
from esphome.git import GitCommandError
def _compute_repo_dir(url: str, ref: str | None, domain: str) -> Path:
    """Helper to compute the expected repo directory path using git module's logic."""
    # Delegate to the module under test so the expectation always tracks
    # its hashing scheme; the cache key is "<url>@<ref>".
    return git._compute_destination_path(f"{url}@{ref}", domain)
def _setup_old_repo(repo_dir: Path, days_old: int = 2) -> None:
"""Helper to set up a git repo directory structure with an old timestamp.
Args:
repo_dir: The repository directory path to create.
days_old: Number of days old to make the FETCH_HEAD file (default: 2).
"""
# Create repo directory
repo_dir.mkdir(parents=True)
git_dir = repo_dir / ".git"
git_dir.mkdir()
# Create FETCH_HEAD file with old timestamp
fetch_head = git_dir / "FETCH_HEAD"
fetch_head.write_text("test")
old_time = datetime.now() - timedelta(days=days_old)
fetch_head.touch()
os.utime(fetch_head, (old_time.timestamp(), old_time.timestamp()))
def _get_git_command_type(cmd: list[str]) -> str | None:
"""Helper to determine the type of git command from a command list.
Args:
cmd: The git command list (e.g., ["git", "rev-parse", "HEAD"]).
Returns:
The command type ("rev-parse", "stash", "fetch", "reset", "clone") or None.
"""
# Git commands are always in format ["git", "command", ...], so check index 1
if len(cmd) > 1:
return cmd[1]
return None
def test_run_git_command_success(tmp_path: Path) -> None:
    """Test that run_git_command returns output on success.
    NOTE(review): this test shells out to the real git binary (no mock),
    so it requires git to be installed on the test host.
    """
    # Create a simple git repo to test with
    repo_dir = tmp_path / "test_repo"
    repo_dir.mkdir()
    # Initialize a git repo
    result = git.run_git_command(["git", "init"], str(repo_dir))
    # git may print the init banner or nothing, depending on version/config
    assert "Initialized empty Git repository" in result or result == ""
    # Verify we can run a command and get output
    result = git.run_git_command(["git", "status", "--porcelain"], str(repo_dir))
    # Empty repo should have empty status
    assert isinstance(result, str)
def test_run_git_command_with_git_dir_isolation(
    tmp_path: Path, mock_subprocess_run: Mock
) -> None:
    """Test that git_dir parameter properly isolates git operations.
    Passing git_dir should make run_git_command export GIT_DIR and
    GIT_WORK_TREE in the subprocess environment so git operates on the
    intended repo regardless of the current working directory.
    """
    repo_dir = tmp_path / "test_repo"
    repo_dir.mkdir()
    git_dir = repo_dir / ".git"
    git_dir.mkdir()
    # Configure mock to return success
    mock_subprocess_run.return_value = Mock(
        returncode=0,
        stdout=b"test output",
        stderr=b"",
    )
    result = git.run_git_command(
        ["git", "rev-parse", "HEAD"],
        git_dir=repo_dir,
    )
    # Verify subprocess.run was called
    assert mock_subprocess_run.called
    call_args = mock_subprocess_run.call_args
    # Verify environment was set
    env = call_args[1]["env"]
    assert "GIT_DIR" in env
    assert "GIT_WORK_TREE" in env
    assert env["GIT_DIR"] == str(repo_dir / ".git")
    assert env["GIT_WORK_TREE"] == str(repo_dir)
    # stdout bytes from the mock come back as decoded text
    assert result == "test output"
def test_run_git_command_raises_git_not_installed_error(
    tmp_path: Path, mock_subprocess_run: Mock
) -> None:
    """Test that FileNotFoundError is converted to GitNotInstalledError."""
    from esphome.git import GitNotInstalledError
    # Simulate a host without a git binary: subprocess.run raises
    # FileNotFoundError when the executable cannot be located.
    mock_subprocess_run.side_effect = FileNotFoundError("git not found")
    target_dir = tmp_path / "test_repo"
    with pytest.raises(GitNotInstalledError, match="git is not installed"):
        git.run_git_command(["git", "status"], git_dir=target_dir)
def test_run_git_command_raises_git_command_error_on_failure(
    tmp_path: Path, mock_subprocess_run: Mock
) -> None:
    """Test that failed git commands raise GitCommandError."""
    # A non-zero return code from subprocess.run must surface as a
    # GitCommandError carrying the stderr text.
    mock_subprocess_run.return_value = Mock(
        stderr=b"fatal: not a git repository",
        stdout=b"",
        returncode=1,
    )
    failing_repo = tmp_path / "test_repo"
    with pytest.raises(GitCommandError, match="not a git repository"):
        git.run_git_command(["git", "status"], git_dir=failing_repo)
def test_run_git_command_strips_fatal_prefix(
    tmp_path: Path, mock_subprocess_run: Mock
) -> None:
    """Test that 'fatal: ' prefix is stripped from error messages."""
    repo_dir = tmp_path / "test_repo"
    # Configure mock to return error with "fatal: " prefix
    # (exit code 128 is the code git commonly uses for fatal errors)
    mock_subprocess_run.return_value = Mock(
        returncode=128,
        stdout=b"",
        stderr=b"fatal: repository not found\n",
    )
    with pytest.raises(GitCommandError) as exc_info:
        git.run_git_command(["git", "clone", "invalid-url"], git_dir=repo_dir)
    # Error message should NOT include "fatal: " prefix
    assert "fatal:" not in str(exc_info.value)
    assert "repository not found" in str(exc_info.value)
def test_run_git_command_without_git_dir(mock_subprocess_run: Mock) -> None:
    """Test that run_git_command works without git_dir (clone case)."""
    # Configure mock to return success
    mock_subprocess_run.return_value = Mock(
        returncode=0,
        stdout=b"Cloning into 'test_repo'...",
        stderr=b"",
    )
    result = git.run_git_command(["git", "clone", "https://github.com/test/repo"])
    # Verify subprocess.run was called
    assert mock_subprocess_run.called
    call_args = mock_subprocess_run.call_args
    # Verify environment does NOT have GIT_DIR or GIT_WORK_TREE set
    # (it should use the default environment or None)
    env = call_args[1].get("env")
    if env is not None:
        assert "GIT_DIR" not in env
        assert "GIT_WORK_TREE" not in env
    # Verify cwd is None (default)
    assert call_args[1].get("cwd") is None
    # The bytes stdout supplied by the mock is returned as decoded text
    assert result == "Cloning into 'test_repo'..."
def test_run_git_command_without_git_dir_raises_error(
    mock_subprocess_run: Mock,
) -> None:
    """Test that run_git_command without git_dir can still raise errors."""
    # Even on the no-git_dir path (clone), a failing command must raise.
    failure = Mock(
        returncode=128,
        stdout=b"",
        stderr=b"fatal: repository not found\n",
    )
    mock_subprocess_run.return_value = failure
    with pytest.raises(GitCommandError, match="repository not found"):
        git.run_git_command(["git", "clone", "https://invalid.url/repo.git"])
def test_clone_or_update_with_never_refresh(
@@ -20,16 +208,10 @@ def test_clone_or_update_with_never_refresh(
# Set up CORE.config_path so data_dir uses tmp_path
CORE.config_path = tmp_path / "test.yaml"
# Compute the expected repo directory path
url = "https://github.com/test/repo"
ref = None
key = f"{url}@{ref}"
domain = "test"
# Compute hash-based directory name (matching _compute_destination_path logic)
h = hashlib.new("sha256")
h.update(key.encode())
repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]
repo_dir = _compute_repo_dir(url, ref, domain)
# Create the git repo directory structure
repo_dir.mkdir(parents=True)
@@ -61,16 +243,10 @@ def test_clone_or_update_with_refresh_updates_old_repo(
# Set up CORE.config_path so data_dir uses tmp_path
CORE.config_path = tmp_path / "test.yaml"
# Compute the expected repo directory path
url = "https://github.com/test/repo"
ref = None
key = f"{url}@{ref}"
domain = "test"
# Compute hash-based directory name (matching _compute_destination_path logic)
h = hashlib.new("sha256")
h.update(key.encode())
repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]
repo_dir = _compute_repo_dir(url, ref, domain)
# Create the git repo directory structure
repo_dir.mkdir(parents=True)
@@ -115,16 +291,10 @@ def test_clone_or_update_with_refresh_skips_fresh_repo(
# Set up CORE.config_path so data_dir uses tmp_path
CORE.config_path = tmp_path / "test.yaml"
# Compute the expected repo directory path
url = "https://github.com/test/repo"
ref = None
key = f"{url}@{ref}"
domain = "test"
# Compute hash-based directory name (matching _compute_destination_path logic)
h = hashlib.new("sha256")
h.update(key.encode())
repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]
repo_dir = _compute_repo_dir(url, ref, domain)
# Create the git repo directory structure
repo_dir.mkdir(parents=True)
@@ -161,16 +331,10 @@ def test_clone_or_update_clones_missing_repo(
# Set up CORE.config_path so data_dir uses tmp_path
CORE.config_path = tmp_path / "test.yaml"
# Compute the expected repo directory path
url = "https://github.com/test/repo"
ref = None
key = f"{url}@{ref}"
domain = "test"
# Compute hash-based directory name (matching _compute_destination_path logic)
h = hashlib.new("sha256")
h.update(key.encode())
repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]
repo_dir = _compute_repo_dir(url, ref, domain)
# Create base directory but NOT the repo itself
base_dir = tmp_path / ".esphome" / domain
@@ -203,16 +367,10 @@ def test_clone_or_update_with_none_refresh_always_updates(
# Set up CORE.config_path so data_dir uses tmp_path
CORE.config_path = tmp_path / "test.yaml"
# Compute the expected repo directory path
url = "https://github.com/test/repo"
ref = None
key = f"{url}@{ref}"
domain = "test"
# Compute hash-based directory name (matching _compute_destination_path logic)
h = hashlib.new("sha256")
h.update(key.encode())
repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]
repo_dir = _compute_repo_dir(url, ref, domain)
# Create the git repo directory structure
repo_dir.mkdir(parents=True)
@@ -273,40 +431,20 @@ def test_clone_or_update_recovers_from_git_failures(
url = "https://github.com/test/repo"
ref = "main"
key = f"{url}@{ref}"
domain = "test"
repo_dir = _compute_repo_dir(url, ref, domain)
h = hashlib.new("sha256")
h.update(key.encode())
repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]
# Create repo directory
repo_dir.mkdir(parents=True)
git_dir = repo_dir / ".git"
git_dir.mkdir()
fetch_head = git_dir / "FETCH_HEAD"
fetch_head.write_text("test")
old_time = datetime.now() - timedelta(days=2)
fetch_head.touch()
os.utime(fetch_head, (old_time.timestamp(), old_time.timestamp()))
# Use helper to set up old repo
_setup_old_repo(repo_dir)
# Track command call counts to make first call fail, subsequent calls succeed
call_counts: dict[str, int] = {}
def git_command_side_effect(cmd: list[str], cwd: str | None = None) -> str:
def git_command_side_effect(
cmd: list[str], cwd: str | None = None, **kwargs: Any
) -> str:
# Determine which command this is
cmd_type = None
if "rev-parse" in cmd:
cmd_type = "rev-parse"
elif "stash" in cmd:
cmd_type = "stash"
elif "fetch" in cmd:
cmd_type = "fetch"
elif "reset" in cmd:
cmd_type = "reset"
elif "clone" in cmd:
cmd_type = "clone"
cmd_type = _get_git_command_type(cmd)
# Track call count for this command type
if cmd_type:
@@ -314,7 +452,7 @@ def test_clone_or_update_recovers_from_git_failures(
# Fail on first call to the specified command, succeed on subsequent calls
if cmd_type == fail_command and call_counts[cmd_type] == 1:
raise cv.Invalid(error_message)
raise GitCommandError(error_message)
# Default successful responses
if cmd_type == "rev-parse":
@@ -353,34 +491,25 @@ def test_clone_or_update_fails_when_recovery_also_fails(
url = "https://github.com/test/repo"
ref = "main"
key = f"{url}@{ref}"
domain = "test"
repo_dir = _compute_repo_dir(url, ref, domain)
h = hashlib.new("sha256")
h.update(key.encode())
repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8]
# Create repo directory
repo_dir.mkdir(parents=True)
git_dir = repo_dir / ".git"
git_dir.mkdir()
fetch_head = git_dir / "FETCH_HEAD"
fetch_head.write_text("test")
old_time = datetime.now() - timedelta(days=2)
fetch_head.touch()
os.utime(fetch_head, (old_time.timestamp(), old_time.timestamp()))
# Use helper to set up old repo
_setup_old_repo(repo_dir)
# Mock git command to fail on clone (simulating network failure during recovery)
def git_command_side_effect(cmd: list[str], cwd: str | None = None) -> str:
if "rev-parse" in cmd:
def git_command_side_effect(
cmd: list[str], cwd: str | None = None, **kwargs: Any
) -> str:
cmd_type = _get_git_command_type(cmd)
if cmd_type == "rev-parse":
# First time fails (broken repo)
raise cv.Invalid(
raise GitCommandError(
"ambiguous argument 'HEAD': unknown revision or path not in the working tree."
)
if "clone" in cmd:
if cmd_type == "clone":
# Clone also fails (recovery fails)
raise cv.Invalid("fatal: unable to access repository")
raise GitCommandError("fatal: unable to access repository")
return ""
mock_run_git_command.side_effect = git_command_side_effect
@@ -388,7 +517,7 @@ def test_clone_or_update_fails_when_recovery_also_fails(
refresh = TimePeriodSeconds(days=1)
# Should raise after one recovery attempt fails
with pytest.raises(cv.Invalid, match="fatal: unable to access repository"):
with pytest.raises(GitCommandError, match="fatal: unable to access repository"):
git.clone_or_update(
url=url,
ref=ref,
@@ -404,3 +533,141 @@ def test_clone_or_update_fails_when_recovery_also_fails(
# Should have tried rev-parse once (which failed and triggered recovery)
rev_parse_calls = [c for c in call_list if "rev-parse" in c[0][0]]
assert len(rev_parse_calls) == 1
def test_clone_or_update_recover_broken_flag_prevents_second_recovery(
    tmp_path: Path, mock_run_git_command: Mock
) -> None:
    """Test that _recover_broken=False prevents a second recovery attempt (tests the raise path)."""
    # Set up CORE.config_path so data_dir uses tmp_path
    CORE.config_path = tmp_path / "test.yaml"
    url = "https://github.com/test/repo"
    ref = "main"
    domain = "test"
    repo_dir = _compute_repo_dir(url, ref, domain)
    # Use helper to set up old repo
    _setup_old_repo(repo_dir)
    # Track fetch calls to differentiate between first (in clone) and second (in recovery update)
    call_counts: dict[str, int] = {}
    # Mock git command to fail on fetch during recovery's ref checkout
    def git_command_side_effect(
        cmd: list[str], cwd: str | None = None, **kwargs: Any
    ) -> str:
        cmd_type = _get_git_command_type(cmd)
        if cmd_type:
            call_counts[cmd_type] = call_counts.get(cmd_type, 0) + 1
        # First attempt: rev-parse fails (broken repo)
        if cmd_type == "rev-parse" and call_counts[cmd_type] == 1:
            raise GitCommandError(
                "ambiguous argument 'HEAD': unknown revision or path not in the working tree."
            )
        # Recovery: clone succeeds
        if cmd_type == "clone":
            return ""
        # Recovery: fetch for ref checkout fails
        # This happens in the clone path when ref is not None (line 80 in git.py)
        if cmd_type == "fetch" and call_counts[cmd_type] == 1:
            raise GitCommandError("fatal: couldn't find remote ref main")
        # Default success
        return "abc123" if cmd_type == "rev-parse" else ""
    mock_run_git_command.side_effect = git_command_side_effect
    refresh = TimePeriodSeconds(days=1)
    # Should raise on the fetch during recovery (when _recover_broken=False)
    # This tests the critical "if not _recover_broken: raise" path
    with pytest.raises(GitCommandError, match="fatal: couldn't find remote ref main"):
        git.clone_or_update(
            url=url,
            ref=ref,
            refresh=refresh,
            domain=domain,
        )
    # Verify the sequence of events
    call_list = mock_run_git_command.call_args_list
    # Should have: rev-parse (fail, triggers recovery), clone (success),
    # fetch (fail during ref checkout, raises because _recover_broken=False)
    rev_parse_calls = [c for c in call_list if "rev-parse" in c[0][0]]
    # Should have exactly one rev-parse call that failed
    assert len(rev_parse_calls) == 1
    clone_calls = [c for c in call_list if "clone" in c[0][0]]
    # Should have exactly one clone call (the recovery attempt)
    assert len(clone_calls) == 1
    fetch_calls = [c for c in call_list if "fetch" in c[0][0]]
    # Should have exactly one fetch call that failed (during ref checkout in recovery)
    assert len(fetch_calls) == 1
def test_clone_or_update_recover_broken_flag_prevents_infinite_loop(
    tmp_path: Path, mock_run_git_command: Mock
) -> None:
    """Test that _recover_broken=False prevents infinite recursion when repo persists.
    This exercises the critical "if not _recover_broken: raise" path: recovery
    deletes and re-clones the repo once; if the broken state survives deletion,
    the second failure must propagate instead of recursing forever.
    """
    # Import only the helper we need instead of the original mid-function
    # `import unittest.mock` + fully-qualified `unittest.mock.patch` usage.
    from unittest.mock import patch
    # Set up CORE.config_path so data_dir uses tmp_path
    CORE.config_path = tmp_path / "test.yaml"
    url = "https://github.com/test/repo"
    ref = "main"
    domain = "test"
    repo_dir = _compute_repo_dir(url, ref, domain)
    # Use helper to set up old repo
    _setup_old_repo(repo_dir)
    def mock_rmtree(path, *args, **kwargs):
        # Don't actually delete - simulates a failed deletion (permissions,
        # etc.) so the recursive call still sees the broken repo.
        pass
    # Mock git commands to always fail on stash
    def git_command_side_effect(
        cmd: list[str], cwd: str | None = None, **kwargs: Any
    ) -> str:
        cmd_type = _get_git_command_type(cmd)
        if cmd_type == "rev-parse":
            return "abc123"
        if cmd_type == "stash":
            # Always fails
            raise GitCommandError("fatal: unable to write new index file")
        return ""
    mock_run_git_command.side_effect = git_command_side_effect
    refresh = TimePeriodSeconds(days=1)
    # Should raise on the second attempt when _recover_broken=False,
    # hitting the "if not _recover_broken: raise" path.
    with (
        patch("esphome.git.shutil.rmtree", side_effect=mock_rmtree),
        pytest.raises(GitCommandError, match="fatal: unable to write new index file"),
    ):
        git.clone_or_update(
            url=url,
            ref=ref,
            refresh=refresh,
            domain=domain,
        )
    # Verify the sequence: stash fails twice (once triggering recovery, once raising)
    call_list = mock_run_git_command.call_args_list
    stash_calls = [c for c in call_list if "stash" in c[0][0]]
    # Should have exactly two stash calls
    assert len(stash_calls) == 2

View File

@@ -2,9 +2,12 @@ import glob
import logging
from pathlib import Path
from esphome import yaml_util
from esphome import config as config_module, yaml_util
from esphome.components import substitutions
from esphome.const import CONF_PACKAGES
from esphome.config_helpers import merge_config
from esphome.const import CONF_PACKAGES, CONF_SUBSTITUTIONS
from esphome.core import CORE
from esphome.util import OrderedDict
_LOGGER = logging.getLogger(__name__)
@@ -118,3 +121,200 @@ def test_substitutions_fixtures(fixture_path):
if DEV_MODE:
_LOGGER.error("Tests passed, but Dev mode is enabled.")
assert not DEV_MODE # make sure DEV_MODE is disabled after you are finished.
def test_substitutions_with_command_line_maintains_ordered_dict() -> None:
    """Test that substitutions remain an OrderedDict when command line substitutions are provided,
    and that move_to_end() can be called successfully.

    This is a regression test for https://github.com/esphome/esphome/issues/11182
    where the config would become a regular dict and fail when move_to_end() was called.
    """
    # Build an OrderedDict config containing a substitutions section.
    config = OrderedDict()
    config["esphome"] = {"name": "test"}
    config[CONF_SUBSTITUTIONS] = {"var1": "value1", "var2": "value2"}
    config["other_key"] = "other_value"

    # Command-line substitutions: var2 overridden, var3 added.
    cli_subs = {"var2": "override", "var3": "new_value"}

    substitutions.do_substitution_pass(config, cli_subs)

    # The container type must survive the substitution pass.
    assert isinstance(config, OrderedDict), "Config should remain an OrderedDict"

    # move_to_end(..., last=False) puts the substitutions section first.
    assert next(iter(config)) == CONF_SUBSTITUTIONS, "Substitutions should be first key"

    # Merge result: original kept, override applied, new key added.
    assert config[CONF_SUBSTITUTIONS]["var1"] == "value1"
    assert config[CONF_SUBSTITUTIONS]["var2"] == "override"
    assert config[CONF_SUBSTITUTIONS]["var3"] == "new_value"

    # The nested substitutions mapping itself stays an OrderedDict.
    assert isinstance(config[CONF_SUBSTITUTIONS], OrderedDict), (
        "Substitutions should be an OrderedDict"
    )
def test_substitutions_without_command_line_maintains_ordered_dict() -> None:
    """Test that substitutions work correctly without command line substitutions."""
    config = OrderedDict()
    config["esphome"] = {"name": "test"}
    config[CONF_SUBSTITUTIONS] = {"var1": "value1"}
    config["other_key"] = "other_value"

    # No command-line substitutions supplied.
    substitutions.do_substitution_pass(config, None)

    # Container type preserved and substitutions moved to the front.
    assert isinstance(config, OrderedDict), "Config should remain an OrderedDict"
    assert next(iter(config)) == CONF_SUBSTITUTIONS, "Substitutions should be first key"
def test_substitutions_after_merge_config_maintains_ordered_dict() -> None:
    """Test that substitutions work after merge_config (packages scenario).

    This is a regression test for https://github.com/esphome/esphome/issues/11182
    where using packages would cause config to become a regular dict, breaking move_to_end().
    """
    # Two OrderedDict configs, as produced when a package is merged in.
    base_config = OrderedDict()
    base_config["esphome"] = {"name": "base"}
    base_config[CONF_SUBSTITUTIONS] = {"var1": "value1"}

    package_config = OrderedDict()
    package_config["sensor"] = [{"platform": "template"}]
    package_config[CONF_SUBSTITUTIONS] = {"var2": "value2"}

    # Merge, simulating the package-merge code path.
    merged_config = merge_config(base_config, package_config)
    assert isinstance(merged_config, OrderedDict), (
        "Merged config should be an OrderedDict"
    )

    # The substitution pass must not raise AttributeError on move_to_end().
    substitutions.do_substitution_pass(merged_config, None)

    assert isinstance(merged_config, OrderedDict), (
        "Config should still be OrderedDict after substitution pass"
    )
    assert next(iter(merged_config)) == CONF_SUBSTITUTIONS, (
        "Substitutions should be first key"
    )
def test_validate_config_with_command_line_substitutions_maintains_ordered_dict(
    tmp_path,
) -> None:
    """Test that validate_config preserves OrderedDict when merging command-line substitutions.

    This tests the code path in config.py where result[CONF_SUBSTITUTIONS] is set
    using merge_dicts_ordered() with command-line substitutions provided.
    """
    # Minimal config that validate_config will accept.
    test_config = OrderedDict()
    test_config["esphome"] = {"name": "test_device", "platform": "ESP32"}
    test_config[CONF_SUBSTITUTIONS] = OrderedDict({"var1": "value1", "var2": "value2"})
    test_config["esp32"] = {"board": "esp32dev"}

    # Command-line substitutions: var2 overridden, var3 added.
    cli_subs = {"var2": "override", "var3": "new_value"}

    # validate_config consults CORE.config_path; point it at a real file.
    test_yaml = tmp_path / "test.yaml"
    test_yaml.write_text("# test config")
    CORE.config_path = test_yaml

    result = config_module.validate_config(test_config, cli_subs)

    assert isinstance(result.get(CONF_SUBSTITUTIONS), OrderedDict), (
        "Result substitutions should be an OrderedDict"
    )
    # Merge result: original kept, override applied, new key added.
    assert result[CONF_SUBSTITUTIONS]["var1"] == "value1"
    assert result[CONF_SUBSTITUTIONS]["var2"] == "override"
    assert result[CONF_SUBSTITUTIONS]["var3"] == "new_value"
def test_validate_config_without_command_line_substitutions_maintains_ordered_dict(
    tmp_path,
) -> None:
    """Test that validate_config preserves OrderedDict without command-line substitutions.

    This tests the code path in config.py where result[CONF_SUBSTITUTIONS] is set
    using merge_dicts_ordered() when command_line_substitutions is None.
    """
    # Minimal config that validate_config will accept.
    test_config = OrderedDict()
    test_config["esphome"] = {"name": "test_device", "platform": "ESP32"}
    test_config[CONF_SUBSTITUTIONS] = OrderedDict({"var1": "value1", "var2": "value2"})
    test_config["esp32"] = {"board": "esp32dev"}

    # validate_config consults CORE.config_path; point it at a real file.
    test_yaml = tmp_path / "test.yaml"
    test_yaml.write_text("# test config")
    CORE.config_path = test_yaml

    # No command-line substitutions supplied.
    result = config_module.validate_config(test_config, None)

    assert isinstance(result.get(CONF_SUBSTITUTIONS), OrderedDict), (
        "Result substitutions should be an OrderedDict"
    )
    # Substitution values pass through unchanged.
    assert result[CONF_SUBSTITUTIONS]["var1"] == "value1"
    assert result[CONF_SUBSTITUTIONS]["var2"] == "value2"
def test_merge_config_preserves_ordered_dict() -> None:
    """Test that merge_config preserves OrderedDict type.

    This is a regression test to ensure merge_config doesn't lose OrderedDict type
    when merging configs, which causes AttributeError on move_to_end().
    """
    # Fresh operands per call, in case merge_config mutates its inputs.

    # OrderedDict + dict -> OrderedDict
    merged = merge_config(OrderedDict([("a", 1), ("b", 2)]), {"b": 20, "c": 3})
    assert isinstance(merged, OrderedDict), (
        "OrderedDict + dict should return OrderedDict"
    )

    # dict + OrderedDict -> OrderedDict
    merged = merge_config({"a": 1, "b": 2}, OrderedDict([("b", 20), ("c", 3)]))
    assert isinstance(merged, OrderedDict), (
        "dict + OrderedDict should return OrderedDict"
    )

    # OrderedDict + OrderedDict -> OrderedDict
    merged = merge_config(
        OrderedDict([("a", 1), ("b", 2)]), OrderedDict([("b", 20), ("c", 3)])
    )
    assert isinstance(merged, OrderedDict), (
        "OrderedDict + OrderedDict should return OrderedDict"
    )

    # dict + dict stays a plain dict — no unnecessary promotion.
    merged = merge_config({"a": 1, "b": 2}, {"b": 20, "c": 3})
    assert isinstance(merged, dict), "dict + dict should return dict"
    assert not isinstance(merged, OrderedDict), (
        "dict + dict should not return OrderedDict"
    )