mirror of
https://github.com/esphome/esphome.git
synced 2025-10-17 01:03:46 +01:00
Merge remote-tracking branch 'upstream/dev' into integration
@@ -1 +1 @@
049d60eed541730efaa4c0dc5d337b4287bf29b6daa350b5dfc1f23915f1c52f
d7693a1e996cacd4a3d1c9a16336799c2a8cc3db02e4e74084151ce964581248
82 .github/workflows/ci.yml vendored
@@ -114,8 +114,7 @@ jobs:
matrix:
python-version:
- "3.11"
- "3.12"
- "3.13"
- "3.14"
os:
- ubuntu-latest
- macOS-latest

@@ -124,13 +123,9 @@ jobs:
# Minimize CI resource usage
# by only running the Python version
# version used for docker images on Windows and macOS
- python-version: "3.13"
- python-version: "3.14"
os: windows-latest
- python-version: "3.12"
os: windows-latest
- python-version: "3.13"
os: macOS-latest
- python-version: "3.12"
- python-version: "3.14"
os: macOS-latest
runs-on: ${{ matrix.os }}
needs:

@@ -178,6 +173,7 @@ jobs:
python-linters: ${{ steps.determine.outputs.python-linters }}
changed-components: ${{ steps.determine.outputs.changed-components }}
changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }}
component-test-count: ${{ steps.determine.outputs.component-test-count }}
steps:
- name: Check out code from GitHub

@@ -206,6 +202,7 @@ jobs:
echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT

integration-tests:

@@ -358,48 +355,13 @@ jobs:
# yamllint disable-line rule:line-length
if: always()

test-build-components:
name: Component test ${{ matrix.file }}
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0 && fromJSON(needs.determine-jobs.outputs.component-test-count) < 100
strategy:
fail-fast: false
max-parallel: 2
matrix:
file: ${{ fromJson(needs.determine-jobs.outputs.changed-components-with-tests) }}
steps:
- name: Cache apt packages
uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
with:
packages: libsdl2-dev
version: 1.0

- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Validate config for ${{ matrix.file }}
run: |
. venv/bin/activate
python3 script/test_build_components.py -e config -c ${{ matrix.file }}
- name: Compile config for ${{ matrix.file }}
run: |
. venv/bin/activate
python3 script/test_build_components.py -e compile -c ${{ matrix.file }}

test-build-components-splitter:
name: Split components for intelligent grouping (40 weighted per batch)
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
outputs:
matrix: ${{ steps.split.outputs.components }}
steps:

@@ -417,9 +379,10 @@ jobs:
# Use intelligent splitter that groups components with same bus configs
components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}'
directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}'

echo "Splitting components intelligently..."
output=$(python3 script/split_components_for_ci.py --components "$components" --batch-size 40 --output github)
output=$(python3 script/split_components_for_ci.py --components "$components" --directly-changed "$directly_changed" --batch-size 40 --output github)

echo "$output" >> $GITHUB_OUTPUT

@@ -430,7 +393,7 @@ jobs:
- common
- determine-jobs
- test-build-components-splitter
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
strategy:
fail-fast: false
max-parallel: ${{ (github.base_ref == 'beta' || github.base_ref == 'release') && 8 || 4 }}

@@ -477,18 +440,34 @@ jobs:
# Convert space-separated components to comma-separated for Python script
components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')

echo "Testing components: $components_csv"
# Only isolate directly changed components when targeting dev branch
# For beta/release branches, group everything for faster CI
#
# WHY ISOLATE DIRECTLY CHANGED COMPONENTS?
# - Isolated tests run WITHOUT --testing-mode, enabling full validation
# - This catches pin conflicts and other issues in directly changed code
# - Grouped tests use --testing-mode to allow config merging (disables some checks)
# - Dependencies are safe to group since they weren't modified in this PR
if [ "${{ github.base_ref }}" = "beta" ] || [ "${{ github.base_ref }}" = "release" ]; then
directly_changed_csv=""
echo "Testing components: $components_csv"
echo "Target branch: ${{ github.base_ref }} - grouping all components"
else
directly_changed_csv=$(echo '${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}' | jq -r 'join(",")')
echo "Testing components: $components_csv"
echo "Target branch: ${{ github.base_ref }} - isolating directly changed components: $directly_changed_csv"
fi
echo ""

# Run config validation with grouping
python3 script/test_build_components.py -e config -c "$components_csv" -f
# Run config validation with grouping and isolation
python3 script/test_build_components.py -e config -c "$components_csv" -f --isolate "$directly_changed_csv"

echo ""
echo "Config validation passed! Starting compilation..."
echo ""

# Run compilation with grouping
python3 script/test_build_components.py -e compile -c "$components_csv" -f
# Run compilation with grouping and isolation
python3 script/test_build_components.py -e compile -c "$components_csv" -f --isolate "$directly_changed_csv"

pre-commit-ci-lite:
name: pre-commit.ci lite

@@ -521,7 +500,6 @@ jobs:
- integration-tests
- clang-tidy
- determine-jobs
- test-build-components
- test-build-components-splitter
- test-build-components-split
- pre-commit-ci-lite
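The workflow comments above describe how the new --isolate flag splits a batch: directly changed components are validated on their own (full validation, no --testing-mode), while dependency-only components stay grouped. A minimal Python sketch of that partition rule, using a hypothetical helper name and made-up component names, not the script's actual API:

# Illustrative only: partition one CI batch into isolated and grouped sets.
def partition_components(batch: list[str], directly_changed: set[str]) -> tuple[list[str], list[str]]:
    isolated = [c for c in batch if c in directly_changed]   # tested one by one, full checks
    grouped = [c for c in batch if c not in directly_changed]  # merged configs, --testing-mode
    return isolated, grouped

# partition_components(["uart", "i2c", "my_sensor"], {"my_sensor"})
# -> (["my_sensor"], ["uart", "i2c"])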
@@ -117,6 +117,17 @@ class Purpose(StrEnum):
LOGGING = "logging"

class PortType(StrEnum):
SERIAL = "SERIAL"
NETWORK = "NETWORK"
MQTT = "MQTT"
MQTTIP = "MQTTIP"

# Magic MQTT port types that require special handling
_MQTT_PORT_TYPES = frozenset({PortType.MQTT, PortType.MQTTIP})

def _resolve_with_cache(address: str, purpose: Purpose) -> list[str]:
"""Resolve an address using cache if available, otherwise return the address itself."""
if CORE.address_cache and (cached := CORE.address_cache.get_addresses(address)):

@@ -280,16 +291,67 @@ def mqtt_get_ip(config: ConfigType, username: str, password: str, client_id: str
return mqtt.get_esphome_device_ip(config, username, password, client_id)

_PORT_TO_PORT_TYPE = {
"MQTT": "MQTT",
"MQTTIP": "MQTTIP",
}
def _resolve_network_devices(
devices: list[str], config: ConfigType, args: ArgsProtocol
) -> list[str]:
"""Resolve device list, converting MQTT magic strings to actual IP addresses.

This function filters the devices list to:
- Replace MQTT/MQTTIP magic strings with actual IP addresses via MQTT lookup
- Deduplicate addresses while preserving order
- Only resolve MQTT once even if multiple MQTT strings are present
- If MQTT resolution fails, log a warning and continue with other devices

Args:
devices: List of device identifiers (IPs, hostnames, or magic strings)
config: ESPHome configuration
args: Command-line arguments containing MQTT credentials

Returns:
List of network addresses suitable for connection attempts
"""
network_devices: list[str] = []
mqtt_resolved: bool = False

for device in devices:
port_type = get_port_type(device)
if port_type in _MQTT_PORT_TYPES:
# Only resolve MQTT once, even if multiple MQTT entries
if not mqtt_resolved:
try:
mqtt_ips = mqtt_get_ip(
config, args.username, args.password, args.client_id
)
network_devices.extend(mqtt_ips)
except EsphomeError as err:
_LOGGER.warning(
"MQTT IP discovery failed (%s), will try other devices if available",
err,
)
mqtt_resolved = True
elif device not in network_devices:
# Regular network address or IP - add if not already present
network_devices.append(device)

return network_devices

def get_port_type(port: str) -> str:
def get_port_type(port: str) -> PortType:
"""Determine the type of port/device identifier.

Returns:
PortType.SERIAL for serial ports (/dev/ttyUSB0, COM1, etc.)
PortType.MQTT for MQTT logging
PortType.MQTTIP for MQTT IP lookup
PortType.NETWORK for IP addresses, hostnames, or mDNS names
"""
if port.startswith("/") or port.startswith("COM"):
return "SERIAL"
return _PORT_TO_PORT_TYPE.get(port, "NETWORK")
return PortType.SERIAL
if port == "MQTT":
return PortType.MQTT
if port == "MQTTIP":
return PortType.MQTTIP
return PortType.NETWORK

def run_miniterm(config: ConfigType, port: str, args) -> int:

@@ -489,7 +551,7 @@ def upload_using_platformio(config: ConfigType, port: str):

def check_permissions(port: str):
if os.name == "posix" and get_port_type(port) == "SERIAL":
if os.name == "posix" and get_port_type(port) == PortType.SERIAL:
# Check if we can open selected serial port
if not os.access(port, os.F_OK):
raise EsphomeError(

@@ -517,7 +579,7 @@ def upload_program(
except AttributeError:
pass

if get_port_type(host) == "SERIAL":
if get_port_type(host) == PortType.SERIAL:
check_permissions(host)

exit_code = 1

@@ -544,17 +606,16 @@ def upload_program(
from esphome import espota2

remote_port = int(ota_conf[CONF_PORT])
password = ota_conf.get(CONF_PASSWORD, "")
password = ota_conf.get(CONF_PASSWORD)
if getattr(args, "file", None) is not None:
binary = Path(args.file)
else:
binary = CORE.firmware_bin

# MQTT address resolution
if get_port_type(host) in ("MQTT", "MQTTIP"):
devices = mqtt_get_ip(config, args.username, args.password, args.client_id)
# Resolve MQTT magic strings to actual IP addresses
network_devices = _resolve_network_devices(devices, config, args)

return espota2.run_ota(devices, remote_port, password, binary)
return espota2.run_ota(network_devices, remote_port, password, binary)

def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int | None:

@@ -569,33 +630,22 @@ def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int
raise EsphomeError("Logger is not configured!")

port = devices[0]
port_type = get_port_type(port)

if get_port_type(port) == "SERIAL":
if port_type == PortType.SERIAL:
check_permissions(port)
return run_miniterm(config, port, args)

port_type = get_port_type(port)

# Check if we should use API for logging
if has_api():
addresses_to_use: list[str] | None = None
# Resolve MQTT magic strings to actual IP addresses
if has_api() and (
network_devices := _resolve_network_devices(devices, config, args)
):
from esphome.components.api.client import run_logs

if port_type == "NETWORK":
# Network addresses (IPs, mDNS names, or regular DNS hostnames) can be used
# The resolve_ip_address() function in helpers.py handles all types
addresses_to_use = devices
elif port_type in ("MQTT", "MQTTIP") and has_mqtt_ip_lookup():
# Use MQTT IP lookup for MQTT/MQTTIP types
addresses_to_use = mqtt_get_ip(
config, args.username, args.password, args.client_id
)
return run_logs(config, network_devices)

if addresses_to_use is not None:
from esphome.components.api.client import run_logs

return run_logs(config, addresses_to_use)

if port_type in ("NETWORK", "MQTT") and has_mqtt_logging():
if port_type in (PortType.NETWORK, PortType.MQTT) and has_mqtt_logging():
from esphome import mqtt

return mqtt.show_logs(
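To illustrate the _resolve_network_devices() behaviour introduced above (one MQTT lookup, order-preserving deduplication), here is a hypothetical call with made-up addresses; the actual IPs would come from mqtt_get_ip() and the args object carrying the MQTT credentials:

# Illustrative only: "MQTT" is resolved once via the broker, the plain address is
# kept as-is, and the duplicates are dropped while order is preserved.
devices = ["MQTT", "192.168.1.10", "MQTT", "192.168.1.10"]
# _resolve_network_devices(devices, config, args) would return something like:
# ["10.0.0.23", "192.168.1.10"]   # first entry coming from the MQTT IP lookup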
@@ -16,7 +16,9 @@
#include "bluetooth_connection.h"

#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
#include <esp_bt.h>
#endif
#include <esp_bt_device.h>

namespace esphome::bluetooth_proxy {
@@ -775,7 +775,7 @@ void Display::test_card() {
int shift_y = (h - image_h) / 2;
int line_w = (image_w - 6) / 6;
int image_c = image_w / 2;
for (auto i = 0; i <= image_h; i++) {
for (auto i = 0; i != image_h; i++) {
int c = esp_scale(i, image_h);
this->horizontal_line(shift_x + 0, shift_y + i, line_w, r.fade_to_white(c));
this->horizontal_line(shift_x + line_w, shift_y + i, line_w, r.fade_to_black(c));  //

@@ -809,8 +809,11 @@ void Display::test_card() {
}
}
}
this->rectangle(0, 0, w, h, Color(127, 0, 127));
this->filled_rectangle(0, 0, 10, 10, Color(255, 0, 255));
this->filled_rectangle(w - 10, 0, 10, 10, Color(255, 0, 255));
this->filled_rectangle(0, h - 10, 10, 10, Color(255, 0, 255));
this->filled_rectangle(w - 10, h - 10, 10, 10, Color(255, 0, 255));
this->rectangle(0, 0, w, h, Color(255, 255, 255));
this->stop_poller();
}
@@ -324,7 +324,7 @@ def _is_framework_url(source: str) -> str:
# The default/recommended arduino framework version
# - https://github.com/espressif/arduino-esp32/releases
ARDUINO_FRAMEWORK_VERSION_LOOKUP = {
"recommended": cv.Version(3, 2, 1),
"recommended": cv.Version(3, 3, 2),
"latest": cv.Version(3, 3, 2),
"dev": cv.Version(3, 3, 2),
}

@@ -343,7 +343,7 @@ ARDUINO_PLATFORM_VERSION_LOOKUP = {
# The default/recommended esp-idf framework version
# - https://github.com/espressif/esp-idf/releases
ESP_IDF_FRAMEWORK_VERSION_LOOKUP = {
"recommended": cv.Version(5, 4, 2),
"recommended": cv.Version(5, 5, 1),
"latest": cv.Version(5, 5, 1),
"dev": cv.Version(5, 5, 1),
}

@@ -363,7 +363,7 @@ ESP_IDF_PLATFORM_VERSION_LOOKUP = {
# The platform-espressif32 version
# - https://github.com/pioarduino/platform-espressif32/releases
PLATFORM_VERSION_LOOKUP = {
"recommended": cv.Version(54, 3, 21, "2"),
"recommended": cv.Version(55, 3, 31, "1"),
"latest": cv.Version(55, 3, 31, "1"),
"dev": cv.Version(55, 3, 31, "1"),
}

@@ -544,6 +544,7 @@ CONF_ENABLE_LWIP_MDNS_QUERIES = "enable_lwip_mdns_queries"
CONF_ENABLE_LWIP_BRIDGE_INTERFACE = "enable_lwip_bridge_interface"
CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING = "enable_lwip_tcpip_core_locking"
CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY = "enable_lwip_check_thread_safety"
CONF_DISABLE_LIBC_LOCKS_IN_IRAM = "disable_libc_locks_in_iram"

def _validate_idf_component(config: ConfigType) -> ConfigType:

@@ -606,6 +607,9 @@ FRAMEWORK_SCHEMA = cv.All(
cv.Optional(
CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY, default=True
): cv.boolean,
cv.Optional(
CONF_DISABLE_LIBC_LOCKS_IN_IRAM, default=True
): cv.boolean,
cv.Optional(CONF_EXECUTE_FROM_PSRAM): cv.boolean,
}
),

@@ -864,6 +868,12 @@ async def to_code(config):
if advanced.get(CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY, True):
add_idf_sdkconfig_option("CONFIG_LWIP_CHECK_THREAD_SAFETY", True)

# Disable placing libc locks in IRAM to save RAM
# This is safe for ESPHome since no IRAM ISRs (interrupts that run while cache is disabled)
# use libc lock APIs. Saves approximately 1.3KB (1,356 bytes) of IRAM.
if advanced.get(CONF_DISABLE_LIBC_LOCKS_IN_IRAM, True):
add_idf_sdkconfig_option("CONFIG_LIBC_LOCKS_PLACE_IN_IRAM", False)

cg.add_platformio_option("board_build.partitions", "partitions.csv")
if CONF_PARTITIONS in config:
add_extra_build_file(
@@ -1564,6 +1564,10 @@ BOARDS = {
"name": "DFRobot Beetle ESP32-C3",
"variant": VARIANT_ESP32C3,
},
"dfrobot_firebeetle2_esp32c6": {
"name": "DFRobot FireBeetle 2 ESP32-C6",
"variant": VARIANT_ESP32C6,
},
"dfrobot_firebeetle2_esp32e": {
"name": "DFRobot Firebeetle 2 ESP32-E",
"variant": VARIANT_ESP32,

@@ -1604,6 +1608,22 @@ BOARDS = {
"name": "Ai-Thinker ESP-C3-M1-I-Kit",
"variant": VARIANT_ESP32C3,
},
"esp32-c5-devkitc-1": {
"name": "Espressif ESP32-C5-DevKitC-1 4MB no PSRAM",
"variant": VARIANT_ESP32C5,
},
"esp32-c5-devkitc1-n16r4": {
"name": "Espressif ESP32-C5-DevKitC-1 N16R4 (16 MB Flash Quad, 4 MB PSRAM Quad)",
"variant": VARIANT_ESP32C5,
},
"esp32-c5-devkitc1-n4": {
"name": "Espressif ESP32-C5-DevKitC-1 N4 (4MB no PSRAM)",
"variant": VARIANT_ESP32C5,
},
"esp32-c5-devkitc1-n8r4": {
"name": "Espressif ESP32-C5-DevKitC-1 N8R4 (8 MB Flash Quad, 4 MB PSRAM Quad)",
"variant": VARIANT_ESP32C5,
},
"esp32-c6-devkitc-1": {
"name": "Espressif ESP32-C6-DevKitC-1",
"variant": VARIANT_ESP32C6,

@@ -2048,6 +2068,10 @@ BOARDS = {
"name": "M5Stack Station",
"variant": VARIANT_ESP32,
},
"m5stack-tab5-p4": {
"name": "M5STACK Tab5 esp32-p4 Board",
"variant": VARIANT_ESP32P4,
},
"m5stack-timer-cam": {
"name": "M5Stack Timer CAM",
"variant": VARIANT_ESP32,

@@ -2476,6 +2500,10 @@ BOARDS = {
"name": "YelloByte YB-ESP32-S3-AMP (Rev.3)",
"variant": VARIANT_ESP32S3,
},
"yb_esp32s3_drv": {
"name": "YelloByte YB-ESP32-S3-DRV",
"variant": VARIANT_ESP32S3,
},
"yb_esp32s3_eth": {
"name": "YelloByte YB-ESP32-S3-ETH",
"variant": VARIANT_ESP32S3,
@@ -393,6 +393,15 @@ def final_validation(config):
max_connections = config.get(CONF_MAX_CONNECTIONS, DEFAULT_MAX_CONNECTIONS)
validate_connection_slots(max_connections)

# Check if hosted bluetooth is being used
if "esp32_hosted" in full_config:
add_idf_sdkconfig_option("CONFIG_BT_CLASSIC_ENABLED", False)
add_idf_sdkconfig_option("CONFIG_BT_BLE_ENABLED", True)
add_idf_sdkconfig_option("CONFIG_BT_BLUEDROID_ENABLED", True)
add_idf_sdkconfig_option("CONFIG_BT_CONTROLLER_DISABLED", True)
add_idf_sdkconfig_option("CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID", True)
add_idf_sdkconfig_option("CONFIG_ESP_HOSTED_BLUEDROID_HCI_VHCI", True)

# Check if BLE Server is needed
has_ble_server = "esp32_ble_server" in full_config
@@ -6,7 +6,15 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
#include <esp_bt.h>
#else
extern "C" {
#include <esp_hosted.h>
#include <esp_hosted_misc.h>
#include <esp_hosted_bluedroid.h>
}
#endif
#include <esp_bt_device.h>
#include <esp_bt_main.h>
#include <esp_gap_ble_api.h>

@@ -140,6 +148,7 @@ void ESP32BLE::advertising_init_() {

bool ESP32BLE::ble_setup_() {
esp_err_t err;
#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
#ifdef USE_ARDUINO
if (!btStart()) {
ESP_LOGE(TAG, "btStart failed: %d", esp_bt_controller_get_status());

@@ -173,6 +182,28 @@ bool ESP32BLE::ble_setup_() {
#endif

esp_bt_controller_mem_release(ESP_BT_MODE_CLASSIC_BT);
#else
esp_hosted_connect_to_slave();  // NOLINT

if (esp_hosted_bt_controller_init() != ESP_OK) {
ESP_LOGW(TAG, "esp_hosted_bt_controller_init failed");
return false;
}

if (esp_hosted_bt_controller_enable() != ESP_OK) {
ESP_LOGW(TAG, "esp_hosted_bt_controller_enable failed");
return false;
}

hosted_hci_bluedroid_open();

esp_bluedroid_hci_driver_operations_t operations = {
.send = hosted_hci_bluedroid_send,
.check_send_available = hosted_hci_bluedroid_check_send_available,
.register_host_callback = hosted_hci_bluedroid_register_host_callback,
};
esp_bluedroid_attach_hci_driver(&operations);
#endif

err = esp_bluedroid_init();
if (err != ESP_OK) {

@@ -261,6 +292,7 @@ bool ESP32BLE::ble_dismantle_() {
return false;
}

#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
#ifdef USE_ARDUINO
if (!btStop()) {
ESP_LOGE(TAG, "btStop failed: %d", esp_bt_controller_get_status());

@@ -290,6 +322,19 @@ bool ESP32BLE::ble_dismantle_() {
return false;
}
}
#endif
#else
if (esp_hosted_bt_controller_disable() != ESP_OK) {
ESP_LOGW(TAG, "esp_hosted_bt_controller_disable failed");
return false;
}

if (esp_hosted_bt_controller_deinit(false) != ESP_OK) {
ESP_LOGW(TAG, "esp_hosted_bt_controller_deinit failed");
return false;
}

hosted_hci_bluedroid_close();
#endif
return true;
}
@@ -10,7 +10,9 @@
#ifdef USE_ESP32
#ifdef USE_ESP32_BLE_ADVERTISING

#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
#include <esp_bt.h>
#endif
#include <esp_gap_ble_api.h>
#include <esp_gatts_api.h>

@@ -4,7 +4,9 @@
#ifdef USE_ESP32

#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
#include <esp_bt.h>
#endif
#include <esp_bt_main.h>
#include <esp_gap_ble_api.h>
#include <freertos/FreeRTOS.h>

@@ -5,7 +5,9 @@
#ifdef USE_ESP32

#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
#include <esp_bt.h>
#endif
#include <esp_gap_ble_api.h>

namespace esphome {

@@ -10,7 +10,9 @@
#include <nvs_flash.h>
#include <freertos/FreeRTOSConfig.h>
#include <esp_bt_main.h>
#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
#include <esp_bt.h>
#endif
#include <freertos/task.h>
#include <esp_gap_ble_api.h>

@@ -7,7 +7,9 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
#include <esp_bt.h>
#endif
#include <esp_bt_defs.h>
#include <esp_bt_main.h>
#include <esp_gap_ble_api.h>

@@ -845,6 +847,7 @@ void ESP32BLETracker::log_unexpected_state_(const char *operation, ScannerState

#ifdef USE_ESP32_BLE_SOFTWARE_COEXISTENCE
void ESP32BLETracker::update_coex_preference_(bool force_ble) {
#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
if (force_ble && !this->coex_prefer_ble_) {
ESP_LOGD(TAG, "Setting coexistence to Bluetooth to make connection.");
this->coex_prefer_ble_ = true;

@@ -854,6 +857,7 @@ void ESP32BLETracker::update_coex_preference_(bool force_ble) {
this->coex_prefer_ble_ = false;
esp_coex_preference_set(ESP_COEX_PREFER_BALANCE);  // Reset to default
}
#endif  // CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
}
#endif
@@ -92,9 +92,14 @@ async def to_code(config):

framework_ver: cv.Version = CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION]
os.environ["ESP_IDF_VERSION"] = f"{framework_ver.major}.{framework_ver.minor}"
esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="0.10.2")
esp32.add_idf_component(name="espressif/eppp_link", ref="0.2.0")
esp32.add_idf_component(name="espressif/esp_hosted", ref="2.0.11")
if framework_ver >= cv.Version(5, 5, 0):
esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="1.1.5")
esp32.add_idf_component(name="espressif/eppp_link", ref="1.1.3")
esp32.add_idf_component(name="espressif/esp_hosted", ref="2.5.11")
else:
esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="0.13.0")
esp32.add_idf_component(name="espressif/eppp_link", ref="0.2.0")
esp32.add_idf_component(name="espressif/esp_hosted", ref="2.0.11")
esp32.add_extra_script(
"post",
"esp32_hosted.py",
@@ -42,6 +42,11 @@ static size_t IRAM_ATTR HOT encoder_callback(const void *data, size_t size, size
symbols[i] = params->bit0;
}
}
#if ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 5, 1)
if ((index + 1) >= size && params->reset.duration0 == 0 && params->reset.duration1 == 0) {
*done = true;
}
#endif
return RMT_SYMBOLS_PER_BYTE;
}
@@ -19,6 +19,7 @@ from esphome.const import (
from esphome.core import CORE, coroutine_with_priority
from esphome.coroutine import CoroPriority
import esphome.final_validate as fv
from esphome.types import ConfigType

_LOGGER = logging.getLogger(__name__)

@@ -136,11 +137,12 @@ FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate

@coroutine_with_priority(CoroPriority.OTA_UPDATES)
async def to_code(config):
async def to_code(config: ConfigType) -> None:
var = cg.new_Pvariable(config[CONF_ID])
cg.add(var.set_port(config[CONF_PORT]))

if CONF_PASSWORD in config:
# Password could be set to an empty string and we can assume that means no password
if config.get(CONF_PASSWORD):
cg.add(var.set_auth_password(config[CONF_PASSWORD]))
cg.add_define("USE_OTA_PASSWORD")
# Only include hash algorithms when password is configured
@@ -486,7 +486,6 @@ CONF_RESUME_ON_INPUT = "resume_on_input"
CONF_RIGHT_BUTTON = "right_button"
CONF_ROLLOVER = "rollover"
CONF_ROOT_BACK_BTN = "root_back_btn"
CONF_ROWS = "rows"
CONF_SCALE_LINES = "scale_lines"
CONF_SCROLLBAR_MODE = "scrollbar_mode"
CONF_SELECTED_INDEX = "selected_index"
@@ -2,7 +2,7 @@ from esphome import automation
import esphome.codegen as cg
from esphome.components.key_provider import KeyProvider
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_ITEMS, CONF_TEXT, CONF_WIDTH
from esphome.const import CONF_ID, CONF_ITEMS, CONF_ROWS, CONF_TEXT, CONF_WIDTH
from esphome.cpp_generator import MockObj

from ..automation import action_to_code

@@ -15,7 +15,6 @@ from ..defines import (
CONF_ONE_CHECKED,
CONF_PAD_COLUMN,
CONF_PAD_ROW,
CONF_ROWS,
CONF_SELECTED,
)
from ..helpers import lvgl_components_required

@@ -2,7 +2,7 @@ from esphome import automation, pins
import esphome.codegen as cg
from esphome.components import key_provider
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_ON_KEY, CONF_PIN, CONF_TRIGGER_ID
from esphome.const import CONF_ID, CONF_ON_KEY, CONF_PIN, CONF_ROWS, CONF_TRIGGER_ID

CODEOWNERS = ["@ssieb"]

@@ -19,7 +19,6 @@ MatrixKeyTrigger = matrix_keypad_ns.class_(
)

CONF_KEYPAD_ID = "keypad_id"
CONF_ROWS = "rows"
CONF_COLUMNS = "columns"
CONF_KEYS = "keys"
CONF_DEBOUNCE_TIME = "debounce_time"
@@ -11,6 +11,7 @@ from esphome.const import (
CONF_BRIGHTNESS,
CONF_COLOR_ORDER,
CONF_DIMENSIONS,
CONF_DISABLED,
CONF_HEIGHT,
CONF_INIT_SEQUENCE,
CONF_INVERT_COLORS,

@@ -301,6 +302,8 @@ class DriverChip:
Check if a rotation can be implemented in hardware using the MADCTL register.
A rotation of 180 is always possible if x and y mirroring are supported, 90 and 270 are possible if the model supports swapping X and Y.
"""
if config.get(CONF_TRANSFORM) == CONF_DISABLED:
return False
transforms = self.transforms
rotation = config.get(CONF_ROTATION, 0)
if rotation == 0 or not transforms:

@@ -358,26 +361,26 @@ class DriverChip:
CONF_SWAP_XY: self.get_default(CONF_SWAP_XY),
},
)
# fill in defaults if not provided
mirror_x = transform.get(CONF_MIRROR_X, self.get_default(CONF_MIRROR_X))
mirror_y = transform.get(CONF_MIRROR_Y, self.get_default(CONF_MIRROR_Y))
swap_xy = transform.get(CONF_SWAP_XY, self.get_default(CONF_SWAP_XY))
transform[CONF_MIRROR_X] = mirror_x
transform[CONF_MIRROR_Y] = mirror_y
transform[CONF_SWAP_XY] = swap_xy

if not isinstance(transform, dict):
# Presumably disabled
return {
CONF_MIRROR_X: False,
CONF_MIRROR_Y: False,
CONF_SWAP_XY: False,
CONF_TRANSFORM: False,
}
# Can we use the MADCTL register to set the rotation?
if can_transform and CONF_TRANSFORM not in config:
rotation = config[CONF_ROTATION]
if rotation == 180:
transform[CONF_MIRROR_X] = not mirror_x
transform[CONF_MIRROR_Y] = not mirror_y
transform[CONF_MIRROR_X] = not transform[CONF_MIRROR_X]
transform[CONF_MIRROR_Y] = not transform[CONF_MIRROR_Y]
elif rotation == 90:
transform[CONF_SWAP_XY] = not swap_xy
transform[CONF_MIRROR_X] = not mirror_x
transform[CONF_SWAP_XY] = not transform[CONF_SWAP_XY]
transform[CONF_MIRROR_X] = not transform[CONF_MIRROR_X]
else:
transform[CONF_SWAP_XY] = not swap_xy
transform[CONF_MIRROR_Y] = not mirror_y
transform[CONF_SWAP_XY] = not transform[CONF_SWAP_XY]
transform[CONF_MIRROR_Y] = not transform[CONF_MIRROR_Y]
transform[CONF_TRANSFORM] = True
return transform
@@ -37,6 +37,7 @@ from esphome.const import (
CONF_DATA_RATE,
CONF_DC_PIN,
CONF_DIMENSIONS,
CONF_DISABLED,
CONF_ENABLE_PIN,
CONF_ID,
CONF_INIT_SEQUENCE,

@@ -146,12 +147,15 @@ def swap_xy_schema(model):
def model_schema(config):
model = MODELS[config[CONF_MODEL]]
bus_mode = config[CONF_BUS_MODE]
transform = cv.Schema(
{
cv.Required(CONF_MIRROR_X): cv.boolean,
cv.Required(CONF_MIRROR_Y): cv.boolean,
**swap_xy_schema(model),
}
transform = cv.Any(
cv.Schema(
{
cv.Required(CONF_MIRROR_X): cv.boolean,
cv.Required(CONF_MIRROR_Y): cv.boolean,
**swap_xy_schema(model),
}
),
cv.one_of(CONF_DISABLED, lower=True),
)
# CUSTOM model will need to provide a custom init sequence
iseqconf = (

@@ -160,7 +164,11 @@ def model_schema(config):
else cv.Optional(CONF_INIT_SEQUENCE)
)
# Dimensions are optional if the model has a default width and the x-y transform is not overridden
is_swapped = config.get(CONF_TRANSFORM, {}).get(CONF_SWAP_XY) is True
transform_config = config.get(CONF_TRANSFORM, {})
is_swapped = (
isinstance(transform_config, dict)
and transform_config.get(CONF_SWAP_XY, False) is True
)
cv_dimensions = (
cv.Optional if model.get_default(CONF_WIDTH) and not is_swapped else cv.Required
)

@@ -192,9 +200,7 @@ def model_schema(config):
.extend(
{
cv.GenerateID(): cv.declare_id(MipiSpi),
cv_dimensions(CONF_DIMENSIONS): dimension_schema(
model.get_default(CONF_DRAW_ROUNDING, 1)
),
cv_dimensions(CONF_DIMENSIONS): dimension_schema(1),
model.option(CONF_ENABLE_PIN, cv.UNDEFINED): cv.ensure_list(
pins.gpio_output_pin_schema
),

@@ -400,6 +406,7 @@ def get_instance(config):
offset_height,
DISPLAY_ROTATIONS[rotation],
frac,
config[CONF_DRAW_ROUNDING],
]
)
return MipiSpiBuffer, templateargs

@@ -431,7 +438,6 @@ async def to_code(config):
else:
config[CONF_ROTATION] = 0
cg.add(var.set_model(config[CONF_MODEL]))
cg.add(var.set_draw_rounding(config[CONF_DRAW_ROUNDING]))
if enable_pin := config.get(CONF_ENABLE_PIN):
enable = [await cg.gpio_pin_expression(pin) for pin in enable_pin]
cg.add(var.set_enable_pins(enable))
@@ -38,7 +38,7 @@ static constexpr uint8_t MADCTL_BGR = 0x08;  // Bit 3 Blue-Green-Red pixel order
static constexpr uint8_t MADCTL_XFLIP = 0x02;  // Mirror the display horizontally
static constexpr uint8_t MADCTL_YFLIP = 0x01;  // Mirror the display vertically

static const uint8_t DELAY_FLAG = 0xFF;
static constexpr uint8_t DELAY_FLAG = 0xFF;
// store a 16 bit value in a buffer, big endian.
static inline void put16_be(uint8_t *buf, uint16_t value) {
buf[0] = value >> 8;

@@ -79,7 +79,7 @@ class MipiSpi : public display::Display,
public spi::SPIDevice<spi::BIT_ORDER_MSB_FIRST, spi::CLOCK_POLARITY_LOW, spi::CLOCK_PHASE_LEADING,
spi::DATA_RATE_1MHZ> {
public:
MipiSpi() {}
MipiSpi() = default;
void update() override { this->stop_poller(); }
void draw_pixel_at(int x, int y, Color color) override {}
void set_model(const char *model) { this->model_ = model; }

@@ -99,7 +99,6 @@ class MipiSpi : public display::Display,
int get_width_internal() override { return WIDTH; }
int get_height_internal() override { return HEIGHT; }
void set_init_sequence(const std::vector<uint8_t> &sequence) { this->init_sequence_ = sequence; }
void set_draw_rounding(unsigned rounding) { this->draw_rounding_ = rounding; }

// reset the display, and write the init sequence
void setup() override {

@@ -326,6 +325,7 @@ class MipiSpi : public display::Display,

/**
* Writes a buffer to the display.
* @param ptr The pointer to the pixel data
* @param w Width of each line in bytes
* @param h Height of the buffer in rows
* @param pad Padding in bytes after each line

@@ -424,7 +424,6 @@ class MipiSpi : public display::Display,

// other properties set by configuration
bool invert_colors_{};
unsigned draw_rounding_{2};
optional<uint8_t> brightness_{};
const char *model_{"Unknown"};
std::vector<uint8_t> init_sequence_{};

@@ -444,12 +443,20 @@ class MipiSpi : public display::Display,
* @tparam OFFSET_WIDTH The x-offset of the display in pixels
* @tparam OFFSET_HEIGHT The y-offset of the display in pixels
* @tparam FRACTION The fraction of the display size to use for the buffer (e.g. 4 means a 1/4 buffer).
* @tparam ROUNDING The alignment requirement for drawing operations (e.g. 2 means that x coordinates must be even)
*/
template<typename BUFFERTYPE, PixelMode BUFFERPIXEL, bool IS_BIG_ENDIAN, PixelMode DISPLAYPIXEL, BusType BUS_TYPE,
int WIDTH, int HEIGHT, int OFFSET_WIDTH, int OFFSET_HEIGHT, display::DisplayRotation ROTATION, int FRACTION>
uint16_t WIDTH, uint16_t HEIGHT, int OFFSET_WIDTH, int OFFSET_HEIGHT, display::DisplayRotation ROTATION,
int FRACTION, unsigned ROUNDING>
class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DISPLAYPIXEL, BUS_TYPE, WIDTH, HEIGHT,
OFFSET_WIDTH, OFFSET_HEIGHT> {
public:
// these values define the buffer size needed to write in accordance with the chip pixel alignment
// requirements. If the required rounding does not divide the width and height, we round up to the next multiple and
// ignore the extra columns and rows when drawing, but use them to write to the display.
static constexpr unsigned BUFFER_WIDTH = (WIDTH + ROUNDING - 1) / ROUNDING * ROUNDING;
static constexpr unsigned BUFFER_HEIGHT = (HEIGHT + ROUNDING - 1) / ROUNDING * ROUNDING;

MipiSpiBuffer() { this->rotation_ = ROTATION; }

void dump_config() override {

@@ -461,15 +468,15 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
"  Buffer fraction: 1/%d\n"
"  Buffer bytes: %zu\n"
"  Draw rounding: %u",
this->rotation_, BUFFERPIXEL * 8, FRACTION, sizeof(BUFFERTYPE) * WIDTH * HEIGHT / FRACTION,
this->draw_rounding_);
this->rotation_, BUFFERPIXEL * 8, FRACTION,
sizeof(BUFFERTYPE) * BUFFER_WIDTH * BUFFER_HEIGHT / FRACTION, ROUNDING);
}

void setup() override {
MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DISPLAYPIXEL, BUS_TYPE, WIDTH, HEIGHT, OFFSET_WIDTH,
OFFSET_HEIGHT>::setup();
RAMAllocator<BUFFERTYPE> allocator{};
this->buffer_ = allocator.allocate(WIDTH * HEIGHT / FRACTION);
this->buffer_ = allocator.allocate(BUFFER_WIDTH * BUFFER_HEIGHT / FRACTION);
if (this->buffer_ == nullptr) {
this->mark_failed("Buffer allocation failed");
}

@@ -508,15 +515,14 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
esph_log_v(TAG, "x_low %d, y_low %d, x_high %d, y_high %d", this->x_low_, this->y_low_, this->x_high_,
this->y_high_);
// Some chips require that the drawing window be aligned on certain boundaries
auto dr = this->draw_rounding_;
this->x_low_ = this->x_low_ / dr * dr;
this->y_low_ = this->y_low_ / dr * dr;
this->x_high_ = (this->x_high_ + dr) / dr * dr - 1;
this->y_high_ = (this->y_high_ + dr) / dr * dr - 1;
this->x_low_ = this->x_low_ / ROUNDING * ROUNDING;
this->y_low_ = this->y_low_ / ROUNDING * ROUNDING;
this->x_high_ = (this->x_high_ + ROUNDING) / ROUNDING * ROUNDING - 1;
this->y_high_ = (this->y_high_ + ROUNDING) / ROUNDING * ROUNDING - 1;
int w = this->x_high_ - this->x_low_ + 1;
int h = this->y_high_ - this->y_low_ + 1;
this->write_to_display_(this->x_low_, this->y_low_, w, h, this->buffer_, this->x_low_,
this->y_low_ - this->start_line_, WIDTH - w);
this->y_low_ - this->start_line_, BUFFER_WIDTH - w);
// invalidate watermarks
this->x_low_ = WIDTH;
this->y_low_ = HEIGHT;

@@ -536,10 +542,10 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
void draw_pixel_at(int x, int y, Color color) override {
if (!this->get_clipping().inside(x, y))
return;
rotate_coordinates_(x, y);
rotate_coordinates(x, y);
if (x < 0 || x >= WIDTH || y < this->start_line_ || y >= this->end_line_)
return;
this->buffer_[(y - this->start_line_) * WIDTH + x] = convert_color_(color);
this->buffer_[(y - this->start_line_) * BUFFER_WIDTH + x] = convert_color(color);
if (x < this->x_low_) {
this->x_low_ = x;
}

@@ -560,7 +566,7 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
this->y_low_ = this->start_line_;
this->x_high_ = WIDTH - 1;
this->y_high_ = this->end_line_ - 1;
std::fill_n(this->buffer_, HEIGHT * WIDTH / FRACTION, convert_color_(color));
std::fill_n(this->buffer_, HEIGHT * BUFFER_WIDTH / FRACTION, convert_color(color));
}

int get_width() override {

@@ -577,7 +583,7 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS

protected:
// Rotate the coordinates to match the display orientation.
void rotate_coordinates_(int &x, int &y) const {
static void rotate_coordinates(int &x, int &y) {
if constexpr (ROTATION == display::DISPLAY_ROTATION_180_DEGREES) {
x = WIDTH - x - 1;
y = HEIGHT - y - 1;

@@ -593,7 +599,7 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
}

// Convert a color to the buffer pixel format.
BUFFERTYPE convert_color_(Color &color) const {
static BUFFERTYPE convert_color(const Color &color) {
if constexpr (BUFFERPIXEL == PIXEL_MODE_8) {
return (color.red & 0xE0) | (color.g & 0xE0) >> 3 | color.b >> 6;
} else if constexpr (BUFFERPIXEL == PIXEL_MODE_16) {
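The BUFFER_WIDTH/BUFFER_HEIGHT constants introduced above round the panel dimensions up to the next multiple of ROUNDING so that drawing-window alignment never truncates the buffer. The same integer round-up rule, shown in isolation as a Python sketch (not part of the header itself):

# Illustrative only: integer round-up to the next multiple of `rounding`.
def round_up(value: int, rounding: int) -> int:
    return (value + rounding - 1) // rounding * rounding

# round_up(170, 4) == 172  (a 170-pixel-wide panel with 4-pixel alignment)
# round_up(172, 4) == 172  (already-aligned values are unchanged)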
@@ -3,6 +3,7 @@ import esphome.config_validation as cv

from .amoled import CO5300
from .ili import ILI9488_A
from .jc import AXS15231

DriverChip(
"WAVESHARE-4-TFT",

@@ -152,3 +153,12 @@ CO5300.extend(
cs_pin=12,
reset_pin=39,
)

AXS15231.extend(
"WAVESHARE-ESP32-S3-TOUCH-LCD-3.49",
width=172,
height=640,
data_rate="80MHz",
cs_pin=9,
reset_pin=21,
)
@@ -145,7 +145,7 @@ class BSDSocketImpl : public Socket {
}

ssize_t sendto(const void *buf, size_t len, int flags, const struct sockaddr *to, socklen_t tolen) override {
return ::sendto(fd_, buf, len, flags, to, tolen);
return ::sendto(fd_, buf, len, flags, to, tolen);  // NOLINT(readability-suspicious-call-argument)
}

int setblocking(bool blocking) override {
@@ -836,6 +836,7 @@ CONF_RMT_CHANNEL = "rmt_channel"
CONF_RMT_SYMBOLS = "rmt_symbols"
CONF_ROTATION = "rotation"
CONF_ROW = "row"
CONF_ROWS = "rows"
CONF_RS_PIN = "rs_pin"
CONF_RTD_NOMINAL_RESISTANCE = "rtd_nominal_resistance"
CONF_RTD_WIRES = "rtd_wires"
@@ -202,7 +202,7 @@
#define USB_HOST_MAX_REQUESTS 16

#ifdef USE_ARDUINO
#define USE_ARDUINO_VERSION_CODE VERSION_CODE(3, 2, 1)
#define USE_ARDUINO_VERSION_CODE VERSION_CODE(3, 3, 2)
#define USE_ETHERNET
#define USE_ETHERNET_KSZ8081
#endif
@@ -242,7 +242,7 @@ def send_check(

def perform_ota(
sock: socket.socket, password: str, file_handle: io.IOBase, filename: Path
sock: socket.socket, password: str | None, file_handle: io.IOBase, filename: Path
) -> None:
file_contents = file_handle.read()
file_size = len(file_contents)

@@ -278,13 +278,13 @@ def perform_ota(

def perform_auth(
sock: socket.socket,
password: str,
password: str | None,
hash_func: Callable[..., Any],
nonce_size: int,
hash_name: str,
) -> None:
"""Perform challenge-response authentication using specified hash algorithm."""
if not password:
if password is None:
raise OTAError("ESP requests password, but no password given!")

nonce_bytes = receive_exactly(

@@ -385,7 +385,7 @@ def perform_ota(

def run_ota_impl_(
remote_host: str | list[str], remote_port: int, password: str, filename: Path
remote_host: str | list[str], remote_port: int, password: str | None, filename: Path
) -> tuple[int, str | None]:
from esphome.core import CORE

@@ -436,7 +436,7 @@ def run_ota_impl_(

def run_ota(
remote_host: str | list[str], remote_port: int, password: str, filename: Path
remote_host: str | list[str], remote_port: int, password: str | None, filename: Path
) -> tuple[int, str | None]:
try:
return run_ota_impl_(remote_host, remote_port, password, filename)
@@ -125,9 +125,9 @@ extra_scripts = post:esphome/components/esp8266/post_build.py.script
; This are common settings for the ESP32 (all variants) using Arduino.
[common:esp32-arduino]
extends = common:arduino
platform = https://github.com/pioarduino/platform-espressif32/releases/download/54.03.21-2/platform-espressif32.zip
platform = https://github.com/pioarduino/platform-espressif32/releases/download/55.03.31-1/platform-espressif32.zip
platform_packages =
pioarduino/framework-arduinoespressif32@https://github.com/espressif/arduino-esp32/releases/download/3.2.1/esp32-3.2.1.zip
pioarduino/framework-arduinoespressif32@https://github.com/espressif/arduino-esp32/releases/download/3.3.2/esp32-3.3.2.zip

framework = arduino, espidf ; Arduino as an ESP-IDF component
lib_deps =

@@ -161,9 +161,9 @@ extra_scripts = post:esphome/components/esp32/post_build.py.script
; This are common settings for the ESP32 (all variants) using IDF.
[common:esp32-idf]
extends = common:idf
platform = https://github.com/pioarduino/platform-espressif32/releases/download/54.03.21-2/platform-espressif32.zip
platform = https://github.com/pioarduino/platform-espressif32/releases/download/55.03.31-1/platform-espressif32.zip
platform_packages =
pioarduino/framework-espidf@https://github.com/pioarduino/esp-idf/releases/download/v5.4.2/esp-idf-v5.4.2.zip
pioarduino/framework-espidf@https://github.com/pioarduino/esp-idf/releases/download/v5.5.1/esp-idf-v5.5.1.zip

framework = espidf
lib_deps =
@@ -12,7 +12,7 @@ platformio==6.1.18  # When updating platformio, also update /docker/Dockerfile
esptool==5.1.0
click==8.1.7
esphome-dashboard==20251013.0
aioesphomeapi==41.17.0
aioesphomeapi==42.0.0
zeroconf==0.148.0
puremagic==1.30
ruamel.yaml==0.18.15 # dashboard_import

@@ -23,10 +23,6 @@ cairosvg==2.8.2
freetype-py==2.5.1
jinja2==3.1.6

# esp-idf requires this, but doesn't bundle it by default
# https://github.com/espressif/esp-idf/blob/220590d599e134d7a5e7f1e683cc4550349ffbf8/requirements.txt#L24
kconfiglib==13.7.1

# esp-idf >= 5.0 requires this
pyparsing >= 3.0
@@ -1,4 +1,4 @@
pylint==4.0.0
pylint==4.0.1
flake8==7.3.0  # also change in .pre-commit-config.yaml when updating
ruff==0.14.0  # also change in .pre-commit-config.yaml when updating
pyupgrade==3.21.0  # also change in .pre-commit-config.yaml when updating
@@ -75,6 +75,8 @@ ISOLATED_COMPONENTS = {
"ethernet": "Defines ethernet: which conflicts with wifi: used by most components",
"ethernet_info": "Related to ethernet component which conflicts with wifi",
"lvgl": "Defines multiple SDL displays on host platform that conflict when merged with other display configs",
"openthread": "Conflicts with wifi: used by most components",
"openthread_info": "Conflicts with wifi: used by most components",
"matrix_keypad": "Needs isolation due to keypad",
"mcp4725": "no YAML config to specify i2c bus id",
"mcp47a1": "no YAML config to specify i2c bus id",
@@ -501,7 +501,7 @@ def lint_constants_usage():
continue
errs.append(
f"Constant {highlight(constant)} is defined in {len(uses)} files. Please move all definitions of the "
f"constant to const.py (Uses: {', '.join(uses)}) in a separate PR. "
f"constant to const.py (Uses: {', '.join(str(u) for u in uses)}) in a separate PR. "
"See https://developers.esphome.io/contributing/code/#python"
)
return errs
@@ -31,6 +31,7 @@ Options:
from __future__ import annotations

import argparse
from functools import cache
import json
import os
from pathlib import Path

@@ -45,7 +46,6 @@ from helpers import (
changed_files,
get_all_dependencies,
get_components_from_integration_fixtures,
parse_list_components_output,
root_path,
)

@@ -212,6 +212,24 @@ def _any_changed_file_endswith(branch: str | None, extensions: tuple[str, ...])
return any(file.endswith(extensions) for file in changed_files(branch))

@cache
def _component_has_tests(component: str) -> bool:
"""Check if a component has test files.

Cached to avoid repeated filesystem operations for the same component.

Args:
component: Component name to check

Returns:
True if the component has test YAML files
"""
tests_dir = Path(root_path) / "tests" / "components" / component
if not tests_dir.exists():
return False
return any(tests_dir.glob("test.*.yaml"))

def main() -> None:
"""Main function that determines which CI jobs to run."""
parser = argparse.ArgumentParser(

@@ -228,23 +246,37 @@ def main() -> None:
run_clang_format = should_run_clang_format(args.branch)
run_python_linters = should_run_python_linters(args.branch)

# Get changed components using list-components.py for exact compatibility
# Get both directly changed and all changed components (with dependencies) in one call
script_path = Path(__file__).parent / "list-components.py"
cmd = [sys.executable, str(script_path), "--changed"]
cmd = [sys.executable, str(script_path), "--changed-with-deps"]
if args.branch:
cmd.extend(["-b", args.branch])

result = subprocess.run(cmd, capture_output=True, text=True, check=True)
changed_components = parse_list_components_output(result.stdout)
component_data = json.loads(result.stdout)
directly_changed_components = component_data["directly_changed"]
changed_components = component_data["all_changed"]

# Filter to only components that have test files
# Components without tests shouldn't generate CI test jobs
tests_dir = Path(root_path) / "tests" / "components"
changed_components_with_tests = [
component for component in changed_components if _component_has_tests(component)
]

# Get directly changed components with tests (for isolated testing)
# These will be tested WITHOUT --testing-mode in CI to enable full validation
# (pin conflicts, etc.) since they contain the actual changes being reviewed
directly_changed_with_tests = [
component
for component in changed_components
if (component_test_dir := tests_dir / component).exists()
and any(component_test_dir.glob("test.*.yaml"))
for component in directly_changed_components
if _component_has_tests(component)
]

# Get dependency-only components (for grouped testing)
dependency_only_components = [
component
for component in changed_components_with_tests
if component not in directly_changed_components
]

# Build output

@@ -255,7 +287,11 @@ def main() -> None:
"python_linters": run_python_linters,
"changed_components": changed_components,
"changed_components_with_tests": changed_components_with_tests,
"directly_changed_components_with_tests": directly_changed_with_tests,
"dependency_only_components_with_tests": dependency_only_components,
"component_test_count": len(changed_components_with_tests),
"directly_changed_count": len(directly_changed_with_tests),
"dependency_only_count": len(dependency_only_components),
}

# Output as JSON
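For reference, the output dict assembled above would serialize to JSON along these lines; only the component-related keys are taken from the code, the values and component names here are invented, and the real output also carries the other keys shown in the hunk (clang_tidy, python_linters, and so on):

# Illustrative output shape only.
{
    "changed_components": ["uart", "my_sensor"],
    "changed_components_with_tests": ["uart", "my_sensor"],
    "directly_changed_components_with_tests": ["my_sensor"],
    "dependency_only_components_with_tests": ["uart"],
    "component_test_count": 2,
    "directly_changed_count": 1,
    "dependency_only_count": 1,
}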
@@ -185,18 +185,32 @@ def main():
"-c",
"--changed",
action="store_true",
help="List all components required for testing based on changes",
help="List all components required for testing based on changes (includes dependencies)",
)
parser.add_argument(
"--changed-direct",
action="store_true",
help="List only directly changed components (without dependencies)",
)
parser.add_argument(
"--changed-with-deps",
action="store_true",
help="Output JSON with both directly changed and all changed components",
)
parser.add_argument(
"-b", "--branch", help="Branch to compare changed files against"
)
args = parser.parse_args()

if args.branch and not args.changed:
parser.error("--branch requires --changed")
if args.branch and not (
args.changed or args.changed_direct or args.changed_with_deps
):
parser.error(
"--branch requires --changed, --changed-direct, or --changed-with-deps"
)

if args.changed:
# When --changed is passed, only get the changed files
if args.changed or args.changed_direct or args.changed_with_deps:
# When --changed* is passed, only get the changed files
changed = changed_files(args.branch)

# If any base test file(s) changed, there's no need to filter out components

@@ -210,8 +224,25 @@ def main():
# Get all component files
files = get_all_component_files()

for c in get_components(files, args.changed):
print(c)
if args.changed_with_deps:
# Return JSON with both directly changed and all changed components
import json

directly_changed = get_components(files, False)
all_changed = get_components(files, True)
output = {
"directly_changed": directly_changed,
"all_changed": all_changed,
}
print(json.dumps(output))
elif args.changed_direct:
# Return only directly changed components (without dependencies)
for c in get_components(files, False):
print(c)
else:
# Return all changed components (with dependencies) - default behavior
for c in get_components(files, args.changed):
print(c)

if __name__ == "__main__":
@@ -56,6 +56,7 @@ def create_intelligent_batches(
components: list[str],
tests_dir: Path,
batch_size: int = 40,
directly_changed: set[str] | None = None,
) -> list[list[str]]:
"""Create batches optimized for component grouping.

@@ -63,6 +64,7 @@ def create_intelligent_batches(
components: List of component names to batch
tests_dir: Path to tests/components directory
batch_size: Target size for each batch
directly_changed: Set of directly changed components (for logging only)

Returns:
List of component batches (lists of component names)
@@ -94,10 +96,17 @@ def create_intelligent_batches(

for component in components_with_tests:
# Components that can't be grouped get unique signatures
# This includes both manually curated ISOLATED_COMPONENTS and
# automatically detected non_groupable components
# This includes:
# - Manually curated ISOLATED_COMPONENTS
# - Automatically detected non_groupable components
# - Directly changed components (passed via --isolate in CI)
# These can share a batch/runner but won't be grouped/merged
if component in ISOLATED_COMPONENTS or component in non_groupable:
is_isolated = (
component in ISOLATED_COMPONENTS
or component in non_groupable
or (directly_changed and component in directly_changed)
)
if is_isolated:
signature_groups[f"isolated_{component}"].append(component)
continue

@@ -187,6 +196,10 @@ def main() -> int:
default=Path("tests/components"),
help="Path to tests/components directory",
)
parser.add_argument(
"--directly-changed",
help="JSON array of directly changed component names (for logging only)",
)
parser.add_argument(
"--output",
"-o",
@@ -208,11 +221,21 @@ def main() -> int:
print("Components must be a JSON array", file=sys.stderr)
return 1

# Parse directly changed components list from JSON (if provided)
directly_changed = None
if args.directly_changed:
try:
directly_changed = set(json.loads(args.directly_changed))
except json.JSONDecodeError as e:
print(f"Error parsing directly-changed JSON: {e}", file=sys.stderr)
return 1

# Create intelligent batches
batches = create_intelligent_batches(
components=components,
tests_dir=args.tests_dir,
batch_size=args.batch_size,
directly_changed=directly_changed,
)

# Convert batches to space-separated strings for CI
@@ -238,13 +261,37 @@ def main() -> int:
isolated_count = sum(
1
for comp in all_batched_components
if comp in ISOLATED_COMPONENTS or comp in non_groupable
if comp in ISOLATED_COMPONENTS
or comp in non_groupable
or (directly_changed and comp in directly_changed)
)
groupable_count = actual_components - isolated_count

print("\n=== Intelligent Batch Summary ===", file=sys.stderr)
print(f"Total components requested: {len(components)}", file=sys.stderr)
print(f"Components with test files: {actual_components}", file=sys.stderr)

# Show breakdown of directly changed vs dependencies
if directly_changed:
direct_count = sum(
1 for comp in all_batched_components if comp in directly_changed
)
dep_count = actual_components - direct_count
direct_comps = [
comp for comp in all_batched_components if comp in directly_changed
]
dep_comps = [
comp for comp in all_batched_components if comp not in directly_changed
]
print(
f" - Direct changes: {direct_count} ({', '.join(sorted(direct_comps))})",
file=sys.stderr,
)
print(
f" - Dependencies: {dep_count} ({', '.join(sorted(dep_comps))})",
file=sys.stderr,
)

print(f" - Groupable (weight=1): {groupable_count}", file=sys.stderr)
print(f" - Isolated (weight=10): {isolated_count}", file=sys.stderr)
if actual_components < len(components):
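The effect of the new directly_changed parameter is easiest to see in miniature: isolated components receive a unique signature, so they are never merged with other components, while everything else can share a signature and be batched together. A simplified sketch with shortened names, not the actual implementation:

    from collections import defaultdict

    def group_by_signature(components: list[str], isolated: set[str]) -> dict[str, list[str]]:
        # Isolated components get a unique signature so they never merge with
        # other components; groupable ones share a signature and can be batched.
        signature_groups: dict[str, list[str]] = defaultdict(list)
        for component in components:
            if component in isolated:
                signature_groups[f"isolated_{component}"].append(component)
            else:
                # Illustrative shared signature; the real script derives it
                # from the component's platform/bus requirements.
                signature_groups["groupable"].append(component)
        return dict(signature_groups)

    print(group_by_signature(["wifi", "api", "sensor"], isolated={"wifi"}))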
@@ -365,6 +365,7 @@ def run_grouped_component_tests(
build_dir: Path,
esphome_command: str,
continue_on_fail: bool,
additional_isolated: set[str] | None = None,
) -> tuple[set[tuple[str, str]], list[str], list[str], dict[str, str]]:
"""Run grouped component tests.

@@ -376,6 +377,7 @@ def run_grouped_component_tests(
build_dir: Path to build directory
esphome_command: ESPHome command (config/compile)
continue_on_fail: Whether to continue on failure
additional_isolated: Additional components to treat as isolated (not grouped)

Returns:
Tuple of (tested_components, passed_tests, failed_tests, failed_commands)
@@ -397,6 +399,17 @@ def run_grouped_component_tests(
# Track why components can't be grouped (for detailed output)
non_groupable_reasons = {}

# Merge additional isolated components with predefined ones
# ISOLATED COMPONENTS are tested individually WITHOUT --testing-mode
# This is critical because:
# - Grouped tests use --testing-mode which disables pin conflict checks and other validation
# - These checks are disabled to allow config merging (multiple components in one build)
# - For directly changed components (via --isolate), we need full validation to catch issues
# - Dependencies are safe to group since they weren't modified in the PR
all_isolated = set(ISOLATED_COMPONENTS.keys())
if additional_isolated:
all_isolated.update(additional_isolated)

# Group by (platform, bus_signature)
for component, platforms in component_buses.items():
if component not in all_tests:
@@ -404,7 +417,7 @@ def run_grouped_component_tests(

# Skip components that must be tested in isolation
# These are shown separately and should not be in non_groupable_reasons
if component in ISOLATED_COMPONENTS:
if component in all_isolated:
continue

# Skip base bus components (these test the bus platforms themselves)
@@ -453,15 +466,28 @@ def run_grouped_component_tests(
print("\nGrouping Plan:")
print("-" * 80)

# Show isolated components (must test individually due to known issues)
isolated_in_tests = [c for c in ISOLATED_COMPONENTS if c in all_tests]
# Show isolated components (must test individually due to known issues or direct changes)
isolated_in_tests = [c for c in all_isolated if c in all_tests]
if isolated_in_tests:
print(
f"\n⚠ {len(isolated_in_tests)} components must be tested in isolation (known build issues):"
)
for comp in sorted(isolated_in_tests):
reason = ISOLATED_COMPONENTS[comp]
print(f" - {comp}: {reason}")
predefined_isolated = [c for c in isolated_in_tests if c in ISOLATED_COMPONENTS]
additional_in_tests = [
c for c in isolated_in_tests if c in (additional_isolated or set())
]

if predefined_isolated:
print(
f"\n⚠ {len(predefined_isolated)} components must be tested in isolation (known build issues):"
)
for comp in sorted(predefined_isolated):
reason = ISOLATED_COMPONENTS[comp]
print(f" - {comp}: {reason}")

if additional_in_tests:
print(
f"\n✓ {len(additional_in_tests)} components tested in isolation (directly changed in PR):"
)
for comp in sorted(additional_in_tests):
print(f" - {comp}")

# Show base bus components (test the bus platform implementations)
base_bus_in_tests = [c for c in BASE_BUS_COMPONENTS if c in all_tests]
@@ -733,6 +759,7 @@ def test_components(
esphome_command: str,
continue_on_fail: bool,
enable_grouping: bool = True,
isolated_components: set[str] | None = None,
) -> int:
"""Test components with optional intelligent grouping.

@@ -742,6 +769,10 @@ def test_components(
esphome_command: ESPHome command (config/compile)
continue_on_fail: Whether to continue on failure
enable_grouping: Whether to enable component grouping
isolated_components: Set of component names to test in isolation (not grouped).
These are tested WITHOUT --testing-mode to enable full validation
(pin conflicts, etc). This is used in CI for directly changed components
to catch issues that would be missed with --testing-mode.

Returns:
Exit code (0 for success, 1 for failure)
@@ -788,6 +819,7 @@ def test_components(
build_dir=build_dir,
esphome_command=esphome_command,
continue_on_fail=continue_on_fail,
additional_isolated=isolated_components,
)

# Then run individual tests for components not in groups
@@ -912,18 +944,30 @@ def main() -> int:
action="store_true",
help="Disable component grouping (test each component individually)",
)
parser.add_argument(
"--isolate",
help="Comma-separated list of components to test in isolation (not grouped with others). "
"These are tested WITHOUT --testing-mode to enable full validation. "
"Used in CI for directly changed components to catch pin conflicts and other issues.",
)

args = parser.parse_args()

# Parse component patterns
component_patterns = [p.strip() for p in args.components.split(",")]

# Parse isolated components
isolated_components = None
if args.isolate:
isolated_components = {c.strip() for c in args.isolate.split(",") if c.strip()}

return test_components(
component_patterns=component_patterns,
platform_filter=args.target,
esphome_command=args.esphome_command,
continue_on_fail=args.continue_on_fail,
enable_grouping=not args.no_grouping,
isolated_components=isolated_components,
)
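The reason directly changed components are routed through --isolate is the validation gap described in the comments above: grouped builds pass --testing-mode, which relaxes checks such as pin conflict detection so that merged configs compile. A rough sketch of the resulting command shapes; the exact CLI layout, flag placement, and test file paths are assumptions, not the script's actual code:

    def build_command(component: str, isolated: bool, esphome_command: str = "config") -> list[str]:
        # Sketch only: isolated components are built without --testing-mode so
        # full validation (pin conflicts, etc.) still runs; grouped builds need
        # the flag to allow merged configs. Paths and flag placement assumed.
        cmd = ["esphome"]
        if not isolated:
            cmd.append("--testing-mode")
        cmd += [esphome_command, f"tests/components/{component}/test.esp32-idf.yaml"]
        return cmd

    print(build_command("wifi", isolated=True))
    print(build_command("wifi", isolated=False))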
@@ -69,7 +69,7 @@ def run_schema_validation(config: ConfigType) -> None:
{
"id": "display_id",
"model": "custom",
"dimensions": {"width": 320, "height": 240},
"dimensions": {"width": 260, "height": 260},
"draw_rounding": 13,
"init_sequence": [[0xA0, 0x01]],
},
@@ -336,7 +336,7 @@ def test_native_generation(

main_cpp = generate_main(component_fixture_path("native.yaml"))
assert (
"mipi_spi::MipiSpiBuffer<uint16_t, mipi_spi::PIXEL_MODE_16, true, mipi_spi::PIXEL_MODE_16, mipi_spi::BUS_TYPE_QUAD, 360, 360, 0, 1, display::DISPLAY_ROTATION_0_DEGREES, 1>()"
"mipi_spi::MipiSpiBuffer<uint16_t, mipi_spi::PIXEL_MODE_16, true, mipi_spi::PIXEL_MODE_16, mipi_spi::BUS_TYPE_QUAD, 360, 360, 0, 1, display::DISPLAY_ROTATION_0_DEGREES, 1, 1>()"
in main_cpp
)
assert "set_init_sequence({240, 1, 8, 242" in main_cpp
19
tests/components/bluetooth_proxy/test.esp32-p4-idf.yaml
Normal file
@@ -0,0 +1,19 @@
<<: !include common.yaml

esp32_ble_tracker:
max_connections: 9

bluetooth_proxy:
active: true
connection_slots: 9

esp32_hosted:
active_high: true
variant: ESP32C6
reset_pin: GPIO54
cmd_pin: GPIO19
clk_pin: GPIO18
d0_pin: GPIO14
d1_pin: GPIO15
d2_pin: GPIO16
d3_pin: GPIO17
@@ -7,8 +7,8 @@ display:
id: ili9xxx_display
model: GC9A01A
invert_colors: True
cs_pin: 10
dc_pin: 6
cs_pin: 11
dc_pin: 7
pages:
- id: page1
lambda: |-
@@ -5,6 +5,7 @@ esp32:
advanced:
enable_lwip_mdns_queries: true
enable_lwip_bridge_interface: true
disable_libc_locks_in_iram: false # Test explicit opt-out of RAM optimization

wifi:
ssid: MySSID
@@ -4,6 +4,7 @@ esp32:
type: esp-idf
advanced:
execute_from_psram: true
disable_libc_locks_in_iram: true # Test default RAM optimization enabled

psram:
mode: octal
@@ -10,7 +10,7 @@ display:
invert_colors: true
show_test_card: true
spi_mode: mode0
draw_rounding: 8
draw_rounding: 4
use_axis_flips: true
init_sequence:
- [0xd0, 1, 2, 3]
@@ -1,7 +1,7 @@
substitutions:
dc_pin: GPIO14
cs_pin: GPIO13
enable_pin: GPIO16
enable_pin: GPIO17
reset_pin: GPIO20

packages:
@@ -73,9 +73,11 @@ def test_main_all_tests_should_run(
mock_should_run_clang_format.return_value = True
mock_should_run_python_linters.return_value = True

# Mock list-components.py output
# Mock list-components.py output (now returns JSON with --changed-with-deps)
mock_result = Mock()
mock_result.stdout = "wifi\napi\nsensor\n"
mock_result.stdout = json.dumps(
{"directly_changed": ["wifi", "api"], "all_changed": ["wifi", "api", "sensor"]}
)
mock_subprocess_run.return_value = mock_result

# Run main function with mocked argv
@@ -116,7 +118,7 @@ def test_main_no_tests_should_run(

# Mock empty list-components.py output
mock_result = Mock()
mock_result.stdout = ""
mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []})
mock_subprocess_run.return_value = mock_result

# Run main function with mocked argv
@@ -177,7 +179,9 @@ def test_main_with_branch_argument(

# Mock list-components.py output
mock_result = Mock()
mock_result.stdout = "mqtt\n"
mock_result.stdout = json.dumps(
{"directly_changed": ["mqtt"], "all_changed": ["mqtt"]}
)
mock_subprocess_run.return_value = mock_result

with patch("sys.argv", ["script.py", "-b", "main"]):
@@ -192,7 +196,7 @@ def test_main_with_branch_argument(
# Check that list-components.py was called with branch
mock_subprocess_run.assert_called_once()
call_args = mock_subprocess_run.call_args[0][0]
assert "--changed" in call_args
assert "--changed-with-deps" in call_args
assert "-b" in call_args
assert "main" in call_args

@@ -411,7 +415,12 @@ def test_main_filters_components_without_tests(
# Mock list-components.py output with 3 components
# wifi: has tests, sensor: has tests, airthings_ble: no tests
mock_result = Mock()
mock_result.stdout = "wifi\nsensor\nairthings_ble\n"
mock_result.stdout = json.dumps(
{
"directly_changed": ["wifi", "sensor"],
"all_changed": ["wifi", "sensor", "airthings_ble"],
}
)
mock_subprocess_run.return_value = mock_result

# Create test directory structure
@@ -436,6 +445,8 @@ def test_main_filters_components_without_tests(
patch.object(determine_jobs, "root_path", str(tmp_path)),
patch("sys.argv", ["determine-jobs.py"]),
):
# Clear the cache since we're mocking root_path
determine_jobs._component_has_tests.cache_clear()
determine_jobs.main()

# Check output
@@ -287,7 +287,7 @@ def test_perform_ota_no_auth(mock_socket: Mock, mock_file: io.BytesIO) -> None:

mock_socket.recv.side_effect = recv_responses

espota2.perform_ota(mock_socket, "", mock_file, "test.bin")
espota2.perform_ota(mock_socket, None, mock_file, "test.bin")

# Should not send any auth-related data
auth_calls = [
@@ -317,7 +317,7 @@ def test_perform_ota_with_compression(mock_socket: Mock) -> None:

mock_socket.recv.side_effect = recv_responses

espota2.perform_ota(mock_socket, "", mock_file, "test.bin")
espota2.perform_ota(mock_socket, None, mock_file, "test.bin")

# Verify compressed content was sent
# Get the binary size that was sent (4 bytes after features)
@@ -347,7 +347,7 @@ def test_perform_ota_auth_without_password(mock_socket: Mock) -> None:
with pytest.raises(
espota2.OTAError, match="ESP requests password, but no password given"
):
espota2.perform_ota(mock_socket, "", mock_file, "test.bin")
espota2.perform_ota(mock_socket, None, mock_file, "test.bin")


@pytest.mark.usefixtures("mock_time")
@@ -413,7 +413,7 @@ def test_perform_ota_sha256_auth_without_password(mock_socket: Mock) -> None:
with pytest.raises(
espota2.OTAError, match="ESP requests password, but no password given"
):
espota2.perform_ota(mock_socket, "", mock_file, "test.bin")
espota2.perform_ota(mock_socket, None, mock_file, "test.bin")


def test_perform_ota_unexpected_auth_response(mock_socket: Mock) -> None:
@@ -450,7 +450,7 @@ def test_perform_ota_unsupported_version(mock_socket: Mock) -> None:
mock_socket.recv.side_effect = responses

with pytest.raises(espota2.OTAError, match="Device uses unsupported OTA version"):
espota2.perform_ota(mock_socket, "", mock_file, "test.bin")
espota2.perform_ota(mock_socket, None, mock_file, "test.bin")


@pytest.mark.usefixtures("mock_time")
@@ -471,7 +471,7 @@ def test_perform_ota_upload_error(mock_socket: Mock, mock_file: io.BytesIO) -> N
mock_socket.recv.side_effect = recv_responses

with pytest.raises(espota2.OTAError, match="Error receiving acknowledge chunk OK"):
espota2.perform_ota(mock_socket, "", mock_file, "test.bin")
espota2.perform_ota(mock_socket, None, mock_file, "test.bin")


@pytest.mark.usefixtures("mock_socket_constructor", "mock_resolve_ip")
@@ -706,7 +706,7 @@ def test_perform_ota_version_differences(
]

mock_socket.recv.side_effect = recv_responses
espota2.perform_ota(mock_socket, "", mock_file, "test.bin")
espota2.perform_ota(mock_socket, None, mock_file, "test.bin")

# For v1.0, verify that we only get the expected number of recv calls
# v1.0 doesn't have chunk acknowledgments, so fewer recv calls
@@ -732,7 +732,7 @@ def test_perform_ota_version_differences(
]

mock_socket.recv.side_effect = recv_responses_v2
espota2.perform_ota(mock_socket, "", mock_file, "test.bin")
espota2.perform_ota(mock_socket, None, mock_file, "test.bin")

# For v2.0, verify more recv calls due to chunk acknowledgments
assert mock_socket.recv.call_count == 9 # v2.0 has 9 recv calls (includes chunk OK)
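The repeated "" to None substitutions in these hunks track a signature change: perform_ota now takes None to mean "no password" rather than an empty string. The distinction the tests rely on, in miniature; illustrative only, not the espota2 code:

    def describe_password(password: str | None) -> str:
        # With "" the "not configured" and "blank password" cases collapse into
        # one falsy value; with None the two cases stay distinguishable.
        if password is None:
            return "no password configured"
        if password == "":
            return "explicit empty password"
        return "password set"

    print(describe_password(None))  # no password configured
    print(describe_password(""))    # explicit empty password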
@@ -1062,7 +1062,7 @@ def test_upload_program_ota_with_file_arg(
assert exit_code == 0
assert host == "192.168.1.100"
mock_run_ota.assert_called_once_with(
["192.168.1.100"], 3232, "", Path("custom.bin")
["192.168.1.100"], 3232, None, Path("custom.bin")
)


@@ -1119,7 +1119,9 @@ def test_upload_program_ota_with_mqtt_resolution(
expected_firmware = (
tmp_path / ".esphome" / "build" / "test" / ".pioenvs" / "test" / "firmware.bin"
)
mock_run_ota.assert_called_once_with(["192.168.1.100"], 3232, "", expected_firmware)
mock_run_ota.assert_called_once_with(
["192.168.1.100"], 3232, None, expected_firmware
)


@patch("esphome.__main__.importlib.import_module")
@@ -1976,3 +1978,292 @@ def test_command_clean_all_args_used() -> None:
# Verify the correct configuration paths were passed
mock_clean_all.assert_any_call(["/path/to/config1"])
mock_clean_all.assert_any_call(["/path/to/config2", "/path/to/config3"])


def test_upload_program_ota_static_ip_with_mqttip(
mock_mqtt_get_ip: Mock,
mock_run_ota: Mock,
tmp_path: Path,
) -> None:
"""Test upload_program with static IP and MQTTIP (issue #11260).

This tests the scenario where a device has manual_ip (static IP) configured
and MQTT is also configured. The devices list contains both the static IP
and "MQTTIP" magic string. This previously failed because only the first
device was checked for MQTT resolution.
"""
setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path)

mock_mqtt_get_ip.return_value = ["192.168.2.50"]  # Different subnet
mock_run_ota.return_value = (0, "192.168.1.100")

config = {
CONF_OTA: [
{
CONF_PLATFORM: CONF_ESPHOME,
CONF_PORT: 3232,
}
],
CONF_MQTT: {
CONF_BROKER: "mqtt.local",
},
}
args = MockArgs(username="user", password="pass", client_id="client")
# Simulates choose_upload_log_host returning static IP + MQTTIP
devices = ["192.168.1.100", "MQTTIP"]

exit_code, host = upload_program(config, args, devices)

assert exit_code == 0
assert host == "192.168.1.100"

# Verify MQTT was resolved
mock_mqtt_get_ip.assert_called_once_with(config, "user", "pass", "client")

# Verify espota2.run_ota was called with both IPs
expected_firmware = (
tmp_path / ".esphome" / "build" / "test" / ".pioenvs" / "test" / "firmware.bin"
)
mock_run_ota.assert_called_once_with(
["192.168.1.100", "192.168.2.50"], 3232, None, expected_firmware
)


def test_upload_program_ota_multiple_mqttip_resolves_once(
mock_mqtt_get_ip: Mock,
mock_run_ota: Mock,
tmp_path: Path,
) -> None:
"""Test that MQTT resolution only happens once even with multiple MQTT magic strings."""
setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path)

mock_mqtt_get_ip.return_value = ["192.168.2.50", "192.168.2.51"]
mock_run_ota.return_value = (0, "192.168.2.50")

config = {
CONF_OTA: [
{
CONF_PLATFORM: CONF_ESPHOME,
CONF_PORT: 3232,
}
],
CONF_MQTT: {
CONF_BROKER: "mqtt.local",
},
}
args = MockArgs(username="user", password="pass", client_id="client")
# Multiple MQTT magic strings in the list
devices = ["MQTTIP", "MQTT", "192.168.1.100"]

exit_code, host = upload_program(config, args, devices)

assert exit_code == 0
assert host == "192.168.2.50"

# Verify MQTT was only resolved once despite multiple MQTT magic strings
mock_mqtt_get_ip.assert_called_once_with(config, "user", "pass", "client")

# Verify espota2.run_ota was called with all unique IPs
expected_firmware = (
tmp_path / ".esphome" / "build" / "test" / ".pioenvs" / "test" / "firmware.bin"
)
mock_run_ota.assert_called_once_with(
["192.168.2.50", "192.168.2.51", "192.168.1.100"], 3232, None, expected_firmware
)


def test_upload_program_ota_mqttip_deduplication(
mock_mqtt_get_ip: Mock,
mock_run_ota: Mock,
tmp_path: Path,
) -> None:
"""Test that duplicate IPs are filtered when MQTT returns same IP as static IP."""
setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path)

# MQTT returns the same IP as the static IP
mock_mqtt_get_ip.return_value = ["192.168.1.100"]
mock_run_ota.return_value = (0, "192.168.1.100")

config = {
CONF_OTA: [
{
CONF_PLATFORM: CONF_ESPHOME,
CONF_PORT: 3232,
}
],
CONF_MQTT: {
CONF_BROKER: "mqtt.local",
},
}
args = MockArgs(username="user", password="pass", client_id="client")
devices = ["192.168.1.100", "MQTTIP"]

exit_code, host = upload_program(config, args, devices)

assert exit_code == 0
assert host == "192.168.1.100"

# Verify MQTT was resolved
mock_mqtt_get_ip.assert_called_once_with(config, "user", "pass", "client")

# Verify espota2.run_ota was called with deduplicated IPs (only one instance of 192.168.1.100)
# Note: Current implementation doesn't dedupe, so we'll get the IP twice
# This test documents current behavior - deduplication could be future enhancement
mock_run_ota.assert_called_once()
call_args = mock_run_ota.call_args[0]
# Should contain both the original IP and MQTT-resolved IP (even if duplicate)
assert "192.168.1.100" in call_args[0]
@patch("esphome.components.api.client.run_logs")
|
||||
def test_show_logs_api_static_ip_with_mqttip(
|
||||
mock_run_logs: Mock,
|
||||
mock_mqtt_get_ip: Mock,
|
||||
) -> None:
|
||||
"""Test show_logs with static IP and MQTTIP (issue #11260).
|
||||
|
||||
This tests the scenario where a device has manual_ip (static IP) configured
|
||||
and MQTT is also configured. The devices list contains both the static IP
|
||||
and "MQTTIP" magic string.
|
||||
"""
|
||||
setup_core(
|
||||
config={
|
||||
"logger": {},
|
||||
CONF_API: {},
|
||||
CONF_MQTT: {CONF_BROKER: "mqtt.local"},
|
||||
},
|
||||
platform=PLATFORM_ESP32,
|
||||
)
|
||||
mock_run_logs.return_value = 0
|
||||
mock_mqtt_get_ip.return_value = ["192.168.2.50"]
|
||||
|
||||
args = MockArgs(username="user", password="pass", client_id="client")
|
||||
# Simulates choose_upload_log_host returning static IP + MQTTIP
|
||||
devices = ["192.168.1.100", "MQTTIP"]
|
||||
|
||||
result = show_logs(CORE.config, args, devices)
|
||||
|
||||
assert result == 0
|
||||
|
||||
# Verify MQTT was resolved
|
||||
mock_mqtt_get_ip.assert_called_once_with(CORE.config, "user", "pass", "client")
|
||||
|
||||
# Verify run_logs was called with both IPs
|
||||
mock_run_logs.assert_called_once_with(
|
||||
CORE.config, ["192.168.1.100", "192.168.2.50"]
|
||||
)
|
||||
|
||||
|
||||
@patch("esphome.components.api.client.run_logs")
|
||||
def test_show_logs_api_multiple_mqttip_resolves_once(
|
||||
mock_run_logs: Mock,
|
||||
mock_mqtt_get_ip: Mock,
|
||||
) -> None:
|
||||
"""Test that MQTT resolution only happens once for show_logs with multiple MQTT magic strings."""
|
||||
setup_core(
|
||||
config={
|
||||
"logger": {},
|
||||
CONF_API: {},
|
||||
CONF_MQTT: {CONF_BROKER: "mqtt.local"},
|
||||
},
|
||||
platform=PLATFORM_ESP32,
|
||||
)
|
||||
mock_run_logs.return_value = 0
|
||||
mock_mqtt_get_ip.return_value = ["192.168.2.50", "192.168.2.51"]
|
||||
|
||||
args = MockArgs(username="user", password="pass", client_id="client")
|
||||
# Multiple MQTT magic strings in the list
|
||||
devices = ["MQTTIP", "192.168.1.100", "MQTT"]
|
||||
|
||||
result = show_logs(CORE.config, args, devices)
|
||||
|
||||
assert result == 0
|
||||
|
||||
# Verify MQTT was only resolved once despite multiple MQTT magic strings
|
||||
mock_mqtt_get_ip.assert_called_once_with(CORE.config, "user", "pass", "client")
|
||||
|
||||
# Verify run_logs was called with all unique IPs (MQTT strings replaced with IPs)
|
||||
# Note: "MQTT" is a different magic string from "MQTTIP", but both trigger MQTT resolution
|
||||
# The _resolve_network_devices helper filters out both after first resolution
|
||||
mock_run_logs.assert_called_once_with(
|
||||
CORE.config, ["192.168.2.50", "192.168.2.51", "192.168.1.100"]
|
||||
)
|
||||
|
||||
|
||||
def test_upload_program_ota_mqtt_timeout_fallback(
|
||||
mock_mqtt_get_ip: Mock,
|
||||
mock_run_ota: Mock,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""Test upload_program falls back to other devices when MQTT times out."""
|
||||
setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path)
|
||||
|
||||
# MQTT times out
|
||||
mock_mqtt_get_ip.side_effect = EsphomeError("Failed to find IP via MQTT")
|
||||
mock_run_ota.return_value = (0, "192.168.1.100")
|
||||
|
||||
config = {
|
||||
CONF_OTA: [
|
||||
{
|
||||
CONF_PLATFORM: CONF_ESPHOME,
|
||||
CONF_PORT: 3232,
|
||||
}
|
||||
],
|
||||
CONF_MQTT: {
|
||||
CONF_BROKER: "mqtt.local",
|
||||
},
|
||||
}
|
||||
args = MockArgs(username="user", password="pass", client_id="client")
|
||||
# Static IP first, MQTTIP second
|
||||
devices = ["192.168.1.100", "MQTTIP"]
|
||||
|
||||
exit_code, host = upload_program(config, args, devices)
|
||||
|
||||
# Should succeed using the static IP even though MQTT failed
|
||||
assert exit_code == 0
|
||||
assert host == "192.168.1.100"
|
||||
|
||||
# Verify MQTT was attempted
|
||||
mock_mqtt_get_ip.assert_called_once_with(config, "user", "pass", "client")
|
||||
|
||||
# Verify espota2.run_ota was called with only the static IP (MQTT failed)
|
||||
expected_firmware = (
|
||||
tmp_path / ".esphome" / "build" / "test" / ".pioenvs" / "test" / "firmware.bin"
|
||||
)
|
||||
mock_run_ota.assert_called_once_with(
|
||||
["192.168.1.100"], 3232, None, expected_firmware
|
||||
)
|
||||
|
||||
|
||||
@patch("esphome.components.api.client.run_logs")
|
||||
def test_show_logs_api_mqtt_timeout_fallback(
|
||||
mock_run_logs: Mock,
|
||||
mock_mqtt_get_ip: Mock,
|
||||
) -> None:
|
||||
"""Test show_logs falls back to other devices when MQTT times out."""
|
||||
setup_core(
|
||||
config={
|
||||
"logger": {},
|
||||
CONF_API: {},
|
||||
CONF_MQTT: {CONF_BROKER: "mqtt.local"},
|
||||
},
|
||||
platform=PLATFORM_ESP32,
|
||||
)
|
||||
mock_run_logs.return_value = 0
|
||||
# MQTT times out
|
||||
mock_mqtt_get_ip.side_effect = EsphomeError("Failed to find IP via MQTT")
|
||||
|
||||
args = MockArgs(username="user", password="pass", client_id="client")
|
||||
# Static IP first, MQTTIP second
|
||||
devices = ["192.168.1.100", "MQTTIP"]
|
||||
|
||||
result = show_logs(CORE.config, args, devices)
|
||||
|
||||
# Should succeed using the static IP even though MQTT failed
|
||||
assert result == 0
|
||||
|
||||
# Verify MQTT was attempted
|
||||
mock_mqtt_get_ip.assert_called_once_with(CORE.config, "user", "pass", "client")
|
||||
|
||||
# Verify run_logs was called with only the static IP (MQTT failed)
|
||||
mock_run_logs.assert_called_once_with(CORE.config, ["192.168.1.100"])
|
||||
|
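These tests pin down the device-resolution behavior referred to above as the _resolve_network_devices helper: MQTT magic strings are replaced by broker-resolved IPs, the broker is queried at most once, and a resolution failure simply falls back to the remaining concrete addresses. A condensed sketch of that behavior, inferred from the assertions and not taken from the actual esphome implementation:

    MQTT_MAGIC = {"MQTT", "MQTTIP"}  # magic strings used in the tests above

    def resolve_network_devices(devices: list[str], resolve_mqtt_ips) -> list[str]:
        # Replace MQTT magic strings with broker-resolved IPs, resolving at most
        # once; on failure, keep going with the concrete addresses we have.
        resolved: list[str] = []
        mqtt_ips: list[str] | None = None
        for device in devices:
            if device in MQTT_MAGIC:
                if mqtt_ips is None:
                    try:
                        mqtt_ips = resolve_mqtt_ips()
                    except Exception:  # the real code would catch EsphomeError
                        mqtt_ips = []
                    resolved.extend(mqtt_ips)
                # later magic strings add nothing; resolution already happened
            else:
                resolved.append(device)
        return resolved

    print(resolve_network_devices(["MQTTIP", "MQTT", "192.168.1.100"], lambda: ["192.168.2.50"]))
    # -> ['192.168.2.50', '192.168.1.100']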