mirror of https://github.com/esphome/esphome.git
synced 2025-11-04 00:51:49 +00:00

Compare commits

80 Commits

2023.11.0b...jesserockz
| Author | SHA1 | Date |
|---|---|---|
|  | df6ac61148 |  |
|  | 8fbb4e27d1 |  |
|  | 3c243e663f |  |
|  | 288af1f4d2 |  |
|  | 6f8d7c6acd |  |
|  | 32e3f26239 |  |
|  | 5464368c08 |  |
|  | 208edf89dc |  |
|  | fefdb80fdc |  |
|  | 754bd5b7be |  |
|  | 10a9129b7b |  |
|  | ef945d298c |  |
|  | 149d814fab |  |
|  | 5f1d8dfa5b |  |
|  | 3644853d38 |  |
|  | 4e3170dc95 |  |
|  | c795dbde26 |  |
|  | 4ce627b4ee |  |
|  | 86b4fdc139 |  |
|  | 20ea8bf06e |  |
|  | 642db6d92b |  |
|  | 4aac5a23cd |  |
|  | c536c976b7 |  |
|  | 214b419db2 |  |
|  | 0c18872888 |  |
|  | 197b6b4275 |  |
|  | 4e8bdc2155 |  |
|  | f1e8622187 |  |
|  | e0c7a02fbc |  |
|  | cdcb25be8e |  |
|  | aecc6655db |  |
|  | 2754ddec1b |  |
|  | 2a20a5fc11 |  |
|  | 7100d073f8 |  |
|  | 1ac6cf2ff9 |  |
|  | 2ee089c9d5 |  |
|  | bd568eecf5 |  |
|  | ae0e481cff |  |
|  | f198be39d7 |  |
|  | 08fc96b890 |  |
|  | 8c28bea5b1 |  |
|  | 00eedeb8b3 |  |
|  | 0a4853ba7b |  |
|  | 3e2b83acb0 |  |
|  | c1eb5bd675 |  |
|  | a9772ebf3f |  |
|  | 45276cc244 |  |
|  | a9a17ee89d |  |
|  | f094702a16 |  |
|  | 908f56ff46 |  |
|  | bd5905c59a |  |
|  | 91299f05f7 |  |
|  | 30e5ff9fff |  |
|  | 163b38e153 |  |
|  | 3b486084c8 |  |
|  | 684cf10230 |  |
|  | 63a277ba80 |  |
|  | 53f3385c49 |  |
|  | 51930a0243 |  |
|  | 6a5cea171e |  |
|  | 3363c8f434 |  |
|  | 3b891bc146 |  |
|  | 0f19450ab4 |  |
|  | 98ec798bfc |  |
|  | 01d28ce3fc |  |
|  | 9d453f0ba2 |  |
|  | 799851a83a |  |
|  | bc7519f645 |  |
|  | 7a9866f1b6 |  |
|  | 3d30f1f733 |  |
|  | 1e55764d52 |  |
|  | 28513a0502 |  |
|  | 3e3266fa74 |  |
|  | ce020b1f9f |  |
|  | d394b957d1 |  |
|  | cf22c55430 |  |
|  | 511348974e |  |
|  | 972598a698 |  |
|  | d81bec860b |  |
|  | fde7a04ee7 |  |

.github/actions/build-image/action.yaml (vendored, new file, 97 lines)
@@ -0,0 +1,97 @@
name: Build Image
inputs:
  platform:
    description: "Platform to build for"
    required: true
    example: "linux/amd64"
  target:
    description: "Target to build"
    required: true
    example: "docker"
  baseimg:
    description: "Base image type"
    required: true
    example: "docker"
  suffix:
    description: "Suffix to add to tags"
    required: true
  version:
    description: "Version to build"
    required: true
    example: "2023.12.0"
runs:
  using: "composite"
  steps:
    - name: Generate short tags
      id: tags
      shell: bash
      run: |
        output=$(docker/generate_tags.py \
          --tag "${{ inputs.version }}" \
          --suffix "${{ inputs.suffix }}")
        echo $output
        for l in $output; do
          echo $l >> $GITHUB_OUTPUT
        done

    - name: Build and push to ghcr by digest
      id: build-ghcr
      uses: docker/build-push-action@v5.0.0
      with:
        context: .
        file: ./docker/Dockerfile
        platforms: ${{ inputs.platform }}
        target: ${{ inputs.target }}
        cache-from: type=gha
        cache-to: type=gha,mode=max
        build-args: |
          BASEIMGTYPE=${{ inputs.baseimg }}
          BUILD_VERSION=${{ inputs.version }}
        outputs: |
          type=image,name=ghcr.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true

    - name: Export ghcr digests
      shell: bash
      run: |
        mkdir -p /tmp/digests/${{ inputs.target }}/ghcr
        digest="${{ steps.build-ghcr.outputs.digest }}"
        touch "/tmp/digests/${{ inputs.target }}/ghcr/${digest#sha256:}"

    - name: Upload ghcr digest
      uses: actions/upload-artifact@v3.1.3
      with:
        name: digests-${{ inputs.target }}-ghcr
        path: /tmp/digests/${{ inputs.target }}/ghcr/*
        if-no-files-found: error
        retention-days: 1

    - name: Build and push to dockerhub by digest
      id: build-dockerhub
      uses: docker/build-push-action@v5.0.0
      with:
        context: .
        file: ./docker/Dockerfile
        platforms: ${{ inputs.platform }}
        target: ${{ inputs.target }}
        cache-from: type=gha
        cache-to: type=gha,mode=max
        build-args: |
          BASEIMGTYPE=${{ inputs.baseimg }}
          BUILD_VERSION=${{ inputs.version }}
        outputs: |
          type=image,name=docker.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true

    - name: Export dockerhub digests
      shell: bash
      run: |
        mkdir -p /tmp/digests/${{ inputs.target }}/dockerhub
        digest="${{ steps.build-dockerhub.outputs.digest }}"
        touch "/tmp/digests/${{ inputs.target }}/dockerhub/${digest#sha256:}"

    - name: Upload dockerhub digest
      uses: actions/upload-artifact@v3.1.3
      with:
        name: digests-${{ inputs.target }}-dockerhub
        path: /tmp/digests/${{ inputs.target }}/dockerhub/*
        if-no-files-found: error
        retention-days: 1

.github/workflows/release.yml (vendored, 130 lines changed)
@@ -63,30 +63,20 @@ jobs:
        run: twine upload dist/*

  deploy-docker:
    name: Build and publish ESPHome ${{ matrix.image.title}}
    name: Build ESPHome ${{ matrix.platform }}
    if: github.repository == 'esphome/esphome'
    permissions:
      contents: read
      packages: write
    runs-on: ubuntu-latest
    continue-on-error: ${{ matrix.image.title == 'lint' }}
    needs: [init]
    strategy:
      fail-fast: false
      matrix:
        image:
          - title: "ha-addon"
            suffix: "hassio"
            target: "hassio"
            baseimg: "hassio"
          - title: "docker"
            suffix: ""
            target: "docker"
            baseimg: "docker"
          - title: "lint"
            suffix: "lint"
            target: "lint"
            baseimg: "docker"
        platform:
          - linux/amd64
          - linux/arm/v7
          - linux/arm64
    steps:
      - uses: actions/checkout@v4.1.1
      - name: Set up Python
@@ -97,6 +87,7 @@ jobs:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.0.0
      - name: Set up QEMU
        if: matrix.platform != 'linux/amd64'
        uses: docker/setup-qemu-action@v3.0.0

      - name: Log in to docker hub
@@ -111,34 +102,105 @@ jobs:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build docker
        uses: ./.github/actions/build-image
        with:
          platform: ${{ matrix.platform }}
          target: docker
          baseimg: docker
          suffix: ""
          version: ${{ needs.init.outputs.tag }}

      - name: Build ha-addon
        uses: ./.github/actions/build-image
        with:
          platform: ${{ matrix.platform }}
          target: hassio
          baseimg: hassio
          suffix: "hassio"
          version: ${{ needs.init.outputs.tag }}

      - name: Build lint
        uses: ./.github/actions/build-image
        with:
          platform: ${{ matrix.platform }}
          target: lint
          baseimg: docker
          suffix: lint
          version: ${{ needs.init.outputs.tag }}

  deploy-manifest:
    name: Publish ESPHome ${{ matrix.image.title }} to ${{ matrix.registry }}
    runs-on: ubuntu-latest
    needs:
      - init
      - deploy-docker
    if: github.repository == 'esphome/esphome'
    permissions:
      contents: read
      packages: write
    strategy:
      fail-fast: false
      matrix:
        image:
          - title: "ha-addon"
            target: "hassio"
            suffix: "hassio"
          - title: "docker"
            target: "docker"
            suffix: ""
          - title: "lint"
            target: "lint"
            suffix: "lint"
        registry:
          - ghcr
          - dockerhub
    steps:
      - uses: actions/checkout@v4.1.1
      - name: Download digests
        uses: actions/download-artifact@v3.0.2
        with:
          name: digests-${{ matrix.image.target }}-${{ matrix.registry }}
          path: /tmp/digests
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.0.0

      - name: Log in to docker hub
        if: matrix.registry == 'dockerhub'
        uses: docker/login-action@v3.0.0
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
        if: matrix.registry == 'ghcr'
        uses: docker/login-action@v3.0.0
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate short tags
        id: tags
        run: |
          docker/generate_tags.py \
          output=$(docker/generate_tags.py \
            --tag "${{ needs.init.outputs.tag }}" \
            --suffix "${{ matrix.image.suffix }}"
            --suffix "${{ matrix.image.suffix }}" \
            --registry "${{ matrix.registry }}")
          echo $output
          for l in $output; do
            echo $l >> $GITHUB_OUTPUT
          done

      - name: Build and push
        uses: docker/build-push-action@v5.0.0
        with:
          context: .
          file: ./docker/Dockerfile
          platforms: linux/amd64,linux/arm/v7,linux/arm64
          target: ${{ matrix.image.target }}
          push: true
          # yamllint disable rule:line-length
          cache-from: type=registry,ref=ghcr.io/${{ steps.tags.outputs.image }}:cache-${{ steps.tags.outputs.channel }}
          cache-to: type=registry,ref=ghcr.io/${{ steps.tags.outputs.image }}:cache-${{ steps.tags.outputs.channel }},mode=max
          # yamllint enable rule:line-length
          tags: ${{ steps.tags.outputs.tags }}
          build-args: |
            BASEIMGTYPE=${{ matrix.image.baseimg }}
            BUILD_VERSION=${{ needs.init.outputs.tag }}
      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -Rcnr 'inputs | . / "," | map("-t " + .) | join(" ")' <<< "${{ steps.tags.outputs.tags}}") \
            $(printf '${{ steps.tags.outputs.image }}@sha256:%s ' *)

  deploy-ha-addon-repo:
    if: github.repository == 'esphome/esphome' && github.event_name == 'release'
    runs-on: ubuntu-latest
    needs: [deploy-docker]
    needs: [deploy-manifest]
    steps:
      - name: Trigger Workflow
        uses: actions/github-script@v6.4.1
@@ -3,7 +3,7 @@
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/psf/black-pre-commit-mirror
    rev: 23.10.1
    rev: 23.11.0
    hooks:
      - id: black
        args:
@@ -246,6 +246,7 @@ esphome/components/radon_eye_rd200/* @jeffeb3
esphome/components/rc522/* @glmnet
esphome/components/rc522_i2c/* @glmnet
esphome/components/rc522_spi/* @glmnet
esphome/components/resistance_sampler/* @jesserockz
esphome/components/restart/* @esphome/core
esphome/components/rf_bridge/* @jesserockz
esphome/components/rgbct/* @jesserockz
@@ -5,6 +5,7 @@
# One of "docker", "hassio"
ARG BASEIMGTYPE=docker


# https://github.com/hassio-addons/addon-debian-base/releases
FROM ghcr.io/hassio-addons/debian-base:7.2.0 AS base-hassio
# https://hub.docker.com/_/debian?tab=tags&page=1&name=bookworm
@@ -12,9 +13,10 @@ FROM debian:12.2-slim AS base-docker

FROM base-${BASEIMGTYPE} AS base


ARG TARGETARCH
ARG TARGETVARIANT
ARG PIP_EXTRA_INDEX_URL


# Note that --break-system-packages is used below because
# https://peps.python.org/pep-0668/ added a safety check that prevents
@@ -59,21 +61,24 @@ ENV \
  # Fix click python3 lang warning https://click.palletsprojects.com/en/7.x/python3/
  LANG=C.UTF-8 LC_ALL=C.UTF-8 \
  # Store globally installed pio libs in /piolibs
  PLATFORMIO_GLOBALLIB_DIR=/piolibs \
  PIP_EXTRA_INDEX_URL=${PIP_EXTRA_INDEX_URL}
  PLATFORMIO_GLOBALLIB_DIR=/piolibs

# Support legacy binaries on Debian multiarch system. There is no "correct" way
# to do this, other than using properly built toolchains...
# See: https://unix.stackexchange.com/questions/553743/correct-way-to-add-lib-ld-linux-so-3-in-debian
RUN \
    if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        ln -s /lib/arm-linux-gnueabihf/ld-linux.so.3 /lib/ld-linux.so.3; \
        ln -s /lib/arm-linux-gnueabihf/ld-linux-armhf.so.3 /lib/ld-linux.so.3; \
    fi

RUN \
    # Ubuntu python3-pip is missing wheel
    pip3 install --break-system-packages --no-cache-dir \
        platformio==6.1.11 \
    if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
    fi; \
    pip3 install \
    --break-system-packages --no-cache-dir \
    platformio==6.1.11 \
    # Change some platformio settings
    && platformio settings set enable_telemetry No \
    && platformio settings set check_platformio_interval 1000000 \
@@ -84,8 +89,12 @@ RUN \
# tmpfs is for https://github.com/rust-lang/cargo/issues/8719

COPY requirements.txt requirements_optional.txt script/platformio_install_deps.py platformio.ini /
RUN --mount=type=tmpfs,target=/root/.cargo CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse CARGO_HOME=/root/.cargo \
    pip3 install --break-system-packages --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
RUN --mount=type=tmpfs,target=/root/.cargo if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
    fi; \
    CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse CARGO_HOME=/root/.cargo \
    pip3 install \
    --break-system-packages --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
    && /platformio_install_deps.py /platformio.ini --libraries


@@ -94,7 +103,11 @@ FROM base AS docker

# Copy esphome and install
COPY . /esphome
RUN pip3 install --break-system-packages --no-cache-dir --no-use-pep517 -e /esphome
RUN if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
  fi; \
  pip3 install \
  --break-system-packages --no-cache-dir --no-use-pep517 -e /esphome

# Settings for dashboard
ENV USERNAME="" PASSWORD=""
@@ -140,7 +153,11 @@ COPY docker/ha-addon-rootfs/ /

# Copy esphome and install
COPY . /esphome
RUN pip3 install --break-system-packages --no-cache-dir --no-use-pep517 -e /esphome
RUN if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
  fi; \
  pip3 install \
  --break-system-packages --no-cache-dir --no-use-pep517 -e /esphome

# Labels
LABEL \
@@ -176,7 +193,11 @@ RUN \
        /var/lib/apt/lists/*

COPY requirements_test.txt /
RUN pip3 install --break-system-packages --no-cache-dir -r /requirements_test.txt
RUN if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
        export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
  fi; \
  pip3 install \
  --break-system-packages --no-cache-dir -r /requirements_test.txt

VOLUME ["/esphome"]
WORKDIR /esphome
@@ -143,25 +143,15 @@ def main():
        imgs = [f"{params.build_to}:{tag}" for tag in tags_to_push]
        imgs += [f"ghcr.io/{params.build_to}:{tag}" for tag in tags_to_push]

        build_args = [
            "--build-arg",
            f"BASEIMGTYPE={params.baseimgtype}",
            "--build-arg",
            f"BUILD_VERSION={args.tag}",
        ]

        if args.arch == ARCH_ARMV7:
            build_args += [
                "--build-arg",
                "PIP_EXTRA_INDEX_URL=https://www.piwheels.org/simple",
            ]

        # 3. build
        cmd = [
            "docker",
            "buildx",
            "build",
            *build_args,
            "--build-arg",
            f"BASEIMGTYPE={params.baseimgtype}",
            "--build-arg",
            f"BUILD_VERSION={args.tag}",
            "--cache-from",
            f"type=registry,ref={cache_img}",
            "--file",
@@ -1,13 +1,14 @@
#!/usr/bin/env python3
import re
import os
import argparse
import json

CHANNEL_DEV = "dev"
CHANNEL_BETA = "beta"
CHANNEL_RELEASE = "release"

GHCR = "ghcr"
DOCKERHUB = "dockerhub"

parser = argparse.ArgumentParser()
parser.add_argument(
    "--tag",
@@ -21,21 +22,31 @@ parser.add_argument(
    required=True,
    help="The suffix of the tag.",
)
parser.add_argument(
    "--registry",
    type=str,
    choices=[GHCR, DOCKERHUB],
    required=False,
    action="append",
    help="The registry to build tags for.",
)


def main():
    args = parser.parse_args()

    # detect channel from tag
    match = re.match(r"^(\d+\.\d+)(?:\.\d+)?(b\d+)?$", args.tag)
    match = re.match(r"^(\d+\.\d+)(?:\.\d+)(?:(b\d+)|(-dev\d+))?$", args.tag)
    major_minor_version = None
    if match is None:
    if match is None:  # eg 2023.12.0-dev20231109-testbranch
        channel = None  # Ran with custom tag for a branch etc
    elif match.group(3) is not None:  # eg 2023.12.0-dev20231109
        channel = CHANNEL_DEV
    elif match.group(2) is None:
    elif match.group(2) is not None:  # eg 2023.12.0b1
        channel = CHANNEL_BETA
    else:  # eg 2023.12.0
        major_minor_version = match.group(1)
        channel = CHANNEL_RELEASE
    else:
        channel = CHANNEL_BETA

    tags_to_push = [args.tag]
    if channel == CHANNEL_DEV:
@@ -53,15 +64,28 @@ def main():

    suffix = f"-{args.suffix}" if args.suffix else ""

    with open(os.environ["GITHUB_OUTPUT"], "w") as f:
        print(f"channel={channel}", file=f)
        print(f"image=esphome/esphome{suffix}", file=f)
        full_tags = []
    image_name = f"esphome/esphome{suffix}"

        for tag in tags_to_push:
            full_tags += [f"ghcr.io/esphome/esphome{suffix}:{tag}"]
            full_tags += [f"esphome/esphome{suffix}:{tag}"]
        print(f"tags={','.join(full_tags)}", file=f)
    print(f"channel={channel}")

    if args.registry is None:
        args.registry = [GHCR, DOCKERHUB]
    elif len(args.registry) == 1:
        if GHCR in args.registry:
            print(f"image=ghcr.io/{image_name}")
        if DOCKERHUB in args.registry:
            print(f"image=docker.io/{image_name}")

    print(f"image_name={image_name}")

    full_tags = []

    for tag in tags_to_push:
        if GHCR in args.registry:
            full_tags += [f"ghcr.io/{image_name}:{tag}"]
        if DOCKERHUB in args.registry:
            full_tags += [f"docker.io/{image_name}:{tag}"]
    print(f"tags={','.join(full_tags)}")


if __name__ == "__main__":
@@ -514,7 +514,7 @@ def command_clean(args, config):
def command_dashboard(args):
    from esphome.dashboard import dashboard

    return dashboard.start_web_server(args)
    return dashboard.start_dashboard(args)


def command_update_all(args):
@@ -1,23 +1,29 @@
from __future__ import annotations

import asyncio
import logging
from datetime import datetime
from typing import Optional
from typing import Any

from aioesphomeapi import APIClient, ReconnectLogic, APIConnectionError, LogLevel
import zeroconf
from aioesphomeapi import APIClient
from aioesphomeapi.api_pb2 import SubscribeLogsResponse
from aioesphomeapi.log_runner import async_run

from esphome.const import CONF_KEY, CONF_PASSWORD, CONF_PORT, __version__
from esphome.core import CORE

from esphome.const import CONF_KEY, CONF_PORT, CONF_PASSWORD, __version__
from esphome.util import safe_print
from . import CONF_ENCRYPTION

_LOGGER = logging.getLogger(__name__)


async def async_run_logs(config, address):
async def async_run_logs(config: dict[str, Any], address: str) -> None:
    """Run the logs command in the event loop."""
    conf = config["api"]
    name = config["esphome"]["name"]
    port: int = int(conf[CONF_PORT])
    password: str = conf[CONF_PASSWORD]
    noise_psk: Optional[str] = None
    noise_psk: str | None = None
    if CONF_ENCRYPTION in conf:
        noise_psk = conf[CONF_ENCRYPTION][CONF_KEY]
    _LOGGER.info("Starting log output from %s using esphome API", address)
@@ -28,44 +34,27 @@ async def async_run_logs(config, address):
        client_info=f"ESPHome Logs {__version__}",
        noise_psk=noise_psk,
    )
    first_connect = True
    dashboard = CORE.dashboard

    def on_log(msg):
        time_ = datetime.now().time().strftime("[%H:%M:%S]")
        text = msg.message.decode("utf8", "backslashreplace")
        safe_print(time_ + text)

    async def on_connect():
        nonlocal first_connect
        try:
            await cli.subscribe_logs(
                on_log,
                log_level=LogLevel.LOG_LEVEL_VERY_VERBOSE,
                dump_config=first_connect,
            )
            first_connect = False
        except APIConnectionError:
            cli.disconnect()

    async def on_disconnect(expected_disconnect: bool) -> None:
        _LOGGER.warning("Disconnected from API")

    zc = zeroconf.Zeroconf()
    reconnect = ReconnectLogic(
        client=cli,
        on_connect=on_connect,
        on_disconnect=on_disconnect,
        zeroconf_instance=zc,
    )
    await reconnect.start()
    def on_log(msg: SubscribeLogsResponse) -> None:
        """Handle a new log message."""
        time_ = datetime.now()
        message: bytes = msg.message
        text = message.decode("utf8", "backslashreplace")
        if dashboard:
            text = text.replace("\033", "\\033")
        print(f"[{time_.hour:02}:{time_.minute:02}:{time_.second:02}]{text}")

    stop = await async_run(cli, on_log, name=name)
    try:
        while True:
            await asyncio.sleep(60)
        await asyncio.Event().wait()
    finally:
        await stop()


def run_logs(config: dict[str, Any], address: str) -> None:
    """Run the logs command."""
    try:
        asyncio.run(async_run_logs(config, address))
    except KeyboardInterrupt:
        await reconnect.stop()
        zc.close()


def run_logs(config, address):
    asyncio.run(async_run_logs(config, address))
        pass
@@ -1,38 +1,37 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import (
    climate,
    remote_transmitter,
    remote_receiver,
    sensor,
    remote_base,
)
from esphome.components.remote_base import CONF_RECEIVER_ID, CONF_TRANSMITTER_ID
from esphome.components import climate, sensor, remote_base
from esphome.const import CONF_SUPPORTS_COOL, CONF_SUPPORTS_HEAT, CONF_SENSOR

DEPENDENCIES = ["remote_transmitter"]
AUTO_LOAD = ["sensor", "remote_base"]
CODEOWNERS = ["@glmnet"]

climate_ir_ns = cg.esphome_ns.namespace("climate_ir")
ClimateIR = climate_ir_ns.class_(
    "ClimateIR", climate.Climate, cg.Component, remote_base.RemoteReceiverListener
    "ClimateIR",
    climate.Climate,
    cg.Component,
    remote_base.RemoteReceiverListener,
    remote_base.RemoteTransmittable,
)

CLIMATE_IR_SCHEMA = climate.CLIMATE_SCHEMA.extend(
    {
        cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(
            remote_transmitter.RemoteTransmitterComponent
        ),
        cv.Optional(CONF_SUPPORTS_COOL, default=True): cv.boolean,
        cv.Optional(CONF_SUPPORTS_HEAT, default=True): cv.boolean,
        cv.Optional(CONF_SENSOR): cv.use_id(sensor.Sensor),
    }
).extend(cv.COMPONENT_SCHEMA)
CLIMATE_IR_SCHEMA = (
    climate.CLIMATE_SCHEMA.extend(
        {
            cv.Optional(CONF_SUPPORTS_COOL, default=True): cv.boolean,
            cv.Optional(CONF_SUPPORTS_HEAT, default=True): cv.boolean,
            cv.Optional(CONF_SENSOR): cv.use_id(sensor.Sensor),
        }
    )
    .extend(cv.COMPONENT_SCHEMA)
    .extend(remote_base.REMOTE_TRANSMITTABLE_SCHEMA)
)

CLIMATE_IR_WITH_RECEIVER_SCHEMA = CLIMATE_IR_SCHEMA.extend(
    {
        cv.Optional(CONF_RECEIVER_ID): cv.use_id(
            remote_receiver.RemoteReceiverComponent
        cv.Optional(remote_base.CONF_RECEIVER_ID): cv.use_id(
            remote_base.RemoteReceiverBase
        ),
    }
)
@@ -41,15 +40,11 @@ CLIMATE_IR_WITH_RECEIVER_SCHEMA = CLIMATE_IR_SCHEMA.extend(
async def register_climate_ir(var, config):
    await cg.register_component(var, config)
    await climate.register_climate(var, config)

    await remote_base.register_transmittable(var, config)
    cg.add(var.set_supports_cool(config[CONF_SUPPORTS_COOL]))
    cg.add(var.set_supports_heat(config[CONF_SUPPORTS_HEAT]))
    if remote_base.CONF_RECEIVER_ID in config:
        await remote_base.register_listener(var, config)
    if sensor_id := config.get(CONF_SENSOR):
        sens = await cg.get_variable(sensor_id)
        cg.add(var.set_sensor(sens))
    if receiver_id := config.get(CONF_RECEIVER_ID):
        receiver = await cg.get_variable(receiver_id)
        cg.add(receiver.register_listener(var))

    transmitter = await cg.get_variable(config[CONF_TRANSMITTER_ID])
    cg.add(var.set_transmitter(transmitter))
@@ -18,7 +18,10 @@ namespace climate_ir {
    Likewise to decode a IR into the AC state, implement
      bool RemoteReceiverListener::on_receive(remote_base::RemoteReceiveData data) and return true
*/
class ClimateIR : public climate::Climate, public Component, public remote_base::RemoteReceiverListener {
class ClimateIR : public Component,
                  public climate::Climate,
                  public remote_base::RemoteReceiverListener,
                  public remote_base::RemoteTransmittable {
 public:
  ClimateIR(float minimum_temperature, float maximum_temperature, float temperature_step = 1.0f,
            bool supports_dry = false, bool supports_fan_only = false, std::set<climate::ClimateFanMode> fan_modes = {},
@@ -35,9 +38,6 @@ class ClimateIR : public climate::Climate, public Component, public remote_base:

  void setup() override;
  void dump_config() override;
  void set_transmitter(remote_transmitter::RemoteTransmitterComponent *transmitter) {
    this->transmitter_ = transmitter;
  }
  void set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; }
  void set_supports_heat(bool supports_heat) { this->supports_heat_ = supports_heat; }
  void set_sensor(sensor::Sensor *sensor) { this->sensor_ = sensor; }
@@ -64,7 +64,6 @@ class ClimateIR : public climate::Climate, public Component, public remote_base:
  std::set<climate::ClimateSwingMode> swing_modes_ = {};
  std::set<climate::ClimatePreset> presets_ = {};

  remote_transmitter::RemoteTransmitterComponent *transmitter_;
  sensor::Sensor *sensor_{nullptr};
};
@@ -102,11 +102,7 @@ void CoolixClimate::transmit_state() {
    }
  }
  ESP_LOGV(TAG, "Sending coolix code: 0x%06" PRIX32, remote_state);

  auto transmit = this->transmitter_->transmit();
  auto *data = transmit.get_data();
  remote_base::CoolixProtocol().encode(data, remote_state);
  transmit.perform();
  this->transmit_<remote_base::CoolixProtocol>(remote_state);
}

bool CoolixClimate::on_coolix(climate::Climate *parent, remote_base::RemoteReceiveData data) {
@@ -3,23 +3,26 @@ from typing import Union, Optional
from pathlib import Path
import logging
import os
import esphome.final_validate as fv

from esphome.helpers import copy_file_if_changed, write_file_if_changed, mkdir_p
from esphome.const import (
    CONF_ADVANCED,
    CONF_BOARD,
    CONF_COMPONENTS,
    CONF_ESPHOME,
    CONF_FRAMEWORK,
    CONF_IGNORE_EFUSE_MAC_CRC,
    CONF_NAME,
    CONF_PATH,
    CONF_PLATFORMIO_OPTIONS,
    CONF_REF,
    CONF_REFRESH,
    CONF_SOURCE,
    CONF_TYPE,
    CONF_URL,
    CONF_VARIANT,
    CONF_VERSION,
    CONF_ADVANCED,
    CONF_REFRESH,
    CONF_PATH,
    CONF_URL,
    CONF_REF,
    CONF_IGNORE_EFUSE_MAC_CRC,
    KEY_CORE,
    KEY_FRAMEWORK_VERSION,
    KEY_NAME,
@@ -327,6 +330,32 @@ def _detect_variant(value):
    return value


def final_validate(config):
    if CONF_PLATFORMIO_OPTIONS not in fv.full_config.get()[CONF_ESPHOME]:
        return config

    pio_flash_size_key = "board_upload.flash_size"
    pio_partitions_key = "board_build.partitions"
    if (
        CONF_PARTITIONS in config
        and pio_partitions_key
        in fv.full_config.get()[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS]
    ):
        raise cv.Invalid(
            f"Do not specify '{pio_partitions_key}' in '{CONF_PLATFORMIO_OPTIONS}' with '{CONF_PARTITIONS}' in esp32"
        )

    if (
        pio_flash_size_key
        in fv.full_config.get()[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS]
    ):
        raise cv.Invalid(
            f"Please specify {CONF_FLASH_SIZE} within esp32 configuration only"
        )

    return config


CONF_PLATFORM_VERSION = "platform_version"

ARDUINO_FRAMEWORK_SCHEMA = cv.All(
@@ -340,6 +369,13 @@ ARDUINO_FRAMEWORK_SCHEMA = cv.All(
    _arduino_check_versions,
)


def _check_component_type(config):
    if config[CONF_SOURCE][CONF_TYPE] == TYPE_LOCAL:
        raise cv.Invalid("Local components are not implemented yet.")
    return config


CONF_SDKCONFIG_OPTIONS = "sdkconfig_options"
ESP_IDF_FRAMEWORK_SCHEMA = cv.All(
    cv.Schema(
@@ -356,15 +392,18 @@ ESP_IDF_FRAMEWORK_SCHEMA = cv.All(
                }
            ),
            cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
                cv.Schema(
                    {
                        cv.Required(CONF_NAME): cv.string_strict,
                        cv.Required(CONF_SOURCE): cv.SOURCE_SCHEMA,
                        cv.Optional(CONF_PATH): cv.string,
                        cv.Optional(CONF_REFRESH, default="1d"): cv.All(
                            cv.string, cv.source_refresh
                        ),
                    }
                cv.All(
                    cv.Schema(
                        {
                            cv.Required(CONF_NAME): cv.string_strict,
                            cv.Required(CONF_SOURCE): cv.SOURCE_SCHEMA,
                            cv.Optional(CONF_PATH): cv.string,
                            cv.Optional(CONF_REFRESH, default="1d"): cv.All(
                                cv.string, cv.source_refresh
                            ),
                        }
                    ),
                    _check_component_type,
                )
            ),
        }
@@ -386,10 +425,24 @@ FRAMEWORK_SCHEMA = cv.typed_schema(
)


FLASH_SIZES = [
    "2MB",
    "4MB",
    "8MB",
    "16MB",
    "32MB",
]

CONF_FLASH_SIZE = "flash_size"
CONF_PARTITIONS = "partitions"
CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.Required(CONF_BOARD): cv.string_strict,
            cv.Optional(CONF_FLASH_SIZE, default="4MB"): cv.one_of(
                *FLASH_SIZES, upper=True
            ),
            cv.Optional(CONF_PARTITIONS): cv.file_,
            cv.Optional(CONF_VARIANT): cv.one_of(*VARIANTS, upper=True),
            cv.Optional(CONF_FRAMEWORK, default={}): FRAMEWORK_SCHEMA,
        }
@@ -399,8 +452,12 @@ CONFIG_SCHEMA = cv.All(
)


FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate)


async def to_code(config):
    cg.add_platformio_option("board", config[CONF_BOARD])
    cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE])
    cg.add_build_flag("-DUSE_ESP32")
    cg.add_define("ESPHOME_BOARD", config[CONF_BOARD])
    cg.add_build_flag(f"-DUSE_ESP32_VARIANT_{config[CONF_VARIANT]}")
@@ -450,7 +507,10 @@ async def to_code(config):
        add_idf_sdkconfig_option("CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU0", False)
        add_idf_sdkconfig_option("CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU1", False)

        cg.add_platformio_option("board_build.partitions", "partitions.csv")
        if CONF_PARTITIONS in config:
            cg.add_platformio_option("board_build.partitions", config[CONF_PARTITIONS])
        else:
            cg.add_platformio_option("board_build.partitions", "partitions.csv")

        for name, value in conf[CONF_SDKCONFIG_OPTIONS].items():
            add_idf_sdkconfig_option(name, RawSdkconfigValue(value))
@@ -495,7 +555,10 @@ async def to_code(config):
            [f"platformio/framework-arduinoespressif32@{conf[CONF_SOURCE]}"],
        )

        cg.add_platformio_option("board_build.partitions", "partitions.csv")
        if CONF_PARTITIONS in config:
            cg.add_platformio_option("board_build.partitions", config[CONF_PARTITIONS])
        else:
            cg.add_platformio_option("board_build.partitions", "partitions.csv")

        cg.add_define(
            "USE_ARDUINO_VERSION_CODE",
@@ -505,24 +568,47 @@ async def to_code(config):
        )


ARDUINO_PARTITIONS_CSV = """\
nvs,      data, nvs,     0x009000, 0x005000,
otadata,  data, ota,     0x00e000, 0x002000,
app0,     app,  ota_0,   0x010000, 0x1C0000,
app1,     app,  ota_1,   0x1D0000, 0x1C0000,
eeprom,   data, 0x99,    0x390000, 0x001000,
spiffs,   data, spiffs,  0x391000, 0x00F000
APP_PARTITION_SIZES = {
    "2MB": 0x0C0000,  # 768 KB
    "4MB": 0x1C0000,  # 1792 KB
    "8MB": 0x3C0000,  # 3840 KB
    "16MB": 0x7C0000,  # 7936 KB
    "32MB": 0xFC0000,  # 16128 KB
}


def get_arduino_partition_csv(flash_size):
    app_partition_size = APP_PARTITION_SIZES[flash_size]
    eeprom_partition_size = 0x1000  # 4 KB
    spiffs_partition_size = 0xF000  # 60 KB

    app0_partition_start = 0x010000  # 64 KB
    app1_partition_start = app0_partition_start + app_partition_size
    eeprom_partition_start = app1_partition_start + app_partition_size
    spiffs_partition_start = eeprom_partition_start + eeprom_partition_size

    partition_csv = f"""\
nvs,      data, nvs,     0x9000, 0x5000,
otadata,  data, ota,     0xE000, 0x2000,
app0,     app,  ota_0,   0x{app0_partition_start:X}, 0x{app_partition_size:X},
app1,     app,  ota_1,   0x{app1_partition_start:X}, 0x{app_partition_size:X},
eeprom,   data, 0x99,    0x{eeprom_partition_start:X}, 0x{eeprom_partition_size:X},
spiffs,   data, spiffs,  0x{spiffs_partition_start:X}, 0x{spiffs_partition_size:X}
"""
    return partition_csv


IDF_PARTITIONS_CSV = """\
# Name,   Type, SubType, Offset,   Size, Flags
def get_idf_partition_csv(flash_size):
    app_partition_size = APP_PARTITION_SIZES[flash_size]

    partition_csv = f"""\
otadata,  data, ota,     ,        0x2000,
phy_init, data, phy,     ,        0x1000,
app0,     app,  ota_0,   ,      0x1C0000,
app1,     app,  ota_1,   ,      0x1C0000,
nvs,      data, nvs,     ,       0x6d000,
app0,     app,  ota_0,   ,        0x{app_partition_size:X},
app1,     app,  ota_1,   ,        0x{app_partition_size:X},
nvs,      data, nvs,     ,        0x6D000,
"""
    return partition_csv


def _format_sdkconfig_val(value: SdkconfigValueType) -> str:
@@ -565,13 +651,17 @@ def copy_files():
    if CORE.using_arduino:
        write_file_if_changed(
            CORE.relative_build_path("partitions.csv"),
            ARDUINO_PARTITIONS_CSV,
            get_arduino_partition_csv(
                CORE.platformio_options.get("board_upload.flash_size")
            ),
        )
    if CORE.using_esp_idf:
        _write_sdkconfig()
        write_file_if_changed(
            CORE.relative_build_path("partitions.csv"),
            IDF_PARTITIONS_CSV,
            get_idf_partition_csv(
                CORE.platformio_options.get("board_upload.flash_size")
            ),
        )
        # IDF build scripts look for version string to put in the build.
        # However, if the build path does not have an initialized git repo,
@@ -3,7 +3,6 @@
#ifdef USE_ARDUINO

#include "esphome/components/remote_base/remote_base.h"
#include "esphome/components/remote_transmitter/remote_transmitter.h"
#include <IRSender.h>  // arduino-heatpump library

namespace esphome {
@@ -11,14 +10,13 @@ namespace heatpumpir {

class IRSenderESPHome : public IRSender {
 public:
  IRSenderESPHome(remote_transmitter::RemoteTransmitterComponent *transmitter)
      : IRSender(0), transmit_(transmitter->transmit()){};
  IRSenderESPHome(remote_base::RemoteTransmitterBase *transmitter) : IRSender(0), transmit_(transmitter->transmit()){};
  void setFrequency(int frequency) override;  // NOLINT(readability-identifier-naming)
  void space(int space_length) override;
  void mark(int mark_length) override;

 protected:
  remote_transmitter::RemoteTransmitterComponent::TransmitCall transmit_;
  remote_base::RemoteTransmitterBase::TransmitCall transmit_;
};

}  // namespace heatpumpir
@@ -68,6 +68,7 @@ void LD2420Component::dump_config() {
  ESP_LOGCONFIG(TAG, "LD2420:");
  ESP_LOGCONFIG(TAG, "  Firmware Version : %7s", this->ld2420_firmware_ver_);
  ESP_LOGCONFIG(TAG, "LD2420 Number:");
#ifdef USE_NUMBER
  LOG_NUMBER(TAG, "  Gate Timeout:", this->gate_timeout_number_);
  LOG_NUMBER(TAG, "  Gate Max Distance:", this->max_gate_distance_number_);
  LOG_NUMBER(TAG, "  Gate Min Distance:", this->min_gate_distance_number_);
@@ -76,10 +77,13 @@ void LD2420Component::dump_config() {
    LOG_NUMBER(TAG, "  Gate Move Threshold:", this->gate_move_threshold_numbers_[gate]);
    LOG_NUMBER(TAG, "  Gate Still Threshold::", this->gate_still_threshold_numbers_[gate]);
  }
#endif
#ifdef USE_BUTTON
  LOG_BUTTON(TAG, "  Apply Config:", this->apply_config_button_);
  LOG_BUTTON(TAG, "  Revert Edits:", this->revert_config_button_);
  LOG_BUTTON(TAG, "  Factory Reset:", this->factory_reset_button_);
  LOG_BUTTON(TAG, "  Restart Module:", this->restart_module_button_);
#endif
  ESP_LOGCONFIG(TAG, "LD2420 Select:");
  LOG_SELECT(TAG, "  Operating Mode", this->operating_selector_);
  if (this->get_firmware_int_(ld2420_firmware_ver_) < CALIBRATE_VERSION_MIN) {
@@ -183,9 +187,11 @@ void LD2420Component::factory_reset_action() {
    return;
  }
  this->set_min_max_distances_timeout(FACTORY_MAX_GATE, FACTORY_MIN_GATE, FACTORY_TIMEOUT);
#ifdef USE_NUMBER
  this->gate_timeout_number_->state = FACTORY_TIMEOUT;
  this->min_gate_distance_number_->state = FACTORY_MIN_GATE;
  this->max_gate_distance_number_->state = FACTORY_MAX_GATE;
#endif
  for (uint8_t gate = 0; gate < LD2420_TOTAL_GATES; gate++) {
    this->new_config.move_thresh[gate] = FACTORY_MOVE_THRESH[gate];
    this->new_config.still_thresh[gate] = FACTORY_STILL_THRESH[gate];
@@ -147,7 +147,7 @@ void MQTTClientComponent::dump_config() {
    ESP_LOGCONFIG(TAG, "  Availability: '%s'", this->availability_.topic.c_str());
  }
}
bool MQTTClientComponent::can_proceed() { return this->is_connected(); }
bool MQTTClientComponent::can_proceed() { return network::is_disabled() || this->is_connected(); }

void MQTTClientComponent::start_dnslookup_() {
  for (auto &subscription : this->subscriptions_) {
@@ -1,5 +1,6 @@
#include "my9231.h"
#include "esphome/core/log.h"
#include "esphome/core/helpers.h"

namespace esphome {
namespace my9231 {
@@ -51,7 +52,11 @@ void MY9231OutputComponent::setup() {
      MY9231_CMD_SCATTER_APDM | MY9231_CMD_FREQUENCY_DIVIDE_1 | MY9231_CMD_REACTION_FAST | MY9231_CMD_ONE_SHOT_DISABLE;
  ESP_LOGV(TAG, "  Command: 0x%02X", command);

  this->init_chips_(command);
  {
    InterruptLock lock;
    this->send_dcki_pulses_(32 * this->num_chips_);
    this->init_chips_(command);
  }
  ESP_LOGV(TAG, "  Chips initialized.");
}
void MY9231OutputComponent::dump_config() {
@@ -66,11 +71,14 @@ void MY9231OutputComponent::loop() {
  if (!this->update_)
    return;

  for (auto pwm_amount : this->pwm_amounts_) {
    this->write_word_(pwm_amount, this->bit_depth_);
  {
    InterruptLock lock;
    for (auto pwm_amount : this->pwm_amounts_) {
      this->write_word_(pwm_amount, this->bit_depth_);
    }
    // Send 8 DI pulses. After 8 falling edges, the duty data are store.
    this->send_di_pulses_(8);
  }
  // Send 8 DI pulses. After 8 falling edges, the duty data are store.
  this->send_di_pulses_(8);
  this->update_ = false;
}
void MY9231OutputComponent::set_channel_value_(uint8_t channel, uint16_t value) {
@@ -92,6 +100,7 @@ void MY9231OutputComponent::init_chips_(uint8_t command) {
  // Send 16 DI pulse. After 14 falling edges, the command data are
  // stored and after 16 falling edges the duty mode is activated.
  this->send_di_pulses_(16);
  delayMicroseconds(12);
}
void MY9231OutputComponent::write_word_(uint16_t value, uint8_t bits) {
  for (uint8_t i = bits; i > 0; i--) {
@@ -106,6 +115,13 @@ void MY9231OutputComponent::send_di_pulses_(uint8_t count) {
    this->pin_di_->digital_write(false);
  }
}
void MY9231OutputComponent::send_dcki_pulses_(uint8_t count) {
  delayMicroseconds(12);
  for (uint8_t i = 0; i < count; i++) {
    this->pin_dcki_->digital_write(true);
    this->pin_dcki_->digital_write(false);
  }
}

}  // namespace my9231
}  // namespace esphome
@@ -49,6 +49,7 @@ class MY9231OutputComponent : public Component {
 | 
			
		||||
  void init_chips_(uint8_t command);
 | 
			
		||||
  void write_word_(uint16_t value, uint8_t bits);
 | 
			
		||||
  void send_di_pulses_(uint8_t count);
 | 
			
		||||
  void send_dcki_pulses_(uint8_t count);
 | 
			
		||||
 | 
			
		||||
  GPIOPin *pin_di_;
 | 
			
		||||
  GPIOPin *pin_dcki_;
 | 
			
		||||
 
 | 
			
		||||
@@ -29,6 +29,14 @@ bool is_connected() {
 | 
			
		||||
  return false;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
bool is_disabled() {
 | 
			
		||||
#ifdef USE_WIFI
 | 
			
		||||
  if (wifi::global_wifi_component != nullptr)
 | 
			
		||||
    return wifi::global_wifi_component->is_disabled();
 | 
			
		||||
#endif
 | 
			
		||||
  return false;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
network::IPAddress get_ip_address() {
 | 
			
		||||
#ifdef USE_ETHERNET
 | 
			
		||||
  if (ethernet::global_eth_component != nullptr)
 | 
			
		||||
 
 | 
			
		||||
@@ -8,6 +8,8 @@ namespace network {
 | 
			
		||||
 | 
			
		||||
/// Return whether the node is connected to the network (through wifi, eth, ...)
 | 
			
		||||
bool is_connected();
 | 
			
		||||
/// Return whether the network is disabled (only wifi for now)
 | 
			
		||||
bool is_disabled();
 | 
			
		||||
/// Get the active network hostname
 | 
			
		||||
std::string get_use_address();
 | 
			
		||||
IPAddress get_ip_address();
 | 
			
		||||
 
 | 
			
		||||
@@ -36,7 +36,7 @@ CONFIG_SCHEMA = (
 | 
			
		||||
    display.BASIC_DISPLAY_SCHEMA.extend(
 | 
			
		||||
        {
 | 
			
		||||
            cv.GenerateID(): cv.declare_id(Nextion),
 | 
			
		||||
            cv.Optional(CONF_TFT_URL): cv.All(cv.string, cv.only_with_arduino),
 | 
			
		||||
            cv.Optional(CONF_TFT_URL): cv.url,
 | 
			
		||||
            cv.Optional(CONF_BRIGHTNESS, default=1.0): cv.percentage,
 | 
			
		||||
            cv.Optional(CONF_ON_SETUP): automation.validate_automation(
 | 
			
		||||
                {
 | 
			
		||||
@@ -85,10 +85,10 @@ async def to_code(config):
 | 
			
		||||
    if CONF_TFT_URL in config:
 | 
			
		||||
        cg.add_define("USE_NEXTION_TFT_UPLOAD")
 | 
			
		||||
        cg.add(var.set_tft_url(config[CONF_TFT_URL]))
 | 
			
		||||
        if CORE.is_esp32:
 | 
			
		||||
        if CORE.is_esp32 and CORE.using_arduino:
 | 
			
		||||
            cg.add_library("WiFiClientSecure", None)
 | 
			
		||||
            cg.add_library("HTTPClient", None)
 | 
			
		||||
        if CORE.is_esp8266:
 | 
			
		||||
        elif CORE.is_esp8266 and CORE.using_arduino:
 | 
			
		||||
            cg.add_library("ESP8266HTTPClient", None)
 | 
			
		||||
 | 
			
		||||
    if CONF_TOUCH_SLEEP_TIMEOUT in config:
 | 
			
		||||
 
 | 
			
		||||
@@ -128,7 +128,7 @@ void Nextion::dump_config() {
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Wake On Touch:    %s", this->auto_wake_on_touch_ ? "True" : "False");
 | 
			
		||||
 | 
			
		||||
  if (this->touch_sleep_timeout_ != 0) {
 | 
			
		||||
    ESP_LOGCONFIG(TAG, "  Touch Timeout:       %d", this->touch_sleep_timeout_);
 | 
			
		||||
    ESP_LOGCONFIG(TAG, "  Touch Timeout:       %" PRIu32, this->touch_sleep_timeout_);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  if (this->wake_up_page_ != -1) {
 | 
			
		||||
@@ -868,6 +868,12 @@ uint16_t Nextion::recv_ret_string_(std::string &response, uint32_t timeout, bool
 | 
			
		||||
  start = millis();
 | 
			
		||||
 | 
			
		||||
  while ((timeout == 0 && this->available()) || millis() - start <= timeout) {
 | 
			
		||||
    if (!this->available()) {
 | 
			
		||||
      App.feed_wdt();
 | 
			
		||||
      delay(1);
 | 
			
		||||
      continue;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    this->read_byte(&c);
 | 
			
		||||
    if (c == 0xFF) {
 | 
			
		||||
      nr_of_ff_bytes++;
 | 
			
		||||
@@ -886,7 +892,7 @@ uint16_t Nextion::recv_ret_string_(std::string &response, uint32_t timeout, bool
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
    App.feed_wdt();
 | 
			
		||||
    delay(1);
 | 
			
		||||
    delay(2);
 | 
			
		||||
 | 
			
		||||
    if (exit_flag || ff_flag) {
 | 
			
		||||
      break;
 | 
			
		||||
 
 | 
			
		||||
@@ -12,14 +12,18 @@
 | 
			
		||||
#include "esphome/components/display/display_color_utils.h"
 | 
			
		||||
 | 
			
		||||
#ifdef USE_NEXTION_TFT_UPLOAD
 | 
			
		||||
#ifdef ARDUINO
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
#include <HTTPClient.h>
 | 
			
		||||
#endif
 | 
			
		||||
#endif  // USE_ESP32
 | 
			
		||||
#ifdef USE_ESP8266
 | 
			
		||||
#include <ESP8266HTTPClient.h>
 | 
			
		||||
#include <WiFiClientSecure.h>
 | 
			
		||||
#endif
 | 
			
		||||
#endif
 | 
			
		||||
#endif  // USE_ESP8266
 | 
			
		||||
#elif defined(USE_ESP_IDF)
 | 
			
		||||
#include <esp_http_client.h>
 | 
			
		||||
#endif  // ARDUINO vs ESP-IDF
 | 
			
		||||
#endif  // USE_NEXTION_TFT_UPLOAD
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace nextion {
 | 
			
		||||
@@ -685,16 +689,18 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
 | 
			
		||||
 | 
			
		||||
#ifdef USE_NEXTION_TFT_UPLOAD
 | 
			
		||||
  /**
 | 
			
		||||
   * Set the tft file URL. https seems problamtic with arduino..
 | 
			
		||||
   * Set the tft file URL. https seems problematic with arduino..
 | 
			
		||||
   */
 | 
			
		||||
  void set_tft_url(const std::string &tft_url) { this->tft_url_ = tft_url; }
 | 
			
		||||
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
  /**
 | 
			
		||||
   * Upload the tft file and softreset the Nextion
 | 
			
		||||
   * Upload the tft file and soft reset Nextion
 | 
			
		||||
   * @return bool True: Transfer completed successfuly, False: Transfer failed.
 | 
			
		||||
   */
 | 
			
		||||
  void upload_tft();
 | 
			
		||||
  bool upload_tft();
 | 
			
		||||
 | 
			
		||||
  void dump_config() override;
 | 
			
		||||
 | 
			
		||||
  /**
 | 
			
		||||
@@ -817,16 +823,16 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
 | 
			
		||||
  BearSSL::WiFiClientSecure *wifi_client_secure_{nullptr};
 | 
			
		||||
  WiFiClient *get_wifi_client_();
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
  int content_length_ = 0;
 | 
			
		||||
  int tft_size_ = 0;
 | 
			
		||||
#ifdef ARDUINO
 | 
			
		||||
  /**
 | 
			
		||||
   * will request chunk_size chunks from the web server
 | 
			
		||||
   * and send each to the nextion
 | 
			
		||||
   * @param int contentLength Total size of the file
 | 
			
		||||
   * @param uint32_t chunk_size
 | 
			
		||||
   * @return true if success, false for failure.
 | 
			
		||||
   * @param HTTPClient http HTTP client handler.
 | 
			
		||||
   * @param int range_start Position of next byte to transfer.
 | 
			
		||||
   * @return position of last byte transferred, -1 for failure.
 | 
			
		||||
   */
 | 
			
		||||
  int content_length_ = 0;
 | 
			
		||||
  int tft_size_ = 0;
 | 
			
		||||
  int upload_by_chunks_(HTTPClient *http, int range_start);
 | 
			
		||||
 | 
			
		||||
  bool upload_with_range_(uint32_t range_start, uint32_t range_end);
 | 
			
		||||
@@ -839,7 +845,30 @@ class Nextion : public NextionBase, public PollingComponent, public uart::UARTDe
 | 
			
		||||
   * @return true if success, false for failure.
 | 
			
		||||
   */
 | 
			
		||||
  bool upload_from_buffer_(const uint8_t *file_buf, size_t buf_size);
 | 
			
		||||
  void upload_end_();
 | 
			
		||||
  /**
 | 
			
		||||
   * Ends the upload process, restart Nextion and, if successful,
 | 
			
		||||
   * restarts ESP
 | 
			
		||||
   * @param bool url successful True: Transfer completed successfuly, False: Transfer failed.
 | 
			
		||||
   * @return bool True: Transfer completed successfuly, False: Transfer failed.
 | 
			
		||||
   */
 | 
			
		||||
  bool upload_end_(bool successful);
 | 
			
		||||
#elif defined(USE_ESP_IDF)
 | 
			
		||||
  /**
 | 
			
		||||
   * will request 4096 bytes chunks from the web server
 | 
			
		||||
   * and send each to Nextion
 | 
			
		||||
   * @param std::string url Full url for download.
 | 
			
		||||
   * @param int range_start Position of next byte to transfer.
 | 
			
		||||
   * @return position of last byte transferred, -1 for failure.
 | 
			
		||||
   */
 | 
			
		||||
  int upload_range(const std::string &url, int range_start);
 | 
			
		||||
  /**
 | 
			
		||||
   * Ends the upload process, restart Nextion and, if successful,
 | 
			
		||||
   * restarts ESP
 | 
			
		||||
   * @param bool url successful True: Transfer completed successfuly, False: Transfer failed.
 | 
			
		||||
   * @return bool True: Transfer completed successfuly, False: Transfer failed.
 | 
			
		||||
   */
 | 
			
		||||
  bool upload_end(bool successful);
 | 
			
		||||
#endif  // ARDUINO vs ESP-IDF
 | 
			
		||||
 | 
			
		||||
#endif  // USE_NEXTION_TFT_UPLOAD
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -55,7 +55,7 @@ void Nextion::set_protocol_reparse_mode(bool active_mode) {
 | 
			
		||||
 | 
			
		||||
// Set Colors
 | 
			
		||||
void Nextion::set_component_background_color(const char *component, uint32_t color) {
 | 
			
		||||
  this->add_no_result_to_queue_with_printf_("set_component_background_color", "%s.bco=%d", component, color);
 | 
			
		||||
  this->add_no_result_to_queue_with_printf_("set_component_background_color", "%s.bco=%" PRIu32, component, color);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void Nextion::set_component_background_color(const char *component, const char *color) {
 | 
			
		||||
@@ -68,7 +68,8 @@ void Nextion::set_component_background_color(const char *component, Color color)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void Nextion::set_component_pressed_background_color(const char *component, uint32_t color) {
 | 
			
		||||
  this->add_no_result_to_queue_with_printf_("set_component_pressed_background_color", "%s.bco2=%d", component, color);
 | 
			
		||||
  this->add_no_result_to_queue_with_printf_("set_component_pressed_background_color", "%s.bco2=%" PRIu32, component,
 | 
			
		||||
                                            color);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void Nextion::set_component_pressed_background_color(const char *component, const char *color) {
 | 
			
		||||
@@ -89,7 +90,7 @@ void Nextion::set_component_picc(const char *component, uint8_t pic_id) {
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void Nextion::set_component_font_color(const char *component, uint32_t color) {
 | 
			
		||||
  this->add_no_result_to_queue_with_printf_("set_component_font_color", "%s.pco=%d", component, color);
 | 
			
		||||
  this->add_no_result_to_queue_with_printf_("set_component_font_color", "%s.pco=%" PRIu32, component, color);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void Nextion::set_component_font_color(const char *component, const char *color) {
 | 
			
		||||
@@ -102,7 +103,7 @@ void Nextion::set_component_font_color(const char *component, Color color) {
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void Nextion::set_component_pressed_font_color(const char *component, uint32_t color) {
 | 
			
		||||
  this->add_no_result_to_queue_with_printf_("set_component_pressed_font_color", "%s.pco2=%d", component, color);
 | 
			
		||||
  this->add_no_result_to_queue_with_printf_("set_component_pressed_font_color", "%s.pco2=%" PRIu32, component, color);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void Nextion::set_component_pressed_font_color(const char *component, const char *color) {
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,6 @@
 | 
			
		||||
#include "nextion.h"
 | 
			
		||||
 | 
			
		||||
#ifdef ARDUINO
 | 
			
		||||
#ifdef USE_NEXTION_TFT_UPLOAD
 | 
			
		||||
 | 
			
		||||
#include "esphome/core/application.h"
 | 
			
		||||
@@ -128,15 +129,15 @@ int Nextion::upload_by_chunks_(HTTPClient *http, int range_start) {
 | 
			
		||||
  return range_end + 1;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void Nextion::upload_tft() {
 | 
			
		||||
bool Nextion::upload_tft() {
 | 
			
		||||
  if (this->is_updating_) {
 | 
			
		||||
    ESP_LOGD(TAG, "Currently updating");
 | 
			
		||||
    return;
 | 
			
		||||
    return false;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  if (!network::is_connected()) {
 | 
			
		||||
    ESP_LOGD(TAG, "network is not connected");
 | 
			
		||||
    return;
 | 
			
		||||
    return false;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  this->is_updating_ = true;
 | 
			
		||||
@@ -164,7 +165,7 @@ void Nextion::upload_tft() {
 | 
			
		||||
    ESP_LOGD(TAG, "connection failed");
 | 
			
		||||
    ExternalRAMAllocator<uint8_t> allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
 | 
			
		||||
    allocator.deallocate(this->transfer_buffer_, this->transfer_buffer_size_);
 | 
			
		||||
    return;
 | 
			
		||||
    return false;
 | 
			
		||||
  } else {
 | 
			
		||||
    ESP_LOGD(TAG, "Connected");
 | 
			
		||||
  }
 | 
			
		||||
@@ -192,7 +193,7 @@ void Nextion::upload_tft() {
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  if ((code != 200 && code != 206) || tries > 5) {
 | 
			
		||||
    this->upload_end_();
 | 
			
		||||
    return this->upload_end_(false);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  String content_range_string = http.header("Content-Range");
 | 
			
		||||
@@ -203,7 +204,7 @@ void Nextion::upload_tft() {
 | 
			
		||||
 | 
			
		||||
  if (this->content_length_ < 4096) {
 | 
			
		||||
    ESP_LOGE(TAG, "Failed to get file size");
 | 
			
		||||
    this->upload_end_();
 | 
			
		||||
    return this->upload_end_(false);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  ESP_LOGD(TAG, "Updating Nextion %s...", this->device_model_.c_str());
 | 
			
		||||
@@ -246,7 +247,7 @@ void Nextion::upload_tft() {
 | 
			
		||||
    ESP_LOGD(TAG, "preparation for tft update done");
 | 
			
		||||
  } else {
 | 
			
		||||
    ESP_LOGD(TAG, "preparation for tft update failed %d \"%s\"", response[0], response.c_str());
 | 
			
		||||
    this->upload_end_();
 | 
			
		||||
    return this->upload_end_(false);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Nextion wants 4096 bytes at a time. Make chunk_size a multiple of 4096
 | 
			
		||||
@@ -280,7 +281,7 @@ void Nextion::upload_tft() {
 | 
			
		||||
      this->transfer_buffer_ = allocator.allocate(chunk_size);
 | 
			
		||||
 | 
			
		||||
      if (!this->transfer_buffer_)
 | 
			
		||||
        this->upload_end_();
 | 
			
		||||
        return this->upload_end_(false);
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    this->transfer_buffer_size_ = chunk_size;
 | 
			
		||||
@@ -295,7 +296,7 @@ void Nextion::upload_tft() {
 | 
			
		||||
    result = this->upload_by_chunks_(&http, result);
 | 
			
		||||
    if (result < 0) {
 | 
			
		||||
      ESP_LOGD(TAG, "Error updating Nextion!");
 | 
			
		||||
      this->upload_end_();
 | 
			
		||||
      return this->upload_end_(false);
 | 
			
		||||
    }
 | 
			
		||||
    App.feed_wdt();
 | 
			
		||||
    // NOLINTNEXTLINE(readability-static-accessed-through-instance)
 | 
			
		||||
@@ -303,15 +304,19 @@ void Nextion::upload_tft() {
 | 
			
		||||
  }
 | 
			
		||||
  ESP_LOGD(TAG, "Successfully updated Nextion!");
 | 
			
		||||
 | 
			
		||||
  this->upload_end_();
 | 
			
		||||
  return this->upload_end_(true);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void Nextion::upload_end_() {
 | 
			
		||||
bool Nextion::upload_end_(bool successful) {
 | 
			
		||||
  this->is_updating_ = false;
 | 
			
		||||
  ESP_LOGD(TAG, "Restarting Nextion");
 | 
			
		||||
  this->soft_reset();
 | 
			
		||||
  delay(1500);  // NOLINT
 | 
			
		||||
  ESP_LOGD(TAG, "Restarting esphome");
 | 
			
		||||
  ESP.restart();  // NOLINT(readability-static-accessed-through-instance)
 | 
			
		||||
  if (successful) {
 | 
			
		||||
    delay(1500);  // NOLINT
 | 
			
		||||
    ESP_LOGD(TAG, "Restarting esphome");
 | 
			
		||||
    ESP.restart();  // NOLINT(readability-static-accessed-through-instance)
 | 
			
		||||
  }
 | 
			
		||||
  return successful;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP8266
 | 
			
		||||
@@ -337,3 +342,4 @@ WiFiClient *Nextion::get_wifi_client_() {
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
 | 
			
		||||
#endif  // USE_NEXTION_TFT_UPLOAD
 | 
			
		||||
#endif  // ARDUINO
 | 
			
		||||
							
								
								
									
										268
									
								
								esphome/components/nextion/nextion_upload_idf.cpp
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										268
									
								
								esphome/components/nextion/nextion_upload_idf.cpp
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,268 @@
 | 
			
		||||
#include "nextion.h"
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP_IDF
 | 
			
		||||
#ifdef USE_NEXTION_TFT_UPLOAD
 | 
			
		||||
 | 
			
		||||
#include "esphome/core/application.h"
 | 
			
		||||
#include "esphome/core/defines.h"
 | 
			
		||||
#include "esphome/core/util.h"
 | 
			
		||||
#include "esphome/core/log.h"
 | 
			
		||||
#include "esphome/components/network/util.h"
 | 
			
		||||
 | 
			
		||||
#include <esp_heap_caps.h>
 | 
			
		||||
#include <esp_http_client.h>
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace nextion {
 | 
			
		||||
static const char *const TAG = "nextion_upload";
 | 
			
		||||
 | 
			
		||||
// Followed guide
 | 
			
		||||
// https://unofficialnextion.com/t/nextion-upload-protocol-v1-2-the-fast-one/1044/2
 | 
			
		||||
 | 
			
		||||
int Nextion::upload_range(const std::string &url, int range_start) {
 | 
			
		||||
  ESP_LOGVV(TAG, "url: %s", url.c_str());
 | 
			
		||||
  uint range_size = this->tft_size_ - range_start;
 | 
			
		||||
  ESP_LOGVV(TAG, "tft_size_: %i", this->tft_size_);
 | 
			
		||||
  ESP_LOGV(TAG, "Available heap: %u", esp_get_free_heap_size());
 | 
			
		||||
  int range_end = (range_start == 0) ? std::min(this->tft_size_, 16383) : this->tft_size_;
 | 
			
		||||
  if (range_size <= 0 or range_end <= range_start) {
 | 
			
		||||
    ESP_LOGE(TAG, "Invalid range");
 | 
			
		||||
    ESP_LOGD(TAG, "Range start: %i", range_start);
 | 
			
		||||
    ESP_LOGD(TAG, "Range end: %i", range_end);
 | 
			
		||||
    ESP_LOGD(TAG, "Range size: %i", range_size);
 | 
			
		||||
    return -1;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  esp_http_client_config_t config = {
 | 
			
		||||
      .url = url.c_str(),
 | 
			
		||||
      .cert_pem = nullptr,
 | 
			
		||||
  };
 | 
			
		||||
  esp_http_client_handle_t client = esp_http_client_init(&config);
 | 
			
		||||
 | 
			
		||||
  char range_header[64];
 | 
			
		||||
  sprintf(range_header, "bytes=%d-%d", range_start, range_end);
 | 
			
		||||
  ESP_LOGV(TAG, "Requesting range: %s", range_header);
 | 
			
		||||
  esp_http_client_set_header(client, "Range", range_header);
 | 
			
		||||
  ESP_LOGVV(TAG, "Available heap: %u", esp_get_free_heap_size());
 | 
			
		||||
 | 
			
		||||
  ESP_LOGV(TAG, "Opening http connetion");
 | 
			
		||||
  esp_err_t err;
 | 
			
		||||
  if ((err = esp_http_client_open(client, 0)) != ESP_OK) {
 | 
			
		||||
    ESP_LOGE(TAG, "Failed to open HTTP connection: %s", esp_err_to_name(err));
 | 
			
		||||
    esp_http_client_cleanup(client);
 | 
			
		||||
    return -1;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  ESP_LOGV(TAG, "Fetch content length");
 | 
			
		||||
  int content_length = esp_http_client_fetch_headers(client);
 | 
			
		||||
  ESP_LOGV(TAG, "content_length = %d", content_length);
 | 
			
		||||
  if (content_length <= 0) {
 | 
			
		||||
    ESP_LOGE(TAG, "Failed to get content length: %d", content_length);
 | 
			
		||||
    esp_http_client_cleanup(client);
 | 
			
		||||
    return -1;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  int total_read_len = 0, read_len;
 | 
			
		||||
 | 
			
		||||
  ESP_LOGV(TAG, "Allocate buffer");
 | 
			
		||||
  uint8_t *buffer = new uint8_t[4096];
 | 
			
		||||
  std::string recv_string;
 | 
			
		||||
  if (buffer == nullptr) {
 | 
			
		||||
    ESP_LOGE(TAG, "Failed to allocate memory for buffer");
 | 
			
		||||
    ESP_LOGV(TAG, "Available heap: %u", esp_get_free_heap_size());
 | 
			
		||||
  } else {
 | 
			
		||||
    ESP_LOGV(TAG, "Memory for buffer allocated successfully");
 | 
			
		||||
 | 
			
		||||
    while (true) {
 | 
			
		||||
      App.feed_wdt();
 | 
			
		||||
      ESP_LOGVV(TAG, "Available heap: %u", esp_get_free_heap_size());
 | 
			
		||||
      int read_len = esp_http_client_read(client, reinterpret_cast<char *>(buffer), 4096);
 | 
			
		||||
      ESP_LOGVV(TAG, "Read %d bytes from HTTP client, writing to UART", read_len);
 | 
			
		||||
      if (read_len > 0) {
 | 
			
		||||
        this->write_array(buffer, read_len);
 | 
			
		||||
        ESP_LOGVV(TAG, "Write to UART successful");
 | 
			
		||||
        this->recv_ret_string_(recv_string, 5000, true);
 | 
			
		||||
        this->content_length_ -= read_len;
 | 
			
		||||
        ESP_LOGD(TAG, "Uploaded %0.2f %%, remaining %d bytes",
 | 
			
		||||
                 100.0 * (this->tft_size_ - this->content_length_) / this->tft_size_, this->content_length_);
 | 
			
		||||
        if (recv_string[0] != 0x05) {  // 0x05 == "ok"
 | 
			
		||||
          ESP_LOGD(
 | 
			
		||||
              TAG, "recv_string [%s]",
 | 
			
		||||
              format_hex_pretty(reinterpret_cast<const uint8_t *>(recv_string.data()), recv_string.size()).c_str());
 | 
			
		||||
        }
 | 
			
		||||
        // handle partial upload request
 | 
			
		||||
        if (recv_string[0] == 0x08 && recv_string.size() == 5) {
 | 
			
		||||
          uint32_t result = 0;
 | 
			
		||||
          for (int j = 0; j < 4; ++j) {
 | 
			
		||||
            result += static_cast<uint8_t>(recv_string[j + 1]) << (8 * j);
 | 
			
		||||
          }
 | 
			
		||||
          if (result > 0) {
 | 
			
		||||
            ESP_LOGI(TAG, "Nextion reported new range %" PRIu32, result);
 | 
			
		||||
            this->content_length_ = this->tft_size_ - result;
 | 
			
		||||
            // Deallocate the buffer when done
 | 
			
		||||
            delete[] buffer;
 | 
			
		||||
            ESP_LOGVV(TAG, "Memory for buffer deallocated");
 | 
			
		||||
            esp_http_client_cleanup(client);
 | 
			
		||||
            esp_http_client_close(client);
 | 
			
		||||
            return result;
 | 
			
		||||
          }
 | 
			
		||||
        }
 | 
			
		||||
        recv_string.clear();
 | 
			
		||||
      } else if (read_len == 0) {
 | 
			
		||||
        ESP_LOGV(TAG, "End of HTTP response reached");
 | 
			
		||||
        break;  // Exit the loop if there is no more data to read
 | 
			
		||||
      } else {
 | 
			
		||||
        ESP_LOGE(TAG, "Failed to read from HTTP client, error code: %d", read_len);
 | 
			
		||||
        break;  // Exit the loop on error
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    // Deallocate the buffer when done
 | 
			
		||||
    delete[] buffer;
 | 
			
		||||
    ESP_LOGVV(TAG, "Memory for buffer deallocated");
 | 
			
		||||
  }
 | 
			
		||||
  esp_http_client_cleanup(client);
 | 
			
		||||
  esp_http_client_close(client);
 | 
			
		||||
  return range_end + 1;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
bool Nextion::upload_tft() {
 | 
			
		||||
  ESP_LOGD(TAG, "Nextion TFT upload requested");
 | 
			
		||||
  ESP_LOGD(TAG, "url: %s", this->tft_url_.c_str());
 | 
			
		||||
 | 
			
		||||
  if (this->is_updating_) {
 | 
			
		||||
    ESP_LOGW(TAG, "Currently updating");
 | 
			
		||||
    return false;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  if (!network::is_connected()) {
 | 
			
		||||
    ESP_LOGE(TAG, "Network is not connected");
 | 
			
		||||
    return false;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  this->is_updating_ = true;
 | 
			
		||||
 | 
			
		||||
  // Define the configuration for the HTTP client
 | 
			
		||||
  ESP_LOGV(TAG, "Establishing connection to HTTP server");
 | 
			
		||||
  ESP_LOGVV(TAG, "Available heap: %u", esp_get_free_heap_size());
 | 
			
		||||
  esp_http_client_config_t config = {
 | 
			
		||||
      .url = this->tft_url_.c_str(),
 | 
			
		||||
      .cert_pem = nullptr,
 | 
			
		||||
      .method = HTTP_METHOD_HEAD,
 | 
			
		||||
      .timeout_ms = 15000,
 | 
			
		||||
  };
 | 
			
		||||
 | 
			
		||||
  // Initialize the HTTP client with the configuration
 | 
			
		||||
  ESP_LOGV(TAG, "Initializing HTTP client");
 | 
			
		||||
  ESP_LOGV(TAG, "Available heap: %u", esp_get_free_heap_size());
 | 
			
		||||
  esp_http_client_handle_t http = esp_http_client_init(&config);
 | 
			
		||||
  if (!http) {
 | 
			
		||||
    ESP_LOGE(TAG, "Failed to initialize HTTP client.");
 | 
			
		||||
    return this->upload_end(false);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Perform the HTTP request
 | 
			
		||||
  ESP_LOGV(TAG, "Check if the client could connect");
 | 
			
		||||
  ESP_LOGV(TAG, "Available heap: %u", esp_get_free_heap_size());
 | 
			
		||||
  esp_err_t err = esp_http_client_perform(http);
 | 
			
		||||
  if (err != ESP_OK) {
 | 
			
		||||
    ESP_LOGE(TAG, "HTTP request failed: %s", esp_err_to_name(err));
 | 
			
		||||
    esp_http_client_cleanup(http);
 | 
			
		||||
    return this->upload_end(false);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Check the HTTP Status Code
 | 
			
		||||
  int status_code = esp_http_client_get_status_code(http);
 | 
			
		||||
  ESP_LOGV(TAG, "HTTP Status Code: %d", status_code);
 | 
			
		||||
  size_t tft_file_size = esp_http_client_get_content_length(http);
 | 
			
		||||
  ESP_LOGD(TAG, "TFT file size: %zu", tft_file_size);
 | 
			
		||||
 | 
			
		||||
  if (tft_file_size < 4096) {
 | 
			
		||||
    ESP_LOGE(TAG, "File size check failed. Size: %zu", tft_file_size);
 | 
			
		||||
    esp_http_client_cleanup(http);
 | 
			
		||||
    return this->upload_end(false);
 | 
			
		||||
  } else {
 | 
			
		||||
    ESP_LOGV(TAG, "File size check passed. Proceeding...");
 | 
			
		||||
  }
 | 
			
		||||
  this->content_length_ = tft_file_size;
 | 
			
		||||
  this->tft_size_ = tft_file_size;
 | 
			
		||||
 | 
			
		||||
  ESP_LOGD(TAG, "Updating Nextion");
 | 
			
		||||
  // The Nextion will ignore the update command if it is sleeping
 | 
			
		||||
 | 
			
		||||
  this->send_command_("sleep=0");
 | 
			
		||||
  this->set_backlight_brightness(1.0);
 | 
			
		||||
  vTaskDelay(pdMS_TO_TICKS(250));  // NOLINT
 | 
			
		||||
 | 
			
		||||
  App.feed_wdt();
 | 
			
		||||
  char command[128];
 | 
			
		||||
  // Tells the Nextion the content length of the tft file and baud rate it will be sent at
 | 
			
		||||
  // Once the Nextion accepts the command it will wait until the file is successfully uploaded
 | 
			
		||||
  // If it fails for any reason a power cycle of the display will be needed
 | 
			
		||||
  sprintf(command, "whmi-wris %d,%" PRIu32 ",1", this->content_length_, this->parent_->get_baud_rate());
 | 
			
		||||
 | 
			
		||||
  // Clear serial receive buffer
 | 
			
		||||
  uint8_t d;
 | 
			
		||||
  while (this->available()) {
 | 
			
		||||
    this->read_byte(&d);
 | 
			
		||||
  };
 | 
			
		||||
 | 
			
		||||
  this->send_command_(command);
 | 
			
		||||
 | 
			
		||||
  std::string response;
 | 
			
		||||
  ESP_LOGV(TAG, "Waiting for upgrade response");
 | 
			
		||||
  this->recv_ret_string_(response, 2048, true);  // This can take some time to return
 | 
			
		||||
 | 
			
		||||
  // The Nextion display will, if it's ready to accept data, send a 0x05 byte.
 | 
			
		||||
  ESP_LOGD(TAG, "Upgrade response is [%s]",
 | 
			
		||||
           format_hex_pretty(reinterpret_cast<const uint8_t *>(response.data()), response.size()).c_str());
 | 
			
		||||
 | 
			
		||||
  if (response.find(0x05) != std::string::npos) {
 | 
			
		||||
    ESP_LOGV(TAG, "Preparation for tft update done");
 | 
			
		||||
  } else {
 | 
			
		||||
    ESP_LOGE(TAG, "Preparation for tft update failed %d \"%s\"", response[0], response.c_str());
 | 
			
		||||
    esp_http_client_cleanup(http);
 | 
			
		||||
    return this->upload_end(false);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  ESP_LOGD(TAG, "Updating tft from \"%s\" with a file size of %d, Heap Size %" PRIu32, this->tft_url_.c_str(),
 | 
			
		||||
           content_length_, esp_get_free_heap_size());
 | 
			
		||||
 | 
			
		||||
  ESP_LOGV(TAG, "Starting transfer by chunks loop");
 | 
			
		||||
  int result = 0;
 | 
			
		||||
  while (content_length_ > 0) {
 | 
			
		||||
    result = upload_range(this->tft_url_.c_str(), result);
 | 
			
		||||
    if (result < 0) {
 | 
			
		||||
      ESP_LOGE(TAG, "Error updating Nextion!");
 | 
			
		||||
      esp_http_client_cleanup(http);
 | 
			
		||||
      return this->upload_end(false);
 | 
			
		||||
    }
 | 
			
		||||
    App.feed_wdt();
 | 
			
		||||
    ESP_LOGV(TAG, "Heap Size %" PRIu32 ", Bytes left %d", esp_get_free_heap_size(), content_length_);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  ESP_LOGD(TAG, "Successfully updated Nextion!");
 | 
			
		||||
 | 
			
		||||
  ESP_LOGD(TAG, "Close HTTP connection");
 | 
			
		||||
  esp_http_client_close(http);
 | 
			
		||||
  esp_http_client_cleanup(http);
 | 
			
		||||
  return upload_end(true);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
bool Nextion::upload_end(bool successful) {
 | 
			
		||||
  this->is_updating_ = false;
 | 
			
		||||
  ESP_LOGD(TAG, "Restarting Nextion");
 | 
			
		||||
  this->soft_reset();
 | 
			
		||||
  vTaskDelay(pdMS_TO_TICKS(1500));  // NOLINT
 | 
			
		||||
  if (successful) {
 | 
			
		||||
    ESP_LOGD(TAG, "Restarting esphome");
 | 
			
		||||
    esp_restart();  // NOLINT(readability-static-accessed-through-instance)
 | 
			
		||||
  }
 | 
			
		||||
  return successful;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
}  // namespace nextion
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
 | 
			
		||||
#endif  // USE_NEXTION_TFT_UPLOAD
 | 
			
		||||
#endif  // USE_ESP_IDF
 | 
			
		||||
@@ -2,7 +2,7 @@ from math import log
 | 
			
		||||
 | 
			
		||||
import esphome.config_validation as cv
 | 
			
		||||
import esphome.codegen as cg
 | 
			
		||||
from esphome.components import sensor
 | 
			
		||||
from esphome.components import sensor, resistance_sampler
 | 
			
		||||
from esphome.const import (
 | 
			
		||||
    CONF_CALIBRATION,
 | 
			
		||||
    CONF_REFERENCE_RESISTANCE,
 | 
			
		||||
@@ -15,6 +15,8 @@ from esphome.const import (
 | 
			
		||||
    UNIT_CELSIUS,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
AUTO_LOAD = ["resistance_sampler"]
 | 
			
		||||
 | 
			
		||||
ntc_ns = cg.esphome_ns.namespace("ntc")
 | 
			
		||||
NTC = ntc_ns.class_("NTC", cg.Component, sensor.Sensor)
 | 
			
		||||
 | 
			
		||||
@@ -124,7 +126,7 @@ CONFIG_SCHEMA = (
 | 
			
		||||
    )
 | 
			
		||||
    .extend(
 | 
			
		||||
        {
 | 
			
		||||
            cv.Required(CONF_SENSOR): cv.use_id(sensor.Sensor),
 | 
			
		||||
            cv.Required(CONF_SENSOR): cv.use_id(resistance_sampler.ResistanceSampler),
 | 
			
		||||
            cv.Required(CONF_CALIBRATION): process_calibration,
 | 
			
		||||
        }
 | 
			
		||||
    )
 | 
			
		||||
 
 | 
			
		||||
@@ -52,8 +52,9 @@ RemoteReceiverTrigger = ns.class_(
 | 
			
		||||
    "RemoteReceiverTrigger", automation.Trigger, RemoteReceiverListener
 | 
			
		||||
)
 | 
			
		||||
RemoteTransmitterDumper = ns.class_("RemoteTransmitterDumper")
 | 
			
		||||
RemoteTransmittable = ns.class_("RemoteTransmittable")
 | 
			
		||||
RemoteTransmitterActionBase = ns.class_(
 | 
			
		||||
    "RemoteTransmitterActionBase", automation.Action
 | 
			
		||||
    "RemoteTransmitterActionBase", RemoteTransmittable, automation.Action
 | 
			
		||||
)
 | 
			
		||||
RemoteReceiverBase = ns.class_("RemoteReceiverBase")
 | 
			
		||||
RemoteTransmitterBase = ns.class_("RemoteTransmitterBase")
 | 
			
		||||
@@ -68,11 +69,30 @@ def templatize(value):
 | 
			
		||||
    return cv.Schema(ret)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
REMOTE_LISTENER_SCHEMA = cv.Schema(
 | 
			
		||||
    {
 | 
			
		||||
        cv.GenerateID(CONF_RECEIVER_ID): cv.use_id(RemoteReceiverBase),
 | 
			
		||||
    }
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
REMOTE_TRANSMITTABLE_SCHEMA = cv.Schema(
 | 
			
		||||
    {
 | 
			
		||||
        cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(RemoteTransmitterBase),
 | 
			
		||||
    }
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
async def register_listener(var, config):
 | 
			
		||||
    receiver = await cg.get_variable(config[CONF_RECEIVER_ID])
 | 
			
		||||
    cg.add(receiver.register_listener(var))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
async def register_transmittable(var, config):
 | 
			
		||||
    transmitter_ = await cg.get_variable(config[CONF_TRANSMITTER_ID])
 | 
			
		||||
    cg.add(var.set_transmitter(transmitter_))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def register_binary_sensor(name, type, schema):
 | 
			
		||||
    return BINARY_SENSOR_REGISTRY.register(name, type, schema)
 | 
			
		||||
 | 
			
		||||
@@ -129,10 +149,9 @@ def validate_repeat(value):
 | 
			
		||||
 | 
			
		||||
BASE_REMOTE_TRANSMITTER_SCHEMA = cv.Schema(
 | 
			
		||||
    {
 | 
			
		||||
        cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(RemoteTransmitterBase),
 | 
			
		||||
        cv.Optional(CONF_REPEAT): validate_repeat,
 | 
			
		||||
    }
 | 
			
		||||
)
 | 
			
		||||
).extend(REMOTE_TRANSMITTABLE_SCHEMA)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def register_action(name, type_, schema):
 | 
			
		||||
@@ -143,9 +162,8 @@ def register_action(name, type_, schema):
 | 
			
		||||
 | 
			
		||||
    def decorator(func):
 | 
			
		||||
        async def new_func(config, action_id, template_arg, args):
 | 
			
		||||
            transmitter = await cg.get_variable(config[CONF_TRANSMITTER_ID])
 | 
			
		||||
            var = cg.new_Pvariable(action_id, template_arg)
 | 
			
		||||
            cg.add(var.set_parent(transmitter))
 | 
			
		||||
            await register_transmittable(var, config)
 | 
			
		||||
            if CONF_REPEAT in config:
 | 
			
		||||
                conf = config[CONF_REPEAT]
 | 
			
		||||
                template_ = await cg.templatable(conf[CONF_TIMES], args, cg.uint32)
 | 
			
		||||
@@ -1539,7 +1557,7 @@ MIDEA_SCHEMA = cv.Schema(
 | 
			
		||||
 | 
			
		||||
@register_binary_sensor("midea", MideaBinarySensor, MIDEA_SCHEMA)
 | 
			
		||||
def midea_binary_sensor(var, config):
 | 
			
		||||
    cg.add(var.set_code(config[CONF_CODE]))
 | 
			
		||||
    cg.add(var.set_data(config[CONF_CODE]))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_trigger("midea", MideaTrigger, MideaData)
 | 
			
		||||
 
 | 
			
		||||
@@ -67,20 +67,7 @@ class MideaProtocol : public RemoteProtocol<MideaData> {
 | 
			
		||||
  void dump(const MideaData &data) override;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
class MideaBinarySensor : public RemoteReceiverBinarySensorBase {
 | 
			
		||||
 public:
 | 
			
		||||
  bool matches(RemoteReceiveData src) override {
 | 
			
		||||
    auto data = MideaProtocol().decode(src);
 | 
			
		||||
    return data.has_value() && data.value() == this->data_;
 | 
			
		||||
  }
 | 
			
		||||
  void set_code(const std::vector<uint8_t> &code) { this->data_ = code; }
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  MideaData data_;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
using MideaTrigger = RemoteReceiverTrigger<MideaProtocol, MideaData>;
 | 
			
		||||
using MideaDumper = RemoteReceiverDumper<MideaProtocol, MideaData>;
 | 
			
		||||
DECLARE_REMOTE_PROTOCOL(Midea)
 | 
			
		||||
 | 
			
		||||
template<typename... Ts> class MideaAction : public RemoteTransmitterActionBase<Ts...> {
 | 
			
		||||
  TEMPLATABLE_VALUE(std::vector<uint8_t>, code)
 | 
			
		||||
 
 | 
			
		||||
@@ -15,6 +15,8 @@ struct RCSwitchData {
 | 
			
		||||
 | 
			
		||||
class RCSwitchBase {
 | 
			
		||||
 public:
 | 
			
		||||
  using ProtocolData = RCSwitchData;
 | 
			
		||||
 | 
			
		||||
  RCSwitchBase() = default;
 | 
			
		||||
  RCSwitchBase(uint32_t sync_high, uint32_t sync_low, uint32_t zero_high, uint32_t zero_low, uint32_t one_high,
 | 
			
		||||
               uint32_t one_low, bool inverted);
 | 
			
		||||
@@ -213,7 +215,7 @@ class RCSwitchDumper : public RemoteReceiverDumperBase {
 | 
			
		||||
  bool dump(RemoteReceiveData src) override;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
using RCSwitchTrigger = RemoteReceiverTrigger<RCSwitchBase, RCSwitchData>;
 | 
			
		||||
using RCSwitchTrigger = RemoteReceiverTrigger<RCSwitchBase>;
 | 
			
		||||
 | 
			
		||||
}  // namespace remote_base
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
 
 | 
			
		||||
@@ -127,6 +127,14 @@ class RemoteTransmitterBase : public RemoteComponentBase {
 | 
			
		||||
    this->temp_.reset();
 | 
			
		||||
    return TransmitCall(this);
 | 
			
		||||
  }
 | 
			
		||||
  template<typename Protocol>
 | 
			
		||||
  void transmit(const typename Protocol::ProtocolData &data, uint32_t send_times = 1, uint32_t send_wait = 0) {
 | 
			
		||||
    auto call = this->transmit();
 | 
			
		||||
    Protocol().encode(call.get_data(), data);
 | 
			
		||||
    call.set_send_times(send_times);
 | 
			
		||||
    call.set_send_wait(send_wait);
 | 
			
		||||
    call.perform();
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  void send_(uint32_t send_times, uint32_t send_wait);
 | 
			
		||||
@@ -184,12 +192,13 @@ class RemoteReceiverBinarySensorBase : public binary_sensor::BinarySensorInitial
 | 
			
		||||
 | 
			
		||||
template<typename T> class RemoteProtocol {
 | 
			
		||||
 public:
 | 
			
		||||
  virtual void encode(RemoteTransmitData *dst, const T &data) = 0;
 | 
			
		||||
  virtual optional<T> decode(RemoteReceiveData src) = 0;
 | 
			
		||||
  virtual void dump(const T &data) = 0;
 | 
			
		||||
  using ProtocolData = T;
 | 
			
		||||
  virtual void encode(RemoteTransmitData *dst, const ProtocolData &data) = 0;
 | 
			
		||||
  virtual optional<ProtocolData> decode(RemoteReceiveData src) = 0;
 | 
			
		||||
  virtual void dump(const ProtocolData &data) = 0;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
template<typename T, typename D> class RemoteReceiverBinarySensor : public RemoteReceiverBinarySensorBase {
 | 
			
		||||
template<typename T> class RemoteReceiverBinarySensor : public RemoteReceiverBinarySensorBase {
 | 
			
		||||
 public:
 | 
			
		||||
  RemoteReceiverBinarySensor() : RemoteReceiverBinarySensorBase() {}
 | 
			
		||||
 | 
			
		||||
@@ -201,13 +210,14 @@ template<typename T, typename D> class RemoteReceiverBinarySensor : public Remot
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
 public:
 | 
			
		||||
  void set_data(D data) { data_ = data; }
 | 
			
		||||
  void set_data(typename T::ProtocolData data) { data_ = data; }
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  D data_;
 | 
			
		||||
  typename T::ProtocolData data_;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
template<typename T, typename D> class RemoteReceiverTrigger : public Trigger<D>, public RemoteReceiverListener {
 | 
			
		||||
template<typename T>
 | 
			
		||||
class RemoteReceiverTrigger : public Trigger<typename T::ProtocolData>, public RemoteReceiverListener {
 | 
			
		||||
 protected:
 | 
			
		||||
  bool on_receive(RemoteReceiveData src) override {
 | 
			
		||||
    auto proto = T();
 | 
			
		||||
@@ -220,28 +230,36 @@ template<typename T, typename D> class RemoteReceiverTrigger : public Trigger<D>
 | 
			
		||||
  }
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
template<typename... Ts> class RemoteTransmitterActionBase : public Action<Ts...> {
 | 
			
		||||
class RemoteTransmittable {
 | 
			
		||||
 public:
 | 
			
		||||
  void set_parent(RemoteTransmitterBase *parent) { this->parent_ = parent; }
 | 
			
		||||
  RemoteTransmittable() {}
 | 
			
		||||
  RemoteTransmittable(RemoteTransmitterBase *transmitter) : transmitter_(transmitter) {}
 | 
			
		||||
  void set_transmitter(RemoteTransmitterBase *transmitter) { this->transmitter_ = transmitter; }
 | 
			
		||||
 | 
			
		||||
  TEMPLATABLE_VALUE(uint32_t, send_times);
 | 
			
		||||
  TEMPLATABLE_VALUE(uint32_t, send_wait);
 | 
			
		||||
 protected:
 | 
			
		||||
  template<typename Protocol>
 | 
			
		||||
  void transmit_(const typename Protocol::ProtocolData &data, uint32_t send_times = 1, uint32_t send_wait = 0) {
 | 
			
		||||
    this->transmitter_->transmit<Protocol>(data, send_times, send_wait);
 | 
			
		||||
  }
 | 
			
		||||
  RemoteTransmitterBase *transmitter_;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
template<typename... Ts> class RemoteTransmitterActionBase : public RemoteTransmittable, public Action<Ts...> {
 | 
			
		||||
  TEMPLATABLE_VALUE(uint32_t, send_times)
 | 
			
		||||
  TEMPLATABLE_VALUE(uint32_t, send_wait)
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  void play(Ts... x) override {
 | 
			
		||||
    auto call = this->parent_->transmit();
 | 
			
		||||
    auto call = this->transmitter_->transmit();
 | 
			
		||||
    this->encode(call.get_data(), x...);
 | 
			
		||||
    call.set_send_times(this->send_times_.value_or(x..., 1));
 | 
			
		||||
    call.set_send_wait(this->send_wait_.value_or(x..., 0));
 | 
			
		||||
    call.perform();
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  virtual void encode(RemoteTransmitData *dst, Ts... x) = 0;
 | 
			
		||||
 | 
			
		||||
  RemoteTransmitterBase *parent_{};
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
template<typename T, typename D> class RemoteReceiverDumper : public RemoteReceiverDumperBase {
 | 
			
		||||
template<typename T> class RemoteReceiverDumper : public RemoteReceiverDumperBase {
 | 
			
		||||
 public:
 | 
			
		||||
  bool dump(RemoteReceiveData src) override {
 | 
			
		||||
    auto proto = T();
 | 
			
		||||
@@ -254,9 +272,9 @@ template<typename T, typename D> class RemoteReceiverDumper : public RemoteRecei
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
#define DECLARE_REMOTE_PROTOCOL_(prefix) \
 | 
			
		||||
  using prefix##BinarySensor = RemoteReceiverBinarySensor<prefix##Protocol, prefix##Data>; \
 | 
			
		||||
  using prefix##Trigger = RemoteReceiverTrigger<prefix##Protocol, prefix##Data>; \
 | 
			
		||||
  using prefix##Dumper = RemoteReceiverDumper<prefix##Protocol, prefix##Data>;
 | 
			
		||||
  using prefix##BinarySensor = RemoteReceiverBinarySensor<prefix##Protocol>; \
 | 
			
		||||
  using prefix##Trigger = RemoteReceiverTrigger<prefix##Protocol>; \
 | 
			
		||||
  using prefix##Dumper = RemoteReceiverDumper<prefix##Protocol>;
 | 
			
		||||
#define DECLARE_REMOTE_PROTOCOL(prefix) DECLARE_REMOTE_PROTOCOL_(prefix)
 | 
			
		||||
 | 
			
		||||
}  // namespace remote_base
 | 
			
		||||
 
 | 
			
		||||
@@ -1,7 +1,8 @@
 | 
			
		||||
#pragma once
 | 
			
		||||
 | 
			
		||||
#include "esphome/core/component.h"
 | 
			
		||||
#include "esphome/components/resistance_sampler/resistance_sampler.h"
 | 
			
		||||
#include "esphome/components/sensor/sensor.h"
 | 
			
		||||
#include "esphome/core/component.h"
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace resistance {
 | 
			
		||||
@@ -11,7 +12,7 @@ enum ResistanceConfiguration {
 | 
			
		||||
  DOWNSTREAM,
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
class ResistanceSensor : public Component, public sensor::Sensor {
 | 
			
		||||
class ResistanceSensor : public Component, public sensor::Sensor, resistance_sampler::ResistanceSampler {
 | 
			
		||||
 public:
 | 
			
		||||
  void set_sensor(Sensor *sensor) { sensor_ = sensor; }
 | 
			
		||||
  void set_configuration(ResistanceConfiguration configuration) { configuration_ = configuration; }
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +1,6 @@
 | 
			
		||||
import esphome.codegen as cg
 | 
			
		||||
import esphome.config_validation as cv
 | 
			
		||||
from esphome.components import sensor
 | 
			
		||||
from esphome.components import sensor, resistance_sampler
 | 
			
		||||
from esphome.const import (
 | 
			
		||||
    CONF_SENSOR,
 | 
			
		||||
    STATE_CLASS_MEASUREMENT,
 | 
			
		||||
@@ -8,8 +8,15 @@ from esphome.const import (
 | 
			
		||||
    ICON_FLASH,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
AUTO_LOAD = ["resistance_sampler"]
 | 
			
		||||
 | 
			
		||||
resistance_ns = cg.esphome_ns.namespace("resistance")
 | 
			
		||||
ResistanceSensor = resistance_ns.class_("ResistanceSensor", cg.Component, sensor.Sensor)
 | 
			
		||||
ResistanceSensor = resistance_ns.class_(
 | 
			
		||||
    "ResistanceSensor",
 | 
			
		||||
    cg.Component,
 | 
			
		||||
    sensor.Sensor,
 | 
			
		||||
    resistance_sampler.ResistanceSampler,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
CONF_REFERENCE_VOLTAGE = "reference_voltage"
 | 
			
		||||
CONF_CONFIGURATION = "configuration"
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										6
									
								
								esphome/components/resistance_sampler/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								esphome/components/resistance_sampler/__init__.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,6 @@
 | 
			
		||||
import esphome.codegen as cg
 | 
			
		||||
 | 
			
		||||
resistance_sampler_ns = cg.esphome_ns.namespace("resistance_sampler")
 | 
			
		||||
ResistanceSampler = resistance_sampler_ns.class_("ResistanceSampler")
 | 
			
		||||
 | 
			
		||||
CODEOWNERS = ["@jesserockz"]
 | 
			
		||||
							
								
								
									
										10
									
								
								esphome/components/resistance_sampler/resistance_sampler.h
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										10
									
								
								esphome/components/resistance_sampler/resistance_sampler.h
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,10 @@
 | 
			
		||||
#pragma once
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace resistance_sampler {
 | 
			
		||||
 | 
			
		||||
/// Abstract interface to mark components that provide resistance values.
 | 
			
		||||
class ResistanceSampler {};
 | 
			
		||||
 | 
			
		||||
}  // namespace resistance_sampler
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
@@ -74,12 +74,12 @@ def _format_framework_arduino_version(ver: cv.Version) -> str:
 | 
			
		||||
# The default/recommended arduino framework version
 | 
			
		||||
#  - https://github.com/earlephilhower/arduino-pico/releases
 | 
			
		||||
#  - https://api.registry.platformio.org/v3/packages/earlephilhower/tool/framework-arduinopico
 | 
			
		||||
RECOMMENDED_ARDUINO_FRAMEWORK_VERSION = cv.Version(3, 4, 0)
 | 
			
		||||
RECOMMENDED_ARDUINO_FRAMEWORK_VERSION = cv.Version(3, 6, 0)
 | 
			
		||||
 | 
			
		||||
# The platformio/raspberrypi version to use for arduino frameworks
 | 
			
		||||
#  - https://github.com/platformio/platform-raspberrypi/releases
 | 
			
		||||
#  - https://api.registry.platformio.org/v3/packages/platformio/platform/raspberrypi
 | 
			
		||||
ARDUINO_PLATFORM_VERSION = cv.Version(1, 9, 0)
 | 
			
		||||
ARDUINO_PLATFORM_VERSION = cv.Version(1, 10, 0)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _arduino_check_versions(value):
 | 
			
		||||
 
 | 
			
		||||
@@ -33,6 +33,7 @@ MODELS = {
 | 
			
		||||
    "SH1106_96X16": SSD1306Model.SH1106_MODEL_96_16,
 | 
			
		||||
    "SH1106_64X48": SSD1306Model.SH1106_MODEL_64_48,
 | 
			
		||||
    "SH1107_128X64": SSD1306Model.SH1107_MODEL_128_64,
 | 
			
		||||
    "SH1107_128X128": SSD1306Model.SH1107_MODEL_128_128,
 | 
			
		||||
    "SSD1305_128X32": SSD1306Model.SSD1305_MODEL_128_32,
 | 
			
		||||
    "SSD1305_128X64": SSD1306Model.SSD1305_MODEL_128_64,
 | 
			
		||||
}
 | 
			
		||||
@@ -63,8 +64,10 @@ SSD1306_SCHEMA = display.FULL_DISPLAY_SCHEMA.extend(
 | 
			
		||||
        cv.Optional(CONF_EXTERNAL_VCC): cv.boolean,
 | 
			
		||||
        cv.Optional(CONF_FLIP_X, default=True): cv.boolean,
 | 
			
		||||
        cv.Optional(CONF_FLIP_Y, default=True): cv.boolean,
 | 
			
		||||
        cv.Optional(CONF_OFFSET_X, default=0): cv.int_range(min=-32, max=32),
 | 
			
		||||
        cv.Optional(CONF_OFFSET_Y, default=0): cv.int_range(min=-32, max=32),
 | 
			
		||||
        # Offsets determine shifts of memory location to LCD rows/columns,
 | 
			
		||||
        # and this family of controllers supports up to 128x128 screens
 | 
			
		||||
        cv.Optional(CONF_OFFSET_X, default=0): cv.int_range(min=0, max=128),
 | 
			
		||||
        cv.Optional(CONF_OFFSET_Y, default=0): cv.int_range(min=0, max=128),
 | 
			
		||||
        cv.Optional(CONF_INVERT, default=False): cv.boolean,
 | 
			
		||||
    }
 | 
			
		||||
).extend(cv.polling_component_schema("1s"))
 | 
			
		||||
 
 | 
			
		||||
@@ -35,16 +35,31 @@ static const uint8_t SSD1306_COMMAND_INVERSE_DISPLAY = 0xA7;
 | 
			
		||||
static const uint8_t SSD1305_COMMAND_SET_BRIGHTNESS = 0x82;
 | 
			
		||||
static const uint8_t SSD1305_COMMAND_SET_AREA_COLOR = 0xD8;
 | 
			
		||||
 | 
			
		||||
static const uint8_t SH1107_COMMAND_SET_START_LINE = 0xDC;
 | 
			
		||||
static const uint8_t SH1107_COMMAND_CHARGE_PUMP = 0xAD;
 | 
			
		||||
 | 
			
		||||
void SSD1306::setup() {
 | 
			
		||||
  this->init_internal_(this->get_buffer_length_());
 | 
			
		||||
 | 
			
		||||
  // SH1107 resources
 | 
			
		||||
  //
 | 
			
		||||
  // Datasheet v2.3:
 | 
			
		||||
  // www.displayfuture.com/Display/datasheet/controller/SH1107.pdf
 | 
			
		||||
  // Adafruit C++ driver:
 | 
			
		||||
  // github.com/adafruit/Adafruit_SH110x
 | 
			
		||||
  // Adafruit CircuitPython driver:
 | 
			
		||||
  // github.com/adafruit/Adafruit_CircuitPython_DisplayIO_SH1107
 | 
			
		||||
 | 
			
		||||
  // Turn off display during initialization (0xAE)
 | 
			
		||||
  this->command(SSD1306_COMMAND_DISPLAY_OFF);
 | 
			
		||||
 | 
			
		||||
  // Set oscillator frequency to 4'b1000 with no clock division (0xD5)
 | 
			
		||||
  this->command(SSD1306_COMMAND_SET_DISPLAY_CLOCK_DIV);
 | 
			
		||||
  // Oscillator frequency <= 4'b1000, no clock division
 | 
			
		||||
  this->command(0x80);
 | 
			
		||||
  // If SH1107, use POR defaults (0x50) = divider 1, frequency +0%
 | 
			
		||||
  if (!this->is_sh1107_()) {
 | 
			
		||||
    // Set oscillator frequency to 4'b1000 with no clock division (0xD5)
 | 
			
		||||
    this->command(SSD1306_COMMAND_SET_DISPLAY_CLOCK_DIV);
 | 
			
		||||
    // Oscillator frequency <= 4'b1000, no clock division
 | 
			
		||||
    this->command(0x80);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Enable low power display mode for SSD1305 (0xD8)
 | 
			
		||||
  if (this->is_ssd1305_()) {
 | 
			
		||||
@@ -60,11 +75,26 @@ void SSD1306::setup() {
 | 
			
		||||
  this->command(SSD1306_COMMAND_SET_DISPLAY_OFFSET_Y);
 | 
			
		||||
  this->command(0x00 + this->offset_y_);
 | 
			
		||||
 | 
			
		||||
  // Set start line at line 0 (0x40)
 | 
			
		||||
  this->command(SSD1306_COMMAND_SET_START_LINE | 0x00);
 | 
			
		||||
  if (this->is_sh1107_()) {
 | 
			
		||||
    // Set start line at line 0 (0xDC)
 | 
			
		||||
    this->command(SH1107_COMMAND_SET_START_LINE);
 | 
			
		||||
    this->command(0x00);
 | 
			
		||||
  } else {
 | 
			
		||||
    // Set start line at line 0 (0x40)
 | 
			
		||||
    this->command(SSD1306_COMMAND_SET_START_LINE | 0x00);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // SSD1305 does not have charge pump
 | 
			
		||||
  if (!this->is_ssd1305_()) {
 | 
			
		||||
  if (this->is_ssd1305_()) {
 | 
			
		||||
    // SSD1305 does not have charge pump
 | 
			
		||||
  } else if (this->is_sh1107_()) {
 | 
			
		||||
    // Enable charge pump (0xAD)
 | 
			
		||||
    this->command(SH1107_COMMAND_CHARGE_PUMP);
 | 
			
		||||
    if (this->external_vcc_) {
 | 
			
		||||
      this->command(0x8A);
 | 
			
		||||
    } else {
 | 
			
		||||
      this->command(0x8B);
 | 
			
		||||
    }
 | 
			
		||||
  } else {
 | 
			
		||||
    // Enable charge pump (0x8D)
 | 
			
		||||
    this->command(SSD1306_COMMAND_CHARGE_PUMP);
 | 
			
		||||
    if (this->external_vcc_) {
 | 
			
		||||
@@ -76,34 +106,41 @@ void SSD1306::setup() {
 | 
			
		||||
 | 
			
		||||
  // Set addressing mode to horizontal (0x20)
 | 
			
		||||
  this->command(SSD1306_COMMAND_MEMORY_MODE);
 | 
			
		||||
  this->command(0x00);
 | 
			
		||||
 | 
			
		||||
  if (!this->is_sh1107_()) {
 | 
			
		||||
    // SH1107 memory mode is a 1 byte command
 | 
			
		||||
    this->command(0x00);
 | 
			
		||||
  }
 | 
			
		||||
  // X flip mode (0xA0, 0xA1)
 | 
			
		||||
  this->command(SSD1306_COMMAND_SEGRE_MAP | this->flip_x_);
 | 
			
		||||
 | 
			
		||||
  // Y flip mode (0xC0, 0xC8)
 | 
			
		||||
  this->command(SSD1306_COMMAND_COM_SCAN_INC | (this->flip_y_ << 3));
 | 
			
		||||
 | 
			
		||||
  // Set pin configuration (0xDA)
 | 
			
		||||
  this->command(SSD1306_COMMAND_SET_COM_PINS);
 | 
			
		||||
  switch (this->model_) {
 | 
			
		||||
    case SSD1306_MODEL_128_32:
 | 
			
		||||
    case SH1106_MODEL_128_32:
 | 
			
		||||
    case SSD1306_MODEL_96_16:
 | 
			
		||||
    case SH1106_MODEL_96_16:
 | 
			
		||||
      this->command(0x02);
 | 
			
		||||
      break;
 | 
			
		||||
    case SSD1306_MODEL_128_64:
 | 
			
		||||
    case SH1106_MODEL_128_64:
 | 
			
		||||
    case SSD1306_MODEL_64_48:
 | 
			
		||||
    case SSD1306_MODEL_64_32:
 | 
			
		||||
    case SH1106_MODEL_64_48:
 | 
			
		||||
    case SH1107_MODEL_128_64:
 | 
			
		||||
    case SSD1305_MODEL_128_32:
 | 
			
		||||
    case SSD1305_MODEL_128_64:
 | 
			
		||||
    case SSD1306_MODEL_72_40:
 | 
			
		||||
      this->command(0x12);
 | 
			
		||||
      break;
 | 
			
		||||
  if (!this->is_sh1107_()) {
 | 
			
		||||
    // Set pin configuration (0xDA)
 | 
			
		||||
    this->command(SSD1306_COMMAND_SET_COM_PINS);
 | 
			
		||||
    switch (this->model_) {
 | 
			
		||||
      case SSD1306_MODEL_128_32:
 | 
			
		||||
      case SH1106_MODEL_128_32:
 | 
			
		||||
      case SSD1306_MODEL_96_16:
 | 
			
		||||
      case SH1106_MODEL_96_16:
 | 
			
		||||
        this->command(0x02);
 | 
			
		||||
        break;
 | 
			
		||||
      case SSD1306_MODEL_128_64:
 | 
			
		||||
      case SH1106_MODEL_128_64:
 | 
			
		||||
      case SSD1306_MODEL_64_48:
 | 
			
		||||
      case SSD1306_MODEL_64_32:
 | 
			
		||||
      case SH1106_MODEL_64_48:
 | 
			
		||||
      case SSD1305_MODEL_128_32:
 | 
			
		||||
      case SSD1305_MODEL_128_64:
 | 
			
		||||
      case SSD1306_MODEL_72_40:
 | 
			
		||||
        this->command(0x12);
 | 
			
		||||
        break;
 | 
			
		||||
      case SH1107_MODEL_128_64:
 | 
			
		||||
      case SH1107_MODEL_128_128:
 | 
			
		||||
        // Not used, but prevents build warning
 | 
			
		||||
        break;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Pre-charge period (0xD9)
 | 
			
		||||
@@ -118,6 +155,7 @@ void SSD1306::setup() {
 | 
			
		||||
  this->command(SSD1306_COMMAND_SET_VCOM_DETECT);
 | 
			
		||||
  switch (this->model_) {
 | 
			
		||||
    case SH1107_MODEL_128_64:
 | 
			
		||||
    case SH1107_MODEL_128_128:
 | 
			
		||||
      this->command(0x35);
 | 
			
		||||
      break;
 | 
			
		||||
    case SSD1306_MODEL_72_40:
 | 
			
		||||
@@ -149,7 +187,7 @@ void SSD1306::setup() {
 | 
			
		||||
  this->turn_on();
 | 
			
		||||
}
 | 
			
		||||
void SSD1306::display() {
 | 
			
		||||
  if (this->is_sh1106_()) {
 | 
			
		||||
  if (this->is_sh1106_() || this->is_sh1107_()) {
 | 
			
		||||
    this->write_display_data();
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
@@ -183,6 +221,7 @@ bool SSD1306::is_sh1106_() const {
 | 
			
		||||
  return this->model_ == SH1106_MODEL_96_16 || this->model_ == SH1106_MODEL_128_32 ||
 | 
			
		||||
         this->model_ == SH1106_MODEL_128_64;
 | 
			
		||||
}
 | 
			
		||||
bool SSD1306::is_sh1107_() const { return this->model_ == SH1107_MODEL_128_64 || this->model_ == SH1107_MODEL_128_128; }
 | 
			
		||||
bool SSD1306::is_ssd1305_() const {
 | 
			
		||||
  return this->model_ == SSD1305_MODEL_128_32 || this->model_ == SSD1305_MODEL_128_64;
 | 
			
		||||
}
 | 
			
		||||
@@ -224,6 +263,7 @@ void SSD1306::turn_off() {
 | 
			
		||||
int SSD1306::get_height_internal() {
 | 
			
		||||
  switch (this->model_) {
 | 
			
		||||
    case SH1107_MODEL_128_64:
 | 
			
		||||
    case SH1107_MODEL_128_128:
 | 
			
		||||
      return 128;
 | 
			
		||||
    case SSD1306_MODEL_128_32:
 | 
			
		||||
    case SSD1306_MODEL_64_32:
 | 
			
		||||
@@ -254,6 +294,7 @@ int SSD1306::get_width_internal() {
 | 
			
		||||
    case SH1106_MODEL_128_64:
 | 
			
		||||
    case SSD1305_MODEL_128_32:
 | 
			
		||||
    case SSD1305_MODEL_128_64:
 | 
			
		||||
    case SH1107_MODEL_128_128:
 | 
			
		||||
      return 128;
 | 
			
		||||
    case SSD1306_MODEL_96_16:
 | 
			
		||||
    case SH1106_MODEL_96_16:
 | 
			
		||||
 
 | 
			
		||||
@@ -19,6 +19,7 @@ enum SSD1306Model {
 | 
			
		||||
  SH1106_MODEL_96_16,
 | 
			
		||||
  SH1106_MODEL_64_48,
 | 
			
		||||
  SH1107_MODEL_128_64,
 | 
			
		||||
  SH1107_MODEL_128_128,
 | 
			
		||||
  SSD1305_MODEL_128_32,
 | 
			
		||||
  SSD1305_MODEL_128_64,
 | 
			
		||||
};
 | 
			
		||||
@@ -58,6 +59,7 @@ class SSD1306 : public PollingComponent, public display::DisplayBuffer {
 | 
			
		||||
  void init_reset_();
 | 
			
		||||
 | 
			
		||||
  bool is_sh1106_() const;
 | 
			
		||||
  bool is_sh1107_() const;
 | 
			
		||||
  bool is_ssd1305_() const;
 | 
			
		||||
 | 
			
		||||
  void draw_absolute_pixel_internal(int x, int y, Color color) override;
 | 
			
		||||
 
 | 
			
		||||
@@ -38,13 +38,19 @@ void I2CSSD1306::dump_config() {
 | 
			
		||||
}
 | 
			
		||||
void I2CSSD1306::command(uint8_t value) { this->write_byte(0x00, value); }
 | 
			
		||||
void HOT I2CSSD1306::write_display_data() {
 | 
			
		||||
  if (this->is_sh1106_()) {
 | 
			
		||||
  if (this->is_sh1106_() || this->is_sh1107_()) {
 | 
			
		||||
    uint32_t i = 0;
 | 
			
		||||
    for (uint8_t page = 0; page < (uint8_t) this->get_height_internal() / 8; page++) {
 | 
			
		||||
      this->command(0xB0 + page);  // row
 | 
			
		||||
      this->command(0x02);         // lower column
 | 
			
		||||
      this->command(0x10);         // higher column
 | 
			
		||||
 | 
			
		||||
      if (this->is_sh1106_()) {
 | 
			
		||||
        this->command(0x02);  // lower column - 0x02 is historical SH1106 value
 | 
			
		||||
      } else {
 | 
			
		||||
        // Other SH1107 drivers use 0x00
 | 
			
		||||
        // Column values don't change and it seems they can be set only once,
 | 
			
		||||
        // but we follow SH1106 implementation and resend them
 | 
			
		||||
        this->command(0x00);
 | 
			
		||||
      }
 | 
			
		||||
      this->command(0x10);  // higher column
 | 
			
		||||
      for (uint8_t x = 0; x < (uint8_t) this->get_width_internal() / 16; x++) {
 | 
			
		||||
        uint8_t data[16];
 | 
			
		||||
        for (uint8_t &j : data)
 | 
			
		||||
 
 | 
			
		||||
@@ -36,10 +36,14 @@ void SPISSD1306::command(uint8_t value) {
 | 
			
		||||
  this->disable();
 | 
			
		||||
}
 | 
			
		||||
void HOT SPISSD1306::write_display_data() {
 | 
			
		||||
  if (this->is_sh1106_()) {
 | 
			
		||||
  if (this->is_sh1106_() || this->is_sh1107_()) {
 | 
			
		||||
    for (uint8_t y = 0; y < (uint8_t) this->get_height_internal() / 8; y++) {
 | 
			
		||||
      this->command(0xB0 + y);
 | 
			
		||||
      this->command(0x02);
 | 
			
		||||
      if (this->is_sh1106_()) {
 | 
			
		||||
        this->command(0x02);
 | 
			
		||||
      } else {
 | 
			
		||||
        this->command(0x00);
 | 
			
		||||
      }
 | 
			
		||||
      this->command(0x10);
 | 
			
		||||
      this->dc_pin_->digital_write(true);
 | 
			
		||||
      for (uint8_t x = 0; x < (uint8_t) this->get_width_internal(); x++) {
 | 
			
		||||
 
 | 
			
		||||
@@ -18,20 +18,25 @@ DEPENDENCIES = ["api", "microphone"]
 | 
			
		||||
 | 
			
		||||
CODEOWNERS = ["@jesserockz"]
 | 
			
		||||
 | 
			
		||||
CONF_SILENCE_DETECTION = "silence_detection"
 | 
			
		||||
CONF_ON_LISTENING = "on_listening"
 | 
			
		||||
CONF_ON_START = "on_start"
 | 
			
		||||
CONF_ON_WAKE_WORD_DETECTED = "on_wake_word_detected"
 | 
			
		||||
CONF_ON_STT_END = "on_stt_end"
 | 
			
		||||
CONF_ON_TTS_START = "on_tts_start"
 | 
			
		||||
CONF_ON_TTS_END = "on_tts_end"
 | 
			
		||||
CONF_ON_END = "on_end"
 | 
			
		||||
CONF_ON_ERROR = "on_error"
 | 
			
		||||
CONF_ON_INTENT_END = "on_intent_end"
 | 
			
		||||
CONF_ON_INTENT_START = "on_intent_start"
 | 
			
		||||
CONF_ON_LISTENING = "on_listening"
 | 
			
		||||
CONF_ON_START = "on_start"
 | 
			
		||||
CONF_ON_STT_END = "on_stt_end"
 | 
			
		||||
CONF_ON_STT_VAD_END = "on_stt_vad_end"
 | 
			
		||||
CONF_ON_STT_VAD_START = "on_stt_vad_start"
 | 
			
		||||
CONF_ON_TTS_END = "on_tts_end"
 | 
			
		||||
CONF_ON_TTS_START = "on_tts_start"
 | 
			
		||||
CONF_ON_WAKE_WORD_DETECTED = "on_wake_word_detected"
 | 
			
		||||
 | 
			
		||||
CONF_SILENCE_DETECTION = "silence_detection"
 | 
			
		||||
CONF_USE_WAKE_WORD = "use_wake_word"
 | 
			
		||||
CONF_VAD_THRESHOLD = "vad_threshold"
 | 
			
		||||
 | 
			
		||||
CONF_NOISE_SUPPRESSION_LEVEL = "noise_suppression_level"
 | 
			
		||||
CONF_AUTO_GAIN = "auto_gain"
 | 
			
		||||
CONF_NOISE_SUPPRESSION_LEVEL = "noise_suppression_level"
 | 
			
		||||
CONF_VOLUME_MULTIPLIER = "volume_multiplier"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@@ -88,6 +93,18 @@ CONFIG_SCHEMA = cv.All(
 | 
			
		||||
            cv.Optional(CONF_ON_CLIENT_DISCONNECTED): automation.validate_automation(
 | 
			
		||||
                single=True
 | 
			
		||||
            ),
 | 
			
		||||
            cv.Optional(CONF_ON_INTENT_START): automation.validate_automation(
 | 
			
		||||
                single=True
 | 
			
		||||
            ),
 | 
			
		||||
            cv.Optional(CONF_ON_INTENT_END): automation.validate_automation(
 | 
			
		||||
                single=True
 | 
			
		||||
            ),
 | 
			
		||||
            cv.Optional(CONF_ON_STT_VAD_START): automation.validate_automation(
 | 
			
		||||
                single=True
 | 
			
		||||
            ),
 | 
			
		||||
            cv.Optional(CONF_ON_STT_VAD_END): automation.validate_automation(
 | 
			
		||||
                single=True
 | 
			
		||||
            ),
 | 
			
		||||
        }
 | 
			
		||||
    ).extend(cv.COMPONENT_SCHEMA),
 | 
			
		||||
)
 | 
			
		||||
@@ -177,6 +194,34 @@ async def to_code(config):
 | 
			
		||||
            config[CONF_ON_CLIENT_DISCONNECTED],
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    if CONF_ON_INTENT_START in config:
 | 
			
		||||
        await automation.build_automation(
 | 
			
		||||
            var.get_intent_start_trigger(),
 | 
			
		||||
            [],
 | 
			
		||||
            config[CONF_ON_INTENT_START],
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    if CONF_ON_INTENT_END in config:
 | 
			
		||||
        await automation.build_automation(
 | 
			
		||||
            var.get_intent_end_trigger(),
 | 
			
		||||
            [],
 | 
			
		||||
            config[CONF_ON_INTENT_END],
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    if CONF_ON_STT_VAD_START in config:
 | 
			
		||||
        await automation.build_automation(
 | 
			
		||||
            var.get_stt_vad_start_trigger(),
 | 
			
		||||
            [],
 | 
			
		||||
            config[CONF_ON_STT_VAD_START],
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    if CONF_ON_STT_VAD_END in config:
 | 
			
		||||
        await automation.build_automation(
 | 
			
		||||
            var.get_stt_vad_end_trigger(),
 | 
			
		||||
            [],
 | 
			
		||||
            config[CONF_ON_STT_VAD_END],
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    cg.add_define("USE_VOICE_ASSISTANT")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -31,7 +31,7 @@ void VoiceAssistant::setup() {
 | 
			
		||||
 | 
			
		||||
  this->socket_ = socket::socket(AF_INET, SOCK_DGRAM, IPPROTO_IP);
 | 
			
		||||
  if (socket_ == nullptr) {
 | 
			
		||||
    ESP_LOGW(TAG, "Could not create socket.");
 | 
			
		||||
    ESP_LOGW(TAG, "Could not create socket");
 | 
			
		||||
    this->mark_failed();
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
@@ -69,7 +69,7 @@ void VoiceAssistant::setup() {
 | 
			
		||||
    ExternalRAMAllocator<uint8_t> speaker_allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
 | 
			
		||||
    this->speaker_buffer_ = speaker_allocator.allocate(SPEAKER_BUFFER_SIZE);
 | 
			
		||||
    if (this->speaker_buffer_ == nullptr) {
 | 
			
		||||
      ESP_LOGW(TAG, "Could not allocate speaker buffer.");
 | 
			
		||||
      ESP_LOGW(TAG, "Could not allocate speaker buffer");
 | 
			
		||||
      this->mark_failed();
 | 
			
		||||
      return;
 | 
			
		||||
    }
 | 
			
		||||
@@ -79,7 +79,7 @@ void VoiceAssistant::setup() {
 | 
			
		||||
  ExternalRAMAllocator<int16_t> allocator(ExternalRAMAllocator<int16_t>::ALLOW_FAILURE);
 | 
			
		||||
  this->input_buffer_ = allocator.allocate(INPUT_BUFFER_SIZE);
 | 
			
		||||
  if (this->input_buffer_ == nullptr) {
 | 
			
		||||
    ESP_LOGW(TAG, "Could not allocate input buffer.");
 | 
			
		||||
    ESP_LOGW(TAG, "Could not allocate input buffer");
 | 
			
		||||
    this->mark_failed();
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
@@ -89,7 +89,7 @@ void VoiceAssistant::setup() {
 | 
			
		||||
 | 
			
		||||
  this->ring_buffer_ = rb_create(BUFFER_SIZE, sizeof(int16_t));
 | 
			
		||||
  if (this->ring_buffer_ == nullptr) {
 | 
			
		||||
    ESP_LOGW(TAG, "Could not allocate ring buffer.");
 | 
			
		||||
    ESP_LOGW(TAG, "Could not allocate ring buffer");
 | 
			
		||||
    this->mark_failed();
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
@@ -98,7 +98,7 @@ void VoiceAssistant::setup() {
 | 
			
		||||
  ExternalRAMAllocator<uint8_t> send_allocator(ExternalRAMAllocator<uint8_t>::ALLOW_FAILURE);
 | 
			
		||||
  this->send_buffer_ = send_allocator.allocate(SEND_BUFFER_SIZE);
 | 
			
		||||
  if (send_buffer_ == nullptr) {
 | 
			
		||||
    ESP_LOGW(TAG, "Could not allocate send buffer.");
 | 
			
		||||
    ESP_LOGW(TAG, "Could not allocate send buffer");
 | 
			
		||||
    this->mark_failed();
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
@@ -221,8 +221,8 @@ void VoiceAssistant::loop() {
 | 
			
		||||
      msg.audio_settings = audio_settings;
 | 
			
		||||
 | 
			
		||||
      if (this->api_client_ == nullptr || !this->api_client_->send_voice_assistant_request(msg)) {
 | 
			
		||||
        ESP_LOGW(TAG, "Could not request start.");
 | 
			
		||||
        this->error_trigger_->trigger("not-connected", "Could not request start.");
 | 
			
		||||
        ESP_LOGW(TAG, "Could not request start");
 | 
			
		||||
        this->error_trigger_->trigger("not-connected", "Could not request start");
 | 
			
		||||
        this->continuous_ = false;
 | 
			
		||||
        this->set_state_(State::IDLE, State::IDLE);
 | 
			
		||||
        break;
 | 
			
		||||
@@ -280,7 +280,7 @@ void VoiceAssistant::loop() {
 | 
			
		||||
            this->speaker_buffer_size_ += len;
 | 
			
		||||
          }
 | 
			
		||||
        } else {
 | 
			
		||||
          ESP_LOGW(TAG, "Receive buffer full.");
 | 
			
		||||
          ESP_LOGW(TAG, "Receive buffer full");
 | 
			
		||||
        }
 | 
			
		||||
        if (this->speaker_buffer_size_ > 0) {
 | 
			
		||||
          size_t written = this->speaker_->play(this->speaker_buffer_, this->speaker_buffer_size_);
 | 
			
		||||
@@ -290,7 +290,7 @@ void VoiceAssistant::loop() {
 | 
			
		||||
            this->speaker_buffer_index_ -= written;
 | 
			
		||||
            this->set_timeout("speaker-timeout", 2000, [this]() { this->speaker_->stop(); });
 | 
			
		||||
          } else {
 | 
			
		||||
            ESP_LOGW(TAG, "Speaker buffer full.");
 | 
			
		||||
            ESP_LOGW(TAG, "Speaker buffer full");
 | 
			
		||||
          }
 | 
			
		||||
        }
 | 
			
		||||
        if (this->wait_for_stream_end_) {
 | 
			
		||||
@@ -513,7 +513,7 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
 | 
			
		||||
      break;
 | 
			
		||||
    }
 | 
			
		||||
    case api::enums::VOICE_ASSISTANT_STT_START:
 | 
			
		||||
      ESP_LOGD(TAG, "STT Started");
 | 
			
		||||
      ESP_LOGD(TAG, "STT started");
 | 
			
		||||
      this->listening_trigger_->trigger();
 | 
			
		||||
      break;
 | 
			
		||||
    case api::enums::VOICE_ASSISTANT_STT_END: {
 | 
			
		||||
@@ -525,19 +525,24 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      if (text.empty()) {
 | 
			
		||||
        ESP_LOGW(TAG, "No text in STT_END event.");
 | 
			
		||||
        ESP_LOGW(TAG, "No text in STT_END event");
 | 
			
		||||
        return;
 | 
			
		||||
      }
 | 
			
		||||
      ESP_LOGD(TAG, "Speech recognised as: \"%s\"", text.c_str());
 | 
			
		||||
      this->stt_end_trigger_->trigger(text);
 | 
			
		||||
      break;
 | 
			
		||||
    }
 | 
			
		||||
    case api::enums::VOICE_ASSISTANT_INTENT_START:
 | 
			
		||||
      ESP_LOGD(TAG, "Intent started");
 | 
			
		||||
      this->intent_start_trigger_->trigger();
 | 
			
		||||
      break;
 | 
			
		||||
    case api::enums::VOICE_ASSISTANT_INTENT_END: {
 | 
			
		||||
      for (auto arg : msg.data) {
 | 
			
		||||
        if (arg.name == "conversation_id") {
 | 
			
		||||
          this->conversation_id_ = std::move(arg.value);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      this->intent_end_trigger_->trigger();
 | 
			
		||||
      break;
 | 
			
		||||
    }
 | 
			
		||||
    case api::enums::VOICE_ASSISTANT_TTS_START: {
 | 
			
		||||
@@ -548,7 +553,7 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      if (text.empty()) {
 | 
			
		||||
        ESP_LOGW(TAG, "No text in TTS_START event.");
 | 
			
		||||
        ESP_LOGW(TAG, "No text in TTS_START event");
 | 
			
		||||
        return;
 | 
			
		||||
      }
 | 
			
		||||
      ESP_LOGD(TAG, "Response: \"%s\"", text.c_str());
 | 
			
		||||
@@ -566,7 +571,7 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      if (url.empty()) {
 | 
			
		||||
        ESP_LOGW(TAG, "No url in TTS_END event.");
 | 
			
		||||
        ESP_LOGW(TAG, "No url in TTS_END event");
 | 
			
		||||
        return;
 | 
			
		||||
      }
 | 
			
		||||
      ESP_LOGD(TAG, "Response URL: \"%s\"", url.c_str());
 | 
			
		||||
@@ -610,6 +615,11 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
 | 
			
		||||
      if (code == "wake-word-timeout" || code == "wake_word_detection_aborted") {
 | 
			
		||||
        // Don't change state here since either the "tts-end" or "run-end" events will do it.
 | 
			
		||||
        return;
 | 
			
		||||
      } else if (code == "wake-provider-missing" || code == "wake-engine-missing") {
 | 
			
		||||
        // Wake word is not set up or not ready on Home Assistant so stop and do not retry until user starts again.
 | 
			
		||||
        this->request_stop();
 | 
			
		||||
        this->error_trigger_->trigger(code, message);
 | 
			
		||||
        return;
 | 
			
		||||
      }
 | 
			
		||||
      ESP_LOGE(TAG, "Error: %s - %s", code.c_str(), message.c_str());
 | 
			
		||||
      if (this->state_ != State::IDLE) {
 | 
			
		||||
@@ -629,6 +639,14 @@ void VoiceAssistant::on_event(const api::VoiceAssistantEventResponse &msg) {
 | 
			
		||||
      this->set_state_(State::RESPONSE_FINISHED, State::IDLE);
 | 
			
		||||
      break;
 | 
			
		||||
    }
 | 
			
		||||
    case api::enums::VOICE_ASSISTANT_STT_VAD_START:
 | 
			
		||||
      ESP_LOGD(TAG, "Starting STT by VAD");
 | 
			
		||||
      this->stt_vad_start_trigger_->trigger();
 | 
			
		||||
      break;
 | 
			
		||||
    case api::enums::VOICE_ASSISTANT_STT_VAD_END:
 | 
			
		||||
      ESP_LOGD(TAG, "STT by VAD end");
 | 
			
		||||
      this->stt_vad_end_trigger_->trigger();
 | 
			
		||||
      break;
 | 
			
		||||
    default:
 | 
			
		||||
      ESP_LOGD(TAG, "Unhandled event type: %d", msg.event_type);
 | 
			
		||||
      break;
 | 
			
		||||
 
 | 
			
		||||
@@ -100,13 +100,17 @@ class VoiceAssistant : public Component {
 | 
			
		||||
  void set_auto_gain(uint8_t auto_gain) { this->auto_gain_ = auto_gain; }
 | 
			
		||||
  void set_volume_multiplier(float volume_multiplier) { this->volume_multiplier_ = volume_multiplier; }
 | 
			
		||||
 | 
			
		||||
  Trigger<> *get_intent_end_trigger() const { return this->intent_end_trigger_; }
 | 
			
		||||
  Trigger<> *get_intent_start_trigger() const { return this->intent_start_trigger_; }
 | 
			
		||||
  Trigger<> *get_listening_trigger() const { return this->listening_trigger_; }
 | 
			
		||||
  Trigger<> *get_end_trigger() const { return this->end_trigger_; }
 | 
			
		||||
  Trigger<> *get_start_trigger() const { return this->start_trigger_; }
 | 
			
		||||
  Trigger<> *get_stt_vad_end_trigger() const { return this->stt_vad_end_trigger_; }
 | 
			
		||||
  Trigger<> *get_stt_vad_start_trigger() const { return this->stt_vad_start_trigger_; }
 | 
			
		||||
  Trigger<> *get_wake_word_detected_trigger() const { return this->wake_word_detected_trigger_; }
 | 
			
		||||
  Trigger<std::string> *get_stt_end_trigger() const { return this->stt_end_trigger_; }
 | 
			
		||||
  Trigger<std::string> *get_tts_start_trigger() const { return this->tts_start_trigger_; }
 | 
			
		||||
  Trigger<std::string> *get_tts_end_trigger() const { return this->tts_end_trigger_; }
 | 
			
		||||
  Trigger<> *get_end_trigger() const { return this->end_trigger_; }
 | 
			
		||||
  Trigger<std::string> *get_tts_start_trigger() const { return this->tts_start_trigger_; }
 | 
			
		||||
  Trigger<std::string, std::string> *get_error_trigger() const { return this->error_trigger_; }
 | 
			
		||||
 | 
			
		||||
  Trigger<> *get_client_connected_trigger() const { return this->client_connected_trigger_; }
 | 
			
		||||
@@ -124,13 +128,17 @@ class VoiceAssistant : public Component {
 | 
			
		||||
  std::unique_ptr<socket::Socket> socket_ = nullptr;
 | 
			
		||||
  struct sockaddr_storage dest_addr_;
 | 
			
		||||
 | 
			
		||||
  Trigger<> *intent_end_trigger_ = new Trigger<>();
 | 
			
		||||
  Trigger<> *intent_start_trigger_ = new Trigger<>();
 | 
			
		||||
  Trigger<> *listening_trigger_ = new Trigger<>();
 | 
			
		||||
  Trigger<> *end_trigger_ = new Trigger<>();
 | 
			
		||||
  Trigger<> *start_trigger_ = new Trigger<>();
 | 
			
		||||
  Trigger<> *stt_vad_start_trigger_ = new Trigger<>();
 | 
			
		||||
  Trigger<> *stt_vad_end_trigger_ = new Trigger<>();
 | 
			
		||||
  Trigger<> *wake_word_detected_trigger_ = new Trigger<>();
 | 
			
		||||
  Trigger<std::string> *stt_end_trigger_ = new Trigger<std::string>();
 | 
			
		||||
  Trigger<std::string> *tts_start_trigger_ = new Trigger<std::string>();
 | 
			
		||||
  Trigger<std::string> *tts_end_trigger_ = new Trigger<std::string>();
 | 
			
		||||
  Trigger<> *end_trigger_ = new Trigger<>();
 | 
			
		||||
  Trigger<std::string> *tts_start_trigger_ = new Trigger<std::string>();
 | 
			
		||||
  Trigger<std::string, std::string> *error_trigger_ = new Trigger<std::string, std::string>();
 | 
			
		||||
 | 
			
		||||
  Trigger<> *client_connected_trigger_ = new Trigger<>();
 | 
			
		||||
 
 | 
			
		||||
@@ -389,6 +389,10 @@ void WiFiComponent::print_connect_params_() {
 | 
			
		||||
  bssid_t bssid = wifi_bssid();
 | 
			
		||||
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Local MAC: %s", get_mac_address_pretty().c_str());
 | 
			
		||||
  if (this->is_disabled()) {
 | 
			
		||||
    ESP_LOGCONFIG(TAG, "  WiFi is disabled!");
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  SSID: " LOG_SECRET("'%s'"), wifi_ssid().c_str());
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  IP Address: %s", wifi_sta_ip().str().c_str());
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  BSSID: " LOG_SECRET("%02X:%02X:%02X:%02X:%02X:%02X"), bssid[0], bssid[1], bssid[2], bssid[3],
 | 
			
		||||
 
 | 
			
		||||
@@ -43,11 +43,17 @@ def validate_mode(mode):
 | 
			
		||||
    return mode
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def validate_pin(pin):
 | 
			
		||||
    if pin in (8, 9):
 | 
			
		||||
        raise cv.Invalid(f"pin {pin} doesn't exist")
 | 
			
		||||
    return pin
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
XL9535_PIN_SCHEMA = cv.All(
 | 
			
		||||
    {
 | 
			
		||||
        cv.GenerateID(): cv.declare_id(XL9535GPIOPin),
 | 
			
		||||
        cv.Required(CONF_XL9535): cv.use_id(XL9535Component),
 | 
			
		||||
        cv.Required(CONF_NUMBER): cv.int_range(min=0, max=15),
 | 
			
		||||
        cv.Required(CONF_NUMBER): cv.All(cv.int_range(min=0, max=17), validate_pin),
 | 
			
		||||
        cv.Optional(CONF_MODE, default={}): cv.All(
 | 
			
		||||
            {
 | 
			
		||||
                cv.Optional(CONF_INPUT, default=False): cv.boolean,
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +1,6 @@
 | 
			
		||||
"""Constants used by esphome."""
 | 
			
		||||
 | 
			
		||||
__version__ = "2023.11.0b2"
 | 
			
		||||
__version__ = "2023.12.0-dev"
 | 
			
		||||
 | 
			
		||||
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
 | 
			
		||||
VALID_SUBSTITUTIONS_CHARACTERS = (
 | 
			
		||||
 
esphome/dashboard/const.py (new file)
@@ -0,0 +1,8 @@
from __future__ import annotations

EVENT_ENTRY_ADDED = "entry_added"
EVENT_ENTRY_REMOVED = "entry_removed"
EVENT_ENTRY_UPDATED = "entry_updated"
EVENT_ENTRY_STATE_CHANGED = "entry_state_changed"

SENTINEL = object()
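SENTINEL is a unique marker object used later (for example in status/mdns.py) to tell "key missing" apart from a stored value of None, since None is itself a legitimate "state unknown" value in these mappings. A minimal sketch of that pattern; the dictionary contents are illustrative and the import path is assumed from the file path above:

    # Hedged sketch of the sentinel pattern used by the dashboard code.
    from esphome.dashboard.const import SENTINEL  # module path assumed from the file above

    state_by_host = {"kitchen": None}  # illustrative data, not from the diff

    if (state := state_by_host.get("kitchen", SENTINEL)) is not SENTINEL:
        print("kitchen is tracked, state:", state)   # state is None here, but the key exists
    if state_by_host.get("garage", SENTINEL) is SENTINEL:
        print("garage has never been seen")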
esphome/dashboard/core.py (new file)
@@ -0,0 +1,135 @@
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import asyncio
 | 
			
		||||
import logging
 | 
			
		||||
import threading
 | 
			
		||||
from dataclasses import dataclass
 | 
			
		||||
from functools import partial
 | 
			
		||||
from typing import TYPE_CHECKING, Any, Callable
 | 
			
		||||
 | 
			
		||||
from ..zeroconf import DiscoveredImport
 | 
			
		||||
from .entries import DashboardEntries
 | 
			
		||||
from .settings import DashboardSettings
 | 
			
		||||
 | 
			
		||||
if TYPE_CHECKING:
 | 
			
		||||
    from .status.mdns import MDNSStatus
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_LOGGER = logging.getLogger(__name__)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@dataclass
 | 
			
		||||
class Event:
 | 
			
		||||
    """Dashboard Event."""
 | 
			
		||||
 | 
			
		||||
    event_type: str
 | 
			
		||||
    data: dict[str, Any]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class EventBus:
 | 
			
		||||
    """Dashboard event bus."""
 | 
			
		||||
 | 
			
		||||
    def __init__(self) -> None:
 | 
			
		||||
        """Initialize the Dashboard event bus."""
 | 
			
		||||
        self._listeners: dict[str, set[Callable[[Event], None]]] = {}
 | 
			
		||||
 | 
			
		||||
    def async_add_listener(
 | 
			
		||||
        self, event_type: str, listener: Callable[[Event], None]
 | 
			
		||||
    ) -> Callable[[], None]:
 | 
			
		||||
        """Add a listener to the event bus."""
 | 
			
		||||
        self._listeners.setdefault(event_type, set()).add(listener)
 | 
			
		||||
        return partial(self._async_remove_listener, event_type, listener)
 | 
			
		||||
 | 
			
		||||
    def _async_remove_listener(
 | 
			
		||||
        self, event_type: str, listener: Callable[[Event], None]
 | 
			
		||||
    ) -> None:
 | 
			
		||||
        """Remove a listener from the event bus."""
 | 
			
		||||
        self._listeners[event_type].discard(listener)
 | 
			
		||||
 | 
			
		||||
    def async_fire(self, event_type: str, event_data: dict[str, Any]) -> None:
 | 
			
		||||
        """Fire an event."""
 | 
			
		||||
        event = Event(event_type, event_data)
 | 
			
		||||
 | 
			
		||||
        _LOGGER.debug("Firing event: %s", event)
 | 
			
		||||
 | 
			
		||||
        for listener in self._listeners.get(event_type, set()):
 | 
			
		||||
            listener(event)
 | 
			
		||||
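The EventBus above is a small publish/subscribe helper for the dashboard; despite the async_ prefixes its methods are plain synchronous calls intended to run on the event loop thread. A short usage sketch (event name and data are illustrative; the import path is assumed from the file path above):

    # Hedged usage sketch of the EventBus defined above.
    from esphome.dashboard.core import Event, EventBus

    def on_entry_added(event: Event) -> None:
        print("added:", event.data["entry"])

    bus = EventBus()
    # async_add_listener returns a callable that detaches the listener again.
    unsubscribe = bus.async_add_listener("entry_added", on_entry_added)
    bus.async_fire("entry_added", {"entry": "living-room.yaml"})  # runs on_entry_added
    unsubscribe()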
 | 
			
		||||
 | 
			
		||||
class ESPHomeDashboard:
 | 
			
		||||
    """Class that represents the dashboard."""
 | 
			
		||||
 | 
			
		||||
    __slots__ = (
 | 
			
		||||
        "bus",
 | 
			
		||||
        "entries",
 | 
			
		||||
        "loop",
 | 
			
		||||
        "import_result",
 | 
			
		||||
        "stop_event",
 | 
			
		||||
        "ping_request",
 | 
			
		||||
        "mqtt_ping_request",
 | 
			
		||||
        "mdns_status",
 | 
			
		||||
        "settings",
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    def __init__(self) -> None:
 | 
			
		||||
        """Initialize the ESPHomeDashboard."""
 | 
			
		||||
        self.bus = EventBus()
 | 
			
		||||
        self.entries: DashboardEntries | None = None
 | 
			
		||||
        self.loop: asyncio.AbstractEventLoop | None = None
 | 
			
		||||
        self.import_result: dict[str, DiscoveredImport] = {}
 | 
			
		||||
        self.stop_event = threading.Event()
 | 
			
		||||
        self.ping_request: asyncio.Event | None = None
 | 
			
		||||
        self.mqtt_ping_request = threading.Event()
 | 
			
		||||
        self.mdns_status: MDNSStatus | None = None
 | 
			
		||||
        self.settings: DashboardSettings = DashboardSettings()
 | 
			
		||||
 | 
			
		||||
    async def async_setup(self) -> None:
 | 
			
		||||
        """Setup the dashboard."""
 | 
			
		||||
        self.loop = asyncio.get_running_loop()
 | 
			
		||||
        self.ping_request = asyncio.Event()
 | 
			
		||||
        self.entries = DashboardEntries(self)
 | 
			
		||||
 | 
			
		||||
    async def async_run(self) -> None:
 | 
			
		||||
        """Run the dashboard."""
 | 
			
		||||
        settings = self.settings
 | 
			
		||||
        mdns_task: asyncio.Task | None = None
 | 
			
		||||
        ping_status_task: asyncio.Task | None = None
 | 
			
		||||
        await self.entries.async_update_entries()
 | 
			
		||||
 | 
			
		||||
        if settings.status_use_ping:
 | 
			
		||||
            from .status.ping import PingStatus
 | 
			
		||||
 | 
			
		||||
            ping_status = PingStatus()
 | 
			
		||||
            ping_status_task = asyncio.create_task(ping_status.async_run())
 | 
			
		||||
        else:
 | 
			
		||||
            from .status.mdns import MDNSStatus
 | 
			
		||||
 | 
			
		||||
            mdns_status = MDNSStatus()
 | 
			
		||||
            await mdns_status.async_refresh_hosts()
 | 
			
		||||
            self.mdns_status = mdns_status
 | 
			
		||||
            mdns_task = asyncio.create_task(mdns_status.async_run())
 | 
			
		||||
 | 
			
		||||
        if settings.status_use_mqtt:
 | 
			
		||||
            from .status.mqtt import MqttStatusThread
 | 
			
		||||
 | 
			
		||||
            status_thread_mqtt = MqttStatusThread()
 | 
			
		||||
            status_thread_mqtt.start()
 | 
			
		||||
 | 
			
		||||
        shutdown_event = asyncio.Event()
 | 
			
		||||
        try:
 | 
			
		||||
            await shutdown_event.wait()
 | 
			
		||||
        finally:
 | 
			
		||||
            _LOGGER.info("Shutting down...")
 | 
			
		||||
            self.stop_event.set()
 | 
			
		||||
            self.ping_request.set()
 | 
			
		||||
            if ping_status_task:
 | 
			
		||||
                ping_status_task.cancel()
 | 
			
		||||
            if mdns_task:
 | 
			
		||||
                mdns_task.cancel()
 | 
			
		||||
            if settings.status_use_mqtt:
 | 
			
		||||
                status_thread_mqtt.join()
 | 
			
		||||
                self.mqtt_ping_request.set()
 | 
			
		||||
            await asyncio.sleep(0)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
DASHBOARD = ESPHomeDashboard()
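DASHBOARD is a module-level singleton: async_setup() binds the running loop and creates the entries store, and async_run() starts the status tasks and waits until shutdown. A hypothetical, minimal entry point under those assumptions (the real startup also parses settings and starts the web server, which are outside this file):

    # Hypothetical entry point; not part of the diff.
    import asyncio

    from esphome.dashboard.core import DASHBOARD

    async def main() -> None:
        await DASHBOARD.async_setup()  # bind loop, create DashboardEntries
        await DASHBOARD.async_run()    # run status tasks until shutdown

    asyncio.run(main())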

(File diff suppressed because it is too large)

esphome/dashboard/entries.py (new file)
@@ -0,0 +1,371 @@
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import asyncio
 | 
			
		||||
import logging
 | 
			
		||||
import os
 | 
			
		||||
from typing import TYPE_CHECKING, Any
 | 
			
		||||
 | 
			
		||||
from esphome import const, util
 | 
			
		||||
from esphome.storage_json import StorageJSON, ext_storage_path
 | 
			
		||||
 | 
			
		||||
from .const import (
 | 
			
		||||
    EVENT_ENTRY_ADDED,
 | 
			
		||||
    EVENT_ENTRY_REMOVED,
 | 
			
		||||
    EVENT_ENTRY_STATE_CHANGED,
 | 
			
		||||
    EVENT_ENTRY_UPDATED,
 | 
			
		||||
)
 | 
			
		||||
from .enum import StrEnum
 | 
			
		||||
 | 
			
		||||
if TYPE_CHECKING:
 | 
			
		||||
    from .core import ESPHomeDashboard
 | 
			
		||||
 | 
			
		||||
_LOGGER = logging.getLogger(__name__)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
DashboardCacheKeyType = tuple[int, int, float, int]
 | 
			
		||||
 | 
			
		||||
# Currently EntryState is a simple
 | 
			
		||||
# online/offline/unknown enum, but in the future
 | 
			
		||||
# it may be expanded to include more states
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class EntryState(StrEnum):
 | 
			
		||||
    ONLINE = "online"
 | 
			
		||||
    OFFLINE = "offline"
 | 
			
		||||
    UNKNOWN = "unknown"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_BOOL_TO_ENTRY_STATE = {
 | 
			
		||||
    True: EntryState.ONLINE,
 | 
			
		||||
    False: EntryState.OFFLINE,
 | 
			
		||||
    None: EntryState.UNKNOWN,
 | 
			
		||||
}
 | 
			
		||||
_ENTRY_STATE_TO_BOOL = {
 | 
			
		||||
    EntryState.ONLINE: True,
 | 
			
		||||
    EntryState.OFFLINE: False,
 | 
			
		||||
    EntryState.UNKNOWN: None,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def bool_to_entry_state(value: bool) -> EntryState:
 | 
			
		||||
    """Convert a bool to an entry state."""
 | 
			
		||||
    return _BOOL_TO_ENTRY_STATE[value]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def entry_state_to_bool(value: EntryState) -> bool | None:
 | 
			
		||||
    """Convert an entry state to a bool."""
 | 
			
		||||
    return _ENTRY_STATE_TO_BOOL[value]
 | 
			
		||||
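The two lookup tables give a symmetric mapping between the tri-state bool reported by the status checkers (True/False/None) and EntryState. A quick round-trip illustration, with the import path assumed from the file path above:

    # Round-trip illustration of the helpers above.
    from esphome.dashboard.entries import (
        EntryState,
        bool_to_entry_state,
        entry_state_to_bool,
    )

    assert bool_to_entry_state(True) is EntryState.ONLINE
    assert bool_to_entry_state(None) is EntryState.UNKNOWN
    assert entry_state_to_bool(EntryState.OFFLINE) is False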
 | 
			
		||||
 | 
			
		||||
class DashboardEntries:
 | 
			
		||||
    """Represents all dashboard entries."""
 | 
			
		||||
 | 
			
		||||
    __slots__ = (
 | 
			
		||||
        "_dashboard",
 | 
			
		||||
        "_loop",
 | 
			
		||||
        "_config_dir",
 | 
			
		||||
        "_entries",
 | 
			
		||||
        "_entry_states",
 | 
			
		||||
        "_loaded_entries",
 | 
			
		||||
        "_update_lock",
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    def __init__(self, dashboard: ESPHomeDashboard) -> None:
 | 
			
		||||
        """Initialize the DashboardEntries."""
 | 
			
		||||
        self._dashboard = dashboard
 | 
			
		||||
        self._loop = asyncio.get_running_loop()
 | 
			
		||||
        self._config_dir = dashboard.settings.config_dir
 | 
			
		||||
        # Entries are stored as
 | 
			
		||||
        # {
 | 
			
		||||
        #   "path/to/file.yaml": DashboardEntry,
 | 
			
		||||
        #   ...
 | 
			
		||||
        # }
 | 
			
		||||
        self._entries: dict[str, DashboardEntry] = {}
 | 
			
		||||
        self._loaded_entries = False
 | 
			
		||||
        self._update_lock = asyncio.Lock()
 | 
			
		||||
 | 
			
		||||
    def get(self, path: str) -> DashboardEntry | None:
 | 
			
		||||
        """Get an entry by path."""
 | 
			
		||||
        return self._entries.get(path)
 | 
			
		||||
 | 
			
		||||
    async def _async_all(self) -> list[DashboardEntry]:
 | 
			
		||||
        """Return all entries."""
 | 
			
		||||
        return list(self._entries.values())
 | 
			
		||||
 | 
			
		||||
    def all(self) -> list[DashboardEntry]:
 | 
			
		||||
        """Return all entries."""
 | 
			
		||||
        return asyncio.run_coroutine_threadsafe(self._async_all(), self._loop).result()
 | 
			
		||||
 | 
			
		||||
    def async_all(self) -> list[DashboardEntry]:
 | 
			
		||||
        """Return all entries."""
 | 
			
		||||
        return list(self._entries.values())
 | 
			
		||||
 | 
			
		||||
    def set_state(self, entry: DashboardEntry, state: EntryState) -> None:
 | 
			
		||||
        """Set the state for an entry."""
 | 
			
		||||
        asyncio.run_coroutine_threadsafe(
 | 
			
		||||
            self._async_set_state(entry, state), self._loop
 | 
			
		||||
        ).result()
 | 
			
		||||
 | 
			
		||||
    async def _async_set_state(self, entry: DashboardEntry, state: EntryState) -> None:
 | 
			
		||||
        """Set the state for an entry."""
 | 
			
		||||
        self.async_set_state(entry, state)
 | 
			
		||||
 | 
			
		||||
    def async_set_state(self, entry: DashboardEntry, state: EntryState) -> None:
 | 
			
		||||
        """Set the state for an entry."""
 | 
			
		||||
        if entry.state == state:
 | 
			
		||||
            return
 | 
			
		||||
        entry.state = state
 | 
			
		||||
        self._dashboard.bus.async_fire(
 | 
			
		||||
            EVENT_ENTRY_STATE_CHANGED, {"entry": entry, "state": state}
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    async def async_request_update_entries(self) -> None:
 | 
			
		||||
        """Request an update of the dashboard entries from disk.
 | 
			
		||||
 | 
			
		||||
        If an update is already in progress, this will do nothing.
 | 
			
		||||
        """
 | 
			
		||||
        if self._update_lock.locked():
 | 
			
		||||
            _LOGGER.debug("Dashboard entries are already being updated")
 | 
			
		||||
            return
 | 
			
		||||
        await self.async_update_entries()
 | 
			
		||||
 | 
			
		||||
    async def async_update_entries(self) -> None:
 | 
			
		||||
        """Update the dashboard entries from disk."""
 | 
			
		||||
        async with self._update_lock:
 | 
			
		||||
            await self._async_update_entries()
 | 
			
		||||
 | 
			
		||||
    def _load_entries(
 | 
			
		||||
        self, entries: dict[DashboardEntry, DashboardCacheKeyType]
 | 
			
		||||
    ) -> None:
 | 
			
		||||
        """Load all entries from disk."""
 | 
			
		||||
        for entry, cache_key in entries.items():
 | 
			
		||||
            _LOGGER.debug(
 | 
			
		||||
                "Loading dashboard entry %s because cache key changed: %s",
 | 
			
		||||
                entry.path,
 | 
			
		||||
                cache_key,
 | 
			
		||||
            )
 | 
			
		||||
            entry.load_from_disk(cache_key)
 | 
			
		||||
 | 
			
		||||
    async def _async_update_entries(self) -> list[DashboardEntry]:
 | 
			
		||||
        """Sync the dashboard entries from disk."""
 | 
			
		||||
        _LOGGER.debug("Updating dashboard entries")
 | 
			
		||||
        # At some point it would be nice to use watchdog to avoid polling
 | 
			
		||||
 | 
			
		||||
        path_to_cache_key = await self._loop.run_in_executor(
 | 
			
		||||
            None, self._get_path_to_cache_key
 | 
			
		||||
        )
 | 
			
		||||
        entries = self._entries
 | 
			
		||||
        added: dict[DashboardEntry, DashboardCacheKeyType] = {}
 | 
			
		||||
        updated: dict[DashboardEntry, DashboardCacheKeyType] = {}
 | 
			
		||||
        removed: set[DashboardEntry] = {
 | 
			
		||||
            entry
 | 
			
		||||
            for filename, entry in entries.items()
 | 
			
		||||
            if filename not in path_to_cache_key
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        for path, cache_key in path_to_cache_key.items():
 | 
			
		||||
            if entry := entries.get(path):
 | 
			
		||||
                if entry.cache_key != cache_key:
 | 
			
		||||
                    updated[entry] = cache_key
 | 
			
		||||
            else:
 | 
			
		||||
                entry = DashboardEntry(path, cache_key)
 | 
			
		||||
                added[entry] = cache_key
 | 
			
		||||
 | 
			
		||||
        if added or updated:
 | 
			
		||||
            await self._loop.run_in_executor(
 | 
			
		||||
                None, self._load_entries, {**added, **updated}
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
        bus = self._dashboard.bus
 | 
			
		||||
        for entry in added:
 | 
			
		||||
            entries[entry.path] = entry
 | 
			
		||||
            bus.async_fire(EVENT_ENTRY_ADDED, {"entry": entry})
 | 
			
		||||
 | 
			
		||||
        for entry in removed:
 | 
			
		||||
            del entries[entry.path]
 | 
			
		||||
            bus.async_fire(EVENT_ENTRY_REMOVED, {"entry": entry})
 | 
			
		||||
 | 
			
		||||
        for entry in updated:
 | 
			
		||||
            bus.async_fire(EVENT_ENTRY_UPDATED, {"entry": entry})
 | 
			
		||||
 | 
			
		||||
    def _get_path_to_cache_key(self) -> dict[str, DashboardCacheKeyType]:
 | 
			
		||||
        """Return a dict of path to cache key."""
 | 
			
		||||
        path_to_cache_key: dict[str, DashboardCacheKeyType] = {}
 | 
			
		||||
        #
 | 
			
		||||
        # The cache key is (inode, device, mtime, size)
 | 
			
		||||
        # which allows us to avoid locking since it ensures
 | 
			
		||||
        # every iteration of this call will always return the newest
 | 
			
		||||
        # items from disk at the cost of a stat() call on each
 | 
			
		||||
        # file which is much faster than reading the file
 | 
			
		||||
        # for the cache hit case which is the common case.
 | 
			
		||||
        #
 | 
			
		||||
        for file in util.list_yaml_files([self._config_dir]):
 | 
			
		||||
            try:
 | 
			
		||||
                # Prefer the json storage path if it exists
 | 
			
		||||
                stat = os.stat(ext_storage_path(os.path.basename(file)))
 | 
			
		||||
            except OSError:
 | 
			
		||||
                try:
 | 
			
		||||
                    # Fallback to the yaml file if the storage
 | 
			
		||||
                    # file does not exist or could not be generated
 | 
			
		||||
                    stat = os.stat(file)
 | 
			
		||||
                except OSError:
 | 
			
		||||
                    # File was deleted, ignore
 | 
			
		||||
                    continue
 | 
			
		||||
            path_to_cache_key[file] = (
 | 
			
		||||
                stat.st_ino,
 | 
			
		||||
                stat.st_dev,
 | 
			
		||||
                stat.st_mtime,
 | 
			
		||||
                stat.st_size,
 | 
			
		||||
            )
 | 
			
		||||
        return path_to_cache_key
 | 
			
		||||
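As the comment above explains, the cache key is just the file's stat() fingerprint, so any change in inode, device, mtime or size forces a reload while the common unchanged case costs only a stat() call. A standalone sketch of the same idea (the helper name is illustrative, not from the diff):

    # Illustrative helper mirroring the (inode, device, mtime, size) key used above.
    import os

    def cache_key_for(path: str) -> tuple[int, int, float, int]:
        st = os.stat(path)
        return (st.st_ino, st.st_dev, st.st_mtime, st.st_size)

    # If the key for a path is unchanged since the last poll, load_from_disk()
    # can be skipped; if it differs, the entry is re-read from disk.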
 | 
			
		||||
 | 
			
		||||
class DashboardEntry:
 | 
			
		||||
    """Represents a single dashboard entry.
 | 
			
		||||
 | 
			
		||||
    This class is thread-safe and read-only.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    __slots__ = (
 | 
			
		||||
        "path",
 | 
			
		||||
        "filename",
 | 
			
		||||
        "_storage_path",
 | 
			
		||||
        "cache_key",
 | 
			
		||||
        "storage",
 | 
			
		||||
        "state",
 | 
			
		||||
        "_to_dict",
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    def __init__(self, path: str, cache_key: DashboardCacheKeyType) -> None:
 | 
			
		||||
        """Initialize the DashboardEntry."""
 | 
			
		||||
        self.path = path
 | 
			
		||||
        self.filename: str = os.path.basename(path)
 | 
			
		||||
        self._storage_path = ext_storage_path(self.filename)
 | 
			
		||||
        self.cache_key = cache_key
 | 
			
		||||
        self.storage: StorageJSON | None = None
 | 
			
		||||
        self.state = EntryState.UNKNOWN
 | 
			
		||||
        self._to_dict: dict[str, Any] | None = None
 | 
			
		||||
 | 
			
		||||
    def __repr__(self):
 | 
			
		||||
        """Return the representation of this entry."""
 | 
			
		||||
        return (
 | 
			
		||||
            f"DashboardEntry(path={self.path} "
 | 
			
		||||
            f"address={self.address} "
 | 
			
		||||
            f"web_port={self.web_port} "
 | 
			
		||||
            f"name={self.name} "
 | 
			
		||||
            f"no_mdns={self.no_mdns} "
 | 
			
		||||
            f"state={self.state} "
 | 
			
		||||
            ")"
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    def to_dict(self) -> dict[str, Any]:
 | 
			
		||||
        """Return a dict representation of this entry.
 | 
			
		||||
 | 
			
		||||
        The dict includes the loaded configuration but not
 | 
			
		||||
        the current state of the entry.
 | 
			
		||||
        """
 | 
			
		||||
        if self._to_dict is None:
 | 
			
		||||
            self._to_dict = {
 | 
			
		||||
                "name": self.name,
 | 
			
		||||
                "friendly_name": self.friendly_name,
 | 
			
		||||
                "configuration": self.filename,
 | 
			
		||||
                "loaded_integrations": self.loaded_integrations,
 | 
			
		||||
                "deployed_version": self.update_old,
 | 
			
		||||
                "current_version": self.update_new,
 | 
			
		||||
                "path": self.path,
 | 
			
		||||
                "comment": self.comment,
 | 
			
		||||
                "address": self.address,
 | 
			
		||||
                "web_port": self.web_port,
 | 
			
		||||
                "target_platform": self.target_platform,
 | 
			
		||||
            }
 | 
			
		||||
        return self._to_dict
 | 
			
		||||
 | 
			
		||||
    def load_from_disk(self, cache_key: DashboardCacheKeyType | None = None) -> None:
 | 
			
		||||
        """Load this entry from disk."""
 | 
			
		||||
        self.storage = StorageJSON.load(self._storage_path)
 | 
			
		||||
        self._to_dict = None
 | 
			
		||||
        #
 | 
			
		||||
        # Currently StorageJSON.load() will return None if the file does not exist
 | 
			
		||||
        #
 | 
			
		||||
        # StorageJSON currently does not provide an updated cache key so we use the
 | 
			
		||||
        # one that is passed in.
 | 
			
		||||
        #
 | 
			
		||||
        # The cache key was read from the disk moments ago and may be stale but
 | 
			
		||||
        # it does not matter since we are polling anyways, and the next call to
 | 
			
		||||
        # async_update_entries() will load it again in the extremely rare case that
 | 
			
		||||
        # it changed between the two calls.
 | 
			
		||||
        #
 | 
			
		||||
        if cache_key:
 | 
			
		||||
            self.cache_key = cache_key
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def address(self) -> str | None:
 | 
			
		||||
        """Return the address of this entry."""
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return None
 | 
			
		||||
        return self.storage.address
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def no_mdns(self) -> bool | None:
 | 
			
		||||
        """Return the no_mdns of this entry."""
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return None
 | 
			
		||||
        return self.storage.no_mdns
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def web_port(self) -> int | None:
 | 
			
		||||
        """Return the web port of this entry."""
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return None
 | 
			
		||||
        return self.storage.web_port
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def name(self) -> str:
 | 
			
		||||
        """Return the name of this entry."""
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return self.filename.replace(".yml", "").replace(".yaml", "")
 | 
			
		||||
        return self.storage.name
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def friendly_name(self) -> str:
 | 
			
		||||
        """Return the friendly name of this entry."""
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return self.name
 | 
			
		||||
        return self.storage.friendly_name
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def comment(self) -> str | None:
 | 
			
		||||
        """Return the comment of this entry."""
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return None
 | 
			
		||||
        return self.storage.comment
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def target_platform(self) -> str | None:
 | 
			
		||||
        """Return the target platform of this entry."""
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return None
 | 
			
		||||
        return self.storage.target_platform
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def update_available(self) -> bool:
 | 
			
		||||
        """Return if an update is available for this entry."""
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return True
 | 
			
		||||
        return self.update_old != self.update_new
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def update_old(self) -> str:
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return ""
 | 
			
		||||
        return self.storage.esphome_version or ""
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def update_new(self) -> str:
 | 
			
		||||
        return const.__version__
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def loaded_integrations(self) -> list[str]:
 | 
			
		||||
        if self.storage is None:
 | 
			
		||||
            return []
 | 
			
		||||
        return self.storage.loaded_integrations
esphome/dashboard/enum.py (new file)
@@ -0,0 +1,19 @@
"""Enum backports from standard lib."""
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
from enum import Enum
 | 
			
		||||
from typing import Any
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class StrEnum(str, Enum):
 | 
			
		||||
    """Partial backport of Python 3.11's StrEnum for our basic use cases."""
 | 
			
		||||
 | 
			
		||||
    def __new__(cls, value: str, *args: Any, **kwargs: Any) -> StrEnum:
 | 
			
		||||
        """Create a new StrEnum instance."""
 | 
			
		||||
        if not isinstance(value, str):
 | 
			
		||||
            raise TypeError(f"{value!r} is not a string")
 | 
			
		||||
        return super().__new__(cls, value, *args, **kwargs)
 | 
			
		||||
 | 
			
		||||
    def __str__(self) -> str:
 | 
			
		||||
        """Return self.value."""
 | 
			
		||||
        return str(self.value)
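A quick illustration of what the backport gives compared with a plain Enum (the example class is illustrative; the import path is assumed from the file path above):

    # Hedged example of the StrEnum backport above.
    from esphome.dashboard.enum import StrEnum

    class Fruit(StrEnum):
        APPLE = "apple"

    print(str(Fruit.APPLE))        # "apple" rather than "Fruit.APPLE"
    print(Fruit.APPLE == "apple")  # True, because members are also str instances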
esphome/dashboard/settings.py (new file)
@@ -0,0 +1,76 @@
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import hmac
 | 
			
		||||
import os
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
 | 
			
		||||
from esphome.core import CORE
 | 
			
		||||
from esphome.helpers import get_bool_env
 | 
			
		||||
 | 
			
		||||
from .util.password import password_hash
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DashboardSettings:
 | 
			
		||||
    """Settings for the dashboard."""
 | 
			
		||||
 | 
			
		||||
    def __init__(self) -> None:
 | 
			
		||||
        self.config_dir: str = ""
 | 
			
		||||
        self.password_hash: str = ""
 | 
			
		||||
        self.username: str = ""
 | 
			
		||||
        self.using_password: bool = False
 | 
			
		||||
        self.on_ha_addon: bool = False
 | 
			
		||||
        self.cookie_secret: str | None = None
 | 
			
		||||
        self.absolute_config_dir: Path | None = None
 | 
			
		||||
 | 
			
		||||
    def parse_args(self, args):
 | 
			
		||||
        self.on_ha_addon: bool = args.ha_addon
 | 
			
		||||
        password: str = args.password or os.getenv("PASSWORD", "")
 | 
			
		||||
        if not self.on_ha_addon:
 | 
			
		||||
            self.username: str = args.username or os.getenv("USERNAME", "")
 | 
			
		||||
            self.using_password = bool(password)
 | 
			
		||||
        if self.using_password:
 | 
			
		||||
            self.password_hash = password_hash(password)
 | 
			
		||||
        self.config_dir: str = args.configuration
 | 
			
		||||
        self.absolute_config_dir: Path = Path(self.config_dir).resolve()
 | 
			
		||||
        CORE.config_path = os.path.join(self.config_dir, ".")
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def relative_url(self):
 | 
			
		||||
        return os.getenv("ESPHOME_DASHBOARD_RELATIVE_URL", "/")
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def status_use_ping(self):
 | 
			
		||||
        return get_bool_env("ESPHOME_DASHBOARD_USE_PING")
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def status_use_mqtt(self):
 | 
			
		||||
        return get_bool_env("ESPHOME_DASHBOARD_USE_MQTT")
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def using_ha_addon_auth(self):
 | 
			
		||||
        if not self.on_ha_addon:
 | 
			
		||||
            return False
 | 
			
		||||
        return not get_bool_env("DISABLE_HA_AUTHENTICATION")
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def using_auth(self):
 | 
			
		||||
        return self.using_password or self.using_ha_addon_auth
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def streamer_mode(self):
 | 
			
		||||
        return get_bool_env("ESPHOME_STREAMER_MODE")
 | 
			
		||||
 | 
			
		||||
    def check_password(self, username, password):
 | 
			
		||||
        if not self.using_auth:
 | 
			
		||||
            return True
 | 
			
		||||
        if username != self.username:
 | 
			
		||||
            return False
 | 
			
		||||
 | 
			
		||||
        # Compare password in constant running time (to prevent timing attacks)
 | 
			
		||||
        return hmac.compare_digest(self.password_hash, password_hash(password))
 | 
			
		||||
 | 
			
		||||
    def rel_path(self, *args):
 | 
			
		||||
        joined_path = os.path.join(self.config_dir, *args)
 | 
			
		||||
        # Raises ValueError if not relative to ESPHome config folder
 | 
			
		||||
        Path(joined_path).resolve().relative_to(self.absolute_config_dir)
 | 
			
		||||
        return joined_path
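rel_path() joins the given parts under config_dir and relies on Path.resolve().relative_to() to reject anything that escapes the config folder. A small hedged sketch of that guard (paths are illustrative; the import path is assumed from the file path above):

    # Hedged sketch of the traversal guard in rel_path(); paths are illustrative.
    from pathlib import Path

    from esphome.dashboard.settings import DashboardSettings

    settings = DashboardSettings()
    settings.config_dir = "/config"
    settings.absolute_config_dir = Path("/config").resolve()

    print(settings.rel_path("living-room.yaml"))   # /config/living-room.yaml
    try:
        settings.rel_path("..", "etc", "passwd")   # resolves outside /config
    except ValueError:
        print("rejected: path is outside the config dir")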
esphome/dashboard/status/__init__.py (new empty file)

esphome/dashboard/status/mdns.py (new file)
@@ -0,0 +1,112 @@
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import asyncio
 | 
			
		||||
 | 
			
		||||
from esphome.zeroconf import (
 | 
			
		||||
    ESPHOME_SERVICE_TYPE,
 | 
			
		||||
    AsyncEsphomeZeroconf,
 | 
			
		||||
    DashboardBrowser,
 | 
			
		||||
    DashboardImportDiscovery,
 | 
			
		||||
    DashboardStatus,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
from ..const import SENTINEL
 | 
			
		||||
from ..core import DASHBOARD
 | 
			
		||||
from ..entries import bool_to_entry_state
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class MDNSStatus:
 | 
			
		||||
    """Class that updates the mdns status."""
 | 
			
		||||
 | 
			
		||||
    def __init__(self) -> None:
 | 
			
		||||
        """Initialize the MDNSStatus class."""
 | 
			
		||||
        super().__init__()
 | 
			
		||||
        self.aiozc: AsyncEsphomeZeroconf | None = None
 | 
			
		||||
        # This is the current mdns state for each host (True, False, None)
 | 
			
		||||
        self.host_mdns_state: dict[str, bool | None] = {}
 | 
			
		||||
        # This is the hostnames to path mapping
 | 
			
		||||
        self.host_name_to_path: dict[str, str] = {}
 | 
			
		||||
        self.path_to_host_name: dict[str, str] = {}
 | 
			
		||||
        # This is a set of host names to track (i.e. no_mdns = false)
        self.host_name_with_mdns_enabled: set[str] = set()
 | 
			
		||||
        self._loop = asyncio.get_running_loop()
 | 
			
		||||
 | 
			
		||||
    def get_path_to_host_name(self, path: str) -> str | None:
 | 
			
		||||
        """Resolve a path to an address in a thread-safe manner."""
 | 
			
		||||
        return self.path_to_host_name.get(path)
 | 
			
		||||
 | 
			
		||||
    async def async_resolve_host(self, host_name: str) -> str | None:
 | 
			
		||||
        """Resolve a host name to an address in a thread-safe manner."""
 | 
			
		||||
        if aiozc := self.aiozc:
 | 
			
		||||
            return await aiozc.async_resolve_host(host_name)
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    async def async_refresh_hosts(self):
 | 
			
		||||
        """Refresh the hosts to track."""
 | 
			
		||||
        dashboard = DASHBOARD
 | 
			
		||||
        current_entries = dashboard.entries.async_all()
 | 
			
		||||
        host_name_with_mdns_enabled = self.host_name_with_mdns_enabled
 | 
			
		||||
        host_mdns_state = self.host_mdns_state
 | 
			
		||||
        host_name_to_path = self.host_name_to_path
 | 
			
		||||
        path_to_host_name = self.path_to_host_name
 | 
			
		||||
        entries = dashboard.entries
 | 
			
		||||
 | 
			
		||||
        for entry in current_entries:
 | 
			
		||||
            name = entry.name
 | 
			
		||||
            # If no_mdns is set, remove it from the set
 | 
			
		||||
            if entry.no_mdns:
 | 
			
		||||
                host_name_with_mdns_enabled.discard(name)
 | 
			
		||||
                continue
 | 
			
		||||
 | 
			
		||||
            # We are tracking this host
 | 
			
		||||
            host_name_with_mdns_enabled.add(name)
 | 
			
		||||
            path = entry.path
 | 
			
		||||
 | 
			
		||||
            # If we just adopted/imported this host, we likely
 | 
			
		||||
            # already have a state for it, so we should make sure
 | 
			
		||||
            # to set it so the dashboard shows it as online
 | 
			
		||||
            if (online := host_mdns_state.get(name, SENTINEL)) != SENTINEL:
 | 
			
		||||
                entries.async_set_state(entry, bool_to_entry_state(online))
 | 
			
		||||
 | 
			
		||||
            # Make sure the mapping is up to date
 | 
			
		||||
            # so when we get an mdns update we can map it back
 | 
			
		||||
            # to the filename
 | 
			
		||||
            host_name_to_path[name] = path
 | 
			
		||||
            path_to_host_name[path] = name
 | 
			
		||||
 | 
			
		||||
    async def async_run(self) -> None:
 | 
			
		||||
        dashboard = DASHBOARD
 | 
			
		||||
        entries = dashboard.entries
 | 
			
		||||
        aiozc = AsyncEsphomeZeroconf()
 | 
			
		||||
        self.aiozc = aiozc
 | 
			
		||||
        host_mdns_state = self.host_mdns_state
 | 
			
		||||
        host_name_to_path = self.host_name_to_path
 | 
			
		||||
        host_name_with_mdns_enabled = self.host_name_with_mdns_enabled
 | 
			
		||||
 | 
			
		||||
        def on_update(dat: dict[str, bool | None]) -> None:
 | 
			
		||||
            """Update the entry state."""
 | 
			
		||||
            for name, result in dat.items():
 | 
			
		||||
                host_mdns_state[name] = result
 | 
			
		||||
                if name not in host_name_with_mdns_enabled:
 | 
			
		||||
                    continue
 | 
			
		||||
                if entry := entries.get(host_name_to_path[name]):
 | 
			
		||||
                    entries.async_set_state(entry, bool_to_entry_state(result))
 | 
			
		||||
 | 
			
		||||
        stat = DashboardStatus(on_update)
 | 
			
		||||
        imports = DashboardImportDiscovery()
 | 
			
		||||
        dashboard.import_result = imports.import_state
 | 
			
		||||
 | 
			
		||||
        browser = DashboardBrowser(
 | 
			
		||||
            aiozc.zeroconf,
 | 
			
		||||
            ESPHOME_SERVICE_TYPE,
 | 
			
		||||
            [stat.browser_callback, imports.browser_callback],
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        while not dashboard.stop_event.is_set():
 | 
			
		||||
            await self.async_refresh_hosts()
 | 
			
		||||
            await dashboard.ping_request.wait()
 | 
			
		||||
            dashboard.ping_request.clear()
 | 
			
		||||
 | 
			
		||||
        await browser.async_cancel()
 | 
			
		||||
        await aiozc.async_close()
 | 
			
		||||
        self.aiozc = None
 | 
			
		||||
							
								
								
									
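Note on the SENTINEL comparison in async_refresh_hosts above: a host's recorded mDNS state can legitimately be None, so dict.get(name) alone cannot tell "never seen" apart from "seen, state unknown"; a unique module-level object is used as the default instead. A minimal, self-contained sketch of that pattern (the local SENTINEL here only stands in for what ..const is assumed to provide):

SENTINEL = object()  # assumption: mirrors esphome.dashboard.const.SENTINEL

host_mdns_state = {"kitchen": True, "garage": None}  # None = seen, state unknown

for name in ("kitchen", "garage", "brand-new-device"):
    online = host_mdns_state.get(name, SENTINEL)
    if online is not SENTINEL:
        # A state (True, False or None) was recorded, so it can be pushed to the UI.
        print(f"{name}: recorded state {online}")
    else:
        # Never seen before; leave the dashboard entry untouched.
        print(f"{name}: no recorded state yet")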
 67   esphome/dashboard/status/mqtt.py        Normal file
@@ -0,0 +1,67 @@
from __future__ import annotations

import binascii
import json
import os
import threading

from esphome import mqtt

from ..core import DASHBOARD
from ..entries import EntryState


class MqttStatusThread(threading.Thread):
    """Status thread to get the status of the devices via MQTT."""

    def run(self) -> None:
        """Run the status thread."""
        dashboard = DASHBOARD
        entries = dashboard.entries
        current_entries = entries.all()

        config = mqtt.config_from_env()
        topic = "esphome/discover/#"

        def on_message(client, userdata, msg):
            nonlocal current_entries

            payload = msg.payload.decode(errors="backslashreplace")
            if len(payload) > 0:
                data = json.loads(payload)
                if "name" not in data:
                    return
                for entry in current_entries:
                    if entry.name == data["name"]:
                        entries.set_state(entry, EntryState.ONLINE)
                        return

        def on_connect(client, userdata, flags, return_code):
            client.publish("esphome/discover", None, retain=False)

        mqttid = str(binascii.hexlify(os.urandom(6)).decode())

        client = mqtt.prepare(
            config,
            [topic],
            on_message,
            on_connect,
            None,
            None,
            f"esphome-dashboard-{mqttid}",
        )
        client.loop_start()

        while not dashboard.stop_event.wait(2):
            current_entries = entries.all()
            # Mark no-mdns entries offline; on_message flips them back to ONLINE
            # when the device answers the discover request below.
            for entry in current_entries:
                if entry.no_mdns:
                    entries.set_state(entry, EntryState.OFFLINE)

            client.publish("esphome/discover", None, retain=False)
            dashboard.mqtt_ping_request.wait()
            dashboard.mqtt_ping_request.clear()

        client.disconnect()
        client.loop_stop()
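The discovery handshake above boils down to: publish an empty payload to esphome/discover, then treat any JSON answer arriving on esphome/discover/# as an online device. A rough standalone sketch of that flow using paho-mqtt 1.x-style callbacks (the broker address is an assumption, and the real code goes through esphome.mqtt.prepare rather than talking to paho directly):

import json

import paho.mqtt.client as paho  # assumption: paho-mqtt is installed

def on_connect(client, userdata, flags, rc):
    client.subscribe("esphome/discover/#")
    client.publish("esphome/discover", None, retain=False)  # ask devices to announce

def on_message(client, userdata, msg):
    payload = msg.payload.decode(errors="backslashreplace")
    if payload:
        data = json.loads(payload)
        print("device online:", data.get("name"))

client = paho.Client()  # paho-mqtt 1.x style; 2.x needs CallbackAPIVersion.VERSION1 first
client.on_connect = on_connect
client.on_message = on_message
client.connect("mqtt.local", 1883)  # assumed broker host/port
client.loop_forever()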
 49   esphome/dashboard/status/ping.py        Normal file
@@ -0,0 +1,49 @@
from __future__ import annotations

import asyncio
import os
from typing import cast

from ..core import DASHBOARD
from ..entries import DashboardEntry, bool_to_entry_state
from ..util.itertools import chunked
from ..util.subprocess import async_system_command_status


async def _async_ping_host(host: str) -> bool:
    """Ping a host."""
    return await async_system_command_status(
        ["ping", "-n" if os.name == "nt" else "-c", "1", host]
    )


class PingStatus:
    def __init__(self) -> None:
        """Initialize the PingStatus class."""
        super().__init__()
        self._loop = asyncio.get_running_loop()

    async def async_run(self) -> None:
        """Run the ping status."""
        dashboard = DASHBOARD
        entries = dashboard.entries

        while not dashboard.stop_event.is_set():
            # Only ping if the dashboard is open
            await dashboard.ping_request.wait()
            current_entries = dashboard.entries.async_all()
            to_ping: list[DashboardEntry] = [
                entry for entry in current_entries if entry.address is not None
            ]
            for ping_group in chunked(to_ping, 16):
                ping_group = cast(list[DashboardEntry], ping_group)
                results = await asyncio.gather(
                    *(_async_ping_host(entry.address) for entry in ping_group),
                    return_exceptions=True,
                )
                for entry, result in zip(ping_group, results):
                    if isinstance(result, Exception):
                        result = False
                    elif isinstance(result, BaseException):
                        raise result
                    entries.async_set_state(entry, bool_to_entry_state(result))
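The result handling above turns an ordinary exception from a failed ping into an "offline" result while still re-raising BaseExceptions such as cancellation. A small standalone illustration of that asyncio.gather(return_exceptions=True) pattern (probe_host is made up for the example):

import asyncio

async def probe_host(host: str) -> bool:
    # Stand-in for a real ping; fails for one host to show the error path.
    if host == "flaky":
        raise OSError("network unreachable")
    return True

async def main() -> None:
    hosts = ["kitchen", "flaky", "garage"]
    results = await asyncio.gather(
        *(probe_host(h) for h in hosts), return_exceptions=True
    )
    for host, result in zip(hosts, results):
        if isinstance(result, Exception):
            result = False  # a failed probe just means "offline"
        elif isinstance(result, BaseException):
            raise result  # e.g. CancelledError must propagate
        print(host, "online" if result else "offline")

asyncio.run(main())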
 0    esphome/dashboard/util/__init__.py      Normal file
 22   esphome/dashboard/util/itertools.py     Normal file
@@ -0,0 +1,22 @@
from __future__ import annotations

from collections.abc import Iterable
from functools import partial
from itertools import islice
from typing import Any


def take(take_num: int, iterable: Iterable) -> list[Any]:
    """Return first n items of the iterable as a list.

    From itertools recipes
    """
    return list(islice(iterable, take_num))


def chunked(iterable: Iterable, chunked_num: int) -> Iterable[Any]:
    """Break *iterable* into lists of length *n*.

    From more-itertools
    """
    return iter(partial(take, chunked_num, iter(iterable)), [])
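chunked above leans on the two-argument form of iter(callable, sentinel): the partial keeps pulling chunked_num items off one shared iterator until an empty list (the sentinel) signals exhaustion. A quick usage check of the same two helpers:

from functools import partial
from itertools import islice

def take(take_num, iterable):
    return list(islice(iterable, take_num))

def chunked(iterable, chunked_num):
    # iter(callable, sentinel): call the partial until it returns [] (the sentinel).
    return iter(partial(take, chunked_num, iter(iterable)), [])

print(list(chunked(range(10), 4)))  # [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]]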
 11   esphome/dashboard/util/password.py      Normal file
@@ -0,0 +1,11 @@
from __future__ import annotations

import hashlib


def password_hash(password: str) -> bytes:
    """Create a hash of a password to transform it to a fixed-length digest.

    Note this is not meant for secure storage, but for securely comparing passwords.
    """
    return hashlib.sha256(password.encode()).digest()
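This helper pairs with the hmac.compare_digest call shown earlier in the settings code: hashing both sides to a fixed-length SHA-256 digest first means the comparison work does not depend on how much of the attacker-controlled string happens to match. A short sketch of the combined check (the stored password is an arbitrary example):

import hashlib
import hmac

def password_hash(password: str) -> bytes:
    return hashlib.sha256(password.encode()).digest()

stored_hash = password_hash("correct horse battery staple")  # example secret

def check_password(candidate: str) -> bool:
    # Constant-time comparison of two equal-length digests.
    return hmac.compare_digest(stored_hash, password_hash(candidate))

print(check_password("wrong guess"))                   # False
print(check_password("correct horse battery staple"))  # True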
 31   esphome/dashboard/util/subprocess.py    Normal file
@@ -0,0 +1,31 @@
from __future__ import annotations

import asyncio
from collections.abc import Iterable


async def async_system_command_status(command: Iterable[str]) -> bool:
    """Run a system command checking only the status."""
    process = await asyncio.create_subprocess_exec(
        *command,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.DEVNULL,
        stderr=asyncio.subprocess.DEVNULL,
        close_fds=False,
    )
    await process.wait()
    return process.returncode == 0


async def async_run_system_command(command: Iterable[str]) -> tuple[int, bytes, bytes]:
    """Run a system command and return a tuple of returncode, stdout, stderr."""
    process = await asyncio.create_subprocess_exec(
        *command,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
        close_fds=False,
    )
    stdout, stderr = await process.communicate()
    await process.wait()
    return process.returncode, stdout, stderr
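A brief usage sketch of the two helpers above (the import path simply follows the file location in this diff, and the commands are arbitrary Unix examples):

import asyncio

from esphome.dashboard.util.subprocess import (
    async_run_system_command,
    async_system_command_status,
)

async def main() -> None:
    # True only if the command exits with status 0.
    reachable = await async_system_command_status(["ping", "-c", "1", "127.0.0.1"])
    print("reachable:", reachable)

    # Full result: exit code plus captured stdout/stderr bytes.
    returncode, stdout, stderr = await async_run_system_command(["uname", "-a"])
    print(returncode, stdout.decode().strip())

asyncio.run(main())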
@@ -1,17 +1,10 @@
import hashlib
from __future__ import annotations

import unicodedata

from esphome.const import ALLOWED_NAME_CHARS


def password_hash(password: str) -> bytes:
    """Create a hash of a password to transform it to a fixed-length digest.

    Note this is not meant for secure storage, but for securely comparing passwords.
    """
    return hashlib.sha256(password.encode()).digest()


def strip_accents(value):
    return "".join(
        c
 1066 esphome/dashboard/web_server.py          Normal file
      (File diff suppressed because it is too large)
@@ -1,10 +1,13 @@
from __future__ import annotations

import gzip
import hashlib
import io
import logging
import random
import socket
import sys
import time
import gzip

from esphome.core import EsphomeError
from esphome.helpers import is_ip_address, resolve_ip_address
@@ -40,6 +43,10 @@ MAGIC_BYTES = [0x6C, 0x26, 0xF7, 0x5C, 0x45]

FEATURE_SUPPORTS_COMPRESSION = 0x01


UPLOAD_BLOCK_SIZE = 8192
UPLOAD_BUFFER_SIZE = UPLOAD_BLOCK_SIZE * 8

_LOGGER = logging.getLogger(__name__)


@@ -184,7 +191,9 @@ def send_check(sock, data, msg):
        raise OTAError(f"Error sending {msg}: {err}") from err


def perform_ota(sock, password, file_handle, filename):
def perform_ota(
    sock: socket.socket, password: str, file_handle: io.IOBase, filename: str
) -> None:
    file_contents = file_handle.read()
    file_size = len(file_contents)
    _LOGGER.info("Uploading %s (%s bytes)", filename, file_size)
@@ -254,14 +263,16 @@ def perform_ota(sock, password, file_handle, filename):
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 0)
    # Limit send buffer (usually around 100kB) in order to have progress bar
    # show the actual progress
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 8192)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, UPLOAD_BUFFER_SIZE)
    # Set higher timeout during upload
    sock.settimeout(20.0)
    sock.settimeout(30.0)
    start_time = time.perf_counter()

    offset = 0
    progress = ProgressBar()
    while True:
        chunk = upload_contents[offset : offset + 1024]
        chunk = upload_contents[offset : offset + UPLOAD_BLOCK_SIZE]
        if not chunk:
            break
        offset += len(chunk)
@@ -277,8 +288,9 @@ def perform_ota(sock, password, file_handle, filename):

    # Enable nodelay for last checks
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    duration = time.perf_counter() - start_time

    _LOGGER.info("Waiting for result...")
    _LOGGER.info("Upload took %.2f seconds, waiting for result...", duration)

    receive_exactly(sock, 1, "receive OK", RESPONSE_RECEIVE_OK)
    receive_exactly(sock, 1, "Update end", RESPONSE_UPDATE_END_OK)
@@ -23,6 +23,14 @@ from esphome.core import (
from esphome.helpers import add_class_to_obj
from esphome.util import OrderedDict, filter_yaml_files

try:
    from yaml import CSafeLoader as FastestAvailableSafeLoader
except ImportError:
    from yaml import (  # type: ignore[assignment]
        SafeLoader as FastestAvailableSafeLoader,
    )


_LOGGER = logging.getLogger(__name__)

# Mostly copied from Home Assistant because that code works fine and
@@ -89,7 +97,7 @@ def _add_data_ref(fn):
    return wrapped


class ESPHomeLoader(yaml.SafeLoader):
class ESPHomeLoader(FastestAvailableSafeLoader):
    """Loader class that keeps track of line numbers."""

    @_add_data_ref
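The try/except above prefers PyYAML's C-accelerated CSafeLoader and silently falls back to the pure-Python SafeLoader when the libyaml bindings are missing. A quick way to see which one you actually got (yaml.__with_libyaml__ is a standard PyYAML attribute):

import yaml

try:
    from yaml import CSafeLoader as FastestAvailableSafeLoader
except ImportError:
    from yaml import SafeLoader as FastestAvailableSafeLoader

# True only when PyYAML was built against libyaml, i.e. CSafeLoader exists.
print("libyaml available:", yaml.__with_libyaml__)
print("loader in use:", FastestAvailableSafeLoader.__name__)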
@@ -1,22 +1,21 @@
from __future__ import annotations

import asyncio
import logging
from dataclasses import dataclass
from typing import Callable

from zeroconf import (
    IPVersion,
    ServiceBrowser,
    ServiceInfo,
    ServiceStateChange,
    Zeroconf,
)
from zeroconf import IPVersion, ServiceInfo, ServiceStateChange, Zeroconf
from zeroconf.asyncio import AsyncServiceBrowser, AsyncServiceInfo, AsyncZeroconf

from esphome.storage_json import StorageJSON, ext_storage_path

_LOGGER = logging.getLogger(__name__)

_BACKGROUND_TASKS: set[asyncio.Task] = set()


class HostResolver(ServiceInfo):
    """Resolve a host name to an IP address."""

@@ -65,7 +64,7 @@ class DiscoveredImport:
    network: str


class DashboardBrowser(ServiceBrowser):
class DashboardBrowser(AsyncServiceBrowser):
    """A class to browse for ESPHome nodes."""


@@ -94,7 +93,28 @@ class DashboardImportDiscovery:
            # Ignore updates for devices that are not in the import state
            return

        info = zeroconf.get_service_info(service_type, name)
        info = AsyncServiceInfo(
            service_type,
            name,
        )
        if info.load_from_cache(zeroconf):
            self._process_service_info(name, info)
            return
        task = asyncio.create_task(
            self._async_process_service_info(zeroconf, info, service_type, name)
        )
        _BACKGROUND_TASKS.add(task)
        task.add_done_callback(_BACKGROUND_TASKS.discard)

    async def _async_process_service_info(
        self, zeroconf: Zeroconf, info: AsyncServiceInfo, service_type: str, name: str
    ) -> None:
        """Process a service info."""
        if await info.async_request(zeroconf):
            self._process_service_info(name, info)

    def _process_service_info(self, name: str, info: ServiceInfo) -> None:
        """Process a service info."""
        _LOGGER.debug("-> resolved info: %s", info)
        if info is None:
            return
@@ -146,13 +166,32 @@ class DashboardImportDiscovery:
                )


def _make_host_resolver(host: str) -> HostResolver:
    """Create a new HostResolver for the given host name."""
    name = host.partition(".")[0]
    info = HostResolver(ESPHOME_SERVICE_TYPE, f"{name}.{ESPHOME_SERVICE_TYPE}")
    return info


class EsphomeZeroconf(Zeroconf):
    def resolve_host(self, host: str, timeout=3.0):
    def resolve_host(self, host: str, timeout: float = 3.0) -> str | None:
        """Resolve a host name to an IP address."""
        name = host.partition(".")[0]
        info = HostResolver(f"{name}.{ESPHOME_SERVICE_TYPE}", ESPHOME_SERVICE_TYPE)
        if (info.load_from_cache(self) or info.request(self, timeout * 1000)) and (
            addresses := info.ip_addresses_by_version(IPVersion.V4Only)
        ):
        info = _make_host_resolver(host)
        if (
            info.load_from_cache(self)
            or (timeout and info.request(self, timeout * 1000))
        ) and (addresses := info.ip_addresses_by_version(IPVersion.V4Only)):
            return str(addresses[0])
        return None


class AsyncEsphomeZeroconf(AsyncZeroconf):
    async def async_resolve_host(self, host: str, timeout: float = 3.0) -> str | None:
        """Resolve a host name to an IP address."""
        info = _make_host_resolver(host)
        if (
            info.load_from_cache(self.zeroconf)
            or (timeout and await info.async_request(self.zeroconf, timeout * 1000))
        ) and (addresses := info.ip_addresses_by_version(IPVersion.V4Only)):
            return str(addresses[0])
        return None
@@ -159,7 +159,7 @@ board_build.filesystem_size = 0.5m
platform = https://github.com/maxgerhardt/platform-raspberrypi.git
platform_packages =
    ; earlephilhower/framework-arduinopico@~1.20602.0 ; Cannot use the platformio package until old releases stop getting deleted
    earlephilhower/framework-arduinopico@https://github.com/earlephilhower/arduino-pico/releases/download/3.4.0/rp2040-3.4.0.zip
    earlephilhower/framework-arduinopico@https://github.com/earlephilhower/arduino-pico/releases/download/3.6.0/rp2040-3.6.0.zip

framework = arduino
lib_deps =
@@ -10,8 +10,8 @@ platformio==6.1.11  # When updating platformio, also update Dockerfile
esptool==4.6.2
click==8.1.7
esphome-dashboard==20231107.0
aioesphomeapi==18.2.4
zeroconf==0.120.0
aioesphomeapi==18.5.3
zeroconf==0.127.0

# esp-idf requires this, but doesn't bundle it by default
# https://github.com/espressif/esp-idf/blob/220590d599e134d7a5e7f1e683cc4550349ffbf8/requirements.txt#L24
@@ -1,6 +1,6 @@
pylint==2.17.6
flake8==6.1.0  # also change in .pre-commit-config.yaml when updating
black==23.10.1  # also change in .pre-commit-config.yaml when updating
black==23.11.0  # also change in .pre-commit-config.yaml when updating
pyupgrade==3.15.0  # also change in .pre-commit-config.yaml when updating
pre-commit
@@ -45,7 +45,7 @@ def sub(path, pattern, repl, expected_count=1):
    content, count = re.subn(pattern, repl, content, flags=re.MULTILINE)
    if expected_count is not None:
        assert count == expected_count, f"Pattern {pattern} replacement failed!"
    with open(path, "wt") as fh:
    with open(path, "w") as fh:
        fh.write(content)
@@ -3050,6 +3050,9 @@ remote_receiver:
  on_coolix:
    then:
      delay: !lambda "return x.first + x.second;"
  on_rc_switch:
    then:
      delay: !lambda "return uint32_t(x.code) + x.protocol;"

status_led:
  pin: GPIO2
@@ -425,6 +425,15 @@ binary_sensor:
        input: true
      inverted: false

  - platform: gpio
    name: XL9535 Pin 17
    pin:
      xl9535: xl9535_hub
      number: 17
      mode:
        input: true
      inverted: false

climate:
  - platform: tuya
    id: tuya_climate
@@ -1,4 +1,4 @@
from typing import Iterator
from collections.abc import Iterator

import math
@@ -1,5 +1,5 @@
import pytest
from mock import Mock
from unittest.mock import Mock

from esphome import cpp_helpers as ch
from esphome import const