mirror of
				https://github.com/esphome/esphome.git
				synced 2025-11-03 16:41:50 +00:00 
			
		
		
		
	Compare commits
	
		
			3 Commits
		
	
	
		
			2025.3.0b5
			...
			oral-b-bru
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 
						 | 
					a85b7b3f84 | ||
| 
						 | 
					a207ed08a9 | ||
| 
						 | 
					90c3cb62b3 | 
							
								
								
									
										57
									
								
								.clang-tidy
									
									
									
									
									
								
							
							
						
						
									
										57
									
								
								.clang-tidy
									
									
									
									
									
								
							@@ -5,42 +5,26 @@ Checks: >-
 | 
			
		||||
  -altera-*,
 | 
			
		||||
  -android-*,
 | 
			
		||||
  -boost-*,
 | 
			
		||||
  -bugprone-easily-swappable-parameters,
 | 
			
		||||
  -bugprone-implicit-widening-of-multiplication-result,
 | 
			
		||||
  -bugprone-multi-level-implicit-pointer-conversion,
 | 
			
		||||
  -bugprone-narrowing-conversions,
 | 
			
		||||
  -bugprone-signed-char-misuse,
 | 
			
		||||
  -bugprone-switch-missing-default-case,
 | 
			
		||||
  -cert-dcl50-cpp,
 | 
			
		||||
  -cert-err33-c,
 | 
			
		||||
  -cert-err58-cpp,
 | 
			
		||||
  -cert-oop57-cpp,
 | 
			
		||||
  -cert-str34-c,
 | 
			
		||||
  -clang-analyzer-optin.core.EnumCastOutOfRange,
 | 
			
		||||
  -clang-analyzer-optin.cplusplus.UninitializedObject,
 | 
			
		||||
  -clang-analyzer-osx.*,
 | 
			
		||||
  -clang-diagnostic-delete-abstract-non-virtual-dtor,
 | 
			
		||||
  -clang-diagnostic-delete-non-abstract-non-virtual-dtor,
 | 
			
		||||
  -clang-diagnostic-deprecated-declarations,
 | 
			
		||||
  -clang-diagnostic-ignored-optimization-argument,
 | 
			
		||||
  -clang-diagnostic-missing-field-initializers,
 | 
			
		||||
  -clang-diagnostic-shadow-field,
 | 
			
		||||
  -clang-diagnostic-unused-const-variable,
 | 
			
		||||
  -clang-diagnostic-unused-parameter,
 | 
			
		||||
  -clang-diagnostic-vla-cxx-extension,
 | 
			
		||||
  -concurrency-*,
 | 
			
		||||
  -cppcoreguidelines-avoid-c-arrays,
 | 
			
		||||
  -cppcoreguidelines-avoid-const-or-ref-data-members,
 | 
			
		||||
  -cppcoreguidelines-avoid-do-while,
 | 
			
		||||
  -cppcoreguidelines-avoid-magic-numbers,
 | 
			
		||||
  -cppcoreguidelines-init-variables,
 | 
			
		||||
  -cppcoreguidelines-macro-to-enum,
 | 
			
		||||
  -cppcoreguidelines-macro-usage,
 | 
			
		||||
  -cppcoreguidelines-missing-std-forward,
 | 
			
		||||
  -cppcoreguidelines-narrowing-conversions,
 | 
			
		||||
  -cppcoreguidelines-non-private-member-variables-in-classes,
 | 
			
		||||
  -cppcoreguidelines-owning-memory,
 | 
			
		||||
  -cppcoreguidelines-prefer-member-initializer,
 | 
			
		||||
  -cppcoreguidelines-pro-bounds-array-to-pointer-decay,
 | 
			
		||||
  -cppcoreguidelines-pro-bounds-constant-array-index,
 | 
			
		||||
  -cppcoreguidelines-pro-bounds-pointer-arithmetic,
 | 
			
		||||
@@ -51,10 +35,7 @@ Checks: >-
 | 
			
		||||
  -cppcoreguidelines-pro-type-static-cast-downcast,
 | 
			
		||||
  -cppcoreguidelines-pro-type-union-access,
 | 
			
		||||
  -cppcoreguidelines-pro-type-vararg,
 | 
			
		||||
  -cppcoreguidelines-rvalue-reference-param-not-moved,
 | 
			
		||||
  -cppcoreguidelines-special-member-functions,
 | 
			
		||||
  -cppcoreguidelines-use-default-member-init,
 | 
			
		||||
  -cppcoreguidelines-virtual-class-destructor,
 | 
			
		||||
  -fuchsia-multiple-inheritance,
 | 
			
		||||
  -fuchsia-overloaded-operator,
 | 
			
		||||
  -fuchsia-statically-constructed-objects,
 | 
			
		||||
@@ -73,33 +54,22 @@ Checks: >-
 | 
			
		||||
  -llvm-include-order,
 | 
			
		||||
  -llvm-qualified-auto,
 | 
			
		||||
  -llvmlibc-*,
 | 
			
		||||
  -misc-const-correctness,
 | 
			
		||||
  -misc-include-cleaner,
 | 
			
		||||
  -misc-no-recursion,
 | 
			
		||||
  -misc-non-private-member-variables-in-classes,
 | 
			
		||||
  -misc-no-recursion,
 | 
			
		||||
  -misc-unused-parameters,
 | 
			
		||||
  -misc-use-anonymous-namespace,
 | 
			
		||||
  -modernize-avoid-bind,
 | 
			
		||||
  -modernize-avoid-c-arrays,
 | 
			
		||||
  -modernize-avoid-bind,
 | 
			
		||||
  -modernize-concat-nested-namespaces,
 | 
			
		||||
  -modernize-macro-to-enum,
 | 
			
		||||
  -modernize-return-braced-init-list,
 | 
			
		||||
  -modernize-type-traits,
 | 
			
		||||
  -modernize-use-auto,
 | 
			
		||||
  -modernize-use-constraints,
 | 
			
		||||
  -modernize-use-default-member-init,
 | 
			
		||||
  -modernize-use-equals-default,
 | 
			
		||||
  -modernize-use-nodiscard,
 | 
			
		||||
  -modernize-use-nullptr,
 | 
			
		||||
  -modernize-use-nodiscard,
 | 
			
		||||
  -modernize-use-nullptr,
 | 
			
		||||
  -modernize-use-trailing-return-type,
 | 
			
		||||
  -modernize-use-nodiscard,
 | 
			
		||||
  -mpi-*,
 | 
			
		||||
  -objc-*,
 | 
			
		||||
  -performance-enum-size,
 | 
			
		||||
  -readability-avoid-nested-conditional-operator,
 | 
			
		||||
  -readability-container-contains,
 | 
			
		||||
  -readability-container-data-pointer,
 | 
			
		||||
  -readability-braces-around-statements,
 | 
			
		||||
  -readability-const-return-type,
 | 
			
		||||
  -readability-convert-member-functions-to-static,
 | 
			
		||||
  -readability-else-after-return,
 | 
			
		||||
  -readability-function-cognitive-complexity,
 | 
			
		||||
@@ -108,15 +78,18 @@ Checks: >-
 | 
			
		||||
  -readability-magic-numbers,
 | 
			
		||||
  -readability-make-member-function-const,
 | 
			
		||||
  -readability-named-parameter,
 | 
			
		||||
  -readability-redundant-casting,
 | 
			
		||||
  -readability-redundant-inline-specifier,
 | 
			
		||||
  -readability-qualified-auto,
 | 
			
		||||
  -readability-redundant-access-specifiers,
 | 
			
		||||
  -readability-redundant-member-init,
 | 
			
		||||
  -readability-redundant-string-init,
 | 
			
		||||
  -readability-uppercase-literal-suffix,
 | 
			
		||||
  -readability-use-anyofallof,
 | 
			
		||||
WarningsAsErrors: '*'
 | 
			
		||||
AnalyzeTemporaryDtors: false
 | 
			
		||||
FormatStyle:     google
 | 
			
		||||
CheckOptions:
 | 
			
		||||
  - key:             google-readability-braces-around-statements.ShortStatementLines
 | 
			
		||||
    value:           '1'
 | 
			
		||||
  - key:             google-readability-function-size.StatementThreshold
 | 
			
		||||
    value:           '800'
 | 
			
		||||
  - key:             google-runtime-int.TypeSuffix
 | 
			
		||||
@@ -141,8 +114,6 @@ CheckOptions:
 | 
			
		||||
    value:           'make_unique'
 | 
			
		||||
  - key:             modernize-make-unique.MakeSmartPtrFunctionHeader
 | 
			
		||||
    value:           'esphome/core/helpers.h'
 | 
			
		||||
  - key:             readability-braces-around-statements.ShortStatementLines
 | 
			
		||||
    value:           2
 | 
			
		||||
  - key:             readability-identifier-naming.LocalVariableCase
 | 
			
		||||
    value:           'lower_case'
 | 
			
		||||
  - key:             readability-identifier-naming.ClassCase
 | 
			
		||||
@@ -189,11 +160,3 @@ CheckOptions:
 | 
			
		||||
    value:           'lower_case'
 | 
			
		||||
  - key:             readability-identifier-naming.VirtualMethodSuffix
 | 
			
		||||
    value:           ''
 | 
			
		||||
  - key:             readability-qualified-auto.AddConstToQualified
 | 
			
		||||
    value:           0
 | 
			
		||||
  - key:             readability-identifier-length.MinimumVariableNameLength
 | 
			
		||||
    value:           0
 | 
			
		||||
  - key:             readability-identifier-length.MinimumParameterNameLength
 | 
			
		||||
    value:           0
 | 
			
		||||
  - key:             readability-identifier-length.MinimumLoopCounterNameLength
 | 
			
		||||
    value:           0
 | 
			
		||||
 
 | 
			
		||||
@@ -1,88 +1,56 @@
 | 
			
		||||
{
 | 
			
		||||
  "name": "ESPHome Dev",
 | 
			
		||||
  "image": "ghcr.io/esphome/esphome-lint:dev",
 | 
			
		||||
  "image": "esphome/esphome-lint:dev",
 | 
			
		||||
  "postCreateCommand": [
 | 
			
		||||
    "script/devcontainer-post-create"
 | 
			
		||||
  ],
 | 
			
		||||
  "containerEnv": {
 | 
			
		||||
    "DEVCONTAINER": "1",
 | 
			
		||||
    "PIP_BREAK_SYSTEM_PACKAGES": "1",
 | 
			
		||||
    "PIP_ROOT_USER_ACTION": "ignore"
 | 
			
		||||
  },
 | 
			
		||||
  "runArgs": [
 | 
			
		||||
    "--privileged",
 | 
			
		||||
    "-e",
 | 
			
		||||
    "ESPHOME_DASHBOARD_USE_PING=1"
 | 
			
		||||
    // uncomment and edit the path in order to pass though local USB serial to the conatiner
 | 
			
		||||
    // , "--device=/dev/ttyACM0"
 | 
			
		||||
  ],
 | 
			
		||||
  "appPort": 6052,
 | 
			
		||||
  // if you are using avahi in the host device, uncomment these to allow the
 | 
			
		||||
  // devcontainer to find devices via mdns
 | 
			
		||||
  //"mounts": [
 | 
			
		||||
  //  "type=bind,source=/dev/bus/usb,target=/dev/bus/usb",
 | 
			
		||||
  //  "type=bind,source=/var/run/dbus,target=/var/run/dbus",
 | 
			
		||||
  //  "type=bind,source=/var/run/avahi-daemon/socket,target=/var/run/avahi-daemon/socket"
 | 
			
		||||
  //],
 | 
			
		||||
  "customizations": {
 | 
			
		||||
    "vscode": {
 | 
			
		||||
      "extensions": [
 | 
			
		||||
        // python
 | 
			
		||||
        "ms-python.python",
 | 
			
		||||
        "ms-python.pylint",
 | 
			
		||||
        "ms-python.flake8",
 | 
			
		||||
        "charliermarsh.ruff",
 | 
			
		||||
        "visualstudioexptteam.vscodeintellicode",
 | 
			
		||||
        // yaml
 | 
			
		||||
        "redhat.vscode-yaml",
 | 
			
		||||
        // cpp
 | 
			
		||||
        "ms-vscode.cpptools",
 | 
			
		||||
        // editorconfig
 | 
			
		||||
        "editorconfig.editorconfig"
 | 
			
		||||
      ],
 | 
			
		||||
      "settings": {
 | 
			
		||||
        "python.languageServer": "Pylance",
 | 
			
		||||
        "python.pythonPath": "/usr/bin/python3",
 | 
			
		||||
        "pylint.args": [
 | 
			
		||||
          "--rcfile=${workspaceFolder}/pyproject.toml"
 | 
			
		||||
        ],
 | 
			
		||||
        "flake8.args": [
 | 
			
		||||
          "--config=${workspaceFolder}/.flake8"
 | 
			
		||||
        ],
 | 
			
		||||
        "ruff.configuration": "${workspaceFolder}/pyproject.toml",
 | 
			
		||||
        "[python]": {
 | 
			
		||||
          // VS will say "Value is not accepted" before building the devcontainer, but the warning
 | 
			
		||||
          // should go away after build is completed.
 | 
			
		||||
          "editor.defaultFormatter": "charliermarsh.ruff"
 | 
			
		||||
        },
 | 
			
		||||
        "editor.formatOnPaste": false,
 | 
			
		||||
        "editor.formatOnSave": true,
 | 
			
		||||
        "editor.formatOnType": true,
 | 
			
		||||
        "files.trimTrailingWhitespace": true,
 | 
			
		||||
        "terminal.integrated.defaultProfile.linux": "bash",
 | 
			
		||||
        "yaml.customTags": [
 | 
			
		||||
          "!secret scalar",
 | 
			
		||||
          "!lambda scalar",
 | 
			
		||||
          "!extend scalar",
 | 
			
		||||
          "!remove scalar",
 | 
			
		||||
          "!include_dir_named scalar",
 | 
			
		||||
          "!include_dir_list scalar",
 | 
			
		||||
          "!include_dir_merge_list scalar",
 | 
			
		||||
          "!include_dir_merge_named scalar"
 | 
			
		||||
        ],
 | 
			
		||||
        "files.exclude": {
 | 
			
		||||
          "**/.git": true,
 | 
			
		||||
          "**/.DS_Store": true,
 | 
			
		||||
          "**/*.pyc": {
 | 
			
		||||
            "when": "$(basename).py"
 | 
			
		||||
          },
 | 
			
		||||
          "**/__pycache__": true
 | 
			
		||||
        },
 | 
			
		||||
        "files.associations": {
 | 
			
		||||
          "**/.vscode/*.json": "jsonc"
 | 
			
		||||
        },
 | 
			
		||||
        "C_Cpp.clang_format_path": "/usr/bin/clang-format-13"
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
  "extensions": [
 | 
			
		||||
    // python
 | 
			
		||||
    "ms-python.python",
 | 
			
		||||
    "visualstudioexptteam.vscodeintellicode",
 | 
			
		||||
    // yaml
 | 
			
		||||
    "redhat.vscode-yaml",
 | 
			
		||||
    // cpp
 | 
			
		||||
    "ms-vscode.cpptools",
 | 
			
		||||
    // editorconfig
 | 
			
		||||
    "editorconfig.editorconfig",
 | 
			
		||||
  ],
 | 
			
		||||
  "settings": {
 | 
			
		||||
    "python.languageServer": "Pylance",
 | 
			
		||||
    "python.pythonPath": "/usr/bin/python3",
 | 
			
		||||
    "python.linting.pylintEnabled": true,
 | 
			
		||||
    "python.linting.enabled": true,
 | 
			
		||||
    "python.formatting.provider": "black",
 | 
			
		||||
    "editor.formatOnPaste": false,
 | 
			
		||||
    "editor.formatOnSave": true,
 | 
			
		||||
    "editor.formatOnType": true,
 | 
			
		||||
    "files.trimTrailingWhitespace": true,
 | 
			
		||||
    "terminal.integrated.defaultProfile.linux": "bash",
 | 
			
		||||
    "yaml.customTags": [
 | 
			
		||||
      "!secret scalar",
 | 
			
		||||
      "!lambda scalar",
 | 
			
		||||
      "!include_dir_named scalar",
 | 
			
		||||
      "!include_dir_list scalar",
 | 
			
		||||
      "!include_dir_merge_list scalar",
 | 
			
		||||
      "!include_dir_merge_named scalar"
 | 
			
		||||
    ],
 | 
			
		||||
    "files.exclude": {
 | 
			
		||||
      "**/.git": true,
 | 
			
		||||
      "**/.DS_Store": true,
 | 
			
		||||
      "**/*.pyc": {
 | 
			
		||||
        "when": "$(basename).py"
 | 
			
		||||
      },
 | 
			
		||||
      "**/__pycache__": true
 | 
			
		||||
    },
 | 
			
		||||
    "files.associations": {
 | 
			
		||||
      "**/.vscode/*.json": "jsonc"
 | 
			
		||||
    },
 | 
			
		||||
    "C_Cpp.clang_format_path": "/usr/bin/clang-format-11",
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -75,9 +75,6 @@ target/
 | 
			
		||||
# pyenv
 | 
			
		||||
.python-version
 | 
			
		||||
 | 
			
		||||
# asdf
 | 
			
		||||
.tool-versions
 | 
			
		||||
 | 
			
		||||
# celery beat schedule file
 | 
			
		||||
celerybeat-schedule
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -25,9 +25,10 @@ indent_size = 2
 | 
			
		||||
[*.{yaml,yml}]
 | 
			
		||||
indent_style = space
 | 
			
		||||
indent_size = 2
 | 
			
		||||
quote_type = double
 | 
			
		||||
quote_type = single
 | 
			
		||||
 | 
			
		||||
# JSON
 | 
			
		||||
[*.json]
 | 
			
		||||
indent_style = space
 | 
			
		||||
indent_size = 2
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										1
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							@@ -1,3 +1,2 @@
 | 
			
		||||
# Normalize line endings to LF in the repository
 | 
			
		||||
* text eol=lf
 | 
			
		||||
*.png binary
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										1
									
								
								.github/FUNDING.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/FUNDING.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,4 +1,3 @@
 | 
			
		||||
---
 | 
			
		||||
# These are supported funding model platforms
 | 
			
		||||
 | 
			
		||||
custom: https://www.nabucasa.com
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										9
									
								
								.github/ISSUE_TEMPLATE/config.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										9
									
								
								.github/ISSUE_TEMPLATE/config.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,4 +1,3 @@
 | 
			
		||||
---
 | 
			
		||||
blank_issues_enabled: false
 | 
			
		||||
contact_links:
 | 
			
		||||
  - name: Issue Tracker
 | 
			
		||||
@@ -6,10 +5,8 @@ contact_links:
 | 
			
		||||
    about: Please create bug reports in the dedicated issue tracker.
 | 
			
		||||
  - name: Feature Request Tracker
 | 
			
		||||
    url: https://github.com/esphome/feature-requests
 | 
			
		||||
    about: |
 | 
			
		||||
      Please create feature requests in the dedicated feature request tracker.
 | 
			
		||||
    about: Please create feature requests in the dedicated feature request tracker.
 | 
			
		||||
  - name: Frequently Asked Question
 | 
			
		||||
    url: https://esphome.io/guides/faq.html
 | 
			
		||||
    about: |
 | 
			
		||||
      Please view the FAQ for common questions and what
 | 
			
		||||
      to include in a bug report.
 | 
			
		||||
    about: Please view the FAQ for common questions and what to include in a bug report.
 | 
			
		||||
    
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										24
									
								
								.github/PULL_REQUEST_TEMPLATE.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										24
									
								
								.github/PULL_REQUEST_TEMPLATE.md
									
									
									
									
										vendored
									
									
								
							@@ -1,33 +1,31 @@
 | 
			
		||||
# What does this implement/fix?
 | 
			
		||||
# What does this implement/fix? 
 | 
			
		||||
 | 
			
		||||
<!-- Quick description and explanation of changes -->
 | 
			
		||||
Quick description and explanation of changes
 | 
			
		||||
 | 
			
		||||
## Types of changes
 | 
			
		||||
 | 
			
		||||
- [ ] Bugfix (non-breaking change which fixes an issue)
 | 
			
		||||
- [ ] New feature (non-breaking change which adds functionality)
 | 
			
		||||
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
 | 
			
		||||
- [ ] Code quality improvements to existing code or addition of tests
 | 
			
		||||
- [ ] Other
 | 
			
		||||
 | 
			
		||||
**Related issue or feature (if applicable):**
 | 
			
		||||
**Related issue or feature (if applicable):** fixes <link to issue>
 | 
			
		||||
 | 
			
		||||
- fixes <link to issue>
 | 
			
		||||
 | 
			
		||||
**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):**
 | 
			
		||||
 | 
			
		||||
- esphome/esphome-docs#<esphome-docs PR number goes here>
 | 
			
		||||
**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):** esphome/esphome-docs#<esphome-docs PR number goes here>
 | 
			
		||||
 | 
			
		||||
## Test Environment
 | 
			
		||||
 | 
			
		||||
- [ ] ESP32
 | 
			
		||||
- [ ] ESP32 IDF
 | 
			
		||||
- [ ] ESP8266
 | 
			
		||||
- [ ] RP2040
 | 
			
		||||
- [ ] BK72xx
 | 
			
		||||
- [ ] RTL87xx
 | 
			
		||||
 | 
			
		||||
## Example entry for `config.yaml`:
 | 
			
		||||
<!--
 | 
			
		||||
  Supplying a configuration snippet, makes it easier for a maintainer to test
 | 
			
		||||
  your PR. Furthermore, for new integrations, it gives an impression of how
 | 
			
		||||
  the configuration would look like.
 | 
			
		||||
  Note: Remove this section if this PR does not have an example entry.
 | 
			
		||||
-->
 | 
			
		||||
 | 
			
		||||
```yaml
 | 
			
		||||
# Example config.yaml
 | 
			
		||||
@@ -37,6 +35,6 @@
 | 
			
		||||
## Checklist:
 | 
			
		||||
  - [ ] The code change is tested and works locally.
 | 
			
		||||
  - [ ] Tests have been added to verify that the new code works (under `tests/` folder).
 | 
			
		||||
 | 
			
		||||
  
 | 
			
		||||
If user exposed functionality or configuration variables are added/changed:
 | 
			
		||||
  - [ ] Documentation added/updated in [esphome-docs](https://github.com/esphome/esphome-docs).
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										97
									
								
								.github/actions/build-image/action.yaml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										97
									
								
								.github/actions/build-image/action.yaml
									
									
									
									
										vendored
									
									
								
							@@ -1,97 +0,0 @@
 | 
			
		||||
name: Build Image
 | 
			
		||||
inputs:
 | 
			
		||||
  platform:
 | 
			
		||||
    description: "Platform to build for"
 | 
			
		||||
    required: true
 | 
			
		||||
    example: "linux/amd64"
 | 
			
		||||
  target:
 | 
			
		||||
    description: "Target to build"
 | 
			
		||||
    required: true
 | 
			
		||||
    example: "docker"
 | 
			
		||||
  baseimg:
 | 
			
		||||
    description: "Base image type"
 | 
			
		||||
    required: true
 | 
			
		||||
    example: "docker"
 | 
			
		||||
  suffix:
 | 
			
		||||
    description: "Suffix to add to tags"
 | 
			
		||||
    required: true
 | 
			
		||||
  version:
 | 
			
		||||
    description: "Version to build"
 | 
			
		||||
    required: true
 | 
			
		||||
    example: "2023.12.0"
 | 
			
		||||
runs:
 | 
			
		||||
  using: "composite"
 | 
			
		||||
  steps:
 | 
			
		||||
    - name: Generate short tags
 | 
			
		||||
      id: tags
 | 
			
		||||
      shell: bash
 | 
			
		||||
      run: |
 | 
			
		||||
        output=$(docker/generate_tags.py \
 | 
			
		||||
          --tag "${{ inputs.version }}" \
 | 
			
		||||
          --suffix "${{ inputs.suffix }}")
 | 
			
		||||
        echo $output
 | 
			
		||||
        for l in $output; do
 | 
			
		||||
          echo $l >> $GITHUB_OUTPUT
 | 
			
		||||
        done
 | 
			
		||||
 | 
			
		||||
    # set cache-to only if dev branch
 | 
			
		||||
    - id: cache-to
 | 
			
		||||
      shell: bash
 | 
			
		||||
      run: |-
 | 
			
		||||
        if [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
 | 
			
		||||
          echo "value=type=gha,mode=max" >> $GITHUB_OUTPUT
 | 
			
		||||
        else
 | 
			
		||||
          echo "value=" >> $GITHUB_OUTPUT
 | 
			
		||||
        fi
 | 
			
		||||
 | 
			
		||||
    - name: Build and push to ghcr by digest
 | 
			
		||||
      id: build-ghcr
 | 
			
		||||
      uses: docker/build-push-action@v6.15.0
 | 
			
		||||
      env:
 | 
			
		||||
        DOCKER_BUILD_SUMMARY: false
 | 
			
		||||
        DOCKER_BUILD_RECORD_UPLOAD: false
 | 
			
		||||
      with:
 | 
			
		||||
        context: .
 | 
			
		||||
        file: ./docker/Dockerfile
 | 
			
		||||
        platforms: ${{ inputs.platform }}
 | 
			
		||||
        target: ${{ inputs.target }}
 | 
			
		||||
        cache-from: type=gha
 | 
			
		||||
        cache-to: ${{ steps.cache-to.outputs.value }}
 | 
			
		||||
        build-args: |
 | 
			
		||||
          BASEIMGTYPE=${{ inputs.baseimg }}
 | 
			
		||||
          BUILD_VERSION=${{ inputs.version }}
 | 
			
		||||
        outputs: |
 | 
			
		||||
          type=image,name=ghcr.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true
 | 
			
		||||
 | 
			
		||||
    - name: Export ghcr digests
 | 
			
		||||
      shell: bash
 | 
			
		||||
      run: |
 | 
			
		||||
        mkdir -p /tmp/digests/${{ inputs.target }}/ghcr
 | 
			
		||||
        digest="${{ steps.build-ghcr.outputs.digest }}"
 | 
			
		||||
        touch "/tmp/digests/${{ inputs.target }}/ghcr/${digest#sha256:}"
 | 
			
		||||
 | 
			
		||||
    - name: Build and push to dockerhub by digest
 | 
			
		||||
      id: build-dockerhub
 | 
			
		||||
      uses: docker/build-push-action@v6.15.0
 | 
			
		||||
      env:
 | 
			
		||||
        DOCKER_BUILD_SUMMARY: false
 | 
			
		||||
        DOCKER_BUILD_RECORD_UPLOAD: false
 | 
			
		||||
      with:
 | 
			
		||||
        context: .
 | 
			
		||||
        file: ./docker/Dockerfile
 | 
			
		||||
        platforms: ${{ inputs.platform }}
 | 
			
		||||
        target: ${{ inputs.target }}
 | 
			
		||||
        cache-from: type=gha
 | 
			
		||||
        cache-to: ${{ steps.cache-to.outputs.value }}
 | 
			
		||||
        build-args: |
 | 
			
		||||
          BASEIMGTYPE=${{ inputs.baseimg }}
 | 
			
		||||
          BUILD_VERSION=${{ inputs.version }}
 | 
			
		||||
        outputs: |
 | 
			
		||||
          type=image,name=docker.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true
 | 
			
		||||
 | 
			
		||||
    - name: Export dockerhub digests
 | 
			
		||||
      shell: bash
 | 
			
		||||
      run: |
 | 
			
		||||
        mkdir -p /tmp/digests/${{ inputs.target }}/dockerhub
 | 
			
		||||
        digest="${{ steps.build-dockerhub.outputs.digest }}"
 | 
			
		||||
        touch "/tmp/digests/${{ inputs.target }}/dockerhub/${digest#sha256:}"
 | 
			
		||||
							
								
								
									
										47
									
								
								.github/actions/restore-python/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										47
									
								
								.github/actions/restore-python/action.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,47 +0,0 @@
 | 
			
		||||
name: Restore Python
 | 
			
		||||
inputs:
 | 
			
		||||
  python-version:
 | 
			
		||||
    description: Python version to restore
 | 
			
		||||
    required: true
 | 
			
		||||
    type: string
 | 
			
		||||
  cache-key:
 | 
			
		||||
    description: Cache key to use
 | 
			
		||||
    required: true
 | 
			
		||||
    type: string
 | 
			
		||||
outputs:
 | 
			
		||||
  python-version:
 | 
			
		||||
    description: Python version restored
 | 
			
		||||
    value: ${{ steps.python.outputs.python-version }}
 | 
			
		||||
runs:
 | 
			
		||||
  using: "composite"
 | 
			
		||||
  steps:
 | 
			
		||||
    - name: Set up Python ${{ inputs.python-version }}
 | 
			
		||||
      id: python
 | 
			
		||||
      uses: actions/setup-python@v5.4.0
 | 
			
		||||
      with:
 | 
			
		||||
        python-version: ${{ inputs.python-version }}
 | 
			
		||||
    - name: Restore Python virtual environment
 | 
			
		||||
      id: cache-venv
 | 
			
		||||
      uses: actions/cache/restore@v4.2.2
 | 
			
		||||
      with:
 | 
			
		||||
        path: venv
 | 
			
		||||
        # yamllint disable-line rule:line-length
 | 
			
		||||
        key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ inputs.cache-key }}
 | 
			
		||||
    - name: Create Python virtual environment
 | 
			
		||||
      if: steps.cache-venv.outputs.cache-hit != 'true' && runner.os != 'Windows'
 | 
			
		||||
      shell: bash
 | 
			
		||||
      run: |
 | 
			
		||||
        python -m venv venv
 | 
			
		||||
        source venv/bin/activate
 | 
			
		||||
        python --version
 | 
			
		||||
        pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
 | 
			
		||||
        pip install -e .
 | 
			
		||||
    - name: Create Python virtual environment
 | 
			
		||||
      if: steps.cache-venv.outputs.cache-hit != 'true' && runner.os == 'Windows'
 | 
			
		||||
      shell: bash
 | 
			
		||||
      run: |
 | 
			
		||||
        python -m venv venv
 | 
			
		||||
        ./venv/Scripts/activate
 | 
			
		||||
        python --version
 | 
			
		||||
        pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
 | 
			
		||||
        pip install -e .
 | 
			
		||||
							
								
								
									
										27
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										27
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,32 +1,9 @@
 | 
			
		||||
---
 | 
			
		||||
version: 2
 | 
			
		||||
updates:
 | 
			
		||||
  - package-ecosystem: pip
 | 
			
		||||
  - package-ecosystem: "pip"
 | 
			
		||||
    directory: "/"
 | 
			
		||||
    schedule:
 | 
			
		||||
      interval: daily
 | 
			
		||||
      interval: "daily"
 | 
			
		||||
    ignore:
 | 
			
		||||
      # Hypotehsis is only used for testing and is updated quite often
 | 
			
		||||
      - dependency-name: hypothesis
 | 
			
		||||
  - package-ecosystem: github-actions
 | 
			
		||||
    directory: "/"
 | 
			
		||||
    schedule:
 | 
			
		||||
      interval: daily
 | 
			
		||||
    open-pull-requests-limit: 10
 | 
			
		||||
    groups:
 | 
			
		||||
      docker-actions:
 | 
			
		||||
        applies-to: version-updates
 | 
			
		||||
        patterns:
 | 
			
		||||
          - "docker/setup-qemu-action"
 | 
			
		||||
          - "docker/login-action"
 | 
			
		||||
          - "docker/setup-buildx-action"
 | 
			
		||||
  - package-ecosystem: github-actions
 | 
			
		||||
    directory: "/.github/actions/build-image"
 | 
			
		||||
    schedule:
 | 
			
		||||
      interval: daily
 | 
			
		||||
    open-pull-requests-limit: 10
 | 
			
		||||
  - package-ecosystem: github-actions
 | 
			
		||||
    directory: "/.github/actions/restore-python"
 | 
			
		||||
    schedule:
 | 
			
		||||
      interval: daily
 | 
			
		||||
    open-pull-requests-limit: 10
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										80
									
								
								.github/workflows/ci-api-proto.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										80
									
								
								.github/workflows/ci-api-proto.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,80 +0,0 @@
 | 
			
		||||
name: API Proto CI
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  pull_request:
 | 
			
		||||
    paths:
 | 
			
		||||
      - "esphome/components/api/api.proto"
 | 
			
		||||
      - "esphome/components/api/api_pb2.cpp"
 | 
			
		||||
      - "esphome/components/api/api_pb2.h"
 | 
			
		||||
      - "esphome/components/api/api_pb2_service.cpp"
 | 
			
		||||
      - "esphome/components/api/api_pb2_service.h"
 | 
			
		||||
      - "script/api_protobuf/api_protobuf.py"
 | 
			
		||||
      - ".github/workflows/ci-api-proto.yml"
 | 
			
		||||
 | 
			
		||||
permissions:
 | 
			
		||||
  contents: read
 | 
			
		||||
  pull-requests: write
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  check:
 | 
			
		||||
    name: Check generated files
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Checkout
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Set up Python
 | 
			
		||||
        uses: actions/setup-python@v5.4.0
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: "3.11"
 | 
			
		||||
 | 
			
		||||
      - name: Install apt dependencies
 | 
			
		||||
        run: |
 | 
			
		||||
          sudo apt update
 | 
			
		||||
          sudo apt-cache show protobuf-compiler
 | 
			
		||||
          sudo apt install -y protobuf-compiler
 | 
			
		||||
          protoc --version
 | 
			
		||||
      - name: Install python dependencies
 | 
			
		||||
        run: pip install aioesphomeapi -c requirements.txt -r requirements_dev.txt
 | 
			
		||||
      - name: Generate files
 | 
			
		||||
        run: script/api_protobuf/api_protobuf.py
 | 
			
		||||
      - name: Check for changes
 | 
			
		||||
        run: |
 | 
			
		||||
          if ! git diff --quiet; then
 | 
			
		||||
            echo "## Job Failed" | tee -a $GITHUB_STEP_SUMMARY
 | 
			
		||||
            echo "You have altered the generated proto files but they do not match what is expected." | tee -a $GITHUB_STEP_SUMMARY
 | 
			
		||||
            echo "Please run 'script/api_protobuf/api_protobuf.py' and commit the changes." | tee -a $GITHUB_STEP_SUMMARY
 | 
			
		||||
            exit 1
 | 
			
		||||
          fi
 | 
			
		||||
      - if: failure()
 | 
			
		||||
        name: Review PR
 | 
			
		||||
        uses: actions/github-script@v7.0.1
 | 
			
		||||
        with:
 | 
			
		||||
          script: |
 | 
			
		||||
            await github.rest.pulls.createReview({
 | 
			
		||||
              pull_number: context.issue.number,
 | 
			
		||||
              owner: context.repo.owner,
 | 
			
		||||
              repo: context.repo.repo,
 | 
			
		||||
              event: 'REQUEST_CHANGES',
 | 
			
		||||
              body: 'You have altered the generated proto files but they do not match what is expected.\nPlease run "script/api_protobuf/api_protobuf.py" and commit the changes.'
 | 
			
		||||
            })
 | 
			
		||||
      - if: success()
 | 
			
		||||
        name: Dismiss review
 | 
			
		||||
        uses: actions/github-script@v7.0.1
 | 
			
		||||
        with:
 | 
			
		||||
          script: |
 | 
			
		||||
            let reviews = await github.rest.pulls.listReviews({
 | 
			
		||||
              pull_number: context.issue.number,
 | 
			
		||||
              owner: context.repo.owner,
 | 
			
		||||
              repo: context.repo.repo
 | 
			
		||||
            });
 | 
			
		||||
            for (let review of reviews.data) {
 | 
			
		||||
              if (review.user.login === 'github-actions[bot]' && review.state === 'CHANGES_REQUESTED') {
 | 
			
		||||
                await github.rest.pulls.dismissReview({
 | 
			
		||||
                  pull_number: context.issue.number,
 | 
			
		||||
                  owner: context.repo.owner,
 | 
			
		||||
                  repo: context.repo.repo,
 | 
			
		||||
                  review_id: review.id,
 | 
			
		||||
                  message: 'Files now match the expected proto files.'
 | 
			
		||||
                });
 | 
			
		||||
              }
 | 
			
		||||
            }
 | 
			
		||||
							
								
								
									
										66
									
								
								.github/workflows/ci-docker.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										66
									
								
								.github/workflows/ci-docker.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,61 +1,53 @@
 | 
			
		||||
---
 | 
			
		||||
name: CI for docker images
 | 
			
		||||
 | 
			
		||||
# Only run when docker paths change
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  push:
 | 
			
		||||
    branches: [dev, beta, release]
 | 
			
		||||
    paths:
 | 
			
		||||
      - "docker/**"
 | 
			
		||||
      - ".github/workflows/ci-docker.yml"
 | 
			
		||||
      - "requirements*.txt"
 | 
			
		||||
      - "platformio.ini"
 | 
			
		||||
      - "script/platformio_install_deps.py"
 | 
			
		||||
      - 'docker/**'
 | 
			
		||||
      - '.github/workflows/**'
 | 
			
		||||
      - 'requirements*.txt'
 | 
			
		||||
      - 'platformio.ini'
 | 
			
		||||
 | 
			
		||||
  pull_request:
 | 
			
		||||
    paths:
 | 
			
		||||
      - "docker/**"
 | 
			
		||||
      - ".github/workflows/ci-docker.yml"
 | 
			
		||||
      - "requirements*.txt"
 | 
			
		||||
      - "platformio.ini"
 | 
			
		||||
      - "script/platformio_install_deps.py"
 | 
			
		||||
      - 'docker/**'
 | 
			
		||||
      - '.github/workflows/**'
 | 
			
		||||
      - 'requirements*.txt'
 | 
			
		||||
      - 'platformio.ini'
 | 
			
		||||
 | 
			
		||||
permissions:
 | 
			
		||||
  contents: read
 | 
			
		||||
  packages: read
 | 
			
		||||
 | 
			
		||||
concurrency:
 | 
			
		||||
  # yamllint disable-line rule:line-length
 | 
			
		||||
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
 | 
			
		||||
  cancel-in-progress: true
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  check-docker:
 | 
			
		||||
    name: Build docker containers
 | 
			
		||||
    runs-on: ${{ matrix.os }}
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      matrix:
 | 
			
		||||
        os: ["ubuntu-latest", "ubuntu-24.04-arm"]
 | 
			
		||||
        arch: [amd64, armv7, aarch64]
 | 
			
		||||
        build_type: ["ha-addon", "docker", "lint"]
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Set up Python
 | 
			
		||||
        uses: actions/setup-python@v5.4.0
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: "3.9"
 | 
			
		||||
      - name: Set up Docker Buildx
 | 
			
		||||
        uses: docker/setup-buildx-action@v3.10.0
 | 
			
		||||
    - uses: actions/checkout@v2
 | 
			
		||||
    - name: Set up Python
 | 
			
		||||
      uses: actions/setup-python@v2
 | 
			
		||||
      with:
 | 
			
		||||
        python-version: '3.9'
 | 
			
		||||
    - name: Set up Docker Buildx
 | 
			
		||||
      uses: docker/setup-buildx-action@v1
 | 
			
		||||
    - name: Set up QEMU
 | 
			
		||||
      uses: docker/setup-qemu-action@v1
 | 
			
		||||
 | 
			
		||||
      - name: Set TAG
 | 
			
		||||
        run: |
 | 
			
		||||
          echo "TAG=check" >> $GITHUB_ENV
 | 
			
		||||
    - name: Set TAG
 | 
			
		||||
      run: |
 | 
			
		||||
        echo "TAG=check" >> $GITHUB_ENV
 | 
			
		||||
 | 
			
		||||
      - name: Run build
 | 
			
		||||
        run: |
 | 
			
		||||
          docker/build.py \
 | 
			
		||||
            --tag "${TAG}" \
 | 
			
		||||
            --arch "${{ matrix.os == 'ubuntu-24.04-arm' && 'aarch64' || 'amd64' }}" \
 | 
			
		||||
            --build-type "${{ matrix.build_type }}" \
 | 
			
		||||
            build
 | 
			
		||||
    - name: Run build
 | 
			
		||||
      run: |
 | 
			
		||||
        docker/build.py \
 | 
			
		||||
          --tag "${TAG}" \
 | 
			
		||||
          --arch "${{ matrix.arch }}" \
 | 
			
		||||
          --build-type "${{ matrix.build_type }}" \
 | 
			
		||||
          build
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										554
									
								
								.github/workflows/ci.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										554
									
								
								.github/workflows/ci.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,4 +1,3 @@
 | 
			
		||||
---
 | 
			
		||||
name: CI
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
@@ -6,503 +5,162 @@ on:
 | 
			
		||||
    branches: [dev, beta, release]
 | 
			
		||||
 | 
			
		||||
  pull_request:
 | 
			
		||||
    paths:
 | 
			
		||||
      - "**"
 | 
			
		||||
      - "!.github/workflows/*.yml"
 | 
			
		||||
      - "!.github/actions/build-image/*"
 | 
			
		||||
      - ".github/workflows/ci.yml"
 | 
			
		||||
      - "!.yamllint"
 | 
			
		||||
      - "!.github/dependabot.yml"
 | 
			
		||||
      - "!docker/**"
 | 
			
		||||
  merge_group:
 | 
			
		||||
 | 
			
		||||
permissions:
 | 
			
		||||
  contents: read
 | 
			
		||||
 | 
			
		||||
env:
 | 
			
		||||
  DEFAULT_PYTHON: "3.9"
 | 
			
		||||
  PYUPGRADE_TARGET: "--py39-plus"
 | 
			
		||||
 | 
			
		||||
concurrency:
 | 
			
		||||
  # yamllint disable-line rule:line-length
 | 
			
		||||
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
 | 
			
		||||
  cancel-in-progress: true
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  common:
 | 
			
		||||
    name: Create common environment
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    outputs:
 | 
			
		||||
      cache-key: ${{ steps.cache-key.outputs.key }}
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Generate cache-key
 | 
			
		||||
        id: cache-key
 | 
			
		||||
        run: echo key="${{ hashFiles('requirements.txt', 'requirements_optional.txt', 'requirements_test.txt') }}" >> $GITHUB_OUTPUT
 | 
			
		||||
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
        id: python
 | 
			
		||||
        uses: actions/setup-python@v5.4.0
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
      - name: Restore Python virtual environment
 | 
			
		||||
        id: cache-venv
 | 
			
		||||
        uses: actions/cache@v4.2.2
 | 
			
		||||
        with:
 | 
			
		||||
          path: venv
 | 
			
		||||
          # yamllint disable-line rule:line-length
 | 
			
		||||
          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ steps.cache-key.outputs.key }}
 | 
			
		||||
      - name: Create Python virtual environment
 | 
			
		||||
        if: steps.cache-venv.outputs.cache-hit != 'true'
 | 
			
		||||
        run: |
 | 
			
		||||
          python -m venv venv
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          python --version
 | 
			
		||||
          pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
 | 
			
		||||
          pip install -e .
 | 
			
		||||
 | 
			
		||||
  ruff:
 | 
			
		||||
    name: Check ruff
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Run Ruff
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          ruff format esphome tests
 | 
			
		||||
      - name: Suggested changes
 | 
			
		||||
        run: script/ci-suggest-changes
 | 
			
		||||
        if: always()
 | 
			
		||||
 | 
			
		||||
  flake8:
 | 
			
		||||
    name: Check flake8
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Run flake8
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          flake8 esphome
 | 
			
		||||
      - name: Suggested changes
 | 
			
		||||
        run: script/ci-suggest-changes
 | 
			
		||||
        if: always()
 | 
			
		||||
 | 
			
		||||
  pylint:
 | 
			
		||||
    name: Check pylint
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Run pylint
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          pylint -f parseable --persistent=n esphome
 | 
			
		||||
      - name: Suggested changes
 | 
			
		||||
        run: script/ci-suggest-changes
 | 
			
		||||
        if: always()
 | 
			
		||||
 | 
			
		||||
  pyupgrade:
 | 
			
		||||
    name: Check pyupgrade
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Run pyupgrade
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          pyupgrade ${{ env.PYUPGRADE_TARGET }} `find esphome -name "*.py" -type f`
 | 
			
		||||
      - name: Suggested changes
 | 
			
		||||
        run: script/ci-suggest-changes
 | 
			
		||||
        if: always()
 | 
			
		||||
 | 
			
		||||
  ci-custom:
 | 
			
		||||
    name: Run script/ci-custom
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Register matcher
 | 
			
		||||
        run: echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
 | 
			
		||||
      - name: Run script/ci-custom
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          script/ci-custom.py
 | 
			
		||||
          script/build_codeowners.py --check
 | 
			
		||||
 | 
			
		||||
  pytest:
 | 
			
		||||
    name: Run pytest
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      matrix:
 | 
			
		||||
        python-version:
 | 
			
		||||
          - "3.9"
 | 
			
		||||
          - "3.10"
 | 
			
		||||
          - "3.11"
 | 
			
		||||
          - "3.12"
 | 
			
		||||
        os:
 | 
			
		||||
          - ubuntu-latest
 | 
			
		||||
          - macOS-latest
 | 
			
		||||
          - windows-latest
 | 
			
		||||
        exclude:
 | 
			
		||||
          # Minimize CI resource usage
 | 
			
		||||
          # by only running the Python version
 | 
			
		||||
          # version used for docker images on Windows and macOS
 | 
			
		||||
          - python-version: "3.12"
 | 
			
		||||
            os: windows-latest
 | 
			
		||||
          - python-version: "3.10"
 | 
			
		||||
            os: windows-latest
 | 
			
		||||
          - python-version: "3.9"
 | 
			
		||||
            os: windows-latest
 | 
			
		||||
          - python-version: "3.12"
 | 
			
		||||
            os: macOS-latest
 | 
			
		||||
          - python-version: "3.10"
 | 
			
		||||
            os: macOS-latest
 | 
			
		||||
          - python-version: "3.9"
 | 
			
		||||
            os: macOS-latest
 | 
			
		||||
    runs-on: ${{ matrix.os }}
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ matrix.python-version }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Register matcher
 | 
			
		||||
        run: echo "::add-matcher::.github/workflows/matchers/pytest.json"
 | 
			
		||||
      - name: Run pytest
 | 
			
		||||
        if: matrix.os == 'windows-latest'
 | 
			
		||||
        run: |
 | 
			
		||||
          ./venv/Scripts/activate
 | 
			
		||||
          pytest -vv --cov-report=xml --tb=native tests
 | 
			
		||||
      - name: Run pytest
 | 
			
		||||
        if: matrix.os == 'ubuntu-latest' || matrix.os == 'macOS-latest'
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          pytest -vv --cov-report=xml --tb=native tests
 | 
			
		||||
      - name: Upload coverage to Codecov
 | 
			
		||||
        uses: codecov/codecov-action@v5
 | 
			
		||||
        with:
 | 
			
		||||
          token: ${{ secrets.CODECOV_TOKEN }}
 | 
			
		||||
 | 
			
		||||
  clang-format:
 | 
			
		||||
    name: Check clang-format
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Install clang-format
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          pip install clang-format -c requirements_dev.txt
 | 
			
		||||
      - name: Run clang-format
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          script/clang-format -i
 | 
			
		||||
          git diff-index --quiet HEAD --
 | 
			
		||||
      - name: Suggested changes
 | 
			
		||||
        run: script/ci-suggest-changes
 | 
			
		||||
        if: always()
 | 
			
		||||
 | 
			
		||||
  clang-tidy:
 | 
			
		||||
  ci:
 | 
			
		||||
    name: ${{ matrix.name }}
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
      - ruff
 | 
			
		||||
      - ci-custom
 | 
			
		||||
      - clang-format
 | 
			
		||||
      - flake8
 | 
			
		||||
      - pylint
 | 
			
		||||
      - pytest
 | 
			
		||||
      - pyupgrade
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      max-parallel: 2
 | 
			
		||||
      matrix:
 | 
			
		||||
        include:
 | 
			
		||||
          - id: ci-custom
 | 
			
		||||
            name: Run script/ci-custom
 | 
			
		||||
          - id: lint-python
 | 
			
		||||
            name: Run script/lint-python
 | 
			
		||||
          - id: test
 | 
			
		||||
            file: tests/test1.yaml
 | 
			
		||||
            name: Test tests/test1.yaml
 | 
			
		||||
            pio_cache_key: test1
 | 
			
		||||
          - id: test
 | 
			
		||||
            file: tests/test2.yaml
 | 
			
		||||
            name: Test tests/test2.yaml
 | 
			
		||||
            pio_cache_key: test2
 | 
			
		||||
          - id: test
 | 
			
		||||
            file: tests/test3.yaml
 | 
			
		||||
            name: Test tests/test3.yaml
 | 
			
		||||
            pio_cache_key: test3
 | 
			
		||||
          - id: test
 | 
			
		||||
            file: tests/test4.yaml
 | 
			
		||||
            name: Test tests/test4.yaml
 | 
			
		||||
            pio_cache_key: test4
 | 
			
		||||
          - id: test
 | 
			
		||||
            file: tests/test5.yaml
 | 
			
		||||
            name: Test tests/test5.yaml
 | 
			
		||||
            pio_cache_key: test5
 | 
			
		||||
          - id: pytest
 | 
			
		||||
            name: Run pytest
 | 
			
		||||
          - id: clang-format
 | 
			
		||||
            name: Run script/clang-format
 | 
			
		||||
          - id: clang-tidy
 | 
			
		||||
            name: Run script/clang-tidy for ESP8266
 | 
			
		||||
            options: --environment esp8266-arduino-tidy --grep USE_ESP8266
 | 
			
		||||
            options: --environment esp8266-tidy --grep USE_ESP8266
 | 
			
		||||
            pio_cache_key: tidyesp8266
 | 
			
		||||
          - id: clang-tidy
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 Arduino 1/4
 | 
			
		||||
            options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 1/4
 | 
			
		||||
            options: --environment esp32-tidy --split-num 4 --split-at 1
 | 
			
		||||
            pio_cache_key: tidyesp32
 | 
			
		||||
          - id: clang-tidy
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 Arduino 2/4
 | 
			
		||||
            options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 2/4
 | 
			
		||||
            options: --environment esp32-tidy --split-num 4 --split-at 2
 | 
			
		||||
            pio_cache_key: tidyesp32
 | 
			
		||||
          - id: clang-tidy
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 Arduino 3/4
 | 
			
		||||
            options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 3/4
 | 
			
		||||
            options: --environment esp32-tidy --split-num 4 --split-at 3
 | 
			
		||||
            pio_cache_key: tidyesp32
 | 
			
		||||
          - id: clang-tidy
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 Arduino 4/4
 | 
			
		||||
            options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 4/4
 | 
			
		||||
            options: --environment esp32-tidy --split-num 4 --split-at 4
 | 
			
		||||
            pio_cache_key: tidyesp32
 | 
			
		||||
          - id: clang-tidy
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 IDF
 | 
			
		||||
            name: Run script/clang-tidy for ESP32 esp-idf
 | 
			
		||||
            options: --environment esp32-idf-tidy --grep USE_ESP_IDF
 | 
			
		||||
            pio_cache_key: tidyesp32-idf
 | 
			
		||||
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
      - uses: actions/checkout@v2
 | 
			
		||||
      - name: Set up Python
 | 
			
		||||
        uses: actions/setup-python@v2
 | 
			
		||||
        id: python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
          python-version: '3.7'
 | 
			
		||||
 | 
			
		||||
      - name: Cache virtualenv
 | 
			
		||||
        uses: actions/cache@v2
 | 
			
		||||
        with:
 | 
			
		||||
          path: .venv
 | 
			
		||||
          key: venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements*.txt') }}
 | 
			
		||||
          restore-keys: |
 | 
			
		||||
            venv-${{ steps.python.outputs.python-version }}-
 | 
			
		||||
 | 
			
		||||
      - name: Set up virtualenv
 | 
			
		||||
        run: |
 | 
			
		||||
          python -m venv .venv
 | 
			
		||||
          source .venv/bin/activate
 | 
			
		||||
          pip install -U pip
 | 
			
		||||
          pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
 | 
			
		||||
          pip install -e .
 | 
			
		||||
          echo "$GITHUB_WORKSPACE/.venv/bin" >> $GITHUB_PATH
 | 
			
		||||
          echo "VIRTUAL_ENV=$GITHUB_WORKSPACE/.venv" >> $GITHUB_ENV
 | 
			
		||||
 | 
			
		||||
      # Use per check platformio cache because checks use different parts
 | 
			
		||||
      - name: Cache platformio
 | 
			
		||||
        if: github.ref == 'refs/heads/dev'
 | 
			
		||||
        uses: actions/cache@v4.2.2
 | 
			
		||||
        uses: actions/cache@v2
 | 
			
		||||
        with:
 | 
			
		||||
          path: ~/.platformio
 | 
			
		||||
          key: platformio-${{ matrix.pio_cache_key }}
 | 
			
		||||
          key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
 | 
			
		||||
        if: matrix.id == 'test' || matrix.id == 'clang-tidy'
 | 
			
		||||
 | 
			
		||||
      - name: Cache platformio
 | 
			
		||||
        if: github.ref != 'refs/heads/dev'
 | 
			
		||||
        uses: actions/cache/restore@v4.2.2
 | 
			
		||||
        with:
 | 
			
		||||
          path: ~/.platformio
 | 
			
		||||
          key: platformio-${{ matrix.pio_cache_key }}
 | 
			
		||||
      - name: Install clang tools
 | 
			
		||||
        run: |
 | 
			
		||||
          sudo apt-get install \
 | 
			
		||||
              clang-format-11 \
 | 
			
		||||
              clang-tidy-11
 | 
			
		||||
        if: matrix.id == 'clang-tidy' || matrix.id == 'clang-format'
 | 
			
		||||
 | 
			
		||||
      - name: Register problem matchers
 | 
			
		||||
        run: |
 | 
			
		||||
          echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
 | 
			
		||||
          echo "::add-matcher::.github/workflows/matchers/lint-python.json"
 | 
			
		||||
          echo "::add-matcher::.github/workflows/matchers/python.json"
 | 
			
		||||
          echo "::add-matcher::.github/workflows/matchers/pytest.json"
 | 
			
		||||
          echo "::add-matcher::.github/workflows/matchers/gcc.json"
 | 
			
		||||
          echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
 | 
			
		||||
 | 
			
		||||
      - name: Run 'pio run --list-targets -e esp32-idf-tidy'
 | 
			
		||||
        if: matrix.name == 'Run script/clang-tidy for ESP32 IDF'
 | 
			
		||||
      - name: Lint Custom
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          mkdir -p .temp
 | 
			
		||||
          pio run --list-targets -e esp32-idf-tidy
 | 
			
		||||
          script/ci-custom.py
 | 
			
		||||
          script/build_codeowners.py --check
 | 
			
		||||
        if: matrix.id == 'ci-custom'
 | 
			
		||||
 | 
			
		||||
      - name: Lint Python
 | 
			
		||||
        run: script/lint-python
 | 
			
		||||
        if: matrix.id == 'lint-python'
 | 
			
		||||
 | 
			
		||||
      - run: esphome compile ${{ matrix.file }}
 | 
			
		||||
        if: matrix.id == 'test'
 | 
			
		||||
        env:
 | 
			
		||||
          # Also cache libdeps, store them in a ~/.platformio subfolder
 | 
			
		||||
          PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
 | 
			
		||||
 | 
			
		||||
      - name: Run pytest
 | 
			
		||||
        run: |
 | 
			
		||||
          pytest -vv --tb=native tests
 | 
			
		||||
        if: matrix.id == 'pytest'
 | 
			
		||||
 | 
			
		||||
      # Also run git-diff-index so that the step is marked as failed on formatting errors,
 | 
			
		||||
      # since clang-format doesn't do anything but change files if -i is passed.
 | 
			
		||||
      - name: Run clang-format
 | 
			
		||||
        run: |
 | 
			
		||||
          script/clang-format -i
 | 
			
		||||
          git diff-index --quiet HEAD --
 | 
			
		||||
        if: matrix.id == 'clang-format'
 | 
			
		||||
 | 
			
		||||
      - name: Run clang-tidy
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          script/clang-tidy --all-headers --fix ${{ matrix.options }}
 | 
			
		||||
        if: matrix.id == 'clang-tidy'
 | 
			
		||||
        env:
 | 
			
		||||
          # Also cache libdeps, store them in a ~/.platformio subfolder
 | 
			
		||||
          PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
 | 
			
		||||
 | 
			
		||||
      - name: Suggested changes
 | 
			
		||||
        run: script/ci-suggest-changes
 | 
			
		||||
        # yamllint disable-line rule:line-length
 | 
			
		||||
        if: always()
 | 
			
		||||
 | 
			
		||||
  list-components:
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
    if: github.event_name == 'pull_request'
 | 
			
		||||
    outputs:
 | 
			
		||||
      components: ${{ steps.list-components.outputs.components }}
 | 
			
		||||
      count: ${{ steps.list-components.outputs.count }}
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
        with:
 | 
			
		||||
          # Fetch enough history so `git merge-base refs/remotes/origin/dev HEAD` works.
 | 
			
		||||
          fetch-depth: 500
 | 
			
		||||
      - name: Get target branch
 | 
			
		||||
        id: target-branch
 | 
			
		||||
        run: |
 | 
			
		||||
          echo "branch=${{ github.event.pull_request.base.ref }}" >> $GITHUB_OUTPUT
 | 
			
		||||
      - name: Fetch ${{ steps.target-branch.outputs.branch }} branch
 | 
			
		||||
        run: |
 | 
			
		||||
          git -c protocol.version=2 fetch --no-tags --prune --no-recurse-submodules --depth=1 origin +refs/heads/${{ steps.target-branch.outputs.branch }}:refs/remotes/origin/${{ steps.target-branch.outputs.branch }}
 | 
			
		||||
          git merge-base refs/remotes/origin/${{ steps.target-branch.outputs.branch }} HEAD
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Find changed components
 | 
			
		||||
        id: list-components
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          components=$(script/list-components.py --changed --branch ${{ steps.target-branch.outputs.branch }})
 | 
			
		||||
          output_components=$(echo "$components" | jq -R -s -c 'split("\n")[:-1] | map(select(length > 0))')
 | 
			
		||||
          count=$(echo "$output_components" | jq length)
 | 
			
		||||
 | 
			
		||||
          echo "components=$output_components" >> $GITHUB_OUTPUT
 | 
			
		||||
          echo "count=$count" >> $GITHUB_OUTPUT
 | 
			
		||||
 | 
			
		||||
          echo "$count Components:"
 | 
			
		||||
          echo "$output_components" | jq
 | 
			
		||||
 | 
			
		||||
  test-build-components:
 | 
			
		||||
    name: Component test ${{ matrix.file }}
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
      - list-components
 | 
			
		||||
    if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) > 0 && fromJSON(needs.list-components.outputs.count) < 100
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      max-parallel: 2
 | 
			
		||||
      matrix:
 | 
			
		||||
        file: ${{ fromJson(needs.list-components.outputs.components) }}
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Install dependencies
 | 
			
		||||
        run: |
 | 
			
		||||
          sudo apt-get update
 | 
			
		||||
          sudo apt-get install libsdl2-dev
 | 
			
		||||
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: test_build_components -e config -c ${{ matrix.file }}
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          ./script/test_build_components -e config -c ${{ matrix.file }}
 | 
			
		||||
      - name: test_build_components -e compile -c ${{ matrix.file }}
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          ./script/test_build_components -e compile -c ${{ matrix.file }}
 | 
			
		||||
 | 
			
		||||
  test-build-components-splitter:
 | 
			
		||||
    name: Split components for testing into 20 groups maximum
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
      - list-components
 | 
			
		||||
    if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
 | 
			
		||||
    outputs:
 | 
			
		||||
      matrix: ${{ steps.split.outputs.components }}
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Split components into 20 groups
 | 
			
		||||
        id: split
 | 
			
		||||
        run: |
 | 
			
		||||
          components=$(echo '${{ needs.list-components.outputs.components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
 | 
			
		||||
          echo "components=$components" >> $GITHUB_OUTPUT
 | 
			
		||||
 | 
			
		||||
  test-build-components-split:
 | 
			
		||||
    name: Test split components
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
      - list-components
 | 
			
		||||
      - test-build-components-splitter
 | 
			
		||||
    if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      max-parallel: 4
 | 
			
		||||
      matrix:
 | 
			
		||||
        components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: List components
 | 
			
		||||
        run: echo ${{ matrix.components }}
 | 
			
		||||
 | 
			
		||||
      - name: Install dependencies
 | 
			
		||||
        run: |
 | 
			
		||||
          sudo apt-get update
 | 
			
		||||
          sudo apt-get install libsdl2-dev
 | 
			
		||||
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Validate config
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          for component in ${{ matrix.components }}; do
 | 
			
		||||
            ./script/test_build_components -e config -c $component
 | 
			
		||||
          done
 | 
			
		||||
      - name: Compile config
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          mkdir build_cache
 | 
			
		||||
          export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
 | 
			
		||||
          for component in ${{ matrix.components }}; do
 | 
			
		||||
            ./script/test_build_components -e compile -c $component
 | 
			
		||||
          done
 | 
			
		||||
 | 
			
		||||
  ci-status:
 | 
			
		||||
    name: CI Status
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
      - ruff
 | 
			
		||||
      - ci-custom
 | 
			
		||||
      - clang-format
 | 
			
		||||
      - flake8
 | 
			
		||||
      - pylint
 | 
			
		||||
      - pytest
 | 
			
		||||
      - pyupgrade
 | 
			
		||||
      - clang-tidy
 | 
			
		||||
      - list-components
 | 
			
		||||
      - test-build-components
 | 
			
		||||
      - test-build-components-splitter
 | 
			
		||||
      - test-build-components-split
 | 
			
		||||
    if: always()
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Success
 | 
			
		||||
        if: ${{ !(contains(needs.*.result, 'failure')) }}
 | 
			
		||||
        run: exit 0
 | 
			
		||||
      - name: Failure
 | 
			
		||||
        if: ${{ contains(needs.*.result, 'failure') }}
 | 
			
		||||
        env:
 | 
			
		||||
          JSON_DOC: ${{ toJSON(needs) }}
 | 
			
		||||
        run: |
 | 
			
		||||
          echo $JSON_DOC | jq
 | 
			
		||||
          exit 1
 | 
			
		||||
        if: always() && (matrix.id == 'clang-tidy' || matrix.id == 'clang-format')
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										91
									
								
								.github/workflows/codeql.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										91
									
								
								.github/workflows/codeql.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,91 +0,0 @@
 | 
			
		||||
# For most projects, this workflow file will not need changing; you simply need
 | 
			
		||||
# to commit it to your repository.
 | 
			
		||||
#
 | 
			
		||||
# You may wish to alter this file to override the set of languages analyzed,
 | 
			
		||||
# or to provide custom queries or build logic.
 | 
			
		||||
#
 | 
			
		||||
# ******** NOTE ********
 | 
			
		||||
# We have attempted to detect the languages in your repository. Please check
 | 
			
		||||
# the `language` matrix defined below to confirm you have the correct set of
 | 
			
		||||
# supported CodeQL languages.
 | 
			
		||||
#
 | 
			
		||||
name: "CodeQL Advanced"
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  workflow_dispatch:
 | 
			
		||||
  schedule:
 | 
			
		||||
    - cron: "30 18 * * 4"
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  analyze:
 | 
			
		||||
    name: Analyze (${{ matrix.language }})
 | 
			
		||||
    # Runner size impacts CodeQL analysis time. To learn more, please see:
 | 
			
		||||
    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
 | 
			
		||||
    #   - https://gh.io/supported-runners-and-hardware-resources
 | 
			
		||||
    #   - https://gh.io/using-larger-runners (GitHub.com only)
 | 
			
		||||
    # Consider using larger runners or machines with greater resources for possible analysis time improvements.
 | 
			
		||||
    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
 | 
			
		||||
    permissions:
 | 
			
		||||
      # required for all workflows
 | 
			
		||||
      security-events: write
 | 
			
		||||
 | 
			
		||||
      # required to fetch internal or private CodeQL packs
 | 
			
		||||
      packages: read
 | 
			
		||||
 | 
			
		||||
      # only required for workflows in private repositories
 | 
			
		||||
      actions: read
 | 
			
		||||
      contents: read
 | 
			
		||||
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      matrix:
 | 
			
		||||
        include:
 | 
			
		||||
          # - language: c-cpp
 | 
			
		||||
          #   build-mode: autobuild
 | 
			
		||||
          - language: python
 | 
			
		||||
            build-mode: none
 | 
			
		||||
            # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
 | 
			
		||||
            # Use `c-cpp` to analyze code written in C, C++ or both
 | 
			
		||||
            # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
 | 
			
		||||
            # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
 | 
			
		||||
            # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
 | 
			
		||||
            # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
 | 
			
		||||
            # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
 | 
			
		||||
            # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Checkout repository
 | 
			
		||||
        uses: actions/checkout@v4
 | 
			
		||||
 | 
			
		||||
      # Initializes the CodeQL tools for scanning.
 | 
			
		||||
      - name: Initialize CodeQL
 | 
			
		||||
        uses: github/codeql-action/init@v3
 | 
			
		||||
        with:
 | 
			
		||||
          languages: ${{ matrix.language }}
 | 
			
		||||
          build-mode: ${{ matrix.build-mode }}
 | 
			
		||||
          # If you wish to specify custom queries, you can do so here or in a config file.
 | 
			
		||||
          # By default, queries listed here will override any specified in a config file.
 | 
			
		||||
          # Prefix the list here with "+" to use these queries and those in the config file.
 | 
			
		||||
 | 
			
		||||
          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
 | 
			
		||||
          # queries: security-extended,security-and-quality
 | 
			
		||||
 | 
			
		||||
      # If the analyze step fails for one of the languages you are analyzing with
 | 
			
		||||
      # "We were unable to automatically build your code", modify the matrix above
 | 
			
		||||
      # to set the build mode to "manual" for that language. Then modify this step
 | 
			
		||||
      # to build your code.
 | 
			
		||||
      # ℹ️ Command-line programs to run using the OS shell.
 | 
			
		||||
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
 | 
			
		||||
      - if: matrix.build-mode == 'manual'
 | 
			
		||||
        shell: bash
 | 
			
		||||
        run: |
 | 
			
		||||
          echo 'If you are using a "manual" build mode for one or more of the' \
 | 
			
		||||
            'languages you are analyzing, replace this with the commands to build' \
 | 
			
		||||
            'your code, for example:'
 | 
			
		||||
          echo '  make bootstrap'
 | 
			
		||||
          echo '  make release'
 | 
			
		||||
          exit 1
 | 
			
		||||
 | 
			
		||||
      - name: Perform CodeQL Analysis
 | 
			
		||||
        uses: github/codeql-action/analyze@v3
 | 
			
		||||
        with:
 | 
			
		||||
          category: "/language:${{matrix.language}}"
 | 
			
		||||
							
								
								
									
										5
									
								
								.github/workflows/lock.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										5
									
								
								.github/workflows/lock.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,9 +1,8 @@
 | 
			
		||||
---
 | 
			
		||||
name: Lock
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  schedule:
 | 
			
		||||
    - cron: "30 0 * * *"
 | 
			
		||||
    - cron: '30 0 * * *'
 | 
			
		||||
  workflow_dispatch:
 | 
			
		||||
 | 
			
		||||
permissions:
 | 
			
		||||
@@ -17,7 +16,7 @@ jobs:
 | 
			
		||||
  lock:
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: dessant/lock-threads@v5.0.1
 | 
			
		||||
      - uses: dessant/lock-threads@v3
 | 
			
		||||
        with:
 | 
			
		||||
          pr-inactive-days: "1"
 | 
			
		||||
          pr-lock-reason: ""
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										4
									
								
								.github/workflows/matchers/lint-python.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/matchers/lint-python.json
									
									
									
									
										vendored
									
									
								
							@@ -1,11 +1,11 @@
 | 
			
		||||
{
 | 
			
		||||
  "problemMatcher": [
 | 
			
		||||
    {
 | 
			
		||||
      "owner": "ruff",
 | 
			
		||||
      "owner": "black",
 | 
			
		||||
      "severity": "error",
 | 
			
		||||
      "pattern": [
 | 
			
		||||
        {
 | 
			
		||||
          "regexp": "^(.*): (Please format this file with the ruff formatter)",
 | 
			
		||||
          "regexp": "^(.*): (Please format this file with the black formatter)",
 | 
			
		||||
          "file": 1,
 | 
			
		||||
          "message": 2
 | 
			
		||||
        }
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										24
									
								
								.github/workflows/needs-docs.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										24
									
								
								.github/workflows/needs-docs.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,24 +0,0 @@
 | 
			
		||||
name: Needs Docs
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  pull_request:
 | 
			
		||||
    types: [labeled, unlabeled]
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  check:
 | 
			
		||||
    name: Check
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check for needs-docs label
 | 
			
		||||
        uses: actions/github-script@v7.0.1
 | 
			
		||||
        with:
 | 
			
		||||
          script: |
 | 
			
		||||
            const { data: labels } = await github.rest.issues.listLabelsOnIssue({
 | 
			
		||||
              owner: context.repo.owner,
 | 
			
		||||
              repo: context.repo.repo,
 | 
			
		||||
              issue_number: context.issue.number
 | 
			
		||||
            });
 | 
			
		||||
            const needsDocs = labels.find(label => label.name === 'needs-docs');
 | 
			
		||||
            if (needsDocs) {
 | 
			
		||||
              core.setFailed('Pull request needs docs');
 | 
			
		||||
            }
 | 
			
		||||
							
								
								
									
										259
									
								
								.github/workflows/release.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										259
									
								
								.github/workflows/release.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,4 +1,3 @@
 | 
			
		||||
---
 | 
			
		||||
name: Publish Release
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
@@ -17,58 +16,44 @@ jobs:
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    outputs:
 | 
			
		||||
      tag: ${{ steps.tag.outputs.tag }}
 | 
			
		||||
      branch_build: ${{ steps.tag.outputs.branch_build }}
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4.1.7
 | 
			
		||||
      - uses: actions/checkout@v2
 | 
			
		||||
      - name: Get tag
 | 
			
		||||
        id: tag
 | 
			
		||||
        # yamllint disable rule:line-length
 | 
			
		||||
        run: |
 | 
			
		||||
          if [[ "${{ github.event_name }}" = "release" ]]; then
 | 
			
		||||
            TAG="${{ github.event.release.tag_name}}"
 | 
			
		||||
            BRANCH_BUILD="false"
 | 
			
		||||
          if [[ "$GITHUB_EVENT_NAME" = "release" ]]; then
 | 
			
		||||
            TAG="${GITHUB_REF#refs/tags/}"
 | 
			
		||||
          else
 | 
			
		||||
            TAG=$(cat esphome/const.py | sed -n -E "s/^__version__\s+=\s+\"(.+)\"$/\1/p")
 | 
			
		||||
            today="$(date --utc '+%Y%m%d')"
 | 
			
		||||
            TAG="${TAG}${today}"
 | 
			
		||||
            BRANCH=${GITHUB_REF#refs/heads/}
 | 
			
		||||
            if [[ "$BRANCH" != "dev" ]]; then
 | 
			
		||||
              TAG="${TAG}-${BRANCH}"
 | 
			
		||||
              BRANCH_BUILD="true"
 | 
			
		||||
            else
 | 
			
		||||
              BRANCH_BUILD="false"
 | 
			
		||||
            fi
 | 
			
		||||
          fi
 | 
			
		||||
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
 | 
			
		||||
          echo "branch_build=${BRANCH_BUILD}" >> $GITHUB_OUTPUT
 | 
			
		||||
        # yamllint enable rule:line-length
 | 
			
		||||
          echo "::set-output name=tag::${TAG}"
 | 
			
		||||
 | 
			
		||||
  deploy-pypi:
 | 
			
		||||
    name: Build and publish to PyPi
 | 
			
		||||
    if: github.repository == 'esphome/esphome' && github.event_name == 'release'
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    permissions:
 | 
			
		||||
      contents: read
 | 
			
		||||
      id-token: write
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4.1.7
 | 
			
		||||
      - uses: actions/checkout@v2
 | 
			
		||||
      - name: Set up Python
 | 
			
		||||
        uses: actions/setup-python@v5.4.0
 | 
			
		||||
        uses: actions/setup-python@v1
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: "3.x"
 | 
			
		||||
          python-version: '3.x'
 | 
			
		||||
      - name: Set up python environment
 | 
			
		||||
        env:
 | 
			
		||||
          ESPHOME_NO_VENV: 1
 | 
			
		||||
        run: script/setup
 | 
			
		||||
        run: |
 | 
			
		||||
          script/setup
 | 
			
		||||
          pip install setuptools wheel twine
 | 
			
		||||
      - name: Build
 | 
			
		||||
        run: |-
 | 
			
		||||
          pip3 install build
 | 
			
		||||
          python3 -m build
 | 
			
		||||
      - name: Publish
 | 
			
		||||
        uses: pypa/gh-action-pypi-publish@v1.12.4
 | 
			
		||||
        run: python setup.py sdist bdist_wheel
 | 
			
		||||
      - name: Upload
 | 
			
		||||
        env:
 | 
			
		||||
          TWINE_USERNAME: __token__
 | 
			
		||||
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
 | 
			
		||||
        run: twine upload dist/*
 | 
			
		||||
 | 
			
		||||
  deploy-docker:
 | 
			
		||||
    name: Build ESPHome ${{ matrix.platform }}
 | 
			
		||||
    name: Build and publish docker containers
 | 
			
		||||
    if: github.repository == 'esphome/esphome'
 | 
			
		||||
    permissions:
 | 
			
		||||
      contents: read
 | 
			
		||||
@@ -76,170 +61,94 @@ jobs:
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    needs: [init]
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      matrix:
 | 
			
		||||
        platform:
 | 
			
		||||
          - linux/amd64
 | 
			
		||||
          - linux/arm64
 | 
			
		||||
        arch: [amd64, armv7, aarch64]
 | 
			
		||||
        build_type: ["ha-addon", "docker", "lint"]
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Set up Python
 | 
			
		||||
        uses: actions/setup-python@v5.4.0
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: "3.9"
 | 
			
		||||
    - uses: actions/checkout@v2
 | 
			
		||||
    - name: Set up Python
 | 
			
		||||
      uses: actions/setup-python@v2
 | 
			
		||||
      with:
 | 
			
		||||
        python-version: '3.9'
 | 
			
		||||
 | 
			
		||||
      - name: Set up Docker Buildx
 | 
			
		||||
        uses: docker/setup-buildx-action@v3.10.0
 | 
			
		||||
      - name: Set up QEMU
 | 
			
		||||
        if: matrix.platform != 'linux/amd64'
 | 
			
		||||
        uses: docker/setup-qemu-action@v3.6.0
 | 
			
		||||
    - name: Set up Docker Buildx
 | 
			
		||||
      uses: docker/setup-buildx-action@v1
 | 
			
		||||
    - name: Set up QEMU
 | 
			
		||||
      uses: docker/setup-qemu-action@v1
 | 
			
		||||
 | 
			
		||||
      - name: Log in to docker hub
 | 
			
		||||
        uses: docker/login-action@v3.3.0
 | 
			
		||||
        with:
 | 
			
		||||
          username: ${{ secrets.DOCKER_USER }}
 | 
			
		||||
          password: ${{ secrets.DOCKER_PASSWORD }}
 | 
			
		||||
      - name: Log in to the GitHub container registry
 | 
			
		||||
        uses: docker/login-action@v3.3.0
 | 
			
		||||
        with:
 | 
			
		||||
    - name: Log in to docker hub
 | 
			
		||||
      uses: docker/login-action@v1
 | 
			
		||||
      with:
 | 
			
		||||
        username: ${{ secrets.DOCKER_USER }}
 | 
			
		||||
        password: ${{ secrets.DOCKER_PASSWORD }}
 | 
			
		||||
    - name: Log in to the GitHub container registry
 | 
			
		||||
      uses: docker/login-action@v1
 | 
			
		||||
      with:
 | 
			
		||||
          registry: ghcr.io
 | 
			
		||||
          username: ${{ github.actor }}
 | 
			
		||||
          password: ${{ secrets.GITHUB_TOKEN }}
 | 
			
		||||
 | 
			
		||||
      - name: Build docker
 | 
			
		||||
        uses: ./.github/actions/build-image
 | 
			
		||||
        with:
 | 
			
		||||
          platform: ${{ matrix.platform }}
 | 
			
		||||
          target: docker
 | 
			
		||||
          baseimg: docker
 | 
			
		||||
          suffix: ""
 | 
			
		||||
          version: ${{ needs.init.outputs.tag }}
 | 
			
		||||
    - name: Build and push
 | 
			
		||||
      run: |
 | 
			
		||||
        docker/build.py \
 | 
			
		||||
          --tag "${{ needs.init.outputs.tag }}" \
 | 
			
		||||
          --arch "${{ matrix.arch }}" \
 | 
			
		||||
          --build-type "${{ matrix.build_type }}" \
 | 
			
		||||
          build \
 | 
			
		||||
          --push
 | 
			
		||||
 | 
			
		||||
      - name: Build ha-addon
 | 
			
		||||
        uses: ./.github/actions/build-image
 | 
			
		||||
        with:
 | 
			
		||||
          platform: ${{ matrix.platform }}
 | 
			
		||||
          target: hassio
 | 
			
		||||
          baseimg: hassio
 | 
			
		||||
          suffix: "hassio"
 | 
			
		||||
          version: ${{ needs.init.outputs.tag }}
 | 
			
		||||
 | 
			
		||||
      - name: Build lint
 | 
			
		||||
        uses: ./.github/actions/build-image
 | 
			
		||||
        with:
 | 
			
		||||
          platform: ${{ matrix.platform }}
 | 
			
		||||
          target: lint
 | 
			
		||||
          baseimg: docker
 | 
			
		||||
          suffix: lint
 | 
			
		||||
          version: ${{ needs.init.outputs.tag }}
 | 
			
		||||
 | 
			
		||||
      - name: Sanitize platform name
 | 
			
		||||
        id: sanitize
 | 
			
		||||
        run: |
 | 
			
		||||
          echo "${{ matrix.platform }}" | sed 's|/|-|g' > /tmp/platform
 | 
			
		||||
          echo name=$(cat /tmp/platform) >> $GITHUB_OUTPUT
 | 
			
		||||
 | 
			
		||||
      - name: Upload digests
 | 
			
		||||
        uses: actions/upload-artifact@v4.6.1
 | 
			
		||||
        with:
 | 
			
		||||
          name: digests-${{ steps.sanitize.outputs.name }}
 | 
			
		||||
          path: /tmp/digests
 | 
			
		||||
          retention-days: 1
 | 
			
		||||
 | 
			
		||||
  deploy-manifest:
 | 
			
		||||
    name: Publish ESPHome ${{ matrix.image.title }} to ${{ matrix.registry }}
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    needs:
 | 
			
		||||
      - init
 | 
			
		||||
      - deploy-docker
 | 
			
		||||
  deploy-docker-manifest:
 | 
			
		||||
    if: github.repository == 'esphome/esphome'
 | 
			
		||||
    permissions:
 | 
			
		||||
      contents: read
 | 
			
		||||
      packages: write
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    needs: [init, deploy-docker]
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      matrix:
 | 
			
		||||
        image:
 | 
			
		||||
          - title: "ha-addon"
 | 
			
		||||
            target: "hassio"
 | 
			
		||||
            suffix: "hassio"
 | 
			
		||||
          - title: "docker"
 | 
			
		||||
            target: "docker"
 | 
			
		||||
            suffix: ""
 | 
			
		||||
          - title: "lint"
 | 
			
		||||
            target: "lint"
 | 
			
		||||
            suffix: "lint"
 | 
			
		||||
        registry:
 | 
			
		||||
          - ghcr
 | 
			
		||||
          - dockerhub
 | 
			
		||||
        build_type: ["ha-addon", "docker", "lint"]
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4.1.7
 | 
			
		||||
    - uses: actions/checkout@v2
 | 
			
		||||
    - name: Set up Python
 | 
			
		||||
      uses: actions/setup-python@v2
 | 
			
		||||
      with:
 | 
			
		||||
        python-version: '3.9'
 | 
			
		||||
    - name: Enable experimental manifest support
 | 
			
		||||
      run: |
 | 
			
		||||
        mkdir -p ~/.docker
 | 
			
		||||
        echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
 | 
			
		||||
 | 
			
		||||
      - name: Download digests
 | 
			
		||||
        uses: actions/download-artifact@v4.1.9
 | 
			
		||||
        with:
 | 
			
		||||
          pattern: digests-*
 | 
			
		||||
          path: /tmp/digests
 | 
			
		||||
          merge-multiple: true
 | 
			
		||||
 | 
			
		||||
      - name: Set up Docker Buildx
 | 
			
		||||
        uses: docker/setup-buildx-action@v3.10.0
 | 
			
		||||
 | 
			
		||||
      - name: Log in to docker hub
 | 
			
		||||
        if: matrix.registry == 'dockerhub'
 | 
			
		||||
        uses: docker/login-action@v3.3.0
 | 
			
		||||
        with:
 | 
			
		||||
          username: ${{ secrets.DOCKER_USER }}
 | 
			
		||||
          password: ${{ secrets.DOCKER_PASSWORD }}
 | 
			
		||||
      - name: Log in to the GitHub container registry
 | 
			
		||||
        if: matrix.registry == 'ghcr'
 | 
			
		||||
        uses: docker/login-action@v3.3.0
 | 
			
		||||
        with:
 | 
			
		||||
    - name: Log in to docker hub
 | 
			
		||||
      uses: docker/login-action@v1
 | 
			
		||||
      with:
 | 
			
		||||
        username: ${{ secrets.DOCKER_USER }}
 | 
			
		||||
        password: ${{ secrets.DOCKER_PASSWORD }}
 | 
			
		||||
    - name: Log in to the GitHub container registry
 | 
			
		||||
      uses: docker/login-action@v1
 | 
			
		||||
      with:
 | 
			
		||||
          registry: ghcr.io
 | 
			
		||||
          username: ${{ github.actor }}
 | 
			
		||||
          password: ${{ secrets.GITHUB_TOKEN }}
 | 
			
		||||
 | 
			
		||||
      - name: Generate short tags
 | 
			
		||||
        id: tags
 | 
			
		||||
        run: |
 | 
			
		||||
          output=$(docker/generate_tags.py \
 | 
			
		||||
            --tag "${{ needs.init.outputs.tag }}" \
 | 
			
		||||
            --suffix "${{ matrix.image.suffix }}" \
 | 
			
		||||
            --registry "${{ matrix.registry }}")
 | 
			
		||||
          echo $output
 | 
			
		||||
          for l in $output; do
 | 
			
		||||
            echo $l >> $GITHUB_OUTPUT
 | 
			
		||||
          done
 | 
			
		||||
    - name: Run manifest
 | 
			
		||||
      run: |
 | 
			
		||||
        docker/build.py \
 | 
			
		||||
          --tag "${{ needs.init.outputs.tag }}" \
 | 
			
		||||
          --build-type "${{ matrix.build_type }}" \
 | 
			
		||||
          manifest
 | 
			
		||||
 | 
			
		||||
      - name: Create manifest list and push
 | 
			
		||||
        working-directory: /tmp/digests/${{ matrix.image.target }}/${{ matrix.registry }}
 | 
			
		||||
        run: |
 | 
			
		||||
          docker buildx imagetools create $(jq -Rcnr 'inputs | . / "," | map("-t " + .) | join(" ")' <<< "${{ steps.tags.outputs.tags}}") \
 | 
			
		||||
            $(printf '${{ steps.tags.outputs.image }}@sha256:%s ' *)
 | 
			
		||||
 | 
			
		||||
  deploy-ha-addon-repo:
 | 
			
		||||
    if: github.repository == 'esphome/esphome' && needs.init.outputs.branch_build == 'false'
 | 
			
		||||
  deploy-hassio-repo:
 | 
			
		||||
    if: github.repository == 'esphome/esphome' && github.event_name == 'release'
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    needs:
 | 
			
		||||
      - init
 | 
			
		||||
      - deploy-manifest
 | 
			
		||||
    needs: [deploy-docker]
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Trigger Workflow
 | 
			
		||||
        uses: actions/github-script@v7.0.1
 | 
			
		||||
        with:
 | 
			
		||||
          github-token: ${{ secrets.DEPLOY_HA_ADDON_REPO_TOKEN }}
 | 
			
		||||
          script: |
 | 
			
		||||
            let description = "ESPHome";
 | 
			
		||||
            if (context.eventName == "release") {
 | 
			
		||||
              description = ${{ toJSON(github.event.release.body) }};
 | 
			
		||||
            }
 | 
			
		||||
            github.rest.actions.createWorkflowDispatch({
 | 
			
		||||
              owner: "esphome",
 | 
			
		||||
              repo: "home-assistant-addon",
 | 
			
		||||
              workflow_id: "bump-version.yml",
 | 
			
		||||
              ref: "main",
 | 
			
		||||
              inputs: {
 | 
			
		||||
                version: "${{ needs.init.outputs.tag }}",
 | 
			
		||||
                content: description
 | 
			
		||||
              }
 | 
			
		||||
            })
 | 
			
		||||
      - env:
 | 
			
		||||
          TOKEN: ${{ secrets.DEPLOY_HASSIO_TOKEN }}
 | 
			
		||||
        run: |
 | 
			
		||||
          TAG="${GITHUB_REF#refs/tags/}"
 | 
			
		||||
          curl \
 | 
			
		||||
            -u ":$TOKEN" \
 | 
			
		||||
            -X POST \
 | 
			
		||||
            -H "Accept: application/vnd.github.v3+json" \
 | 
			
		||||
            https://api.github.com/repos/esphome/hassio/actions/workflows/bump-version.yml/dispatches \
 | 
			
		||||
            -d "{\"ref\":\"main\",\"inputs\":{\"version\":\"$TAG\"}}"
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										12
									
								
								.github/workflows/stale.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										12
									
								
								.github/workflows/stale.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,9 +1,8 @@
 | 
			
		||||
---
 | 
			
		||||
name: Stale
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  schedule:
 | 
			
		||||
    - cron: "30 0 * * *"
 | 
			
		||||
    - cron: '30 0 * * *'
 | 
			
		||||
  workflow_dispatch:
 | 
			
		||||
 | 
			
		||||
permissions:
 | 
			
		||||
@@ -17,7 +16,7 @@ jobs:
 | 
			
		||||
  stale:
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/stale@v9.1.0
 | 
			
		||||
      - uses: actions/stale@v4
 | 
			
		||||
        with:
 | 
			
		||||
          days-before-pr-stale: 90
 | 
			
		||||
          days-before-pr-close: 7
 | 
			
		||||
@@ -25,19 +24,18 @@ jobs:
 | 
			
		||||
          days-before-issue-close: -1
 | 
			
		||||
          remove-stale-when-updated: true
 | 
			
		||||
          stale-pr-label: "stale"
 | 
			
		||||
          exempt-pr-labels: "not-stale"
 | 
			
		||||
          exempt-pr-labels: "no-stale"
 | 
			
		||||
          stale-pr-message: >
 | 
			
		||||
            There hasn't been any activity on this pull request recently. This
 | 
			
		||||
            pull request has been automatically marked as stale because of that
 | 
			
		||||
            and will be closed if no further activity occurs within 7 days.
 | 
			
		||||
            Thank you for your contributions.
 | 
			
		||||
 | 
			
		||||
  # Use stale to automatically close issues with a
 | 
			
		||||
  # reference to the issue tracker
 | 
			
		||||
  # Use stale to automatically close issues with a reference to the issue tracker
 | 
			
		||||
  close-issues:
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/stale@v9.1.0
 | 
			
		||||
      - uses: actions/stale@v4
 | 
			
		||||
        with:
 | 
			
		||||
          days-before-pr-stale: -1
 | 
			
		||||
          days-before-pr-close: -1
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										48
									
								
								.github/workflows/sync-device-classes.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										48
									
								
								.github/workflows/sync-device-classes.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,48 +0,0 @@
 | 
			
		||||
---
 | 
			
		||||
name: Synchronise Device Classes from Home Assistant
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  workflow_dispatch:
 | 
			
		||||
  schedule:
 | 
			
		||||
    - cron: "45 6 * * *"
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  sync:
 | 
			
		||||
    name: Sync Device Classes
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    if: github.repository == 'esphome/esphome'
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Checkout
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
 | 
			
		||||
      - name: Checkout Home Assistant
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
        with:
 | 
			
		||||
          repository: home-assistant/core
 | 
			
		||||
          path: lib/home-assistant
 | 
			
		||||
 | 
			
		||||
      - name: Setup Python
 | 
			
		||||
        uses: actions/setup-python@v5.4.0
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: 3.12
 | 
			
		||||
 | 
			
		||||
      - name: Install Home Assistant
 | 
			
		||||
        run: |
 | 
			
		||||
          python -m pip install --upgrade pip
 | 
			
		||||
          pip install -e lib/home-assistant
 | 
			
		||||
 | 
			
		||||
      - name: Sync
 | 
			
		||||
        run: |
 | 
			
		||||
          python ./script/sync-device_class.py
 | 
			
		||||
 | 
			
		||||
      - name: Commit changes
 | 
			
		||||
        uses: peter-evans/create-pull-request@v7.0.7
 | 
			
		||||
        with:
 | 
			
		||||
          commit-message: "Synchronise Device Classes from Home Assistant"
 | 
			
		||||
          committer: esphomebot <esphome@nabucasa.com>
 | 
			
		||||
          author: esphomebot <esphome@nabucasa.com>
 | 
			
		||||
          branch: sync/device-classes
 | 
			
		||||
          delete-branch: true
 | 
			
		||||
          title: "Synchronise Device Classes from Home Assistant"
 | 
			
		||||
          body-path: .github/PULL_REQUEST_TEMPLATE.md
 | 
			
		||||
          token: ${{ secrets.DEVICE_CLASS_SYNC_TOKEN }}
 | 
			
		||||
							
								
								
									
										25
									
								
								.github/workflows/yaml-lint.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										25
									
								
								.github/workflows/yaml-lint.yml
									
									
									
									
										vendored
									
									
								
							@@ -1,25 +0,0 @@
 | 
			
		||||
---
 | 
			
		||||
name: YAML lint
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  push:
 | 
			
		||||
    branches: [dev, beta, release]
 | 
			
		||||
    paths:
 | 
			
		||||
      - "**.yaml"
 | 
			
		||||
      - "**.yml"
 | 
			
		||||
  pull_request:
 | 
			
		||||
    paths:
 | 
			
		||||
      - "**.yaml"
 | 
			
		||||
      - "**.yml"
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  yamllint:
 | 
			
		||||
    name: yamllint
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@v4.1.7
 | 
			
		||||
      - name: Run yamllint
 | 
			
		||||
        uses: frenck/action-yamllint@v1.5.0
 | 
			
		||||
        with:
 | 
			
		||||
          strict: true
 | 
			
		||||
							
								
								
									
										16
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										16
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							@@ -13,12 +13,6 @@ __pycache__/
 | 
			
		||||
# Intellij Idea
 | 
			
		||||
.idea
 | 
			
		||||
 | 
			
		||||
# Eclipse
 | 
			
		||||
.project
 | 
			
		||||
.cproject
 | 
			
		||||
.pydevproject
 | 
			
		||||
.settings/
 | 
			
		||||
 | 
			
		||||
# Vim
 | 
			
		||||
*.swp
 | 
			
		||||
 | 
			
		||||
@@ -75,9 +69,6 @@ cov.xml
 | 
			
		||||
# pyenv
 | 
			
		||||
.python-version
 | 
			
		||||
 | 
			
		||||
# asdf
 | 
			
		||||
.tool-versions
 | 
			
		||||
 | 
			
		||||
# Environments
 | 
			
		||||
.env
 | 
			
		||||
.venv
 | 
			
		||||
@@ -86,7 +77,6 @@ venv/
 | 
			
		||||
ENV/
 | 
			
		||||
env.bak/
 | 
			
		||||
venv.bak/
 | 
			
		||||
venv-*/
 | 
			
		||||
 | 
			
		||||
# mypy
 | 
			
		||||
.mypy_cache/
 | 
			
		||||
@@ -137,9 +127,3 @@ tests/.esphome/
 | 
			
		||||
 | 
			
		||||
sdkconfig.*
 | 
			
		||||
!sdkconfig.defaults
 | 
			
		||||
 | 
			
		||||
.tests/
 | 
			
		||||
 | 
			
		||||
/components
 | 
			
		||||
/managed_components
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										6
									
								
								.gitpod.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								.gitpod.yml
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,6 @@
 | 
			
		||||
ports:
 | 
			
		||||
- port: 6052
 | 
			
		||||
  onOpen: open-preview
 | 
			
		||||
tasks:
 | 
			
		||||
- before: pyenv local $(pyenv version | grep '^3\.' | cut -d ' ' -f 1) && script/setup
 | 
			
		||||
  command: python -m esphome config dashboard
 | 
			
		||||
@@ -1,18 +1,16 @@
 | 
			
		||||
---
 | 
			
		||||
# See https://pre-commit.com for more information
 | 
			
		||||
# See https://pre-commit.com/hooks.html for more hooks
 | 
			
		||||
repos:
 | 
			
		||||
  - repo: https://github.com/astral-sh/ruff-pre-commit
 | 
			
		||||
    # Ruff version.
 | 
			
		||||
    rev: v0.9.2
 | 
			
		||||
  - repo: https://github.com/ambv/black
 | 
			
		||||
    rev: 20.8b1
 | 
			
		||||
    hooks:
 | 
			
		||||
      # Run the linter.
 | 
			
		||||
      - id: ruff
 | 
			
		||||
        args: [--fix]
 | 
			
		||||
      # Run the formatter.
 | 
			
		||||
      - id: ruff-format
 | 
			
		||||
  - repo: https://github.com/PyCQA/flake8
 | 
			
		||||
    rev: 6.1.0
 | 
			
		||||
    - id: black
 | 
			
		||||
      args:
 | 
			
		||||
        - --safe
 | 
			
		||||
        - --quiet
 | 
			
		||||
      files: ^((esphome|script|tests)/.+)?[^/]+\.py$
 | 
			
		||||
  - repo: https://gitlab.com/pycqa/flake8
 | 
			
		||||
    rev: 3.8.4
 | 
			
		||||
    hooks:
 | 
			
		||||
      - id: flake8
 | 
			
		||||
        additional_dependencies:
 | 
			
		||||
@@ -27,24 +25,3 @@ repos:
 | 
			
		||||
          - --branch=dev
 | 
			
		||||
          - --branch=release
 | 
			
		||||
          - --branch=beta
 | 
			
		||||
  - repo: https://github.com/asottile/pyupgrade
 | 
			
		||||
    rev: v3.15.2
 | 
			
		||||
    hooks:
 | 
			
		||||
      - id: pyupgrade
 | 
			
		||||
        args: [--py39-plus]
 | 
			
		||||
  - repo: https://github.com/adrienverge/yamllint.git
 | 
			
		||||
    rev: v1.35.1
 | 
			
		||||
    hooks:
 | 
			
		||||
      - id: yamllint
 | 
			
		||||
  - repo: https://github.com/pre-commit/mirrors-clang-format
 | 
			
		||||
    rev: v13.0.1
 | 
			
		||||
    hooks:
 | 
			
		||||
      - id: clang-format
 | 
			
		||||
        types_or: [c, c++]
 | 
			
		||||
  - repo: local
 | 
			
		||||
    hooks:
 | 
			
		||||
      - id: pylint
 | 
			
		||||
        name: pylint
 | 
			
		||||
        entry: python3 script/run-in-env.py pylint
 | 
			
		||||
        language: system
 | 
			
		||||
        types: [python]
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										33
									
								
								.vscode/tasks.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										33
									
								
								.vscode/tasks.json
									
									
									
									
										vendored
									
									
								
							@@ -2,24 +2,15 @@
 | 
			
		||||
  "version": "2.0.0",
 | 
			
		||||
  "tasks": [
 | 
			
		||||
    {
 | 
			
		||||
      "label": "Run Dashboard",
 | 
			
		||||
      "label": "run",
 | 
			
		||||
      "type": "shell",
 | 
			
		||||
      "command": "${command:python.interpreterPath}",
 | 
			
		||||
      "args": [
 | 
			
		||||
        "-m",
 | 
			
		||||
        "esphome",
 | 
			
		||||
        "dashboard",
 | 
			
		||||
        "config/"
 | 
			
		||||
      ],
 | 
			
		||||
      "command": "python3 -m esphome dashboard config/",
 | 
			
		||||
      "problemMatcher": []
 | 
			
		||||
    },
 | 
			
		||||
    {
 | 
			
		||||
      "label": "clang-tidy",
 | 
			
		||||
      "type": "shell",
 | 
			
		||||
      "command": "${command:python.interpreterPath}",
 | 
			
		||||
      "args": [
 | 
			
		||||
        "./script/clang-tidy"
 | 
			
		||||
      ],
 | 
			
		||||
      "command": "./script/clang-tidy",
 | 
			
		||||
      "problemMatcher": [
 | 
			
		||||
        {
 | 
			
		||||
          "owner": "clang-tidy",
 | 
			
		||||
@@ -36,24 +27,6 @@
 | 
			
		||||
          ]
 | 
			
		||||
        }
 | 
			
		||||
      ]
 | 
			
		||||
    },
 | 
			
		||||
    {
 | 
			
		||||
      "label": "Generate proto files",
 | 
			
		||||
      "type": "shell",
 | 
			
		||||
      "command": "${command:python.interpreterPath}",
 | 
			
		||||
      "args": [
 | 
			
		||||
        "./script/api_protobuf/api_protobuf.py"
 | 
			
		||||
      ],
 | 
			
		||||
      "group": {
 | 
			
		||||
        "kind": "build",
 | 
			
		||||
        "isDefault": true
 | 
			
		||||
      },
 | 
			
		||||
      "presentation": {
 | 
			
		||||
        "reveal": "never",
 | 
			
		||||
        "close": true,
 | 
			
		||||
        "panel": "new"
 | 
			
		||||
      },
 | 
			
		||||
      "problemMatcher": []
 | 
			
		||||
    }
 | 
			
		||||
  ]
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										19
									
								
								.yamllint
									
									
									
									
									
								
							
							
						
						
									
										19
									
								
								.yamllint
									
									
									
									
									
								
							@@ -1,19 +0,0 @@
 | 
			
		||||
---
 | 
			
		||||
extends: default
 | 
			
		||||
 | 
			
		||||
ignore-from-file: .gitignore
 | 
			
		||||
 | 
			
		||||
rules:
 | 
			
		||||
  document-start: disable
 | 
			
		||||
  empty-lines:
 | 
			
		||||
    level: error
 | 
			
		||||
    max: 1
 | 
			
		||||
    max-start: 0
 | 
			
		||||
    max-end: 1
 | 
			
		||||
  indentation:
 | 
			
		||||
    level: error
 | 
			
		||||
    spaces: 2
 | 
			
		||||
    indent-sequences: true
 | 
			
		||||
    check-multi-line-strings: false
 | 
			
		||||
  line-length: disable
 | 
			
		||||
  truthy: disable
 | 
			
		||||
							
								
								
									
										348
									
								
								CODEOWNERS
									
									
									
									
									
								
							
							
						
						
									
										348
									
								
								CODEOWNERS
									
									
									
									
									
								
							@@ -6,257 +6,85 @@
 | 
			
		||||
# the integration's code owner is automatically notified.
 | 
			
		||||
 | 
			
		||||
# Core Code
 | 
			
		||||
pyproject.toml @esphome/core
 | 
			
		||||
setup.py @esphome/core
 | 
			
		||||
esphome/*.py @esphome/core
 | 
			
		||||
esphome/core/* @esphome/core
 | 
			
		||||
 | 
			
		||||
# Integrations
 | 
			
		||||
esphome/components/a01nyub/* @MrSuicideParrot
 | 
			
		||||
esphome/components/a02yyuw/* @TH-Braemer
 | 
			
		||||
esphome/components/absolute_humidity/* @DAVe3283
 | 
			
		||||
esphome/components/ac_dimmer/* @glmnet
 | 
			
		||||
esphome/components/adc/* @esphome/core
 | 
			
		||||
esphome/components/adc128s102/* @DeerMaximum
 | 
			
		||||
esphome/components/addressable_light/* @justfalter
 | 
			
		||||
esphome/components/ade7880/* @kpfleming
 | 
			
		||||
esphome/components/ade7953/* @angelnu
 | 
			
		||||
esphome/components/ade7953_i2c/* @angelnu
 | 
			
		||||
esphome/components/ade7953_spi/* @angelnu
 | 
			
		||||
esphome/components/ads1118/* @solomondg1
 | 
			
		||||
esphome/components/ags10/* @mak-42
 | 
			
		||||
esphome/components/aic3204/* @kbx81
 | 
			
		||||
esphome/components/airthings_ble/* @jeromelaban
 | 
			
		||||
esphome/components/airthings_wave_base/* @jeromelaban @kpfleming @ncareau
 | 
			
		||||
esphome/components/airthings_wave_mini/* @ncareau
 | 
			
		||||
esphome/components/airthings_wave_plus/* @jeromelaban
 | 
			
		||||
esphome/components/alarm_control_panel/* @grahambrown11 @hwstar
 | 
			
		||||
esphome/components/alpha3/* @jan-hofmeier
 | 
			
		||||
esphome/components/am2315c/* @swoboda1337
 | 
			
		||||
esphome/components/am43/* @buxtronix
 | 
			
		||||
esphome/components/am43/cover/* @buxtronix
 | 
			
		||||
esphome/components/am43/sensor/* @buxtronix
 | 
			
		||||
esphome/components/analog_threshold/* @ianchi
 | 
			
		||||
esphome/components/animation/* @syndlex
 | 
			
		||||
esphome/components/anova/* @buxtronix
 | 
			
		||||
esphome/components/apds9306/* @aodrenah
 | 
			
		||||
esphome/components/api/* @OttoWinter
 | 
			
		||||
esphome/components/as5600/* @ammmze
 | 
			
		||||
esphome/components/as5600/sensor/* @ammmze
 | 
			
		||||
esphome/components/as7341/* @mrgnr
 | 
			
		||||
esphome/components/async_tcp/* @OttoWinter
 | 
			
		||||
esphome/components/at581x/* @X-Ryl669
 | 
			
		||||
esphome/components/atc_mithermometer/* @ahpohl
 | 
			
		||||
esphome/components/atm90e26/* @danieltwagner
 | 
			
		||||
esphome/components/atm90e32/* @circuitsetup @descipher
 | 
			
		||||
esphome/components/audio/* @kahrendt
 | 
			
		||||
esphome/components/audio_adc/* @kbx81
 | 
			
		||||
esphome/components/audio_dac/* @kbx81
 | 
			
		||||
esphome/components/axs15231/* @clydebarrow
 | 
			
		||||
esphome/components/b_parasite/* @rbaron
 | 
			
		||||
esphome/components/ballu/* @bazuchan
 | 
			
		||||
esphome/components/bang_bang/* @OttoWinter
 | 
			
		||||
esphome/components/bedjet/* @jhansche
 | 
			
		||||
esphome/components/bedjet/climate/* @jhansche
 | 
			
		||||
esphome/components/bedjet/fan/* @jhansche
 | 
			
		||||
esphome/components/bedjet/sensor/* @javawizard @jhansche
 | 
			
		||||
esphome/components/beken_spi_led_strip/* @Mat931
 | 
			
		||||
esphome/components/bh1750/* @OttoWinter
 | 
			
		||||
esphome/components/binary_sensor/* @esphome/core
 | 
			
		||||
esphome/components/bk72xx/* @kuba2k2
 | 
			
		||||
esphome/components/bl0906/* @athom-tech @jesserockz @tarontop
 | 
			
		||||
esphome/components/bl0939/* @ziceva
 | 
			
		||||
esphome/components/bl0940/* @tobias-
 | 
			
		||||
esphome/components/bl0942/* @dbuezas @dwmw2
 | 
			
		||||
esphome/components/ble_client/* @buxtronix @clydebarrow
 | 
			
		||||
esphome/components/bluetooth_proxy/* @jesserockz
 | 
			
		||||
esphome/components/bme280_base/* @esphome/core
 | 
			
		||||
esphome/components/bme280_spi/* @apbodrov
 | 
			
		||||
esphome/components/ble_client/* @buxtronix
 | 
			
		||||
esphome/components/bme680_bsec/* @trvrnrth
 | 
			
		||||
esphome/components/bme68x_bsec2/* @kbx81 @neffs
 | 
			
		||||
esphome/components/bme68x_bsec2_i2c/* @kbx81 @neffs
 | 
			
		||||
esphome/components/bmi160/* @flaviut
 | 
			
		||||
esphome/components/bmp280_base/* @ademuri
 | 
			
		||||
esphome/components/bmp280_i2c/* @ademuri
 | 
			
		||||
esphome/components/bmp280_spi/* @ademuri
 | 
			
		||||
esphome/components/bmp3xx/* @latonita
 | 
			
		||||
esphome/components/bmp3xx_base/* @latonita @martgras
 | 
			
		||||
esphome/components/bmp3xx_i2c/* @latonita
 | 
			
		||||
esphome/components/bmp3xx_spi/* @latonita
 | 
			
		||||
esphome/components/bmp581/* @kahrendt
 | 
			
		||||
esphome/components/bp1658cj/* @Cossid
 | 
			
		||||
esphome/components/bp5758d/* @Cossid
 | 
			
		||||
esphome/components/button/* @esphome/core
 | 
			
		||||
esphome/components/bytebuffer/* @clydebarrow
 | 
			
		||||
esphome/components/canbus/* @danielschramm @mvturnho
 | 
			
		||||
esphome/components/cap1188/* @mreditor97
 | 
			
		||||
esphome/components/cap1188/* @MrEditor97
 | 
			
		||||
esphome/components/captive_portal/* @OttoWinter
 | 
			
		||||
esphome/components/ccs811/* @habbie
 | 
			
		||||
esphome/components/cd74hc4067/* @asoehlke
 | 
			
		||||
esphome/components/ch422g/* @clydebarrow @jesterret
 | 
			
		||||
esphome/components/chsc6x/* @kkosik20
 | 
			
		||||
esphome/components/climate/* @esphome/core
 | 
			
		||||
esphome/components/climate_ir/* @glmnet
 | 
			
		||||
esphome/components/color_temperature/* @jesserockz
 | 
			
		||||
esphome/components/combination/* @Cat-Ion @kahrendt
 | 
			
		||||
esphome/components/coolix/* @glmnet
 | 
			
		||||
esphome/components/copy/* @OttoWinter
 | 
			
		||||
esphome/components/cover/* @esphome/core
 | 
			
		||||
esphome/components/cs5460a/* @balrog-kun
 | 
			
		||||
esphome/components/cse7761/* @berfenger
 | 
			
		||||
esphome/components/cst226/* @clydebarrow
 | 
			
		||||
esphome/components/cst816/* @clydebarrow
 | 
			
		||||
esphome/components/ct_clamp/* @jesserockz
 | 
			
		||||
esphome/components/current_based/* @djwmarcx
 | 
			
		||||
esphome/components/dac7678/* @NickB1
 | 
			
		||||
esphome/components/daikin_arc/* @MagicBear
 | 
			
		||||
esphome/components/daikin_brc/* @hagak
 | 
			
		||||
esphome/components/dallas_temp/* @ssieb
 | 
			
		||||
esphome/components/daly_bms/* @s1lvi0
 | 
			
		||||
esphome/components/dashboard_import/* @esphome/core
 | 
			
		||||
esphome/components/datetime/* @jesserockz @rfdarter
 | 
			
		||||
esphome/components/debug/* @OttoWinter
 | 
			
		||||
esphome/components/delonghi/* @grob6000
 | 
			
		||||
esphome/components/dfplayer/* @glmnet
 | 
			
		||||
esphome/components/dfrobot_sen0395/* @niklasweber
 | 
			
		||||
esphome/components/dht/* @OttoWinter
 | 
			
		||||
esphome/components/display_menu_base/* @numo68
 | 
			
		||||
esphome/components/dps310/* @kbx81
 | 
			
		||||
esphome/components/ds1307/* @badbadc0ffee
 | 
			
		||||
esphome/components/dsmr/* @glmnet @zuidwijk
 | 
			
		||||
esphome/components/duty_time/* @dudanov
 | 
			
		||||
esphome/components/ee895/* @Stock-M
 | 
			
		||||
esphome/components/ektf2232/touchscreen/* @jesserockz
 | 
			
		||||
esphome/components/emc2101/* @ellull
 | 
			
		||||
esphome/components/emmeti/* @E440QF
 | 
			
		||||
esphome/components/ens160/* @latonita
 | 
			
		||||
esphome/components/ens160_base/* @latonita @vincentscode
 | 
			
		||||
esphome/components/ens160_i2c/* @latonita
 | 
			
		||||
esphome/components/ens160_spi/* @latonita
 | 
			
		||||
esphome/components/ens210/* @itn3rd77
 | 
			
		||||
esphome/components/es7210/* @kahrendt
 | 
			
		||||
esphome/components/es7243e/* @kbx81
 | 
			
		||||
esphome/components/es8156/* @kbx81
 | 
			
		||||
esphome/components/es8311/* @kahrendt @kroimon
 | 
			
		||||
esphome/components/esp32/* @esphome/core
 | 
			
		||||
esphome/components/esp32_ble/* @Rapsssito @jesserockz
 | 
			
		||||
esphome/components/esp32_ble_client/* @jesserockz
 | 
			
		||||
esphome/components/esp32_ble_server/* @Rapsssito @clydebarrow @jesserockz
 | 
			
		||||
esphome/components/esp32_ble/* @jesserockz
 | 
			
		||||
esphome/components/esp32_ble_server/* @jesserockz
 | 
			
		||||
esphome/components/esp32_camera_web_server/* @ayufan
 | 
			
		||||
esphome/components/esp32_can/* @Sympatron
 | 
			
		||||
esphome/components/esp32_improv/* @jesserockz
 | 
			
		||||
esphome/components/esp32_rmt/* @jesserockz
 | 
			
		||||
esphome/components/esp32_rmt_led_strip/* @jesserockz
 | 
			
		||||
esphome/components/esp8266/* @esphome/core
 | 
			
		||||
esphome/components/ethernet_info/* @gtjadsonsantos
 | 
			
		||||
esphome/components/event/* @nohat
 | 
			
		||||
esphome/components/event_emitter/* @Rapsssito
 | 
			
		||||
esphome/components/exposure_notifications/* @OttoWinter
 | 
			
		||||
esphome/components/ezo/* @ssieb
 | 
			
		||||
esphome/components/ezo_pmp/* @carlos-sarmiento
 | 
			
		||||
esphome/components/factory_reset/* @anatoly-savchenkov
 | 
			
		||||
esphome/components/fastled_base/* @OttoWinter
 | 
			
		||||
esphome/components/feedback/* @ianchi
 | 
			
		||||
esphome/components/fingerprint_grow/* @OnFreund @alexborro @loongyh
 | 
			
		||||
esphome/components/font/* @clydebarrow @esphome/core
 | 
			
		||||
esphome/components/fs3000/* @kahrendt
 | 
			
		||||
esphome/components/ft5x06/* @clydebarrow
 | 
			
		||||
esphome/components/ft63x6/* @gpambrozio
 | 
			
		||||
esphome/components/gcja5/* @gcormier
 | 
			
		||||
esphome/components/gdk101/* @Szewcson
 | 
			
		||||
esphome/components/fingerprint_grow/* @OnFreund @loongyh
 | 
			
		||||
esphome/components/globals/* @esphome/core
 | 
			
		||||
esphome/components/gp2y1010au0f/* @zry98
 | 
			
		||||
esphome/components/gp8403/* @jesserockz
 | 
			
		||||
esphome/components/gpio/* @esphome/core
 | 
			
		||||
esphome/components/gpio/one_wire/* @ssieb
 | 
			
		||||
esphome/components/gps/* @coogle
 | 
			
		||||
esphome/components/graph/* @synco
 | 
			
		||||
esphome/components/graphical_display_menu/* @MrMDavidson
 | 
			
		||||
esphome/components/gree/* @orestismers
 | 
			
		||||
esphome/components/grove_gas_mc_v2/* @YorkshireIoT
 | 
			
		||||
esphome/components/grove_tb6612fng/* @max246
 | 
			
		||||
esphome/components/growatt_solar/* @leeuwte
 | 
			
		||||
esphome/components/gt911/* @clydebarrow @jesserockz
 | 
			
		||||
esphome/components/haier/* @paveldn
 | 
			
		||||
esphome/components/haier/binary_sensor/* @paveldn
 | 
			
		||||
esphome/components/haier/button/* @paveldn
 | 
			
		||||
esphome/components/haier/sensor/* @paveldn
 | 
			
		||||
esphome/components/haier/switch/* @paveldn
 | 
			
		||||
esphome/components/haier/text_sensor/* @paveldn
 | 
			
		||||
esphome/components/havells_solar/* @sourabhjaiswal
 | 
			
		||||
esphome/components/hbridge/fan/* @WeekendWarrior
 | 
			
		||||
esphome/components/hbridge/light/* @DotNetDann
 | 
			
		||||
esphome/components/hbridge/switch/* @dwmw2
 | 
			
		||||
esphome/components/he60r/* @clydebarrow
 | 
			
		||||
esphome/components/heatpumpir/* @rob-deutsch
 | 
			
		||||
esphome/components/hitachi_ac424/* @sourabhjaiswal
 | 
			
		||||
esphome/components/hm3301/* @freekode
 | 
			
		||||
esphome/components/hmac_md5/* @dwmw2
 | 
			
		||||
esphome/components/homeassistant/* @OttoWinter @esphome/core
 | 
			
		||||
esphome/components/homeassistant/number/* @landonr
 | 
			
		||||
esphome/components/homeassistant/switch/* @Links2004
 | 
			
		||||
esphome/components/honeywell_hih_i2c/* @Benichou34
 | 
			
		||||
esphome/components/honeywellabp/* @RubyBailey
 | 
			
		||||
esphome/components/honeywellabp2_i2c/* @jpfaff
 | 
			
		||||
esphome/components/host/* @clydebarrow @esphome/core
 | 
			
		||||
esphome/components/host/time/* @clydebarrow
 | 
			
		||||
esphome/components/homeassistant/* @OttoWinter
 | 
			
		||||
esphome/components/hrxl_maxsonar_wr/* @netmikey
 | 
			
		||||
esphome/components/hte501/* @Stock-M
 | 
			
		||||
esphome/components/http_request/ota/* @oarcher
 | 
			
		||||
esphome/components/http_request/update/* @jesserockz
 | 
			
		||||
esphome/components/htu31d/* @betterengineering
 | 
			
		||||
esphome/components/hydreon_rgxx/* @functionpointer
 | 
			
		||||
esphome/components/hyt271/* @Philippe12
 | 
			
		||||
esphome/components/i2c/* @esphome/core
 | 
			
		||||
esphome/components/i2c_device/* @gabest11
 | 
			
		||||
esphome/components/i2s_audio/* @jesserockz
 | 
			
		||||
esphome/components/i2s_audio/media_player/* @jesserockz
 | 
			
		||||
esphome/components/i2s_audio/microphone/* @jesserockz
 | 
			
		||||
esphome/components/i2s_audio/speaker/* @jesserockz @kahrendt
 | 
			
		||||
esphome/components/iaqcore/* @yozik04
 | 
			
		||||
esphome/components/ili9xxx/* @clydebarrow @nielsnl68
 | 
			
		||||
esphome/components/improv_base/* @esphome/core
 | 
			
		||||
esphome/components/improv_serial/* @esphome/core
 | 
			
		||||
esphome/components/ina226/* @Sergio303 @latonita
 | 
			
		||||
esphome/components/ina260/* @mreditor97
 | 
			
		||||
esphome/components/ina2xx_base/* @latonita
 | 
			
		||||
esphome/components/ina2xx_i2c/* @latonita
 | 
			
		||||
esphome/components/ina2xx_spi/* @latonita
 | 
			
		||||
esphome/components/inkbird_ibsth1_mini/* @fkirill
 | 
			
		||||
esphome/components/inkplate6/* @jesserockz
 | 
			
		||||
esphome/components/integration/* @OttoWinter
 | 
			
		||||
esphome/components/internal_temperature/* @Mat931
 | 
			
		||||
esphome/components/interval/* @esphome/core
 | 
			
		||||
esphome/components/jsn_sr04t/* @Mafus1
 | 
			
		||||
esphome/components/json/* @OttoWinter
 | 
			
		||||
esphome/components/kamstrup_kmp/* @cfeenstra1024
 | 
			
		||||
esphome/components/key_collector/* @ssieb
 | 
			
		||||
esphome/components/key_provider/* @ssieb
 | 
			
		||||
esphome/components/kuntze/* @ssieb
 | 
			
		||||
esphome/components/lcd_menu/* @numo68
 | 
			
		||||
esphome/components/ld2410/* @regevbr @sebcaps
 | 
			
		||||
esphome/components/ld2420/* @descipher
 | 
			
		||||
esphome/components/ld2450/* @hareeshmu
 | 
			
		||||
esphome/components/ledc/* @OttoWinter
 | 
			
		||||
esphome/components/libretiny/* @kuba2k2
 | 
			
		||||
esphome/components/libretiny_pwm/* @kuba2k2
 | 
			
		||||
esphome/components/light/* @esphome/core
 | 
			
		||||
esphome/components/lightwaverf/* @max246
 | 
			
		||||
esphome/components/lilygo_t5_47/touchscreen/* @jesserockz
 | 
			
		||||
esphome/components/lock/* @esphome/core
 | 
			
		||||
esphome/components/logger/* @esphome/core
 | 
			
		||||
esphome/components/logger/select/* @clydebarrow
 | 
			
		||||
esphome/components/ltr390/* @latonita @sjtrny
 | 
			
		||||
esphome/components/ltr501/* @latonita
 | 
			
		||||
esphome/components/ltr_als_ps/* @latonita
 | 
			
		||||
esphome/components/lvgl/* @clydebarrow
 | 
			
		||||
esphome/components/m5stack_8angle/* @rnauber
 | 
			
		||||
esphome/components/matrix_keypad/* @ssieb
 | 
			
		||||
esphome/components/max17043/* @blacknell
 | 
			
		||||
esphome/components/max31865/* @DAVe3283
 | 
			
		||||
esphome/components/max44009/* @berfenger
 | 
			
		||||
esphome/components/max6956/* @looping40
 | 
			
		||||
esphome/components/ltr390/* @sjtrny
 | 
			
		||||
esphome/components/max7219digit/* @rspaargaren
 | 
			
		||||
esphome/components/max9611/* @mckaymatthew
 | 
			
		||||
esphome/components/mcp23008/* @jesserockz
 | 
			
		||||
esphome/components/mcp23017/* @jesserockz
 | 
			
		||||
esphome/components/mcp23s08/* @SenexCrenshaw @jesserockz
 | 
			
		||||
@@ -265,140 +93,61 @@ esphome/components/mcp23x08_base/* @jesserockz
 | 
			
		||||
esphome/components/mcp23x17_base/* @jesserockz
 | 
			
		||||
esphome/components/mcp23xxx_base/* @jesserockz
 | 
			
		||||
esphome/components/mcp2515/* @danielschramm @mvturnho
 | 
			
		||||
esphome/components/mcp3204/* @rsumner
 | 
			
		||||
esphome/components/mcp4728/* @berfenger
 | 
			
		||||
esphome/components/mcp47a1/* @jesserockz
 | 
			
		||||
esphome/components/mcp9600/* @mreditor97
 | 
			
		||||
esphome/components/mcp9808/* @k7hpn
 | 
			
		||||
esphome/components/md5/* @esphome/core
 | 
			
		||||
esphome/components/mdns/* @esphome/core
 | 
			
		||||
esphome/components/media_player/* @jesserockz
 | 
			
		||||
esphome/components/micro_wake_word/* @jesserockz @kahrendt
 | 
			
		||||
esphome/components/micronova/* @jorre05
 | 
			
		||||
esphome/components/microphone/* @jesserockz
 | 
			
		||||
esphome/components/mics_4514/* @jesserockz
 | 
			
		||||
esphome/components/midea/* @dudanov
 | 
			
		||||
esphome/components/midea_ir/* @dudanov
 | 
			
		||||
esphome/components/mitsubishi/* @RubyBailey
 | 
			
		||||
esphome/components/mixer/speaker/* @kahrendt
 | 
			
		||||
esphome/components/mlx90393/* @functionpointer
 | 
			
		||||
esphome/components/mlx90614/* @jesserockz
 | 
			
		||||
esphome/components/mmc5603/* @benhoff
 | 
			
		||||
esphome/components/mmc5983/* @agoode
 | 
			
		||||
esphome/components/modbus_controller/* @martgras
 | 
			
		||||
esphome/components/modbus_controller/binary_sensor/* @martgras
 | 
			
		||||
esphome/components/modbus_controller/number/* @martgras
 | 
			
		||||
esphome/components/modbus_controller/output/* @martgras
 | 
			
		||||
esphome/components/modbus_controller/select/* @martgras @stegm
 | 
			
		||||
esphome/components/modbus_controller/sensor/* @martgras
 | 
			
		||||
esphome/components/modbus_controller/switch/* @martgras
 | 
			
		||||
esphome/components/modbus_controller/text_sensor/* @martgras
 | 
			
		||||
esphome/components/mopeka_ble/* @Fabian-Schmidt @spbrogan
 | 
			
		||||
esphome/components/mopeka_pro_check/* @spbrogan
 | 
			
		||||
esphome/components/mopeka_std_check/* @Fabian-Schmidt
 | 
			
		||||
esphome/components/mpl3115a2/* @kbickar
 | 
			
		||||
esphome/components/mpu6886/* @fabaff
 | 
			
		||||
esphome/components/ms8607/* @e28eta
 | 
			
		||||
esphome/components/msa3xx/* @latonita
 | 
			
		||||
esphome/components/nau7802/* @cujomalainey
 | 
			
		||||
esphome/components/network/* @esphome/core
 | 
			
		||||
esphome/components/nextion/* @edwardtfn @senexcrenshaw
 | 
			
		||||
esphome/components/nextion/* @senexcrenshaw
 | 
			
		||||
esphome/components/nextion/binary_sensor/* @senexcrenshaw
 | 
			
		||||
esphome/components/nextion/sensor/* @senexcrenshaw
 | 
			
		||||
esphome/components/nextion/switch/* @senexcrenshaw
 | 
			
		||||
esphome/components/nextion/text_sensor/* @senexcrenshaw
 | 
			
		||||
esphome/components/nfc/* @jesserockz @kbx81
 | 
			
		||||
esphome/components/noblex/* @AGalfra
 | 
			
		||||
esphome/components/npi19/* @bakerkj
 | 
			
		||||
esphome/components/nfc/* @jesserockz
 | 
			
		||||
esphome/components/number/* @esphome/core
 | 
			
		||||
esphome/components/one_wire/* @ssieb
 | 
			
		||||
esphome/components/online_image/* @clydebarrow @guillempages
 | 
			
		||||
esphome/components/opentherm/* @olegtarasov
 | 
			
		||||
esphome/components/ota/* @esphome/core
 | 
			
		||||
esphome/components/output/* @esphome/core
 | 
			
		||||
esphome/components/pca6416a/* @Mat931
 | 
			
		||||
esphome/components/pca9554/* @clydebarrow @hwstar
 | 
			
		||||
esphome/components/pcf85063/* @brogon
 | 
			
		||||
esphome/components/pcf8563/* @KoenBreeman
 | 
			
		||||
esphome/components/pid/* @OttoWinter
 | 
			
		||||
esphome/components/pipsolar/* @andreashergert1984
 | 
			
		||||
esphome/components/pm1006/* @habbie
 | 
			
		||||
esphome/components/pmsa003i/* @sjtrny
 | 
			
		||||
esphome/components/pmwcs3/* @SeByDocKy
 | 
			
		||||
esphome/components/pn532/* @OttoWinter @jesserockz
 | 
			
		||||
esphome/components/pn532_i2c/* @OttoWinter @jesserockz
 | 
			
		||||
esphome/components/pn532_spi/* @OttoWinter @jesserockz
 | 
			
		||||
esphome/components/pn7150/* @jesserockz @kbx81
 | 
			
		||||
esphome/components/pn7150_i2c/* @jesserockz @kbx81
 | 
			
		||||
esphome/components/pn7160/* @jesserockz @kbx81
 | 
			
		||||
esphome/components/pn7160_i2c/* @jesserockz @kbx81
 | 
			
		||||
esphome/components/pn7160_spi/* @jesserockz @kbx81
 | 
			
		||||
esphome/components/power_supply/* @esphome/core
 | 
			
		||||
esphome/components/preferences/* @esphome/core
 | 
			
		||||
esphome/components/psram/* @esphome/core
 | 
			
		||||
esphome/components/pulse_meter/* @TrentHouliston @cstaahl @stevebaxter
 | 
			
		||||
esphome/components/pulse_meter/* @stevebaxter
 | 
			
		||||
esphome/components/pvvx_mithermometer/* @pasiz
 | 
			
		||||
esphome/components/pylontech/* @functionpointer
 | 
			
		||||
esphome/components/qmp6988/* @andrewpc
 | 
			
		||||
esphome/components/qr_code/* @wjtje
 | 
			
		||||
esphome/components/qspi_dbi/* @clydebarrow
 | 
			
		||||
esphome/components/qwiic_pir/* @kahrendt
 | 
			
		||||
esphome/components/radon_eye_ble/* @jeffeb3
 | 
			
		||||
esphome/components/radon_eye_rd200/* @jeffeb3
 | 
			
		||||
esphome/components/rc522/* @glmnet
 | 
			
		||||
esphome/components/rc522_i2c/* @glmnet
 | 
			
		||||
esphome/components/rc522_spi/* @glmnet
 | 
			
		||||
esphome/components/resampler/speaker/* @kahrendt
 | 
			
		||||
esphome/components/restart/* @esphome/core
 | 
			
		||||
esphome/components/rf_bridge/* @jesserockz
 | 
			
		||||
esphome/components/rgbct/* @jesserockz
 | 
			
		||||
esphome/components/rp2040/* @jesserockz
 | 
			
		||||
esphome/components/rp2040_pio_led_strip/* @Papa-DMan
 | 
			
		||||
esphome/components/rp2040_pwm/* @jesserockz
 | 
			
		||||
esphome/components/rpi_dpi_rgb/* @clydebarrow
 | 
			
		||||
esphome/components/rtl87xx/* @kuba2k2
 | 
			
		||||
esphome/components/rtttl/* @glmnet
 | 
			
		||||
esphome/components/safe_mode/* @jsuanet @kbx81 @paulmonigatti
 | 
			
		||||
esphome/components/scd4x/* @martgras @sjtrny
 | 
			
		||||
esphome/components/safe_mode/* @jsuanet @paulmonigatti
 | 
			
		||||
esphome/components/scd4x/* @sjtrny
 | 
			
		||||
esphome/components/script/* @esphome/core
 | 
			
		||||
esphome/components/sdl/* @bdm310 @clydebarrow
 | 
			
		||||
esphome/components/sdm_meter/* @jesserockz @polyfaces
 | 
			
		||||
esphome/components/sdp3x/* @Azimath
 | 
			
		||||
esphome/components/seeed_mr24hpc1/* @limengdu
 | 
			
		||||
esphome/components/seeed_mr60bha2/* @limengdu
 | 
			
		||||
esphome/components/seeed_mr60fda2/* @limengdu
 | 
			
		||||
esphome/components/selec_meter/* @sourabhjaiswal
 | 
			
		||||
esphome/components/select/* @esphome/core
 | 
			
		||||
esphome/components/sen0321/* @notjj
 | 
			
		||||
esphome/components/sen21231/* @shreyaskarnik
 | 
			
		||||
esphome/components/sen5x/* @martgras
 | 
			
		||||
esphome/components/sensirion_common/* @martgras
 | 
			
		||||
esphome/components/sensor/* @esphome/core
 | 
			
		||||
esphome/components/sfa30/* @ghsensdev
 | 
			
		||||
esphome/components/sgp40/* @SenexCrenshaw
 | 
			
		||||
esphome/components/sgp4x/* @SenexCrenshaw @martgras
 | 
			
		||||
esphome/components/shelly_dimmer/* @edge90 @rnauber
 | 
			
		||||
esphome/components/sht3xd/* @mrtoy-me
 | 
			
		||||
esphome/components/sht4x/* @sjtrny
 | 
			
		||||
esphome/components/shutdown/* @esphome/core @jsuanet
 | 
			
		||||
esphome/components/sigma_delta_output/* @Cat-Ion
 | 
			
		||||
esphome/components/sim800l/* @glmnet
 | 
			
		||||
esphome/components/sm10bit_base/* @Cossid
 | 
			
		||||
esphome/components/sm2135/* @BoukeHaarsma23 @dd32 @matika77
 | 
			
		||||
esphome/components/sm2235/* @Cossid
 | 
			
		||||
esphome/components/sm2335/* @Cossid
 | 
			
		||||
esphome/components/sml/* @alengwenus
 | 
			
		||||
esphome/components/smt100/* @piechade
 | 
			
		||||
esphome/components/sn74hc165/* @jesserockz
 | 
			
		||||
esphome/components/sm2135/* @BoukeHaarsma23
 | 
			
		||||
esphome/components/socket/* @esphome/core
 | 
			
		||||
esphome/components/sonoff_d1/* @anatoly-savchenkov
 | 
			
		||||
esphome/components/speaker/* @jesserockz @kahrendt
 | 
			
		||||
esphome/components/speaker/media_player/* @kahrendt @synesthesiam
 | 
			
		||||
esphome/components/spi/* @clydebarrow @esphome/core
 | 
			
		||||
esphome/components/spi_device/* @clydebarrow
 | 
			
		||||
esphome/components/spi_led_strip/* @clydebarrow
 | 
			
		||||
esphome/components/sprinkler/* @kbx81
 | 
			
		||||
esphome/components/sps30/* @martgras
 | 
			
		||||
esphome/components/spi/* @esphome/core
 | 
			
		||||
esphome/components/ssd1322_base/* @kbx81
 | 
			
		||||
esphome/components/ssd1322_spi/* @kbx81
 | 
			
		||||
esphome/components/ssd1325_base/* @kbx81
 | 
			
		||||
@@ -410,101 +159,36 @@ esphome/components/ssd1331_base/* @kbx81
 | 
			
		||||
esphome/components/ssd1331_spi/* @kbx81
 | 
			
		||||
esphome/components/ssd1351_base/* @kbx81
 | 
			
		||||
esphome/components/ssd1351_spi/* @kbx81
 | 
			
		||||
esphome/components/st7567_base/* @latonita
 | 
			
		||||
esphome/components/st7567_i2c/* @latonita
 | 
			
		||||
esphome/components/st7567_spi/* @latonita
 | 
			
		||||
esphome/components/st7701s/* @clydebarrow
 | 
			
		||||
esphome/components/st7735/* @SenexCrenshaw
 | 
			
		||||
esphome/components/st7789v/* @kbx81
 | 
			
		||||
esphome/components/st7920/* @marsjan155
 | 
			
		||||
esphome/components/statsd/* @Links2004
 | 
			
		||||
esphome/components/substitutions/* @esphome/core
 | 
			
		||||
esphome/components/sun/* @OttoWinter
 | 
			
		||||
esphome/components/sun_gtil2/* @Mat931
 | 
			
		||||
esphome/components/switch/* @esphome/core
 | 
			
		||||
esphome/components/switch/binary_sensor/* @ssieb
 | 
			
		||||
esphome/components/t6615/* @tylermenezes
 | 
			
		||||
esphome/components/tc74/* @sethgirvan
 | 
			
		||||
esphome/components/tca9548a/* @andreashergert1984
 | 
			
		||||
esphome/components/tca9555/* @mobrembski
 | 
			
		||||
esphome/components/tcl112/* @glmnet
 | 
			
		||||
esphome/components/tee501/* @Stock-M
 | 
			
		||||
esphome/components/teleinfo/* @0hax
 | 
			
		||||
esphome/components/tem3200/* @bakerkj
 | 
			
		||||
esphome/components/template/alarm_control_panel/* @grahambrown11 @hwstar
 | 
			
		||||
esphome/components/template/datetime/* @rfdarter
 | 
			
		||||
esphome/components/template/event/* @nohat
 | 
			
		||||
esphome/components/template/fan/* @ssieb
 | 
			
		||||
esphome/components/text/* @mauritskorse
 | 
			
		||||
esphome/components/thermostat/* @kbx81
 | 
			
		||||
esphome/components/time/* @OttoWinter
 | 
			
		||||
esphome/components/tlc5947/* @rnauber
 | 
			
		||||
esphome/components/tlc5971/* @IJIJI
 | 
			
		||||
esphome/components/tm1621/* @Philippe12
 | 
			
		||||
esphome/components/tm1637/* @glmnet
 | 
			
		||||
esphome/components/tm1638/* @skykingjwc
 | 
			
		||||
esphome/components/tm1651/* @freekode
 | 
			
		||||
esphome/components/tmp102/* @timsavage
 | 
			
		||||
esphome/components/tmp1075/* @sybrenstuvel
 | 
			
		||||
esphome/components/tmp117/* @Azimath
 | 
			
		||||
esphome/components/tof10120/* @wstrzalka
 | 
			
		||||
esphome/components/tormatic/* @ti-mo
 | 
			
		||||
esphome/components/toshiba/* @kbx81
 | 
			
		||||
esphome/components/touchscreen/* @jesserockz @nielsnl68
 | 
			
		||||
esphome/components/tsl2591/* @wjcarpenter
 | 
			
		||||
esphome/components/tt21100/* @kroimon
 | 
			
		||||
esphome/components/tuya/binary_sensor/* @jesserockz
 | 
			
		||||
esphome/components/tuya/climate/* @jesserockz
 | 
			
		||||
esphome/components/tuya/number/* @frankiboy1
 | 
			
		||||
esphome/components/tuya/select/* @bearpawmaxim
 | 
			
		||||
esphome/components/tuya/sensor/* @jesserockz
 | 
			
		||||
esphome/components/tuya/switch/* @jesserockz
 | 
			
		||||
esphome/components/tuya/text_sensor/* @dentra
 | 
			
		||||
esphome/components/uart/* @esphome/core
 | 
			
		||||
esphome/components/uart/button/* @ssieb
 | 
			
		||||
esphome/components/udp/* @clydebarrow
 | 
			
		||||
esphome/components/ufire_ec/* @pvizeli
 | 
			
		||||
esphome/components/ufire_ise/* @pvizeli
 | 
			
		||||
esphome/components/ultrasonic/* @OttoWinter
 | 
			
		||||
esphome/components/update/* @jesserockz
 | 
			
		||||
esphome/components/uponor_smatrix/* @kroimon
 | 
			
		||||
esphome/components/valve/* @esphome/core
 | 
			
		||||
esphome/components/vbus/* @ssieb
 | 
			
		||||
esphome/components/veml3235/* @kbx81
 | 
			
		||||
esphome/components/veml7700/* @latonita
 | 
			
		||||
esphome/components/version/* @esphome/core
 | 
			
		||||
esphome/components/voice_assistant/* @jesserockz
 | 
			
		||||
esphome/components/wake_on_lan/* @clydebarrow @willwill2will54
 | 
			
		||||
esphome/components/watchdog/* @oarcher
 | 
			
		||||
esphome/components/waveshare_epaper/* @clydebarrow
 | 
			
		||||
esphome/components/web_server_base/* @OttoWinter
 | 
			
		||||
esphome/components/web_server_idf/* @dentra
 | 
			
		||||
esphome/components/weikai/* @DrCoolZic
 | 
			
		||||
esphome/components/weikai_i2c/* @DrCoolZic
 | 
			
		||||
esphome/components/weikai_spi/* @DrCoolZic
 | 
			
		||||
esphome/components/whirlpool/* @glmnet
 | 
			
		||||
esphome/components/whynter/* @aeonsablaze
 | 
			
		||||
esphome/components/wiegand/* @ssieb
 | 
			
		||||
esphome/components/wireguard/* @droscy @lhoracek @thomas0bernard
 | 
			
		||||
esphome/components/wk2132_i2c/* @DrCoolZic
 | 
			
		||||
esphome/components/wk2132_spi/* @DrCoolZic
 | 
			
		||||
esphome/components/wk2168_i2c/* @DrCoolZic
 | 
			
		||||
esphome/components/wk2168_spi/* @DrCoolZic
 | 
			
		||||
esphome/components/wk2204_i2c/* @DrCoolZic
 | 
			
		||||
esphome/components/wk2204_spi/* @DrCoolZic
 | 
			
		||||
esphome/components/wk2212_i2c/* @DrCoolZic
 | 
			
		||||
esphome/components/wk2212_spi/* @DrCoolZic
 | 
			
		||||
esphome/components/wl_134/* @hobbypunk90
 | 
			
		||||
esphome/components/x9c/* @EtienneMD
 | 
			
		||||
esphome/components/xgzp68xx/* @gcormier
 | 
			
		||||
esphome/components/xiaomi_hhccjcy10/* @fariouche
 | 
			
		||||
esphome/components/xiaomi_lywsd02mmc/* @juanluss31
 | 
			
		||||
esphome/components/xiaomi_lywsd03mmc/* @ahpohl
 | 
			
		||||
esphome/components/xiaomi_mhoc303/* @drug123
 | 
			
		||||
esphome/components/xiaomi_mhoc401/* @vevsvevs
 | 
			
		||||
esphome/components/xiaomi_rtcgq02lm/* @jesserockz
 | 
			
		||||
esphome/components/xl9535/* @mreditor97
 | 
			
		||||
esphome/components/xpt2046/touchscreen/* @nielsnl68 @numo68
 | 
			
		||||
esphome/components/xxtea/* @clydebarrow
 | 
			
		||||
esphome/components/zhlt01/* @cfeenstra1024
 | 
			
		||||
esphome/components/zio_ultrasonic/* @kahrendt
 | 
			
		||||
esphome/components/xpt2046/* @numo68
 | 
			
		||||
 
 | 
			
		||||
@@ -1,14 +1,14 @@
# Contributing to ESPHome [](https://discord.gg/KhAMKrd) [](https://GitHub.com/esphome/esphome/releases/)
# Contributing to ESPHome

We welcome contributions to the ESPHome suite of code and documentation!
For a detailed guide, please see https://esphome.io/guides/contributing.html#contributing-to-esphome

Please read our [contributing guide](https://esphome.io/guides/contributing.html) if you wish to contribute to the
project and be sure to join us on [Discord](https://discord.gg/KhAMKrd).
Things to note when contributing:

**See also:**

[Documentation](https://esphome.io) -- [Issues](https://github.com/esphome/issues/issues) -- [Feature requests](https://github.com/esphome/feature-requests/issues)

---

[](https://www.openhomefoundation.org/)
 - Please test your changes :)
 - If a new feature is added or an existing user-facing feature is changed, you should also
   update the [docs](https://github.com/esphome/esphome-docs). See [contributing to esphome-docs](https://esphome.io/guides/contributing.html#contributing-to-esphomedocs)
   for more information.
 - Please also update the tests in the `tests/` folder. You can do so by just adding a line in one of the YAML files
   which checks if your new feature compiles correctly (see the sketch after this list).
 - Sometimes I will let pull requests linger because I'm not 100% sure about them. Please feel free to ping
   me after some time.
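
A minimal sketch of what such a test addition might look like, assuming a hypothetical `my_component` sensor platform and that the files under `tests/` are ordinary ESPHome YAML configurations; the platform name and options below are placeholders, not a real component:

```yaml
# tests/test1.yaml (excerpt) — hypothetical entry for a new sensor platform
sensor:
  - platform: my_component
    name: "My Component Test"
    update_interval: 60s
```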
 
@@ -1,6 +1,7 @@
include LICENSE
include README.md
include requirements.txt
recursive-include esphome *.cpp *.h *.tcc *.c
recursive-include esphome *.py.script
include esphome/dashboard/templates/*.html
recursive-include esphome/dashboard/static *.ico *.js *.css *.woff* LICENSE
recursive-include esphome *.cpp *.h *.tcc
recursive-include esphome LICENSE.txt
README.md
@@ -1,16 +1,9 @@
# ESPHome [](https://discord.gg/KhAMKrd) [](https://GitHub.com/esphome/esphome/releases/)

<a href="https://esphome.io/">
  <picture>
    <source media="(prefers-color-scheme: dark)" srcset="https://esphome.io/_static/logo-text-on-dark.svg", alt="ESPHome Logo">
    <img src="https://esphome.io/_static/logo-text-on-light.svg" alt="ESPHome Logo">
  </picture>
</a>
[](https://esphome.io/)

---
**Documentation:** https://esphome.io/

[Documentation](https://esphome.io) -- [Issues](https://github.com/esphome/issues/issues) -- [Feature requests](https://github.com/esphome/feature-requests/issues)
For issues, please go to [the issue tracker](https://github.com/esphome/issues/issues).

---

[](https://www.openhomefoundation.org/)
For feature requests, please see [feature requests](https://github.com/esphome/feature-requests/issues).
@@ -5,41 +5,29 @@
 | 
			
		||||
# One of "docker", "hassio"
 | 
			
		||||
ARG BASEIMGTYPE=docker
 | 
			
		||||
 | 
			
		||||
FROM ghcr.io/hassio-addons/debian-base/amd64:5.1.1 AS base-hassio-amd64
 | 
			
		||||
FROM ghcr.io/hassio-addons/debian-base/aarch64:5.1.1 AS base-hassio-arm64
 | 
			
		||||
FROM ghcr.io/hassio-addons/debian-base/armv7:5.1.1 AS base-hassio-armv7
 | 
			
		||||
FROM debian:bullseye-20211011-slim AS base-docker-amd64
 | 
			
		||||
FROM debian:bullseye-20211011-slim AS base-docker-arm64
 | 
			
		||||
FROM debian:bullseye-20211011-slim AS base-docker-armv7
 | 
			
		||||
 | 
			
		||||
# https://github.com/hassio-addons/addon-debian-base/releases
 | 
			
		||||
FROM ghcr.io/hassio-addons/debian-base:7.2.0 AS base-hassio
 | 
			
		||||
# https://hub.docker.com/_/debian?tab=tags&page=1&name=bookworm
 | 
			
		||||
FROM debian:12.2-slim AS base-docker
 | 
			
		||||
 | 
			
		||||
FROM base-${BASEIMGTYPE} AS base
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
ARG TARGETARCH
 | 
			
		||||
ARG TARGETVARIANT
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Note that --break-system-packages is used below because
 | 
			
		||||
# https://peps.python.org/pep-0668/ added a safety check that prevents
 | 
			
		||||
# installing packages with the same name as a system package. This is
 | 
			
		||||
# not a problem for us because we are not concerned about overwriting
 | 
			
		||||
# system packages because we are running in an isolated container.
 | 
			
		||||
# Use TARGETARCH/TARGETVARIANT defined by docker
 | 
			
		||||
# https://docs.docker.com/engine/reference/builder/#automatic-platform-args-in-the-global-scope
 | 
			
		||||
FROM base-${BASEIMGTYPE}-${TARGETARCH}${TARGETVARIANT} AS base
 | 
			
		||||
 | 
			
		||||
RUN \
 | 
			
		||||
    apt-get update \
 | 
			
		||||
    # Use pinned versions so that we get updates with build caching
 | 
			
		||||
    && apt-get install -y --no-install-recommends \
 | 
			
		||||
        python3-pip=23.0.1+dfsg-1 \
 | 
			
		||||
        python3-setuptools=66.1.1-1+deb12u1 \
 | 
			
		||||
        python3-venv=3.11.2-1+b1 \
 | 
			
		||||
        python3-wheel=0.38.4-2 \
 | 
			
		||||
        iputils-ping=3:20221126-1+deb12u1 \
 | 
			
		||||
        git=1:2.39.5-0+deb12u2 \
 | 
			
		||||
        curl=7.88.1-10+deb12u12 \
 | 
			
		||||
        openssh-client=1:9.2p1-2+deb12u5 \
 | 
			
		||||
        python3-cffi=1.15.1-5 \
 | 
			
		||||
        libcairo2=1.16.0-7 \
 | 
			
		||||
        libmagic1=1:5.44-3 \
 | 
			
		||||
        patch=2.7.6-7 \
 | 
			
		||||
        python3=3.9.2-3 \
 | 
			
		||||
        python3-pip=20.3.4-4 \
 | 
			
		||||
        python3-setuptools=52.0.0-4 \
 | 
			
		||||
        python3-pil=8.1.2+dfsg-0.3 \
 | 
			
		||||
        python3-cryptography=3.3.2-1 \
 | 
			
		||||
        iputils-ping=3:20210202-1 \
 | 
			
		||||
        git=1:2.30.2-1 \
 | 
			
		||||
        curl=7.74.0-1.3+b1 \
 | 
			
		||||
    && rm -rf \
 | 
			
		||||
        /tmp/* \
 | 
			
		||||
        /var/{cache,log}/* \
 | 
			
		||||
@@ -52,70 +40,31 @@ ENV \
 | 
			
		||||
  PLATFORMIO_GLOBALLIB_DIR=/piolibs
 | 
			
		||||
 | 
			
		||||
RUN \
 | 
			
		||||
    pip3 install \
 | 
			
		||||
    --break-system-packages --no-cache-dir \
 | 
			
		||||
    # Keep platformio version in sync with requirements.txt
 | 
			
		||||
    platformio==6.1.16 \
 | 
			
		||||
    # Ubuntu python3-pip is missing wheel
 | 
			
		||||
    pip3 install --no-cache-dir \
 | 
			
		||||
        wheel==0.36.2 \
 | 
			
		||||
        platformio==5.2.2 \
 | 
			
		||||
    # Change some platformio settings
 | 
			
		||||
    && platformio settings set enable_telemetry No \
 | 
			
		||||
    && platformio settings set check_libraries_interval 1000000 \
 | 
			
		||||
    && platformio settings set check_platformio_interval 1000000 \
 | 
			
		||||
    && platformio settings set check_platforms_interval 1000000 \
 | 
			
		||||
    && mkdir -p /piolibs
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# First install requirements to leverage caching when requirements don't change
 | 
			
		||||
# tmpfs is for https://github.com/rust-lang/cargo/issues/8719
 | 
			
		||||
 | 
			
		||||
COPY requirements.txt requirements_optional.txt /
 | 
			
		||||
RUN --mount=type=tmpfs,target=/root/.cargo <<END-OF-RUN
 | 
			
		||||
# Fail on any non-zero status
 | 
			
		||||
set -e
 | 
			
		||||
 | 
			
		||||
# install build tools in case wheels are not available
 | 
			
		||||
BUILD_DEPS="
 | 
			
		||||
    build-essential=12.9
 | 
			
		||||
    python3-dev=3.11.2-1+b1
 | 
			
		||||
    zlib1g-dev=1:1.2.13.dfsg-1
 | 
			
		||||
    libjpeg-dev=1:2.1.5-2
 | 
			
		||||
    libfreetype-dev=2.12.1+dfsg-5+deb12u4
 | 
			
		||||
    libssl-dev=3.0.15-1~deb12u1
 | 
			
		||||
    libffi-dev=3.4.4-1
 | 
			
		||||
    cargo=0.66.0+ds1-1
 | 
			
		||||
    pkg-config=1.8.1-1
 | 
			
		||||
"
 | 
			
		||||
LIB_DEPS="
 | 
			
		||||
    libtiff6=4.5.0-6+deb12u1
 | 
			
		||||
    libopenjp2-7=2.5.0-2+deb12u1
 | 
			
		||||
"
 | 
			
		||||
if [ "$TARGETARCH$TARGETVARIANT" = "arm64" ]
 | 
			
		||||
then
 | 
			
		||||
    apt-get update
 | 
			
		||||
    apt-get install -y --no-install-recommends $BUILD_DEPS $LIB_DEPS
 | 
			
		||||
fi
 | 
			
		||||
 | 
			
		||||
CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse CARGO_HOME=/root/.cargo
 | 
			
		||||
pip3 install --break-system-packages --no-cache-dir -r /requirements.txt -r /requirements_optional.txt
 | 
			
		||||
 | 
			
		||||
if [ "$TARGETARCH$TARGETVARIANT" = "arm64" ]
 | 
			
		||||
then
 | 
			
		||||
    apt-get remove -y --purge --auto-remove $BUILD_DEPS
 | 
			
		||||
    rm -rf /tmp/* /var/{cache,log}/* /var/lib/apt/lists/*
 | 
			
		||||
fi
 | 
			
		||||
END-OF-RUN
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
COPY script/platformio_install_deps.py platformio.ini /
 | 
			
		||||
RUN /platformio_install_deps.py /platformio.ini --libraries
 | 
			
		||||
 | 
			
		||||
# Avoid unsafe git error when container user and file config volume permissions don't match
 | 
			
		||||
RUN git config --system --add safe.directory '*'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# ======================= docker-type image =======================
 | 
			
		||||
FROM base AS docker
 | 
			
		||||
 | 
			
		||||
# First install requirements to leverage caching when requirements don't change
 | 
			
		||||
COPY requirements.txt requirements_optional.txt docker/platformio_install_deps.py platformio.ini /
 | 
			
		||||
RUN \
 | 
			
		||||
    pip3 install --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
 | 
			
		||||
    && /platformio_install_deps.py /platformio.ini
 | 
			
		||||
 | 
			
		||||
# Copy esphome and install
 | 
			
		||||
COPY . /esphome
 | 
			
		||||
RUN pip3 install --break-system-packages --no-cache-dir -e /esphome
 | 
			
		||||
RUN pip3 install --no-cache-dir -e /esphome
 | 
			
		||||
 | 
			
		||||
# Settings for dashboard
 | 
			
		||||
ENV USERNAME="" PASSWORD=""
 | 
			
		||||
@@ -123,10 +72,6 @@ ENV USERNAME="" PASSWORD=""
 | 
			
		||||
# Expose the dashboard to Docker
 | 
			
		||||
EXPOSE 6052
 | 
			
		||||
 | 
			
		||||
# Run healthcheck (heartbeat)
 | 
			
		||||
HEALTHCHECK --interval=30s --timeout=30s \
 | 
			
		||||
  CMD curl --fail http://localhost:6052/version -A "HealthCheck" || exit 1
 | 
			
		||||
 | 
			
		||||
COPY docker/docker_entrypoint.sh /entrypoint.sh
 | 
			
		||||
 | 
			
		||||
# The directory the user should mount their configuration files to
 | 
			
		||||
@@ -139,18 +84,6 @@ ENTRYPOINT ["/entrypoint.sh"]
 | 
			
		||||
CMD ["dashboard", "/config"]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
ARG BUILD_VERSION=dev
 | 
			
		||||
 | 
			
		||||
# Labels
 | 
			
		||||
LABEL \
 | 
			
		||||
    org.opencontainers.image.authors="The ESPHome Authors" \
 | 
			
		||||
    org.opencontainers.image.title="ESPHome" \
 | 
			
		||||
    org.opencontainers.image.description="ESPHome is a system to configure your microcontrollers by simple yet powerful configuration files and control them remotely through Home Automation systems" \
 | 
			
		||||
    org.opencontainers.image.url="https://esphome.io/" \
 | 
			
		||||
    org.opencontainers.image.documentation="https://esphome.io/" \
 | 
			
		||||
    org.opencontainers.image.source="https://github.com/esphome/esphome" \
 | 
			
		||||
    org.opencontainers.image.licenses="ESPHome" \
 | 
			
		||||
    org.opencontainers.image.version=${BUILD_VERSION}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# ======================= hassio-type image =======================
 | 
			
		||||
@@ -160,7 +93,7 @@ RUN \
 | 
			
		||||
    apt-get update \
 | 
			
		||||
    # Use pinned versions so that we get updates with build caching
 | 
			
		||||
    && apt-get install -y --no-install-recommends \
 | 
			
		||||
        nginx-light=1.22.1-9+deb12u1 \
 | 
			
		||||
        nginx=1.18.0-6.1 \
 | 
			
		||||
    && rm -rf \
 | 
			
		||||
        /tmp/* \
 | 
			
		||||
        /var/{cache,log}/* \
 | 
			
		||||
@@ -169,16 +102,22 @@ RUN \
 | 
			
		||||
ARG BUILD_VERSION=dev
 | 
			
		||||
 | 
			
		||||
# Copy root filesystem
 | 
			
		||||
COPY docker/ha-addon-rootfs/ /
 | 
			
		||||
COPY docker/hassio-rootfs/ /
 | 
			
		||||
 | 
			
		||||
# First install requirements to leverage caching when requirements don't change
 | 
			
		||||
COPY requirements.txt requirements_optional.txt docker/platformio_install_deps.py platformio.ini /
 | 
			
		||||
RUN \
 | 
			
		||||
    pip3 install --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
 | 
			
		||||
    && /platformio_install_deps.py /platformio.ini
 | 
			
		||||
 | 
			
		||||
# Copy esphome and install
 | 
			
		||||
COPY . /esphome
 | 
			
		||||
RUN pip3 install --break-system-packages --no-cache-dir -e /esphome
 | 
			
		||||
RUN pip3 install --no-cache-dir -e /esphome
 | 
			
		||||
 | 
			
		||||
# Labels
 | 
			
		||||
LABEL \
 | 
			
		||||
    io.hass.name="ESPHome" \
 | 
			
		||||
    io.hass.description="ESPHome is a system to configure your microcontrollers by simple yet powerful configuration files and control them remotely through Home Automation systems" \
 | 
			
		||||
    io.hass.description="Manage and program ESP8266/ESP32 microcontrollers through YAML configuration files" \
 | 
			
		||||
    io.hass.type="addon" \
 | 
			
		||||
    io.hass.version="${BUILD_VERSION}"
 | 
			
		||||
    # io.hass.arch is inherited from addon-debian-base
 | 
			
		||||
@@ -193,25 +132,25 @@ ENV \
 | 
			
		||||
  PLATFORMIO_CORE_DIR=/esphome/.temp/platformio
 | 
			
		||||
 | 
			
		||||
RUN \
 | 
			
		||||
    curl -L https://apt.llvm.org/llvm-snapshot.gpg.key -o /etc/apt/trusted.gpg.d/apt.llvm.org.asc \
 | 
			
		||||
    && echo "deb http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-18 main" > /etc/apt/sources.list.d/llvm.sources.list \
 | 
			
		||||
    && apt-get update \
 | 
			
		||||
    apt-get update \
 | 
			
		||||
    # Use pinned versions so that we get updates with build caching
 | 
			
		||||
    && apt-get install -y --no-install-recommends \
 | 
			
		||||
        clang-format-13=1:13.0.1-11+b2 \
 | 
			
		||||
        clang-format-11=1:11.0.1-2 \
 | 
			
		||||
        clang-tidy-11=1:11.0.1-2 \
 | 
			
		||||
        patch=2.7.6-7 \
 | 
			
		||||
        software-properties-common=0.99.30-4.1~deb12u1 \
 | 
			
		||||
        nano=7.2-1+deb12u1 \
 | 
			
		||||
        software-properties-common=0.96.20.2-2.1 \
 | 
			
		||||
        nano=5.4-2 \
 | 
			
		||||
        build-essential=12.9 \
 | 
			
		||||
        python3-dev=3.11.2-1+b1 \
 | 
			
		||||
        clang-tidy-18=1:18.1.8~++20240731024826+3b5b5c1ec4a3-1~exp1~20240731144843.145 \
 | 
			
		||||
        python3-dev=3.9.2-3 \
 | 
			
		||||
    && rm -rf \
 | 
			
		||||
        /tmp/* \
 | 
			
		||||
        /var/{cache,log}/* \
 | 
			
		||||
        /var/lib/apt/lists/*
 | 
			
		||||
 | 
			
		||||
COPY requirements_test.txt /
 | 
			
		||||
RUN pip3 install --break-system-packages --no-cache-dir -r /requirements_test.txt
 | 
			
		||||
COPY requirements.txt requirements_optional.txt requirements_test.txt docker/platformio_install_deps.py platformio.ini /
 | 
			
		||||
RUN \
 | 
			
		||||
    pip3 install --no-cache-dir -r /requirements.txt -r /requirements_optional.txt -r /requirements_test.txt \
 | 
			
		||||
    && /platformio_install_deps.py /platformio.ini
 | 
			
		||||
 | 
			
		||||
VOLUME ["/esphome"]
 | 
			
		||||
WORKDIR /esphome
 | 
			
		||||
 
 | 
			
		||||
@@ -1,53 +1,38 @@
 | 
			
		||||
#!/usr/bin/env python3
 | 
			
		||||
import argparse
 | 
			
		||||
from dataclasses import dataclass
 | 
			
		||||
import re
 | 
			
		||||
import shlex
 | 
			
		||||
import subprocess
 | 
			
		||||
import argparse
 | 
			
		||||
from platform import machine
 | 
			
		||||
import shlex
 | 
			
		||||
import re
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
CHANNEL_DEV = "dev"
 | 
			
		||||
CHANNEL_BETA = "beta"
 | 
			
		||||
CHANNEL_RELEASE = "release"
 | 
			
		||||
 | 
			
		||||
CHANNEL_DEV = 'dev'
 | 
			
		||||
CHANNEL_BETA = 'beta'
 | 
			
		||||
CHANNEL_RELEASE = 'release'
 | 
			
		||||
CHANNELS = [CHANNEL_DEV, CHANNEL_BETA, CHANNEL_RELEASE]
 | 
			
		||||
 | 
			
		||||
ARCH_AMD64 = "amd64"
 | 
			
		||||
ARCH_AARCH64 = "aarch64"
 | 
			
		||||
ARCHS = [ARCH_AMD64, ARCH_AARCH64]
 | 
			
		||||
ARCH_AMD64 = 'amd64'
 | 
			
		||||
ARCH_ARMV7 = 'armv7'
 | 
			
		||||
ARCH_AARCH64 = 'aarch64'
 | 
			
		||||
ARCHS = [ARCH_AMD64, ARCH_ARMV7, ARCH_AARCH64]
 | 
			
		||||
 | 
			
		||||
TYPE_DOCKER = "docker"
 | 
			
		||||
TYPE_HA_ADDON = "ha-addon"
 | 
			
		||||
TYPE_LINT = "lint"
 | 
			
		||||
TYPE_DOCKER = 'docker'
 | 
			
		||||
TYPE_HA_ADDON = 'ha-addon'
 | 
			
		||||
TYPE_LINT = 'lint'
 | 
			
		||||
TYPES = [TYPE_DOCKER, TYPE_HA_ADDON, TYPE_LINT]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
parser = argparse.ArgumentParser()
 | 
			
		||||
parser.add_argument(
 | 
			
		||||
    "--tag",
 | 
			
		||||
    type=str,
 | 
			
		||||
    required=True,
 | 
			
		||||
    help="The main docker tag to push to. If a version number also adds latest and/or beta tag",
 | 
			
		||||
)
 | 
			
		||||
parser.add_argument(
 | 
			
		||||
    "--arch", choices=ARCHS, required=False, help="The architecture to build for"
 | 
			
		||||
)
 | 
			
		||||
parser.add_argument(
 | 
			
		||||
    "--build-type", choices=TYPES, required=True, help="The type of build to run"
 | 
			
		||||
)
 | 
			
		||||
parser.add_argument(
 | 
			
		||||
    "--dry-run", action="store_true", help="Don't run any commands, just print them"
 | 
			
		||||
)
 | 
			
		||||
subparsers = parser.add_subparsers(
 | 
			
		||||
    help="Action to perform", dest="command", required=True
 | 
			
		||||
)
 | 
			
		||||
parser.add_argument("--tag", type=str, required=True, help="The main docker tag to push to. If a version number also adds latest and/or beta tag")
 | 
			
		||||
parser.add_argument("--arch", choices=ARCHS, required=False, help="The architecture to build for")
 | 
			
		||||
parser.add_argument("--build-type", choices=TYPES, required=True, help="The type of build to run")
 | 
			
		||||
parser.add_argument("--dry-run", action="store_true", help="Don't run any commands, just print them")
 | 
			
		||||
subparsers = parser.add_subparsers(help="Action to perform", dest="command", required=True)
 | 
			
		||||
build_parser = subparsers.add_parser("build", help="Build the image")
 | 
			
		||||
build_parser.add_argument("--push", help="Also push the images", action="store_true")
 | 
			
		||||
build_parser.add_argument(
 | 
			
		||||
    "--load", help="Load the docker image locally", action="store_true"
 | 
			
		||||
)
 | 
			
		||||
manifest_parser = subparsers.add_parser(
 | 
			
		||||
    "manifest", help="Create a manifest from already pushed images"
 | 
			
		||||
)
 | 
			
		||||
manifest_parser = subparsers.add_parser("manifest", help="Create a manifest from already pushed images")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@dataclass(frozen=True)
 | 
			
		||||
@@ -63,7 +48,7 @@ class DockerParams:
 | 
			
		||||
        prefix = {
 | 
			
		||||
            TYPE_DOCKER: "esphome/esphome",
 | 
			
		||||
            TYPE_HA_ADDON: "esphome/esphome-hassio",
 | 
			
		||||
            TYPE_LINT: "esphome/esphome-lint",
 | 
			
		||||
            TYPE_LINT: "esphome/esphome-lint"
 | 
			
		||||
        }[build_type]
 | 
			
		||||
        build_to = f"{prefix}-{arch}"
 | 
			
		||||
        baseimgtype = {
 | 
			
		||||
@@ -73,6 +58,7 @@ class DockerParams:
 | 
			
		||||
        }[build_type]
 | 
			
		||||
        platform = {
 | 
			
		||||
            ARCH_AMD64: "linux/amd64",
 | 
			
		||||
            ARCH_ARMV7: "linux/arm/v7",
 | 
			
		||||
            ARCH_AARCH64: "linux/arm64",
 | 
			
		||||
        }[arch]
 | 
			
		||||
        target = {
 | 
			
		||||
@@ -101,12 +87,10 @@ def main():
 | 
			
		||||
                sys.exit(1)
 | 
			
		||||
 | 
			
		||||
    # detect channel from tag
 | 
			
		||||
    match = re.match(r"^(\d+\.\d+)(?:\.\d+)?(b\d+)?$", args.tag)
 | 
			
		||||
    major_minor_version = None
 | 
			
		||||
    match = re.match(r'^\d+\.\d+(?:\.\d+)?(b\d+)?$', args.tag)
 | 
			
		||||
    if match is None:
 | 
			
		||||
        channel = CHANNEL_DEV
 | 
			
		||||
    elif match.group(2) is None:
 | 
			
		||||
        major_minor_version = match.group(1)
 | 
			
		||||
    elif match.group(1) is None:
 | 
			
		||||
        channel = CHANNEL_RELEASE
 | 
			
		||||
    else:
 | 
			
		||||
        channel = CHANNEL_BETA
 | 
			
		||||
@@ -121,11 +105,6 @@ def main():
 | 
			
		||||
        tags_to_push.append("beta")
 | 
			
		||||
        tags_to_push.append("latest")
 | 
			
		||||
 | 
			
		||||
        # Compatibility with HA tags
 | 
			
		||||
        if major_minor_version:
 | 
			
		||||
            tags_to_push.append("stable")
 | 
			
		||||
            tags_to_push.append(major_minor_version)
 | 
			
		||||
 | 
			
		||||
    if args.command == "build":
 | 
			
		||||
        # 1. pull cache image
 | 
			
		||||
        params = DockerParams.for_type_arch(args.build_type, args.arch)
 | 
			
		||||
@@ -141,28 +120,18 @@ def main():
 | 
			
		||||
 | 
			
		||||
        # 3. build
 | 
			
		||||
        cmd = [
 | 
			
		||||
            "docker",
 | 
			
		||||
            "buildx",
 | 
			
		||||
            "build",
 | 
			
		||||
            "--build-arg",
 | 
			
		||||
            f"BASEIMGTYPE={params.baseimgtype}",
 | 
			
		||||
            "--build-arg",
 | 
			
		||||
            f"BUILD_VERSION={args.tag}",
 | 
			
		||||
            "--cache-from",
 | 
			
		||||
            f"type=registry,ref={cache_img}",
 | 
			
		||||
            "--file",
 | 
			
		||||
            "docker/Dockerfile",
 | 
			
		||||
            "--platform",
 | 
			
		||||
            params.platform,
 | 
			
		||||
            "--target",
 | 
			
		||||
            params.target,
 | 
			
		||||
            "docker", "buildx", "build",
 | 
			
		||||
            "--build-arg", f"BASEIMGTYPE={params.baseimgtype}",
 | 
			
		||||
            "--build-arg", f"BUILD_VERSION={args.tag}",
 | 
			
		||||
            "--cache-from", f"type=registry,ref={cache_img}",
 | 
			
		||||
            "--file", "docker/Dockerfile",
 | 
			
		||||
            "--platform", params.platform,
 | 
			
		||||
            "--target", params.target,
 | 
			
		||||
        ]
 | 
			
		||||
        for img in imgs:
 | 
			
		||||
            cmd += ["--tag", img]
 | 
			
		||||
        if args.push:
 | 
			
		||||
            cmd += ["--push", "--cache-to", f"type=registry,ref={cache_img},mode=max"]
 | 
			
		||||
        if args.load:
 | 
			
		||||
            cmd += ["--load"]
 | 
			
		||||
 | 
			
		||||
        run_command(*cmd, ".")
 | 
			
		||||
    elif args.command == "manifest":
 | 
			
		||||
@@ -181,7 +150,9 @@ def main():
 | 
			
		||||
            run_command(*cmd)
 | 
			
		||||
        # 2. Push manifests
 | 
			
		||||
        for target in targets:
 | 
			
		||||
            run_command("docker", "manifest", "push", target)
 | 
			
		||||
            run_command(
 | 
			
		||||
                "docker", "manifest", "push", target
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == "__main__":
 | 
			
		||||
 
 | 
			
		||||
@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash

# If /cache is mounted, use that as PIO's coredir
# otherwise use path in /config (so that PIO packages aren't downloaded on each compile)
@@ -21,10 +21,4 @@ export PLATFORMIO_PLATFORMS_DIR="${pio_cache_base}/platforms"
export PLATFORMIO_PACKAGES_DIR="${pio_cache_base}/packages"
export PLATFORMIO_CACHE_DIR="${pio_cache_base}/cache"

# If /build is mounted, use that as the build path
# otherwise use path in /config (so that builds aren't lost on container restart)
if [[ -d /build ]]; then
    export ESPHOME_BUILD_PATH=/build
fi

exec esphome "$@"
@@ -1,92 +0,0 @@
 | 
			
		||||
#!/usr/bin/env python3
 | 
			
		||||
import re
 | 
			
		||||
import argparse
 | 
			
		||||
 | 
			
		||||
CHANNEL_DEV = "dev"
 | 
			
		||||
CHANNEL_BETA = "beta"
 | 
			
		||||
CHANNEL_RELEASE = "release"
 | 
			
		||||
 | 
			
		||||
GHCR = "ghcr"
 | 
			
		||||
DOCKERHUB = "dockerhub"
 | 
			
		||||
 | 
			
		||||
parser = argparse.ArgumentParser()
 | 
			
		||||
parser.add_argument(
 | 
			
		||||
    "--tag",
 | 
			
		||||
    type=str,
 | 
			
		||||
    required=True,
 | 
			
		||||
    help="The main docker tag to push to. If a version number also adds latest and/or beta tag",
 | 
			
		||||
)
 | 
			
		||||
parser.add_argument(
 | 
			
		||||
    "--suffix",
 | 
			
		||||
    type=str,
 | 
			
		||||
    required=True,
 | 
			
		||||
    help="The suffix of the tag.",
 | 
			
		||||
)
 | 
			
		||||
parser.add_argument(
 | 
			
		||||
    "--registry",
 | 
			
		||||
    type=str,
 | 
			
		||||
    choices=[GHCR, DOCKERHUB],
 | 
			
		||||
    required=False,
 | 
			
		||||
    action="append",
 | 
			
		||||
    help="The registry to build tags for.",
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def main():
 | 
			
		||||
    args = parser.parse_args()
 | 
			
		||||
 | 
			
		||||
    # detect channel from tag
 | 
			
		||||
    match = re.match(r"^(\d+\.\d+)(?:\.\d+)(?:(b\d+)|(-dev\d+))?$", args.tag)
 | 
			
		||||
    major_minor_version = None
 | 
			
		||||
    if match is None:  # eg 2023.12.0-dev20231109-testbranch
 | 
			
		||||
        channel = None  # Ran with custom tag for a branch etc
 | 
			
		||||
    elif match.group(3) is not None:  # eg 2023.12.0-dev20231109
 | 
			
		||||
        channel = CHANNEL_DEV
 | 
			
		||||
    elif match.group(2) is not None:  # eg 2023.12.0b1
 | 
			
		||||
        channel = CHANNEL_BETA
 | 
			
		||||
    else:  # eg 2023.12.0
 | 
			
		||||
        major_minor_version = match.group(1)
 | 
			
		||||
        channel = CHANNEL_RELEASE
 | 
			
		||||
 | 
			
		||||
    tags_to_push = [args.tag]
 | 
			
		||||
    if channel == CHANNEL_DEV:
 | 
			
		||||
        tags_to_push.append("dev")
 | 
			
		||||
    elif channel == CHANNEL_BETA:
 | 
			
		||||
        tags_to_push.append("beta")
 | 
			
		||||
    elif channel == CHANNEL_RELEASE:
 | 
			
		||||
        # Additionally push to beta
 | 
			
		||||
        tags_to_push.append("beta")
 | 
			
		||||
        tags_to_push.append("latest")
 | 
			
		||||
 | 
			
		||||
        if major_minor_version:
 | 
			
		||||
            tags_to_push.append("stable")
 | 
			
		||||
            tags_to_push.append(major_minor_version)
 | 
			
		||||
 | 
			
		||||
    suffix = f"-{args.suffix}" if args.suffix else ""
 | 
			
		||||
 | 
			
		||||
    image_name = f"esphome/esphome{suffix}"
 | 
			
		||||
 | 
			
		||||
    print(f"channel={channel}")
 | 
			
		||||
 | 
			
		||||
    if args.registry is None:
 | 
			
		||||
        args.registry = [GHCR, DOCKERHUB]
 | 
			
		||||
    elif len(args.registry) == 1:
 | 
			
		||||
        if GHCR in args.registry:
 | 
			
		||||
            print(f"image=ghcr.io/{image_name}")
 | 
			
		||||
        if DOCKERHUB in args.registry:
 | 
			
		||||
            print(f"image=docker.io/{image_name}")
 | 
			
		||||
 | 
			
		||||
    print(f"image_name={image_name}")
 | 
			
		||||
 | 
			
		||||
    full_tags = []
 | 
			
		||||
 | 
			
		||||
    for tag in tags_to_push:
 | 
			
		||||
        if GHCR in args.registry:
 | 
			
		||||
            full_tags += [f"ghcr.io/{image_name}:{tag}"]
 | 
			
		||||
        if DOCKERHUB in args.registry:
 | 
			
		||||
            full_tags += [f"docker.io/{image_name}:{tag}"]
 | 
			
		||||
    print(f"tags={','.join(full_tags)}")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == "__main__":
 | 
			
		||||
    main()
 | 
			
		||||
@@ -1,47 +0,0 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# This file installs the user ESPHome fork if specified.
# The fork must be up to date with the latest ESPHome dev branch
# and have no conflicts.
# This config option only exists in the ESPHome Dev add-on.
# ==============================================================================

declare esphome_fork

if bashio::config.has_value 'esphome_fork'; then
  esphome_fork=$(bashio::config 'esphome_fork')
  # format: [username][/repository]:ref
  if [[ "$esphome_fork" =~ ^(([^/]+)(/([^:]+))?:)?([^:/]+)$ ]]; then
    username="${BASH_REMATCH[2]:-esphome}"
    repository="${BASH_REMATCH[4]:-esphome}"
    ref="${BASH_REMATCH[5]}"
  else
    bashio::exit.nok "Invalid esphome_fork format: $esphome_fork"
  fi
  full_url="https://github.com/${username}/${repository}/archive/${ref}.tar.gz"
  bashio::log.info "Checking forked ESPHome"
  dev_version=$(python3 -c "from esphome.const import __version__; print(__version__)")
  bashio::log.info "Downloading ESPHome from fork '${esphome_fork}' (${full_url})..."
  curl -L -o /tmp/esphome.tar.gz "${full_url}" -qq ||
    bashio::exit.nok "Failed downloading ESPHome fork."
  bashio::log.info "Installing ESPHome from fork '${esphome_fork}' (${full_url})..."
  rm -rf /esphome || bashio::exit.nok "Failed to remove ESPHome."
  mkdir /esphome
  tar -zxf /tmp/esphome.tar.gz -C /esphome --strip-components=1 ||
    bashio::exit.nok "Failed installing ESPHome from fork."
  pip install -U -e /esphome || bashio::exit.nok "Failed installing ESPHome from fork."
  rm -f /tmp/esphome.tar.gz
  fork_version=$(python3 -c "from esphome.const import __version__; print(__version__)")

  if [[ "$fork_version" != "$dev_version" ]]; then
    bashio::log.error "############################"
    bashio::log.error "Uninstalled fork as version does not match"
    bashio::log.error "Update (or ask the author to update) the branch"
    bashio::log.error "This is important as the dev addon and the dev ESPHome"
    bashio::log.error "branch can have changes that are not compatible with old forks"
    bashio::log.error "and get reported as bugs which we cannot solve easily."
    bashio::log.error "############################"
    bashio::exit.nok
  fi
  bashio::log.info "Installed ESPHome from fork '${esphome_fork}' (${full_url})..."
fi
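
For context on the `esphome_fork` option handled by the script above, a minimal sketch of how it might be set in the dev add-on's YAML options; the accepted format is `[username][/repository]:ref` per the comment in the script, and the user and branch names below are placeholders, not real refs:

    # Dev add-on configuration (hypothetical values)
    esphome_fork: someuser/esphome:my-feature-branch   # username/repository:ref
    # or, to track a branch of the main esphome/esphome repository:
    # esphome_fork: dev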
@@ -1,8 +0,0 @@
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
ssl_session_timeout  10m;
ssl_session_cache shared:SSL:10m;
ssl_session_tickets off;
ssl_stapling on;
ssl_stapling_verify on;
@@ -1,3 +0,0 @@
upstream esphome {
    server unix:/var/run/esphome.sock;
}
@@ -1 +0,0 @@
Without requirements or design, programming is the art of adding bugs to an empty text file. (Louis Srygley)
@@ -1,32 +0,0 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Home Assistant Add-on: ESPHome
# Sends discovery information to Home Assistant.
# ==============================================================================
declare config
declare port

# We only disable it when disabled explicitly
if bashio::config.false 'home_assistant_dashboard_integration';
then
    bashio::log.info "Home Assistant discovery is disabled for this add-on."
    bashio::exit.ok
fi

port=$(bashio::addon.ingress_port)

# Wait for NGINX to become available
bashio::net.wait_for "${port}" "127.0.0.1" 300

config=$(\
    bashio::var.json \
        host "127.0.0.1" \
        port "^${port}" \
)

if bashio::discovery "esphome" "${config}" > /dev/null; then
    bashio::log.info "Successfully send discovery information to Home Assistant."
else
    bashio::log.error "Discovery message to Home Assistant failed!"
fi
@@ -1 +0,0 @@
oneshot
@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/discovery/run
@@ -1,26 +0,0 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Home Assistant Community Add-on: ESPHome
# Take down the S6 supervision tree when ESPHome dashboard fails
# ==============================================================================
declare exit_code
readonly exit_code_container=$(</run/s6-linux-init-container-results/exitcode)
readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"

bashio::log.info \
  "Service ESPHome dashboard exited with code ${exit_code_service}" \
  "(by signal ${exit_code_signal})"

if [[ "${exit_code_service}" -eq 256 ]]; then
  if [[ "${exit_code_container}" -eq 0 ]]; then
    echo $((128 + $exit_code_signal)) > /run/s6-linux-init-container-results/exitcode
  fi
  [[ "${exit_code_signal}" -eq 15 ]] && exec /run/s6/basedir/bin/halt
elif [[ "${exit_code_service}" -ne 0 ]]; then
  if [[ "${exit_code_container}" -eq 0 ]]; then
    echo "${exit_code_service}" > /run/s6-linux-init-container-results/exitcode
  fi
  exec /run/s6/basedir/bin/halt
fi
@@ -1 +0,0 @@
longrun
@@ -1,27 +0,0 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Configures NGINX for use with ESPHome
# ==============================================================================
mkdir -p /var/log/nginx

# Generate Ingress configuration
bashio::var.json \
    interface "$(bashio::addon.ip_address)" \
    port "^$(bashio::addon.ingress_port)" \
    | tempio \
        -template /etc/nginx/templates/ingress.gtpl \
        -out /etc/nginx/servers/ingress.conf

# Generate direct access configuration, if enabled.
if bashio::var.has_value "$(bashio::addon.port 6052)"; then
    bashio::config.require.ssl
    bashio::var.json \
        certfile "$(bashio::config 'certfile')" \
        keyfile "$(bashio::config 'keyfile')" \
        ssl "^$(bashio::config 'ssl')" \
        | tempio \
            -template /etc/nginx/templates/direct.gtpl \
            -out /etc/nginx/servers/direct.conf
fi
@@ -1 +0,0 @@
oneshot
@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-nginx/run
@@ -1,25 +0,0 @@
#!/command/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when NGINX fails
# ==============================================================================
declare exit_code
readonly exit_code_container=$(</run/s6-linux-init-container-results/exitcode)
readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"

bashio::log.info \
  "Service NGINX exited with code ${exit_code_service}" \
  "(by signal ${exit_code_signal})"

if [[ "${exit_code_service}" -eq 256 ]]; then
  if [[ "${exit_code_container}" -eq 0 ]]; then
    echo $((128 + $exit_code_signal)) > /run/s6-linux-init-container-results/exitcode
  fi
  [[ "${exit_code_signal}" -eq 15 ]] && exec /run/s6/basedir/bin/halt
elif [[ "${exit_code_service}" -ne 0 ]]; then
  if [[ "${exit_code_container}" -eq 0 ]]; then
    echo "${exit_code_service}" > /run/s6-linux-init-container-results/exitcode
  fi
  exec /run/s6/basedir/bin/halt
fi
@@ -1 +0,0 @@
longrun
docker/hassio-rootfs/etc/cont-init.d/10-requirements.sh (Executable file, 41 lines)
@@ -0,0 +1,41 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# This files check if all user configuration requirements are met
# ==============================================================================

# Check SSL requirements, if enabled
if bashio::config.true 'ssl'; then
    if ! bashio::config.has_value 'certfile'; then
        bashio::fatal 'SSL is enabled, but no certfile was specified.'
        bashio::exit.nok
    fi

    if ! bashio::config.has_value 'keyfile'; then
        bashio::fatal 'SSL is enabled, but no keyfile was specified'
        bashio::exit.nok
    fi


    certfile="/ssl/$(bashio::config 'certfile')"
    keyfile="/ssl/$(bashio::config 'keyfile')"

    if ! bashio::fs.file_exists "${certfile}"; then
        if ! bashio::fs.file_exists "${keyfile}"; then
            # Both files are missing, let's print a friendlier error message
            bashio::log.fatal 'You enabled encrypted connections using the "ssl": true option.'
            bashio::log.fatal "However, the SSL files '${certfile}' and '${keyfile}'"
            bashio::log.fatal "were not found. If you're using Hass.io on your local network and don't want"
            bashio::log.fatal 'to encrypt connections to the ESPHome dashboard, you can manually disable'
            bashio::log.fatal 'SSL by setting "ssl" to false."'
            bashio::exit.nok
        fi
        bashio::log.fatal "The configured certfile '${certfile}' was not found."
        bashio::exit.nok
    fi

    if ! bashio::fs.file_exists "/ssl/$(bashio::config 'keyfile')"; then
        bashio::log.fatal "The configured keyfile '${keyfile}' was not found."
        bashio::exit.nok
    fi
fi
docker/hassio-rootfs/etc/cont-init.d/20-nginx.sh (Executable file, 34 lines)
@@ -0,0 +1,34 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Configures NGINX for use with ESPHome
# ==============================================================================

declare certfile
declare keyfile
declare direct_port
declare ingress_interface
declare ingress_port

mkdir -p /var/log/nginx

direct_port=$(bashio::addon.port 6052)
if bashio::var.has_value "${direct_port}"; then
    if bashio::config.true 'ssl'; then
        certfile=$(bashio::config 'certfile')
        keyfile=$(bashio::config 'keyfile')

        mv /etc/nginx/servers/direct-ssl.disabled /etc/nginx/servers/direct.conf
        sed -i "s/%%certfile%%/${certfile}/g" /etc/nginx/servers/direct.conf
        sed -i "s/%%keyfile%%/${keyfile}/g" /etc/nginx/servers/direct.conf
    else
        mv /etc/nginx/servers/direct.disabled /etc/nginx/servers/direct.conf
    fi

    sed -i "s/%%port%%/${direct_port}/g" /etc/nginx/servers/direct.conf
fi

ingress_port=$(bashio::addon.ingress_port)
ingress_interface=$(bashio::addon.ip_address)
sed -i "s/%%port%%/${ingress_port}/g" /etc/nginx/servers/ingress.conf
sed -i "s/%%interface%%/${ingress_interface}/g" /etc/nginx/servers/ingress.conf
docker/hassio-rootfs/etc/cont-init.d/30-dirs.sh (Normal file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# This files creates all directories used by esphome
# ==============================================================================

pio_cache_base=/data/cache/platformio

mkdir -p "${pio_cache_base}"
@@ -1,9 +1,9 @@
proxy_http_version          1.1;
proxy_ignore_client_abort   off;
proxy_read_timeout          86400s;
proxy_redirect              off;
proxy_send_timeout          86400s;
proxy_max_temp_file_size    0;
proxy_http_version 1.1;
proxy_ignore_client_abort off;
proxy_read_timeout 86400s;
proxy_redirect off;
proxy_send_timeout 86400s;
proxy_max_temp_file_size 0;

proxy_set_header Accept-Encoding "";
proxy_set_header Connection $connection_upgrade;
@@ -1,7 +1,5 @@
root            /dev/null;
server_name     $hostname;

client_max_body_size 512m;
root /dev/null;
server_name $hostname;

add_header X-Content-Type-Options nosniff;
add_header X-XSS-Protection "1; mode=block";
docker/hassio-rootfs/etc/nginx/includes/ssl_params.conf (Normal file, 9 lines)
@@ -0,0 +1,9 @@
ssl_protocols TLSv1.2;
ssl_prefer_server_ciphers on;
ssl_ciphers ECDHE-RSA-AES256-GCM-SHA512:DHE-RSA-AES256-GCM-SHA512:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:DHE-RSA-AES256-SHA;
ssl_ecdh_curve secp384r1;
ssl_session_timeout  10m;
ssl_session_cache shared:SSL:10m;
ssl_session_tickets off;
ssl_stapling on;
ssl_stapling_verify on;
@@ -2,6 +2,7 @@ daemon off;
user root;
pid /var/run/nginx.pid;
worker_processes 1;
# Hass.io addon log
error_log /proc/1/fd/1 error;
events {
    worker_connections 1024;
@@ -9,22 +10,24 @@ events {

http {
    include /etc/nginx/includes/mime.types;

    access_log              off;
    default_type            application/octet-stream;
    gzip                    on;
    keepalive_timeout       65;
    sendfile                on;
    server_tokens           off;

    tcp_nodelay             on;
    tcp_nopush              on;
    access_log stdout;
    default_type application/octet-stream;
    gzip on;
    keepalive_timeout 65;
    sendfile on;
    server_tokens off;

    map $http_upgrade $connection_upgrade {
        default upgrade;
        ''      close;
    }

    include /etc/nginx/includes/upstream.conf;
    # Use Hass.io supervisor as resolver
    resolver 172.30.32.2;

    upstream esphome {
        server unix:/var/run/esphome.sock;
    }

    include /etc/nginx/servers/*.conf;
}
@@ -1,26 +1,20 @@
server {
    {{ if not .ssl }}
    listen 6052 default_server;
    {{ else }}
    listen 6052 default_server ssl http2;
    {{ end }}
    listen %%port%% default_server ssl http2;

    include /etc/nginx/includes/server_params.conf;
    include /etc/nginx/includes/proxy_params.conf;

    {{ if .ssl }}
    include /etc/nginx/includes/ssl_params.conf;

    ssl_certificate /ssl/{{ .certfile }};
    ssl_certificate_key /ssl/{{ .keyfile }};
    ssl on;
    ssl_certificate /ssl/%%certfile%%;
    ssl_certificate_key /ssl/%%keyfile%%;

    # Clear Hass.io Ingress header
    proxy_set_header X-Hassio-Ingress "";

    # Redirect http requests to https on the same port.
    # https://rageagainstshell.com/2016/11/redirect-http-to-https-on-the-same-port-in-nginx/
    error_page 497 https://$http_host$request_uri;
    {{ end }}

    # Clear Home Assistant Ingress header
    proxy_set_header X-HA-Ingress "";

    location / {
        proxy_pass http://esphome;
docker/hassio-rootfs/etc/nginx/servers/direct.disabled (Normal file, 12 lines)
@@ -0,0 +1,12 @@
server {
    listen %%port%% default_server;

    include /etc/nginx/includes/server_params.conf;
    include /etc/nginx/includes/proxy_params.conf;
    # Clear Hass.io Ingress header
    proxy_set_header X-Hassio-Ingress "";

    location / {
        proxy_pass http://esphome;
    }
}
@@ -1,16 +1,14 @@
server {
    listen 127.0.0.1:{{ .port }} default_server;
    listen {{ .interface }}:{{ .port }} default_server;
    listen %%interface%%:%%port%% default_server;

    include /etc/nginx/includes/server_params.conf;
    include /etc/nginx/includes/proxy_params.conf;

    # Set Home Assistant Ingress header
    proxy_set_header X-HA-Ingress "YES";
    # Set Hass.io Ingress header
    proxy_set_header X-Hassio-Ingress "YES";

    location / {
        # Only allow from Hass.io supervisor
        allow   172.30.32.2;
        allow   127.0.0.1;
        deny    all;

        proxy_pass http://esphome;
docker/hassio-rootfs/etc/services.d/esphome/finish (Executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when ESPHome fails
# ==============================================================================
if -n { s6-test $# -ne 0 }
if -n { s6-test ${1} -eq 256 }

s6-svscanctl -t /var/run/s6/services
@@ -1,19 +1,10 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the ESPHome dashboard
# ==============================================================================
readonly pio_cache_base=/data/cache/platformio

export ESPHOME_IS_HA_ADDON=true
export PLATFORMIO_GLOBALLIB_DIR=/piolibs

# we can't set core_dir, because the settings file is stored in `core_dir/appstate.json`
# setting `core_dir` would therefore prevent pio from accessing
export PLATFORMIO_PLATFORMS_DIR="${pio_cache_base}/platforms"
export PLATFORMIO_PACKAGES_DIR="${pio_cache_base}/packages"
export PLATFORMIO_CACHE_DIR="${pio_cache_base}/cache"
export ESPHOME_IS_HASSIO=true

if bashio::config.true 'leave_front_door_open'; then
    export DISABLE_HA_AUTHENTICATION=true
@@ -23,31 +14,22 @@ if bashio::config.true 'streamer_mode'; then
    export ESPHOME_STREAMER_MODE=true
fi

if bashio::config.true 'status_use_ping'; then
    export ESPHOME_DASHBOARD_USE_PING=true
fi

if bashio::config.has_value 'relative_url'; then
    export ESPHOME_DASHBOARD_RELATIVE_URL=$(bashio::config 'relative_url')
fi

if bashio::config.has_value 'default_compile_process_limit'; then
    export ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT=$(bashio::config 'default_compile_process_limit')
else
    if grep -q 'Raspberry Pi 3' /proc/cpuinfo; then
        export ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT=1
    fi
fi
pio_cache_base=/data/cache/platformio
# we can't set core_dir, because the settings file is stored in `core_dir/appstate.json`
# setting `core_dir` would therefore prevent pio from accessing
export PLATFORMIO_PLATFORMS_DIR="${pio_cache_base}/platforms"
export PLATFORMIO_PACKAGES_DIR="${pio_cache_base}/packages"
export PLATFORMIO_CACHE_DIR="${pio_cache_base}/cache"

mkdir -p "${pio_cache_base}"

mkdir -p /config/esphome

if bashio::fs.directory_exists '/config/esphome/.esphome'; then
    bashio::log.info "Migrating old .esphome directory..."
    if bashio::fs.file_exists '/config/esphome/.esphome/esphome.json'; then
        mv /config/esphome/.esphome/esphome.json /data/esphome.json
    fi
    mkdir -p "/data/storage"
    mv /config/esphome/.esphome/*.json /data/storage/ || true
    rm -rf /config/esphome/.esphome
fi
export PLATFORMIO_GLOBALLIB_DIR=/piolibs

bashio::log.info "Starting ESPHome dashboard..."
exec esphome dashboard /config/esphome --socket /var/run/esphome.sock --ha-addon
exec esphome dashboard /config/esphome --socket /var/run/esphome.sock --hassio
docker/hassio-rootfs/etc/services.d/nginx/finish (Executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when NGINX fails
# ==============================================================================
if -n { s6-test $# -ne 0 }
if -n { s6-test ${1} -eq 256 }

s6-svscanctl -t /var/run/s6/services
@@ -1,11 +1,10 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the NGINX proxy
# ==============================================================================

bashio::log.info "Waiting for ESPHome dashboard to come up..."
bashio::log.info "Waiting for dashboard to come up..."

while [[ ! -S /var/run/esphome.sock ]]; do
  sleep 0.5
docker/platformio_install_deps.py (Executable file, 30 lines)
@@ -0,0 +1,30 @@
#!/usr/bin/env python3
# This script is used in the docker containers to preinstall
# all platformio libraries in the global storage

import configparser
import subprocess
import sys

config = configparser.ConfigParser(inline_comment_prefixes=(';', ))
config.read(sys.argv[1])

libs = []
# Extract from every lib_deps key in all sections
for section in config.sections():
    conf = config[section]
    if "lib_deps" not in conf:
        continue
    for lib_dep in conf["lib_deps"].splitlines():
        if not lib_dep:
            # Empty line or comment
            continue
        if lib_dep.startswith("${"):
            # Extending from another section
            continue
        if "@" not in lib_dep:
            # No version pinned, this is an internal lib
            continue
        libs.append(lib_dep)

subprocess.check_call(['platformio', 'lib', '-g', 'install', *libs])
@@ -1,61 +1,40 @@
# PYTHON_ARGCOMPLETE_OK
import argparse
from datetime import datetime
import functools
import importlib
import logging
import os
import re
import sys
import time

import argcomplete
from datetime import datetime

from esphome import const, writer, yaml_util
import esphome.codegen as cg
from esphome.config import iter_component_configs, read_config, strip_default_ids
from esphome.config import iter_components, read_config, strip_default_ids
from esphome.const import (
    ALLOWED_NAME_CHARS,
    CONF_BAUD_RATE,
    CONF_BROKER,
    CONF_DEASSERT_RTS_DTR,
    CONF_DISABLED,
    CONF_ESPHOME,
    CONF_LEVEL,
    CONF_LOG_TOPIC,
    CONF_LOGGER,
    CONF_MDNS,
    CONF_MQTT,
    CONF_NAME,
    CONF_OTA,
    CONF_PASSWORD,
    CONF_PLATFORM,
    CONF_PLATFORMIO_OPTIONS,
    CONF_PORT,
    CONF_SUBSTITUTIONS,
    CONF_TOPIC,
    PLATFORM_BK72XX,
    PLATFORM_ESP32,
    PLATFORM_ESP8266,
    PLATFORM_RP2040,
    PLATFORM_RTL87XX,
    CONF_ESPHOME,
    CONF_PLATFORMIO_OPTIONS,
    SECRETS_FILES,
)
from esphome.core import CORE, EsphomeError, coroutine
from esphome.helpers import get_bool_env, indent, is_ip_address
from esphome.log import Fore, color, setup_log
from esphome.helpers import indent
from esphome.util import (
    get_serial_ports,
    list_yaml_files,
    run_external_command,
    run_external_process,
    safe_print,
    list_yaml_files,
    get_serial_ports,
)
from esphome.log import color, setup_log, Fore

_LOGGER = logging.getLogger(__name__)


def choose_prompt(options, purpose: str = None):
def choose_prompt(options):
    if not options:
        raise EsphomeError(
            "Found no valid options for upload/logging, please make sure relevant "
@@ -66,11 +45,9 @@ def choose_prompt(options, purpose: str = None):
    if len(options) == 1:
        return options[0][1]

    safe_print(
        f"Found multiple options{f' for {purpose}' if purpose else ''}, please choose one:"
    )
    safe_print("Found multiple options, please choose one:")
    for i, (desc, _) in enumerate(options):
        safe_print(f"  [{i + 1}] {desc}")
        safe_print(f"  [{i+1}] {desc}")

    while True:
        opt = input("(number): ")
@@ -87,42 +64,23 @@ def choose_prompt(options, purpose: str = None):
    return options[opt - 1][1]


def choose_upload_log_host(
    default, check_default, show_ota, show_mqtt, show_api, purpose: str = None
):
def choose_upload_log_host(default, check_default, show_ota, show_mqtt, show_api):
    options = []
    for port in get_serial_ports():
        options.append((f"{port.path} ({port.description})", port.path))
    if default == "SERIAL":
        return choose_prompt(options, purpose=purpose)
    if (show_ota and "ota" in CORE.config) or (show_api and "api" in CORE.config):
        options.append((f"Over The Air ({CORE.address})", CORE.address))
        if default == "OTA":
            return CORE.address
    if (
        show_mqtt
        and (mqtt_config := CORE.config.get(CONF_MQTT))
        and mqtt_logging_enabled(mqtt_config)
    ):
        options.append((f"MQTT ({mqtt_config[CONF_BROKER]})", "MQTT"))
    if show_mqtt and "mqtt" in CORE.config:
        options.append((f"MQTT ({CORE.config['mqtt'][CONF_BROKER]})", "MQTT"))
        if default == "OTA":
            return "MQTT"
    if default is not None:
        return default
    if check_default is not None and check_default in [opt[1] for opt in options]:
        return check_default
    return choose_prompt(options, purpose=purpose)


def mqtt_logging_enabled(mqtt_config):
    log_topic = mqtt_config[CONF_LOG_TOPIC]
    if log_topic is None:
        return False
    if CONF_TOPIC not in log_topic:
        return False
    if log_topic.get(CONF_LEVEL, None) == "NONE":
        return False
    return True
    return choose_prompt(options)


def get_port_type(port):
@@ -135,16 +93,15 @@ def get_port_type(port):

def run_miniterm(config, port):
    import serial

    from esphome import platformio_api

    if CONF_LOGGER not in config:
        _LOGGER.info("Logger is not enabled. Not starting UART logs.")
        return 1
        return
    baud_rate = config["logger"][CONF_BAUD_RATE]
    if baud_rate == 0:
        _LOGGER.info("UART logging is disabled (baud_rate=0). Not starting UART logs.")
        return 1
        return
    _LOGGER.info("Starting log output from %s with baud rate %s", port, baud_rate)

    backtrace_state = False
@@ -158,36 +115,25 @@ def run_miniterm(config, port):
        ser.dtr = False
        ser.rts = False

    tries = 0
    while tries < 5:
        try:
            with ser:
                while True:
                    try:
                        raw = ser.readline()
                    except serial.SerialException:
                        _LOGGER.error("Serial port closed!")
                        return 0
                    line = (
                        raw.replace(b"\r", b"")
                        .replace(b"\n", b"")
                        .decode("utf8", "backslashreplace")
                    )
                    time_str = datetime.now().time().strftime("[%H:%M:%S]")
                    message = time_str + line
                    safe_print(message)
    with ser:
        while True:
            try:
                raw = ser.readline()
            except serial.SerialException:
                _LOGGER.error("Serial port closed!")
                return
            line = (
                raw.replace(b"\r", b"")
                .replace(b"\n", b"")
                .decode("utf8", "backslashreplace")
            )
            time = datetime.now().time().strftime("[%H:%M:%S]")
            message = time + line
            safe_print(message)

                    backtrace_state = platformio_api.process_stacktrace(
                        config, line, backtrace_state=backtrace_state
                    )
        except serial.SerialException:
            tries += 1
            time.sleep(1)
    if tries >= 5:
        _LOGGER.error("Could not connect to serial port %s", port)
        return 1

    return 0
            backtrace_state = platformio_api.process_stacktrace(
                config, line, backtrace_state=backtrace_state
            )


def wrap_to_code(name, comp):
@@ -217,7 +163,7 @@ def write_cpp(config):
def generate_cpp_contents(config):
    _LOGGER.info("Generating C++ source...")

    for name, component, conf in iter_component_configs(CORE.config):
    for name, component, conf in iter_components(CORE.config):
        if component.to_code is not None:
            coro = wrap_to_code(name, component)
            CORE.add_job(coro, conf)
@@ -244,16 +190,14 @@ def compile_program(args, config):
    return 0 if idedata is not None else 1


def upload_using_esptool(config, port, file):
def upload_using_esptool(config, port):
    from esphome import platformio_api

    first_baudrate = config[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS].get(
        "upload_speed", 460800
    )

    if file is not None:
        flash_images = [platformio_api.FlashImage(path=file, offset="0x0")]
    else:
    def run_esptool(baud_rate):
        idedata = platformio_api.get_idedata(config)

        firmware_offset = "0x10000" if CORE.is_esp32 else "0x0"
@@ -264,13 +208,12 @@ def upload_using_esptool(config, port, file):
            *idedata.extra_flash_images,
        ]

    mcu = "esp8266"
    if CORE.is_esp32:
        from esphome.components.esp32 import get_esp32_variant
        mcu = "esp8266"
        if CORE.is_esp32:
            from esphome.components.esp32 import get_esp32_variant

        mcu = get_esp32_variant().lower()
            mcu = get_esp32_variant().lower()

    def run_esptool(baud_rate):
        cmd = [
            "esptool.py",
            "--before",
@@ -294,7 +237,8 @@ def upload_using_esptool(config, port, file):
        if os.environ.get("ESPHOME_USE_SUBPROCESS") is None:
            import esptool

            return run_external_command(esptool.main, *cmd)  # pylint: disable=no-member
            # pylint: disable=protected-access
            return run_external_command(esptool._main, *cmd)

        return run_external_process(*cmd)

@@ -309,86 +253,22 @@ def upload_using_esptool(config, port, file):
    return run_esptool(115200)


def upload_using_platformio(config, port):
    from esphome import platformio_api

    upload_args = ["-t", "upload", "-t", "nobuild"]
    if port is not None:
        upload_args += ["--upload-port", port]
    return platformio_api.run_platformio_cli_run(config, CORE.verbose, *upload_args)


def check_permissions(port):
    if os.name == "posix" and get_port_type(port) == "SERIAL":
        # Check if we can open selected serial port
        if not os.access(port, os.F_OK):
            raise EsphomeError(
                "The selected serial port does not exist. To resolve this issue, "
                "check that the device is connected to this computer with a USB cable and that "
                "the USB cable can be used for data and is not a power-only cable."
            )
        if not (os.access(port, os.R_OK | os.W_OK)):
            raise EsphomeError(
                "You do not have read or write permission on the selected serial port. "
                "To resolve this issue, you can add your user to the dialout group "
                f"by running the following command: sudo usermod -a -G dialout {os.getlogin()}. "
                "You will need to log out & back in or reboot to activate the new group access."
            )


def upload_program(config, args, host):
    try:
        module = importlib.import_module("esphome.components." + CORE.target_platform)
        if getattr(module, "upload_program")(config, args, host):
            return 0
    except AttributeError:
        pass

    # if upload is to a serial port use platformio, otherwise assume ota
    if get_port_type(host) == "SERIAL":
        check_permissions(host)
        if CORE.target_platform in (PLATFORM_ESP32, PLATFORM_ESP8266):
            file = getattr(args, "file", None)
            return upload_using_esptool(config, host, file)

        if CORE.target_platform in (PLATFORM_RP2040):
            return upload_using_platformio(config, args.device)

        if CORE.target_platform in (PLATFORM_BK72XX, PLATFORM_RTL87XX):
            return upload_using_platformio(config, host)

        return 1  # Unknown target platform

    ota_conf = {}
    for ota_item in config.get(CONF_OTA, []):
        if ota_item[CONF_PLATFORM] == CONF_ESPHOME:
            ota_conf = ota_item
            break

    if not ota_conf:
        raise EsphomeError(
            f"Cannot upload Over the Air as the {CONF_OTA} configuration is not present or does not include {CONF_PLATFORM}: {CONF_ESPHOME}"
        )
        return upload_using_esptool(config, host)

    from esphome import espota2

    remote_port = int(ota_conf[CONF_PORT])
    password = ota_conf.get(CONF_PASSWORD, "")

    if (
        not is_ip_address(CORE.address)  # pylint: disable=too-many-boolean-expressions
        and (get_port_type(host) == "MQTT" or config[CONF_MDNS][CONF_DISABLED])
        and CONF_MQTT in config
        and (not args.device or args.device in ("MQTT", "OTA"))
    ):
        from esphome import mqtt

        host = mqtt.get_esphome_device_ip(
            config, args.username, args.password, args.client_id
    if CONF_OTA not in config:
        raise EsphomeError(
            "Cannot upload Over the Air as the config does not include the ota: "
            "component"
        )

    if getattr(args, "file", None) is not None:
        return espota2.run_ota(host, remote_port, password, args.file)

    ota_conf = config[CONF_OTA]
    remote_port = ota_conf[CONF_PORT]
    password = ota_conf.get(CONF_PASSWORD, "")
    return espota2.run_ota(host, remote_port, password, CORE.firmware_bin)

@@ -396,16 +276,9 @@ def show_logs(config, args, port):
    if "logger" not in config:
        raise EsphomeError("Logger is not configured!")
    if get_port_type(port) == "SERIAL":
        check_permissions(port)
        return run_miniterm(config, port)
        run_miniterm(config, port)
        return 0
    if get_port_type(port) == "NETWORK" and "api" in config:
        if config[CONF_MDNS][CONF_DISABLED] and CONF_MQTT in config:
            from esphome import mqtt

            port = mqtt.get_esphome_device_ip(
                config, args.username, args.password, args.client_id
            )[0]

        from esphome.components.api.client import run_logs

        return run_logs(config, port)
@@ -434,17 +307,10 @@ def command_wizard(args):


def command_config(args, config):
    _LOGGER.info("Configuration is valid!")
    if not CORE.verbose:
        config = strip_default_ids(config)
    output = yaml_util.dump(config, args.show_secrets)
    # add the console decoration so the front-end can hide the secrets
    if not args.show_secrets:
        output = re.sub(
            r"(password|key|psk|ssid)\: (.+)", r"\1: \\033[5m\2\\033[6m", output
        )
    if not CORE.quiet:
        safe_print(output)
    _LOGGER.info("Configuration is valid!")
    safe_print(yaml_util.dump(config))
    return 0


@@ -477,7 +343,6 @@ def command_upload(args, config):
        show_ota=True,
        show_mqtt=False,
        show_api=False,
        purpose="uploading",
    )
    exit_code = upload_program(config, args, port)
    if exit_code != 0:
@@ -486,15 +351,6 @@ def command_upload(args, config):
    return 0


def command_discover(args, config):
    if "mqtt" in config:
        from esphome import mqtt

        return mqtt.show_discover(config, args.username, args.password, args.client_id)

    raise EsphomeError("No discover method configured (mqtt)")


def command_logs(args, config):
    port = choose_upload_log_host(
        default=args.device,
@@ -502,7 +358,6 @@ def command_logs(args, config):
        show_ota=False,
        show_mqtt=True,
        show_api=True,
        purpose="logging",
    )
    return show_logs(config, args, port)

@@ -515,22 +370,12 @@ def command_run(args, config):
    if exit_code != 0:
        return exit_code
    _LOGGER.info("Successfully compiled program.")
    if CORE.is_host:
        from esphome.platformio_api import get_idedata

        idedata = get_idedata(config)
        if idedata is None:
            return 1
        program_path = idedata.raw["prog_path"]
        return run_external_process(program_path)

    port = choose_upload_log_host(
        default=args.device,
        check_default=None,
        show_ota=True,
        show_mqtt=False,
        show_api=True,
        purpose="uploading",
    )
    exit_code = upload_program(config, args, port)
    if exit_code != 0:
@@ -544,7 +389,6 @@ def command_run(args, config):
        show_ota=False,
        show_mqtt=True,
        show_api=True,
        purpose="logging",
    )
    return show_logs(config, args, port)

@@ -577,7 +421,7 @@ def command_clean(args, config):
def command_dashboard(args):
    from esphome.dashboard import dashboard

    return dashboard.start_dashboard(args)
    return dashboard.start_web_server(args)


def command_update_all(args):
@@ -623,9 +467,8 @@ def command_update_all(args):


def command_idedata(args, config):
    import json

    from esphome import platformio_api
    import json

    logging.disable(logging.INFO)
    logging.disable(logging.WARNING)
@@ -638,99 +481,6 @@ def command_idedata(args, config):
    return 0


def command_rename(args, config):
    for c in args.name:
        if c not in ALLOWED_NAME_CHARS:
            print(
                color(
                    Fore.BOLD_RED,
                    f"'{c}' is an invalid character for names. Valid characters are: "
                    f"{ALLOWED_NAME_CHARS} (lowercase, no spaces)",
                )
            )
            return 1
    # Load existing yaml file
    with open(CORE.config_path, mode="r+", encoding="utf-8") as raw_file:
        raw_contents = raw_file.read()

    yaml = yaml_util.load_yaml(CORE.config_path)
    if CONF_ESPHOME not in yaml or CONF_NAME not in yaml[CONF_ESPHOME]:
        print(
            color(Fore.BOLD_RED, "Complex YAML files cannot be automatically renamed.")
        )
        return 1
    old_name = yaml[CONF_ESPHOME][CONF_NAME]
    match = re.match(r"^\$\{?([a-zA-Z0-9_]+)\}?$", old_name)
    if match is None:
        new_raw = re.sub(
            rf"name:\s+[\"']?{old_name}[\"']?",
            f'name: "{args.name}"',
            raw_contents,
        )
    else:
        old_name = yaml[CONF_SUBSTITUTIONS][match.group(1)]
        if (
            len(
                re.findall(
                    rf"^\s+{match.group(1)}:\s+[\"']?{old_name}[\"']?",
                    raw_contents,
                    flags=re.MULTILINE,
                )
            )
            > 1
        ):
            print(color(Fore.BOLD_RED, "Too many matches in YAML to safely rename"))
            return 1

        new_raw = re.sub(
            rf"^(\s+{match.group(1)}):\s+[\"']?{old_name}[\"']?",
            f'\\1: "{args.name}"',
            raw_contents,
            flags=re.MULTILINE,
        )

    new_path = os.path.join(CORE.config_dir, args.name + ".yaml")
    print(
        f"Updating {color(Fore.CYAN, CORE.config_path)} to {color(Fore.CYAN, new_path)}"
    )
    print()

    with open(new_path, mode="w", encoding="utf-8") as new_file:
        new_file.write(new_raw)

    rc = run_external_process("esphome", "config", new_path)
    if rc != 0:
        print(color(Fore.BOLD_RED, "Rename failed. Reverting changes."))
        os.remove(new_path)
        return 1

    cli_args = [
        "run",
        new_path,
        "--no-logs",
        "--device",
        CORE.address,
    ]

    if args.dashboard:
        cli_args.insert(0, "--dashboard")

    try:
        rc = run_external_process("esphome", *cli_args)
    except KeyboardInterrupt:
        rc = 1
    if rc != 0:
        os.remove(new_path)
        return 1

    if CORE.config_path != new_path:
        os.remove(CORE.config_path)

    print(color(Fore.BOLD_GREEN, "SUCCESS"))
    print()
    return 0


PRE_CONFIG_ACTIONS = {
    "wizard": command_wizard,
    "version": command_version,
@@ -749,31 +499,17 @@ POST_CONFIG_ACTIONS = {
    "mqtt-fingerprint": command_mqtt_fingerprint,
    "clean": command_clean,
    "idedata": command_idedata,
    "rename": command_rename,
    "discover": command_discover,
}


def parse_args(argv):
    options_parser = argparse.ArgumentParser(add_help=False)
    options_parser.add_argument(
        "-v",
        "--verbose",
        help="Enable verbose ESPHome logs.",
        action="store_true",
        default=get_bool_env("ESPHOME_VERBOSE"),
        "-v", "--verbose", help="Enable verbose ESPHome logs.", action="store_true"
    )
    options_parser.add_argument(
        "-q", "--quiet", help="Disable all ESPHome logs.", action="store_true"
    )
    options_parser.add_argument(
        "-l",
        "--log-level",
        help="Set the log level.",
        default=os.getenv("ESPHOME_LOG_LEVEL", "INFO"),
        action="store",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
    )
    options_parser.add_argument(
        "--dashboard", help=argparse.SUPPRESS, action="store_true"
    )
@@ -787,14 +523,7 @@ def parse_args(argv):
    )

    parser = argparse.ArgumentParser(
        description=f"ESPHome {const.__version__}", parents=[options_parser]
    )

    parser.add_argument(
        "--version",
        action="version",
        version=f"Version: {const.__version__}",
        help="Print the ESPHome version and exit.",
        description=f"ESPHome v{const.__version__}", parents=[options_parser]
    )

    mqtt_options = argparse.ArgumentParser(add_help=False)
@@ -814,9 +543,6 @@ def parse_args(argv):
    parser_config.add_argument(
        "configuration", help="Your YAML configuration file(s).", nargs="+"
    )
    parser_config.add_argument(
        "--show-secrets", help="Show secrets in output.", action="store_true"
    )

    parser_compile = subparsers.add_parser(
        "compile", help="Read the configuration and compile a program."
@@ -831,9 +557,7 @@ def parse_args(argv):
    )

    parser_upload = subparsers.add_parser(
        "upload",
        help="Validate the configuration and upload the latest binary.",
        parents=[mqtt_options],
        "upload", help="Validate the configuration and upload the latest binary."
    )
    parser_upload.add_argument(
        "configuration", help="Your YAML configuration file(s).", nargs="+"
@@ -842,15 +566,10 @@ def parse_args(argv):
        "--device",
        help="Manually specify the serial port/address to use, for example /dev/ttyUSB0.",
    )
    parser_upload.add_argument(
        "--file",
        help="Manually specify the binary file to upload.",
    )

    parser_logs = subparsers.add_parser(
        "logs",
        help="Validate the configuration and show all logs.",
        aliases=["log"],
        parents=[mqtt_options],
    )
    parser_logs.add_argument(
@@ -861,15 +580,6 @@ def parse_args(argv):
        help="Manually specify the serial port/address to use, for example /dev/ttyUSB0.",
    )

    parser_discover = subparsers.add_parser(
        "discover",
        help="Validate the configuration and show all discovered devices.",
        parents=[mqtt_options],
    )
    parser_discover.add_argument(
        "configuration", help="Your YAML configuration file.", nargs=1
    )

    parser_run = subparsers.add_parser(
        "run",
        help="Validate the configuration, create a binary, upload it, and start logs.",
@@ -951,7 +661,7 @@ def parse_args(argv):
        "--open-ui", help="Open the dashboard UI in a browser.", action="store_true"
    )
    parser_dashboard.add_argument(
        "--ha-addon", help=argparse.SUPPRESS, action="store_true"
        "--hassio", help=argparse.SUPPRESS, action="store_true"
    )
    parser_dashboard.add_argument(
        "--socket", help="Make the dashboard serve under a unix socket", type=str
@@ -971,15 +681,6 @@ def parse_args(argv):
        "configuration", help="Your YAML configuration file(s).", nargs=1
    )

    parser_rename = subparsers.add_parser(
        "rename",
        help="Rename a device in YAML, compile the binary and upload it.",
    )
    parser_rename.add_argument(
        "configuration", help="Your YAML configuration file.", nargs=1
    )
    parser_rename.add_argument("name", help="The new name for the device.", type=str)

    # Keep backward compatibility with the old command line format of
    # esphome <config> <command>.
    #
@@ -995,7 +696,67 @@ def parse_args(argv):
    # a deprecation warning).
    arguments = argv[1:]

    argcomplete.autocomplete(parser)
    # On Python 3.9+ we can simply set exit_on_error=False in the constructor
 | 
			
		||||
    def _raise(x):
 | 
			
		||||
        raise argparse.ArgumentError(None, x)
 | 
			
		||||
 | 
			
		||||
    # First, try new-style parsing, but don't exit in case of failure
 | 
			
		||||
    try:
 | 
			
		||||
        # duplicate parser so that we can use the original one to raise errors later on
 | 
			
		||||
        current_parser = argparse.ArgumentParser(add_help=False, parents=[parser])
 | 
			
		||||
        current_parser.set_defaults(deprecated_argv_suggestion=None)
 | 
			
		||||
        current_parser.error = _raise
 | 
			
		||||
        return current_parser.parse_args(arguments)
 | 
			
		||||
    except argparse.ArgumentError:
 | 
			
		||||
        pass
 | 
			
		||||
 | 
			
		||||
    # Second, try compat parsing and rearrange the command-line if it succeeds
 | 
			
		||||
    # Disable argparse's built-in help option and add it manually to prevent this
 | 
			
		||||
    # parser from printing the help messagefor the old format when invoked with -h.
 | 
			
		||||
    compat_parser = argparse.ArgumentParser(parents=[options_parser], add_help=False)
 | 
			
		||||
    compat_parser.add_argument("-h", "--help", action="store_true")
 | 
			
		||||
    compat_parser.add_argument("configuration", nargs="*")
 | 
			
		||||
    compat_parser.add_argument(
 | 
			
		||||
        "command",
 | 
			
		||||
        choices=[
 | 
			
		||||
            "config",
 | 
			
		||||
            "compile",
 | 
			
		||||
            "upload",
 | 
			
		||||
            "logs",
 | 
			
		||||
            "run",
 | 
			
		||||
            "clean-mqtt",
 | 
			
		||||
            "wizard",
 | 
			
		||||
            "mqtt-fingerprint",
 | 
			
		||||
            "version",
 | 
			
		||||
            "clean",
 | 
			
		||||
            "dashboard",
 | 
			
		||||
            "vscode",
 | 
			
		||||
            "update-all",
 | 
			
		||||
        ],
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        compat_parser.error = _raise
 | 
			
		||||
        result, unparsed = compat_parser.parse_known_args(argv[1:])
 | 
			
		||||
        last_option = len(arguments) - len(unparsed) - 1 - len(result.configuration)
 | 
			
		||||
        unparsed = [
 | 
			
		||||
            "--device" if arg in ("--upload-port", "--serial-port") else arg
 | 
			
		||||
            for arg in unparsed
 | 
			
		||||
        ]
 | 
			
		||||
        arguments = (
 | 
			
		||||
            arguments[0:last_option]
 | 
			
		||||
            + [result.command]
 | 
			
		||||
            + result.configuration
 | 
			
		||||
            + unparsed
 | 
			
		||||
        )
 | 
			
		||||
        deprecated_argv_suggestion = arguments
 | 
			
		||||
    except argparse.ArgumentError:
 | 
			
		||||
        # old-style parsing failed, don't suggest any argument
 | 
			
		||||
        deprecated_argv_suggestion = None
 | 
			
		||||
 | 
			
		||||
    # Finally, run the new-style parser again with the possibly swapped arguments,
 | 
			
		||||
    # and let it error out if the command is unparsable.
 | 
			
		||||
    parser.set_defaults(deprecated_argv_suggestion=deprecated_argv_suggestion)
 | 
			
		||||
    return parser.parse_args(arguments)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@@ -1003,17 +764,26 @@ def run_esphome(argv):
 | 
			
		||||
    args = parse_args(argv)
 | 
			
		||||
    CORE.dashboard = args.dashboard
 | 
			
		||||
 | 
			
		||||
    # Override log level if verbose is set
 | 
			
		||||
    if args.verbose:
 | 
			
		||||
        args.log_level = "DEBUG"
 | 
			
		||||
    elif args.quiet:
 | 
			
		||||
        args.log_level = "CRITICAL"
 | 
			
		||||
 | 
			
		||||
    setup_log(
 | 
			
		||||
        log_level=args.log_level,
 | 
			
		||||
        args.verbose,
 | 
			
		||||
        args.quiet,
 | 
			
		||||
        # Show timestamp for dashboard access logs
 | 
			
		||||
        include_timestamp=args.command == "dashboard",
 | 
			
		||||
        args.command == "dashboard",
 | 
			
		||||
    )
 | 
			
		||||
    if args.deprecated_argv_suggestion is not None and args.command != "vscode":
 | 
			
		||||
        _LOGGER.warning(
 | 
			
		||||
            "Calling ESPHome with the configuration before the command is deprecated "
 | 
			
		||||
            "and will be removed in the future. "
 | 
			
		||||
        )
 | 
			
		||||
        _LOGGER.warning("Please instead use:")
 | 
			
		||||
        _LOGGER.warning("   esphome %s", " ".join(args.deprecated_argv_suggestion))
 | 
			
		||||
 | 
			
		||||
    if sys.version_info < (3, 7, 0):
 | 
			
		||||
        _LOGGER.error(
 | 
			
		||||
            "You're running ESPHome with Python <3.7. ESPHome is no longer compatible "
 | 
			
		||||
            "with this Python version. Please reinstall ESPHome with Python 3.7+"
 | 
			
		||||
        )
 | 
			
		||||
        return 1
 | 
			
		||||
 | 
			
		||||
    if args.command in PRE_CONFIG_ACTIONS:
 | 
			
		||||
        try:
 | 
			
		||||
@@ -1022,8 +792,6 @@ def run_esphome(argv):
 | 
			
		||||
            _LOGGER.error(e, exc_info=args.verbose)
 | 
			
		||||
            return 1
 | 
			
		||||
 | 
			
		||||
    _LOGGER.info("ESPHome %s", const.__version__)
 | 
			
		||||
 | 
			
		||||
    for conf_path in args.configuration:
 | 
			
		||||
        if any(os.path.basename(conf_path) == x for x in SECRETS_FILES):
 | 
			
		||||
            _LOGGER.warning("Skipping secrets file %s", conf_path)
 | 
			
		||||
 
 | 
			
		||||
@@ -1,45 +1,33 @@
 | 
			
		||||
import esphome.codegen as cg
 | 
			
		||||
import esphome.config_validation as cv
 | 
			
		||||
from esphome.const import (
 | 
			
		||||
    CONF_ALL,
 | 
			
		||||
    CONF_ANY,
 | 
			
		||||
    CONF_AUTOMATION_ID,
 | 
			
		||||
    CONF_CONDITION,
 | 
			
		||||
    CONF_COUNT,
 | 
			
		||||
    CONF_ELSE,
 | 
			
		||||
    CONF_ID,
 | 
			
		||||
    CONF_THEN,
 | 
			
		||||
    CONF_TIME,
 | 
			
		||||
    CONF_TIMEOUT,
 | 
			
		||||
    CONF_TRIGGER_ID,
 | 
			
		||||
    CONF_TYPE_ID,
 | 
			
		||||
    CONF_UPDATE_INTERVAL,
 | 
			
		||||
    CONF_TIME,
 | 
			
		||||
)
 | 
			
		||||
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
 | 
			
		||||
from esphome.jsonschema import jschema_extractor
 | 
			
		||||
from esphome.util import Registry
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def maybe_simple_id(*validators):
 | 
			
		||||
    """Allow a raw ID to be specified in place of a config block.
 | 
			
		||||
    If the value that's being validated is a dictionary, it's passed as-is to the specified validators. Otherwise, it's
 | 
			
		||||
    wrapped in a dict that looks like ``{"id": <value>}``, and that dict is then handed off to the specified validators.
 | 
			
		||||
    """
 | 
			
		||||
    return maybe_conf(CONF_ID, *validators)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def maybe_conf(conf, *validators):
 | 
			
		||||
    """Allow a raw value to be specified in place of a config block.
 | 
			
		||||
    If the value that's being validated is a dictionary, it's passed as-is to the specified validators. Otherwise, it's
 | 
			
		||||
    wrapped in a dict that looks like ``{<conf>: <value>}``, and that dict is then handed off to the specified
 | 
			
		||||
    validators.
 | 
			
		||||
    (This is a general case of ``maybe_simple_id`` that allows the wrapping key to be something other than ``id``.)
 | 
			
		||||
    """
 | 
			
		||||
    validator = cv.All(*validators)
 | 
			
		||||
 | 
			
		||||
    @schema_extractor("maybe")
 | 
			
		||||
    @jschema_extractor("maybe")
 | 
			
		||||
    def validate(value):
 | 
			
		||||
        if value == SCHEMA_EXTRACT:
 | 
			
		||||
            return (validator, conf)
 | 
			
		||||
        # pylint: disable=comparison-with-callable
 | 
			
		||||
        if value == jschema_extractor:
 | 
			
		||||
            return validator
 | 
			
		||||
 | 
			
		||||
        if isinstance(value, dict):
 | 
			
		||||
            return validator(value)
 | 
			
		||||
@@ -75,13 +63,6 @@ def validate_potentially_and_condition(value):
 | 
			
		||||
    return validate_condition(value)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def validate_potentially_or_condition(value):
 | 
			
		||||
    if isinstance(value, list):
 | 
			
		||||
        with cv.remove_prepend_path(["or"]):
 | 
			
		||||
            return validate_condition({"or": value})
 | 
			
		||||
    return validate_condition(value)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
DelayAction = cg.esphome_ns.class_("DelayAction", Action, cg.Component)
 | 
			
		||||
LambdaAction = cg.esphome_ns.class_("LambdaAction", Action)
 | 
			
		||||
IfAction = cg.esphome_ns.class_("IfAction", Action)
 | 
			
		||||
@@ -89,8 +70,6 @@ WhileAction = cg.esphome_ns.class_("WhileAction", Action)
 | 
			
		||||
RepeatAction = cg.esphome_ns.class_("RepeatAction", Action)
 | 
			
		||||
WaitUntilAction = cg.esphome_ns.class_("WaitUntilAction", Action, cg.Component)
 | 
			
		||||
UpdateComponentAction = cg.esphome_ns.class_("UpdateComponentAction", Action)
 | 
			
		||||
SuspendComponentAction = cg.esphome_ns.class_("SuspendComponentAction", Action)
 | 
			
		||||
ResumeComponentAction = cg.esphome_ns.class_("ResumeComponentAction", Action)
 | 
			
		||||
Automation = cg.esphome_ns.class_("Automation")
 | 
			
		||||
 | 
			
		||||
LambdaCondition = cg.esphome_ns.class_("LambdaCondition", Condition)
 | 
			
		||||
@@ -132,9 +111,11 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
 | 
			
		||||
        # This should only happen with invalid configs, but let's have a nice error message.
 | 
			
		||||
        return [schema(value)]
 | 
			
		||||
 | 
			
		||||
    @schema_extractor("automation")
 | 
			
		||||
    @jschema_extractor("automation")
 | 
			
		||||
    def validator(value):
 | 
			
		||||
        if value == SCHEMA_EXTRACT:
 | 
			
		||||
        # hack to get the schema
 | 
			
		||||
        # pylint: disable=comparison-with-callable
 | 
			
		||||
        if value == jschema_extractor:
 | 
			
		||||
            return schema
 | 
			
		||||
 | 
			
		||||
        value = validator_(value)
 | 
			
		||||
@@ -160,7 +141,6 @@ AUTOMATION_SCHEMA = cv.Schema(
 | 
			
		||||
AndCondition = cg.esphome_ns.class_("AndCondition", Condition)
 | 
			
		||||
OrCondition = cg.esphome_ns.class_("OrCondition", Condition)
 | 
			
		||||
NotCondition = cg.esphome_ns.class_("NotCondition", Condition)
 | 
			
		||||
XorCondition = cg.esphome_ns.class_("XorCondition", Condition)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_condition("and", AndCondition, validate_condition_list)
 | 
			
		||||
@@ -175,30 +155,12 @@ async def or_condition_to_code(config, condition_id, template_arg, args):
 | 
			
		||||
    return cg.new_Pvariable(condition_id, template_arg, conditions)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_condition("all", AndCondition, validate_condition_list)
 | 
			
		||||
async def all_condition_to_code(config, condition_id, template_arg, args):
 | 
			
		||||
    conditions = await build_condition_list(config, template_arg, args)
 | 
			
		||||
    return cg.new_Pvariable(condition_id, template_arg, conditions)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_condition("any", OrCondition, validate_condition_list)
 | 
			
		||||
async def any_condition_to_code(config, condition_id, template_arg, args):
 | 
			
		||||
    conditions = await build_condition_list(config, template_arg, args)
 | 
			
		||||
    return cg.new_Pvariable(condition_id, template_arg, conditions)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_condition("not", NotCondition, validate_potentially_and_condition)
 | 
			
		||||
async def not_condition_to_code(config, condition_id, template_arg, args):
 | 
			
		||||
    condition = await build_condition(config, template_arg, args)
 | 
			
		||||
    return cg.new_Pvariable(condition_id, template_arg, condition)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_condition("xor", XorCondition, validate_condition_list)
 | 
			
		||||
async def xor_condition_to_code(config, condition_id, template_arg, args):
 | 
			
		||||
    conditions = await build_condition_list(config, template_arg, args)
 | 
			
		||||
    return cg.new_Pvariable(condition_id, template_arg, conditions)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_condition("lambda", LambdaCondition, cv.returning_lambda)
 | 
			
		||||
async def lambda_condition_to_code(config, condition_id, template_arg, args):
 | 
			
		||||
    lambda_ = await cg.process_lambda(config, args, return_type=bool)
 | 
			
		||||
@@ -244,21 +206,15 @@ async def delay_action_to_code(config, action_id, template_arg, args):
 | 
			
		||||
    IfAction,
 | 
			
		||||
    cv.All(
 | 
			
		||||
        {
 | 
			
		||||
            cv.Exclusive(
 | 
			
		||||
                CONF_CONDITION, CONF_CONDITION
 | 
			
		||||
            ): validate_potentially_and_condition,
 | 
			
		||||
            cv.Exclusive(CONF_ANY, CONF_CONDITION): validate_potentially_or_condition,
 | 
			
		||||
            cv.Exclusive(CONF_ALL, CONF_CONDITION): validate_potentially_and_condition,
 | 
			
		||||
            cv.Required(CONF_CONDITION): validate_potentially_and_condition,
 | 
			
		||||
            cv.Optional(CONF_THEN): validate_action_list,
 | 
			
		||||
            cv.Optional(CONF_ELSE): validate_action_list,
 | 
			
		||||
        },
 | 
			
		||||
        cv.has_at_least_one_key(CONF_THEN, CONF_ELSE),
 | 
			
		||||
        cv.has_at_least_one_key(CONF_CONDITION, CONF_ANY, CONF_ALL),
 | 
			
		||||
    ),
 | 
			
		||||
)
 | 
			
		||||
async def if_action_to_code(config, action_id, template_arg, args):
 | 
			
		||||
    cond_conf = next(el for el in config if el in (CONF_ANY, CONF_ALL, CONF_CONDITION))
 | 
			
		||||
    conditions = await build_condition(config[cond_conf], template_arg, args)
 | 
			
		||||
    conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
 | 
			
		||||
    var = cg.new_Pvariable(action_id, template_arg, conditions)
 | 
			
		||||
    if CONF_THEN in config:
 | 
			
		||||
        actions = await build_action_list(config[CONF_THEN], template_arg, args)
 | 
			
		||||
@@ -301,25 +257,26 @@ async def repeat_action_to_code(config, action_id, template_arg, args):
 | 
			
		||||
    var = cg.new_Pvariable(action_id, template_arg)
 | 
			
		||||
    count_template = await cg.templatable(config[CONF_COUNT], args, cg.uint32)
 | 
			
		||||
    cg.add(var.set_count(count_template))
 | 
			
		||||
    actions = await build_action_list(
 | 
			
		||||
        config[CONF_THEN],
 | 
			
		||||
        cg.TemplateArguments(cg.uint32, *template_arg.args),
 | 
			
		||||
        [(cg.uint32, "iteration"), *args],
 | 
			
		||||
    )
 | 
			
		||||
    actions = await build_action_list(config[CONF_THEN], template_arg, args)
 | 
			
		||||
    cg.add(var.add_then(actions))
 | 
			
		||||
    return var
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_validate_wait_until = cv.maybe_simple_value(
 | 
			
		||||
    {
 | 
			
		||||
        cv.Required(CONF_CONDITION): validate_potentially_and_condition,
 | 
			
		||||
        cv.Optional(CONF_TIMEOUT): cv.templatable(cv.positive_time_period_milliseconds),
 | 
			
		||||
    },
 | 
			
		||||
    key=CONF_CONDITION,
 | 
			
		||||
)
 | 
			
		||||
def validate_wait_until(value):
 | 
			
		||||
    schema = cv.Schema(
 | 
			
		||||
        {
 | 
			
		||||
            cv.Required(CONF_CONDITION): validate_potentially_and_condition,
 | 
			
		||||
            cv.Optional(CONF_TIMEOUT): cv.templatable(
 | 
			
		||||
                cv.positive_time_period_milliseconds
 | 
			
		||||
            ),
 | 
			
		||||
        }
 | 
			
		||||
    )
 | 
			
		||||
    if isinstance(value, dict) and CONF_CONDITION in value:
 | 
			
		||||
        return schema(value)
 | 
			
		||||
    return validate_wait_until({CONF_CONDITION: value})
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_action("wait_until", WaitUntilAction, _validate_wait_until)
 | 
			
		||||
@register_action("wait_until", WaitUntilAction, validate_wait_until)
 | 
			
		||||
async def wait_until_action_to_code(config, action_id, template_arg, args):
 | 
			
		||||
    conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
 | 
			
		||||
    var = cg.new_Pvariable(action_id, template_arg, conditions)
 | 
			
		||||
@@ -350,41 +307,6 @@ async def component_update_action_to_code(config, action_id, template_arg, args)
 | 
			
		||||
    return cg.new_Pvariable(action_id, template_arg, comp)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_action(
 | 
			
		||||
    "component.suspend",
 | 
			
		||||
    SuspendComponentAction,
 | 
			
		||||
    maybe_simple_id(
 | 
			
		||||
        {
 | 
			
		||||
            cv.Required(CONF_ID): cv.use_id(cg.PollingComponent),
 | 
			
		||||
        }
 | 
			
		||||
    ),
 | 
			
		||||
)
 | 
			
		||||
async def component_suspend_action_to_code(config, action_id, template_arg, args):
 | 
			
		||||
    comp = await cg.get_variable(config[CONF_ID])
 | 
			
		||||
    return cg.new_Pvariable(action_id, template_arg, comp)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@register_action(
 | 
			
		||||
    "component.resume",
 | 
			
		||||
    ResumeComponentAction,
 | 
			
		||||
    maybe_simple_id(
 | 
			
		||||
        {
 | 
			
		||||
            cv.Required(CONF_ID): cv.use_id(cg.PollingComponent),
 | 
			
		||||
            cv.Optional(CONF_UPDATE_INTERVAL): cv.templatable(
 | 
			
		||||
                cv.positive_time_period_milliseconds
 | 
			
		||||
            ),
 | 
			
		||||
        }
 | 
			
		||||
    ),
 | 
			
		||||
)
 | 
			
		||||
async def component_resume_action_to_code(config, action_id, template_arg, args):
 | 
			
		||||
    comp = await cg.get_variable(config[CONF_ID])
 | 
			
		||||
    var = cg.new_Pvariable(action_id, template_arg, comp)
 | 
			
		||||
    if CONF_UPDATE_INTERVAL in config:
 | 
			
		||||
        template_ = await cg.templatable(config[CONF_UPDATE_INTERVAL], args, int)
 | 
			
		||||
        cg.add(var.set_update_interval(template_))
 | 
			
		||||
    return var
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
async def build_action(full_config, template_arg, args):
 | 
			
		||||
    registry_entry, config = cg.extract_registry_entry_config(
 | 
			
		||||
        ACTION_REGISTRY, full_config
 | 
			
		||||
 
 | 
			
		||||
@@ -8,86 +8,78 @@
 | 
			
		||||
# want to break suddenly due to a rename (this file will get backports for features).
 | 
			
		||||
 | 
			
		||||
# pylint: disable=unused-import
 | 
			
		||||
from esphome.cpp_generator import (  # noqa: F401
 | 
			
		||||
    ArrayInitializer,
 | 
			
		||||
from esphome.cpp_generator import (  # noqa
 | 
			
		||||
    Expression,
 | 
			
		||||
    LineComment,
 | 
			
		||||
    MockObj,
 | 
			
		||||
    MockObjClass,
 | 
			
		||||
    Pvariable,
 | 
			
		||||
    RawExpression,
 | 
			
		||||
    RawStatement,
 | 
			
		||||
    Statement,
 | 
			
		||||
    StructInitializer,
 | 
			
		||||
    TemplateArguments,
 | 
			
		||||
    StructInitializer,
 | 
			
		||||
    ArrayInitializer,
 | 
			
		||||
    safe_exp,
 | 
			
		||||
    Statement,
 | 
			
		||||
    LineComment,
 | 
			
		||||
    progmem_array,
 | 
			
		||||
    static_const_array,
 | 
			
		||||
    statement,
 | 
			
		||||
    variable,
 | 
			
		||||
    new_variable,
 | 
			
		||||
    Pvariable,
 | 
			
		||||
    new_Pvariable,
 | 
			
		||||
    add,
 | 
			
		||||
    add_build_flag,
 | 
			
		||||
    add_define,
 | 
			
		||||
    add_global,
 | 
			
		||||
    add_library,
 | 
			
		||||
    add_build_flag,
 | 
			
		||||
    add_define,
 | 
			
		||||
    add_platformio_option,
 | 
			
		||||
    get_variable,
 | 
			
		||||
    get_variable_with_full_id,
 | 
			
		||||
    is_template,
 | 
			
		||||
    new_Pvariable,
 | 
			
		||||
    new_variable,
 | 
			
		||||
    process_lambda,
 | 
			
		||||
    progmem_array,
 | 
			
		||||
    safe_exp,
 | 
			
		||||
    statement,
 | 
			
		||||
    static_const_array,
 | 
			
		||||
    is_template,
 | 
			
		||||
    templatable,
 | 
			
		||||
    variable,
 | 
			
		||||
    with_local_variable,
 | 
			
		||||
    MockObj,
 | 
			
		||||
    MockObjClass,
 | 
			
		||||
)
 | 
			
		||||
from esphome.cpp_helpers import (  # noqa: F401
 | 
			
		||||
from esphome.cpp_helpers import (  # noqa
 | 
			
		||||
    gpio_pin_expression,
 | 
			
		||||
    register_component,
 | 
			
		||||
    build_registry_entry,
 | 
			
		||||
    build_registry_list,
 | 
			
		||||
    extract_registry_entry_config,
 | 
			
		||||
    gpio_pin_expression,
 | 
			
		||||
    past_safe_mode,
 | 
			
		||||
    register_component,
 | 
			
		||||
    register_parented,
 | 
			
		||||
)
 | 
			
		||||
from esphome.cpp_types import (  # noqa: F401
 | 
			
		||||
    NAN,
 | 
			
		||||
    App,
 | 
			
		||||
    Application,
 | 
			
		||||
    Component,
 | 
			
		||||
    ComponentPtr,
 | 
			
		||||
    Controller,
 | 
			
		||||
    EntityBase,
 | 
			
		||||
    EntityCategory,
 | 
			
		||||
    ESPTime,
 | 
			
		||||
    GPIOPin,
 | 
			
		||||
    InternalGPIOPin,
 | 
			
		||||
    JsonObject,
 | 
			
		||||
    JsonObjectConst,
 | 
			
		||||
    Parented,
 | 
			
		||||
    PollingComponent,
 | 
			
		||||
    arduino_json_ns,
 | 
			
		||||
    bool_,
 | 
			
		||||
    const_char_ptr,
 | 
			
		||||
    double,
 | 
			
		||||
    esphome_ns,
 | 
			
		||||
    float_,
 | 
			
		||||
from esphome.cpp_types import (  # noqa
 | 
			
		||||
    global_ns,
 | 
			
		||||
    gpio_Flags,
 | 
			
		||||
    int16,
 | 
			
		||||
    int32,
 | 
			
		||||
    int64,
 | 
			
		||||
    int_,
 | 
			
		||||
    void,
 | 
			
		||||
    nullptr,
 | 
			
		||||
    optional,
 | 
			
		||||
    size_t,
 | 
			
		||||
    float_,
 | 
			
		||||
    double,
 | 
			
		||||
    bool_,
 | 
			
		||||
    int_,
 | 
			
		||||
    std_ns,
 | 
			
		||||
    std_shared_ptr,
 | 
			
		||||
    std_string,
 | 
			
		||||
    std_string_ref,
 | 
			
		||||
    std_vector,
 | 
			
		||||
    uint8,
 | 
			
		||||
    uint16,
 | 
			
		||||
    uint32,
 | 
			
		||||
    uint64,
 | 
			
		||||
    void,
 | 
			
		||||
    int32,
 | 
			
		||||
    const_char_ptr,
 | 
			
		||||
    NAN,
 | 
			
		||||
    esphome_ns,
 | 
			
		||||
    App,
 | 
			
		||||
    EntityBase,
 | 
			
		||||
    Component,
 | 
			
		||||
    ComponentPtr,
 | 
			
		||||
    PollingComponent,
 | 
			
		||||
    Application,
 | 
			
		||||
    optional,
 | 
			
		||||
    arduino_json_ns,
 | 
			
		||||
    JsonObject,
 | 
			
		||||
    JsonObjectRef,
 | 
			
		||||
    JsonObjectConstRef,
 | 
			
		||||
    Controller,
 | 
			
		||||
    GPIOPin,
 | 
			
		||||
    InternalGPIOPin,
 | 
			
		||||
    gpio_Flags,
 | 
			
		||||
    EntityCategory,
 | 
			
		||||
)
 | 
			
		||||
 
 | 
			
		||||
@@ -1 +0,0 @@
 | 
			
		||||
CODEOWNERS = ["@MrSuicideParrot"]
 | 
			
		||||
@@ -1,44 +0,0 @@
 | 
			
		||||
// Datasheet https://wiki.dfrobot.com/A01NYUB%20Waterproof%20Ultrasonic%20Sensor%20SKU:%20SEN0313
 | 
			
		||||
 | 
			
		||||
#include "a01nyub.h"
 | 
			
		||||
#include "esphome/core/helpers.h"
 | 
			
		||||
#include "esphome/core/log.h"
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace a01nyub {
 | 
			
		||||
 | 
			
		||||
static const char *const TAG = "a01nyub.sensor";
 | 
			
		||||
 | 
			
		||||
void A01nyubComponent::loop() {
 | 
			
		||||
  uint8_t data;
 | 
			
		||||
  while (this->available() > 0) {
 | 
			
		||||
    this->read_byte(&data);
 | 
			
		||||
    if (this->buffer_.empty() && (data != 0xff))
 | 
			
		||||
      continue;
 | 
			
		||||
    buffer_.push_back(data);
 | 
			
		||||
    if (this->buffer_.size() == 4)
 | 
			
		||||
      this->check_buffer_();
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void A01nyubComponent::check_buffer_() {
 | 
			
		||||
  uint8_t checksum = this->buffer_[0] + this->buffer_[1] + this->buffer_[2];
 | 
			
		||||
  if (this->buffer_[3] == checksum) {
 | 
			
		||||
    float distance = (this->buffer_[1] << 8) + this->buffer_[2];
 | 
			
		||||
    if (distance > 280) {
 | 
			
		||||
      float meters = distance / 1000.0;
 | 
			
		||||
      ESP_LOGV(TAG, "Distance from sensor: %f mm, %f m", distance, meters);
 | 
			
		||||
      this->publish_state(meters);
 | 
			
		||||
    } else {
 | 
			
		||||
      ESP_LOGW(TAG, "Invalid data read from sensor: %s", format_hex_pretty(this->buffer_).c_str());
 | 
			
		||||
    }
 | 
			
		||||
  } else {
 | 
			
		||||
    ESP_LOGW(TAG, "checksum failed: %02x != %02x", checksum, this->buffer_[3]);
 | 
			
		||||
  }
 | 
			
		||||
  this->buffer_.clear();
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void A01nyubComponent::dump_config() { LOG_SENSOR("", "A01nyub Sensor", this); }
 | 
			
		||||
 | 
			
		||||
}  // namespace a01nyub
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
@@ -1,27 +0,0 @@
 | 
			
		||||
#pragma once
 | 
			
		||||
 | 
			
		||||
#include <vector>
 | 
			
		||||
 | 
			
		||||
#include "esphome/core/component.h"
 | 
			
		||||
#include "esphome/components/sensor/sensor.h"
 | 
			
		||||
#include "esphome/components/uart/uart.h"
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace a01nyub {
 | 
			
		||||
 | 
			
		||||
class A01nyubComponent : public sensor::Sensor, public Component, public uart::UARTDevice {
 | 
			
		||||
 public:
 | 
			
		||||
  // Nothing really public.
 | 
			
		||||
 | 
			
		||||
  // ========== INTERNAL METHODS ==========
 | 
			
		||||
  void loop() override;
 | 
			
		||||
  void dump_config() override;
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  void check_buffer_();
 | 
			
		||||
 | 
			
		||||
  std::vector<uint8_t> buffer_;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
}  // namespace a01nyub
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
@@ -1,41 +0,0 @@
 | 
			
		||||
import esphome.codegen as cg
 | 
			
		||||
from esphome.components import sensor, uart
 | 
			
		||||
from esphome.const import (
 | 
			
		||||
    STATE_CLASS_MEASUREMENT,
 | 
			
		||||
    UNIT_METER,
 | 
			
		||||
    ICON_ARROW_EXPAND_VERTICAL,
 | 
			
		||||
    DEVICE_CLASS_DISTANCE,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
CODEOWNERS = ["@MrSuicideParrot"]
 | 
			
		||||
DEPENDENCIES = ["uart"]
 | 
			
		||||
 | 
			
		||||
a01nyub_ns = cg.esphome_ns.namespace("a01nyub")
 | 
			
		||||
A01nyubComponent = a01nyub_ns.class_(
 | 
			
		||||
    "A01nyubComponent", sensor.Sensor, cg.Component, uart.UARTDevice
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
CONFIG_SCHEMA = sensor.sensor_schema(
 | 
			
		||||
    A01nyubComponent,
 | 
			
		||||
    unit_of_measurement=UNIT_METER,
 | 
			
		||||
    icon=ICON_ARROW_EXPAND_VERTICAL,
 | 
			
		||||
    accuracy_decimals=3,
 | 
			
		||||
    state_class=STATE_CLASS_MEASUREMENT,
 | 
			
		||||
    device_class=DEVICE_CLASS_DISTANCE,
 | 
			
		||||
).extend(uart.UART_DEVICE_SCHEMA)
 | 
			
		||||
 | 
			
		||||
FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema(
 | 
			
		||||
    "a01nyub",
 | 
			
		||||
    baud_rate=9600,
 | 
			
		||||
    require_tx=False,
 | 
			
		||||
    require_rx=True,
 | 
			
		||||
    data_bits=8,
 | 
			
		||||
    parity=None,
 | 
			
		||||
    stop_bits=1,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
async def to_code(config):
 | 
			
		||||
    var = await sensor.new_sensor(config)
 | 
			
		||||
    await cg.register_component(var, config)
 | 
			
		||||
    await uart.register_uart_device(var, config)
 | 
			
		||||
@@ -1 +0,0 @@
 | 
			
		||||
CODEOWNERS = ["@TH-Braemer"]
 | 
			
		||||
@@ -1,43 +0,0 @@
 | 
			
		||||
// Datasheet https://wiki.dfrobot.com/_A02YYUW_Waterproof_Ultrasonic_Sensor_SKU_SEN0311
 | 
			
		||||
 | 
			
		||||
#include "a02yyuw.h"
 | 
			
		||||
#include "esphome/core/helpers.h"
 | 
			
		||||
#include "esphome/core/log.h"
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace a02yyuw {
 | 
			
		||||
 | 
			
		||||
static const char *const TAG = "a02yyuw.sensor";
 | 
			
		||||
 | 
			
		||||
void A02yyuwComponent::loop() {
 | 
			
		||||
  uint8_t data;
 | 
			
		||||
  while (this->available() > 0) {
 | 
			
		||||
    this->read_byte(&data);
 | 
			
		||||
    if (this->buffer_.empty() && (data != 0xff))
 | 
			
		||||
      continue;
 | 
			
		||||
    buffer_.push_back(data);
 | 
			
		||||
    if (this->buffer_.size() == 4)
 | 
			
		||||
      this->check_buffer_();
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void A02yyuwComponent::check_buffer_() {
 | 
			
		||||
  uint8_t checksum = this->buffer_[0] + this->buffer_[1] + this->buffer_[2];
 | 
			
		||||
  if (this->buffer_[3] == checksum) {
 | 
			
		||||
    float distance = (this->buffer_[1] << 8) + this->buffer_[2];
 | 
			
		||||
    if (distance > 30) {
 | 
			
		||||
      ESP_LOGV(TAG, "Distance from sensor: %f mm", distance);
 | 
			
		||||
      this->publish_state(distance);
 | 
			
		||||
    } else {
 | 
			
		||||
      ESP_LOGW(TAG, "Invalid data read from sensor: %s", format_hex_pretty(this->buffer_).c_str());
 | 
			
		||||
    }
 | 
			
		||||
  } else {
 | 
			
		||||
    ESP_LOGW(TAG, "checksum failed: %02x != %02x", checksum, this->buffer_[3]);
 | 
			
		||||
  }
 | 
			
		||||
  this->buffer_.clear();
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void A02yyuwComponent::dump_config() { LOG_SENSOR("", "A02yyuw Sensor", this); }
 | 
			
		||||
 | 
			
		||||
}  // namespace a02yyuw
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
@@ -1,27 +0,0 @@
 | 
			
		||||
#pragma once
 | 
			
		||||
 | 
			
		||||
#include <vector>
 | 
			
		||||
 | 
			
		||||
#include "esphome/core/component.h"
 | 
			
		||||
#include "esphome/components/sensor/sensor.h"
 | 
			
		||||
#include "esphome/components/uart/uart.h"
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace a02yyuw {
 | 
			
		||||
 | 
			
		||||
class A02yyuwComponent : public sensor::Sensor, public Component, public uart::UARTDevice {
 | 
			
		||||
 public:
 | 
			
		||||
  // Nothing really public.
 | 
			
		||||
 | 
			
		||||
  // ========== INTERNAL METHODS ==========
 | 
			
		||||
  void loop() override;
 | 
			
		||||
  void dump_config() override;
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  void check_buffer_();
 | 
			
		||||
 | 
			
		||||
  std::vector<uint8_t> buffer_;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
}  // namespace a02yyuw
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
@@ -1,41 +0,0 @@
 | 
			
		||||
import esphome.codegen as cg
 | 
			
		||||
from esphome.components import sensor, uart
 | 
			
		||||
from esphome.const import (
 | 
			
		||||
    STATE_CLASS_MEASUREMENT,
 | 
			
		||||
    ICON_ARROW_EXPAND_VERTICAL,
 | 
			
		||||
    DEVICE_CLASS_DISTANCE,
 | 
			
		||||
    UNIT_MILLIMETER,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
CODEOWNERS = ["@TH-Braemer"]
 | 
			
		||||
DEPENDENCIES = ["uart"]
 | 
			
		||||
 | 
			
		||||
a02yyuw_ns = cg.esphome_ns.namespace("a02yyuw")
 | 
			
		||||
A02yyuwComponent = a02yyuw_ns.class_(
 | 
			
		||||
    "A02yyuwComponent", sensor.Sensor, cg.Component, uart.UARTDevice
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
CONFIG_SCHEMA = sensor.sensor_schema(
 | 
			
		||||
    A02yyuwComponent,
 | 
			
		||||
    unit_of_measurement=UNIT_MILLIMETER,
 | 
			
		||||
    icon=ICON_ARROW_EXPAND_VERTICAL,
 | 
			
		||||
    accuracy_decimals=0,
 | 
			
		||||
    state_class=STATE_CLASS_MEASUREMENT,
 | 
			
		||||
    device_class=DEVICE_CLASS_DISTANCE,
 | 
			
		||||
).extend(uart.UART_DEVICE_SCHEMA)
 | 
			
		||||
 | 
			
		||||
FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema(
 | 
			
		||||
    "a02yyuw",
 | 
			
		||||
    baud_rate=9600,
 | 
			
		||||
    require_tx=False,
 | 
			
		||||
    require_rx=True,
 | 
			
		||||
    data_bits=8,
 | 
			
		||||
    parity=None,
 | 
			
		||||
    stop_bits=1,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
async def to_code(config):
 | 
			
		||||
    var = await sensor.new_sensor(config)
 | 
			
		||||
    await cg.register_component(var, config)
 | 
			
		||||
    await uart.register_uart_device(var, config)
 | 
			
		||||
@@ -46,7 +46,6 @@ void A4988::loop() {
 | 
			
		||||
    return;
 | 
			
		||||
 | 
			
		||||
  this->dir_pin_->digital_write(dir == 1);
 | 
			
		||||
  delayMicroseconds(50);
 | 
			
		||||
  this->step_pin_->digital_write(true);
 | 
			
		||||
  delayMicroseconds(5);
 | 
			
		||||
  this->step_pin_->digital_write(false);
 | 
			
		||||
 
 | 
			
		||||
@@ -28,6 +28,6 @@ async def to_code(config):
 | 
			
		||||
    dir_pin = await cg.gpio_pin_expression(config[CONF_DIR_PIN])
 | 
			
		||||
    cg.add(var.set_dir_pin(dir_pin))
 | 
			
		||||
 | 
			
		||||
    if sleep_pin_config := config.get(CONF_SLEEP_PIN):
 | 
			
		||||
        sleep_pin = await cg.gpio_pin_expression(sleep_pin_config)
 | 
			
		||||
    if CONF_SLEEP_PIN in config:
 | 
			
		||||
        sleep_pin = await cg.gpio_pin_expression(config[CONF_SLEEP_PIN])
 | 
			
		||||
        cg.add(var.set_sleep_pin(sleep_pin))
 | 
			
		||||
 
 | 
			
		||||
@@ -1 +0,0 @@
 | 
			
		||||
CODEOWNERS = ["@DAVe3283"]
 | 
			
		||||
@@ -1,182 +0,0 @@
 | 
			
		||||
#include "esphome/core/log.h"
 | 
			
		||||
#include "absolute_humidity.h"
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace absolute_humidity {
 | 
			
		||||
 | 
			
		||||
static const char *const TAG = "absolute_humidity.sensor";
 | 
			
		||||
 | 
			
		||||
void AbsoluteHumidityComponent::setup() {
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "Setting up absolute humidity '%s'...", this->get_name().c_str());
 | 
			
		||||
 | 
			
		||||
  ESP_LOGD(TAG, "  Added callback for temperature '%s'", this->temperature_sensor_->get_name().c_str());
 | 
			
		||||
  this->temperature_sensor_->add_on_state_callback([this](float state) { this->temperature_callback_(state); });
 | 
			
		||||
  if (this->temperature_sensor_->has_state()) {
 | 
			
		||||
    this->temperature_callback_(this->temperature_sensor_->get_state());
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  ESP_LOGD(TAG, "  Added callback for relative humidity '%s'", this->humidity_sensor_->get_name().c_str());
 | 
			
		||||
  this->humidity_sensor_->add_on_state_callback([this](float state) { this->humidity_callback_(state); });
 | 
			
		||||
  if (this->humidity_sensor_->has_state()) {
 | 
			
		||||
    this->humidity_callback_(this->humidity_sensor_->get_state());
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void AbsoluteHumidityComponent::dump_config() {
 | 
			
		||||
  LOG_SENSOR("", "Absolute Humidity", this);
 | 
			
		||||
 | 
			
		||||
  switch (this->equation_) {
 | 
			
		||||
    case BUCK:
 | 
			
		||||
      ESP_LOGCONFIG(TAG, "Saturation Vapor Pressure Equation: Buck");
 | 
			
		||||
      break;
 | 
			
		||||
    case TETENS:
 | 
			
		||||
      ESP_LOGCONFIG(TAG, "Saturation Vapor Pressure Equation: Tetens");
 | 
			
		||||
      break;
 | 
			
		||||
    case WOBUS:
 | 
			
		||||
      ESP_LOGCONFIG(TAG, "Saturation Vapor Pressure Equation: Wobus");
 | 
			
		||||
      break;
 | 
			
		||||
    default:
 | 
			
		||||
      ESP_LOGE(TAG, "Invalid saturation vapor pressure equation selection!");
 | 
			
		||||
      break;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "Sources");
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Temperature: '%s'", this->temperature_sensor_->get_name().c_str());
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Relative Humidity: '%s'", this->humidity_sensor_->get_name().c_str());
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
float AbsoluteHumidityComponent::get_setup_priority() const { return setup_priority::DATA; }
 | 
			
		||||
 | 
			
		||||
void AbsoluteHumidityComponent::loop() {
 | 
			
		||||
  if (!this->next_update_) {
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
  this->next_update_ = false;
 | 
			
		||||
 | 
			
		||||
  // Ensure we have source data
 | 
			
		||||
  const bool no_temperature = std::isnan(this->temperature_);
 | 
			
		||||
  const bool no_humidity = std::isnan(this->humidity_);
 | 
			
		||||
  if (no_temperature || no_humidity) {
 | 
			
		||||
    if (no_temperature) {
 | 
			
		||||
      ESP_LOGW(TAG, "No valid state from temperature sensor!");
 | 
			
		||||
    }
 | 
			
		||||
    if (no_humidity) {
 | 
			
		||||
      ESP_LOGW(TAG, "No valid state from temperature sensor!");
 | 
			
		||||
    }
 | 
			
		||||
    ESP_LOGW(TAG, "Unable to calculate absolute humidity.");
 | 
			
		||||
    this->publish_state(NAN);
 | 
			
		||||
    this->status_set_warning();
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Convert to desired units
 | 
			
		||||
  const float temperature_c = this->temperature_;
 | 
			
		||||
  const float temperature_k = temperature_c + 273.15;
 | 
			
		||||
  const float hr = this->humidity_ / 100;
 | 
			
		||||
 | 
			
		||||
  // Calculate saturation vapor pressure
 | 
			
		||||
  float es;
 | 
			
		||||
  switch (this->equation_) {
 | 
			
		||||
    case BUCK:
 | 
			
		||||
      es = es_buck(temperature_c);
 | 
			
		||||
      break;
 | 
			
		||||
    case TETENS:
 | 
			
		||||
      es = es_tetens(temperature_c);
 | 
			
		||||
      break;
 | 
			
		||||
    case WOBUS:
 | 
			
		||||
      es = es_wobus(temperature_c);
 | 
			
		||||
      break;
 | 
			
		||||
    default:
 | 
			
		||||
      ESP_LOGE(TAG, "Invalid saturation vapor pressure equation selection!");
 | 
			
		||||
      this->publish_state(NAN);
 | 
			
		||||
      this->status_set_error();
 | 
			
		||||
      return;
 | 
			
		||||
  }
 | 
			
		||||
  ESP_LOGD(TAG, "Saturation vapor pressure %f kPa", es);
 | 
			
		||||
 | 
			
		||||
  // Calculate absolute humidity
 | 
			
		||||
  const float absolute_humidity = vapor_density(es, hr, temperature_k);
 | 
			
		||||
 | 
			
		||||
  // Publish absolute humidity
 | 
			
		||||
  ESP_LOGD(TAG, "Publishing absolute humidity %f g/m³", absolute_humidity);
 | 
			
		||||
  this->status_clear_warning();
 | 
			
		||||
  this->publish_state(absolute_humidity);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Buck equation (https://en.wikipedia.org/wiki/Arden_Buck_equation)
 | 
			
		||||
// More accurate than Tetens in normal meteorologic conditions
 | 
			
		||||
float AbsoluteHumidityComponent::es_buck(float temperature_c) {
 | 
			
		||||
  float a, b, c, d;
 | 
			
		||||
  if (temperature_c >= 0) {
 | 
			
		||||
    a = 0.61121;
 | 
			
		||||
    b = 18.678;
 | 
			
		||||
    c = 234.5;
 | 
			
		||||
    d = 257.14;
 | 
			
		||||
  } else {
 | 
			
		||||
    a = 0.61115;
 | 
			
		||||
    b = 18.678;
 | 
			
		||||
    c = 233.7;
 | 
			
		||||
    d = 279.82;
 | 
			
		||||
  }
 | 
			
		||||
  return a * expf((b - (temperature_c / c)) * (temperature_c / (d + temperature_c)));
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Tetens equation (https://en.wikipedia.org/wiki/Tetens_equation)
 | 
			
		||||
float AbsoluteHumidityComponent::es_tetens(float temperature_c) {
 | 
			
		||||
  float a, b;
 | 
			
		||||
  if (temperature_c >= 0) {
 | 
			
		||||
    a = 17.27;
 | 
			
		||||
    b = 237.3;
 | 
			
		||||
  } else {
 | 
			
		||||
    a = 21.875;
 | 
			
		||||
    b = 265.5;
 | 
			
		||||
  }
 | 
			
		||||
  return 0.61078 * expf((a * temperature_c) / (temperature_c + b));
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Wobus equation
 | 
			
		||||
// https://wahiduddin.net/calc/density_altitude.htm
 | 
			
		||||
// https://wahiduddin.net/calc/density_algorithms.htm
 | 
			
		||||
// Calculate the saturation vapor pressure (kPa)
 | 
			
		||||
float AbsoluteHumidityComponent::es_wobus(float t) {
 | 
			
		||||
  // THIS FUNCTION RETURNS THE SATURATION VAPOR PRESSURE ESW (MILLIBARS)
 | 
			
		||||
  // OVER LIQUID WATER GIVEN THE TEMPERATURE T (CELSIUS). THE POLYNOMIAL
 | 
			
		||||
  // APPROXIMATION BELOW IS DUE TO HERMAN WOBUS, A MATHEMATICIAN WHO
 | 
			
		||||
  // WORKED AT THE NAVY WEATHER RESEARCH FACILITY, NORFOLK, VIRGINIA,
 | 
			
		||||
  // BUT WHO IS NOW RETIRED. THE COEFFICIENTS OF THE POLYNOMIAL WERE
 | 
			
		||||
  // CHOSEN TO FIT THE VALUES IN TABLE 94 ON PP. 351-353 OF THE SMITH-
 | 
			
		||||
  // SONIAN METEOROLOGICAL TABLES BY ROLAND LIST (6TH EDITION). THE
 | 
			
		||||
  // APPROXIMATION IS VALID FOR -50 < T < 100C.
 | 
			
		||||
  //
 | 
			
		||||
  //     Baker, Schlatter  17-MAY-1982     Original version.
 | 
			
		||||
 | 
			
		||||
  const float c0 = +0.99999683e00;
 | 
			
		||||
  const float c1 = -0.90826951e-02;
 | 
			
		||||
  const float c2 = +0.78736169e-04;
 | 
			
		||||
  const float c3 = -0.61117958e-06;
 | 
			
		||||
  const float c4 = +0.43884187e-08;
 | 
			
		||||
  const float c5 = -0.29883885e-10;
 | 
			
		||||
  const float c6 = +0.21874425e-12;
 | 
			
		||||
  const float c7 = -0.17892321e-14;
 | 
			
		||||
  const float c8 = +0.11112018e-16;
 | 
			
		||||
  const float c9 = -0.30994571e-19;
 | 
			
		||||
  const float p = c0 + t * (c1 + t * (c2 + t * (c3 + t * (c4 + t * (c5 + t * (c6 + t * (c7 + t * (c8 + t * (c9)))))))));
 | 
			
		||||
  return 0.61078 / pow(p, 8);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// From https://www.environmentalbiophysics.org/chalk-talk-how-to-calculate-absolute-humidity/
 | 
			
		||||
// H/T to https://esphome.io/cookbook/bme280_environment.html
 | 
			
		||||
// H/T to https://carnotcycle.wordpress.com/2012/08/04/how-to-convert-relative-humidity-to-absolute-humidity/
 | 
			
		||||
float AbsoluteHumidityComponent::vapor_density(float es, float hr, float ta) {
 | 
			
		||||
  // es = saturated vapor pressure (kPa)
 | 
			
		||||
  // hr = relative humidity [0-1]
 | 
			
		||||
  // ta = absolute temperature (K)
 | 
			
		||||
 | 
			
		||||
  const float ea = hr * es * 1000;   // vapor pressure of the air (Pa)
 | 
			
		||||
  const float mw = 18.01528;         // molar mass of water (g⋅mol⁻¹)
 | 
			
		||||
  const float r = 8.31446261815324;  // molar gas constant (J⋅K⁻¹)
 | 
			
		||||
  return (ea * mw) / (r * ta);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
}  // namespace absolute_humidity
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
@@ -1,76 +0,0 @@
 | 
			
		||||
#pragma once
 | 
			
		||||
 | 
			
		||||
#include "esphome/core/component.h"
 | 
			
		||||
#include "esphome/components/sensor/sensor.h"
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace absolute_humidity {
 | 
			
		||||
 | 
			
		||||
/// Enum listing all implemented saturation vapor pressure equations.
 | 
			
		||||
enum SaturationVaporPressureEquation {
 | 
			
		||||
  BUCK,
 | 
			
		||||
  TETENS,
 | 
			
		||||
  WOBUS,
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
/// This class implements calculation of absolute humidity from temperature and relative humidity.
 | 
			
		||||
class AbsoluteHumidityComponent : public sensor::Sensor, public Component {
 | 
			
		||||
 public:
 | 
			
		||||
  AbsoluteHumidityComponent() = default;
 | 
			
		||||
 | 
			
		||||
  void set_temperature_sensor(sensor::Sensor *temperature_sensor) { this->temperature_sensor_ = temperature_sensor; }
 | 
			
		||||
  void set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; }
 | 
			
		||||
  void set_equation(SaturationVaporPressureEquation equation) { this->equation_ = equation; }
 | 
			
		||||
 | 
			
		||||
  void setup() override;
 | 
			
		||||
  void dump_config() override;
 | 
			
		||||
  float get_setup_priority() const override;
 | 
			
		||||
  void loop() override;
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  void temperature_callback_(float state) {
 | 
			
		||||
    this->next_update_ = true;
 | 
			
		||||
    this->temperature_ = state;
 | 
			
		||||
  }
 | 
			
		||||
  void humidity_callback_(float state) {
 | 
			
		||||
    this->next_update_ = true;
 | 
			
		||||
    this->humidity_ = state;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  /** Buck equation for saturation vapor pressure in kPa.
 | 
			
		||||
   *
 | 
			
		||||
   * @param temperature_c Air temperature in °C.
 | 
			
		||||
   */
 | 
			
		||||
  static float es_buck(float temperature_c);
 | 
			
		||||
  /** Tetens equation for saturation vapor pressure in kPa.
 | 
			
		||||
   *
 | 
			
		||||
   * @param temperature_c Air temperature in °C.
 | 
			
		||||
   */
 | 
			
		||||
  static float es_tetens(float temperature_c);
 | 
			
		||||
  /** Wobus equation for saturation vapor pressure in kPa.
 | 
			
		||||
   *
 | 
			
		||||
   * @param temperature_c Air temperature in °C.
 | 
			
		||||
   */
 | 
			
		||||
  static float es_wobus(float temperature_c);
 | 
			
		||||
 | 
			
		||||
  /** Calculate vapor density (absolute humidity) in g/m³.
 | 
			
		||||
   *
 | 
			
		||||
   * @param es Saturation vapor pressure in kPa.
 | 
			
		||||
   * @param hr Relative humidity 0 to 1.
 | 
			
		||||
   * @param ta Absolute temperature in K.
 | 
			
		||||
   * @param heater_duration The duration in ms that the heater should turn on for when measuring.
 | 
			
		||||
   */
 | 
			
		||||
  static float vapor_density(float es, float hr, float ta);
 | 
			
		||||
 | 
			
		||||
  sensor::Sensor *temperature_sensor_{nullptr};
 | 
			
		||||
  sensor::Sensor *humidity_sensor_{nullptr};
 | 
			
		||||
 | 
			
		||||
  bool next_update_{false};
 | 
			
		||||
 | 
			
		||||
  float temperature_{NAN};
 | 
			
		||||
  float humidity_{NAN};
 | 
			
		||||
  SaturationVaporPressureEquation equation_;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
}  // namespace absolute_humidity
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
@@ -1,56 +0,0 @@
 | 
			
		||||
import esphome.codegen as cg
 | 
			
		||||
import esphome.config_validation as cv
 | 
			
		||||
from esphome.components import sensor
 | 
			
		||||
from esphome.const import (
 | 
			
		||||
    CONF_HUMIDITY,
 | 
			
		||||
    CONF_TEMPERATURE,
 | 
			
		||||
    STATE_CLASS_MEASUREMENT,
 | 
			
		||||
    CONF_EQUATION,
 | 
			
		||||
    ICON_WATER,
 | 
			
		||||
    UNIT_GRAMS_PER_CUBIC_METER,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
absolute_humidity_ns = cg.esphome_ns.namespace("absolute_humidity")
 | 
			
		||||
AbsoluteHumidityComponent = absolute_humidity_ns.class_(
 | 
			
		||||
    "AbsoluteHumidityComponent", sensor.Sensor, cg.Component
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
SaturationVaporPressureEquation = absolute_humidity_ns.enum(
 | 
			
		||||
    "SaturationVaporPressureEquation"
 | 
			
		||||
)
 | 
			
		||||
EQUATION = {
 | 
			
		||||
    "BUCK": SaturationVaporPressureEquation.BUCK,
 | 
			
		||||
    "TETENS": SaturationVaporPressureEquation.TETENS,
 | 
			
		||||
    "WOBUS": SaturationVaporPressureEquation.WOBUS,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
CONFIG_SCHEMA = (
 | 
			
		||||
    sensor.sensor_schema(
 | 
			
		||||
        unit_of_measurement=UNIT_GRAMS_PER_CUBIC_METER,
 | 
			
		||||
        icon=ICON_WATER,
 | 
			
		||||
        accuracy_decimals=2,
 | 
			
		||||
        state_class=STATE_CLASS_MEASUREMENT,
 | 
			
		||||
    )
 | 
			
		||||
    .extend(
 | 
			
		||||
        {
 | 
			
		||||
            cv.GenerateID(): cv.declare_id(AbsoluteHumidityComponent),
 | 
			
		||||
            cv.Required(CONF_TEMPERATURE): cv.use_id(sensor.Sensor),
 | 
			
		||||
            cv.Required(CONF_HUMIDITY): cv.use_id(sensor.Sensor),
 | 
			
		||||
            cv.Optional(CONF_EQUATION, default="WOBUS"): cv.enum(EQUATION, upper=True),
 | 
			
		||||
        }
 | 
			
		||||
    )
 | 
			
		||||
    .extend(cv.COMPONENT_SCHEMA)
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
async def to_code(config):
 | 
			
		||||
    var = await sensor.new_sensor(config)
 | 
			
		||||
    await cg.register_component(var, config)
 | 
			
		||||
 | 
			
		||||
    temperature_sensor = await cg.get_variable(config[CONF_TEMPERATURE])
 | 
			
		||||
    cg.add(var.set_temperature_sensor(temperature_sensor))
 | 
			
		||||
 | 
			
		||||
    humidity_sensor = await cg.get_variable(config[CONF_HUMIDITY])
 | 
			
		||||
    cg.add(var.set_humidity_sensor(humidity_sensor))
 | 
			
		||||
 | 
			
		||||
    cg.add(var.set_equation(config[CONF_EQUATION]))
 | 
			
		||||
@@ -52,10 +52,10 @@ uint32_t IRAM_ATTR HOT AcDimmerDataStore::timer_intr(uint32_t now) {
 | 
			
		||||
    this->gate_pin.digital_write(false);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  if (time_since_zc < this->enable_time_us) {
 | 
			
		||||
  if (time_since_zc < this->enable_time_us)
 | 
			
		||||
    // Next event is enable, return time until that event
 | 
			
		||||
    return this->enable_time_us - time_since_zc;
 | 
			
		||||
  } else if (time_since_zc < disable_time_us) {
 | 
			
		||||
  else if (time_since_zc < disable_time_us) {
 | 
			
		||||
    // Next event is disable, return time until that event
 | 
			
		||||
    return this->disable_time_us - time_since_zc;
 | 
			
		||||
  }
 | 
			
		||||
@@ -74,10 +74,9 @@ uint32_t IRAM_ATTR HOT timer_interrupt() {
 | 
			
		||||
  uint32_t min_dt_us = 1000;
 | 
			
		||||
  uint32_t now = micros();
 | 
			
		||||
  for (auto *dimmer : all_dimmers) {
 | 
			
		||||
    if (dimmer == nullptr) {
 | 
			
		||||
    if (dimmer == nullptr)
 | 
			
		||||
      // no more dimmers
 | 
			
		||||
      break;
 | 
			
		||||
    }
 | 
			
		||||
    uint32_t res = dimmer->timer_intr(now);
 | 
			
		||||
    if (res != 0 && res < min_dt_us)
 | 
			
		||||
      min_dt_us = res;
 | 
			
		||||
@@ -122,7 +121,6 @@ void IRAM_ATTR HOT AcDimmerDataStore::gpio_intr() {
 | 
			
		||||
      // also take into account min_power
 | 
			
		||||
      auto min_us = this->cycle_time_us * this->min_power / 1000;
 | 
			
		||||
      this->enable_time_us = std::max((uint32_t) 1, ((65535 - this->value) * (this->cycle_time_us - min_us)) / 65535);
 | 
			
		||||
 | 
			
		||||
      if (this->method == DIM_METHOD_LEADING_PULSE) {
 | 
			
		||||
        // Minimum pulse time should be enough for the triac to trigger when it is close to the ZC zone
 | 
			
		||||
        // this is for brightness near 99%
 | 
			
		||||
@@ -203,7 +201,6 @@ void AcDimmer::setup() {
 | 
			
		||||
#endif
 | 
			
		||||
}
 | 
			
		||||
void AcDimmer::write_state(float state) {
 | 
			
		||||
  state = std::acos(1 - (2 * state)) / 3.14159;  // RMS power compensation
 | 
			
		||||
  auto new_value = static_cast<uint16_t>(roundf(state * 65535));
 | 
			
		||||
  if (new_value != 0 && this->store_.value == 0)
 | 
			
		||||
    this->store_.init_cycle = this->init_with_half_cycle_;
 | 
			
		||||
@@ -215,13 +212,12 @@ void AcDimmer::dump_config() {
 | 
			
		||||
  LOG_PIN("  Zero-Cross Pin: ", this->zero_cross_pin_);
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "   Min Power: %.1f%%", this->store_.min_power / 10.0f);
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "   Init with half cycle: %s", YESNO(this->init_with_half_cycle_));
 | 
			
		||||
  if (method_ == DIM_METHOD_LEADING_PULSE) {
 | 
			
		||||
  if (method_ == DIM_METHOD_LEADING_PULSE)
 | 
			
		||||
    ESP_LOGCONFIG(TAG, "   Method: leading pulse");
 | 
			
		||||
  } else if (method_ == DIM_METHOD_LEADING) {
 | 
			
		||||
  else if (method_ == DIM_METHOD_LEADING)
 | 
			
		||||
    ESP_LOGCONFIG(TAG, "   Method: leading");
 | 
			
		||||
  } else {
 | 
			
		||||
  else
 | 
			
		||||
    ESP_LOGCONFIG(TAG, "   Method: trailing");
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  LOG_FLOAT_OUTPUT(this);
 | 
			
		||||
  ESP_LOGV(TAG, "  Estimated Frequency: %.3fHz", 1e6f / this->store_.cycle_time_us / 2);
 | 
			
		||||
 
 | 
			
		||||
@@ -13,6 +13,7 @@ class AdalightLightEffect : public light::AddressableLightEffect, public uart::U
 | 
			
		||||
 public:
 | 
			
		||||
  AdalightLightEffect(const std::string &name);
 | 
			
		||||
 | 
			
		||||
 public:
 | 
			
		||||
  void start() override;
 | 
			
		||||
  void stop() override;
 | 
			
		||||
  void apply(light::AddressableLight &it, const Color ¤t_color) override;
 | 
			
		||||
@@ -29,6 +30,7 @@ class AdalightLightEffect : public light::AddressableLightEffect, public uart::U
 | 
			
		||||
  void blank_all_leds_(light::AddressableLight &it);
 | 
			
		||||
  Frame parse_frame_(light::AddressableLight &it);
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  uint32_t last_ack_{0};
 | 
			
		||||
  uint32_t last_byte_{0};
 | 
			
		||||
  uint32_t last_reset_{0};
 | 
			
		||||
 
 | 
			
		||||
@@ -1,214 +1 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components.esp32 import get_esp32_variant
from esphome.components.esp32.const import (
    VARIANT_ESP32,
    VARIANT_ESP32C2,
    VARIANT_ESP32C3,
    VARIANT_ESP32C6,
    VARIANT_ESP32H2,
    VARIANT_ESP32S2,
    VARIANT_ESP32S3,
)
import esphome.config_validation as cv
from esphome.const import CONF_ANALOG, CONF_INPUT, CONF_NUMBER, PLATFORM_ESP8266
from esphome.core import CORE

CODEOWNERS = ["@esphome/core"]

adc_ns = cg.esphome_ns.namespace("adc")


"""
From the patch versions below (and 5.2+), ADC_ATTEN_DB_11 is deprecated and replaced with ADC_ATTEN_DB_12.
4.4.7
5.0.5
5.1.3
5.2+
"""

ATTENUATION_MODES = {
    "0db": cg.global_ns.ADC_ATTEN_DB_0,
    "2.5db": cg.global_ns.ADC_ATTEN_DB_2_5,
    "6db": cg.global_ns.ADC_ATTEN_DB_6,
    "11db": adc_ns.ADC_ATTEN_DB_12_COMPAT,
    "12db": adc_ns.ADC_ATTEN_DB_12_COMPAT,
    "auto": "auto",
}

sampling_mode = adc_ns.enum("SamplingMode", is_class=True)

SAMPLING_MODES = {
    "avg": sampling_mode.AVG,
    "min": sampling_mode.MIN,
    "max": sampling_mode.MAX,
}

adc1_channel_t = cg.global_ns.enum("adc1_channel_t")
 | 
			
		||||
adc2_channel_t = cg.global_ns.enum("adc2_channel_t")
 | 
			
		||||
 | 
			
		||||
# From https://github.com/espressif/esp-idf/blob/master/components/driver/include/driver/adc_common.h
 | 
			
		||||
# pin to adc1 channel mapping
 | 
			
		||||
ESP32_VARIANT_ADC1_PIN_TO_CHANNEL = {
 | 
			
		||||
    VARIANT_ESP32: {
 | 
			
		||||
        36: adc1_channel_t.ADC1_CHANNEL_0,
 | 
			
		||||
        37: adc1_channel_t.ADC1_CHANNEL_1,
 | 
			
		||||
        38: adc1_channel_t.ADC1_CHANNEL_2,
 | 
			
		||||
        39: adc1_channel_t.ADC1_CHANNEL_3,
 | 
			
		||||
        32: adc1_channel_t.ADC1_CHANNEL_4,
 | 
			
		||||
        33: adc1_channel_t.ADC1_CHANNEL_5,
 | 
			
		||||
        34: adc1_channel_t.ADC1_CHANNEL_6,
 | 
			
		||||
        35: adc1_channel_t.ADC1_CHANNEL_7,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32S2: {
 | 
			
		||||
        1: adc1_channel_t.ADC1_CHANNEL_0,
 | 
			
		||||
        2: adc1_channel_t.ADC1_CHANNEL_1,
 | 
			
		||||
        3: adc1_channel_t.ADC1_CHANNEL_2,
 | 
			
		||||
        4: adc1_channel_t.ADC1_CHANNEL_3,
 | 
			
		||||
        5: adc1_channel_t.ADC1_CHANNEL_4,
 | 
			
		||||
        6: adc1_channel_t.ADC1_CHANNEL_5,
 | 
			
		||||
        7: adc1_channel_t.ADC1_CHANNEL_6,
 | 
			
		||||
        8: adc1_channel_t.ADC1_CHANNEL_7,
 | 
			
		||||
        9: adc1_channel_t.ADC1_CHANNEL_8,
 | 
			
		||||
        10: adc1_channel_t.ADC1_CHANNEL_9,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32S3: {
 | 
			
		||||
        1: adc1_channel_t.ADC1_CHANNEL_0,
 | 
			
		||||
        2: adc1_channel_t.ADC1_CHANNEL_1,
 | 
			
		||||
        3: adc1_channel_t.ADC1_CHANNEL_2,
 | 
			
		||||
        4: adc1_channel_t.ADC1_CHANNEL_3,
 | 
			
		||||
        5: adc1_channel_t.ADC1_CHANNEL_4,
 | 
			
		||||
        6: adc1_channel_t.ADC1_CHANNEL_5,
 | 
			
		||||
        7: adc1_channel_t.ADC1_CHANNEL_6,
 | 
			
		||||
        8: adc1_channel_t.ADC1_CHANNEL_7,
 | 
			
		||||
        9: adc1_channel_t.ADC1_CHANNEL_8,
 | 
			
		||||
        10: adc1_channel_t.ADC1_CHANNEL_9,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32C3: {
 | 
			
		||||
        0: adc1_channel_t.ADC1_CHANNEL_0,
 | 
			
		||||
        1: adc1_channel_t.ADC1_CHANNEL_1,
 | 
			
		||||
        2: adc1_channel_t.ADC1_CHANNEL_2,
 | 
			
		||||
        3: adc1_channel_t.ADC1_CHANNEL_3,
 | 
			
		||||
        4: adc1_channel_t.ADC1_CHANNEL_4,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32C2: {
 | 
			
		||||
        0: adc1_channel_t.ADC1_CHANNEL_0,
 | 
			
		||||
        1: adc1_channel_t.ADC1_CHANNEL_1,
 | 
			
		||||
        2: adc1_channel_t.ADC1_CHANNEL_2,
 | 
			
		||||
        3: adc1_channel_t.ADC1_CHANNEL_3,
 | 
			
		||||
        4: adc1_channel_t.ADC1_CHANNEL_4,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32C6: {
 | 
			
		||||
        0: adc1_channel_t.ADC1_CHANNEL_0,
 | 
			
		||||
        1: adc1_channel_t.ADC1_CHANNEL_1,
 | 
			
		||||
        2: adc1_channel_t.ADC1_CHANNEL_2,
 | 
			
		||||
        3: adc1_channel_t.ADC1_CHANNEL_3,
 | 
			
		||||
        4: adc1_channel_t.ADC1_CHANNEL_4,
 | 
			
		||||
        5: adc1_channel_t.ADC1_CHANNEL_5,
 | 
			
		||||
        6: adc1_channel_t.ADC1_CHANNEL_6,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32H2: {
 | 
			
		||||
        1: adc1_channel_t.ADC1_CHANNEL_0,
 | 
			
		||||
        2: adc1_channel_t.ADC1_CHANNEL_1,
 | 
			
		||||
        3: adc1_channel_t.ADC1_CHANNEL_2,
 | 
			
		||||
        4: adc1_channel_t.ADC1_CHANNEL_3,
 | 
			
		||||
        5: adc1_channel_t.ADC1_CHANNEL_4,
 | 
			
		||||
    },
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
ESP32_VARIANT_ADC2_PIN_TO_CHANNEL = {
 | 
			
		||||
    # TODO: add other variants
 | 
			
		||||
    VARIANT_ESP32: {
 | 
			
		||||
        4: adc2_channel_t.ADC2_CHANNEL_0,
 | 
			
		||||
        0: adc2_channel_t.ADC2_CHANNEL_1,
 | 
			
		||||
        2: adc2_channel_t.ADC2_CHANNEL_2,
 | 
			
		||||
        15: adc2_channel_t.ADC2_CHANNEL_3,
 | 
			
		||||
        13: adc2_channel_t.ADC2_CHANNEL_4,
 | 
			
		||||
        12: adc2_channel_t.ADC2_CHANNEL_5,
 | 
			
		||||
        14: adc2_channel_t.ADC2_CHANNEL_6,
 | 
			
		||||
        27: adc2_channel_t.ADC2_CHANNEL_7,
 | 
			
		||||
        25: adc2_channel_t.ADC2_CHANNEL_8,
 | 
			
		||||
        26: adc2_channel_t.ADC2_CHANNEL_9,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32S2: {
 | 
			
		||||
        11: adc2_channel_t.ADC2_CHANNEL_0,
 | 
			
		||||
        12: adc2_channel_t.ADC2_CHANNEL_1,
 | 
			
		||||
        13: adc2_channel_t.ADC2_CHANNEL_2,
 | 
			
		||||
        14: adc2_channel_t.ADC2_CHANNEL_3,
 | 
			
		||||
        15: adc2_channel_t.ADC2_CHANNEL_4,
 | 
			
		||||
        16: adc2_channel_t.ADC2_CHANNEL_5,
 | 
			
		||||
        17: adc2_channel_t.ADC2_CHANNEL_6,
 | 
			
		||||
        18: adc2_channel_t.ADC2_CHANNEL_7,
 | 
			
		||||
        19: adc2_channel_t.ADC2_CHANNEL_8,
 | 
			
		||||
        20: adc2_channel_t.ADC2_CHANNEL_9,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32S3: {
 | 
			
		||||
        11: adc2_channel_t.ADC2_CHANNEL_0,
 | 
			
		||||
        12: adc2_channel_t.ADC2_CHANNEL_1,
 | 
			
		||||
        13: adc2_channel_t.ADC2_CHANNEL_2,
 | 
			
		||||
        14: adc2_channel_t.ADC2_CHANNEL_3,
 | 
			
		||||
        15: adc2_channel_t.ADC2_CHANNEL_4,
 | 
			
		||||
        16: adc2_channel_t.ADC2_CHANNEL_5,
 | 
			
		||||
        17: adc2_channel_t.ADC2_CHANNEL_6,
 | 
			
		||||
        18: adc2_channel_t.ADC2_CHANNEL_7,
 | 
			
		||||
        19: adc2_channel_t.ADC2_CHANNEL_8,
 | 
			
		||||
        20: adc2_channel_t.ADC2_CHANNEL_9,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32C3: {
 | 
			
		||||
        5: adc2_channel_t.ADC2_CHANNEL_0,
 | 
			
		||||
    },
 | 
			
		||||
    VARIANT_ESP32C2: {},
 | 
			
		||||
    VARIANT_ESP32C6: {},
 | 
			
		||||
    VARIANT_ESP32H2: {},
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def validate_adc_pin(value):
 | 
			
		||||
    if str(value).upper() == "VCC":
 | 
			
		||||
        if CORE.is_rp2040:
 | 
			
		||||
            return pins.internal_gpio_input_pin_schema(29)
 | 
			
		||||
        return cv.only_on([PLATFORM_ESP8266])("VCC")
 | 
			
		||||
 | 
			
		||||
    if str(value).upper() == "TEMPERATURE":
 | 
			
		||||
        return cv.only_on_rp2040("TEMPERATURE")
 | 
			
		||||
 | 
			
		||||
    if CORE.is_esp32:
 | 
			
		||||
        conf = pins.internal_gpio_input_pin_schema(value)
 | 
			
		||||
        value = conf[CONF_NUMBER]
 | 
			
		||||
        variant = get_esp32_variant()
 | 
			
		||||
        if (
 | 
			
		||||
            variant not in ESP32_VARIANT_ADC1_PIN_TO_CHANNEL
 | 
			
		||||
            and variant not in ESP32_VARIANT_ADC2_PIN_TO_CHANNEL
 | 
			
		||||
        ):
 | 
			
		||||
            raise cv.Invalid(f"This ESP32 variant ({variant}) is not supported")
 | 
			
		||||
 | 
			
		||||
        if (
 | 
			
		||||
            value not in ESP32_VARIANT_ADC1_PIN_TO_CHANNEL[variant]
 | 
			
		||||
            and value not in ESP32_VARIANT_ADC2_PIN_TO_CHANNEL[variant]
 | 
			
		||||
        ):
 | 
			
		||||
            raise cv.Invalid(f"{variant} doesn't support ADC on this pin")
 | 
			
		||||
 | 
			
		||||
        return conf
 | 
			
		||||
 | 
			
		||||
    if CORE.is_esp8266:
 | 
			
		||||
        conf = pins.gpio_pin_schema(
 | 
			
		||||
            {CONF_ANALOG: True, CONF_INPUT: True}, internal=True
 | 
			
		||||
        )(value)
 | 
			
		||||
 | 
			
		||||
        if conf[CONF_NUMBER] != 17:  # A0
 | 
			
		||||
            raise cv.Invalid("ESP8266: Only pin A0 (GPIO17) supports ADC")
 | 
			
		||||
        return conf
 | 
			
		||||
 | 
			
		||||
    if CORE.is_rp2040:
 | 
			
		||||
        conf = pins.internal_gpio_input_pin_schema(value)
 | 
			
		||||
        number = conf[CONF_NUMBER]
 | 
			
		||||
        if number not in (26, 27, 28, 29):
 | 
			
		||||
            raise cv.Invalid("RP2040: Only pins 26, 27, 28 and 29 support ADC")
 | 
			
		||||
        return conf
 | 
			
		||||
 | 
			
		||||
    if CORE.is_libretiny:
 | 
			
		||||
        return pins.gpio_pin_schema(
 | 
			
		||||
            {CONF_ANALOG: True, CONF_INPUT: True}, internal=True
 | 
			
		||||
        )(value)
 | 
			
		||||
 | 
			
		||||
    raise NotImplementedError
 | 
			
		||||
 
esphome/components/adc/adc_sensor.cpp (Normal file, 170 lines)
							@@ -0,0 +1,170 @@
 | 
			
		||||
#include "adc_sensor.h"
 | 
			
		||||
#include "esphome/core/log.h"
 | 
			
		||||
#include "esphome/core/helpers.h"
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP8266
 | 
			
		||||
#ifdef USE_ADC_SENSOR_VCC
 | 
			
		||||
#include <Esp.h>
 | 
			
		||||
ADC_MODE(ADC_VCC)
 | 
			
		||||
#else
 | 
			
		||||
#include <Arduino.h>
 | 
			
		||||
#endif
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace adc {
 | 
			
		||||
 | 
			
		||||
static const char *const TAG = "adc";
 | 
			
		||||
 | 
			
		||||
void ADCSensor::setup() {
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "Setting up ADC '%s'...", this->get_name().c_str());
 | 
			
		||||
#ifndef USE_ADC_SENSOR_VCC
 | 
			
		||||
  pin_->setup();
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
  adc1_config_width(ADC_WIDTH_BIT_12);
 | 
			
		||||
  if (!autorange_) {
 | 
			
		||||
    adc1_config_channel_atten(channel_, attenuation_);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // load characteristics for each attenuation
 | 
			
		||||
  for (int i = 0; i < (int) ADC_ATTEN_MAX; i++) {
 | 
			
		||||
    auto cal_value = esp_adc_cal_characterize(ADC_UNIT_1, (adc_atten_t) i, ADC_WIDTH_BIT_12,
 | 
			
		||||
                                              1100,  // default vref
 | 
			
		||||
                                              &cal_characteristics_[i]);
 | 
			
		||||
    switch (cal_value) {
 | 
			
		||||
      case ESP_ADC_CAL_VAL_EFUSE_VREF:
 | 
			
		||||
        ESP_LOGV(TAG, "Using eFuse Vref for calibration");
 | 
			
		||||
        break;
 | 
			
		||||
      case ESP_ADC_CAL_VAL_EFUSE_TP:
 | 
			
		||||
        ESP_LOGV(TAG, "Using two-point eFuse Vref for calibration");
 | 
			
		||||
        break;
 | 
			
		||||
      case ESP_ADC_CAL_VAL_DEFAULT_VREF:
 | 
			
		||||
      default:
 | 
			
		||||
        break;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // adc_gpio_init doesn't exist on ESP32-C3 or ESP32-H2
 | 
			
		||||
#if !defined(USE_ESP32_VARIANT_ESP32C3) && !defined(USE_ESP32_VARIANT_ESP32H2)
 | 
			
		||||
  adc_gpio_init(ADC_UNIT_1, (adc_channel_t) channel_);
 | 
			
		||||
#endif
 | 
			
		||||
#endif  // USE_ESP32
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void ADCSensor::dump_config() {
 | 
			
		||||
  LOG_SENSOR("", "ADC Sensor", this);
 | 
			
		||||
#ifdef USE_ESP8266
 | 
			
		||||
#ifdef USE_ADC_SENSOR_VCC
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Pin: VCC");
 | 
			
		||||
#else
 | 
			
		||||
  LOG_PIN("  Pin: ", pin_);
 | 
			
		||||
#endif
 | 
			
		||||
#endif  // USE_ESP8266
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
  LOG_PIN("  Pin: ", pin_);
 | 
			
		||||
  if (autorange_)
 | 
			
		||||
    ESP_LOGCONFIG(TAG, " Attenuation: auto");
 | 
			
		||||
  else
 | 
			
		||||
    switch (this->attenuation_) {
 | 
			
		||||
      case ADC_ATTEN_DB_0:
 | 
			
		||||
        ESP_LOGCONFIG(TAG, " Attenuation: 0db (max 1.1V)");
 | 
			
		||||
        break;
 | 
			
		||||
      case ADC_ATTEN_DB_2_5:
 | 
			
		||||
        ESP_LOGCONFIG(TAG, " Attenuation: 2.5db (max 1.5V)");
 | 
			
		||||
        break;
 | 
			
		||||
      case ADC_ATTEN_DB_6:
 | 
			
		||||
        ESP_LOGCONFIG(TAG, " Attenuation: 6db (max 2.2V)");
 | 
			
		||||
        break;
 | 
			
		||||
      case ADC_ATTEN_DB_11:
 | 
			
		||||
        ESP_LOGCONFIG(TAG, " Attenuation: 11db (max 3.9V)");
 | 
			
		||||
        break;
 | 
			
		||||
      default:  // This is to satisfy the unused ADC_ATTEN_MAX
 | 
			
		||||
        break;
 | 
			
		||||
    }
 | 
			
		||||
#endif  // USE_ESP32
 | 
			
		||||
  LOG_UPDATE_INTERVAL(this);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
float ADCSensor::get_setup_priority() const { return setup_priority::DATA; }
 | 
			
		||||
void ADCSensor::update() {
 | 
			
		||||
  float value_v = this->sample();
 | 
			
		||||
  ESP_LOGV(TAG, "'%s': Got voltage=%.4fV", this->get_name().c_str(), value_v);
 | 
			
		||||
  this->publish_state(value_v);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP8266
 | 
			
		||||
float ADCSensor::sample() {
 | 
			
		||||
#ifdef USE_ADC_SENSOR_VCC
 | 
			
		||||
  int raw = ESP.getVcc();  // NOLINT(readability-static-accessed-through-instance)
 | 
			
		||||
#else
 | 
			
		||||
  int raw = analogRead(this->pin_->get_pin());  // NOLINT
 | 
			
		||||
#endif
 | 
			
		||||
  if (output_raw_) {
 | 
			
		||||
    return raw;
 | 
			
		||||
  }
 | 
			
		||||
  return raw / 1024.0f;
 | 
			
		||||
}
 | 
			
		||||
#endif

#ifdef USE_ESP32
float ADCSensor::sample() {
  if (!autorange_) {
    int raw = adc1_get_raw(channel_);
    if (raw == -1) {
      return NAN;
    }
    if (output_raw_) {
      return raw;
    }
    uint32_t mv = esp_adc_cal_raw_to_voltage(raw, &cal_characteristics_[(int) attenuation_]);
    return mv / 1000.0f;
  }

  int raw11, raw6 = 4095, raw2 = 4095, raw0 = 4095;
  adc1_config_channel_atten(channel_, ADC_ATTEN_DB_11);
  raw11 = adc1_get_raw(channel_);
  if (raw11 < 4095) {
    adc1_config_channel_atten(channel_, ADC_ATTEN_DB_6);
    raw6 = adc1_get_raw(channel_);
    if (raw6 < 4095) {
      adc1_config_channel_atten(channel_, ADC_ATTEN_DB_2_5);
      raw2 = adc1_get_raw(channel_);
      if (raw2 < 4095) {
        adc1_config_channel_atten(channel_, ADC_ATTEN_DB_0);
        raw0 = adc1_get_raw(channel_);
      }
    }
  }

  if (raw0 == -1 || raw2 == -1 || raw6 == -1 || raw11 == -1) {
    return NAN;
  }

  uint32_t mv11 = esp_adc_cal_raw_to_voltage(raw11, &cal_characteristics_[(int) ADC_ATTEN_DB_11]);
  uint32_t mv6 = esp_adc_cal_raw_to_voltage(raw6, &cal_characteristics_[(int) ADC_ATTEN_DB_6]);
  uint32_t mv2 = esp_adc_cal_raw_to_voltage(raw2, &cal_characteristics_[(int) ADC_ATTEN_DB_2_5]);
  uint32_t mv0 = esp_adc_cal_raw_to_voltage(raw0, &cal_characteristics_[(int) ADC_ATTEN_DB_0]);

  // Contribution of each value, in range 0-2048
  uint32_t c11 = std::min(raw11, 2048);
  uint32_t c6 = 2048 - std::abs(raw6 - 2048);
  uint32_t c2 = 2048 - std::abs(raw2 - 2048);
  uint32_t c0 = std::min(4095 - raw0, 2048);
  // max theoretical csum value is 2048*4 = 8192
  uint32_t csum = c11 + c6 + c2 + c0;

  // each mv is max 3900; so max value is 3900*2048*4, fits in unsigned
  uint32_t mv_scaled = (mv11 * c11) + (mv6 * c6) + (mv2 * c2) + (mv0 * c0);
  return mv_scaled / (float) (csum * 1000U);
}
#endif  // USE_ESP32
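(Aside: the auto-range weighting above can be checked in isolation. The snippet below repeats the same blend arithmetic with made-up raw counts and millivolt values — it never touches the ADC or calibration API; readings near mid-scale (2048) get the largest weight and saturated readings get none.)

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <cstdlib>

int main() {
  // Hypothetical 12-bit readings at 11/6/2.5/0 dB and pretend calibrated millivolts.
  int raw11 = 900, raw6 = 1800, raw2 = 3500, raw0 = 4095;
  uint32_t mv11 = 850, mv6 = 840, mv2 = 845, mv0 = 1100;

  // Same contribution weights as the sample() above: strongest near mid-scale.
  uint32_t c11 = std::min(raw11, 2048);
  uint32_t c6 = 2048 - std::abs(raw6 - 2048);
  uint32_t c2 = 2048 - std::abs(raw2 - 2048);
  uint32_t c0 = std::min(4095 - raw0, 2048);  // raw0 is saturated -> weight 0
  uint32_t csum = c11 + c6 + c2 + c0;

  uint32_t mv_scaled = (mv11 * c11) + (mv6 * c6) + (mv2 * c2) + (mv0 * c0);
  std::printf("blended voltage = %.3f V\n", mv_scaled / (float) (csum * 1000U));
  return 0;
}

With these example numbers the 2.5 dB and 0 dB ranges contribute little or nothing, so the blended result stays close to the 11 dB and 6 dB readings (about 0.84 V).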
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP8266
 | 
			
		||||
std::string ADCSensor::unique_id() { return get_mac_address() + "-adc"; }
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
}  // namespace adc
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
@@ -1,106 +1,53 @@
 | 
			
		||||
#pragma once
 | 
			
		||||
 | 
			
		||||
#include "esphome/components/sensor/sensor.h"
 | 
			
		||||
#include "esphome/components/voltage_sampler/voltage_sampler.h"
 | 
			
		||||
#include "esphome/core/component.h"
 | 
			
		||||
#include "esphome/core/hal.h"
 | 
			
		||||
#include "esphome/core/defines.h"
 | 
			
		||||
#include "esphome/components/sensor/sensor.h"
 | 
			
		||||
#include "esphome/components/voltage_sampler/voltage_sampler.h"
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
#include <esp_adc_cal.h>
 | 
			
		||||
#include "driver/adc.h"
 | 
			
		||||
#endif  // USE_ESP32
 | 
			
		||||
#include <esp_adc_cal.h>
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace adc {
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
// clang-format off
 | 
			
		||||
#if (ESP_IDF_VERSION_MAJOR == 4 && ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(4, 4, 7)) || \
 | 
			
		||||
    (ESP_IDF_VERSION_MAJOR == 5 && \
 | 
			
		||||
     ((ESP_IDF_VERSION_MINOR == 0 && ESP_IDF_VERSION_PATCH >= 5) || \
 | 
			
		||||
      (ESP_IDF_VERSION_MINOR == 1 && ESP_IDF_VERSION_PATCH >= 3) || \
 | 
			
		||||
      (ESP_IDF_VERSION_MINOR >= 2)) \
 | 
			
		||||
    )
 | 
			
		||||
// clang-format on
 | 
			
		||||
static const adc_atten_t ADC_ATTEN_DB_12_COMPAT = ADC_ATTEN_DB_12;
 | 
			
		||||
#else
 | 
			
		||||
static const adc_atten_t ADC_ATTEN_DB_12_COMPAT = ADC_ATTEN_DB_11;
 | 
			
		||||
#endif
 | 
			
		||||
#endif  // USE_ESP32
 | 
			
		||||
 | 
			
		||||
enum class SamplingMode : uint8_t { AVG = 0, MIN = 1, MAX = 2 };
 | 
			
		||||
const LogString *sampling_mode_to_str(SamplingMode mode);
 | 
			
		||||
 | 
			
		||||
class Aggregator {
 | 
			
		||||
 public:
 | 
			
		||||
  void add_sample(uint32_t value);
 | 
			
		||||
  uint32_t aggregate();
 | 
			
		||||
  Aggregator(SamplingMode mode);
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  SamplingMode mode_{SamplingMode::AVG};
 | 
			
		||||
  uint32_t aggr_{0};
 | 
			
		||||
  uint32_t samples_{0};
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
class ADCSensor : public sensor::Sensor, public PollingComponent, public voltage_sampler::VoltageSampler {
 | 
			
		||||
 public:
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
  /// Set the attenuation for this pin. Only available on the ESP32.
 | 
			
		||||
  void set_attenuation(adc_atten_t attenuation) { this->attenuation_ = attenuation; }
 | 
			
		||||
  void set_channel1(adc1_channel_t channel) {
 | 
			
		||||
    this->channel1_ = channel;
 | 
			
		||||
    this->channel2_ = ADC2_CHANNEL_MAX;
 | 
			
		||||
  }
 | 
			
		||||
  void set_channel2(adc2_channel_t channel) {
 | 
			
		||||
    this->channel2_ = channel;
 | 
			
		||||
    this->channel1_ = ADC1_CHANNEL_MAX;
 | 
			
		||||
  }
 | 
			
		||||
  void set_autorange(bool autorange) { this->autorange_ = autorange; }
 | 
			
		||||
#endif  // USE_ESP32
 | 
			
		||||
  void set_attenuation(adc_atten_t attenuation) { attenuation_ = attenuation; }
 | 
			
		||||
  void set_channel(adc1_channel_t channel) { channel_ = channel; }
 | 
			
		||||
  void set_autorange(bool autorange) { autorange_ = autorange; }
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
  /// Update ADC values
 | 
			
		||||
  /// Update adc values.
 | 
			
		||||
  void update() override;
 | 
			
		||||
  /// Setup ADC
 | 
			
		||||
  /// Setup ADc
 | 
			
		||||
  void setup() override;
 | 
			
		||||
  void dump_config() override;
 | 
			
		||||
  /// `HARDWARE_LATE` setup priority
 | 
			
		||||
  /// `HARDWARE_LATE` setup priority.
 | 
			
		||||
  float get_setup_priority() const override;
 | 
			
		||||
  void set_pin(InternalGPIOPin *pin) { this->pin_ = pin; }
 | 
			
		||||
  void set_output_raw(bool output_raw) { this->output_raw_ = output_raw; }
 | 
			
		||||
  void set_sample_count(uint8_t sample_count);
 | 
			
		||||
  void set_sampling_mode(SamplingMode sampling_mode);
 | 
			
		||||
  void set_output_raw(bool output_raw) { output_raw_ = output_raw; }
 | 
			
		||||
  float sample() override;
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP8266
 | 
			
		||||
  std::string unique_id() override;
 | 
			
		||||
#endif  // USE_ESP8266
 | 
			
		||||
 | 
			
		||||
#ifdef USE_RP2040
 | 
			
		||||
  void set_is_temperature() { this->is_temperature_ = true; }
 | 
			
		||||
#endif  // USE_RP2040
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  InternalGPIOPin *pin_;
 | 
			
		||||
  bool output_raw_{false};
 | 
			
		||||
  uint8_t sample_count_{1};
 | 
			
		||||
  SamplingMode sampling_mode_{SamplingMode::AVG};
 | 
			
		||||
 | 
			
		||||
#ifdef USE_RP2040
 | 
			
		||||
  bool is_temperature_{false};
 | 
			
		||||
#endif  // USE_RP2040
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
  adc_atten_t attenuation_{ADC_ATTEN_DB_0};
 | 
			
		||||
  adc1_channel_t channel1_{ADC1_CHANNEL_MAX};
 | 
			
		||||
  adc2_channel_t channel2_{ADC2_CHANNEL_MAX};
 | 
			
		||||
  adc1_channel_t channel_{};
 | 
			
		||||
  bool autorange_{false};
 | 
			
		||||
#if ESP_IDF_VERSION_MAJOR >= 5
 | 
			
		||||
  esp_adc_cal_characteristics_t cal_characteristics_[SOC_ADC_ATTEN_NUM] = {};
 | 
			
		||||
#else
 | 
			
		||||
  esp_adc_cal_characteristics_t cal_characteristics_[ADC_ATTEN_MAX] = {};
 | 
			
		||||
#endif  // ESP_IDF_VERSION_MAJOR
 | 
			
		||||
#endif  // USE_ESP32
 | 
			
		||||
  esp_adc_cal_characteristics_t cal_characteristics_[(int) ADC_ATTEN_MAX] = {};
 | 
			
		||||
#endif
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
}  // namespace adc
 | 
			
		||||
 
 | 
			
		||||
@@ -1,79 +0,0 @@
#include "adc_sensor.h"
#include "esphome/core/log.h"

namespace esphome {
namespace adc {

static const char *const TAG = "adc.common";

const LogString *sampling_mode_to_str(SamplingMode mode) {
  switch (mode) {
    case SamplingMode::AVG:
      return LOG_STR("average");
    case SamplingMode::MIN:
      return LOG_STR("minimum");
    case SamplingMode::MAX:
      return LOG_STR("maximum");
  }
  return LOG_STR("unknown");
}

Aggregator::Aggregator(SamplingMode mode) {
  this->mode_ = mode;
  // set to max uint if mode is "min"
  if (mode == SamplingMode::MIN) {
    this->aggr_ = UINT32_MAX;
  }
}

void Aggregator::add_sample(uint32_t value) {
  this->samples_ += 1;

  switch (this->mode_) {
    case SamplingMode::AVG:
      this->aggr_ += value;
      break;

    case SamplingMode::MIN:
      if (value < this->aggr_) {
        this->aggr_ = value;
      }
      break;

    case SamplingMode::MAX:
      if (value > this->aggr_) {
        this->aggr_ = value;
      }
  }
}

uint32_t Aggregator::aggregate() {
  if (this->mode_ == SamplingMode::AVG) {
    if (this->samples_ == 0) {
      return this->aggr_;
    }

    return (this->aggr_ + (this->samples_ >> 1)) / this->samples_;  // NOLINT(clang-analyzer-core.DivideZero)
  }

  return this->aggr_;
}

void ADCSensor::update() {
  float value_v = this->sample();
  ESP_LOGV(TAG, "'%s': Got voltage=%.4fV", this->get_name().c_str(), value_v);
  this->publish_state(value_v);
}

void ADCSensor::set_sample_count(uint8_t sample_count) {
  if (sample_count != 0) {
    this->sample_count_ = sample_count;
  }
}

void ADCSensor::set_sampling_mode(SamplingMode sampling_mode) { this->sampling_mode_ = sampling_mode; }

float ADCSensor::get_setup_priority() const { return setup_priority::DATA; }

}  // namespace adc
}  // namespace esphome
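(Aside: the only non-obvious part of Aggregator::aggregate() is the AVG branch, which adds half the sample count before dividing so the integer average rounds to nearest instead of truncating. The self-contained snippet below illustrates just that arithmetic with two arbitrary sample values.)

#include <cstdint>
#include <cstdio>

int main() {
  // Two hypothetical raw readings; their true mean is 2047.5.
  uint32_t samples[] = {2047, 2048};
  uint32_t sum = 0, n = 0;
  for (uint32_t s : samples) {
    sum += s;
    n++;
  }
  uint32_t truncated = sum / n;             // 2047
  uint32_t rounded = (sum + (n >> 1)) / n;  // 2048, what the AVG branch would return
  std::printf("truncated=%u rounded=%u\n", (unsigned) truncated, (unsigned) rounded);
  return 0;
}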
 | 
			
		||||
@@ -1,166 +0,0 @@
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
 | 
			
		||||
#include "adc_sensor.h"
 | 
			
		||||
#include "esphome/core/log.h"
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace adc {
 | 
			
		||||
 | 
			
		||||
static const char *const TAG = "adc.esp32";
 | 
			
		||||
 | 
			
		||||
static const adc_bits_width_t ADC_WIDTH_MAX_SOC_BITS = static_cast<adc_bits_width_t>(ADC_WIDTH_MAX - 1);
 | 
			
		||||
 | 
			
		||||
#ifndef SOC_ADC_RTC_MAX_BITWIDTH
 | 
			
		||||
#if USE_ESP32_VARIANT_ESP32S2
 | 
			
		||||
static const int32_t SOC_ADC_RTC_MAX_BITWIDTH = 13;
 | 
			
		||||
#else
 | 
			
		||||
static const int32_t SOC_ADC_RTC_MAX_BITWIDTH = 12;
 | 
			
		||||
#endif  // USE_ESP32_VARIANT_ESP32S2
 | 
			
		||||
#endif  // SOC_ADC_RTC_MAX_BITWIDTH
 | 
			
		||||
 | 
			
		||||
static const int ADC_MAX = (1 << SOC_ADC_RTC_MAX_BITWIDTH) - 1;
 | 
			
		||||
static const int ADC_HALF = (1 << SOC_ADC_RTC_MAX_BITWIDTH) >> 1;
 | 
			
		||||
 | 
			
		||||
void ADCSensor::setup() {
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "Setting up ADC '%s'...", this->get_name().c_str());
 | 
			
		||||
 | 
			
		||||
  if (this->channel1_ != ADC1_CHANNEL_MAX) {
 | 
			
		||||
    adc1_config_width(ADC_WIDTH_MAX_SOC_BITS);
 | 
			
		||||
    if (!this->autorange_) {
 | 
			
		||||
      adc1_config_channel_atten(this->channel1_, this->attenuation_);
 | 
			
		||||
    }
 | 
			
		||||
  } else if (this->channel2_ != ADC2_CHANNEL_MAX) {
 | 
			
		||||
    if (!this->autorange_) {
 | 
			
		||||
      adc2_config_channel_atten(this->channel2_, this->attenuation_);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  for (int32_t i = 0; i <= ADC_ATTEN_DB_12_COMPAT; i++) {
 | 
			
		||||
    auto adc_unit = this->channel1_ != ADC1_CHANNEL_MAX ? ADC_UNIT_1 : ADC_UNIT_2;
 | 
			
		||||
    auto cal_value = esp_adc_cal_characterize(adc_unit, (adc_atten_t) i, ADC_WIDTH_MAX_SOC_BITS,
 | 
			
		||||
                                              1100,  // default vref
 | 
			
		||||
                                              &this->cal_characteristics_[i]);
 | 
			
		||||
    switch (cal_value) {
 | 
			
		||||
      case ESP_ADC_CAL_VAL_EFUSE_VREF:
 | 
			
		||||
        ESP_LOGV(TAG, "Using eFuse Vref for calibration");
 | 
			
		||||
        break;
 | 
			
		||||
      case ESP_ADC_CAL_VAL_EFUSE_TP:
 | 
			
		||||
        ESP_LOGV(TAG, "Using two-point eFuse Vref for calibration");
 | 
			
		||||
        break;
 | 
			
		||||
      case ESP_ADC_CAL_VAL_DEFAULT_VREF:
 | 
			
		||||
      default:
 | 
			
		||||
        break;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void ADCSensor::dump_config() {
 | 
			
		||||
  LOG_SENSOR("", "ADC Sensor", this);
 | 
			
		||||
  LOG_PIN("  Pin: ", this->pin_);
 | 
			
		||||
  if (this->autorange_) {
 | 
			
		||||
    ESP_LOGCONFIG(TAG, "  Attenuation: auto");
 | 
			
		||||
  } else {
 | 
			
		||||
    switch (this->attenuation_) {
 | 
			
		||||
      case ADC_ATTEN_DB_0:
 | 
			
		||||
        ESP_LOGCONFIG(TAG, "  Attenuation: 0db");
 | 
			
		||||
        break;
 | 
			
		||||
      case ADC_ATTEN_DB_2_5:
 | 
			
		||||
        ESP_LOGCONFIG(TAG, "  Attenuation: 2.5db");
 | 
			
		||||
        break;
 | 
			
		||||
      case ADC_ATTEN_DB_6:
 | 
			
		||||
        ESP_LOGCONFIG(TAG, "  Attenuation: 6db");
 | 
			
		||||
        break;
 | 
			
		||||
      case ADC_ATTEN_DB_12_COMPAT:
 | 
			
		||||
        ESP_LOGCONFIG(TAG, "  Attenuation: 12db");
 | 
			
		||||
        break;
 | 
			
		||||
      default:  // This is to satisfy the unused ADC_ATTEN_MAX
 | 
			
		||||
        break;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Samples: %i", this->sample_count_);
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Sampling mode: %s", LOG_STR_ARG(sampling_mode_to_str(this->sampling_mode_)));
 | 
			
		||||
  LOG_UPDATE_INTERVAL(this);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
float ADCSensor::sample() {
 | 
			
		||||
  if (!this->autorange_) {
 | 
			
		||||
    auto aggr = Aggregator(this->sampling_mode_);
 | 
			
		||||
 | 
			
		||||
    for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
 | 
			
		||||
      int raw = -1;
 | 
			
		||||
      if (this->channel1_ != ADC1_CHANNEL_MAX) {
 | 
			
		||||
        raw = adc1_get_raw(this->channel1_);
 | 
			
		||||
      } else if (this->channel2_ != ADC2_CHANNEL_MAX) {
 | 
			
		||||
        adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw);
 | 
			
		||||
      }
 | 
			
		||||
      if (raw == -1) {
 | 
			
		||||
        return NAN;
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      aggr.add_sample(raw);
 | 
			
		||||
    }
 | 
			
		||||
    if (this->output_raw_) {
 | 
			
		||||
      return aggr.aggregate();
 | 
			
		||||
    }
 | 
			
		||||
    uint32_t mv =
 | 
			
		||||
        esp_adc_cal_raw_to_voltage(aggr.aggregate(), &this->cal_characteristics_[(int32_t) this->attenuation_]);
 | 
			
		||||
    return mv / 1000.0f;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  int raw12 = ADC_MAX, raw6 = ADC_MAX, raw2 = ADC_MAX, raw0 = ADC_MAX;
 | 
			
		||||
 | 
			
		||||
  if (this->channel1_ != ADC1_CHANNEL_MAX) {
 | 
			
		||||
    adc1_config_channel_atten(this->channel1_, ADC_ATTEN_DB_12_COMPAT);
 | 
			
		||||
    raw12 = adc1_get_raw(this->channel1_);
 | 
			
		||||
    if (raw12 < ADC_MAX) {
 | 
			
		||||
      adc1_config_channel_atten(this->channel1_, ADC_ATTEN_DB_6);
 | 
			
		||||
      raw6 = adc1_get_raw(this->channel1_);
 | 
			
		||||
      if (raw6 < ADC_MAX) {
 | 
			
		||||
        adc1_config_channel_atten(this->channel1_, ADC_ATTEN_DB_2_5);
 | 
			
		||||
        raw2 = adc1_get_raw(this->channel1_);
 | 
			
		||||
        if (raw2 < ADC_MAX) {
 | 
			
		||||
          adc1_config_channel_atten(this->channel1_, ADC_ATTEN_DB_0);
 | 
			
		||||
          raw0 = adc1_get_raw(this->channel1_);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
  } else if (this->channel2_ != ADC2_CHANNEL_MAX) {
 | 
			
		||||
    adc2_config_channel_atten(this->channel2_, ADC_ATTEN_DB_12_COMPAT);
 | 
			
		||||
    adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw12);
 | 
			
		||||
    if (raw12 < ADC_MAX) {
 | 
			
		||||
      adc2_config_channel_atten(this->channel2_, ADC_ATTEN_DB_6);
 | 
			
		||||
      adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw6);
 | 
			
		||||
      if (raw6 < ADC_MAX) {
 | 
			
		||||
        adc2_config_channel_atten(this->channel2_, ADC_ATTEN_DB_2_5);
 | 
			
		||||
        adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw2);
 | 
			
		||||
        if (raw2 < ADC_MAX) {
 | 
			
		||||
          adc2_config_channel_atten(this->channel2_, ADC_ATTEN_DB_0);
 | 
			
		||||
          adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw0);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  if (raw0 == -1 || raw2 == -1 || raw6 == -1 || raw12 == -1) {
 | 
			
		||||
    return NAN;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  uint32_t mv12 = esp_adc_cal_raw_to_voltage(raw12, &this->cal_characteristics_[(int32_t) ADC_ATTEN_DB_12_COMPAT]);
 | 
			
		||||
  uint32_t mv6 = esp_adc_cal_raw_to_voltage(raw6, &this->cal_characteristics_[(int32_t) ADC_ATTEN_DB_6]);
 | 
			
		||||
  uint32_t mv2 = esp_adc_cal_raw_to_voltage(raw2, &this->cal_characteristics_[(int32_t) ADC_ATTEN_DB_2_5]);
 | 
			
		||||
  uint32_t mv0 = esp_adc_cal_raw_to_voltage(raw0, &this->cal_characteristics_[(int32_t) ADC_ATTEN_DB_0]);
 | 
			
		||||
 | 
			
		||||
  uint32_t c12 = std::min(raw12, ADC_HALF);
 | 
			
		||||
  uint32_t c6 = ADC_HALF - std::abs(raw6 - ADC_HALF);
 | 
			
		||||
  uint32_t c2 = ADC_HALF - std::abs(raw2 - ADC_HALF);
 | 
			
		||||
  uint32_t c0 = std::min(ADC_MAX - raw0, ADC_HALF);
 | 
			
		||||
  uint32_t csum = c12 + c6 + c2 + c0;
 | 
			
		||||
 | 
			
		||||
  uint32_t mv_scaled = (mv12 * c12) + (mv6 * c6) + (mv2 * c2) + (mv0 * c0);
 | 
			
		||||
  return mv_scaled / (float) (csum * 1000U);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
}  // namespace adc
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
 | 
			
		||||
#endif  // USE_ESP32
 | 
			
		||||
@@ -1,62 +0,0 @@
 | 
			
		||||
#ifdef USE_ESP8266
 | 
			
		||||
 | 
			
		||||
#include "adc_sensor.h"
 | 
			
		||||
#include "esphome/core/helpers.h"
 | 
			
		||||
#include "esphome/core/log.h"
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ADC_SENSOR_VCC
 | 
			
		||||
#include <Esp.h>
 | 
			
		||||
ADC_MODE(ADC_VCC)
 | 
			
		||||
#else
 | 
			
		||||
#include <Arduino.h>
 | 
			
		||||
#endif  // USE_ADC_SENSOR_VCC
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace adc {
 | 
			
		||||
 | 
			
		||||
static const char *const TAG = "adc.esp8266";
 | 
			
		||||
 | 
			
		||||
void ADCSensor::setup() {
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "Setting up ADC '%s'...", this->get_name().c_str());
 | 
			
		||||
#ifndef USE_ADC_SENSOR_VCC
 | 
			
		||||
  this->pin_->setup();
 | 
			
		||||
#endif
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void ADCSensor::dump_config() {
 | 
			
		||||
  LOG_SENSOR("", "ADC Sensor", this);
 | 
			
		||||
#ifdef USE_ADC_SENSOR_VCC
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Pin: VCC");
 | 
			
		||||
#else
 | 
			
		||||
  LOG_PIN("  Pin: ", this->pin_);
 | 
			
		||||
#endif  // USE_ADC_SENSOR_VCC
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Samples: %i", this->sample_count_);
 | 
			
		||||
  ESP_LOGCONFIG(TAG, "  Sampling mode: %s", LOG_STR_ARG(sampling_mode_to_str(this->sampling_mode_)));
 | 
			
		||||
  LOG_UPDATE_INTERVAL(this);
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
float ADCSensor::sample() {
 | 
			
		||||
  auto aggr = Aggregator(this->sampling_mode_);
 | 
			
		||||
 | 
			
		||||
  for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
 | 
			
		||||
    uint32_t raw = 0;
 | 
			
		||||
#ifdef USE_ADC_SENSOR_VCC
 | 
			
		||||
    raw = ESP.getVcc();  // NOLINT(readability-static-accessed-through-instance)
 | 
			
		||||
#else
 | 
			
		||||
    raw = analogRead(this->pin_->get_pin());  // NOLINT
 | 
			
		||||
#endif  // USE_ADC_SENSOR_VCC
 | 
			
		||||
    aggr.add_sample(raw);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  if (this->output_raw_) {
 | 
			
		||||
    return aggr.aggregate();
 | 
			
		||||
  }
 | 
			
		||||
  return aggr.aggregate() / 1024.0f;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
std::string ADCSensor::unique_id() { return get_mac_address() + "-adc"; }
 | 
			
		||||
 | 
			
		||||
}  // namespace adc
 | 
			
		||||
}  // namespace esphome
 | 
			
		||||
 | 
			
		||||
#endif  // USE_ESP8266
 | 
			
		||||
Some files were not shown because too many files have changed in this diff.