mirror of
https://github.com/esphome/esphome.git
synced 2025-11-05 09:31:54 +00:00
Compare commits
1 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6761f9dfdd |
137
.clang-format
137
.clang-format
@@ -1,137 +0,0 @@
|
|||||||
Language: Cpp
|
|
||||||
AccessModifierOffset: -1
|
|
||||||
AlignAfterOpenBracket: Align
|
|
||||||
AlignConsecutiveAssignments: false
|
|
||||||
AlignConsecutiveDeclarations: false
|
|
||||||
AlignEscapedNewlines: DontAlign
|
|
||||||
AlignOperands: true
|
|
||||||
AlignTrailingComments: true
|
|
||||||
AllowAllParametersOfDeclarationOnNextLine: true
|
|
||||||
AllowShortBlocksOnASingleLine: false
|
|
||||||
AllowShortCaseLabelsOnASingleLine: false
|
|
||||||
AllowShortFunctionsOnASingleLine: All
|
|
||||||
AllowShortIfStatementsOnASingleLine: false
|
|
||||||
AllowShortLoopsOnASingleLine: false
|
|
||||||
AlwaysBreakAfterReturnType: None
|
|
||||||
AlwaysBreakBeforeMultilineStrings: false
|
|
||||||
AlwaysBreakTemplateDeclarations: MultiLine
|
|
||||||
BinPackArguments: true
|
|
||||||
BinPackParameters: true
|
|
||||||
BraceWrapping:
|
|
||||||
AfterClass: false
|
|
||||||
AfterControlStatement: false
|
|
||||||
AfterEnum: false
|
|
||||||
AfterFunction: false
|
|
||||||
AfterNamespace: false
|
|
||||||
AfterObjCDeclaration: false
|
|
||||||
AfterStruct: false
|
|
||||||
AfterUnion: false
|
|
||||||
AfterExternBlock: false
|
|
||||||
BeforeCatch: false
|
|
||||||
BeforeElse: false
|
|
||||||
IndentBraces: false
|
|
||||||
SplitEmptyFunction: true
|
|
||||||
SplitEmptyRecord: true
|
|
||||||
SplitEmptyNamespace: true
|
|
||||||
BreakBeforeBinaryOperators: None
|
|
||||||
BreakBeforeBraces: Attach
|
|
||||||
BreakBeforeInheritanceComma: false
|
|
||||||
BreakInheritanceList: BeforeColon
|
|
||||||
BreakBeforeTernaryOperators: true
|
|
||||||
BreakConstructorInitializersBeforeComma: false
|
|
||||||
BreakConstructorInitializers: BeforeColon
|
|
||||||
BreakAfterJavaFieldAnnotations: false
|
|
||||||
BreakStringLiterals: true
|
|
||||||
ColumnLimit: 120
|
|
||||||
CommentPragmas: '^ IWYU pragma:'
|
|
||||||
CompactNamespaces: false
|
|
||||||
ConstructorInitializerAllOnOneLineOrOnePerLine: true
|
|
||||||
ConstructorInitializerIndentWidth: 4
|
|
||||||
ContinuationIndentWidth: 4
|
|
||||||
Cpp11BracedListStyle: true
|
|
||||||
DerivePointerAlignment: true
|
|
||||||
DisableFormat: false
|
|
||||||
ExperimentalAutoDetectBinPacking: false
|
|
||||||
FixNamespaceComments: true
|
|
||||||
ForEachMacros:
|
|
||||||
- foreach
|
|
||||||
- Q_FOREACH
|
|
||||||
- BOOST_FOREACH
|
|
||||||
IncludeBlocks: Preserve
|
|
||||||
IncludeCategories:
|
|
||||||
- Regex: '^<ext/.*\.h>'
|
|
||||||
Priority: 2
|
|
||||||
- Regex: '^<.*\.h>'
|
|
||||||
Priority: 1
|
|
||||||
- Regex: '^<.*'
|
|
||||||
Priority: 2
|
|
||||||
- Regex: '.*'
|
|
||||||
Priority: 3
|
|
||||||
IncludeIsMainRegex: '([-_](test|unittest))?$'
|
|
||||||
IndentCaseLabels: true
|
|
||||||
IndentPPDirectives: None
|
|
||||||
IndentWidth: 2
|
|
||||||
IndentWrappedFunctionNames: false
|
|
||||||
KeepEmptyLinesAtTheStartOfBlocks: false
|
|
||||||
MacroBlockBegin: ''
|
|
||||||
MacroBlockEnd: ''
|
|
||||||
MaxEmptyLinesToKeep: 1
|
|
||||||
NamespaceIndentation: None
|
|
||||||
PenaltyBreakAssignment: 2
|
|
||||||
PenaltyBreakBeforeFirstCallParameter: 1
|
|
||||||
PenaltyBreakComment: 300
|
|
||||||
PenaltyBreakFirstLessLess: 120
|
|
||||||
PenaltyBreakString: 1000
|
|
||||||
PenaltyBreakTemplateDeclaration: 10
|
|
||||||
PenaltyExcessCharacter: 1000000
|
|
||||||
PenaltyReturnTypeOnItsOwnLine: 2000
|
|
||||||
PointerAlignment: Right
|
|
||||||
RawStringFormats:
|
|
||||||
- Language: Cpp
|
|
||||||
Delimiters:
|
|
||||||
- cc
|
|
||||||
- CC
|
|
||||||
- cpp
|
|
||||||
- Cpp
|
|
||||||
- CPP
|
|
||||||
- 'c++'
|
|
||||||
- 'C++'
|
|
||||||
CanonicalDelimiter: ''
|
|
||||||
BasedOnStyle: google
|
|
||||||
- Language: TextProto
|
|
||||||
Delimiters:
|
|
||||||
- pb
|
|
||||||
- PB
|
|
||||||
- proto
|
|
||||||
- PROTO
|
|
||||||
EnclosingFunctions:
|
|
||||||
- EqualsProto
|
|
||||||
- EquivToProto
|
|
||||||
- PARSE_PARTIAL_TEXT_PROTO
|
|
||||||
- PARSE_TEST_PROTO
|
|
||||||
- PARSE_TEXT_PROTO
|
|
||||||
- ParseTextOrDie
|
|
||||||
- ParseTextProtoOrDie
|
|
||||||
CanonicalDelimiter: ''
|
|
||||||
BasedOnStyle: google
|
|
||||||
ReflowComments: true
|
|
||||||
SortIncludes: false
|
|
||||||
SortUsingDeclarations: false
|
|
||||||
SpaceAfterCStyleCast: true
|
|
||||||
SpaceAfterTemplateKeyword: false
|
|
||||||
SpaceBeforeAssignmentOperators: true
|
|
||||||
SpaceBeforeCpp11BracedList: false
|
|
||||||
SpaceBeforeCtorInitializerColon: true
|
|
||||||
SpaceBeforeInheritanceColon: true
|
|
||||||
SpaceBeforeParens: ControlStatements
|
|
||||||
SpaceBeforeRangeBasedForLoopColon: true
|
|
||||||
SpaceInEmptyParentheses: false
|
|
||||||
SpacesBeforeTrailingComments: 2
|
|
||||||
SpacesInAngles: false
|
|
||||||
SpacesInContainerLiterals: false
|
|
||||||
SpacesInCStyleCastParentheses: false
|
|
||||||
SpacesInParentheses: false
|
|
||||||
SpacesInSquareBrackets: false
|
|
||||||
Standard: Auto
|
|
||||||
TabWidth: 2
|
|
||||||
UseTab: Never
|
|
||||||
127
.clang-tidy
127
.clang-tidy
@@ -1,127 +0,0 @@
|
|||||||
---
|
|
||||||
Checks: >-
|
|
||||||
*,
|
|
||||||
-abseil-*,
|
|
||||||
-android-*,
|
|
||||||
-boost-*,
|
|
||||||
-bugprone-macro-parentheses,
|
|
||||||
-cert-dcl50-cpp,
|
|
||||||
-cert-err58-cpp,
|
|
||||||
-clang-analyzer-core.CallAndMessage,
|
|
||||||
-clang-analyzer-osx.*,
|
|
||||||
-clang-analyzer-security.*,
|
|
||||||
-cppcoreguidelines-avoid-goto,
|
|
||||||
-cppcoreguidelines-c-copy-assignment-signature,
|
|
||||||
-cppcoreguidelines-owning-memory,
|
|
||||||
-cppcoreguidelines-pro-bounds-array-to-pointer-decay,
|
|
||||||
-cppcoreguidelines-pro-bounds-constant-array-index,
|
|
||||||
-cppcoreguidelines-pro-bounds-pointer-arithmetic,
|
|
||||||
-cppcoreguidelines-pro-type-const-cast,
|
|
||||||
-cppcoreguidelines-pro-type-cstyle-cast,
|
|
||||||
-cppcoreguidelines-pro-type-member-init,
|
|
||||||
-cppcoreguidelines-pro-type-reinterpret-cast,
|
|
||||||
-cppcoreguidelines-pro-type-static-cast-downcast,
|
|
||||||
-cppcoreguidelines-pro-type-union-access,
|
|
||||||
-cppcoreguidelines-pro-type-vararg,
|
|
||||||
-cppcoreguidelines-special-member-functions,
|
|
||||||
-fuchsia-*,
|
|
||||||
-fuchsia-default-arguments,
|
|
||||||
-fuchsia-multiple-inheritance,
|
|
||||||
-fuchsia-overloaded-operator,
|
|
||||||
-fuchsia-statically-constructed-objects,
|
|
||||||
-google-build-using-namespace,
|
|
||||||
-google-explicit-constructor,
|
|
||||||
-google-readability-braces-around-statements,
|
|
||||||
-google-readability-casting,
|
|
||||||
-google-readability-todo,
|
|
||||||
-google-runtime-int,
|
|
||||||
-google-runtime-references,
|
|
||||||
-hicpp-*,
|
|
||||||
-llvm-header-guard,
|
|
||||||
-llvm-include-order,
|
|
||||||
-misc-unconventional-assign-operator,
|
|
||||||
-misc-unused-parameters,
|
|
||||||
-modernize-deprecated-headers,
|
|
||||||
-modernize-pass-by-value,
|
|
||||||
-modernize-pass-by-value,
|
|
||||||
-modernize-return-braced-init-list,
|
|
||||||
-modernize-use-auto,
|
|
||||||
-modernize-use-default-member-init,
|
|
||||||
-modernize-use-equals-default,
|
|
||||||
-mpi-*,
|
|
||||||
-objc-*,
|
|
||||||
-performance-unnecessary-value-param,
|
|
||||||
-readability-braces-around-statements,
|
|
||||||
-readability-else-after-return,
|
|
||||||
-readability-implicit-bool-conversion,
|
|
||||||
-readability-named-parameter,
|
|
||||||
-readability-redundant-member-init,
|
|
||||||
-warnings-as-errors,
|
|
||||||
-zircon-*
|
|
||||||
WarningsAsErrors: '*'
|
|
||||||
HeaderFilterRegex: '^.*/src/esphome/.*'
|
|
||||||
AnalyzeTemporaryDtors: false
|
|
||||||
FormatStyle: google
|
|
||||||
CheckOptions:
|
|
||||||
- key: google-readability-braces-around-statements.ShortStatementLines
|
|
||||||
value: '1'
|
|
||||||
- key: google-readability-function-size.StatementThreshold
|
|
||||||
value: '800'
|
|
||||||
- key: google-readability-namespace-comments.ShortNamespaceLines
|
|
||||||
value: '10'
|
|
||||||
- key: google-readability-namespace-comments.SpacesBeforeComments
|
|
||||||
value: '2'
|
|
||||||
- key: modernize-loop-convert.MaxCopySize
|
|
||||||
value: '16'
|
|
||||||
- key: modernize-loop-convert.MinConfidence
|
|
||||||
value: reasonable
|
|
||||||
- key: modernize-loop-convert.NamingStyle
|
|
||||||
value: CamelCase
|
|
||||||
- key: modernize-pass-by-value.IncludeStyle
|
|
||||||
value: llvm
|
|
||||||
- key: modernize-replace-auto-ptr.IncludeStyle
|
|
||||||
value: llvm
|
|
||||||
- key: modernize-use-nullptr.NullMacros
|
|
||||||
value: 'NULL'
|
|
||||||
- key: readability-identifier-naming.LocalVariableCase
|
|
||||||
value: 'lower_case'
|
|
||||||
- key: readability-identifier-naming.ClassCase
|
|
||||||
value: 'CamelCase'
|
|
||||||
- key: readability-identifier-naming.StructCase
|
|
||||||
value: 'CamelCase'
|
|
||||||
- key: readability-identifier-naming.EnumCase
|
|
||||||
value: 'CamelCase'
|
|
||||||
- key: readability-identifier-naming.EnumConstantCase
|
|
||||||
value: 'UPPER_CASE'
|
|
||||||
- key: readability-identifier-naming.StaticConstantCase
|
|
||||||
value: 'UPPER_CASE'
|
|
||||||
- key: readability-identifier-naming.StaticVariableCase
|
|
||||||
value: 'UPPER_CASE'
|
|
||||||
- key: readability-identifier-naming.GlobalConstantCase
|
|
||||||
value: 'UPPER_CASE'
|
|
||||||
- key: readability-identifier-naming.ParameterCase
|
|
||||||
value: 'lower_case'
|
|
||||||
- key: readability-identifier-naming.PrivateMemberPrefix
|
|
||||||
value: 'NO_PRIVATE_MEMBERS_ALWAYS_USE_PROTECTED'
|
|
||||||
- key: readability-identifier-naming.PrivateMethodPrefix
|
|
||||||
value: 'NO_PRIVATE_METHODS_ALWAYS_USE_PROTECTED'
|
|
||||||
- key: readability-identifier-naming.ClassMemberCase
|
|
||||||
value: 'lower_case'
|
|
||||||
- key: readability-identifier-naming.ClassMemberCase
|
|
||||||
value: 'lower_case'
|
|
||||||
- key: readability-identifier-naming.ProtectedMemberCase
|
|
||||||
value: 'lower_case'
|
|
||||||
- key: readability-identifier-naming.ProtectedMemberSuffix
|
|
||||||
value: '_'
|
|
||||||
- key: readability-identifier-naming.FunctionCase
|
|
||||||
value: 'lower_case'
|
|
||||||
- key: readability-identifier-naming.ClassMethodCase
|
|
||||||
value: 'lower_case'
|
|
||||||
- key: readability-identifier-naming.ProtectedMethodCase
|
|
||||||
value: 'lower_case'
|
|
||||||
- key: readability-identifier-naming.ProtectedMethodSuffix
|
|
||||||
value: '_'
|
|
||||||
- key: readability-identifier-naming.VirtualMethodCase
|
|
||||||
value: 'lower_case'
|
|
||||||
- key: readability-identifier-naming.VirtualMethodSuffix
|
|
||||||
value: ''
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
[run]
|
|
||||||
omit = esphome/components/*
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "ESPHome Dev",
|
|
||||||
"context": "..",
|
|
||||||
"dockerFile": "../docker/Dockerfile.dev",
|
|
||||||
"postCreateCommand": "mkdir -p config && pip3 install -e .",
|
|
||||||
"runArgs": ["--privileged", "-e", "ESPHOME_DASHBOARD_USE_PING=1"],
|
|
||||||
"appPort": 6052,
|
|
||||||
"extensions": [
|
|
||||||
"ms-python.python",
|
|
||||||
"visualstudioexptteam.vscodeintellicode",
|
|
||||||
"redhat.vscode-yaml"
|
|
||||||
],
|
|
||||||
"settings": {
|
|
||||||
"python.pythonPath": "/usr/local/bin/python",
|
|
||||||
"python.linting.pylintEnabled": true,
|
|
||||||
"python.linting.enabled": true,
|
|
||||||
"python.formatting.provider": "black",
|
|
||||||
"editor.formatOnPaste": false,
|
|
||||||
"editor.formatOnSave": true,
|
|
||||||
"editor.formatOnType": true,
|
|
||||||
"files.trimTrailingWhitespace": true,
|
|
||||||
"terminal.integrated.shell.linux": "/bin/bash",
|
|
||||||
"yaml.customTags": [
|
|
||||||
"!secret scalar",
|
|
||||||
"!lambda scalar",
|
|
||||||
"!include_dir_named scalar",
|
|
||||||
"!include_dir_list scalar",
|
|
||||||
"!include_dir_merge_list scalar",
|
|
||||||
"!include_dir_merge_named scalar"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
root = true
|
|
||||||
|
|
||||||
# general
|
|
||||||
[*]
|
|
||||||
end_of_line = lf
|
|
||||||
insert_final_newline = true
|
|
||||||
charset = utf-8
|
|
||||||
|
|
||||||
# python
|
|
||||||
[*.{py}]
|
|
||||||
indent_style = space
|
|
||||||
indent_size = 4
|
|
||||||
|
|
||||||
# C++
|
|
||||||
[*.{cpp,h,tcc}]
|
|
||||||
indent_style = space
|
|
||||||
indent_size = 2
|
|
||||||
|
|
||||||
# Web
|
|
||||||
[*.{js,html,css}]
|
|
||||||
indent_style = space
|
|
||||||
indent_size = 2
|
|
||||||
|
|
||||||
# YAML
|
|
||||||
[*.{yaml,yml}]
|
|
||||||
indent_style = space
|
|
||||||
indent_size = 2
|
|
||||||
quote_type = single
|
|
||||||
8
.github/FUNDING.yml
vendored
8
.github/FUNDING.yml
vendored
@@ -1,8 +0,0 @@
|
|||||||
# These are supported funding model platforms
|
|
||||||
|
|
||||||
github:
|
|
||||||
patreon: ottowinter
|
|
||||||
open_collective:
|
|
||||||
ko_fi:
|
|
||||||
tidelift:
|
|
||||||
custom: https://esphome.io/guides/supporters.html
|
|
||||||
47
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
47
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
---
|
||||||
|
name: Bug report
|
||||||
|
about: Create a report to help us improve
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<!-- Thanks for reporting a bug for this project. READ THIS FIRST:
|
||||||
|
- Please make sure to submit issues in the right GitHub repository, if unsure just post it here:
|
||||||
|
- esphomeyaml [here] - This is mostly for reporting bugs when compiling and when you get a long stack trace while compiling or if a configuration fails to validate.
|
||||||
|
- esphomelib [https://github.com/OttoWinter/esphomelib] - Report bugs there if the ESP is crashing or a feature is not working as expected.
|
||||||
|
- esphomedocs [https://github.com/OttoWinter/esphomedocs] - Report bugs there if the documentation is wrong/outdated.
|
||||||
|
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks (```). Do not delete any text from this template!
|
||||||
|
-->
|
||||||
|
|
||||||
|
**Operating environment (Hass.io/Docker/pip/etc.):**
|
||||||
|
<!--
|
||||||
|
Please provide details about your environment.
|
||||||
|
-->
|
||||||
|
|
||||||
|
**ESP (ESP32/ESP8266/Board/Sonoff):**
|
||||||
|
<!--
|
||||||
|
Please provide details about which ESP you're using.
|
||||||
|
-->
|
||||||
|
|
||||||
|
**Affected component:**
|
||||||
|
<!--
|
||||||
|
Please add the link to the documentation at https://esphomelib.com/esphomeyaml/index.html of the component in question.
|
||||||
|
-->
|
||||||
|
|
||||||
|
|
||||||
|
**Description of problem:**
|
||||||
|
|
||||||
|
|
||||||
|
**Problem-relevant YAML-configuration entries:**
|
||||||
|
```yaml
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
**Traceback (if applicable):**
|
||||||
|
<!--
|
||||||
|
Please copy the traceback here if compilation is failing. If possible, also connect to the ESP and copy its logs into the backticks.
|
||||||
|
-->
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
**Additional information:**
|
||||||
12
.github/ISSUE_TEMPLATE/config.yml
vendored
12
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,12 +0,0 @@
|
|||||||
blank_issues_enabled: false
|
|
||||||
contact_links:
|
|
||||||
- name: Issue Tracker
|
|
||||||
url: https://github.com/esphome/issues
|
|
||||||
about: Please create bug reports in the dedicated issue tracker.
|
|
||||||
- name: Feature Request Tracker
|
|
||||||
url: https://github.com/esphome/feature-requests
|
|
||||||
about: Please create feature requests in the dedicated feature request tracker.
|
|
||||||
- name: Frequently Asked Question
|
|
||||||
url: https://esphome.io/guides/faq.html
|
|
||||||
about: Please view the FAQ for common questions and what to include in a bug report.
|
|
||||||
|
|
||||||
22
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
22
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
---
|
||||||
|
name: Feature request
|
||||||
|
about: Suggest an idea for this project
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<!-- READ THIS FIRST:
|
||||||
|
- This is for feature requests only, if you want to have a certain new sensor/module supported, please use the "new integration" template.
|
||||||
|
- Please be as descriptive as possible, especially use-cases that can otherwise not be solved boost the problem's priority.
|
||||||
|
-->
|
||||||
|
|
||||||
|
**Is your feature request related to a problem? Please describe.**
|
||||||
|
<!--
|
||||||
|
A clear and concise description of what the problem is.
|
||||||
|
-->
|
||||||
|
Ex. I'm always frustrated when [...]
|
||||||
|
|
||||||
|
**Describe the solution you'd like**
|
||||||
|
A description of what you want to happen.
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
Add any other context about the feature request here.
|
||||||
20
.github/ISSUE_TEMPLATE/new-integration.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/new-integration.md
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
---
|
||||||
|
name: New integration
|
||||||
|
about: Suggest a new integration for esphomelib
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<!-- READ THIS FIRST:
|
||||||
|
- This is for new integrations (such as new sensors/modules) only, for new features within the environment please use the "feature request" template.
|
||||||
|
- Do not delete anything from this template and fill out the form as precisely as possible.
|
||||||
|
-->
|
||||||
|
|
||||||
|
**What new integration would you wish to have?**
|
||||||
|
<!-- A name/description of the new integration/board. -->
|
||||||
|
|
||||||
|
**If possible, provide a link to an existing library for the integration:**
|
||||||
|
|
||||||
|
**Is your feature request related to a problem? Please describe.**
|
||||||
|
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
43
.github/PULL_REQUEST_TEMPLATE.md
vendored
43
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,47 +1,20 @@
|
|||||||
# What does this implement/fix?
|
## Description:
|
||||||
|
|
||||||
Quick description
|
|
||||||
|
|
||||||
## Types of changes
|
**Related issue (if applicable):** fixes <link to issue>
|
||||||
|
|
||||||
- [ ] Bugfix (non-breaking change which fixes an issue)
|
**Pull request in [esphomedocs](https://github.com/OttoWinter/esphomedocs) with documentation (if applicable):** OttoWinter/esphomedocs#<esphomedocs PR number goes here>
|
||||||
- [ ] New feature (non-breaking change which adds functionality)
|
**Pull request in [esphomelib](https://github.com/OttoWinter/esphomelib) with C++ framework changes (if applicable):** OttoWinter/esphomelib#<esphomelib PR number goes here>
|
||||||
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
|
|
||||||
- [ ] Configuration change (this will require users to update their yaml configuration files to keep working)
|
|
||||||
|
|
||||||
**Related issue or feature (if applicable):** fixes <link to issue>
|
|
||||||
|
|
||||||
**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):** esphome/esphome-docs#<esphome-docs PR number goes here>
|
|
||||||
|
|
||||||
# Test Environment
|
|
||||||
|
|
||||||
- [ ] ESP32
|
|
||||||
- [ ] ESP8266
|
|
||||||
- [ ] Windows
|
|
||||||
- [ ] Mac OS
|
|
||||||
- [ ] Linux
|
|
||||||
|
|
||||||
## Example entry for `config.yaml`:
|
|
||||||
<!--
|
|
||||||
Supplying a configuration snippet, makes it easier for a maintainer to test
|
|
||||||
your PR. Furthermore, for new integrations, it gives an impression of how
|
|
||||||
the configuration would look like.
|
|
||||||
Note: Remove this section if this PR does not have an example entry.
|
|
||||||
-->
|
|
||||||
|
|
||||||
|
## Example entry for YAML configuration (if applicable):
|
||||||
```yaml
|
```yaml
|
||||||
# Example config.yaml
|
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
# Explain your changes
|
|
||||||
|
|
||||||
Describe your changes here to communicate to the maintainers **why we should accept this pull request**.
|
|
||||||
Very important to fill if no issue linked
|
|
||||||
|
|
||||||
## Checklist:
|
## Checklist:
|
||||||
- [ ] The code change is tested and works locally.
|
- [ ] The code change is tested and works locally.
|
||||||
- [ ] Tests have been added to verify that the new code works (under `tests/` folder).
|
- [ ] Tests have been added to verify that the new code works (under `tests/` folder).
|
||||||
|
- [ ] Check this box if you have read, understand, comply, and agree with the [Code of Conduct](https://github.com/OttoWinter/esphomeyaml/blob/master/CODE_OF_CONDUCT.md).
|
||||||
|
|
||||||
If user exposed functionality or configuration variables are added/changed:
|
If user exposed functionality or configuration variables are added/changed:
|
||||||
- [ ] Documentation added/updated in [esphome-docs](https://github.com/esphome/esphome-docs).
|
- [ ] Documentation added/updated in [esphomedocs](https://github.com/OttoWinter/esphomedocs).
|
||||||
|
|||||||
9
.github/dependabot.yml
vendored
9
.github/dependabot.yml
vendored
@@ -1,9 +0,0 @@
|
|||||||
version: 2
|
|
||||||
updates:
|
|
||||||
- package-ecosystem: "pip"
|
|
||||||
directory: "/"
|
|
||||||
schedule:
|
|
||||||
interval: "daily"
|
|
||||||
ignore:
|
|
||||||
# Hypotehsis is only used for testing and is updated quite often
|
|
||||||
- dependency-name: hypothesis
|
|
||||||
7
.github/issue-close-app.yml
vendored
7
.github/issue-close-app.yml
vendored
@@ -1,7 +0,0 @@
|
|||||||
comment: >-
|
|
||||||
https://github.com/esphome/esphome/issues/430
|
|
||||||
issueConfigs:
|
|
||||||
- content:
|
|
||||||
- "OTHERWISE THE ISSUE WILL BE CLOSED AUTOMATICALLY"
|
|
||||||
|
|
||||||
caseInsensitive: false
|
|
||||||
36
.github/lock.yml
vendored
36
.github/lock.yml
vendored
@@ -1,36 +0,0 @@
|
|||||||
# Configuration for Lock Threads - https://github.com/dessant/lock-threads
|
|
||||||
|
|
||||||
# Number of days of inactivity before a closed issue or pull request is locked
|
|
||||||
daysUntilLock: 7
|
|
||||||
|
|
||||||
# Skip issues and pull requests created before a given timestamp. Timestamp must
|
|
||||||
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
|
|
||||||
skipCreatedBefore: false
|
|
||||||
|
|
||||||
# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
|
|
||||||
exemptLabels:
|
|
||||||
- keep-open
|
|
||||||
|
|
||||||
# Label to add before locking, such as `outdated`. Set to `false` to disable
|
|
||||||
lockLabel: false
|
|
||||||
|
|
||||||
# Comment to post before locking. Set to `false` to disable
|
|
||||||
lockComment: false
|
|
||||||
|
|
||||||
# Assign `resolved` as the reason for locking. Set to `false` to disable
|
|
||||||
setLockReason: false
|
|
||||||
|
|
||||||
# Limit to only `issues` or `pulls`
|
|
||||||
# only: issues
|
|
||||||
|
|
||||||
# Optionally, specify configuration settings just for `issues` or `pulls`
|
|
||||||
# issues:
|
|
||||||
# exemptLabels:
|
|
||||||
# - help-wanted
|
|
||||||
# lockLabel: outdated
|
|
||||||
|
|
||||||
# pulls:
|
|
||||||
# daysUntilLock: 30
|
|
||||||
|
|
||||||
# Repository to extend settings from
|
|
||||||
# _extends: repo
|
|
||||||
59
.github/stale.yml
vendored
59
.github/stale.yml
vendored
@@ -1,59 +0,0 @@
|
|||||||
# Configuration for probot-stale - https://github.com/probot/stale
|
|
||||||
|
|
||||||
# Number of days of inactivity before an Issue or Pull Request becomes stale
|
|
||||||
daysUntilStale: 60
|
|
||||||
|
|
||||||
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
|
|
||||||
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
|
|
||||||
daysUntilClose: 7
|
|
||||||
|
|
||||||
# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled)
|
|
||||||
onlyLabels: []
|
|
||||||
|
|
||||||
# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
|
|
||||||
exemptLabels:
|
|
||||||
- not-stale
|
|
||||||
|
|
||||||
# Set to true to ignore issues in a project (defaults to false)
|
|
||||||
exemptProjects: false
|
|
||||||
|
|
||||||
# Set to true to ignore issues in a milestone (defaults to false)
|
|
||||||
exemptMilestones: true
|
|
||||||
|
|
||||||
# Set to true to ignore issues with an assignee (defaults to false)
|
|
||||||
exemptAssignees: false
|
|
||||||
|
|
||||||
# Label to use when marking as stale
|
|
||||||
staleLabel: stale
|
|
||||||
|
|
||||||
# Comment to post when marking as stale. Set to `false` to disable
|
|
||||||
markComment: >
|
|
||||||
This issue has been automatically marked as stale because it has not had
|
|
||||||
recent activity. It will be closed if no further activity occurs. Thank you
|
|
||||||
for your contributions.
|
|
||||||
|
|
||||||
# Comment to post when removing the stale label.
|
|
||||||
# unmarkComment: >
|
|
||||||
# Your comment here.
|
|
||||||
|
|
||||||
# Comment to post when closing a stale Issue or Pull Request.
|
|
||||||
# closeComment: >
|
|
||||||
# Your comment here.
|
|
||||||
|
|
||||||
# Limit the number of actions per hour, from 1-30. Default is 30
|
|
||||||
limitPerRun: 10
|
|
||||||
|
|
||||||
# Limit to only `issues` or `pulls`
|
|
||||||
only: pulls
|
|
||||||
|
|
||||||
# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls':
|
|
||||||
# pulls:
|
|
||||||
# daysUntilStale: 30
|
|
||||||
# markComment: >
|
|
||||||
# This pull request has been automatically marked as stale because it has not had
|
|
||||||
# recent activity. It will be closed if no further activity occurs. Thank you
|
|
||||||
# for your contributions.
|
|
||||||
|
|
||||||
# issues:
|
|
||||||
# exemptLabels:
|
|
||||||
# - confirmed
|
|
||||||
55
.github/workflows/ci-docker.yml
vendored
55
.github/workflows/ci-docker.yml
vendored
@@ -1,55 +0,0 @@
|
|||||||
name: CI for docker images
|
|
||||||
|
|
||||||
# Only run when docker paths change
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [dev, beta, master]
|
|
||||||
paths:
|
|
||||||
- 'docker/**'
|
|
||||||
- '.github/workflows/**'
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- 'docker/**'
|
|
||||||
- '.github/workflows/**'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check-docker:
|
|
||||||
name: Build docker containers
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
arch: [amd64, armv7, aarch64]
|
|
||||||
build_type: ["hassio", "docker"]
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up env variables
|
|
||||||
run: |
|
|
||||||
base_version="3.0.0"
|
|
||||||
|
|
||||||
if [[ "${{ matrix.build_type }}" == "hassio" ]]; then
|
|
||||||
build_from="esphome/esphome-hassio-base-${{ matrix.arch }}:${base_version}"
|
|
||||||
build_to="esphome/esphome-hassio-${{ matrix.arch }}"
|
|
||||||
dockerfile="docker/Dockerfile.hassio"
|
|
||||||
else
|
|
||||||
build_from="esphome/esphome-base-${{ matrix.arch }}:${base_version}"
|
|
||||||
build_to="esphome/esphome-${{ matrix.arch }}"
|
|
||||||
dockerfile="docker/Dockerfile"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "BUILD_FROM=${build_from}" >> $GITHUB_ENV
|
|
||||||
echo "BUILD_TO=${build_to}" >> $GITHUB_ENV
|
|
||||||
echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
|
|
||||||
- name: Pull for cache
|
|
||||||
run: |
|
|
||||||
docker pull "${BUILD_TO}:dev" || true
|
|
||||||
- name: Register QEMU binfmt
|
|
||||||
run: docker run --rm --privileged multiarch/qemu-user-static:5.2.0-2 --reset -p yes
|
|
||||||
- run: |
|
|
||||||
docker build \
|
|
||||||
--build-arg "BUILD_FROM=${BUILD_FROM}" \
|
|
||||||
--build-arg "BUILD_VERSION=ci" \
|
|
||||||
--cache-from "${BUILD_TO}:dev" \
|
|
||||||
--file "${DOCKERFILE}" \
|
|
||||||
.
|
|
||||||
160
.github/workflows/ci.yml
vendored
160
.github/workflows/ci.yml
vendored
@@ -1,160 +0,0 @@
|
|||||||
# THESE JOBS ARE COPIED IN release.yml and release-dev.yml
|
|
||||||
# PLEASE ALSO UPDATE THOSE FILES WHEN CHANGING LINES HERE
|
|
||||||
name: CI
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
# On dev branch release-dev already performs CI checks
|
|
||||||
# On other branches the `pull_request` trigger will be used
|
|
||||||
branches: [beta, master]
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
lint-clang-format:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
|
||||||
# doesn't have to be installed
|
|
||||||
container: esphome/esphome-lint:latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
# Set up the pio project so that the cpp checks know how files are compiled
|
|
||||||
# (build flags, libraries etc)
|
|
||||||
- name: Set up platformio environment
|
|
||||||
run: pio init --ide atom
|
|
||||||
|
|
||||||
- name: Run clang-format
|
|
||||||
run: script/clang-format -i
|
|
||||||
- name: Suggest changes
|
|
||||||
run: script/ci-suggest-changes
|
|
||||||
|
|
||||||
lint-clang-tidy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
|
||||||
# doesn't have to be installed
|
|
||||||
container: esphome/esphome-lint:latest
|
|
||||||
# Split clang-tidy check into 4 jobs. Each one will check 1/4th of the .cpp files
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
split: [1, 2, 3, 4]
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
# Set up the pio project so that the cpp checks know how files are compiled
|
|
||||||
# (build flags, libraries etc)
|
|
||||||
- name: Set up platformio environment
|
|
||||||
run: pio init --ide atom
|
|
||||||
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
|
||||||
- name: Run clang-tidy
|
|
||||||
run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
|
|
||||||
- name: Suggest changes
|
|
||||||
run: script/ci-suggest-changes
|
|
||||||
|
|
||||||
lint-python:
|
|
||||||
# Don't use the esphome-lint docker image because it may contain outdated requirements.
|
|
||||||
# This way, all dependencies are cached via the cache action.
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
- name: Cache pip modules
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pip
|
|
||||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
esphome-pip-3.7-
|
|
||||||
- name: Set up python environment
|
|
||||||
run: script/setup
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/lint-python.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- name: Lint Custom
|
|
||||||
run: script/ci-custom.py
|
|
||||||
- name: Lint Python
|
|
||||||
run: script/lint-python
|
|
||||||
- name: Lint CODEOWNERS
|
|
||||||
run: script/build_codeowners.py --check
|
|
||||||
|
|
||||||
test:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
test:
|
|
||||||
- test1
|
|
||||||
- test2
|
|
||||||
- test3
|
|
||||||
- test4
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
- name: Cache pip modules
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pip
|
|
||||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
esphome-pip-3.7-
|
|
||||||
# Use per test platformio cache because tests have different platform versions
|
|
||||||
- name: Cache ~/.platformio
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.platformio
|
|
||||||
key: test-home-platformio-${{ matrix.test }}-${{ hashFiles('esphome/core_config.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
test-home-platformio-${{ matrix.test }}-
|
|
||||||
- name: Set up environment
|
|
||||||
run: script/setup
|
|
||||||
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- run: esphome tests/${{ matrix.test }}.yaml compile
|
|
||||||
|
|
||||||
pytest:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
- name: Cache pip modules
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pip
|
|
||||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
esphome-pip-3.7-
|
|
||||||
- name: Set up environment
|
|
||||||
run: script/setup
|
|
||||||
- name: Install Github Actions annotator
|
|
||||||
run: pip install pytest-github-actions-annotate-failures
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- name: Run pytest
|
|
||||||
run: |
|
|
||||||
pytest \
|
|
||||||
-qq \
|
|
||||||
--durations=10 \
|
|
||||||
-o console_output_style=count \
|
|
||||||
tests
|
|
||||||
16
.github/workflows/matchers/ci-custom.json
vendored
16
.github/workflows/matchers/ci-custom.json
vendored
@@ -1,16 +0,0 @@
|
|||||||
{
|
|
||||||
"problemMatcher": [
|
|
||||||
{
|
|
||||||
"owner": "ci-custom",
|
|
||||||
"pattern": [
|
|
||||||
{
|
|
||||||
"regexp": "^ERROR (.*):(\\d+):(\\d+) - (.*)$",
|
|
||||||
"file": 1,
|
|
||||||
"line": 2,
|
|
||||||
"column": 3,
|
|
||||||
"message": 4
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
17
.github/workflows/matchers/clang-tidy.json
vendored
17
.github/workflows/matchers/clang-tidy.json
vendored
@@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"problemMatcher": [
|
|
||||||
{
|
|
||||||
"owner": "clang-tidy",
|
|
||||||
"pattern": [
|
|
||||||
{
|
|
||||||
"regexp": "^(.*):(\\d+):(\\d+):\\s+(error):\\s+(.*) \\[([a-z0-9,\\-]+)\\]\\s*$",
|
|
||||||
"file": 1,
|
|
||||||
"line": 2,
|
|
||||||
"column": 3,
|
|
||||||
"severity": 4,
|
|
||||||
"message": 5
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
18
.github/workflows/matchers/gcc.json
vendored
18
.github/workflows/matchers/gcc.json
vendored
@@ -1,18 +0,0 @@
|
|||||||
{
|
|
||||||
"problemMatcher": [
|
|
||||||
{
|
|
||||||
"owner": "gcc",
|
|
||||||
"severity": "error",
|
|
||||||
"pattern": [
|
|
||||||
{
|
|
||||||
"regexp": "^(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
|
|
||||||
"file": 1,
|
|
||||||
"line": 2,
|
|
||||||
"column": 3,
|
|
||||||
"severity": 4,
|
|
||||||
"message": 5
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
28
.github/workflows/matchers/lint-python.json
vendored
28
.github/workflows/matchers/lint-python.json
vendored
@@ -1,28 +0,0 @@
|
|||||||
{
|
|
||||||
"problemMatcher": [
|
|
||||||
{
|
|
||||||
"owner": "flake8",
|
|
||||||
"severity": "error",
|
|
||||||
"pattern": [
|
|
||||||
{
|
|
||||||
"regexp": "^(.*):(\\d+) - ([EFCDNW]\\d{3}.*)$",
|
|
||||||
"file": 1,
|
|
||||||
"line": 2,
|
|
||||||
"message": 3
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"owner": "pylint",
|
|
||||||
"severity": "error",
|
|
||||||
"pattern": [
|
|
||||||
{
|
|
||||||
"regexp": "^(.*):(\\d+) - (\\[[EFCRW]\\d{4}\\(.*\\),.*\\].*)$",
|
|
||||||
"file": 1,
|
|
||||||
"line": 2,
|
|
||||||
"message": 3
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
18
.github/workflows/matchers/python.json
vendored
18
.github/workflows/matchers/python.json
vendored
@@ -1,18 +0,0 @@
|
|||||||
{
|
|
||||||
"problemMatcher": [
|
|
||||||
{
|
|
||||||
"owner": "python",
|
|
||||||
"pattern": [
|
|
||||||
{
|
|
||||||
"regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
|
|
||||||
"file": 1,
|
|
||||||
"line": 2
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
|
|
||||||
"message": 2
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
246
.github/workflows/release-dev.yml
vendored
246
.github/workflows/release-dev.yml
vendored
@@ -1,246 +0,0 @@
|
|||||||
name: Publish dev releases to docker hub
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- dev
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# THE LINT/TEST JOBS ARE COPIED FROM ci.yaml
|
|
||||||
|
|
||||||
lint-clang-format:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
|
||||||
# doesn't have to be installed
|
|
||||||
container: esphome/esphome-lint:latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
# Set up the pio project so that the cpp checks know how files are compiled
|
|
||||||
# (build flags, libraries etc)
|
|
||||||
- name: Set up platformio environment
|
|
||||||
run: pio init --ide atom
|
|
||||||
|
|
||||||
- name: Run clang-format
|
|
||||||
run: script/clang-format -i
|
|
||||||
- name: Suggest changes
|
|
||||||
run: script/ci-suggest-changes
|
|
||||||
|
|
||||||
lint-clang-tidy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
|
||||||
# doesn't have to be installed
|
|
||||||
container: esphome/esphome-lint:latest
|
|
||||||
# Split clang-tidy check into 4 jobs. Each one will check 1/4th of the .cpp files
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
split: [1, 2, 3, 4]
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
# Set up the pio project so that the cpp checks know how files are compiled
|
|
||||||
# (build flags, libraries etc)
|
|
||||||
- name: Set up platformio environment
|
|
||||||
run: pio init --ide atom
|
|
||||||
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
|
||||||
- name: Run clang-tidy
|
|
||||||
run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
|
|
||||||
- name: Suggest changes
|
|
||||||
run: script/ci-suggest-changes
|
|
||||||
|
|
||||||
lint-python:
|
|
||||||
# Don't use the esphome-lint docker image because it may contain outdated requirements.
|
|
||||||
# This way, all dependencies are cached via the cache action.
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
- name: Cache pip modules
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pip
|
|
||||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
esphome-pip-3.7-
|
|
||||||
- name: Set up python environment
|
|
||||||
run: script/setup
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/lint-python.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- name: Lint Custom
|
|
||||||
run: script/ci-custom.py
|
|
||||||
- name: Lint Python
|
|
||||||
run: script/lint-python
|
|
||||||
- name: Lint CODEOWNERS
|
|
||||||
run: script/build_codeowners.py --check
|
|
||||||
|
|
||||||
test:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
test:
|
|
||||||
- test1
|
|
||||||
- test2
|
|
||||||
- test3
|
|
||||||
- test4
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
- name: Cache pip modules
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pip
|
|
||||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
esphome-pip-3.7-
|
|
||||||
# Use per test platformio cache because tests have different platform versions
|
|
||||||
- name: Cache ~/.platformio
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.platformio
|
|
||||||
key: test-home-platformio-${{ matrix.test }}-${{ hashFiles('esphome/core_config.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
test-home-platformio-${{ matrix.test }}-
|
|
||||||
- name: Set up environment
|
|
||||||
run: script/setup
|
|
||||||
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- run: esphome tests/${{ matrix.test }}.yaml compile
|
|
||||||
|
|
||||||
pytest:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
- name: Cache pip modules
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pip
|
|
||||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
esphome-pip-3.7-
|
|
||||||
- name: Set up environment
|
|
||||||
run: script/setup
|
|
||||||
- name: Install Github Actions annotator
|
|
||||||
run: pip install pytest-github-actions-annotate-failures
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- name: Run pytest
|
|
||||||
run: |
|
|
||||||
pytest \
|
|
||||||
-qq \
|
|
||||||
--durations=10 \
|
|
||||||
-o console_output_style=count \
|
|
||||||
tests
|
|
||||||
|
|
||||||
deploy-docker:
|
|
||||||
name: Build and publish docker containers
|
|
||||||
if: github.repository == 'esphome/esphome'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [lint-clang-format, lint-clang-tidy, lint-python, test, pytest]
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
arch: [amd64, armv7, aarch64]
|
|
||||||
# Hassio dev image doesn't use esphome/esphome-hassio-$arch and uses base directly
|
|
||||||
build_type: ["docker"]
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set TAG
|
|
||||||
run: |
|
|
||||||
TAG="${GITHUB_SHA:0:7}"
|
|
||||||
echo "TAG=${TAG}" >> $GITHUB_ENV
|
|
||||||
- name: Set up env variables
|
|
||||||
run: |
|
|
||||||
base_version="3.0.0"
|
|
||||||
|
|
||||||
if [[ "${{ matrix.build_type }}" == "hassio" ]]; then
|
|
||||||
build_from="esphome/esphome-hassio-base-${{ matrix.arch }}:${base_version}"
|
|
||||||
build_to="esphome/esphome-hassio-${{ matrix.arch }}"
|
|
||||||
dockerfile="docker/Dockerfile.hassio"
|
|
||||||
else
|
|
||||||
build_from="esphome/esphome-base-${{ matrix.arch }}:${base_version}"
|
|
||||||
build_to="esphome/esphome-${{ matrix.arch }}"
|
|
||||||
dockerfile="docker/Dockerfile"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "BUILD_FROM=${build_from}" >> $GITHUB_ENV
|
|
||||||
echo "BUILD_TO=${build_to}" >> $GITHUB_ENV
|
|
||||||
echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
|
|
||||||
- name: Pull for cache
|
|
||||||
run: |
|
|
||||||
docker pull "${BUILD_TO}:dev" || true
|
|
||||||
- name: Register QEMU binfmt
|
|
||||||
run: docker run --rm --privileged multiarch/qemu-user-static:5.2.0-2 --reset -p yes
|
|
||||||
- run: |
|
|
||||||
docker build \
|
|
||||||
--build-arg "BUILD_FROM=${BUILD_FROM}" \
|
|
||||||
--build-arg "BUILD_VERSION=${TAG}" \
|
|
||||||
--tag "${BUILD_TO}:${TAG}" \
|
|
||||||
--tag "${BUILD_TO}:dev" \
|
|
||||||
--cache-from "${BUILD_TO}:dev" \
|
|
||||||
--file "${DOCKERFILE}" \
|
|
||||||
.
|
|
||||||
- name: Log in to docker hub
|
|
||||||
env:
|
|
||||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
|
|
||||||
- run: |
|
|
||||||
docker push "${BUILD_TO}:${TAG}"
|
|
||||||
docker push "${BUILD_TO}:dev"
|
|
||||||
|
|
||||||
|
|
||||||
deploy-docker-manifest:
|
|
||||||
if: github.repository == 'esphome/esphome'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [deploy-docker]
|
|
||||||
steps:
|
|
||||||
- name: Enable experimental manifest support
|
|
||||||
run: |
|
|
||||||
mkdir -p ~/.docker
|
|
||||||
echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
|
|
||||||
- name: Set TAG
|
|
||||||
run: |
|
|
||||||
TAG="${GITHUB_SHA:0:7}"
|
|
||||||
echo "TAG=${TAG}" >> $GITHUB_ENV
|
|
||||||
- name: Log in to docker hub
|
|
||||||
env:
|
|
||||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
|
|
||||||
- name: "Create the manifest"
|
|
||||||
run: |
|
|
||||||
docker manifest create esphome/esphome:${TAG} \
|
|
||||||
esphome/esphome-aarch64:${TAG} \
|
|
||||||
esphome/esphome-amd64:${TAG} \
|
|
||||||
esphome/esphome-armv7:${TAG}
|
|
||||||
docker manifest push esphome/esphome:${TAG}
|
|
||||||
|
|
||||||
docker manifest create esphome/esphome:dev \
|
|
||||||
esphome/esphome-aarch64:${TAG} \
|
|
||||||
esphome/esphome-amd64:${TAG} \
|
|
||||||
esphome/esphome-armv7:${TAG}
|
|
||||||
docker manifest push esphome/esphome:dev
|
|
||||||
309
.github/workflows/release.yml
vendored
309
.github/workflows/release.yml
vendored
@@ -1,309 +0,0 @@
|
|||||||
name: Publish Release
|
|
||||||
|
|
||||||
on:
|
|
||||||
release:
|
|
||||||
types: [published]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# THE LINT/TEST JOBS ARE COPIED FROM ci.yaml
|
|
||||||
|
|
||||||
lint-clang-format:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
|
||||||
# doesn't have to be installed
|
|
||||||
container: esphome/esphome-lint:latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
# Set up the pio project so that the cpp checks know how files are compiled
|
|
||||||
# (build flags, libraries etc)
|
|
||||||
- name: Set up platformio environment
|
|
||||||
run: pio init --ide atom
|
|
||||||
|
|
||||||
- name: Run clang-format
|
|
||||||
run: script/clang-format -i
|
|
||||||
- name: Suggest changes
|
|
||||||
run: script/ci-suggest-changes
|
|
||||||
|
|
||||||
lint-clang-tidy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
|
||||||
# doesn't have to be installed
|
|
||||||
container: esphome/esphome-lint:latest
|
|
||||||
# Split clang-tidy check into 4 jobs. Each one will check 1/4th of the .cpp files
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
split: [1, 2, 3, 4]
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
# Set up the pio project so that the cpp checks know how files are compiled
|
|
||||||
# (build flags, libraries etc)
|
|
||||||
- name: Set up platformio environment
|
|
||||||
run: pio init --ide atom
|
|
||||||
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
|
||||||
- name: Run clang-tidy
|
|
||||||
run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
|
|
||||||
- name: Suggest changes
|
|
||||||
run: script/ci-suggest-changes
|
|
||||||
|
|
||||||
lint-python:
|
|
||||||
# Don't use the esphome-lint docker image because it may contain outdated requirements.
|
|
||||||
# This way, all dependencies are cached via the cache action.
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
- name: Cache pip modules
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pip
|
|
||||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
esphome-pip-3.7-
|
|
||||||
- name: Set up python environment
|
|
||||||
run: script/setup
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/lint-python.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- name: Lint Custom
|
|
||||||
run: script/ci-custom.py
|
|
||||||
- name: Lint Python
|
|
||||||
run: script/lint-python
|
|
||||||
- name: Lint CODEOWNERS
|
|
||||||
run: script/build_codeowners.py --check
|
|
||||||
|
|
||||||
test:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
test:
|
|
||||||
- test1
|
|
||||||
- test2
|
|
||||||
- test3
|
|
||||||
- test4
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
- name: Cache pip modules
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pip
|
|
||||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
esphome-pip-3.7-
|
|
||||||
# Use per test platformio cache because tests have different platform versions
|
|
||||||
- name: Cache ~/.platformio
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.platformio
|
|
||||||
key: test-home-platformio-${{ matrix.test }}-${{ hashFiles('esphome/core_config.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
test-home-platformio-${{ matrix.test }}-
|
|
||||||
- name: Set up environment
|
|
||||||
run: script/setup
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- run: esphome tests/${{ matrix.test }}.yaml compile
|
|
||||||
|
|
||||||
pytest:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
- name: Cache pip modules
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pip
|
|
||||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
|
||||||
restore-keys: |
|
|
||||||
esphome-pip-3.7-
|
|
||||||
- name: Set up environment
|
|
||||||
run: script/setup
|
|
||||||
- name: Install Github Actions annotator
|
|
||||||
run: pip install pytest-github-actions-annotate-failures
|
|
||||||
|
|
||||||
- name: Register problem matchers
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- name: Run pytest
|
|
||||||
run: |
|
|
||||||
pytest \
|
|
||||||
-qq \
|
|
||||||
--durations=10 \
|
|
||||||
-o console_output_style=count \
|
|
||||||
tests
|
|
||||||
|
|
||||||
deploy-pypi:
|
|
||||||
name: Build and publish to PyPi
|
|
||||||
if: github.repository == 'esphome/esphome'
|
|
||||||
needs: [lint-clang-format, lint-clang-tidy, lint-python, test, pytest]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v1
|
|
||||||
with:
|
|
||||||
python-version: '3.x'
|
|
||||||
- name: Set up python environment
|
|
||||||
run: |
|
|
||||||
script/setup
|
|
||||||
pip install setuptools wheel twine
|
|
||||||
- name: Build
|
|
||||||
run: python setup.py sdist bdist_wheel
|
|
||||||
- name: Upload
|
|
||||||
env:
|
|
||||||
TWINE_USERNAME: __token__
|
|
||||||
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
|
|
||||||
run: twine upload dist/*
|
|
||||||
|
|
||||||
deploy-docker:
|
|
||||||
name: Build and publish docker containers
|
|
||||||
if: github.repository == 'esphome/esphome'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [lint-clang-format, lint-clang-tidy, lint-python, test, pytest]
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
arch: [amd64, armv7, aarch64]
|
|
||||||
build_type: ["hassio", "docker"]
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Set TAG
|
|
||||||
run: |
|
|
||||||
TAG="${GITHUB_REF#refs/tags/v}"
|
|
||||||
echo "TAG=${TAG}" >> $GITHUB_ENV
|
|
||||||
- name: Set up env variables
|
|
||||||
run: |
|
|
||||||
base_version="3.0.0"
|
|
||||||
|
|
||||||
if [[ "${{ matrix.build_type }}" == "hassio" ]]; then
|
|
||||||
build_from="esphome/esphome-hassio-base-${{ matrix.arch }}:${base_version}"
|
|
||||||
build_to="esphome/esphome-hassio-${{ matrix.arch }}"
|
|
||||||
dockerfile="docker/Dockerfile.hassio"
|
|
||||||
else
|
|
||||||
build_from="esphome/esphome-base-${{ matrix.arch }}:${base_version}"
|
|
||||||
build_to="esphome/esphome-${{ matrix.arch }}"
|
|
||||||
dockerfile="docker/Dockerfile"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "${{ github.event.release.prerelease }}" == "true" ]]; then
|
|
||||||
cache_tag="beta"
|
|
||||||
else
|
|
||||||
cache_tag="latest"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Set env variables so these values don't need to be calculated again
|
|
||||||
echo "BUILD_FROM=${build_from}" >> $GITHUB_ENV
|
|
||||||
echo "BUILD_TO=${build_to}" >> $GITHUB_ENV
|
|
||||||
echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
|
|
||||||
echo "CACHE_TAG=${cache_tag}" >> $GITHUB_ENV
|
|
||||||
- name: Pull for cache
|
|
||||||
run: |
|
|
||||||
docker pull "${BUILD_TO}:${CACHE_TAG}" || true
|
|
||||||
- name: Register QEMU binfmt
|
|
||||||
run: docker run --rm --privileged multiarch/qemu-user-static:5.2.0-2 --reset -p yes
|
|
||||||
- run: |
|
|
||||||
docker build \
|
|
||||||
--build-arg "BUILD_FROM=${BUILD_FROM}" \
|
|
||||||
--build-arg "BUILD_VERSION=${TAG}" \
|
|
||||||
--tag "${BUILD_TO}:${TAG}" \
|
|
||||||
--cache-from "${BUILD_TO}:${CACHE_TAG}" \
|
|
||||||
--file "${DOCKERFILE}" \
|
|
||||||
.
|
|
||||||
- name: Log in to docker hub
|
|
||||||
env:
|
|
||||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
|
|
||||||
- run: docker push "${BUILD_TO}:${TAG}"
|
|
||||||
|
|
||||||
# Always publish to beta tag (also full releases)
|
|
||||||
- name: Publish docker beta tag
|
|
||||||
run: |
|
|
||||||
docker tag "${BUILD_TO}:${TAG}" "${BUILD_TO}:beta"
|
|
||||||
docker push "${BUILD_TO}:beta"
|
|
||||||
|
|
||||||
- if: ${{ !github.event.release.prerelease }}
|
|
||||||
name: Publish docker latest tag
|
|
||||||
run: |
|
|
||||||
docker tag "${BUILD_TO}:${TAG}" "${BUILD_TO}:latest"
|
|
||||||
docker push "${BUILD_TO}:latest"
|
|
||||||
|
|
||||||
deploy-docker-manifest:
|
|
||||||
if: github.repository == 'esphome/esphome'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [deploy-docker]
|
|
||||||
steps:
|
|
||||||
- name: Enable experimental manifest support
|
|
||||||
run: |
|
|
||||||
mkdir -p ~/.docker
|
|
||||||
echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
|
|
||||||
- name: Set TAG
|
|
||||||
run: |
|
|
||||||
TAG="${GITHUB_REF#refs/tags/v}"
|
|
||||||
echo "TAG=${TAG}" >> $GITHUB_ENV
|
|
||||||
- name: Log in to docker hub
|
|
||||||
env:
|
|
||||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
|
|
||||||
- name: "Create the manifest"
|
|
||||||
run: |
|
|
||||||
docker manifest create esphome/esphome:${TAG} \
|
|
||||||
esphome/esphome-aarch64:${TAG} \
|
|
||||||
esphome/esphome-amd64:${TAG} \
|
|
||||||
esphome/esphome-armv7:${TAG}
|
|
||||||
docker manifest push esphome/esphome:${TAG}
|
|
||||||
|
|
||||||
- name: Publish docker beta tag
|
|
||||||
run: |
|
|
||||||
docker manifest create esphome/esphome:beta \
|
|
||||||
esphome/esphome-aarch64:${TAG} \
|
|
||||||
esphome/esphome-amd64:${TAG} \
|
|
||||||
esphome/esphome-armv7:${TAG}
|
|
||||||
docker manifest push esphome/esphome:beta
|
|
||||||
|
|
||||||
- name: Publish docker latest tag
|
|
||||||
if: ${{ !github.event.release.prerelease }}
|
|
||||||
run: |
|
|
||||||
docker manifest create esphome/esphome:latest \
|
|
||||||
esphome/esphome-aarch64:${TAG} \
|
|
||||||
esphome/esphome-amd64:${TAG} \
|
|
||||||
esphome/esphome-armv7:${TAG}
|
|
||||||
docker manifest push esphome/esphome:latest
|
|
||||||
|
|
||||||
deploy-hassio-repo:
|
|
||||||
if: github.repository == 'esphome/esphome'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [deploy-docker]
|
|
||||||
steps:
|
|
||||||
- env:
|
|
||||||
TOKEN: ${{ secrets.DEPLOY_HASSIO_TOKEN }}
|
|
||||||
run: |
|
|
||||||
TAG="${GITHUB_REF#refs/tags/v}"
|
|
||||||
curl \
|
|
||||||
-u ":$TOKEN" \
|
|
||||||
-X POST \
|
|
||||||
-H "Accept: application/vnd.github.v3+json" \
|
|
||||||
https://api.github.com/repos/esphome/hassio/actions/workflows/bump-version.yml/dispatches \
|
|
||||||
-d "{\"ref\":\"master\",\"inputs\":{\"version\":\"$TAG\"}}"
|
|
||||||
102
.gitignore
vendored
102
.gitignore
vendored
@@ -6,22 +6,6 @@ __pycache__/
|
|||||||
# C extensions
|
# C extensions
|
||||||
*.so
|
*.so
|
||||||
|
|
||||||
# Hide sublime text stuff
|
|
||||||
*.sublime-project
|
|
||||||
*.sublime-workspace
|
|
||||||
|
|
||||||
# Intellij Idea
|
|
||||||
.idea
|
|
||||||
|
|
||||||
# Hide some OS X stuff
|
|
||||||
.DS_Store
|
|
||||||
.AppleDouble
|
|
||||||
.LSOverride
|
|
||||||
Icon
|
|
||||||
|
|
||||||
# Thumbnails
|
|
||||||
._*
|
|
||||||
|
|
||||||
# Distribution / packaging
|
# Distribution / packaging
|
||||||
.Python
|
.Python
|
||||||
build/
|
build/
|
||||||
@@ -41,6 +25,12 @@ wheels/
|
|||||||
*.egg
|
*.egg
|
||||||
MANIFEST
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
# Installer logs
|
# Installer logs
|
||||||
pip-log.txt
|
pip-log.txt
|
||||||
pip-delete-this-directory.txt
|
pip-delete-this-directory.txt
|
||||||
@@ -51,10 +41,8 @@ htmlcov/
|
|||||||
.coverage
|
.coverage
|
||||||
.coverage.*
|
.coverage.*
|
||||||
.cache
|
.cache
|
||||||
.esphome
|
|
||||||
nosetests.xml
|
nosetests.xml
|
||||||
coverage.xml
|
coverage.xml
|
||||||
cov.xml
|
|
||||||
*.cover
|
*.cover
|
||||||
.hypothesis/
|
.hypothesis/
|
||||||
.pytest_cache/
|
.pytest_cache/
|
||||||
@@ -63,9 +51,36 @@ cov.xml
|
|||||||
*.mo
|
*.mo
|
||||||
*.pot
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
# pyenv
|
# pyenv
|
||||||
.python-version
|
.python-version
|
||||||
|
|
||||||
|
# celery beat schedule file
|
||||||
|
celerybeat-schedule
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
# Environments
|
# Environments
|
||||||
.env
|
.env
|
||||||
.venv
|
.venv
|
||||||
@@ -75,49 +90,18 @@ ENV/
|
|||||||
env.bak/
|
env.bak/
|
||||||
venv.bak/
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
# mypy
|
# mypy
|
||||||
.mypy_cache/
|
.mypy_cache/
|
||||||
|
|
||||||
.pioenvs
|
|
||||||
.piolibdeps
|
|
||||||
.pio
|
|
||||||
.vscode/
|
|
||||||
!.vscode/tasks.json
|
|
||||||
CMakeListsPrivate.txt
|
|
||||||
CMakeLists.txt
|
|
||||||
|
|
||||||
# User-specific stuff:
|
|
||||||
.idea/**/workspace.xml
|
|
||||||
.idea/**/tasks.xml
|
|
||||||
.idea/dictionaries
|
|
||||||
|
|
||||||
# Sensitive or high-churn files:
|
|
||||||
.idea/**/dataSources/
|
|
||||||
.idea/**/dataSources.ids
|
|
||||||
.idea/**/dataSources.xml
|
|
||||||
.idea/**/dataSources.local.xml
|
|
||||||
.idea/**/dynamic.xml
|
|
||||||
|
|
||||||
# CMake
|
|
||||||
cmake-build-debug/
|
|
||||||
cmake-build-release/
|
|
||||||
|
|
||||||
CMakeCache.txt
|
|
||||||
CMakeFiles
|
|
||||||
CMakeScripts
|
|
||||||
Testing
|
|
||||||
Makefile
|
|
||||||
cmake_install.cmake
|
|
||||||
install_manifest.txt
|
|
||||||
compile_commands.json
|
|
||||||
CTestTestfile.cmake
|
|
||||||
/*.cbp
|
|
||||||
|
|
||||||
.clang_complete
|
|
||||||
.gcc-flags.json
|
|
||||||
|
|
||||||
config/
|
config/
|
||||||
tests/build/
|
tests/build/
|
||||||
tests/.esphome/
|
|
||||||
/.temp-clang-tidy.cpp
|
|
||||||
.pio/
|
|
||||||
|
|||||||
320
.gitlab-ci.yml
Normal file
320
.gitlab-ci.yml
Normal file
@@ -0,0 +1,320 @@
|
|||||||
|
---
|
||||||
|
# Based on https://gitlab.com/hassio-addons/addon-node-red/blob/master/.gitlab-ci.yml
|
||||||
|
variables:
|
||||||
|
DOCKER_DRIVER: overlay2
|
||||||
|
|
||||||
|
stages:
|
||||||
|
- lint
|
||||||
|
- test
|
||||||
|
- build
|
||||||
|
- deploy
|
||||||
|
|
||||||
|
.lint: &lint
|
||||||
|
stage: lint
|
||||||
|
tags:
|
||||||
|
- python2.7
|
||||||
|
- esphomeyaml-lint
|
||||||
|
|
||||||
|
.test: &test
|
||||||
|
stage: test
|
||||||
|
before_script:
|
||||||
|
- pip install -e .
|
||||||
|
tags:
|
||||||
|
- python2.7
|
||||||
|
- esphomeyaml-test
|
||||||
|
variables:
|
||||||
|
TZ: UTC
|
||||||
|
cache:
|
||||||
|
paths:
|
||||||
|
- tests/build
|
||||||
|
|
||||||
|
.docker-builder: &docker-builder
|
||||||
|
before_script:
|
||||||
|
- docker info
|
||||||
|
- docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" "$CI_REGISTRY"
|
||||||
|
services:
|
||||||
|
- docker:dind
|
||||||
|
tags:
|
||||||
|
- hassio-builder
|
||||||
|
|
||||||
|
flake8:
|
||||||
|
<<: *lint
|
||||||
|
script:
|
||||||
|
- flake8 esphomeyaml
|
||||||
|
|
||||||
|
pylint:
|
||||||
|
<<: *lint
|
||||||
|
script:
|
||||||
|
- pylint esphomeyaml
|
||||||
|
|
||||||
|
test1:
|
||||||
|
<<: *test
|
||||||
|
script:
|
||||||
|
- esphomeyaml tests/test1.yaml compile
|
||||||
|
|
||||||
|
test2:
|
||||||
|
<<: *test
|
||||||
|
script:
|
||||||
|
- esphomeyaml tests/test2.yaml compile
|
||||||
|
|
||||||
|
.build-hassio: &build-hassio
|
||||||
|
<<: *docker-builder
|
||||||
|
stage: build
|
||||||
|
script:
|
||||||
|
- docker run --rm --privileged hassioaddons/qemu-user-static:latest
|
||||||
|
- BUILD_FROM=homeassistant/${ADDON_ARCH}-base-ubuntu:latest
|
||||||
|
- ADDON_VERSION="${CI_COMMIT_TAG#v}"
|
||||||
|
- ADDON_VERSION="${ADDON_VERSION:-${CI_COMMIT_SHA:0:7}}"
|
||||||
|
- ESPHOMELIB_VERSION="${ESPHOMELIB_VERSION:-dev}"
|
||||||
|
- echo "Build from ${BUILD_FROM}"
|
||||||
|
- echo "Add-on version ${ADDON_VERSION}"
|
||||||
|
- echo "Esphomelib version ${ESPHOMELIB_VERSION}"
|
||||||
|
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev"
|
||||||
|
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
|
||||||
|
- |
|
||||||
|
docker build \
|
||||||
|
--build-arg "BUILD_FROM=${BUILD_FROM}" \
|
||||||
|
--build-arg "ADDON_ARCH=${ADDON_ARCH}" \
|
||||||
|
--build-arg "ADDON_VERSION=${ADDON_VERSION}" \
|
||||||
|
--build-arg "ESPHOMELIB_VERSION=${ESPHOMELIB_VERSION}" \
|
||||||
|
--tag "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev" \
|
||||||
|
--tag "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
--file "docker/Dockerfile.hassio" \
|
||||||
|
.
|
||||||
|
- |
|
||||||
|
if [ "${DO_PUSH:-true}" = true ]; then
|
||||||
|
echo "Pushing to CI registry"
|
||||||
|
docker push ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}
|
||||||
|
docker push ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Generic deploy template
|
||||||
|
.deploy-release: &deploy-release
|
||||||
|
<<: *docker-builder
|
||||||
|
stage: deploy
|
||||||
|
script:
|
||||||
|
- version="${CI_COMMIT_TAG#v}"
|
||||||
|
- echo "Publishing release version ${version}"
|
||||||
|
- docker pull "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
|
||||||
|
- docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"
|
||||||
|
|
||||||
|
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
|
||||||
|
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||||
|
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||||
|
|
||||||
|
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
|
||||||
|
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
|
||||||
|
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||||
|
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||||
|
|
||||||
|
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
only:
|
||||||
|
- /^v\d+\.\d+\.\d+$/
|
||||||
|
except:
|
||||||
|
- /^(?!master).+@/
|
||||||
|
|
||||||
|
.deploy-beta: &deploy-beta
|
||||||
|
<<: *docker-builder
|
||||||
|
stage: deploy
|
||||||
|
script:
|
||||||
|
- version="${CI_COMMIT_TAG#v}"
|
||||||
|
- echo "Publishing beta version ${version}"
|
||||||
|
- docker pull "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
|
||||||
|
- docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"
|
||||||
|
|
||||||
|
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
|
||||||
|
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
|
||||||
|
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||||
|
|
||||||
|
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
- |
|
||||||
|
docker tag \
|
||||||
|
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||||
|
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||||
|
only:
|
||||||
|
- /^v\d+\.\d+\.\d+b\d+$/
|
||||||
|
except:
|
||||||
|
- /^(?!rc).+@/
|
||||||
|
|
||||||
|
# Build jobs
|
||||||
|
build:normal:
|
||||||
|
<<: *docker-builder
|
||||||
|
stage: build
|
||||||
|
script:
|
||||||
|
- docker build -t "${CI_REGISTRY}/esphomeyaml:dev" .
|
||||||
|
|
||||||
|
.build-hassio-edge: &build-hassio-edge
|
||||||
|
<<: *build-hassio
|
||||||
|
except:
|
||||||
|
- /^v\d+\.\d+\.\d+$/
|
||||||
|
- /^v\d+\.\d+\.\d+b\d+$/
|
||||||
|
|
||||||
|
.build-hassio-release: &build-hassio-release
|
||||||
|
<<: *build-hassio
|
||||||
|
only:
|
||||||
|
- /^v\d+\.\d+\.\d+$/
|
||||||
|
- /^v\d+\.\d+\.\d+b\d+$/
|
||||||
|
|
||||||
|
build:hassio-armhf-edge:
|
||||||
|
<<: *build-hassio-edge
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: armhf
|
||||||
|
DO_PUSH: "false"
|
||||||
|
|
||||||
|
build:hassio-armhf:
|
||||||
|
<<: *build-hassio-release
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: armhf
|
||||||
|
ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
|
||||||
|
|
||||||
|
#build:hassio-aarch64-edge:
|
||||||
|
# <<: *build-hassio-edge
|
||||||
|
# variables:
|
||||||
|
# ADDON_ARCH: aarch64
|
||||||
|
# DO_PUSH: "false"
|
||||||
|
|
||||||
|
#build:hassio-aarch64:
|
||||||
|
# <<: *build-hassio-release
|
||||||
|
# variables:
|
||||||
|
# ADDON_ARCH: aarch64
|
||||||
|
# ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
|
||||||
|
|
||||||
|
build:hassio-i386-edge:
|
||||||
|
<<: *build-hassio-edge
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: i386
|
||||||
|
DO_PUSH: "false"
|
||||||
|
|
||||||
|
build:hassio-i386:
|
||||||
|
<<: *build-hassio-release
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: i386
|
||||||
|
ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
|
||||||
|
|
||||||
|
build:hassio-amd64-edge:
|
||||||
|
<<: *build-hassio-edge
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: amd64
|
||||||
|
DO_PUSH: "false"
|
||||||
|
|
||||||
|
build:hassio-amd64:
|
||||||
|
<<: *build-hassio-release
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: amd64
|
||||||
|
ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
|
||||||
|
|
||||||
|
# Deploy jobs
|
||||||
|
deploy-release:armhf:
|
||||||
|
<<: *deploy-release
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: armhf
|
||||||
|
|
||||||
|
deploy-beta:armhf:
|
||||||
|
<<: *deploy-beta
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: armhf
|
||||||
|
|
||||||
|
#deploy-release:aarch64:
|
||||||
|
# <<: *deploy-release
|
||||||
|
# variables:
|
||||||
|
# ADDON_ARCH: aarch64
|
||||||
|
#
|
||||||
|
#deploy-beta:aarch64:
|
||||||
|
# <<: *deploy-beta
|
||||||
|
# variables:
|
||||||
|
# ADDON_ARCH: aarch64
|
||||||
|
|
||||||
|
deploy-release:i386:
|
||||||
|
<<: *deploy-release
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: i386
|
||||||
|
|
||||||
|
deploy-beta:i386:
|
||||||
|
<<: *deploy-beta
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: i386
|
||||||
|
|
||||||
|
deploy-release:amd64:
|
||||||
|
<<: *deploy-release
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: amd64
|
||||||
|
|
||||||
|
deploy-beta:amd64:
|
||||||
|
<<: *deploy-beta
|
||||||
|
variables:
|
||||||
|
ADDON_ARCH: amd64
|
||||||
|
|
||||||
|
.deploy-pypi: &deploy-pypi
|
||||||
|
stage: deploy
|
||||||
|
before_script:
|
||||||
|
- pip install -e .
|
||||||
|
- pip install twine
|
||||||
|
script:
|
||||||
|
- python setup.py sdist
|
||||||
|
- twine upload dist/*
|
||||||
|
tags:
|
||||||
|
- python2.7
|
||||||
|
- esphomeyaml-test
|
||||||
|
|
||||||
|
deploy-release:pypi:
|
||||||
|
<<: *deploy-pypi
|
||||||
|
only:
|
||||||
|
- /^v\d+\.\d+\.\d+$/
|
||||||
|
except:
|
||||||
|
- /^(?!master).+@/
|
||||||
|
|
||||||
|
deploy-beta:pypi:
|
||||||
|
<<: *deploy-pypi
|
||||||
|
only:
|
||||||
|
- /^v\d+\.\d+\.\d+b\d+$/
|
||||||
|
except:
|
||||||
|
- /^(?!rc).+@/
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
ports:
|
|
||||||
- port: 6052
|
|
||||||
onOpen: open-preview
|
|
||||||
tasks:
|
|
||||||
- before: pyenv local $(pyenv version | grep '^3\.' | cut -d ' ' -f 1) && script/setup
|
|
||||||
command: python -m esphome config dashboard
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
# See https://pre-commit.com for more information
|
|
||||||
# See https://pre-commit.com/hooks.html for more hooks
|
|
||||||
repos:
|
|
||||||
- repo: https://github.com/ambv/black
|
|
||||||
rev: 20.8b1
|
|
||||||
hooks:
|
|
||||||
- id: black
|
|
||||||
args:
|
|
||||||
- --safe
|
|
||||||
- --quiet
|
|
||||||
files: ^((esphome|script|tests)/.+)?[^/]+\.py$
|
|
||||||
- repo: https://gitlab.com/pycqa/flake8
|
|
||||||
rev: 3.8.4
|
|
||||||
hooks:
|
|
||||||
- id: flake8
|
|
||||||
additional_dependencies:
|
|
||||||
- flake8-docstrings==1.5.0
|
|
||||||
- pydocstyle==5.1.1
|
|
||||||
files: ^(esphome|tests)/.+\.py$
|
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
|
||||||
rev: v3.4.0
|
|
||||||
hooks:
|
|
||||||
- id: no-commit-to-branch
|
|
||||||
args:
|
|
||||||
- --branch=dev
|
|
||||||
- --branch=master
|
|
||||||
- --branch=beta
|
|
||||||
20
.travis.yml
Normal file
20
.travis.yml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
sudo: false
|
||||||
|
language: python
|
||||||
|
python:
|
||||||
|
- "2.7"
|
||||||
|
jobs:
|
||||||
|
include:
|
||||||
|
- name: "Lint"
|
||||||
|
install:
|
||||||
|
- pip install -r requirements.txt
|
||||||
|
- pip install flake8==3.5.0 pylint==1.9.3 tzlocal pillow
|
||||||
|
script:
|
||||||
|
- flake8 esphomeyaml
|
||||||
|
- pylint esphomeyaml
|
||||||
|
- name: "Test"
|
||||||
|
install:
|
||||||
|
- pip install -e .
|
||||||
|
- pip install tzlocal pillow
|
||||||
|
script:
|
||||||
|
- esphomeyaml tests/test1.yaml compile
|
||||||
|
- esphomeyaml tests/test2.yaml compile
|
||||||
11
.vscode/tasks.json
vendored
11
.vscode/tasks.json
vendored
@@ -1,11 +0,0 @@
|
|||||||
{
|
|
||||||
"version": "2.0.0",
|
|
||||||
"tasks": [
|
|
||||||
{
|
|
||||||
"label": "run",
|
|
||||||
"type": "shell",
|
|
||||||
"command": "python3 -m esphome config dashboard",
|
|
||||||
"problemMatcher": []
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
114
CODEOWNERS
114
CODEOWNERS
@@ -1,114 +0,0 @@
|
|||||||
# This file is generated by script/build_codeowners.py
|
|
||||||
# People marked here will be automatically requested for a review
|
|
||||||
# when the code that they own is touched.
|
|
||||||
#
|
|
||||||
# Every time an issue is created with a label corresponding to an integration,
|
|
||||||
# the integration's code owner is automatically notified.
|
|
||||||
|
|
||||||
# Core Code
|
|
||||||
setup.py @esphome/core
|
|
||||||
esphome/*.py @esphome/core
|
|
||||||
esphome/core/* @esphome/core
|
|
||||||
|
|
||||||
# Integrations
|
|
||||||
esphome/components/ac_dimmer/* @glmnet
|
|
||||||
esphome/components/adc/* @esphome/core
|
|
||||||
esphome/components/addressable_light/* @justfalter
|
|
||||||
esphome/components/animation/* @syndlex
|
|
||||||
esphome/components/api/* @OttoWinter
|
|
||||||
esphome/components/async_tcp/* @OttoWinter
|
|
||||||
esphome/components/atc_mithermometer/* @ahpohl
|
|
||||||
esphome/components/bang_bang/* @OttoWinter
|
|
||||||
esphome/components/binary_sensor/* @esphome/core
|
|
||||||
esphome/components/canbus/* @danielschramm @mvturnho
|
|
||||||
esphome/components/captive_portal/* @OttoWinter
|
|
||||||
esphome/components/climate/* @esphome/core
|
|
||||||
esphome/components/climate_ir/* @glmnet
|
|
||||||
esphome/components/coolix/* @glmnet
|
|
||||||
esphome/components/cover/* @esphome/core
|
|
||||||
esphome/components/ct_clamp/* @jesserockz
|
|
||||||
esphome/components/debug/* @OttoWinter
|
|
||||||
esphome/components/dfplayer/* @glmnet
|
|
||||||
esphome/components/dht/* @OttoWinter
|
|
||||||
esphome/components/ds1307/* @badbadc0ffee
|
|
||||||
esphome/components/exposure_notifications/* @OttoWinter
|
|
||||||
esphome/components/ezo/* @ssieb
|
|
||||||
esphome/components/fastled_base/* @OttoWinter
|
|
||||||
esphome/components/globals/* @esphome/core
|
|
||||||
esphome/components/gpio/* @esphome/core
|
|
||||||
esphome/components/homeassistant/* @OttoWinter
|
|
||||||
esphome/components/i2c/* @esphome/core
|
|
||||||
esphome/components/inkbird_ibsth1_mini/* @fkirill
|
|
||||||
esphome/components/inkplate6/* @jesserockz
|
|
||||||
esphome/components/integration/* @OttoWinter
|
|
||||||
esphome/components/interval/* @esphome/core
|
|
||||||
esphome/components/json/* @OttoWinter
|
|
||||||
esphome/components/ledc/* @OttoWinter
|
|
||||||
esphome/components/light/* @esphome/core
|
|
||||||
esphome/components/logger/* @esphome/core
|
|
||||||
esphome/components/max7219digit/* @rspaargaren
|
|
||||||
esphome/components/mcp23008/* @jesserockz
|
|
||||||
esphome/components/mcp23017/* @jesserockz
|
|
||||||
esphome/components/mcp23s08/* @SenexCrenshaw @jesserockz
|
|
||||||
esphome/components/mcp23s17/* @SenexCrenshaw @jesserockz
|
|
||||||
esphome/components/mcp23x08_base/* @jesserockz
|
|
||||||
esphome/components/mcp23x17_base/* @jesserockz
|
|
||||||
esphome/components/mcp23xxx_base/* @jesserockz
|
|
||||||
esphome/components/mcp2515/* @danielschramm @mvturnho
|
|
||||||
esphome/components/mcp9808/* @k7hpn
|
|
||||||
esphome/components/midea_ac/* @dudanov
|
|
||||||
esphome/components/midea_dongle/* @dudanov
|
|
||||||
esphome/components/network/* @esphome/core
|
|
||||||
esphome/components/nfc/* @jesserockz
|
|
||||||
esphome/components/ota/* @esphome/core
|
|
||||||
esphome/components/output/* @esphome/core
|
|
||||||
esphome/components/pid/* @OttoWinter
|
|
||||||
esphome/components/pn532/* @OttoWinter @jesserockz
|
|
||||||
esphome/components/pn532_i2c/* @OttoWinter @jesserockz
|
|
||||||
esphome/components/pn532_spi/* @OttoWinter @jesserockz
|
|
||||||
esphome/components/power_supply/* @esphome/core
|
|
||||||
esphome/components/pulse_meter/* @stevebaxter
|
|
||||||
esphome/components/rc522/* @glmnet
|
|
||||||
esphome/components/rc522_i2c/* @glmnet
|
|
||||||
esphome/components/rc522_spi/* @glmnet
|
|
||||||
esphome/components/restart/* @esphome/core
|
|
||||||
esphome/components/rf_bridge/* @jesserockz
|
|
||||||
esphome/components/rtttl/* @glmnet
|
|
||||||
esphome/components/script/* @esphome/core
|
|
||||||
esphome/components/sensor/* @esphome/core
|
|
||||||
esphome/components/shutdown/* @esphome/core
|
|
||||||
esphome/components/sim800l/* @glmnet
|
|
||||||
esphome/components/spi/* @esphome/core
|
|
||||||
esphome/components/ssd1322_base/* @kbx81
|
|
||||||
esphome/components/ssd1322_spi/* @kbx81
|
|
||||||
esphome/components/ssd1325_base/* @kbx81
|
|
||||||
esphome/components/ssd1325_spi/* @kbx81
|
|
||||||
esphome/components/ssd1327_base/* @kbx81
|
|
||||||
esphome/components/ssd1327_i2c/* @kbx81
|
|
||||||
esphome/components/ssd1327_spi/* @kbx81
|
|
||||||
esphome/components/ssd1331_base/* @kbx81
|
|
||||||
esphome/components/ssd1331_spi/* @kbx81
|
|
||||||
esphome/components/ssd1351_base/* @kbx81
|
|
||||||
esphome/components/ssd1351_spi/* @kbx81
|
|
||||||
esphome/components/st7735/* @SenexCrenshaw
|
|
||||||
esphome/components/st7789v/* @kbx81
|
|
||||||
esphome/components/substitutions/* @esphome/core
|
|
||||||
esphome/components/sun/* @OttoWinter
|
|
||||||
esphome/components/switch/* @esphome/core
|
|
||||||
esphome/components/tcl112/* @glmnet
|
|
||||||
esphome/components/teleinfo/* @0hax
|
|
||||||
esphome/components/thermostat/* @kbx81
|
|
||||||
esphome/components/time/* @OttoWinter
|
|
||||||
esphome/components/tm1637/* @glmnet
|
|
||||||
esphome/components/tmp102/* @timsavage
|
|
||||||
esphome/components/tuya/binary_sensor/* @jesserockz
|
|
||||||
esphome/components/tuya/climate/* @jesserockz
|
|
||||||
esphome/components/tuya/sensor/* @jesserockz
|
|
||||||
esphome/components/tuya/switch/* @jesserockz
|
|
||||||
esphome/components/uart/* @esphome/core
|
|
||||||
esphome/components/ultrasonic/* @OttoWinter
|
|
||||||
esphome/components/version/* @esphome/core
|
|
||||||
esphome/components/web_server_base/* @OttoWinter
|
|
||||||
esphome/components/whirlpool/* @glmnet
|
|
||||||
esphome/components/xiaomi_lywsd03mmc/* @ahpohl
|
|
||||||
esphome/components/xiaomi_mhoc401/* @vevsvevs
|
|
||||||
@@ -1,16 +1,16 @@
|
|||||||
# Contributing to ESPHome
|
# Contributing to esphomeyaml
|
||||||
|
|
||||||
This python project is responsible for reading in YAML configuration files,
|
esphomeyaml is a part of esphomelib and is responsible for reading in YAML configuration files,
|
||||||
converting them to C++ code. This code is then converted to a platformio project and compiled
|
converting them to C++ code. This code is then converted to a platformio project and compiled
|
||||||
with [esphome-core](https://github.com/esphome/esphome-core), the C++ framework behind the project.
|
with [esphomelib](https://github.com/OttoWinter/esphomelib), the C++ framework behind the project.
|
||||||
|
|
||||||
For a detailed guide, please see https://esphome.io/guides/contributing.html#contributing-to-esphomeyaml
|
For a detailed guide, please see https://esphomelib.com/esphomeyaml/guides/contributing.html#contributing-to-esphomeyaml
|
||||||
|
|
||||||
Things to note when contributing:
|
Things to note when contributing:
|
||||||
|
|
||||||
- Please test your changes :)
|
- Please test your changes :)
|
||||||
- If a new feature is added or an existing user-facing feature is changed, you should also
|
- If a new feature is added or an existing user-facing feature is changed, you should also
|
||||||
update the [docs](https://github.com/esphome/esphome-docs). See [contributing to esphome-docs](https://esphome.io/guides/contributing.html#contributing-to-esphomedocs)
|
update the [docs](https://github.com/OttoWinter/esphomedocs). See [contributing to esphomedocs](https://esphomelib.com/esphomeyaml/guides/contributing.html#contributing-to-esphomedocs)
|
||||||
for more information.
|
for more information.
|
||||||
- Please also update the tests in the `tests/` folder. You can do so by just adding a line in one of the YAML files
|
- Please also update the tests in the `tests/` folder. You can do so by just adding a line in one of the YAML files
|
||||||
which checks if your new feature compiles correctly.
|
which checks if your new feature compiles correctly.
|
||||||
|
|||||||
29
Dockerfile
Normal file
29
Dockerfile
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
ARG BUILD_FROM=python:2.7
|
||||||
|
FROM ${BUILD_FROM}
|
||||||
|
MAINTAINER Otto Winter <contact@otto-winter.com>
|
||||||
|
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
python-pil \
|
||||||
|
git \
|
||||||
|
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
|
||||||
|
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||||
|
platformio settings set enable_telemetry No && \
|
||||||
|
platformio settings set check_libraries_interval 1000000 && \
|
||||||
|
platformio settings set check_platformio_interval 1000000 && \
|
||||||
|
platformio settings set check_platforms_interval 1000000
|
||||||
|
|
||||||
|
ENV ESPHOMEYAML_OTA_HOST_PORT=6123
|
||||||
|
EXPOSE 6123
|
||||||
|
VOLUME /config
|
||||||
|
WORKDIR /usr/src/app
|
||||||
|
|
||||||
|
COPY docker/platformio.ini /pio/platformio.ini
|
||||||
|
RUN platformio run -d /pio; rm -rf /pio
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
RUN pip install --no-cache-dir --no-binary :all: -e . && \
|
||||||
|
pip install --no-cache-dir --no-binary :all: tzlocal
|
||||||
|
|
||||||
|
WORKDIR /config
|
||||||
|
ENTRYPOINT ["esphomeyaml"]
|
||||||
|
CMD ["/config", "dashboard"]
|
||||||
692
LICENSE
692
LICENSE
@@ -1,17 +1,6 @@
|
|||||||
# ESPHome License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2019 ESPHome
|
Copyright (c) 2018 Otto Winter
|
||||||
|
|
||||||
The ESPHome License is made up of two base licenses: MIT and the GNU GENERAL PUBLIC LICENSE.
|
|
||||||
The C++/runtime codebase of the ESPHome project (file extensions .c, .cpp, .h, .hpp, .tcc, .ino) are
|
|
||||||
published under the GPLv3 license. The python codebase and all other parts of this codebase are
|
|
||||||
published under the MIT license.
|
|
||||||
|
|
||||||
Both MIT and GPLv3 licenses are attached to this document.
|
|
||||||
|
|
||||||
## MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2019 ESPHome
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
@@ -30,680 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
SOFTWARE.
|
SOFTWARE.
|
||||||
|
|
||||||
## GPLv3 License
|
|
||||||
|
|
||||||
GNU GENERAL PUBLIC LICENSE
|
|
||||||
Version 3, 29 June 2007
|
|
||||||
|
|
||||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
|
||||||
Everyone is permitted to copy and distribute verbatim copies
|
|
||||||
of this license document, but changing it is not allowed.
|
|
||||||
|
|
||||||
Preamble
|
|
||||||
|
|
||||||
The GNU General Public License is a free, copyleft license for
|
|
||||||
software and other kinds of works.
|
|
||||||
|
|
||||||
The licenses for most software and other practical works are designed
|
|
||||||
to take away your freedom to share and change the works. By contrast,
|
|
||||||
the GNU General Public License is intended to guarantee your freedom to
|
|
||||||
share and change all versions of a program--to make sure it remains free
|
|
||||||
software for all its users. We, the Free Software Foundation, use the
|
|
||||||
GNU General Public License for most of our software; it applies also to
|
|
||||||
any other work released this way by its authors. You can apply it to
|
|
||||||
your programs, too.
|
|
||||||
|
|
||||||
When we speak of free software, we are referring to freedom, not
|
|
||||||
price. Our General Public Licenses are designed to make sure that you
|
|
||||||
have the freedom to distribute copies of free software (and charge for
|
|
||||||
them if you wish), that you receive source code or can get it if you
|
|
||||||
want it, that you can change the software or use pieces of it in new
|
|
||||||
free programs, and that you know you can do these things.
|
|
||||||
|
|
||||||
To protect your rights, we need to prevent others from denying you
|
|
||||||
these rights or asking you to surrender the rights. Therefore, you have
|
|
||||||
certain responsibilities if you distribute copies of the software, or if
|
|
||||||
you modify it: responsibilities to respect the freedom of others.
|
|
||||||
|
|
||||||
For example, if you distribute copies of such a program, whether
|
|
||||||
gratis or for a fee, you must pass on to the recipients the same
|
|
||||||
freedoms that you received. You must make sure that they, too, receive
|
|
||||||
or can get the source code. And you must show them these terms so they
|
|
||||||
know their rights.
|
|
||||||
|
|
||||||
Developers that use the GNU GPL protect your rights with two steps:
|
|
||||||
(1) assert copyright on the software, and (2) offer you this License
|
|
||||||
giving you legal permission to copy, distribute and/or modify it.
|
|
||||||
|
|
||||||
For the developers' and authors' protection, the GPL clearly explains
|
|
||||||
that there is no warranty for this free software. For both users' and
|
|
||||||
authors' sake, the GPL requires that modified versions be marked as
|
|
||||||
changed, so that their problems will not be attributed erroneously to
|
|
||||||
authors of previous versions.
|
|
||||||
|
|
||||||
Some devices are designed to deny users access to install or run
|
|
||||||
modified versions of the software inside them, although the manufacturer
|
|
||||||
can do so. This is fundamentally incompatible with the aim of
|
|
||||||
protecting users' freedom to change the software. The systematic
|
|
||||||
pattern of such abuse occurs in the area of products for individuals to
|
|
||||||
use, which is precisely where it is most unacceptable. Therefore, we
|
|
||||||
have designed this version of the GPL to prohibit the practice for those
|
|
||||||
products. If such problems arise substantially in other domains, we
|
|
||||||
stand ready to extend this provision to those domains in future versions
|
|
||||||
of the GPL, as needed to protect the freedom of users.
|
|
||||||
|
|
||||||
Finally, every program is threatened constantly by software patents.
|
|
||||||
States should not allow patents to restrict development and use of
|
|
||||||
software on general-purpose computers, but in those that do, we wish to
|
|
||||||
avoid the special danger that patents applied to a free program could
|
|
||||||
make it effectively proprietary. To prevent this, the GPL assures that
|
|
||||||
patents cannot be used to render the program non-free.
|
|
||||||
|
|
||||||
The precise terms and conditions for copying, distribution and
|
|
||||||
modification follow.
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
0. Definitions.
|
|
||||||
|
|
||||||
"This License" refers to version 3 of the GNU General Public License.
|
|
||||||
|
|
||||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
|
||||||
works, such as semiconductor masks.
|
|
||||||
|
|
||||||
"The Program" refers to any copyrightable work licensed under this
|
|
||||||
License. Each licensee is addressed as "you". "Licensees" and
|
|
||||||
"recipients" may be individuals or organizations.
|
|
||||||
|
|
||||||
To "modify" a work means to copy from or adapt all or part of the work
|
|
||||||
in a fashion requiring copyright permission, other than the making of an
|
|
||||||
exact copy. The resulting work is called a "modified version" of the
|
|
||||||
earlier work or a work "based on" the earlier work.
|
|
||||||
|
|
||||||
A "covered work" means either the unmodified Program or a work based
|
|
||||||
on the Program.
|
|
||||||
|
|
||||||
To "propagate" a work means to do anything with it that, without
|
|
||||||
permission, would make you directly or secondarily liable for
|
|
||||||
infringement under applicable copyright law, except executing it on a
|
|
||||||
computer or modifying a private copy. Propagation includes copying,
|
|
||||||
distribution (with or without modification), making available to the
|
|
||||||
public, and in some countries other activities as well.
|
|
||||||
|
|
||||||
To "convey" a work means any kind of propagation that enables other
|
|
||||||
parties to make or receive copies. Mere interaction with a user through
|
|
||||||
a computer network, with no transfer of a copy, is not conveying.
|
|
||||||
|
|
||||||
An interactive user interface displays "Appropriate Legal Notices"
|
|
||||||
to the extent that it includes a convenient and prominently visible
|
|
||||||
feature that (1) displays an appropriate copyright notice, and (2)
|
|
||||||
tells the user that there is no warranty for the work (except to the
|
|
||||||
extent that warranties are provided), that licensees may convey the
|
|
||||||
work under this License, and how to view a copy of this License. If
|
|
||||||
the interface presents a list of user commands or options, such as a
|
|
||||||
menu, a prominent item in the list meets this criterion.
|
|
||||||
|
|
||||||
1. Source Code.
|
|
||||||
|
|
||||||
The "source code" for a work means the preferred form of the work
|
|
||||||
for making modifications to it. "Object code" means any non-source
|
|
||||||
form of a work.
|
|
||||||
|
|
||||||
A "Standard Interface" means an interface that either is an official
|
|
||||||
standard defined by a recognized standards body, or, in the case of
|
|
||||||
interfaces specified for a particular programming language, one that
|
|
||||||
is widely used among developers working in that language.
|
|
||||||
|
|
||||||
The "System Libraries" of an executable work include anything, other
|
|
||||||
than the work as a whole, that (a) is included in the normal form of
|
|
||||||
packaging a Major Component, but which is not part of that Major
|
|
||||||
Component, and (b) serves only to enable use of the work with that
|
|
||||||
Major Component, or to implement a Standard Interface for which an
|
|
||||||
implementation is available to the public in source code form. A
|
|
||||||
"Major Component", in this context, means a major essential component
|
|
||||||
(kernel, window system, and so on) of the specific operating system
|
|
||||||
(if any) on which the executable work runs, or a compiler used to
|
|
||||||
produce the work, or an object code interpreter used to run it.
|
|
||||||
|
|
||||||
The "Corresponding Source" for a work in object code form means all
|
|
||||||
the source code needed to generate, install, and (for an executable
|
|
||||||
work) run the object code and to modify the work, including scripts to
|
|
||||||
control those activities. However, it does not include the work's
|
|
||||||
System Libraries, or general-purpose tools or generally available free
|
|
||||||
programs which are used unmodified in performing those activities but
|
|
||||||
which are not part of the work. For example, Corresponding Source
|
|
||||||
includes interface definition files associated with source files for
|
|
||||||
the work, and the source code for shared libraries and dynamically
|
|
||||||
linked subprograms that the work is specifically designed to require,
|
|
||||||
such as by intimate data communication or control flow between those
|
|
||||||
subprograms and other parts of the work.
|
|
||||||
|
|
||||||
The Corresponding Source need not include anything that users
|
|
||||||
can regenerate automatically from other parts of the Corresponding
|
|
||||||
Source.
|
|
||||||
|
|
||||||
The Corresponding Source for a work in source code form is that
|
|
||||||
same work.
|
|
||||||
|
|
||||||
2. Basic Permissions.
|
|
||||||
|
|
||||||
All rights granted under this License are granted for the term of
|
|
||||||
copyright on the Program, and are irrevocable provided the stated
|
|
||||||
conditions are met. This License explicitly affirms your unlimited
|
|
||||||
permission to run the unmodified Program. The output from running a
|
|
||||||
covered work is covered by this License only if the output, given its
|
|
||||||
content, constitutes a covered work. This License acknowledges your
|
|
||||||
rights of fair use or other equivalent, as provided by copyright law.
|
|
||||||
|
|
||||||
You may make, run and propagate covered works that you do not
|
|
||||||
convey, without conditions so long as your license otherwise remains
|
|
||||||
in force. You may convey covered works to others for the sole purpose
|
|
||||||
of having them make modifications exclusively for you, or provide you
|
|
||||||
with facilities for running those works, provided that you comply with
|
|
||||||
the terms of this License in conveying all material for which you do
|
|
||||||
not control copyright. Those thus making or running the covered works
|
|
||||||
for you must do so exclusively on your behalf, under your direction
|
|
||||||
and control, on terms that prohibit them from making any copies of
|
|
||||||
your copyrighted material outside their relationship with you.
|
|
||||||
|
|
||||||
Conveying under any other circumstances is permitted solely under
|
|
||||||
the conditions stated below. Sublicensing is not allowed; section 10
|
|
||||||
makes it unnecessary.
|
|
||||||
|
|
||||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
|
||||||
|
|
||||||
No covered work shall be deemed part of an effective technological
|
|
||||||
measure under any applicable law fulfilling obligations under article
|
|
||||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
|
||||||
similar laws prohibiting or restricting circumvention of such
|
|
||||||
measures.
|
|
||||||
|
|
||||||
When you convey a covered work, you waive any legal power to forbid
|
|
||||||
circumvention of technological measures to the extent such circumvention
|
|
||||||
is effected by exercising rights under this License with respect to
|
|
||||||
the covered work, and you disclaim any intention to limit operation or
|
|
||||||
modification of the work as a means of enforcing, against the work's
|
|
||||||
users, your or third parties' legal rights to forbid circumvention of
|
|
||||||
technological measures.
|
|
||||||
|
|
||||||
4. Conveying Verbatim Copies.
|
|
||||||
|
|
||||||
You may convey verbatim copies of the Program's source code as you
|
|
||||||
receive it, in any medium, provided that you conspicuously and
|
|
||||||
appropriately publish on each copy an appropriate copyright notice;
|
|
||||||
keep intact all notices stating that this License and any
|
|
||||||
non-permissive terms added in accord with section 7 apply to the code;
|
|
||||||
keep intact all notices of the absence of any warranty; and give all
|
|
||||||
recipients a copy of this License along with the Program.
|
|
||||||
|
|
||||||
You may charge any price or no price for each copy that you convey,
|
|
||||||
and you may offer support or warranty protection for a fee.
|
|
||||||
|
|
||||||
5. Conveying Modified Source Versions.
|
|
||||||
|
|
||||||
You may convey a work based on the Program, or the modifications to
|
|
||||||
produce it from the Program, in the form of source code under the
|
|
||||||
terms of section 4, provided that you also meet all of these conditions:
|
|
||||||
|
|
||||||
a) The work must carry prominent notices stating that you modified
|
|
||||||
it, and giving a relevant date.
|
|
||||||
|
|
||||||
b) The work must carry prominent notices stating that it is
|
|
||||||
released under this License and any conditions added under section
|
|
||||||
7. This requirement modifies the requirement in section 4 to
|
|
||||||
"keep intact all notices".
|
|
||||||
|
|
||||||
c) You must license the entire work, as a whole, under this
|
|
||||||
License to anyone who comes into possession of a copy. This
|
|
||||||
License will therefore apply, along with any applicable section 7
|
|
||||||
additional terms, to the whole of the work, and all its parts,
|
|
||||||
regardless of how they are packaged. This License gives no
|
|
||||||
permission to license the work in any other way, but it does not
|
|
||||||
invalidate such permission if you have separately received it.
|
|
||||||
|
|
||||||
d) If the work has interactive user interfaces, each must display
|
|
||||||
Appropriate Legal Notices; however, if the Program has interactive
|
|
||||||
interfaces that do not display Appropriate Legal Notices, your
|
|
||||||
work need not make them do so.
|
|
||||||
|
|
||||||
A compilation of a covered work with other separate and independent
|
|
||||||
works, which are not by their nature extensions of the covered work,
|
|
||||||
and which are not combined with it such as to form a larger program,
|
|
||||||
in or on a volume of a storage or distribution medium, is called an
|
|
||||||
"aggregate" if the compilation and its resulting copyright are not
|
|
||||||
used to limit the access or legal rights of the compilation's users
|
|
||||||
beyond what the individual works permit. Inclusion of a covered work
|
|
||||||
in an aggregate does not cause this License to apply to the other
|
|
||||||
parts of the aggregate.
|
|
||||||
|
|
||||||
6. Conveying Non-Source Forms.
|
|
||||||
|
|
||||||
You may convey a covered work in object code form under the terms
|
|
||||||
of sections 4 and 5, provided that you also convey the
|
|
||||||
machine-readable Corresponding Source under the terms of this License,
|
|
||||||
in one of these ways:
|
|
||||||
|
|
||||||
a) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by the
|
|
||||||
Corresponding Source fixed on a durable physical medium
|
|
||||||
customarily used for software interchange.
|
|
||||||
|
|
||||||
b) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by a
|
|
||||||
written offer, valid for at least three years and valid for as
|
|
||||||
long as you offer spare parts or customer support for that product
|
|
||||||
model, to give anyone who possesses the object code either (1) a
|
|
||||||
copy of the Corresponding Source for all the software in the
|
|
||||||
product that is covered by this License, on a durable physical
|
|
||||||
medium customarily used for software interchange, for a price no
|
|
||||||
more than your reasonable cost of physically performing this
|
|
||||||
conveying of source, or (2) access to copy the
|
|
||||||
Corresponding Source from a network server at no charge.
|
|
||||||
|
|
||||||
c) Convey individual copies of the object code with a copy of the
|
|
||||||
written offer to provide the Corresponding Source. This
|
|
||||||
alternative is allowed only occasionally and noncommercially, and
|
|
||||||
only if you received the object code with such an offer, in accord
|
|
||||||
with subsection 6b.
|
|
||||||
|
|
||||||
d) Convey the object code by offering access from a designated
|
|
||||||
place (gratis or for a charge), and offer equivalent access to the
|
|
||||||
Corresponding Source in the same way through the same place at no
|
|
||||||
further charge. You need not require recipients to copy the
|
|
||||||
Corresponding Source along with the object code. If the place to
|
|
||||||
copy the object code is a network server, the Corresponding Source
|
|
||||||
may be on a different server (operated by you or a third party)
|
|
||||||
that supports equivalent copying facilities, provided you maintain
|
|
||||||
clear directions next to the object code saying where to find the
|
|
||||||
Corresponding Source. Regardless of what server hosts the
|
|
||||||
Corresponding Source, you remain obligated to ensure that it is
|
|
||||||
available for as long as needed to satisfy these requirements.
|
|
||||||
|
|
||||||
e) Convey the object code using peer-to-peer transmission, provided
|
|
||||||
you inform other peers where the object code and Corresponding
|
|
||||||
Source of the work are being offered to the general public at no
|
|
||||||
charge under subsection 6d.
|
|
||||||
|
|
||||||
A separable portion of the object code, whose source code is excluded
|
|
||||||
from the Corresponding Source as a System Library, need not be
|
|
||||||
included in conveying the object code work.
|
|
||||||
|
|
||||||
A "User Product" is either (1) a "consumer product", which means any
|
|
||||||
tangible personal property which is normally used for personal, family,
|
|
||||||
or household purposes, or (2) anything designed or sold for incorporation
|
|
||||||
into a dwelling. In determining whether a product is a consumer product,
|
|
||||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
|
||||||
product received by a particular user, "normally used" refers to a
|
|
||||||
typical or common use of that class of product, regardless of the status
|
|
||||||
of the particular user or of the way in which the particular user
|
|
||||||
actually uses, or expects or is expected to use, the product. A product
|
|
||||||
is a consumer product regardless of whether the product has substantial
|
|
||||||
commercial, industrial or non-consumer uses, unless such uses represent
|
|
||||||
the only significant mode of use of the product.
|
|
||||||
|
|
||||||
"Installation Information" for a User Product means any methods,
|
|
||||||
procedures, authorization keys, or other information required to install
|
|
||||||
and execute modified versions of a covered work in that User Product from
|
|
||||||
a modified version of its Corresponding Source. The information must
|
|
||||||
suffice to ensure that the continued functioning of the modified object
|
|
||||||
code is in no case prevented or interfered with solely because
|
|
||||||
modification has been made.
|
|
||||||
|
|
||||||
If you convey an object code work under this section in, or with, or
|
|
||||||
specifically for use in, a User Product, and the conveying occurs as
|
|
||||||
part of a transaction in which the right of possession and use of the
|
|
||||||
User Product is transferred to the recipient in perpetuity or for a
|
|
||||||
fixed term (regardless of how the transaction is characterized), the
|
|
||||||
Corresponding Source conveyed under this section must be accompanied
|
|
||||||
by the Installation Information. But this requirement does not apply
|
|
||||||
if neither you nor any third party retains the ability to install
|
|
||||||
modified object code on the User Product (for example, the work has
|
|
||||||
been installed in ROM).
|
|
||||||
|
|
||||||
The requirement to provide Installation Information does not include a
|
|
||||||
requirement to continue to provide support service, warranty, or updates
|
|
||||||
for a work that has been modified or installed by the recipient, or for
|
|
||||||
the User Product in which it has been modified or installed. Access to a
|
|
||||||
network may be denied when the modification itself materially and
|
|
||||||
adversely affects the operation of the network or violates the rules and
|
|
||||||
protocols for communication across the network.
|
|
||||||
|
|
||||||
Corresponding Source conveyed, and Installation Information provided,
|
|
||||||
in accord with this section must be in a format that is publicly
|
|
||||||
documented (and with an implementation available to the public in
|
|
||||||
source code form), and must require no special password or key for
|
|
||||||
unpacking, reading or copying.
|
|
||||||
|
|
||||||
7. Additional Terms.
|
|
||||||
|
|
||||||
"Additional permissions" are terms that supplement the terms of this
|
|
||||||
License by making exceptions from one or more of its conditions.
|
|
||||||
Additional permissions that are applicable to the entire Program shall
|
|
||||||
be treated as though they were included in this License, to the extent
|
|
||||||
that they are valid under applicable law. If additional permissions
|
|
||||||
apply only to part of the Program, that part may be used separately
|
|
||||||
under those permissions, but the entire Program remains governed by
|
|
||||||
this License without regard to the additional permissions.
|
|
||||||
|
|
||||||
When you convey a copy of a covered work, you may at your option
|
|
||||||
remove any additional permissions from that copy, or from any part of
|
|
||||||
it. (Additional permissions may be written to require their own
|
|
||||||
removal in certain cases when you modify the work.) You may place
|
|
||||||
additional permissions on material, added by you to a covered work,
|
|
||||||
for which you have or can give appropriate copyright permission.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, for material you
|
|
||||||
add to a covered work, you may (if authorized by the copyright holders of
|
|
||||||
that material) supplement the terms of this License with terms:
|
|
||||||
|
|
||||||
a) Disclaiming warranty or limiting liability differently from the
|
|
||||||
terms of sections 15 and 16 of this License; or
|
|
||||||
|
|
||||||
b) Requiring preservation of specified reasonable legal notices or
|
|
||||||
author attributions in that material or in the Appropriate Legal
|
|
||||||
Notices displayed by works containing it; or
|
|
||||||
|
|
||||||
c) Prohibiting misrepresentation of the origin of that material, or
|
|
||||||
requiring that modified versions of such material be marked in
|
|
||||||
reasonable ways as different from the original version; or
|
|
||||||
|
|
||||||
d) Limiting the use for publicity purposes of names of licensors or
|
|
||||||
authors of the material; or
|
|
||||||
|
|
||||||
e) Declining to grant rights under trademark law for use of some
|
|
||||||
trade names, trademarks, or service marks; or
|
|
||||||
|
|
||||||
f) Requiring indemnification of licensors and authors of that
|
|
||||||
material by anyone who conveys the material (or modified versions of
|
|
||||||
it) with contractual assumptions of liability to the recipient, for
|
|
||||||
any liability that these contractual assumptions directly impose on
|
|
||||||
those licensors and authors.
|
|
||||||
|
|
||||||
All other non-permissive additional terms are considered "further
|
|
||||||
restrictions" within the meaning of section 10. If the Program as you
|
|
||||||
received it, or any part of it, contains a notice stating that it is
|
|
||||||
governed by this License along with a term that is a further
|
|
||||||
restriction, you may remove that term. If a license document contains
|
|
||||||
a further restriction but permits relicensing or conveying under this
|
|
||||||
License, you may add to a covered work material governed by the terms
|
|
||||||
of that license document, provided that the further restriction does
|
|
||||||
not survive such relicensing or conveying.
|
|
||||||
|
|
||||||
If you add terms to a covered work in accord with this section, you
|
|
||||||
must place, in the relevant source files, a statement of the
|
|
||||||
additional terms that apply to those files, or a notice indicating
|
|
||||||
where to find the applicable terms.
|
|
||||||
|
|
||||||
Additional terms, permissive or non-permissive, may be stated in the
|
|
||||||
form of a separately written license, or stated as exceptions;
|
|
||||||
the above requirements apply either way.
|
|
||||||
|
|
||||||
8. Termination.
|
|
||||||
|
|
||||||
You may not propagate or modify a covered work except as expressly
|
|
||||||
provided under this License. Any attempt otherwise to propagate or
|
|
||||||
modify it is void, and will automatically terminate your rights under
|
|
||||||
this License (including any patent licenses granted under the third
|
|
||||||
paragraph of section 11).
|
|
||||||
|
|
||||||
However, if you cease all violation of this License, then your
|
|
||||||
license from a particular copyright holder is reinstated (a)
|
|
||||||
provisionally, unless and until the copyright holder explicitly and
|
|
||||||
finally terminates your license, and (b) permanently, if the copyright
|
|
||||||
holder fails to notify you of the violation by some reasonable means
|
|
||||||
prior to 60 days after the cessation.
|
|
||||||
|
|
||||||
Moreover, your license from a particular copyright holder is
|
|
||||||
reinstated permanently if the copyright holder notifies you of the
|
|
||||||
violation by some reasonable means, this is the first time you have
|
|
||||||
received notice of violation of this License (for any work) from that
|
|
||||||
copyright holder, and you cure the violation prior to 30 days after
|
|
||||||
your receipt of the notice.
|
|
||||||
|
|
||||||
Termination of your rights under this section does not terminate the
|
|
||||||
licenses of parties who have received copies or rights from you under
|
|
||||||
this License. If your rights have been terminated and not permanently
|
|
||||||
reinstated, you do not qualify to receive new licenses for the same
|
|
||||||
material under section 10.
|
|
||||||
|
|
||||||
9. Acceptance Not Required for Having Copies.
|
|
||||||
|
|
||||||
You are not required to accept this License in order to receive or
|
|
||||||
run a copy of the Program. Ancillary propagation of a covered work
|
|
||||||
occurring solely as a consequence of using peer-to-peer transmission
|
|
||||||
to receive a copy likewise does not require acceptance. However,
|
|
||||||
nothing other than this License grants you permission to propagate or
|
|
||||||
modify any covered work. These actions infringe copyright if you do
|
|
||||||
not accept this License. Therefore, by modifying or propagating a
|
|
||||||
covered work, you indicate your acceptance of this License to do so.
|
|
||||||
|
|
||||||
10. Automatic Licensing of Downstream Recipients.
|
|
||||||
|
|
||||||
Each time you convey a covered work, the recipient automatically
|
|
||||||
receives a license from the original licensors, to run, modify and
|
|
||||||
propagate that work, subject to this License. You are not responsible
|
|
||||||
for enforcing compliance by third parties with this License.
|
|
||||||
|
|
||||||
An "entity transaction" is a transaction transferring control of an
|
|
||||||
organization, or substantially all assets of one, or subdividing an
|
|
||||||
organization, or merging organizations. If propagation of a covered
|
|
||||||
work results from an entity transaction, each party to that
|
|
||||||
transaction who receives a copy of the work also receives whatever
|
|
||||||
licenses to the work the party's predecessor in interest had or could
|
|
||||||
give under the previous paragraph, plus a right to possession of the
|
|
||||||
Corresponding Source of the work from the predecessor in interest, if
|
|
||||||
the predecessor has it or can get it with reasonable efforts.
|
|
||||||
|
|
||||||
You may not impose any further restrictions on the exercise of the
|
|
||||||
rights granted or affirmed under this License. For example, you may
|
|
||||||
not impose a license fee, royalty, or other charge for exercise of
|
|
||||||
rights granted under this License, and you may not initiate litigation
|
|
||||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
|
||||||
any patent claim is infringed by making, using, selling, offering for
|
|
||||||
sale, or importing the Program or any portion of it.
|
|
||||||
|
|
||||||
11. Patents.
|
|
||||||
|
|
||||||
A "contributor" is a copyright holder who authorizes use under this
|
|
||||||
License of the Program or a work on which the Program is based. The
|
|
||||||
work thus licensed is called the contributor's "contributor version".
|
|
||||||
|
|
||||||
A contributor's "essential patent claims" are all patent claims
|
|
||||||
owned or controlled by the contributor, whether already acquired or
|
|
||||||
hereafter acquired, that would be infringed by some manner, permitted
|
|
||||||
by this License, of making, using, or selling its contributor version,
|
|
||||||
but do not include claims that would be infringed only as a
|
|
||||||
consequence of further modification of the contributor version. For
|
|
||||||
purposes of this definition, "control" includes the right to grant
|
|
||||||
patent sublicenses in a manner consistent with the requirements of
|
|
||||||
this License.
|
|
||||||
|
|
||||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
|
||||||
patent license under the contributor's essential patent claims, to
|
|
||||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
|
||||||
propagate the contents of its contributor version.
|
|
||||||
|
|
||||||
In the following three paragraphs, a "patent license" is any express
|
|
||||||
agreement or commitment, however denominated, not to enforce a patent
|
|
||||||
(such as an express permission to practice a patent or covenant not to
|
|
||||||
sue for patent infringement). To "grant" such a patent license to a
|
|
||||||
party means to make such an agreement or commitment not to enforce a
|
|
||||||
patent against the party.
|
|
||||||
|
|
||||||
If you convey a covered work, knowingly relying on a patent license,
|
|
||||||
and the Corresponding Source of the work is not available for anyone
|
|
||||||
to copy, free of charge and under the terms of this License, through a
|
|
||||||
publicly available network server or other readily accessible means,
|
|
||||||
then you must either (1) cause the Corresponding Source to be so
|
|
||||||
available, or (2) arrange to deprive yourself of the benefit of the
|
|
||||||
patent license for this particular work, or (3) arrange, in a manner
|
|
||||||
consistent with the requirements of this License, to extend the patent
|
|
||||||
license to downstream recipients. "Knowingly relying" means you have
|
|
||||||
actual knowledge that, but for the patent license, your conveying the
|
|
||||||
covered work in a country, or your recipient's use of the covered work
|
|
||||||
in a country, would infringe one or more identifiable patents in that
|
|
||||||
country that you have reason to believe are valid.
|
|
||||||
|
|
||||||
If, pursuant to or in connection with a single transaction or
|
|
||||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
|
||||||
covered work, and grant a patent license to some of the parties
|
|
||||||
receiving the covered work authorizing them to use, propagate, modify
|
|
||||||
or convey a specific copy of the covered work, then the patent license
|
|
||||||
you grant is automatically extended to all recipients of the covered
|
|
||||||
work and works based on it.
|
|
||||||
|
|
||||||
A patent license is "discriminatory" if it does not include within
|
|
||||||
the scope of its coverage, prohibits the exercise of, or is
|
|
||||||
conditioned on the non-exercise of one or more of the rights that are
|
|
||||||
specifically granted under this License. You may not convey a covered
|
|
||||||
work if you are a party to an arrangement with a third party that is
|
|
||||||
in the business of distributing software, under which you make payment
|
|
||||||
to the third party based on the extent of your activity of conveying
|
|
||||||
the work, and under which the third party grants, to any of the
|
|
||||||
parties who would receive the covered work from you, a discriminatory
|
|
||||||
patent license (a) in connection with copies of the covered work
|
|
||||||
conveyed by you (or copies made from those copies), or (b) primarily
|
|
||||||
for and in connection with specific products or compilations that
|
|
||||||
contain the covered work, unless you entered into that arrangement,
|
|
||||||
or that patent license was granted, prior to 28 March 2007.
|
|
||||||
|
|
||||||
Nothing in this License shall be construed as excluding or limiting
|
|
||||||
any implied license or other defenses to infringement that may
|
|
||||||
otherwise be available to you under applicable patent law.
|
|
||||||
|
|
||||||
12. No Surrender of Others' Freedom.
|
|
||||||
|
|
||||||
If conditions are imposed on you (whether by court order, agreement or
|
|
||||||
otherwise) that contradict the conditions of this License, they do not
|
|
||||||
excuse you from the conditions of this License. If you cannot convey a
|
|
||||||
covered work so as to satisfy simultaneously your obligations under this
|
|
||||||
License and any other pertinent obligations, then as a consequence you may
|
|
||||||
not convey it at all. For example, if you agree to terms that obligate you
|
|
||||||
to collect a royalty for further conveying from those to whom you convey
|
|
||||||
the Program, the only way you could satisfy both those terms and this
|
|
||||||
License would be to refrain entirely from conveying the Program.
|
|
||||||
|
|
||||||
13. Use with the GNU Affero General Public License.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, you have
|
|
||||||
permission to link or combine any covered work with a work licensed
|
|
||||||
under version 3 of the GNU Affero General Public License into a single
|
|
||||||
combined work, and to convey the resulting work. The terms of this
|
|
||||||
License will continue to apply to the part which is the covered work,
|
|
||||||
but the special requirements of the GNU Affero General Public License,
|
|
||||||
section 13, concerning interaction through a network will apply to the
|
|
||||||
combination as such.
|
|
||||||
|
|
||||||
14. Revised Versions of this License.
|
|
||||||
|
|
||||||
The Free Software Foundation may publish revised and/or new versions of
|
|
||||||
the GNU General Public License from time to time. Such new versions will
|
|
||||||
be similar in spirit to the present version, but may differ in detail to
|
|
||||||
address new problems or concerns.
|
|
||||||
|
|
||||||
Each version is given a distinguishing version number. If the
|
|
||||||
Program specifies that a certain numbered version of the GNU General
|
|
||||||
Public License "or any later version" applies to it, you have the
|
|
||||||
option of following the terms and conditions either of that numbered
|
|
||||||
version or of any later version published by the Free Software
|
|
||||||
Foundation. If the Program does not specify a version number of the
|
|
||||||
GNU General Public License, you may choose any version ever published
|
|
||||||
by the Free Software Foundation.
|
|
||||||
|
|
||||||
If the Program specifies that a proxy can decide which future
|
|
||||||
versions of the GNU General Public License can be used, that proxy's
|
|
||||||
public statement of acceptance of a version permanently authorizes you
|
|
||||||
to choose that version for the Program.
|
|
||||||
|
|
||||||
Later license versions may give you additional or different
|
|
||||||
permissions. However, no additional obligations are imposed on any
|
|
||||||
author or copyright holder as a result of your choosing to follow a
|
|
||||||
later version.
|
|
||||||
|
|
||||||
15. Disclaimer of Warranty.
|
|
||||||
|
|
||||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
|
||||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
|
||||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
|
||||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
||||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
|
||||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
|
||||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
|
||||||
|
|
||||||
16. Limitation of Liability.
|
|
||||||
|
|
||||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
|
||||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
|
||||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
|
||||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
|
||||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
|
||||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
|
||||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
|
||||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
|
||||||
SUCH DAMAGES.
|
|
||||||
|
|
||||||
17. Interpretation of Sections 15 and 16.
|
|
||||||
|
|
||||||
If the disclaimer of warranty and limitation of liability provided
|
|
||||||
above cannot be given local legal effect according to their terms,
|
|
||||||
reviewing courts shall apply local law that most closely approximates
|
|
||||||
an absolute waiver of all civil liability in connection with the
|
|
||||||
Program, unless a warranty or assumption of liability accompanies a
|
|
||||||
copy of the Program in return for a fee.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
How to Apply These Terms to Your New Programs
|
|
||||||
|
|
||||||
If you develop a new program, and you want it to be of the greatest
|
|
||||||
possible use to the public, the best way to achieve this is to make it
|
|
||||||
free software which everyone can redistribute and change under these terms.
|
|
||||||
|
|
||||||
To do so, attach the following notices to the program. It is safest
|
|
||||||
to attach them to the start of each source file to most effectively
|
|
||||||
state the exclusion of warranty; and each file should have at least
|
|
||||||
the "copyright" line and a pointer to where the full notice is found.
|
|
||||||
|
|
||||||
<one line to give the program's name and a brief idea of what it does.>
|
|
||||||
Copyright (C) <year> <name of author>
|
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
|
||||||
it under the terms of the GNU General Public License as published by
|
|
||||||
the Free Software Foundation, either version 3 of the License, or
|
|
||||||
(at your option) any later version.
|
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
GNU General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU General Public License
|
|
||||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
Also add information on how to contact you by electronic and paper mail.
|
|
||||||
|
|
||||||
If the program does terminal interaction, make it output a short
|
|
||||||
notice like this when it starts in an interactive mode:
|
|
||||||
|
|
||||||
<program> Copyright (C) <year> <name of author>
|
|
||||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
|
||||||
This is free software, and you are welcome to redistribute it
|
|
||||||
under certain conditions; type `show c' for details.
|
|
||||||
|
|
||||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
|
||||||
parts of the General Public License. Of course, your program's commands
|
|
||||||
might be different; for a GUI interface, you would use an "about box".
|
|
||||||
|
|
||||||
You should also get your employer (if you work as a programmer) or school,
|
|
||||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
|
||||||
For more information on this, and how to apply and follow the GNU GPL, see
|
|
||||||
<http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
The GNU General Public License does not permit incorporating your program
|
|
||||||
into proprietary programs. If your program is a subroutine library, you
|
|
||||||
may consider it more useful to permit linking proprietary applications with
|
|
||||||
the library. If this is what you want to do, use the GNU Lesser General
|
|
||||||
Public License instead of this License. But first, please read
|
|
||||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
|
||||||
|
|||||||
@@ -1,7 +1,4 @@
|
|||||||
include LICENSE
|
|
||||||
include README.md
|
include README.md
|
||||||
include requirements.txt
|
include esphomeyaml/dashboard/templates/index.html
|
||||||
include esphome/dashboard/templates/*.html
|
include esphomeyaml/dashboard/static/materialize-stepper.min.css
|
||||||
recursive-include esphome/dashboard/static *.ico *.js *.css *.woff* LICENSE
|
include esphomeyaml/dashboard/static/materialize-stepper.min.js
|
||||||
recursive-include esphome *.cpp *.h *.tcc
|
|
||||||
recursive-include esphome LICENSE.txt
|
|
||||||
|
|||||||
39
README.md
39
README.md
@@ -1,9 +1,38 @@
|
|||||||
# ESPHome [](https://travis-ci.org/esphome/esphome) [](https://discord.gg/KhAMKrd) [](https://GitHub.com/esphome/esphome/releases/)
|
# esphomeyaml for [esphomelib](https://github.com/OttoWinter/esphomelib)
|
||||||
|
|
||||||
[](https://esphome.io/)
|
### Getting Started Guide: https://esphomelib.com/esphomeyaml/guides/getting_started_command_line.html
|
||||||
|
|
||||||
**Documentation:** https://esphome.io/
|
### Available Components: https://esphomelib.com/esphomeyaml/index.html
|
||||||
|
|
||||||
For issues, please go to [the issue tracker](https://github.com/esphome/issues/issues).
|
esphomeyaml is the solution for your ESP8266/ESP32 projects with Home Assistant. It allows you to create **custom firmwares** for your microcontrollers with no programming experience required. All you need to know is the YAML configuration format which is also used by [Home Assistant](https://www.home-assistant.io).
|
||||||
|
|
||||||
For feature requests, please see [feature requests](https://github.com/esphome/feature-requests/issues).
|
esphomeyaml will:
|
||||||
|
|
||||||
|
* Read your configuration file and warn you about potential errors (like using the invalid pins.)
|
||||||
|
* Create a custom C++ sketch file for you using esphomeyaml's powerful C++ generation engine.
|
||||||
|
* Compile the sketch file for you using [platformio](http://platformio.org/).
|
||||||
|
* Upload the binary to your ESP via Over the Air updates.
|
||||||
|
* Automatically start remote logs via MQTT.
|
||||||
|
|
||||||
|
And all of that with a single command 🎉:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
esphomeyaml configuration.yaml run
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
* **No programming experience required:** just edit YAML configuration
|
||||||
|
files like you're used to with Home Assistant.
|
||||||
|
* **Flexible:** Use [esphomelib](https://github.com/OttoWinter/esphomelib)'s powerful core to create custom sensors/outputs.
|
||||||
|
* **Fast and efficient:** Written in C++ and keeps memory consumption to a minimum.
|
||||||
|
* **Made for [Home Assistant](https://www.home-assistant.io):** Almost all [Home Assistant](https://www.home-assistant.io) features are supported out of the box. Including RGB lights and many more.
|
||||||
|
* **Easy reproducible configuration:** No need to go through a long setup process for every single node. Just copy a configuration file and run a single command.
|
||||||
|
* **Smart Over The Air Updates:** esphomeyaml has OTA updates deeply integrated into the system. It even automatically enters a recovery mode if a boot loop is detected.
|
||||||
|
* **Powerful logging engine:** View colorful logs and debug issues remotely.
|
||||||
|
* **Open Source**
|
||||||
|
* For me: Makes documenting esphomelib's features a lot easier.
|
||||||
|
|
||||||
|
## Special Thanks
|
||||||
|
|
||||||
|
Special Thanks to the Home Assistant project. Lots of the code base of esphomeyaml is based off of Home Assistant, for example the loading and config validation code.
|
||||||
|
|||||||
@@ -1,30 +0,0 @@
|
|||||||
ARG BUILD_FROM=esphome/esphome-base-amd64:3.0.0
|
|
||||||
FROM ${BUILD_FROM}
|
|
||||||
|
|
||||||
# First install requirements to leverage caching when requirements don't change
|
|
||||||
COPY requirements.txt docker/platformio_install_deps.py platformio.ini /
|
|
||||||
RUN \
|
|
||||||
pip3 install --no-cache-dir -r /requirements.txt \
|
|
||||||
&& /platformio_install_deps.py /platformio.ini
|
|
||||||
|
|
||||||
# Then copy esphome and install
|
|
||||||
COPY . .
|
|
||||||
RUN pip3 install --no-cache-dir -e .
|
|
||||||
|
|
||||||
# Settings for dashboard
|
|
||||||
ENV USERNAME="" PASSWORD=""
|
|
||||||
|
|
||||||
# Expose the dashboard to Docker
|
|
||||||
EXPOSE 6052
|
|
||||||
|
|
||||||
# Run healthcheck (heartbeat)
|
|
||||||
HEALTHCHECK --interval=30s --timeout=30s \
|
|
||||||
CMD curl --fail http://localhost:6052 || exit 1
|
|
||||||
|
|
||||||
# The directory the user should mount their configuration files to
|
|
||||||
WORKDIR /config
|
|
||||||
# Set entrypoint to esphome so that the user doesn't have to type 'esphome'
|
|
||||||
# in every docker command twice
|
|
||||||
ENTRYPOINT ["esphome"]
|
|
||||||
# When no arguments given, start the dashboard in the workdir
|
|
||||||
CMD ["/config", "dashboard"]
|
|
||||||
30
docker/Dockerfile.builder
Normal file
30
docker/Dockerfile.builder
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
FROM multiarch/ubuntu-core:amd64-xenial
|
||||||
|
|
||||||
|
# setup locals
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
jq \
|
||||||
|
git \
|
||||||
|
python3-setuptools \
|
||||||
|
&& rm -rf /var/lib/apt/lists/* \
|
||||||
|
ENV LANG C.UTF-8
|
||||||
|
|
||||||
|
# Install docker
|
||||||
|
# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
apt-transport-https \
|
||||||
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
software-properties-common \
|
||||||
|
&& rm -rf /var/lib/apt/lists/* \
|
||||||
|
&& curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \
|
||||||
|
&& add-apt-repository "deb https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" \
|
||||||
|
&& apt-get update && apt-get install -y docker-ce \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# setup arm binary support
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
qemu-user-static \
|
||||||
|
binfmt-support \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
WORKDIR /data
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
FROM esphome/esphome-base-amd64:3.0.0
|
|
||||||
|
|
||||||
COPY . .
|
|
||||||
|
|
||||||
RUN apt-get update \
|
|
||||||
&& apt-get install -y --no-install-recommends \
|
|
||||||
python3-wheel \
|
|
||||||
net-tools \
|
|
||||||
&& apt-get clean \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
WORKDIR /workspaces
|
|
||||||
ENV SHELL /bin/bash
|
|
||||||
@@ -1,25 +1,42 @@
|
|||||||
ARG BUILD_FROM
|
# Dockerfile for HassIO add-on
|
||||||
|
ARG BUILD_FROM=homeassistant/amd64-base-ubuntu:latest
|
||||||
FROM ${BUILD_FROM}
|
FROM ${BUILD_FROM}
|
||||||
|
|
||||||
# First install requirements to leverage caching when requirements don't change
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
COPY requirements.txt docker/platformio_install_deps.py platformio.ini /
|
python \
|
||||||
RUN \
|
python-pip \
|
||||||
pip3 install --no-cache-dir -r /requirements.txt \
|
python-setuptools \
|
||||||
&& /platformio_install_deps.py /platformio.ini
|
python-pil \
|
||||||
|
git \
|
||||||
|
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
|
||||||
|
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||||
|
platformio settings set enable_telemetry No && \
|
||||||
|
platformio settings set check_libraries_interval 1000000 && \
|
||||||
|
platformio settings set check_platformio_interval 1000000 && \
|
||||||
|
platformio settings set check_platforms_interval 1000000
|
||||||
|
|
||||||
# Copy root filesystem
|
COPY docker/platformio.ini /pio/platformio.ini
|
||||||
COPY docker/rootfs/ /
|
RUN platformio run -d /pio; rm -rf /pio
|
||||||
|
|
||||||
# Then copy esphome and install
|
ARG ESPHOMELIB_VERSION="dev"
|
||||||
COPY . /opt/esphome/
|
RUN platformio lib -g install "https://github.com/OttoWinter/esphomelib.git#${ESPHOMELIB_VERSION}"
|
||||||
RUN pip3 install --no-cache-dir -e /opt/esphome
|
|
||||||
|
|
||||||
# Build arguments
|
COPY . .
|
||||||
ARG BUILD_VERSION=dev
|
RUN pip install --no-cache-dir --no-binary :all: -e . && \
|
||||||
|
pip install --no-cache-dir --no-binary :all: tzlocal
|
||||||
|
|
||||||
|
CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]
|
||||||
|
|
||||||
|
# Build arugments
|
||||||
|
ARG ADDON_ARCH
|
||||||
|
ARG ADDON_VERSION
|
||||||
|
|
||||||
# Labels
|
# Labels
|
||||||
LABEL \
|
LABEL \
|
||||||
io.hass.name="ESPHome" \
|
io.hass.name="esphomeyaml" \
|
||||||
io.hass.description="Manage and program ESP8266/ESP32 microcontrollers through YAML configuration files" \
|
io.hass.description="esphomeyaml HassIO add-on for intelligently managing all your ESP8266/ESP32 devices." \
|
||||||
|
io.hass.arch="${ADDON_ARCH}" \
|
||||||
io.hass.type="addon" \
|
io.hass.type="addon" \
|
||||||
io.hass.version=${BUILD_VERSION}
|
io.hass.version="${ADDON_VERSION}" \
|
||||||
|
io.hass.url="https://esphomelib.com/esphomeyaml/index.html" \
|
||||||
|
maintainer="Otto Winter <contact@otto-winter.com>"
|
||||||
|
|||||||
@@ -1,9 +1,6 @@
|
|||||||
FROM esphome/esphome-lint-base:3.0.0
|
FROM python:2.7
|
||||||
|
|
||||||
COPY requirements.txt requirements_test.txt docker/platformio_install_deps.py platformio.ini /
|
COPY requirements.txt /requirements.txt
|
||||||
RUN \
|
|
||||||
pip3 install --no-cache-dir -r /requirements.txt -r /requirements_test.txt \
|
|
||||||
&& /platformio_install_deps.py /platformio.ini
|
|
||||||
|
|
||||||
VOLUME ["/esphome"]
|
RUN pip install -r /requirements.txt && \
|
||||||
WORKDIR /esphome
|
pip install flake8==3.5.0 pylint==1.9.3 tzlocal pillow
|
||||||
|
|||||||
19
docker/Dockerfile.test
Normal file
19
docker/Dockerfile.test
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
FROM ubuntu:bionic
|
||||||
|
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
python \
|
||||||
|
python-pip \
|
||||||
|
python-setuptools \
|
||||||
|
python-pil \
|
||||||
|
git \
|
||||||
|
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/*rm -rf /var/lib/apt/lists/* /tmp/* && \
|
||||||
|
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||||
|
platformio settings set enable_telemetry No
|
||||||
|
|
||||||
|
COPY docker/platformio.ini /pio/platformio.ini
|
||||||
|
RUN platformio run -d /pio; rm -rf /pio
|
||||||
|
|
||||||
|
COPY requirements.txt /requirements.txt
|
||||||
|
|
||||||
|
RUN pip install --no-cache-dir -r /requirements.txt && \
|
||||||
|
pip install --no-cache-dir tzlocal pillow
|
||||||
12
docker/platformio.ini
Normal file
12
docker/platformio.ini
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
; This file allows the docker build file to install the required platformio
|
||||||
|
; platforms
|
||||||
|
|
||||||
|
[env:espressif8266]
|
||||||
|
platform = espressif8266
|
||||||
|
board = nodemcuv2
|
||||||
|
framework = arduino
|
||||||
|
|
||||||
|
[env:espressif32]
|
||||||
|
platform = espressif32
|
||||||
|
board = nodemcu-32s
|
||||||
|
framework = arduino
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
# This script is used in the docker containers to preinstall
|
|
||||||
# all platformio libraries in the global storage
|
|
||||||
|
|
||||||
import configparser
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
|
|
||||||
config = configparser.ConfigParser()
|
|
||||||
config.read(sys.argv[1])
|
|
||||||
libs = []
|
|
||||||
for line in config['common']['lib_deps'].splitlines():
|
|
||||||
# Format: '1655@1.0.2 ; TinyGPSPlus (has name conflict)' (includes comment)
|
|
||||||
m = re.search(r'([a-zA-Z0-9-_/]+@[0-9\.]+)', line)
|
|
||||||
if m is None:
|
|
||||||
continue
|
|
||||||
libs.append(m.group(1))
|
|
||||||
|
|
||||||
subprocess.check_call(['platformio', 'lib', '-g', 'install', *libs])
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
#!/usr/bin/with-contenv bashio
|
|
||||||
# ==============================================================================
|
|
||||||
# Community Hass.io Add-ons: ESPHome
|
|
||||||
# This files check if all user configuration requirements are met
|
|
||||||
# ==============================================================================
|
|
||||||
|
|
||||||
# Check SSL requirements, if enabled
|
|
||||||
if bashio::config.true 'ssl'; then
|
|
||||||
if ! bashio::config.has_value 'certfile'; then
|
|
||||||
bashio::fatal 'SSL is enabled, but no certfile was specified.'
|
|
||||||
bashio::exit.nok
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! bashio::config.has_value 'keyfile'; then
|
|
||||||
bashio::fatal 'SSL is enabled, but no keyfile was specified'
|
|
||||||
bashio::exit.nok
|
|
||||||
fi
|
|
||||||
|
|
||||||
|
|
||||||
certfile="/ssl/$(bashio::config 'certfile')"
|
|
||||||
keyfile="/ssl/$(bashio::config 'keyfile')"
|
|
||||||
|
|
||||||
if ! bashio::fs.file_exists "${certfile}"; then
|
|
||||||
if ! bashio::fs.file_exists "${keyfile}"; then
|
|
||||||
# Both files are missing, let's print a friendlier error message
|
|
||||||
bashio::log.fatal 'You enabled encrypted connections using the "ssl": true option.'
|
|
||||||
bashio::log.fatal "However, the SSL files '${certfile}' and '${keyfile}'"
|
|
||||||
bashio::log.fatal "were not found. If you're using Hass.io on your local network and don't want"
|
|
||||||
bashio::log.fatal 'to encrypt connections to the ESPHome dashboard, you can manually disable'
|
|
||||||
bashio::log.fatal 'SSL by setting "ssl" to false."'
|
|
||||||
bashio::exit.nok
|
|
||||||
fi
|
|
||||||
bashio::log.fatal "The configured certfile '${certfile}' was not found."
|
|
||||||
bashio::exit.nok
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! bashio::fs.file_exists "/ssl/$(bashio::config 'keyfile')"; then
|
|
||||||
bashio::log.fatal "The configured keyfile '${keyfile}' was not found."
|
|
||||||
bashio::exit.nok
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
#!/usr/bin/with-contenv bashio
|
|
||||||
# ==============================================================================
|
|
||||||
# Community Hass.io Add-ons: ESPHome
|
|
||||||
# Configures NGINX for use with ESPHome
|
|
||||||
# ==============================================================================
|
|
||||||
|
|
||||||
declare certfile
|
|
||||||
declare keyfile
|
|
||||||
declare direct_port
|
|
||||||
declare ingress_interface
|
|
||||||
declare ingress_port
|
|
||||||
|
|
||||||
mkdir -p /var/log/nginx
|
|
||||||
|
|
||||||
direct_port=$(bashio::addon.port 6052)
|
|
||||||
if bashio::var.has_value "${direct_port}"; then
|
|
||||||
if bashio::config.true 'ssl'; then
|
|
||||||
certfile=$(bashio::config 'certfile')
|
|
||||||
keyfile=$(bashio::config 'keyfile')
|
|
||||||
|
|
||||||
mv /etc/nginx/servers/direct-ssl.disabled /etc/nginx/servers/direct.conf
|
|
||||||
sed -i "s/%%certfile%%/${certfile}/g" /etc/nginx/servers/direct.conf
|
|
||||||
sed -i "s/%%keyfile%%/${keyfile}/g" /etc/nginx/servers/direct.conf
|
|
||||||
else
|
|
||||||
mv /etc/nginx/servers/direct.disabled /etc/nginx/servers/direct.conf
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -i "s/%%port%%/${direct_port}/g" /etc/nginx/servers/direct.conf
|
|
||||||
fi
|
|
||||||
|
|
||||||
ingress_port=$(bashio::addon.ingress_port)
|
|
||||||
ingress_interface=$(bashio::addon.ip_address)
|
|
||||||
sed -i "s/%%port%%/${ingress_port}/g" /etc/nginx/servers/ingress.conf
|
|
||||||
sed -i "s/%%interface%%/${ingress_interface}/g" /etc/nginx/servers/ingress.conf
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
#!/usr/bin/with-contenv bashio
|
|
||||||
# ==============================================================================
|
|
||||||
# Community Hass.io Add-ons: ESPHome
|
|
||||||
# This files installs the user ESPHome version if specified
|
|
||||||
# ==============================================================================
|
|
||||||
|
|
||||||
declare esphome_version
|
|
||||||
|
|
||||||
if bashio::config.has_value 'esphome_version'; then
|
|
||||||
esphome_version=$(bashio::config 'esphome_version')
|
|
||||||
if [[ $esphome_version == *":"* ]]; then
|
|
||||||
IFS=':' read -r -a array <<< "$esphome_version"
|
|
||||||
username=${array[0]}
|
|
||||||
ref=${array[1]}
|
|
||||||
else
|
|
||||||
username="esphome"
|
|
||||||
ref=$esphome_version
|
|
||||||
fi
|
|
||||||
full_url="https://github.com/${username}/esphome/archive/${ref}.zip"
|
|
||||||
bashio::log.info "Installing esphome version '${esphome_version}' (${full_url})..."
|
|
||||||
pip3 install -U --no-cache-dir "${full_url}" \
|
|
||||||
|| bashio::exit.nok "Failed installing esphome pinned version."
|
|
||||||
fi
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
#!/usr/bin/with-contenv bashio
|
|
||||||
# ==============================================================================
|
|
||||||
# Community Hass.io Add-ons: ESPHome
|
|
||||||
# This files migrates the esphome config directory from the old path
|
|
||||||
# ==============================================================================
|
|
||||||
|
|
||||||
if [[ ! -d /config/esphome && -d /config/esphomeyaml ]]; then
|
|
||||||
echo "Moving config directory from /config/esphomeyaml to /config/esphome"
|
|
||||||
mv /config/esphomeyaml /config/esphome
|
|
||||||
mv /config/esphome/.esphomeyaml /config/esphome/.esphome
|
|
||||||
fi
|
|
||||||
@@ -1,96 +0,0 @@
|
|||||||
types {
|
|
||||||
text/html html htm shtml;
|
|
||||||
text/css css;
|
|
||||||
text/xml xml;
|
|
||||||
image/gif gif;
|
|
||||||
image/jpeg jpeg jpg;
|
|
||||||
application/javascript js;
|
|
||||||
application/atom+xml atom;
|
|
||||||
application/rss+xml rss;
|
|
||||||
|
|
||||||
text/mathml mml;
|
|
||||||
text/plain txt;
|
|
||||||
text/vnd.sun.j2me.app-descriptor jad;
|
|
||||||
text/vnd.wap.wml wml;
|
|
||||||
text/x-component htc;
|
|
||||||
|
|
||||||
image/png png;
|
|
||||||
image/svg+xml svg svgz;
|
|
||||||
image/tiff tif tiff;
|
|
||||||
image/vnd.wap.wbmp wbmp;
|
|
||||||
image/webp webp;
|
|
||||||
image/x-icon ico;
|
|
||||||
image/x-jng jng;
|
|
||||||
image/x-ms-bmp bmp;
|
|
||||||
|
|
||||||
font/woff woff;
|
|
||||||
font/woff2 woff2;
|
|
||||||
|
|
||||||
application/java-archive jar war ear;
|
|
||||||
application/json json;
|
|
||||||
application/mac-binhex40 hqx;
|
|
||||||
application/msword doc;
|
|
||||||
application/pdf pdf;
|
|
||||||
application/postscript ps eps ai;
|
|
||||||
application/rtf rtf;
|
|
||||||
application/vnd.apple.mpegurl m3u8;
|
|
||||||
application/vnd.google-earth.kml+xml kml;
|
|
||||||
application/vnd.google-earth.kmz kmz;
|
|
||||||
application/vnd.ms-excel xls;
|
|
||||||
application/vnd.ms-fontobject eot;
|
|
||||||
application/vnd.ms-powerpoint ppt;
|
|
||||||
application/vnd.oasis.opendocument.graphics odg;
|
|
||||||
application/vnd.oasis.opendocument.presentation odp;
|
|
||||||
application/vnd.oasis.opendocument.spreadsheet ods;
|
|
||||||
application/vnd.oasis.opendocument.text odt;
|
|
||||||
application/vnd.openxmlformats-officedocument.presentationml.presentation
|
|
||||||
pptx;
|
|
||||||
application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
|
|
||||||
xlsx;
|
|
||||||
application/vnd.openxmlformats-officedocument.wordprocessingml.document
|
|
||||||
docx;
|
|
||||||
application/vnd.wap.wmlc wmlc;
|
|
||||||
application/x-7z-compressed 7z;
|
|
||||||
application/x-cocoa cco;
|
|
||||||
application/x-java-archive-diff jardiff;
|
|
||||||
application/x-java-jnlp-file jnlp;
|
|
||||||
application/x-makeself run;
|
|
||||||
application/x-perl pl pm;
|
|
||||||
application/x-pilot prc pdb;
|
|
||||||
application/x-rar-compressed rar;
|
|
||||||
application/x-redhat-package-manager rpm;
|
|
||||||
application/x-sea sea;
|
|
||||||
application/x-shockwave-flash swf;
|
|
||||||
application/x-stuffit sit;
|
|
||||||
application/x-tcl tcl tk;
|
|
||||||
application/x-x509-ca-cert der pem crt;
|
|
||||||
application/x-xpinstall xpi;
|
|
||||||
application/xhtml+xml xhtml;
|
|
||||||
application/xspf+xml xspf;
|
|
||||||
application/zip zip;
|
|
||||||
|
|
||||||
application/octet-stream bin exe dll;
|
|
||||||
application/octet-stream deb;
|
|
||||||
application/octet-stream dmg;
|
|
||||||
application/octet-stream iso img;
|
|
||||||
application/octet-stream msi msp msm;
|
|
||||||
|
|
||||||
audio/midi mid midi kar;
|
|
||||||
audio/mpeg mp3;
|
|
||||||
audio/ogg ogg;
|
|
||||||
audio/x-m4a m4a;
|
|
||||||
audio/x-realaudio ra;
|
|
||||||
|
|
||||||
video/3gpp 3gpp 3gp;
|
|
||||||
video/mp2t ts;
|
|
||||||
video/mp4 mp4;
|
|
||||||
video/mpeg mpeg mpg;
|
|
||||||
video/quicktime mov;
|
|
||||||
video/webm webm;
|
|
||||||
video/x-flv flv;
|
|
||||||
video/x-m4v m4v;
|
|
||||||
video/x-mng mng;
|
|
||||||
video/x-ms-asf asx asf;
|
|
||||||
video/x-ms-wmv wmv;
|
|
||||||
video/x-msvideo avi;
|
|
||||||
}
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
proxy_http_version 1.1;
|
|
||||||
proxy_ignore_client_abort off;
|
|
||||||
proxy_read_timeout 86400s;
|
|
||||||
proxy_redirect off;
|
|
||||||
proxy_send_timeout 86400s;
|
|
||||||
proxy_max_temp_file_size 0;
|
|
||||||
|
|
||||||
proxy_set_header Accept-Encoding "";
|
|
||||||
proxy_set_header Connection $connection_upgrade;
|
|
||||||
proxy_set_header Host $http_host;
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
proxy_set_header X-NginX-Proxy true;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header Authorization "";
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
root /dev/null;
|
|
||||||
server_name $hostname;
|
|
||||||
|
|
||||||
add_header X-Content-Type-Options nosniff;
|
|
||||||
add_header X-XSS-Protection "1; mode=block";
|
|
||||||
add_header X-Robots-Tag none;
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
ssl_protocols TLSv1.2;
|
|
||||||
ssl_prefer_server_ciphers on;
|
|
||||||
ssl_ciphers ECDHE-RSA-AES256-GCM-SHA512:DHE-RSA-AES256-GCM-SHA512:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:DHE-RSA-AES256-SHA;
|
|
||||||
ssl_ecdh_curve secp384r1;
|
|
||||||
ssl_session_timeout 10m;
|
|
||||||
ssl_session_cache shared:SSL:10m;
|
|
||||||
ssl_session_tickets off;
|
|
||||||
ssl_stapling on;
|
|
||||||
ssl_stapling_verify on;
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
daemon off;
|
|
||||||
user root;
|
|
||||||
pid /var/run/nginx.pid;
|
|
||||||
worker_processes 1;
|
|
||||||
# Hass.io addon log
|
|
||||||
error_log /proc/1/fd/1 error;
|
|
||||||
events {
|
|
||||||
worker_connections 1024;
|
|
||||||
}
|
|
||||||
|
|
||||||
http {
|
|
||||||
include /etc/nginx/includes/mime.types;
|
|
||||||
access_log stdout;
|
|
||||||
default_type application/octet-stream;
|
|
||||||
gzip on;
|
|
||||||
keepalive_timeout 65;
|
|
||||||
sendfile on;
|
|
||||||
server_tokens off;
|
|
||||||
|
|
||||||
map $http_upgrade $connection_upgrade {
|
|
||||||
default upgrade;
|
|
||||||
'' close;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Use Hass.io supervisor as resolver
|
|
||||||
resolver 172.30.32.2;
|
|
||||||
|
|
||||||
upstream esphome {
|
|
||||||
server unix:/var/run/esphome.sock;
|
|
||||||
}
|
|
||||||
|
|
||||||
include /etc/nginx/servers/*.conf;
|
|
||||||
}
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
server {
|
|
||||||
listen %%port%% default_server ssl http2;
|
|
||||||
|
|
||||||
include /etc/nginx/includes/server_params.conf;
|
|
||||||
include /etc/nginx/includes/proxy_params.conf;
|
|
||||||
include /etc/nginx/includes/ssl_params.conf;
|
|
||||||
|
|
||||||
ssl on;
|
|
||||||
ssl_certificate /ssl/%%certfile%%;
|
|
||||||
ssl_certificate_key /ssl/%%keyfile%%;
|
|
||||||
|
|
||||||
# Clear Hass.io Ingress header
|
|
||||||
proxy_set_header X-Hassio-Ingress "";
|
|
||||||
|
|
||||||
# Redirect http requests to https on the same port.
|
|
||||||
# https://rageagainstshell.com/2016/11/redirect-http-to-https-on-the-same-port-in-nginx/
|
|
||||||
error_page 497 https://$http_host$request_uri;
|
|
||||||
|
|
||||||
location / {
|
|
||||||
proxy_pass http://esphome;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
server {
|
|
||||||
listen %%port%% default_server;
|
|
||||||
|
|
||||||
include /etc/nginx/includes/server_params.conf;
|
|
||||||
include /etc/nginx/includes/proxy_params.conf;
|
|
||||||
# Clear Hass.io Ingress header
|
|
||||||
proxy_set_header X-Hassio-Ingress "";
|
|
||||||
|
|
||||||
location / {
|
|
||||||
proxy_pass http://esphome;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
server {
|
|
||||||
listen %%interface%%:%%port%% default_server;
|
|
||||||
|
|
||||||
include /etc/nginx/includes/server_params.conf;
|
|
||||||
include /etc/nginx/includes/proxy_params.conf;
|
|
||||||
# Set Hass.io Ingress header
|
|
||||||
proxy_set_header X-Hassio-Ingress "YES";
|
|
||||||
|
|
||||||
location / {
|
|
||||||
# Only allow from Hass.io supervisor
|
|
||||||
allow 172.30.32.2;
|
|
||||||
deny all;
|
|
||||||
|
|
||||||
proxy_pass http://esphome;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
#!/usr/bin/execlineb -S0
|
|
||||||
# ==============================================================================
|
|
||||||
# Community Hass.io Add-ons: ESPHome
|
|
||||||
# Take down the S6 supervision tree when ESPHome fails
|
|
||||||
# ==============================================================================
|
|
||||||
if -n { s6-test $# -ne 0 }
|
|
||||||
if -n { s6-test ${1} -eq 256 }
|
|
||||||
|
|
||||||
s6-svscanctl -t /var/run/s6/services
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
#!/usr/bin/with-contenv bashio
|
|
||||||
# ==============================================================================
|
|
||||||
# Community Hass.io Add-ons: ESPHome
|
|
||||||
# Runs the ESPHome dashboard
|
|
||||||
# ==============================================================================
|
|
||||||
|
|
||||||
export ESPHOME_IS_HASSIO=true
|
|
||||||
|
|
||||||
if bashio::config.true 'leave_front_door_open'; then
|
|
||||||
export DISABLE_HA_AUTHENTICATION=true
|
|
||||||
fi
|
|
||||||
|
|
||||||
if bashio::config.true 'streamer_mode'; then
|
|
||||||
export ESPHOME_STREAMER_MODE=true
|
|
||||||
fi
|
|
||||||
|
|
||||||
if bashio::config.true 'status_use_ping'; then
|
|
||||||
export ESPHOME_DASHBOARD_USE_PING=true
|
|
||||||
fi
|
|
||||||
|
|
||||||
if bashio::config.has_value 'relative_url'; then
|
|
||||||
export ESPHOME_DASHBOARD_RELATIVE_URL=$(bashio::config 'relative_url')
|
|
||||||
fi
|
|
||||||
|
|
||||||
bashio::log.info "Starting ESPHome dashboard..."
|
|
||||||
exec esphome /config/esphome dashboard --socket /var/run/esphome.sock --hassio
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
#!/usr/bin/execlineb -S0
|
|
||||||
# ==============================================================================
|
|
||||||
# Community Hass.io Add-ons: ESPHome
|
|
||||||
# Take down the S6 supervision tree when NGINX fails
|
|
||||||
# ==============================================================================
|
|
||||||
if -n { s6-test $# -ne 0 }
|
|
||||||
if -n { s6-test ${1} -eq 256 }
|
|
||||||
|
|
||||||
s6-svscanctl -t /var/run/s6/services
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
#!/usr/bin/with-contenv bashio
|
|
||||||
# ==============================================================================
|
|
||||||
# Community Hass.io Add-ons: ESPHome
|
|
||||||
# Runs the NGINX proxy
|
|
||||||
# ==============================================================================
|
|
||||||
|
|
||||||
bashio::log.info "Waiting for dashboard to come up..."
|
|
||||||
|
|
||||||
while [[ ! -S /var/run/esphome.sock ]]; do
|
|
||||||
sleep 0.5
|
|
||||||
done
|
|
||||||
|
|
||||||
bashio::log.info "Starting NGINX..."
|
|
||||||
exec nginx
|
|
||||||
@@ -1,651 +0,0 @@
|
|||||||
import argparse
|
|
||||||
import functools
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from esphome import const, writer, yaml_util
|
|
||||||
import esphome.codegen as cg
|
|
||||||
from esphome.config import iter_components, read_config, strip_default_ids
|
|
||||||
from esphome.const import (
|
|
||||||
CONF_BAUD_RATE,
|
|
||||||
CONF_BROKER,
|
|
||||||
CONF_LOGGER,
|
|
||||||
CONF_OTA,
|
|
||||||
CONF_PASSWORD,
|
|
||||||
CONF_PORT,
|
|
||||||
CONF_ESPHOME,
|
|
||||||
CONF_PLATFORMIO_OPTIONS,
|
|
||||||
)
|
|
||||||
from esphome.core import CORE, EsphomeError, coroutine, coroutine_with_priority
|
|
||||||
from esphome.helpers import indent
|
|
||||||
from esphome.util import (
|
|
||||||
run_external_command,
|
|
||||||
run_external_process,
|
|
||||||
safe_print,
|
|
||||||
list_yaml_files,
|
|
||||||
get_serial_ports,
|
|
||||||
)
|
|
||||||
from esphome.log import color, setup_log, Fore
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def choose_prompt(options):
|
|
||||||
if not options:
|
|
||||||
raise EsphomeError(
|
|
||||||
"Found no valid options for upload/logging, please make sure relevant "
|
|
||||||
"sections (ota, api, mqtt, ...) are in your configuration and/or the "
|
|
||||||
"device is plugged in."
|
|
||||||
)
|
|
||||||
|
|
||||||
if len(options) == 1:
|
|
||||||
return options[0][1]
|
|
||||||
|
|
||||||
safe_print("Found multiple options, please choose one:")
|
|
||||||
for i, (desc, _) in enumerate(options):
|
|
||||||
safe_print(f" [{i+1}] {desc}")
|
|
||||||
|
|
||||||
while True:
|
|
||||||
opt = input("(number): ")
|
|
||||||
if opt in options:
|
|
||||||
opt = options.index(opt)
|
|
||||||
break
|
|
||||||
try:
|
|
||||||
opt = int(opt)
|
|
||||||
if opt < 1 or opt > len(options):
|
|
||||||
raise ValueError
|
|
||||||
break
|
|
||||||
except ValueError:
|
|
||||||
safe_print(color(Fore.RED, f"Invalid option: '{opt}'"))
|
|
||||||
return options[opt - 1][1]
|
|
||||||
|
|
||||||
|
|
||||||
def choose_upload_log_host(default, check_default, show_ota, show_mqtt, show_api):
|
|
||||||
options = []
|
|
||||||
for port in get_serial_ports():
|
|
||||||
options.append((f"{port.path} ({port.description})", port.path))
|
|
||||||
if (show_ota and "ota" in CORE.config) or (show_api and "api" in CORE.config):
|
|
||||||
options.append((f"Over The Air ({CORE.address})", CORE.address))
|
|
||||||
if default == "OTA":
|
|
||||||
return CORE.address
|
|
||||||
if show_mqtt and "mqtt" in CORE.config:
|
|
||||||
options.append(("MQTT ({})".format(CORE.config["mqtt"][CONF_BROKER]), "MQTT"))
|
|
||||||
if default == "OTA":
|
|
||||||
return "MQTT"
|
|
||||||
if default is not None:
|
|
||||||
return default
|
|
||||||
if check_default is not None and check_default in [opt[1] for opt in options]:
|
|
||||||
return check_default
|
|
||||||
return choose_prompt(options)
|
|
||||||
|
|
||||||
|
|
||||||
def get_port_type(port):
|
|
||||||
if port.startswith("/") or port.startswith("COM"):
|
|
||||||
return "SERIAL"
|
|
||||||
if port == "MQTT":
|
|
||||||
return "MQTT"
|
|
||||||
return "NETWORK"
|
|
||||||
|
|
||||||
|
|
||||||
def run_miniterm(config, port):
|
|
||||||
import serial
|
|
||||||
from esphome import platformio_api
|
|
||||||
|
|
||||||
if CONF_LOGGER not in config:
|
|
||||||
_LOGGER.info("Logger is not enabled. Not starting UART logs.")
|
|
||||||
return
|
|
||||||
baud_rate = config["logger"][CONF_BAUD_RATE]
|
|
||||||
if baud_rate == 0:
|
|
||||||
_LOGGER.info("UART logging is disabled (baud_rate=0). Not starting UART logs.")
|
|
||||||
_LOGGER.info("Starting log output from %s with baud rate %s", port, baud_rate)
|
|
||||||
|
|
||||||
backtrace_state = False
|
|
||||||
with serial.Serial(port, baudrate=baud_rate) as ser:
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
raw = ser.readline()
|
|
||||||
except serial.SerialException:
|
|
||||||
_LOGGER.error("Serial port closed!")
|
|
||||||
return
|
|
||||||
line = (
|
|
||||||
raw.replace(b"\r", b"")
|
|
||||||
.replace(b"\n", b"")
|
|
||||||
.decode("utf8", "backslashreplace")
|
|
||||||
)
|
|
||||||
time = datetime.now().time().strftime("[%H:%M:%S]")
|
|
||||||
message = time + line
|
|
||||||
safe_print(message)
|
|
||||||
|
|
||||||
backtrace_state = platformio_api.process_stacktrace(
|
|
||||||
config, line, backtrace_state=backtrace_state
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def wrap_to_code(name, comp):
    # Wrap a component's to_code coroutine so that the generated C++ is
    # preceded by a line comment naming the component and (when it has a
    # config schema) a YAML dump of its validated configuration.
    coro = coroutine(comp.to_code)

    @functools.wraps(comp.to_code)
    @coroutine_with_priority(coro.priority)
    def wrapped(conf):
        cg.add(cg.LineComment(f"{name}:"))
        if comp.config_schema is not None:
            conf_str = yaml_util.dump(conf)
            # "//" inside the YAML dump would prematurely terminate the
            # generated C++ line comment.
            conf_str = conf_str.replace("//", "")
            cg.add(cg.LineComment(indent(conf_str)))
        yield coro(conf)

    return wrapped
|
|
||||||
|
|
||||||
|
|
||||||
def write_cpp(config):
    """Generate the C++ sources for *config* and write them to disk."""
    generate_cpp_contents(config)
    return write_cpp_file()
|
|
||||||
|
|
||||||
|
|
||||||
def generate_cpp_contents(config):
    """Queue and run every component's code-generation job."""
    _LOGGER.info("Generating C++ source...")

    # One job per component that actually emits code.
    for comp_name, comp, comp_conf in iter_components(CORE.config):
        if comp.to_code is None:
            continue
        CORE.add_job(wrap_to_code(comp_name, comp), comp_conf)

    CORE.flush_tasks()
|
|
||||||
|
|
||||||
|
|
||||||
def write_cpp_file():
    """Write the PlatformIO project plus the generated main C++ section."""
    writer.write_platformio_project()
    writer.write_cpp(indent(CORE.cpp_main_section))
    return 0
|
|
||||||
|
|
||||||
|
|
||||||
def compile_program(args, config):
    """Compile the generated firmware via PlatformIO; return its exit code."""
    from esphome import platformio_api

    _LOGGER.info("Compiling app...")
    return platformio_api.run_compile(config, CORE.verbose)
|
|
||||||
|
|
||||||
|
|
||||||
def upload_using_esptool(config, port):
    # Flash an ESP8266 image over serial with esptool: first at the
    # configured (or default 460800) upload speed, then — if that fails —
    # once more at the safe 115200 baud fallback.
    path = CORE.firmware_bin
    first_baudrate = config[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS].get(
        "upload_speed", 460800
    )

    def run_esptool(baud_rate):
        # Build the esptool argv; the same list works both in-process and
        # as a subprocess command line.
        cmd = [
            "esptool.py",
            "--before",
            "default_reset",
            "--after",
            "hard_reset",
            "--baud",
            str(baud_rate),
            "--chip",
            "esp8266",
            "--port",
            port,
            "write_flash",
            "0x0",
            path,
        ]

        if os.environ.get("ESPHOME_USE_SUBPROCESS") is None:
            import esptool

            # Run esptool in-process (shares our stdout/stderr).
            # pylint: disable=protected-access
            return run_external_command(esptool._main, *cmd)

        return run_external_process(*cmd)

    rc = run_esptool(first_baudrate)
    if rc == 0 or first_baudrate == 115200:
        return rc
    # Try with 115200 baud rate, with some serial chips the faster baud rates do not work well
    _LOGGER.info(
        "Upload with baud rate %s failed. Trying again with baud rate 115200.",
        first_baudrate,
    )
    return run_esptool(115200)
|
|
||||||
|
|
||||||
|
|
||||||
def upload_program(config, args, host):
    # if upload is to a serial port use platformio, otherwise assume ota
    if get_port_type(host) == "SERIAL":
        from esphome import platformio_api

        if CORE.is_esp8266:
            # ESP8266 serial uploads go through esptool directly.
            return upload_using_esptool(config, host)
        return platformio_api.run_upload(config, CORE.verbose, host)

    from esphome import espota2

    # OTA uploads need the ota: component for port/password settings.
    if CONF_OTA not in config:
        raise EsphomeError(
            "Cannot upload Over the Air as the config does not include the ota: "
            "component"
        )

    ota_conf = config[CONF_OTA]
    remote_port = ota_conf[CONF_PORT]
    password = ota_conf[CONF_PASSWORD]
    return espota2.run_ota(host, remote_port, password, CORE.firmware_bin)
|
|
||||||
|
|
||||||
|
|
||||||
def show_logs(config, args, port):
    """Attach a log stream for *port* via serial, native API or MQTT.

    Raises:
        EsphomeError: when the logger is missing or no usable logging
            transport is configured.
    """
    if "logger" not in config:
        raise EsphomeError("Logger is not configured!")
    # Classify the target once instead of re-deriving it in every branch.
    port_type = get_port_type(port)
    if port_type == "SERIAL":
        run_miniterm(config, port)
        return 0
    if port_type == "NETWORK" and "api" in config:
        from esphome.api.client import run_logs

        return run_logs(config, port)
    if port_type == "MQTT" and "mqtt" in config:
        from esphome import mqtt

        return mqtt.show_logs(
            config, args.topic, args.username, args.password, args.client_id
        )

    raise EsphomeError("No remote or local logging method configured (api/mqtt/logger)")
|
|
||||||
|
|
||||||
|
|
||||||
def clean_mqtt(config, args):
    """Clear retained MQTT messages for this node's topic."""
    from esphome import mqtt

    return mqtt.clear_topic(
        config, args.topic, args.username, args.password, args.client_id
    )
|
|
||||||
|
|
||||||
|
|
||||||
def command_wizard(args):
    """Run the interactive setup wizard for the given config path."""
    from esphome import wizard

    return wizard.wizard(args.configuration[0])
|
|
||||||
|
|
||||||
|
|
||||||
def command_config(args, config):
    """Print the validated configuration and return success."""
    _LOGGER.info("Configuration is valid!")
    # In non-verbose mode, hide auto-generated IDs for readability.
    output = config if CORE.verbose else strip_default_ids(config)
    safe_print(yaml_util.dump(output))
    return 0
|
|
||||||
|
|
||||||
|
|
||||||
def command_vscode(args):
    """Serve config validation requests for the VSCode/ace integration."""
    from esphome import vscode

    # Suppress info/warning noise: the extension parses our stdout.
    logging.disable(logging.INFO)
    logging.disable(logging.WARNING)
    CORE.config_path = args.configuration[0]
    vscode.read_config(args)
|
|
||||||
|
|
||||||
|
|
||||||
def command_compile(args, config):
    """Generate sources for the configuration, then optionally compile."""
    rc = write_cpp(config)
    if rc != 0:
        return rc
    if args.only_generate:
        _LOGGER.info("Successfully generated source code.")
        return 0
    rc = compile_program(args, config)
    if rc != 0:
        return rc
    _LOGGER.info("Successfully compiled program.")
    return 0
|
|
||||||
|
|
||||||
|
|
||||||
def command_upload(args, config):
    """Upload the most recently built firmware to the chosen target."""
    target = choose_upload_log_host(
        default=args.upload_port,
        check_default=None,
        show_ota=True,
        show_mqtt=False,
        show_api=False,
    )
    rc = upload_program(config, args, target)
    if rc != 0:
        return rc
    _LOGGER.info("Successfully uploaded program.")
    return 0
|
|
||||||
|
|
||||||
|
|
||||||
def command_logs(args, config):
    """Stream logs from the chosen serial/network/MQTT source."""
    target = choose_upload_log_host(
        default=args.serial_port,
        check_default=None,
        show_ota=False,
        show_mqtt=True,
        show_api=True,
    )
    return show_logs(config, args, target)
|
|
||||||
|
|
||||||
|
|
||||||
def command_run(args, config):
    """Full pipeline: generate, compile, upload and (optionally) tail logs."""
    rc = write_cpp(config)
    if rc != 0:
        return rc
    rc = compile_program(args, config)
    if rc != 0:
        return rc
    _LOGGER.info("Successfully compiled program.")
    upload_host = choose_upload_log_host(
        default=args.upload_port,
        check_default=None,
        show_ota=True,
        show_mqtt=False,
        show_api=True,
    )
    rc = upload_program(config, args, upload_host)
    if rc != 0:
        return rc
    _LOGGER.info("Successfully uploaded program.")
    if args.no_logs:
        return 0
    # Re-prompt for a log source, preferring the host we just uploaded to.
    log_host = choose_upload_log_host(
        default=args.upload_port,
        check_default=upload_host,
        show_ota=False,
        show_mqtt=True,
        show_api=True,
    )
    return show_logs(config, args, log_host)
|
|
||||||
|
|
||||||
|
|
||||||
def command_clean_mqtt(args, config):
    """CLI entry point that clears retained MQTT messages."""
    return clean_mqtt(config, args)
|
|
||||||
|
|
||||||
|
|
||||||
def command_mqtt_fingerprint(args, config):
    """Print the SSL fingerprint of the configured MQTT broker."""
    from esphome import mqtt

    return mqtt.get_fingerprint(config)
|
|
||||||
|
|
||||||
|
|
||||||
def command_version(args):
    """Print the installed ESPHome version and return success."""
    safe_print(f"Version: {const.__version__}")
    return 0
|
|
||||||
|
|
||||||
|
|
||||||
def command_clean(args, config):
    """Delete temporary build artifacts for the current configuration."""
    try:
        writer.clean_build()
    except OSError as err:
        _LOGGER.error("Error deleting build files: %s", err)
        return 1
    _LOGGER.info("Done!")
    return 0
|
|
||||||
|
|
||||||
|
|
||||||
def command_dashboard(args):
    """Launch the web dashboard server."""
    from esphome.dashboard import dashboard

    return dashboard.start_web_server(args)
|
|
||||||
|
|
||||||
|
|
||||||
def command_update_all(args):
    # Recompile and OTA-update every YAML file under the given directory,
    # printing a per-file banner and a final summary. Returns the number
    # of failed updates (0 == all succeeded).
    import click

    success = {}
    files = list_yaml_files(args.configuration[0])
    twidth = 60  # width of the separator/banner lines

    def print_bar(middle_text):
        middle_text = f" {middle_text} "
        # unstyle() so ANSI color codes don't skew the centering math.
        width = len(click.unstyle(middle_text))
        half_line = "=" * ((twidth - width) // 2)
        click.echo(f"{half_line}{middle_text}{half_line}")

    for f in files:
        print("Updating {}".format(color(Fore.CYAN, f)))
        print("-" * twidth)
        print()
        # Each file runs in a fresh esphome subprocess so one failure
        # cannot poison the next build.
        rc = run_external_process(
            "esphome", "--dashboard", f, "run", "--no-logs", "--upload-port", "OTA"
        )
        if rc == 0:
            print_bar("[{}] {}".format(color(Fore.BOLD_GREEN, "SUCCESS"), f))
            success[f] = True
        else:
            print_bar("[{}] {}".format(color(Fore.BOLD_RED, "ERROR"), f))
            success[f] = False

        print()
        print()
        print()

    print_bar("[{}]".format(color(Fore.BOLD_WHITE, "SUMMARY")))
    failed = 0
    for f in files:
        if success[f]:
            print(" - {}: {}".format(f, color(Fore.GREEN, "SUCCESS")))
        else:
            print(" - {}: {}".format(f, color(Fore.BOLD_RED, "FAILED")))
            failed += 1
    return failed
|
|
||||||
|
|
||||||
|
|
||||||
# Commands that run before any YAML configuration is loaded/validated.
PRE_CONFIG_ACTIONS = {
    "wizard": command_wizard,
    "version": command_version,
    "dashboard": command_dashboard,
    "vscode": command_vscode,
    "update-all": command_update_all,
}

# Commands that receive the parsed and validated configuration.
POST_CONFIG_ACTIONS = {
    "config": command_config,
    "compile": command_compile,
    "upload": command_upload,
    "logs": command_logs,
    "run": command_run,
    "clean-mqtt": command_clean_mqtt,
    "mqtt-fingerprint": command_mqtt_fingerprint,
    "clean": command_clean,
}
|
|
||||||
|
|
||||||
|
|
||||||
def parse_args(argv):
    """Build the esphome argument parser and parse *argv* (minus argv[0])."""
    parser = argparse.ArgumentParser(description=f"ESPHome v{const.__version__}")
    parser.add_argument(
        "-v", "--verbose", help="Enable verbose esphome logs.", action="store_true"
    )
    parser.add_argument(
        "-q", "--quiet", help="Disable all esphome logs.", action="store_true"
    )
    # Hidden flag set by the dashboard when it spawns esphome subprocesses.
    parser.add_argument("--dashboard", help=argparse.SUPPRESS, action="store_true")
    parser.add_argument(
        "-s",
        "--substitution",
        nargs=2,
        action="append",
        help="Add a substitution",
        metavar=("key", "value"),
    )
    parser.add_argument(
        "configuration", help="Your YAML configuration file.", nargs="*"
    )

    subparsers = parser.add_subparsers(help="Commands", dest="command")
    subparsers.required = True
    subparsers.add_parser("config", help="Validate the configuration and spit it out.")

    parser_compile = subparsers.add_parser(
        "compile", help="Read the configuration and compile a program."
    )
    parser_compile.add_argument(
        "--only-generate",
        help="Only generate source code, do not compile.",
        action="store_true",
    )

    parser_upload = subparsers.add_parser(
        "upload", help="Validate the configuration and upload the latest binary."
    )
    parser_upload.add_argument(
        "--upload-port",
        help="Manually specify the upload port to use. "
        "For example /dev/cu.SLAB_USBtoUART.",
    )

    parser_logs = subparsers.add_parser(
        "logs", help="Validate the configuration and show all MQTT logs."
    )
    parser_logs.add_argument("--topic", help="Manually set the topic to subscribe to.")
    parser_logs.add_argument("--username", help="Manually set the username.")
    parser_logs.add_argument("--password", help="Manually set the password.")
    parser_logs.add_argument("--client-id", help="Manually set the client id.")
    parser_logs.add_argument(
        "--serial-port",
        # BUGFIX: the adjacent literals previously concatenated to
        # "...a serial port to useFor example..." (missing separator).
        help="Manually specify a serial port to use. "
        "For example /dev/cu.SLAB_USBtoUART.",
    )

    parser_run = subparsers.add_parser(
        "run",
        help="Validate the configuration, create a binary, "
        "upload it, and start MQTT logs.",
    )
    parser_run.add_argument(
        "--upload-port",
        help="Manually specify the upload port/ip to use. "
        "For example /dev/cu.SLAB_USBtoUART.",
    )
    parser_run.add_argument(
        "--no-logs", help="Disable starting MQTT logs.", action="store_true"
    )
    parser_run.add_argument(
        "--topic", help="Manually set the topic to subscribe to for logs."
    )
    parser_run.add_argument(
        "--username", help="Manually set the MQTT username for logs."
    )
    parser_run.add_argument(
        "--password", help="Manually set the MQTT password for logs."
    )
    parser_run.add_argument("--client-id", help="Manually set the client id for logs.")

    parser_clean = subparsers.add_parser(
        "clean-mqtt", help="Helper to clear an MQTT topic from retain messages."
    )
    parser_clean.add_argument("--topic", help="Manually set the topic to subscribe to.")
    parser_clean.add_argument("--username", help="Manually set the username.")
    parser_clean.add_argument("--password", help="Manually set the password.")
    parser_clean.add_argument("--client-id", help="Manually set the client id.")

    subparsers.add_parser(
        "wizard",
        help="A helpful setup wizard that will guide "
        "you through setting up esphome.",
    )

    subparsers.add_parser(
        "mqtt-fingerprint", help="Get the SSL fingerprint from a MQTT broker."
    )

    subparsers.add_parser("version", help="Print the esphome version and exit.")

    subparsers.add_parser("clean", help="Delete all temporary build files.")

    dashboard = subparsers.add_parser(
        "dashboard", help="Create a simple web server for a dashboard."
    )
    dashboard.add_argument(
        "--port",
        help="The HTTP port to open connections on. Defaults to 6052.",
        type=int,
        default=6052,
    )
    dashboard.add_argument(
        "--username",
        help="The optional username to require for authentication.",
        type=str,
        default="",
    )
    dashboard.add_argument(
        "--password",
        help="The optional password to require for authentication.",
        type=str,
        default="",
    )
    dashboard.add_argument(
        "--open-ui", help="Open the dashboard UI in a browser.", action="store_true"
    )
    dashboard.add_argument("--hassio", help=argparse.SUPPRESS, action="store_true")
    dashboard.add_argument(
        "--socket", help="Make the dashboard serve under a unix socket", type=str
    )

    vscode = subparsers.add_parser("vscode", help=argparse.SUPPRESS)
    vscode.add_argument("--ace", action="store_true")

    subparsers.add_parser("update-all", help=argparse.SUPPRESS)

    # argv[0] is the program name itself.
    return parser.parse_args(argv[1:])
|
|
||||||
|
|
||||||
|
|
||||||
def run_esphome(argv):
    """Top-level CLI dispatcher; returns a process exit code."""
    args = parse_args(argv)
    CORE.dashboard = args.dashboard

    setup_log(args.verbose, args.quiet)
    if args.command != "version" and not args.configuration:
        _LOGGER.error("Missing configuration parameter, see esphome --help.")
        return 1

    if sys.version_info < (3, 6, 0):
        _LOGGER.error(
            "You're running ESPHome with Python <3.6. ESPHome is no longer compatible "
            "with this Python version. Please reinstall ESPHome with Python 3.6+"
        )
        return 1

    # Pre-config actions don't need a parsed configuration.
    if args.command in PRE_CONFIG_ACTIONS:
        try:
            return PRE_CONFIG_ACTIONS[args.command](args)
        except EsphomeError as e:
            _LOGGER.error(e)
            return 1

    for conf_path in args.configuration:
        CORE.config_path = conf_path
        CORE.dashboard = args.dashboard

        config = read_config(dict(args.substitution) if args.substitution else {})
        if config is None:
            return 1
        CORE.config = config

        if args.command not in POST_CONFIG_ACTIONS:
            safe_print(f"Unknown command {args.command}")
            # BUGFIX: previously fell through to the dict lookup below and
            # crashed with a KeyError; bail out with an error code instead.
            return 1

        try:
            rc = POST_CONFIG_ACTIONS[args.command](args, config)
        except EsphomeError as e:
            _LOGGER.error(e)
            return 1
        if rc != 0:
            return rc

        # Reset global state between configuration files.
        CORE.reset()
    return 0
|
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Console-script entry point: translate exceptions into exit codes."""
    try:
        return run_esphome(sys.argv)
    except EsphomeError as err:
        _LOGGER.error(err)
        return 1
    except KeyboardInterrupt:
        # Ctrl-C is an ordinary (no-traceback) failure exit.
        return 1
|
|
||||||
|
|
||||||
|
|
||||||
# Allow running this module directly (python -m esphome ...).
if __name__ == "__main__":
    sys.exit(main())
|
|
||||||
File diff suppressed because one or more lines are too long
@@ -1,518 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
import functools
|
|
||||||
import logging
|
|
||||||
import socket
|
|
||||||
import threading
|
|
||||||
import time
|
|
||||||
|
|
||||||
# pylint: disable=unused-import
|
|
||||||
from typing import Optional # noqa
|
|
||||||
from google.protobuf import message # noqa
|
|
||||||
|
|
||||||
from esphome import const
|
|
||||||
import esphome.api.api_pb2 as pb
|
|
||||||
from esphome.const import CONF_PASSWORD, CONF_PORT
|
|
||||||
from esphome.core import EsphomeError
|
|
||||||
from esphome.helpers import resolve_ip_address, indent
|
|
||||||
from esphome.log import color, Fore
|
|
||||||
from esphome.util import safe_print
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class APIConnectionError(EsphomeError):
    # Raised for any failure while talking to the native API: connect,
    # authentication, read/write errors, or timeouts.
    pass
|
|
||||||
|
|
||||||
|
|
||||||
# Wire-format message-type id -> protobuf message class. The integer keys
# are the type ids carried in each frame header (see _send_message and
# _run_once for the framing).
MESSAGE_TYPE_TO_PROTO = {
    1: pb.HelloRequest,
    2: pb.HelloResponse,
    3: pb.ConnectRequest,
    4: pb.ConnectResponse,
    5: pb.DisconnectRequest,
    6: pb.DisconnectResponse,
    7: pb.PingRequest,
    8: pb.PingResponse,
    9: pb.DeviceInfoRequest,
    10: pb.DeviceInfoResponse,
    11: pb.ListEntitiesRequest,
    12: pb.ListEntitiesBinarySensorResponse,
    13: pb.ListEntitiesCoverResponse,
    14: pb.ListEntitiesFanResponse,
    15: pb.ListEntitiesLightResponse,
    16: pb.ListEntitiesSensorResponse,
    17: pb.ListEntitiesSwitchResponse,
    18: pb.ListEntitiesTextSensorResponse,
    19: pb.ListEntitiesDoneResponse,
    20: pb.SubscribeStatesRequest,
    21: pb.BinarySensorStateResponse,
    22: pb.CoverStateResponse,
    23: pb.FanStateResponse,
    24: pb.LightStateResponse,
    25: pb.SensorStateResponse,
    26: pb.SwitchStateResponse,
    27: pb.TextSensorStateResponse,
    28: pb.SubscribeLogsRequest,
    29: pb.SubscribeLogsResponse,
    30: pb.CoverCommandRequest,
    31: pb.FanCommandRequest,
    32: pb.LightCommandRequest,
    33: pb.SwitchCommandRequest,
    34: pb.SubscribeServiceCallsRequest,
    35: pb.ServiceCallResponse,
    36: pb.GetTimeRequest,
    37: pb.GetTimeResponse,
}
|
|
||||||
|
|
||||||
|
|
||||||
def _varuint_to_bytes(value):
|
|
||||||
if value <= 0x7F:
|
|
||||||
return bytes([value])
|
|
||||||
|
|
||||||
ret = bytes()
|
|
||||||
while value:
|
|
||||||
temp = value & 0x7F
|
|
||||||
value >>= 7
|
|
||||||
if value:
|
|
||||||
ret += bytes([temp | 0x80])
|
|
||||||
else:
|
|
||||||
ret += bytes([temp])
|
|
||||||
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
def _bytes_to_varuint(value):
|
|
||||||
result = 0
|
|
||||||
bitpos = 0
|
|
||||||
for val in value:
|
|
||||||
result |= (val & 0x7F) << bitpos
|
|
||||||
bitpos += 7
|
|
||||||
if (val & 0x80) == 0:
|
|
||||||
return result
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=too-many-instance-attributes,not-callable
class APIClient(threading.Thread):
    """Threaded client for the ESPHome native API (TCP + protobuf framing)."""

    def __init__(self, address, port, password):
        threading.Thread.__init__(self)
        self._address = address  # type: str
        self._port = port  # type: int
        self._password = password  # type: Optional[str]
        self._socket = None  # type: Optional[socket.socket]
        # Set while a socket is open; the reader loop waits on it.
        self._socket_open_event = threading.Event()
        # Serializes writes from multiple threads onto the socket.
        self._socket_write_lock = threading.Lock()
        self._connected = False
        self._authenticated = False
        # Callbacks invoked for every received message.
        self._message_handlers = []
        # Seconds between keepalive pings.
        self._keepalive = 5
        self._ping_timer = None

        # Optional user callbacks.
        self.on_disconnect = None
        self.on_connect = None
        self.on_login = None
        self.auto_reconnect = False
        # Set while run() is executing; cleared when the thread exits.
        self._running_event = threading.Event()
        self._stop_event = threading.Event()
|
|
||||||
|
|
||||||
@property
def stopped(self):
    # True once stop() has been requested.
    return self._stop_event.is_set()
|
|
||||||
|
|
||||||
def _refresh_ping(self):
    # (Re)arm the keepalive timer: after self._keepalive seconds send a
    # ping and, on success, re-arm again. Any pending timer is cancelled
    # first so at most one is ever outstanding.
    if self._ping_timer is not None:
        self._ping_timer.cancel()
        self._ping_timer = None

    def func():
        self._ping_timer = None

        if self._connected:
            try:
                self.ping()
            except APIConnectionError as err:
                # Ping failure tears the connection down.
                self._fatal_error(err)
            else:
                self._refresh_ping()

    self._ping_timer = threading.Timer(self._keepalive, func)
    self._ping_timer.start()
|
|
||||||
|
|
||||||
def _cancel_ping(self):
    # Stop the keepalive timer if one is armed.
    if self._ping_timer is not None:
        self._ping_timer.cancel()
        self._ping_timer = None
|
|
||||||
|
|
||||||
def _close_socket(self):
    # Tear down the socket and reset all connection state; safe to call
    # when already closed.
    self._cancel_ping()
    if self._socket is not None:
        self._socket.close()
        self._socket = None
    self._socket_open_event.clear()
    self._connected = False
    self._authenticated = False
    self._message_handlers = []
|
|
||||||
|
|
||||||
def stop(self, force=False):
    # Shut the client down. With force=False a clean DisconnectRequest
    # is attempted first and the reader thread is joined.
    if self.stopped:
        raise ValueError

    if self._connected and not force:
        try:
            self.disconnect()
        except APIConnectionError:
            # Best-effort goodbye; the socket is closed regardless.
            pass
    self._close_socket()

    self._stop_event.set()
    if not force:
        self.join()
|
|
||||||
|
|
||||||
def connect(self):
    # Resolve the address, open the TCP connection and perform the Hello
    # handshake. Requires the reader thread (start()) to be running.
    if not self._running_event.wait(0.1):
        raise APIConnectionError("You need to call start() first!")

    if self._connected:
        # Drop any previous session without firing on_disconnect.
        self.disconnect(on_disconnect=False)

    try:
        ip = resolve_ip_address(self._address)
    except EsphomeError as err:
        _LOGGER.warning(
            "Error resolving IP address of %s. Is it connected to WiFi?",
            self._address,
        )
        _LOGGER.warning(
            "(If this error persists, please set a static IP address: "
            "https://esphome.io/components/wifi.html#manual-ips)"
        )
        raise APIConnectionError(err) from err

    _LOGGER.info("Connecting to %s:%s (%s)", self._address, self._port, ip)
    self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Generous timeout for the initial connect only.
    self._socket.settimeout(10.0)
    self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    try:
        self._socket.connect((ip, self._port))
    except OSError as err:
        err = APIConnectionError(f"Error connecting to {ip}: {err}")
        self._fatal_error(err)
        raise err
    # Short timeout afterwards so the reader loop stays responsive to stop().
    self._socket.settimeout(0.1)

    self._socket_open_event.set()

    hello = pb.HelloRequest()
    hello.client_info = f"ESPHome v{const.__version__}"
    try:
        resp = self._send_message_await_response(hello, pb.HelloResponse)
    except APIConnectionError as err:
        self._fatal_error(err)
        raise err
    _LOGGER.debug(
        "Successfully connected to %s ('%s' API=%s.%s)",
        self._address,
        resp.server_info,
        resp.api_version_major,
        resp.api_version_minor,
    )
    self._connected = True
    self._refresh_ping()
    if self.on_connect is not None:
        self.on_connect()
|
|
||||||
|
|
||||||
def _check_connected(self):
    # Raise (and reset connection state) if no session is established.
    if not self._connected:
        err = APIConnectionError("Must be connected!")
        self._fatal_error(err)
        raise err
|
|
||||||
|
|
||||||
def login(self):
    # Authenticate with the configured password via ConnectRequest.
    self._check_connected()
    if self._authenticated:
        raise APIConnectionError("Already logged in!")

    connect = pb.ConnectRequest()
    if self._password is not None:
        connect.password = self._password
    resp = self._send_message_await_response(connect, pb.ConnectResponse)
    if resp.invalid_password:
        raise APIConnectionError("Invalid password!")

    self._authenticated = True
    if self.on_login is not None:
        self.on_login()
|
|
||||||
|
|
||||||
def _fatal_error(self, err):
    # Close the socket and notify on_disconnect — but only if a session
    # was actually up when the error occurred.
    was_connected = self._connected

    self._close_socket()

    if was_connected and self.on_disconnect is not None:
        self.on_disconnect(err)
|
|
||||||
|
|
||||||
def _write(self, data):  # type: (bytes) -> None
    # Send raw bytes, serialized by the write lock. OSError is converted
    # to APIConnectionError and the connection is torn down.
    if self._socket is None:
        raise APIConnectionError("Socket closed")

    # _LOGGER.debug("Write: %s", format_bytes(data))
    with self._socket_write_lock:
        try:
            self._socket.sendall(data)
        except OSError as err:
            err = APIConnectionError(f"Error while writing data: {err}")
            self._fatal_error(err)
            raise err
|
|
||||||
|
|
||||||
def _send_message(self, msg):
    # type: (message.Message) -> None
    # Frame and send one protobuf message:
    #   0x00 preamble | varuint payload length | varuint type id | payload
    for message_type, klass in MESSAGE_TYPE_TO_PROTO.items():
        if isinstance(msg, klass):
            break
    else:
        # Message class is not part of the wire protocol table.
        raise ValueError

    encoded = msg.SerializeToString()
    _LOGGER.debug("Sending %s:\n%s", type(msg), indent(str(msg)))
    req = bytes([0])
    req += _varuint_to_bytes(len(encoded))
    req += _varuint_to_bytes(message_type)
    req += encoded
    self._write(req)
|
|
||||||
|
|
||||||
def _send_message_await_response_complex(
    self, send_msg, do_append, do_stop, timeout=5
):
    # Send *send_msg*, then collect incoming messages for which
    # do_append(msg) is true until do_stop(msg) is true or *timeout*
    # seconds pass. Returns the collected responses.
    event = threading.Event()
    responses = []

    def on_message(resp):
        if do_append(resp):
            responses.append(resp)
        if do_stop(resp):
            event.set()

    self._message_handlers.append(on_message)
    self._send_message(send_msg)
    ret = event.wait(timeout)
    try:
        self._message_handlers.remove(on_message)
    except ValueError:
        # Handler list may have been cleared by a concurrent disconnect.
        pass
    if not ret:
        raise APIConnectionError("Timeout while waiting for message response!")
    return responses
|
|
||||||
|
|
||||||
def _send_message_await_response(self, send_msg, response_type, timeout=5):
    # Request/response helper: wait for the single reply of *response_type*.
    def is_response(msg):
        return isinstance(msg, response_type)

    return self._send_message_await_response_complex(
        send_msg, is_response, is_response, timeout
    )[0]
|
|
||||||
|
|
||||||
def device_info(self):
    # Query the device's DeviceInfoResponse.
    self._check_connected()
    return self._send_message_await_response(
        pb.DeviceInfoRequest(), pb.DeviceInfoResponse
    )
|
|
||||||
|
|
||||||
def ping(self):
    # Round-trip a PingRequest; used by the keepalive timer.
    self._check_connected()
    return self._send_message_await_response(pb.PingRequest(), pb.PingResponse)
|
|
||||||
|
|
||||||
def disconnect(self, on_disconnect=True):
    # Graceful shutdown: ask the device to disconnect, then close the
    # socket. Optionally fires the on_disconnect callback with no error.
    self._check_connected()

    try:
        self._send_message_await_response(
            pb.DisconnectRequest(), pb.DisconnectResponse
        )
    except APIConnectionError:
        # Best-effort; close the socket either way.
        pass
    self._close_socket()

    if self.on_disconnect is not None and on_disconnect:
        self.on_disconnect(None)
|
|
||||||
|
|
||||||
def _check_authenticated(self):
    # Guard for calls that require a successful login().
    if not self._authenticated:
        raise APIConnectionError("Must login first!")
|
|
||||||
|
|
||||||
def subscribe_logs(self, on_log, log_level=7, dump_config=False):
    # Subscribe to device log output; *on_log* is called with every
    # SubscribeLogsResponse received.
    self._check_authenticated()

    def on_msg(msg):
        if isinstance(msg, pb.SubscribeLogsResponse):
            on_log(msg)

    self._message_handlers.append(on_msg)
    req = pb.SubscribeLogsRequest(dump_config=dump_config)
    req.level = log_level
    self._send_message(req)
|
|
||||||
|
|
||||||
def _recv(self, amount):
    # Read exactly *amount* bytes, looping over the short socket timeout
    # so stop() can interrupt a blocked read.
    ret = bytes()
    if amount == 0:
        return ret

    while len(ret) < amount:
        if self.stopped:
            raise APIConnectionError("Stopped!")
        if not self._socket_open_event.is_set():
            raise APIConnectionError("No socket!")
        try:
            val = self._socket.recv(amount - len(ret))
        except AttributeError as err:
            # self._socket was set to None concurrently.
            raise APIConnectionError("Socket was closed") from err
        except socket.timeout:
            # Timeout just means "check stop flags and retry".
            continue
        except OSError as err:
            raise APIConnectionError(f"Error while receiving data: {err}") from err
        ret += val
    return ret
|
|
||||||
|
|
||||||
def _recv_varint(self):
    # Read one varuint off the wire byte-by-byte (MSB = continuation bit).
    raw = bytes()
    while not raw or raw[-1] & 0x80:
        raw += self._recv(1)
    return _bytes_to_varuint(raw)
|
|
||||||
|
|
||||||
def _run_once(self):
    # Receive and dispatch a single framed message. Returns quickly when
    # no socket is open so the reader loop can check shutdown flags.
    if not self._socket_open_event.wait(0.1):
        return

    # Preamble
    if self._recv(1)[0] != 0x00:
        raise APIConnectionError("Invalid preamble")

    length = self._recv_varint()
    msg_type = self._recv_varint()

    raw_msg = self._recv(length)
    if msg_type not in MESSAGE_TYPE_TO_PROTO:
        # Unknown-but-well-framed messages are skipped, not fatal.
        _LOGGER.debug("Skipping message type %s", msg_type)
        return

    msg = MESSAGE_TYPE_TO_PROTO[msg_type]()
    msg.ParseFromString(raw_msg)
    _LOGGER.debug("Got message: %s:\n%s", type(msg), indent(str(msg)))
    # Iterate over a copy: handlers may remove themselves while running.
    for msg_handler in self._message_handlers[:]:
        msg_handler(msg)
    self._handle_internal_messages(msg)
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
self._running_event.set()
|
|
||||||
while not self.stopped:
|
|
||||||
try:
|
|
||||||
self._run_once()
|
|
||||||
except APIConnectionError as err:
|
|
||||||
if self.stopped:
|
|
||||||
break
|
|
||||||
if self._connected:
|
|
||||||
_LOGGER.error("Error while reading incoming messages: %s", err)
|
|
||||||
self._fatal_error(err)
|
|
||||||
self._running_event.clear()
|
|
||||||
|
|
||||||
def _handle_internal_messages(self, msg):
|
|
||||||
if isinstance(msg, pb.DisconnectRequest):
|
|
||||||
self._send_message(pb.DisconnectResponse())
|
|
||||||
if self._socket is not None:
|
|
||||||
self._socket.close()
|
|
||||||
self._socket = None
|
|
||||||
self._connected = False
|
|
||||||
if self.on_disconnect is not None:
|
|
||||||
self.on_disconnect(None)
|
|
||||||
elif isinstance(msg, pb.PingRequest):
|
|
||||||
self._send_message(pb.PingResponse())
|
|
||||||
elif isinstance(msg, pb.GetTimeRequest):
|
|
||||||
resp = pb.GetTimeResponse()
|
|
||||||
resp.epoch_seconds = int(time.time())
|
|
||||||
self._send_message(resp)
|
|
||||||
|
|
||||||
|
|
||||||
def run_logs(config, address):
|
|
||||||
conf = config["api"]
|
|
||||||
port = conf[CONF_PORT]
|
|
||||||
password = conf[CONF_PASSWORD]
|
|
||||||
_LOGGER.info("Starting log output from %s using esphome API", address)
|
|
||||||
|
|
||||||
cli = APIClient(address, port, password)
|
|
||||||
stopping = False
|
|
||||||
retry_timer = []
|
|
||||||
|
|
||||||
has_connects = []
|
|
||||||
|
|
||||||
def try_connect(err, tries=0):
|
|
||||||
if stopping:
|
|
||||||
return
|
|
||||||
|
|
||||||
if err:
|
|
||||||
_LOGGER.warning("Disconnected from API: %s", err)
|
|
||||||
|
|
||||||
while retry_timer:
|
|
||||||
retry_timer.pop(0).cancel()
|
|
||||||
|
|
||||||
error = None
|
|
||||||
try:
|
|
||||||
cli.connect()
|
|
||||||
cli.login()
|
|
||||||
except APIConnectionError as err2: # noqa
|
|
||||||
error = err2
|
|
||||||
|
|
||||||
if error is None:
|
|
||||||
_LOGGER.info("Successfully connected to %s", address)
|
|
||||||
return
|
|
||||||
|
|
||||||
wait_time = int(min(1.5 ** min(tries, 100), 30))
|
|
||||||
if not has_connects:
|
|
||||||
_LOGGER.warning(
|
|
||||||
"Initial connection failed. The ESP might not be connected "
|
|
||||||
"to WiFi yet (%s). Re-Trying in %s seconds",
|
|
||||||
error,
|
|
||||||
wait_time,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
_LOGGER.warning(
|
|
||||||
"Couldn't connect to API (%s). Trying to reconnect in %s seconds",
|
|
||||||
error,
|
|
||||||
wait_time,
|
|
||||||
)
|
|
||||||
timer = threading.Timer(
|
|
||||||
wait_time, functools.partial(try_connect, None, tries + 1)
|
|
||||||
)
|
|
||||||
timer.start()
|
|
||||||
retry_timer.append(timer)
|
|
||||||
|
|
||||||
def on_log(msg):
|
|
||||||
time_ = datetime.now().time().strftime("[%H:%M:%S]")
|
|
||||||
text = msg.message
|
|
||||||
if msg.send_failed:
|
|
||||||
text = color(
|
|
||||||
Fore.WHITE,
|
|
||||||
"(Message skipped because it was too big to fit in "
|
|
||||||
"TCP buffer - This is only cosmetic)",
|
|
||||||
)
|
|
||||||
safe_print(time_ + text)
|
|
||||||
|
|
||||||
def on_login():
|
|
||||||
try:
|
|
||||||
cli.subscribe_logs(on_log, dump_config=not has_connects)
|
|
||||||
has_connects.append(True)
|
|
||||||
except APIConnectionError:
|
|
||||||
cli.disconnect()
|
|
||||||
|
|
||||||
cli.on_disconnect = try_connect
|
|
||||||
cli.on_login = on_login
|
|
||||||
cli.start()
|
|
||||||
|
|
||||||
try:
|
|
||||||
try_connect(None)
|
|
||||||
while True:
|
|
||||||
time.sleep(1)
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
stopping = True
|
|
||||||
cli.stop(True)
|
|
||||||
while retry_timer:
|
|
||||||
retry_timer.pop(0).cancel()
|
|
||||||
return 0
|
|
||||||
@@ -1,328 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome.const import (
|
|
||||||
CONF_AUTOMATION_ID,
|
|
||||||
CONF_CONDITION,
|
|
||||||
CONF_ELSE,
|
|
||||||
CONF_ID,
|
|
||||||
CONF_THEN,
|
|
||||||
CONF_TRIGGER_ID,
|
|
||||||
CONF_TYPE_ID,
|
|
||||||
CONF_TIME,
|
|
||||||
)
|
|
||||||
from esphome.core import coroutine
|
|
||||||
from esphome.jsonschema import jschema_extractor
|
|
||||||
from esphome.util import Registry
|
|
||||||
|
|
||||||
|
|
||||||
def maybe_simple_id(*validators):
|
|
||||||
return maybe_conf(CONF_ID, *validators)
|
|
||||||
|
|
||||||
|
|
||||||
def maybe_conf(conf, *validators):
|
|
||||||
validator = cv.All(*validators)
|
|
||||||
|
|
||||||
@jschema_extractor("maybe")
|
|
||||||
def validate(value):
|
|
||||||
# pylint: disable=comparison-with-callable
|
|
||||||
if value == jschema_extractor:
|
|
||||||
return validator
|
|
||||||
|
|
||||||
if isinstance(value, dict):
|
|
||||||
return validator(value)
|
|
||||||
with cv.remove_prepend_path([conf]):
|
|
||||||
return validator({conf: value})
|
|
||||||
|
|
||||||
return validate
|
|
||||||
|
|
||||||
|
|
||||||
def register_action(name, action_type, schema):
|
|
||||||
return ACTION_REGISTRY.register(name, action_type, schema)
|
|
||||||
|
|
||||||
|
|
||||||
def register_condition(name, condition_type, schema):
|
|
||||||
return CONDITION_REGISTRY.register(name, condition_type, schema)
|
|
||||||
|
|
||||||
|
|
||||||
Action = cg.esphome_ns.class_("Action")
|
|
||||||
Trigger = cg.esphome_ns.class_("Trigger")
|
|
||||||
ACTION_REGISTRY = Registry()
|
|
||||||
Condition = cg.esphome_ns.class_("Condition")
|
|
||||||
CONDITION_REGISTRY = Registry()
|
|
||||||
validate_action = cv.validate_registry_entry("action", ACTION_REGISTRY)
|
|
||||||
validate_action_list = cv.validate_registry("action", ACTION_REGISTRY)
|
|
||||||
validate_condition = cv.validate_registry_entry("condition", CONDITION_REGISTRY)
|
|
||||||
validate_condition_list = cv.validate_registry("condition", CONDITION_REGISTRY)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_potentially_and_condition(value):
|
|
||||||
if isinstance(value, list):
|
|
||||||
with cv.remove_prepend_path(["and"]):
|
|
||||||
return validate_condition({"and": value})
|
|
||||||
return validate_condition(value)
|
|
||||||
|
|
||||||
|
|
||||||
DelayAction = cg.esphome_ns.class_("DelayAction", Action, cg.Component)
|
|
||||||
LambdaAction = cg.esphome_ns.class_("LambdaAction", Action)
|
|
||||||
IfAction = cg.esphome_ns.class_("IfAction", Action)
|
|
||||||
WhileAction = cg.esphome_ns.class_("WhileAction", Action)
|
|
||||||
WaitUntilAction = cg.esphome_ns.class_("WaitUntilAction", Action, cg.Component)
|
|
||||||
UpdateComponentAction = cg.esphome_ns.class_("UpdateComponentAction", Action)
|
|
||||||
Automation = cg.esphome_ns.class_("Automation")
|
|
||||||
|
|
||||||
LambdaCondition = cg.esphome_ns.class_("LambdaCondition", Condition)
|
|
||||||
ForCondition = cg.esphome_ns.class_("ForCondition", Condition, cg.Component)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_automation(extra_schema=None, extra_validators=None, single=False):
|
|
||||||
if extra_schema is None:
|
|
||||||
extra_schema = {}
|
|
||||||
if isinstance(extra_schema, cv.Schema):
|
|
||||||
extra_schema = extra_schema.schema
|
|
||||||
schema = AUTOMATION_SCHEMA.extend(extra_schema)
|
|
||||||
|
|
||||||
def validator_(value):
|
|
||||||
if isinstance(value, list):
|
|
||||||
# List of items, there are two possible options here, either a sequence of
|
|
||||||
# actions (no then:) or a list of automations.
|
|
||||||
try:
|
|
||||||
# First try as a sequence of actions
|
|
||||||
# If that succeeds, return immediately
|
|
||||||
with cv.remove_prepend_path([CONF_THEN]):
|
|
||||||
return [schema({CONF_THEN: value})]
|
|
||||||
except cv.Invalid as err:
|
|
||||||
# Next try as a sequence of automations
|
|
||||||
try:
|
|
||||||
return cv.Schema([schema])(value)
|
|
||||||
except cv.Invalid as err2:
|
|
||||||
if "extra keys not allowed" in str(err2) and len(err2.path) == 2:
|
|
||||||
# pylint: disable=raise-missing-from
|
|
||||||
raise err
|
|
||||||
if "Unable to find action" in str(err):
|
|
||||||
raise err2
|
|
||||||
raise cv.MultipleInvalid([err, err2])
|
|
||||||
elif isinstance(value, dict):
|
|
||||||
if CONF_THEN in value:
|
|
||||||
return [schema(value)]
|
|
||||||
with cv.remove_prepend_path([CONF_THEN]):
|
|
||||||
return [schema({CONF_THEN: value})]
|
|
||||||
# This should only happen with invalid configs, but let's have a nice error message.
|
|
||||||
return [schema(value)]
|
|
||||||
|
|
||||||
@jschema_extractor("automation")
|
|
||||||
def validator(value):
|
|
||||||
# hack to get the schema
|
|
||||||
# pylint: disable=comparison-with-callable
|
|
||||||
if value == jschema_extractor:
|
|
||||||
return schema
|
|
||||||
|
|
||||||
value = validator_(value)
|
|
||||||
if extra_validators is not None:
|
|
||||||
value = cv.Schema([extra_validators])(value)
|
|
||||||
if single:
|
|
||||||
if len(value) != 1:
|
|
||||||
raise cv.Invalid("Cannot have more than 1 automation for templates")
|
|
||||||
return value[0]
|
|
||||||
return value
|
|
||||||
|
|
||||||
return validator
|
|
||||||
|
|
||||||
|
|
||||||
AUTOMATION_SCHEMA = cv.Schema(
|
|
||||||
{
|
|
||||||
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(Trigger),
|
|
||||||
cv.GenerateID(CONF_AUTOMATION_ID): cv.declare_id(Automation),
|
|
||||||
cv.Required(CONF_THEN): validate_action_list,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
AndCondition = cg.esphome_ns.class_("AndCondition", Condition)
|
|
||||||
OrCondition = cg.esphome_ns.class_("OrCondition", Condition)
|
|
||||||
NotCondition = cg.esphome_ns.class_("NotCondition", Condition)
|
|
||||||
|
|
||||||
|
|
||||||
@register_condition("and", AndCondition, validate_condition_list)
|
|
||||||
def and_condition_to_code(config, condition_id, template_arg, args):
|
|
||||||
conditions = yield build_condition_list(config, template_arg, args)
|
|
||||||
yield cg.new_Pvariable(condition_id, template_arg, conditions)
|
|
||||||
|
|
||||||
|
|
||||||
@register_condition("or", OrCondition, validate_condition_list)
|
|
||||||
def or_condition_to_code(config, condition_id, template_arg, args):
|
|
||||||
conditions = yield build_condition_list(config, template_arg, args)
|
|
||||||
yield cg.new_Pvariable(condition_id, template_arg, conditions)
|
|
||||||
|
|
||||||
|
|
||||||
@register_condition("not", NotCondition, validate_potentially_and_condition)
|
|
||||||
def not_condition_to_code(config, condition_id, template_arg, args):
|
|
||||||
condition = yield build_condition(config, template_arg, args)
|
|
||||||
yield cg.new_Pvariable(condition_id, template_arg, condition)
|
|
||||||
|
|
||||||
|
|
||||||
@register_condition("lambda", LambdaCondition, cv.lambda_)
|
|
||||||
def lambda_condition_to_code(config, condition_id, template_arg, args):
|
|
||||||
lambda_ = yield cg.process_lambda(config, args, return_type=bool)
|
|
||||||
yield cg.new_Pvariable(condition_id, template_arg, lambda_)
|
|
||||||
|
|
||||||
|
|
||||||
@register_condition(
|
|
||||||
"for",
|
|
||||||
ForCondition,
|
|
||||||
cv.Schema(
|
|
||||||
{
|
|
||||||
cv.Required(CONF_TIME): cv.templatable(
|
|
||||||
cv.positive_time_period_milliseconds
|
|
||||||
),
|
|
||||||
cv.Required(CONF_CONDITION): validate_potentially_and_condition,
|
|
||||||
}
|
|
||||||
).extend(cv.COMPONENT_SCHEMA),
|
|
||||||
)
|
|
||||||
def for_condition_to_code(config, condition_id, template_arg, args):
|
|
||||||
condition = yield build_condition(
|
|
||||||
config[CONF_CONDITION], cg.TemplateArguments(), []
|
|
||||||
)
|
|
||||||
var = cg.new_Pvariable(condition_id, template_arg, condition)
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
templ = yield cg.templatable(config[CONF_TIME], args, cg.uint32)
|
|
||||||
cg.add(var.set_time(templ))
|
|
||||||
yield var
|
|
||||||
|
|
||||||
|
|
||||||
@register_action(
|
|
||||||
"delay", DelayAction, cv.templatable(cv.positive_time_period_milliseconds)
|
|
||||||
)
|
|
||||||
def delay_action_to_code(config, action_id, template_arg, args):
|
|
||||||
var = cg.new_Pvariable(action_id, template_arg)
|
|
||||||
yield cg.register_component(var, {})
|
|
||||||
template_ = yield cg.templatable(config, args, cg.uint32)
|
|
||||||
cg.add(var.set_delay(template_))
|
|
||||||
yield var
|
|
||||||
|
|
||||||
|
|
||||||
@register_action(
|
|
||||||
"if",
|
|
||||||
IfAction,
|
|
||||||
cv.All(
|
|
||||||
{
|
|
||||||
cv.Required(CONF_CONDITION): validate_potentially_and_condition,
|
|
||||||
cv.Optional(CONF_THEN): validate_action_list,
|
|
||||||
cv.Optional(CONF_ELSE): validate_action_list,
|
|
||||||
},
|
|
||||||
cv.has_at_least_one_key(CONF_THEN, CONF_ELSE),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
def if_action_to_code(config, action_id, template_arg, args):
|
|
||||||
conditions = yield build_condition(config[CONF_CONDITION], template_arg, args)
|
|
||||||
var = cg.new_Pvariable(action_id, template_arg, conditions)
|
|
||||||
if CONF_THEN in config:
|
|
||||||
actions = yield build_action_list(config[CONF_THEN], template_arg, args)
|
|
||||||
cg.add(var.add_then(actions))
|
|
||||||
if CONF_ELSE in config:
|
|
||||||
actions = yield build_action_list(config[CONF_ELSE], template_arg, args)
|
|
||||||
cg.add(var.add_else(actions))
|
|
||||||
yield var
|
|
||||||
|
|
||||||
|
|
||||||
@register_action(
|
|
||||||
"while",
|
|
||||||
WhileAction,
|
|
||||||
cv.Schema(
|
|
||||||
{
|
|
||||||
cv.Required(CONF_CONDITION): validate_potentially_and_condition,
|
|
||||||
cv.Required(CONF_THEN): validate_action_list,
|
|
||||||
}
|
|
||||||
),
|
|
||||||
)
|
|
||||||
def while_action_to_code(config, action_id, template_arg, args):
|
|
||||||
conditions = yield build_condition(config[CONF_CONDITION], template_arg, args)
|
|
||||||
var = cg.new_Pvariable(action_id, template_arg, conditions)
|
|
||||||
actions = yield build_action_list(config[CONF_THEN], template_arg, args)
|
|
||||||
cg.add(var.add_then(actions))
|
|
||||||
yield var
|
|
||||||
|
|
||||||
|
|
||||||
def validate_wait_until(value):
|
|
||||||
schema = cv.Schema(
|
|
||||||
{
|
|
||||||
cv.Required(CONF_CONDITION): validate_potentially_and_condition,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
if isinstance(value, dict) and CONF_CONDITION in value:
|
|
||||||
return schema(value)
|
|
||||||
return validate_wait_until({CONF_CONDITION: value})
|
|
||||||
|
|
||||||
|
|
||||||
@register_action("wait_until", WaitUntilAction, validate_wait_until)
|
|
||||||
def wait_until_action_to_code(config, action_id, template_arg, args):
|
|
||||||
conditions = yield build_condition(config[CONF_CONDITION], template_arg, args)
|
|
||||||
var = cg.new_Pvariable(action_id, template_arg, conditions)
|
|
||||||
yield cg.register_component(var, {})
|
|
||||||
yield var
|
|
||||||
|
|
||||||
|
|
||||||
@register_action("lambda", LambdaAction, cv.lambda_)
|
|
||||||
def lambda_action_to_code(config, action_id, template_arg, args):
|
|
||||||
lambda_ = yield cg.process_lambda(config, args, return_type=cg.void)
|
|
||||||
yield cg.new_Pvariable(action_id, template_arg, lambda_)
|
|
||||||
|
|
||||||
|
|
||||||
@register_action(
|
|
||||||
"component.update",
|
|
||||||
UpdateComponentAction,
|
|
||||||
maybe_simple_id(
|
|
||||||
{
|
|
||||||
cv.Required(CONF_ID): cv.use_id(cg.PollingComponent),
|
|
||||||
}
|
|
||||||
),
|
|
||||||
)
|
|
||||||
def component_update_action_to_code(config, action_id, template_arg, args):
|
|
||||||
comp = yield cg.get_variable(config[CONF_ID])
|
|
||||||
yield cg.new_Pvariable(action_id, template_arg, comp)
|
|
||||||
|
|
||||||
|
|
||||||
@coroutine
|
|
||||||
def build_action(full_config, template_arg, args):
|
|
||||||
registry_entry, config = cg.extract_registry_entry_config(
|
|
||||||
ACTION_REGISTRY, full_config
|
|
||||||
)
|
|
||||||
action_id = full_config[CONF_TYPE_ID]
|
|
||||||
builder = registry_entry.coroutine_fun
|
|
||||||
yield builder(config, action_id, template_arg, args)
|
|
||||||
|
|
||||||
|
|
||||||
@coroutine
|
|
||||||
def build_action_list(config, templ, arg_type):
|
|
||||||
actions = []
|
|
||||||
for conf in config:
|
|
||||||
action = yield build_action(conf, templ, arg_type)
|
|
||||||
actions.append(action)
|
|
||||||
yield actions
|
|
||||||
|
|
||||||
|
|
||||||
@coroutine
|
|
||||||
def build_condition(full_config, template_arg, args):
|
|
||||||
registry_entry, config = cg.extract_registry_entry_config(
|
|
||||||
CONDITION_REGISTRY, full_config
|
|
||||||
)
|
|
||||||
action_id = full_config[CONF_TYPE_ID]
|
|
||||||
builder = registry_entry.coroutine_fun
|
|
||||||
yield builder(config, action_id, template_arg, args)
|
|
||||||
|
|
||||||
|
|
||||||
@coroutine
|
|
||||||
def build_condition_list(config, templ, args):
|
|
||||||
conditions = []
|
|
||||||
for conf in config:
|
|
||||||
condition = yield build_condition(conf, templ, args)
|
|
||||||
conditions.append(condition)
|
|
||||||
yield conditions
|
|
||||||
|
|
||||||
|
|
||||||
@coroutine
|
|
||||||
def build_automation(trigger, args, config):
|
|
||||||
arg_types = [arg[0] for arg in args]
|
|
||||||
templ = cg.TemplateArguments(*arg_types)
|
|
||||||
obj = cg.new_Pvariable(config[CONF_AUTOMATION_ID], templ, trigger)
|
|
||||||
actions = yield build_action_list(config[CONF_THEN], templ, args)
|
|
||||||
cg.add(obj.add_actions(actions))
|
|
||||||
yield obj
|
|
||||||
@@ -1,79 +0,0 @@
|
|||||||
# Base file for all codegen-related imports
|
|
||||||
# All integrations should have a line in the import section like this
|
|
||||||
#
|
|
||||||
# >>> import esphome.codegen as cg
|
|
||||||
#
|
|
||||||
# Integrations should specifically *NOT* import directly from the
|
|
||||||
# other helper modules (cpp_generator etc) directly if they don't
|
|
||||||
# want to break suddenly due to a rename (this file will get backports for features).
|
|
||||||
|
|
||||||
# pylint: disable=unused-import
|
|
||||||
from esphome.cpp_generator import ( # noqa
|
|
||||||
Expression,
|
|
||||||
RawExpression,
|
|
||||||
RawStatement,
|
|
||||||
TemplateArguments,
|
|
||||||
StructInitializer,
|
|
||||||
ArrayInitializer,
|
|
||||||
safe_exp,
|
|
||||||
Statement,
|
|
||||||
LineComment,
|
|
||||||
progmem_array,
|
|
||||||
statement,
|
|
||||||
variable,
|
|
||||||
new_variable,
|
|
||||||
Pvariable,
|
|
||||||
new_Pvariable,
|
|
||||||
add,
|
|
||||||
add_global,
|
|
||||||
add_library,
|
|
||||||
add_build_flag,
|
|
||||||
add_define,
|
|
||||||
get_variable,
|
|
||||||
get_variable_with_full_id,
|
|
||||||
process_lambda,
|
|
||||||
is_template,
|
|
||||||
templatable,
|
|
||||||
MockObj,
|
|
||||||
MockObjClass,
|
|
||||||
)
|
|
||||||
from esphome.cpp_helpers import ( # noqa
|
|
||||||
gpio_pin_expression,
|
|
||||||
register_component,
|
|
||||||
build_registry_entry,
|
|
||||||
build_registry_list,
|
|
||||||
extract_registry_entry_config,
|
|
||||||
register_parented,
|
|
||||||
)
|
|
||||||
from esphome.cpp_types import ( # noqa
|
|
||||||
global_ns,
|
|
||||||
void,
|
|
||||||
nullptr,
|
|
||||||
float_,
|
|
||||||
double,
|
|
||||||
bool_,
|
|
||||||
int_,
|
|
||||||
std_ns,
|
|
||||||
std_string,
|
|
||||||
std_vector,
|
|
||||||
uint8,
|
|
||||||
uint16,
|
|
||||||
uint32,
|
|
||||||
int32,
|
|
||||||
const_char_ptr,
|
|
||||||
NAN,
|
|
||||||
esphome_ns,
|
|
||||||
App,
|
|
||||||
Nameable,
|
|
||||||
Component,
|
|
||||||
ComponentPtr,
|
|
||||||
PollingComponent,
|
|
||||||
Application,
|
|
||||||
optional,
|
|
||||||
arduino_json_ns,
|
|
||||||
JsonObject,
|
|
||||||
JsonObjectRef,
|
|
||||||
JsonObjectConstRef,
|
|
||||||
Controller,
|
|
||||||
GPIOPin,
|
|
||||||
)
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
#include "a4988.h"
|
|
||||||
#include "esphome/core/log.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace a4988 {
|
|
||||||
|
|
||||||
static const char *TAG = "a4988.stepper";
|
|
||||||
|
|
||||||
void A4988::setup() {
|
|
||||||
ESP_LOGCONFIG(TAG, "Setting up A4988...");
|
|
||||||
if (this->sleep_pin_ != nullptr) {
|
|
||||||
this->sleep_pin_->setup();
|
|
||||||
this->sleep_pin_->digital_write(false);
|
|
||||||
this->sleep_pin_state_ = false;
|
|
||||||
}
|
|
||||||
this->step_pin_->setup();
|
|
||||||
this->step_pin_->digital_write(false);
|
|
||||||
this->dir_pin_->setup();
|
|
||||||
this->dir_pin_->digital_write(false);
|
|
||||||
}
|
|
||||||
void A4988::dump_config() {
|
|
||||||
ESP_LOGCONFIG(TAG, "A4988:");
|
|
||||||
LOG_PIN(" Step Pin: ", this->step_pin_);
|
|
||||||
LOG_PIN(" Dir Pin: ", this->dir_pin_);
|
|
||||||
LOG_PIN(" Sleep Pin: ", this->sleep_pin_);
|
|
||||||
LOG_STEPPER(this);
|
|
||||||
}
|
|
||||||
void A4988::loop() {
|
|
||||||
bool at_target = this->has_reached_target();
|
|
||||||
if (this->sleep_pin_ != nullptr) {
|
|
||||||
bool sleep_rising_edge = !sleep_pin_state_ & !at_target;
|
|
||||||
this->sleep_pin_->digital_write(!at_target);
|
|
||||||
this->sleep_pin_state_ = !at_target;
|
|
||||||
if (sleep_rising_edge) {
|
|
||||||
delayMicroseconds(1000);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (at_target) {
|
|
||||||
this->high_freq_.stop();
|
|
||||||
} else {
|
|
||||||
this->high_freq_.start();
|
|
||||||
}
|
|
||||||
|
|
||||||
int32_t dir = this->should_step_();
|
|
||||||
if (dir == 0)
|
|
||||||
return;
|
|
||||||
|
|
||||||
this->dir_pin_->digital_write(dir == 1);
|
|
||||||
this->step_pin_->digital_write(true);
|
|
||||||
delayMicroseconds(5);
|
|
||||||
this->step_pin_->digital_write(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace a4988
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "esphome/core/component.h"
|
|
||||||
#include "esphome/core/esphal.h"
|
|
||||||
#include "esphome/components/stepper/stepper.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace a4988 {
|
|
||||||
|
|
||||||
class A4988 : public stepper::Stepper, public Component {
|
|
||||||
public:
|
|
||||||
void set_step_pin(GPIOPin *step_pin) { step_pin_ = step_pin; }
|
|
||||||
void set_dir_pin(GPIOPin *dir_pin) { dir_pin_ = dir_pin; }
|
|
||||||
void set_sleep_pin(GPIOPin *sleep_pin) { this->sleep_pin_ = sleep_pin; }
|
|
||||||
void setup() override;
|
|
||||||
void dump_config() override;
|
|
||||||
void loop() override;
|
|
||||||
float get_setup_priority() const override { return setup_priority::HARDWARE; }
|
|
||||||
|
|
||||||
protected:
|
|
||||||
GPIOPin *step_pin_;
|
|
||||||
GPIOPin *dir_pin_;
|
|
||||||
GPIOPin *sleep_pin_{nullptr};
|
|
||||||
bool sleep_pin_state_;
|
|
||||||
HighFrequencyLoopRequester high_freq_;
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace a4988
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
from esphome import pins
|
|
||||||
from esphome.components import stepper
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
import esphome.codegen as cg
|
|
||||||
from esphome.const import CONF_DIR_PIN, CONF_ID, CONF_SLEEP_PIN, CONF_STEP_PIN
|
|
||||||
|
|
||||||
|
|
||||||
a4988_ns = cg.esphome_ns.namespace("a4988")
|
|
||||||
A4988 = a4988_ns.class_("A4988", stepper.Stepper, cg.Component)
|
|
||||||
|
|
||||||
CONFIG_SCHEMA = stepper.STEPPER_SCHEMA.extend(
|
|
||||||
{
|
|
||||||
cv.Required(CONF_ID): cv.declare_id(A4988),
|
|
||||||
cv.Required(CONF_STEP_PIN): pins.gpio_output_pin_schema,
|
|
||||||
cv.Required(CONF_DIR_PIN): pins.gpio_output_pin_schema,
|
|
||||||
cv.Optional(CONF_SLEEP_PIN): pins.gpio_output_pin_schema,
|
|
||||||
}
|
|
||||||
).extend(cv.COMPONENT_SCHEMA)
|
|
||||||
|
|
||||||
|
|
||||||
def to_code(config):
|
|
||||||
var = cg.new_Pvariable(config[CONF_ID])
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
yield stepper.register_stepper(var, config)
|
|
||||||
|
|
||||||
step_pin = yield cg.gpio_pin_expression(config[CONF_STEP_PIN])
|
|
||||||
cg.add(var.set_step_pin(step_pin))
|
|
||||||
dir_pin = yield cg.gpio_pin_expression(config[CONF_DIR_PIN])
|
|
||||||
cg.add(var.set_dir_pin(dir_pin))
|
|
||||||
|
|
||||||
if CONF_SLEEP_PIN in config:
|
|
||||||
sleep_pin = yield cg.gpio_pin_expression(config[CONF_SLEEP_PIN])
|
|
||||||
cg.add(var.set_sleep_pin(sleep_pin))
|
|
||||||
@@ -1,217 +0,0 @@
|
|||||||
#include "ac_dimmer.h"
|
|
||||||
#include "esphome/core/helpers.h"
|
|
||||||
#include "esphome/core/log.h"
|
|
||||||
|
|
||||||
#ifdef ARDUINO_ARCH_ESP8266
|
|
||||||
#include <core_esp8266_waveform.h>
|
|
||||||
#endif
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace ac_dimmer {
|
|
||||||
|
|
||||||
static const char *TAG = "ac_dimmer";
|
|
||||||
|
|
||||||
// Global array to store dimmer objects
|
|
||||||
static AcDimmerDataStore *all_dimmers[32];
|
|
||||||
|
|
||||||
/// Time in microseconds the gate should be held high
|
|
||||||
/// 10µs should be long enough for most triacs
|
|
||||||
/// For reference: BT136 datasheet says 2µs nominal (page 7)
|
|
||||||
static uint32_t GATE_ENABLE_TIME = 10;
|
|
||||||
|
|
||||||
/// Function called from timer interrupt
|
|
||||||
/// Input is current time in microseconds (micros())
|
|
||||||
/// Returns when next "event" is expected in µs, or 0 if no such event known.
|
|
||||||
uint32_t ICACHE_RAM_ATTR HOT AcDimmerDataStore::timer_intr(uint32_t now) {
|
|
||||||
// If no ZC signal received yet.
|
|
||||||
if (this->crossed_zero_at == 0)
|
|
||||||
return 0;
|
|
||||||
|
|
||||||
uint32_t time_since_zc = now - this->crossed_zero_at;
|
|
||||||
if (this->value == 65535 || this->value == 0) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this->enable_time_us != 0 && time_since_zc >= this->enable_time_us) {
|
|
||||||
this->enable_time_us = 0;
|
|
||||||
this->gate_pin->digital_write(true);
|
|
||||||
// Prevent too short pulses
|
|
||||||
this->disable_time_us = max(this->disable_time_us, time_since_zc + GATE_ENABLE_TIME);
|
|
||||||
}
|
|
||||||
if (this->disable_time_us != 0 && time_since_zc >= this->disable_time_us) {
|
|
||||||
this->disable_time_us = 0;
|
|
||||||
this->gate_pin->digital_write(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (time_since_zc < this->enable_time_us)
|
|
||||||
// Next event is enable, return time until that event
|
|
||||||
return this->enable_time_us - time_since_zc;
|
|
||||||
else if (time_since_zc < disable_time_us) {
|
|
||||||
// Next event is disable, return time until that event
|
|
||||||
return this->disable_time_us - time_since_zc;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (time_since_zc >= this->cycle_time_us) {
|
|
||||||
// Already past last cycle time, schedule next call shortly
|
|
||||||
return 100;
|
|
||||||
}
|
|
||||||
|
|
||||||
return this->cycle_time_us - time_since_zc;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Run timer interrupt code and return in how many µs the next event is expected
|
|
||||||
uint32_t ICACHE_RAM_ATTR HOT timer_interrupt() {
|
|
||||||
// run at least with 1kHz
|
|
||||||
uint32_t min_dt_us = 1000;
|
|
||||||
uint32_t now = micros();
|
|
||||||
for (auto *dimmer : all_dimmers) {
|
|
||||||
if (dimmer == nullptr)
|
|
||||||
// no more dimmers
|
|
||||||
break;
|
|
||||||
uint32_t res = dimmer->timer_intr(now);
|
|
||||||
if (res != 0 && res < min_dt_us)
|
|
||||||
min_dt_us = res;
|
|
||||||
}
|
|
||||||
// return time until next timer1 interrupt in µs
|
|
||||||
return min_dt_us;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// GPIO interrupt routine, called when ZC pin triggers
|
|
||||||
void ICACHE_RAM_ATTR HOT AcDimmerDataStore::gpio_intr() {
|
|
||||||
uint32_t prev_crossed = this->crossed_zero_at;
|
|
||||||
|
|
||||||
// 50Hz mains frequency should give a half cycle of 10ms a 60Hz will give 8.33ms
|
|
||||||
// in any case the cycle last at least 5ms
|
|
||||||
this->crossed_zero_at = micros();
|
|
||||||
uint32_t cycle_time = this->crossed_zero_at - prev_crossed;
|
|
||||||
if (cycle_time > 5000) {
|
|
||||||
this->cycle_time_us = cycle_time;
|
|
||||||
} else {
|
|
||||||
// Otherwise this is noise and this is 2nd (or 3rd...) fall in the same pulse
|
|
||||||
// Consider this is the right fall edge and accumulate the cycle time instead
|
|
||||||
this->cycle_time_us += cycle_time;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this->value == 65535) {
|
|
||||||
// fully on, enable output immediately
|
|
||||||
this->gate_pin->digital_write(true);
|
|
||||||
} else if (this->init_cycle) {
|
|
||||||
// send a full cycle
|
|
||||||
this->init_cycle = false;
|
|
||||||
this->enable_time_us = 0;
|
|
||||||
this->disable_time_us = cycle_time_us;
|
|
||||||
} else if (this->value == 0) {
|
|
||||||
// fully off, disable output immediately
|
|
||||||
this->gate_pin->digital_write(false);
|
|
||||||
} else {
|
|
||||||
if (this->method == DIM_METHOD_TRAILING) {
|
|
||||||
this->enable_time_us = 1; // cannot be 0
|
|
||||||
this->disable_time_us = max((uint32_t) 10, this->value * this->cycle_time_us / 65535);
|
|
||||||
} else {
|
|
||||||
// calculate time until enable in µs: (1.0-value)*cycle_time, but with integer arithmetic
|
|
||||||
// also take into account min_power
|
|
||||||
auto min_us = this->cycle_time_us * this->min_power / 1000;
|
|
||||||
this->enable_time_us = max((uint32_t) 1, ((65535 - this->value) * (this->cycle_time_us - min_us)) / 65535);
|
|
||||||
if (this->method == DIM_METHOD_LEADING_PULSE) {
|
|
||||||
// Minimum pulse time should be enough for the triac to trigger when it is close to the ZC zone
|
|
||||||
// this is for brightness near 99%
|
|
||||||
this->disable_time_us = max(this->enable_time_us + GATE_ENABLE_TIME, (uint32_t) cycle_time_us / 10);
|
|
||||||
} else {
|
|
||||||
this->gate_pin->digital_write(false);
|
|
||||||
this->disable_time_us = this->cycle_time_us;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void ICACHE_RAM_ATTR HOT AcDimmerDataStore::s_gpio_intr(AcDimmerDataStore *store) {
|
|
||||||
// Attaching pin interrupts on the same pin will override the previous interupt
|
|
||||||
// However, the user expects that multiple dimmers sharing the same ZC pin will work.
|
|
||||||
// We solve this in a bit of a hacky way: On each pin interrupt, we check all dimmers
|
|
||||||
// if any of them are using the same ZC pin, and also trigger the interrupt for *them*.
|
|
||||||
for (auto *dimmer : all_dimmers) {
|
|
||||||
if (dimmer == nullptr)
|
|
||||||
break;
|
|
||||||
if (dimmer->zero_cross_pin_number == store->zero_cross_pin_number) {
|
|
||||||
dimmer->gpio_intr();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#ifdef ARDUINO_ARCH_ESP32
|
|
||||||
// ESP32 implementation, uses basically the same code but needs to wrap
|
|
||||||
// timer_interrupt() function to auto-reschedule
|
|
||||||
static hw_timer_t *dimmer_timer = nullptr;
|
|
||||||
void ICACHE_RAM_ATTR HOT AcDimmerDataStore::s_timer_intr() { timer_interrupt(); }
|
|
||||||
#endif
|
|
||||||
|
|
||||||
void AcDimmer::setup() {
|
|
||||||
// extend all_dimmers array with our dimmer
|
|
||||||
|
|
||||||
// Need to be sure the zero cross pin is setup only once, ESP8266 fails and ESP32 seems to fail silently
|
|
||||||
auto setup_zero_cross_pin = true;
|
|
||||||
|
|
||||||
for (auto &all_dimmer : all_dimmers) {
|
|
||||||
if (all_dimmer == nullptr) {
|
|
||||||
all_dimmer = &this->store_;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if (all_dimmer->zero_cross_pin_number == this->zero_cross_pin_->get_pin()) {
|
|
||||||
setup_zero_cross_pin = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
this->gate_pin_->setup();
|
|
||||||
this->store_.gate_pin = this->gate_pin_->to_isr();
|
|
||||||
this->store_.zero_cross_pin_number = this->zero_cross_pin_->get_pin();
|
|
||||||
this->store_.min_power = static_cast<uint16_t>(this->min_power_ * 1000);
|
|
||||||
this->min_power_ = 0;
|
|
||||||
this->store_.method = this->method_;
|
|
||||||
|
|
||||||
if (setup_zero_cross_pin) {
|
|
||||||
this->zero_cross_pin_->setup();
|
|
||||||
this->store_.zero_cross_pin = this->zero_cross_pin_->to_isr();
|
|
||||||
this->zero_cross_pin_->attach_interrupt(&AcDimmerDataStore::s_gpio_intr, &this->store_, FALLING);
|
|
||||||
}
|
|
||||||
|
|
||||||
#ifdef ARDUINO_ARCH_ESP8266
|
|
||||||
// Uses ESP8266 waveform (soft PWM) class
|
|
||||||
// PWM and AcDimmer can even run at the same time this way
|
|
||||||
setTimer1Callback(&timer_interrupt);
|
|
||||||
#endif
|
|
||||||
#ifdef ARDUINO_ARCH_ESP32
|
|
||||||
// 80 Divider -> 1 count=1µs
|
|
||||||
dimmer_timer = timerBegin(0, 80, true);
|
|
||||||
timerAttachInterrupt(dimmer_timer, &AcDimmerDataStore::s_timer_intr, true);
|
|
||||||
// For ESP32, we can't use dynamic interval calculation because the timerX functions
|
|
||||||
// are not callable from ISR (placed in flash storage).
|
|
||||||
// Here we just use an interrupt firing every 50 µs.
|
|
||||||
timerAlarmWrite(dimmer_timer, 50, true);
|
|
||||||
timerAlarmEnable(dimmer_timer);
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
void AcDimmer::write_state(float state) {
|
|
||||||
auto new_value = static_cast<uint16_t>(roundf(state * 65535));
|
|
||||||
if (new_value != 0 && this->store_.value == 0)
|
|
||||||
this->store_.init_cycle = this->init_with_half_cycle_;
|
|
||||||
this->store_.value = new_value;
|
|
||||||
}
|
|
||||||
void AcDimmer::dump_config() {
|
|
||||||
ESP_LOGCONFIG(TAG, "AcDimmer:");
|
|
||||||
LOG_PIN(" Output Pin: ", this->gate_pin_);
|
|
||||||
LOG_PIN(" Zero-Cross Pin: ", this->zero_cross_pin_);
|
|
||||||
ESP_LOGCONFIG(TAG, " Min Power: %.1f%%", this->store_.min_power / 10.0f);
|
|
||||||
ESP_LOGCONFIG(TAG, " Init with half cycle: %s", YESNO(this->init_with_half_cycle_));
|
|
||||||
if (method_ == DIM_METHOD_LEADING_PULSE)
|
|
||||||
ESP_LOGCONFIG(TAG, " Method: leading pulse");
|
|
||||||
else if (method_ == DIM_METHOD_LEADING)
|
|
||||||
ESP_LOGCONFIG(TAG, " Method: leading");
|
|
||||||
else
|
|
||||||
ESP_LOGCONFIG(TAG, " Method: trailing");
|
|
||||||
|
|
||||||
LOG_FLOAT_OUTPUT(this);
|
|
||||||
ESP_LOGV(TAG, " Estimated Frequency: %.3fHz", 1e6f / this->store_.cycle_time_us / 2);
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace ac_dimmer
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "esphome/core/component.h"
|
|
||||||
#include "esphome/core/esphal.h"
|
|
||||||
#include "esphome/components/output/float_output.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace ac_dimmer {
|
|
||||||
|
|
||||||
enum DimMethod { DIM_METHOD_LEADING_PULSE = 0, DIM_METHOD_LEADING, DIM_METHOD_TRAILING };
|
|
||||||
|
|
||||||
struct AcDimmerDataStore {
|
|
||||||
/// Zero-cross pin
|
|
||||||
ISRInternalGPIOPin *zero_cross_pin;
|
|
||||||
/// Zero-cross pin number - used to share ZC pin across multiple dimmers
|
|
||||||
uint8_t zero_cross_pin_number;
|
|
||||||
/// Output pin to write to
|
|
||||||
ISRInternalGPIOPin *gate_pin;
|
|
||||||
/// Value of the dimmer - 0 to 65535.
|
|
||||||
uint16_t value;
|
|
||||||
/// Minimum power for activation
|
|
||||||
uint16_t min_power;
|
|
||||||
/// Time between the last two ZC pulses
|
|
||||||
uint32_t cycle_time_us;
|
|
||||||
/// Time (in micros()) of last ZC signal
|
|
||||||
uint32_t crossed_zero_at;
|
|
||||||
/// Time since last ZC pulse to enable gate pin. 0 means not set.
|
|
||||||
uint32_t enable_time_us;
|
|
||||||
/// Time since last ZC pulse to disable gate pin. 0 means no disable.
|
|
||||||
uint32_t disable_time_us;
|
|
||||||
/// Set to send the first half ac cycle complete
|
|
||||||
bool init_cycle;
|
|
||||||
/// Dimmer method
|
|
||||||
DimMethod method;
|
|
||||||
|
|
||||||
uint32_t timer_intr(uint32_t now);
|
|
||||||
|
|
||||||
void gpio_intr();
|
|
||||||
static void s_gpio_intr(AcDimmerDataStore *store);
|
|
||||||
#ifdef ARDUINO_ARCH_ESP32
|
|
||||||
static void s_timer_intr();
|
|
||||||
#endif
|
|
||||||
};
|
|
||||||
|
|
||||||
class AcDimmer : public output::FloatOutput, public Component {
|
|
||||||
public:
|
|
||||||
void setup() override;
|
|
||||||
|
|
||||||
void dump_config() override;
|
|
||||||
void set_gate_pin(GPIOPin *gate_pin) { gate_pin_ = gate_pin; }
|
|
||||||
void set_zero_cross_pin(GPIOPin *zero_cross_pin) { zero_cross_pin_ = zero_cross_pin; }
|
|
||||||
void set_init_with_half_cycle(bool init_with_half_cycle) { init_with_half_cycle_ = init_with_half_cycle; }
|
|
||||||
void set_method(DimMethod method) { method_ = method; }
|
|
||||||
|
|
||||||
protected:
|
|
||||||
void write_state(float state) override;
|
|
||||||
|
|
||||||
GPIOPin *gate_pin_;
|
|
||||||
GPIOPin *zero_cross_pin_;
|
|
||||||
AcDimmerDataStore store_;
|
|
||||||
bool init_with_half_cycle_;
|
|
||||||
DimMethod method_;
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace ac_dimmer
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,49 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome import pins
|
|
||||||
from esphome.components import output
|
|
||||||
from esphome.const import CONF_ID, CONF_MIN_POWER, CONF_METHOD
|
|
||||||
|
|
||||||
CODEOWNERS = ["@glmnet"]
|
|
||||||
|
|
||||||
ac_dimmer_ns = cg.esphome_ns.namespace("ac_dimmer")
|
|
||||||
AcDimmer = ac_dimmer_ns.class_("AcDimmer", output.FloatOutput, cg.Component)
|
|
||||||
|
|
||||||
DimMethod = ac_dimmer_ns.enum("DimMethod")
|
|
||||||
DIM_METHODS = {
|
|
||||||
"LEADING_PULSE": DimMethod.DIM_METHOD_LEADING_PULSE,
|
|
||||||
"LEADING": DimMethod.DIM_METHOD_LEADING,
|
|
||||||
"TRAILING": DimMethod.DIM_METHOD_TRAILING,
|
|
||||||
}
|
|
||||||
|
|
||||||
CONF_GATE_PIN = "gate_pin"
|
|
||||||
CONF_ZERO_CROSS_PIN = "zero_cross_pin"
|
|
||||||
CONF_INIT_WITH_HALF_CYCLE = "init_with_half_cycle"
|
|
||||||
CONFIG_SCHEMA = output.FLOAT_OUTPUT_SCHEMA.extend(
|
|
||||||
{
|
|
||||||
cv.Required(CONF_ID): cv.declare_id(AcDimmer),
|
|
||||||
cv.Required(CONF_GATE_PIN): pins.internal_gpio_output_pin_schema,
|
|
||||||
cv.Required(CONF_ZERO_CROSS_PIN): pins.internal_gpio_input_pin_schema,
|
|
||||||
cv.Optional(CONF_INIT_WITH_HALF_CYCLE, default=True): cv.boolean,
|
|
||||||
cv.Optional(CONF_METHOD, default="leading pulse"): cv.enum(
|
|
||||||
DIM_METHODS, upper=True, space="_"
|
|
||||||
),
|
|
||||||
}
|
|
||||||
).extend(cv.COMPONENT_SCHEMA)
|
|
||||||
|
|
||||||
|
|
||||||
def to_code(config):
|
|
||||||
var = cg.new_Pvariable(config[CONF_ID])
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
|
|
||||||
# override default min power to 10%
|
|
||||||
if CONF_MIN_POWER not in config:
|
|
||||||
config[CONF_MIN_POWER] = 0.1
|
|
||||||
yield output.register_output(var, config)
|
|
||||||
|
|
||||||
pin = yield cg.gpio_pin_expression(config[CONF_GATE_PIN])
|
|
||||||
cg.add(var.set_gate_pin(pin))
|
|
||||||
pin = yield cg.gpio_pin_expression(config[CONF_ZERO_CROSS_PIN])
|
|
||||||
cg.add(var.set_zero_cross_pin(pin))
|
|
||||||
cg.add(var.set_init_with_half_cycle(config[CONF_INIT_WITH_HALF_CYCLE]))
|
|
||||||
cg.add(var.set_method(config[CONF_METHOD]))
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome.components import uart
|
|
||||||
from esphome.components.light.types import AddressableLightEffect
|
|
||||||
from esphome.components.light.effects import register_addressable_effect
|
|
||||||
from esphome.const import CONF_NAME, CONF_UART_ID
|
|
||||||
|
|
||||||
DEPENDENCIES = ["uart"]
|
|
||||||
|
|
||||||
adalight_ns = cg.esphome_ns.namespace("adalight")
|
|
||||||
AdalightLightEffect = adalight_ns.class_(
|
|
||||||
"AdalightLightEffect", uart.UARTDevice, AddressableLightEffect
|
|
||||||
)
|
|
||||||
|
|
||||||
CONFIG_SCHEMA = cv.Schema({})
|
|
||||||
|
|
||||||
|
|
||||||
@register_addressable_effect(
|
|
||||||
"adalight",
|
|
||||||
AdalightLightEffect,
|
|
||||||
"Adalight",
|
|
||||||
{cv.GenerateID(CONF_UART_ID): cv.use_id(uart.UARTComponent)},
|
|
||||||
)
|
|
||||||
def adalight_light_effect_to_code(config, effect_id):
|
|
||||||
effect = cg.new_Pvariable(effect_id, config[CONF_NAME])
|
|
||||||
yield uart.register_uart_device(effect, config)
|
|
||||||
|
|
||||||
yield effect
|
|
||||||
@@ -1,140 +0,0 @@
|
|||||||
#include "adalight_light_effect.h"
|
|
||||||
#include "esphome/core/log.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace adalight {
|
|
||||||
|
|
||||||
static const char *TAG = "adalight_light_effect";
|
|
||||||
|
|
||||||
static const uint32_t ADALIGHT_ACK_INTERVAL = 1000;
|
|
||||||
static const uint32_t ADALIGHT_RECEIVE_TIMEOUT = 1000;
|
|
||||||
|
|
||||||
AdalightLightEffect::AdalightLightEffect(const std::string &name) : AddressableLightEffect(name) {}
|
|
||||||
|
|
||||||
void AdalightLightEffect::start() {
|
|
||||||
AddressableLightEffect::start();
|
|
||||||
|
|
||||||
last_ack_ = 0;
|
|
||||||
last_byte_ = 0;
|
|
||||||
last_reset_ = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
void AdalightLightEffect::stop() {
|
|
||||||
frame_.resize(0);
|
|
||||||
|
|
||||||
AddressableLightEffect::stop();
|
|
||||||
}
|
|
||||||
|
|
||||||
int AdalightLightEffect::get_frame_size_(int led_count) const {
|
|
||||||
// 3 bytes: Ada
|
|
||||||
// 2 bytes: LED count
|
|
||||||
// 1 byte: checksum
|
|
||||||
// 3 bytes per LED
|
|
||||||
return 3 + 2 + 1 + led_count * 3;
|
|
||||||
}
|
|
||||||
|
|
||||||
void AdalightLightEffect::reset_frame_(light::AddressableLight &it) {
|
|
||||||
int buffer_capacity = get_frame_size_(it.size());
|
|
||||||
|
|
||||||
frame_.clear();
|
|
||||||
frame_.reserve(buffer_capacity);
|
|
||||||
}
|
|
||||||
|
|
||||||
void AdalightLightEffect::blank_all_leds_(light::AddressableLight &it) {
|
|
||||||
for (int led = it.size(); led-- > 0;) {
|
|
||||||
it[led].set(COLOR_BLACK);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void AdalightLightEffect::apply(light::AddressableLight &it, const Color ¤t_color) {
|
|
||||||
const uint32_t now = millis();
|
|
||||||
|
|
||||||
if (now - this->last_ack_ >= ADALIGHT_ACK_INTERVAL) {
|
|
||||||
ESP_LOGV(TAG, "Sending ACK");
|
|
||||||
this->write_str("Ada\n");
|
|
||||||
this->last_ack_ = now;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!this->last_reset_) {
|
|
||||||
ESP_LOGW(TAG, "Frame: Reset.");
|
|
||||||
reset_frame_(it);
|
|
||||||
blank_all_leds_(it);
|
|
||||||
this->last_reset_ = now;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!this->frame_.empty() && now - this->last_byte_ >= ADALIGHT_RECEIVE_TIMEOUT) {
|
|
||||||
ESP_LOGW(TAG, "Frame: Receive timeout (size=%zu).", this->frame_.size());
|
|
||||||
reset_frame_(it);
|
|
||||||
blank_all_leds_(it);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this->available() > 0) {
|
|
||||||
ESP_LOGV(TAG, "Frame: Available (size=%d).", this->available());
|
|
||||||
}
|
|
||||||
|
|
||||||
while (this->available() != 0) {
|
|
||||||
uint8_t data;
|
|
||||||
if (!this->read_byte(&data))
|
|
||||||
break;
|
|
||||||
this->frame_.push_back(data);
|
|
||||||
this->last_byte_ = now;
|
|
||||||
|
|
||||||
switch (this->parse_frame_(it)) {
|
|
||||||
case INVALID:
|
|
||||||
ESP_LOGD(TAG, "Frame: Invalid (size=%zu, first=%d).", this->frame_.size(), this->frame_[0]);
|
|
||||||
reset_frame_(it);
|
|
||||||
break;
|
|
||||||
|
|
||||||
case PARTIAL:
|
|
||||||
break;
|
|
||||||
|
|
||||||
case CONSUMED:
|
|
||||||
ESP_LOGV(TAG, "Frame: Consumed (size=%zu).", this->frame_.size());
|
|
||||||
reset_frame_(it);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
AdalightLightEffect::Frame AdalightLightEffect::parse_frame_(light::AddressableLight &it) {
|
|
||||||
if (frame_.empty())
|
|
||||||
return INVALID;
|
|
||||||
|
|
||||||
// Check header: `Ada`
|
|
||||||
if (frame_[0] != 'A')
|
|
||||||
return INVALID;
|
|
||||||
if (frame_.size() > 1 && frame_[1] != 'd')
|
|
||||||
return INVALID;
|
|
||||||
if (frame_.size() > 2 && frame_[2] != 'a')
|
|
||||||
return INVALID;
|
|
||||||
|
|
||||||
// 3 bytes: Count Hi, Count Lo, Checksum
|
|
||||||
if (frame_.size() < 6)
|
|
||||||
return PARTIAL;
|
|
||||||
|
|
||||||
// Check checksum
|
|
||||||
uint16_t checksum = frame_[3] ^ frame_[4] ^ 0x55;
|
|
||||||
if (checksum != frame_[5])
|
|
||||||
return INVALID;
|
|
||||||
|
|
||||||
// Check if we received the full frame
|
|
||||||
uint16_t led_count = (frame_[3] << 8) + frame_[4] + 1;
|
|
||||||
auto buffer_size = get_frame_size_(led_count);
|
|
||||||
if (frame_.size() < buffer_size)
|
|
||||||
return PARTIAL;
|
|
||||||
|
|
||||||
// Apply lights
|
|
||||||
auto accepted_led_count = std::min<int>(led_count, it.size());
|
|
||||||
uint8_t *led_data = &frame_[6];
|
|
||||||
|
|
||||||
for (int led = 0; led < accepted_led_count; led++, led_data += 3) {
|
|
||||||
auto white = std::min(std::min(led_data[0], led_data[1]), led_data[2]);
|
|
||||||
|
|
||||||
it[led].set(Color(led_data[0], led_data[1], led_data[2], white));
|
|
||||||
}
|
|
||||||
|
|
||||||
return CONSUMED;
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace adalight
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "esphome/core/component.h"
|
|
||||||
#include "esphome/components/light/addressable_light_effect.h"
|
|
||||||
#include "esphome/components/uart/uart.h"
|
|
||||||
|
|
||||||
#include <vector>
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace adalight {
|
|
||||||
|
|
||||||
class AdalightLightEffect : public light::AddressableLightEffect, public uart::UARTDevice {
|
|
||||||
public:
|
|
||||||
AdalightLightEffect(const std::string &name);
|
|
||||||
|
|
||||||
public:
|
|
||||||
void start() override;
|
|
||||||
void stop() override;
|
|
||||||
void apply(light::AddressableLight &it, const Color ¤t_color) override;
|
|
||||||
|
|
||||||
protected:
|
|
||||||
enum Frame {
|
|
||||||
INVALID,
|
|
||||||
PARTIAL,
|
|
||||||
CONSUMED,
|
|
||||||
};
|
|
||||||
|
|
||||||
int get_frame_size_(int led_count) const;
|
|
||||||
void reset_frame_(light::AddressableLight &it);
|
|
||||||
void blank_all_leds_(light::AddressableLight &it);
|
|
||||||
Frame parse_frame_(light::AddressableLight &it);
|
|
||||||
|
|
||||||
protected:
|
|
||||||
uint32_t last_ack_{0};
|
|
||||||
uint32_t last_byte_{0};
|
|
||||||
uint32_t last_reset_{0};
|
|
||||||
std::vector<uint8_t> frame_;
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace adalight
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
CODEOWNERS = ["@esphome/core"]
|
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
#include "adc_sensor.h"
|
|
||||||
#include "esphome/core/log.h"
|
|
||||||
|
|
||||||
#ifdef USE_ADC_SENSOR_VCC
|
|
||||||
ADC_MODE(ADC_VCC)
|
|
||||||
#endif
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace adc {
|
|
||||||
|
|
||||||
static const char *TAG = "adc";
|
|
||||||
|
|
||||||
#ifdef ARDUINO_ARCH_ESP32
|
|
||||||
void ADCSensor::set_attenuation(adc_attenuation_t attenuation) { this->attenuation_ = attenuation; }
|
|
||||||
#endif
|
|
||||||
|
|
||||||
void ADCSensor::setup() {
|
|
||||||
ESP_LOGCONFIG(TAG, "Setting up ADC '%s'...", this->get_name().c_str());
|
|
||||||
#ifndef USE_ADC_SENSOR_VCC
|
|
||||||
GPIOPin(this->pin_, INPUT).setup();
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#ifdef ARDUINO_ARCH_ESP32
|
|
||||||
analogSetPinAttenuation(this->pin_, this->attenuation_);
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
void ADCSensor::dump_config() {
|
|
||||||
LOG_SENSOR("", "ADC Sensor", this);
|
|
||||||
#ifdef ARDUINO_ARCH_ESP8266
|
|
||||||
#ifdef USE_ADC_SENSOR_VCC
|
|
||||||
ESP_LOGCONFIG(TAG, " Pin: VCC");
|
|
||||||
#else
|
|
||||||
ESP_LOGCONFIG(TAG, " Pin: %u", this->pin_);
|
|
||||||
#endif
|
|
||||||
#endif
|
|
||||||
#ifdef ARDUINO_ARCH_ESP32
|
|
||||||
ESP_LOGCONFIG(TAG, " Pin: %u", this->pin_);
|
|
||||||
switch (this->attenuation_) {
|
|
||||||
case ADC_0db:
|
|
||||||
ESP_LOGCONFIG(TAG, " Attenuation: 0db (max 1.1V)");
|
|
||||||
break;
|
|
||||||
case ADC_2_5db:
|
|
||||||
ESP_LOGCONFIG(TAG, " Attenuation: 2.5db (max 1.5V)");
|
|
||||||
break;
|
|
||||||
case ADC_6db:
|
|
||||||
ESP_LOGCONFIG(TAG, " Attenuation: 6db (max 2.2V)");
|
|
||||||
break;
|
|
||||||
case ADC_11db:
|
|
||||||
ESP_LOGCONFIG(TAG, " Attenuation: 11db (max 3.9V)");
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
LOG_UPDATE_INTERVAL(this);
|
|
||||||
}
|
|
||||||
float ADCSensor::get_setup_priority() const { return setup_priority::DATA; }
|
|
||||||
void ADCSensor::update() {
|
|
||||||
float value_v = this->sample();
|
|
||||||
ESP_LOGD(TAG, "'%s': Got voltage=%.2fV", this->get_name().c_str(), value_v);
|
|
||||||
this->publish_state(value_v);
|
|
||||||
}
|
|
||||||
float ADCSensor::sample() {
|
|
||||||
#ifdef ARDUINO_ARCH_ESP32
|
|
||||||
float value_v = analogRead(this->pin_) / 4095.0f; // NOLINT
|
|
||||||
switch (this->attenuation_) {
|
|
||||||
case ADC_0db:
|
|
||||||
value_v *= 1.1;
|
|
||||||
break;
|
|
||||||
case ADC_2_5db:
|
|
||||||
value_v *= 1.5;
|
|
||||||
break;
|
|
||||||
case ADC_6db:
|
|
||||||
value_v *= 2.2;
|
|
||||||
break;
|
|
||||||
case ADC_11db:
|
|
||||||
value_v *= 3.9;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
return value_v;
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#ifdef ARDUINO_ARCH_ESP8266
|
|
||||||
#ifdef USE_ADC_SENSOR_VCC
|
|
||||||
return ESP.getVcc() / 1024.0f;
|
|
||||||
#else
|
|
||||||
return analogRead(this->pin_) / 1024.0f; // NOLINT
|
|
||||||
#endif
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
#ifdef ARDUINO_ARCH_ESP8266
|
|
||||||
std::string ADCSensor::unique_id() { return get_mac_address() + "-adc"; }
|
|
||||||
#endif
|
|
||||||
|
|
||||||
} // namespace adc
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "esphome/core/component.h"
|
|
||||||
#include "esphome/core/esphal.h"
|
|
||||||
#include "esphome/core/defines.h"
|
|
||||||
#include "esphome/components/sensor/sensor.h"
|
|
||||||
#include "esphome/components/voltage_sampler/voltage_sampler.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace adc {
|
|
||||||
|
|
||||||
class ADCSensor : public sensor::Sensor, public PollingComponent, public voltage_sampler::VoltageSampler {
|
|
||||||
public:
|
|
||||||
#ifdef ARDUINO_ARCH_ESP32
|
|
||||||
/// Set the attenuation for this pin. Only available on the ESP32.
|
|
||||||
void set_attenuation(adc_attenuation_t attenuation);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
/// Update adc values.
|
|
||||||
void update() override;
|
|
||||||
/// Setup ADc
|
|
||||||
void setup() override;
|
|
||||||
void dump_config() override;
|
|
||||||
/// `HARDWARE_LATE` setup priority.
|
|
||||||
float get_setup_priority() const override;
|
|
||||||
void set_pin(uint8_t pin) { this->pin_ = pin; }
|
|
||||||
float sample() override;
|
|
||||||
|
|
||||||
#ifdef ARDUINO_ARCH_ESP8266
|
|
||||||
std::string unique_id() override;
|
|
||||||
#endif
|
|
||||||
|
|
||||||
protected:
|
|
||||||
uint8_t pin_;
|
|
||||||
|
|
||||||
#ifdef ARDUINO_ARCH_ESP32
|
|
||||||
adc_attenuation_t attenuation_{ADC_0db};
|
|
||||||
#endif
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace adc
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome import pins
|
|
||||||
from esphome.components import sensor, voltage_sampler
|
|
||||||
from esphome.const import (
|
|
||||||
CONF_ATTENUATION,
|
|
||||||
CONF_ID,
|
|
||||||
CONF_PIN,
|
|
||||||
DEVICE_CLASS_VOLTAGE,
|
|
||||||
ICON_EMPTY,
|
|
||||||
UNIT_VOLT,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
AUTO_LOAD = ["voltage_sampler"]
|
|
||||||
|
|
||||||
ATTENUATION_MODES = {
|
|
||||||
"0db": cg.global_ns.ADC_0db,
|
|
||||||
"2.5db": cg.global_ns.ADC_2_5db,
|
|
||||||
"6db": cg.global_ns.ADC_6db,
|
|
||||||
"11db": cg.global_ns.ADC_11db,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def validate_adc_pin(value):
|
|
||||||
vcc = str(value).upper()
|
|
||||||
if vcc == "VCC":
|
|
||||||
return cv.only_on_esp8266(vcc)
|
|
||||||
return pins.analog_pin(value)
|
|
||||||
|
|
||||||
|
|
||||||
adc_ns = cg.esphome_ns.namespace("adc")
|
|
||||||
ADCSensor = adc_ns.class_(
|
|
||||||
"ADCSensor", sensor.Sensor, cg.PollingComponent, voltage_sampler.VoltageSampler
|
|
||||||
)
|
|
||||||
|
|
||||||
CONFIG_SCHEMA = (
|
|
||||||
sensor.sensor_schema(UNIT_VOLT, ICON_EMPTY, 2, DEVICE_CLASS_VOLTAGE)
|
|
||||||
.extend(
|
|
||||||
{
|
|
||||||
cv.GenerateID(): cv.declare_id(ADCSensor),
|
|
||||||
cv.Required(CONF_PIN): validate_adc_pin,
|
|
||||||
cv.SplitDefault(CONF_ATTENUATION, esp32="0db"): cv.All(
|
|
||||||
cv.only_on_esp32, cv.enum(ATTENUATION_MODES, lower=True)
|
|
||||||
),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
.extend(cv.polling_component_schema("60s"))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def to_code(config):
|
|
||||||
var = cg.new_Pvariable(config[CONF_ID])
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
yield sensor.register_sensor(var, config)
|
|
||||||
|
|
||||||
if config[CONF_PIN] == "VCC":
|
|
||||||
cg.add_define("USE_ADC_SENSOR_VCC")
|
|
||||||
else:
|
|
||||||
cg.add(var.set_pin(config[CONF_PIN]))
|
|
||||||
|
|
||||||
if CONF_ATTENUATION in config:
|
|
||||||
cg.add(var.set_attenuation(config[CONF_ATTENUATION]))
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
#include "addressable_light_display.h"
|
|
||||||
#include "esphome/core/log.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace addressable_light {
|
|
||||||
|
|
||||||
static const char* TAG = "addressable_light.display";
|
|
||||||
|
|
||||||
int AddressableLightDisplay::get_width_internal() { return this->width_; }
|
|
||||||
int AddressableLightDisplay::get_height_internal() { return this->height_; }
|
|
||||||
|
|
||||||
void AddressableLightDisplay::setup() {
|
|
||||||
this->addressable_light_buffer_.resize(this->width_ * this->height_, {0, 0, 0, 0});
|
|
||||||
}
|
|
||||||
|
|
||||||
void AddressableLightDisplay::update() {
|
|
||||||
if (!this->enabled_)
|
|
||||||
return;
|
|
||||||
|
|
||||||
this->do_update_();
|
|
||||||
this->display();
|
|
||||||
}
|
|
||||||
|
|
||||||
void AddressableLightDisplay::display() {
|
|
||||||
bool dirty = false;
|
|
||||||
uint8_t old_r, old_g, old_b, old_w;
|
|
||||||
Color* c;
|
|
||||||
|
|
||||||
for (uint32_t offset = 0; offset < this->addressable_light_buffer_.size(); offset++) {
|
|
||||||
c = &(this->addressable_light_buffer_[offset]);
|
|
||||||
|
|
||||||
light::ESPColorView pixel = (*this->light_)[offset];
|
|
||||||
|
|
||||||
// Track the original values for the pixel view. If it has changed updating, then
|
|
||||||
// we trigger a redraw. Avoiding redraws == avoiding flicker!
|
|
||||||
old_r = pixel.get_red();
|
|
||||||
old_g = pixel.get_green();
|
|
||||||
old_b = pixel.get_blue();
|
|
||||||
old_w = pixel.get_white();
|
|
||||||
|
|
||||||
pixel.set_rgbw(c->r, c->g, c->b, c->w);
|
|
||||||
|
|
||||||
// If the actual value of the pixel changed, then schedule a redraw.
|
|
||||||
if (pixel.get_red() != old_r || pixel.get_green() != old_g || pixel.get_blue() != old_b ||
|
|
||||||
pixel.get_white() != old_w) {
|
|
||||||
dirty = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (dirty) {
|
|
||||||
this->light_->schedule_show();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void HOT AddressableLightDisplay::draw_absolute_pixel_internal(int x, int y, Color color) {
|
|
||||||
if (x >= this->get_width_internal() || x < 0 || y >= this->get_height_internal() || y < 0)
|
|
||||||
return;
|
|
||||||
|
|
||||||
if (this->pixel_mapper_f_.has_value()) {
|
|
||||||
// Params are passed by reference, so they may be modified in call.
|
|
||||||
this->addressable_light_buffer_[(*this->pixel_mapper_f_)(x, y)] = color;
|
|
||||||
} else {
|
|
||||||
this->addressable_light_buffer_[y * this->get_width_internal() + x] = color;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} // namespace addressable_light
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "esphome/core/component.h"
|
|
||||||
#include "esphome/core/color.h"
|
|
||||||
#include "esphome/components/display/display_buffer.h"
|
|
||||||
#include "esphome/components/light/addressable_light.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace addressable_light {
|
|
||||||
|
|
||||||
class AddressableLightDisplay : public display::DisplayBuffer, public PollingComponent {
|
|
||||||
public:
|
|
||||||
light::AddressableLight *get_light() const { return this->light_; }
|
|
||||||
|
|
||||||
void set_width(int32_t width) { width_ = width; }
|
|
||||||
void set_height(int32_t height) { height_ = height; }
|
|
||||||
void set_light(light::LightState *state) {
|
|
||||||
light_state_ = state;
|
|
||||||
light_ = static_cast<light::AddressableLight *>(state->get_output());
|
|
||||||
}
|
|
||||||
void set_enabled(bool enabled) {
|
|
||||||
if (light_state_) {
|
|
||||||
if (enabled_ && !enabled) { // enabled -> disabled
|
|
||||||
// - Tell the parent light to refresh, effectively wiping the display. Also
|
|
||||||
// restores the previous effect (if any).
|
|
||||||
light_state_->make_call().set_effect(this->last_effect_).perform();
|
|
||||||
|
|
||||||
} else if (!enabled_ && enabled) { // disabled -> enabled
|
|
||||||
// - Save the current effect.
|
|
||||||
this->last_effect_ = light_state_->get_effect_name();
|
|
||||||
// - Disable any current effect.
|
|
||||||
light_state_->make_call().set_effect(0).perform();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
enabled_ = enabled;
|
|
||||||
}
|
|
||||||
bool get_enabled() { return enabled_; }
|
|
||||||
|
|
||||||
void set_pixel_mapper(std::function<int(int, int)> &&pixel_mapper_f) { this->pixel_mapper_f_ = pixel_mapper_f; }
|
|
||||||
void setup() override;
|
|
||||||
void display();
|
|
||||||
|
|
||||||
protected:
|
|
||||||
int get_width_internal() override;
|
|
||||||
int get_height_internal() override;
|
|
||||||
void draw_absolute_pixel_internal(int x, int y, Color color) override;
|
|
||||||
void update() override;
|
|
||||||
|
|
||||||
light::LightState *light_state_;
|
|
||||||
light::AddressableLight *light_;
|
|
||||||
bool enabled_{true};
|
|
||||||
int32_t width_;
|
|
||||||
int32_t height_;
|
|
||||||
std::vector<Color> addressable_light_buffer_;
|
|
||||||
optional<std::string> last_effect_;
|
|
||||||
optional<std::function<int(int, int)>> pixel_mapper_f_;
|
|
||||||
};
|
|
||||||
} // namespace addressable_light
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome.components import display, light
|
|
||||||
from esphome.const import (
|
|
||||||
CONF_ID,
|
|
||||||
CONF_LAMBDA,
|
|
||||||
CONF_PAGES,
|
|
||||||
CONF_ADDRESSABLE_LIGHT_ID,
|
|
||||||
CONF_HEIGHT,
|
|
||||||
CONF_WIDTH,
|
|
||||||
CONF_UPDATE_INTERVAL,
|
|
||||||
CONF_PIXEL_MAPPER,
|
|
||||||
)
|
|
||||||
|
|
||||||
CODEOWNERS = ["@justfalter"]
|
|
||||||
|
|
||||||
addressable_light_ns = cg.esphome_ns.namespace("addressable_light")
|
|
||||||
AddressableLightDisplay = addressable_light_ns.class_(
|
|
||||||
"AddressableLightDisplay", display.DisplayBuffer, cg.PollingComponent
|
|
||||||
)
|
|
||||||
|
|
||||||
CONFIG_SCHEMA = cv.All(
|
|
||||||
display.FULL_DISPLAY_SCHEMA.extend(
|
|
||||||
{
|
|
||||||
cv.GenerateID(): cv.declare_id(AddressableLightDisplay),
|
|
||||||
cv.Required(CONF_ADDRESSABLE_LIGHT_ID): cv.use_id(
|
|
||||||
light.AddressableLightState
|
|
||||||
),
|
|
||||||
cv.Required(CONF_WIDTH): cv.positive_int,
|
|
||||||
cv.Required(CONF_HEIGHT): cv.positive_int,
|
|
||||||
cv.Optional(
|
|
||||||
CONF_UPDATE_INTERVAL, default="16ms"
|
|
||||||
): cv.positive_time_period_milliseconds,
|
|
||||||
cv.Optional(CONF_PIXEL_MAPPER): cv.returning_lambda,
|
|
||||||
}
|
|
||||||
),
|
|
||||||
cv.has_at_most_one_key(CONF_PAGES, CONF_LAMBDA),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def to_code(config):
|
|
||||||
var = cg.new_Pvariable(config[CONF_ID])
|
|
||||||
wrapped_light = yield cg.get_variable(config[CONF_ADDRESSABLE_LIGHT_ID])
|
|
||||||
cg.add(var.set_width(config[CONF_WIDTH]))
|
|
||||||
cg.add(var.set_height(config[CONF_HEIGHT]))
|
|
||||||
cg.add(var.set_light(wrapped_light))
|
|
||||||
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
yield display.register_display(var, config)
|
|
||||||
|
|
||||||
if CONF_PIXEL_MAPPER in config:
|
|
||||||
pixel_mapper_template_ = yield cg.process_lambda(
|
|
||||||
config[CONF_PIXEL_MAPPER],
|
|
||||||
[(int, "x"), (int, "y")],
|
|
||||||
return_type=cg.int_,
|
|
||||||
)
|
|
||||||
cg.add(var.set_pixel_mapper(pixel_mapper_template_))
|
|
||||||
|
|
||||||
if CONF_LAMBDA in config:
|
|
||||||
lambda_ = yield cg.process_lambda(
|
|
||||||
config[CONF_LAMBDA], [(display.DisplayBufferRef, "it")], return_type=cg.void
|
|
||||||
)
|
|
||||||
cg.add(var.set_writer(lambda_))
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
#include "ade7953.h"
|
|
||||||
#include "esphome/core/log.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace ade7953 {
|
|
||||||
|
|
||||||
static const char *TAG = "ade7953";
|
|
||||||
|
|
||||||
void ADE7953::dump_config() {
|
|
||||||
ESP_LOGCONFIG(TAG, "ADE7953:");
|
|
||||||
if (this->has_irq_) {
|
|
||||||
ESP_LOGCONFIG(TAG, " IRQ Pin: GPIO%u", this->irq_pin_number_);
|
|
||||||
}
|
|
||||||
LOG_I2C_DEVICE(this);
|
|
||||||
LOG_UPDATE_INTERVAL(this);
|
|
||||||
LOG_SENSOR(" ", "Voltage Sensor", this->voltage_sensor_);
|
|
||||||
LOG_SENSOR(" ", "Current A Sensor", this->current_a_sensor_);
|
|
||||||
LOG_SENSOR(" ", "Current B Sensor", this->current_b_sensor_);
|
|
||||||
LOG_SENSOR(" ", "Active Power A Sensor", this->active_power_a_sensor_);
|
|
||||||
LOG_SENSOR(" ", "Active Power B Sensor", this->active_power_b_sensor_);
|
|
||||||
}
|
|
||||||
|
|
||||||
#define ADE_PUBLISH_(name, factor) \
|
|
||||||
if (name && this->name##_sensor_) { \
|
|
||||||
float value = *name / factor; \
|
|
||||||
this->name##_sensor_->publish_state(value); \
|
|
||||||
}
|
|
||||||
#define ADE_PUBLISH(name, factor) ADE_PUBLISH_(name, factor)
|
|
||||||
|
|
||||||
void ADE7953::update() {
|
|
||||||
if (!this->is_setup_)
|
|
||||||
return;
|
|
||||||
|
|
||||||
auto active_power_a = this->ade_read_<int32_t>(0x0312);
|
|
||||||
ADE_PUBLISH(active_power_a, 154.0f);
|
|
||||||
auto active_power_b = this->ade_read_<int32_t>(0x0313);
|
|
||||||
ADE_PUBLISH(active_power_b, 154.0f);
|
|
||||||
auto current_a = this->ade_read_<uint32_t>(0x031A);
|
|
||||||
ADE_PUBLISH(current_a, 100000.0f);
|
|
||||||
auto current_b = this->ade_read_<uint32_t>(0x031B);
|
|
||||||
ADE_PUBLISH(current_b, 100000.0f);
|
|
||||||
auto voltage = this->ade_read_<uint32_t>(0x031C);
|
|
||||||
ADE_PUBLISH(voltage, 26000.0f);
|
|
||||||
|
|
||||||
// auto apparent_power_a = this->ade_read_<int32_t>(0x0310);
|
|
||||||
// auto apparent_power_b = this->ade_read_<int32_t>(0x0311);
|
|
||||||
// auto reactive_power_a = this->ade_read_<int32_t>(0x0314);
|
|
||||||
// auto reactive_power_b = this->ade_read_<int32_t>(0x0315);
|
|
||||||
// auto power_factor_a = this->ade_read_<int16_t>(0x010A);
|
|
||||||
// auto power_factor_b = this->ade_read_<int16_t>(0x010B);
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace ade7953
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,79 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "esphome/core/component.h"
|
|
||||||
#include "esphome/components/i2c/i2c.h"
|
|
||||||
#include "esphome/components/sensor/sensor.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace ade7953 {
|
|
||||||
|
|
||||||
class ADE7953 : public i2c::I2CDevice, public PollingComponent {
|
|
||||||
public:
|
|
||||||
void set_irq_pin(uint8_t irq_pin) {
|
|
||||||
has_irq_ = true;
|
|
||||||
irq_pin_number_ = irq_pin;
|
|
||||||
}
|
|
||||||
void set_voltage_sensor(sensor::Sensor *voltage_sensor) { voltage_sensor_ = voltage_sensor; }
|
|
||||||
void set_current_a_sensor(sensor::Sensor *current_a_sensor) { current_a_sensor_ = current_a_sensor; }
|
|
||||||
void set_current_b_sensor(sensor::Sensor *current_b_sensor) { current_b_sensor_ = current_b_sensor; }
|
|
||||||
void set_active_power_a_sensor(sensor::Sensor *active_power_a_sensor) {
|
|
||||||
active_power_a_sensor_ = active_power_a_sensor;
|
|
||||||
}
|
|
||||||
void set_active_power_b_sensor(sensor::Sensor *active_power_b_sensor) {
|
|
||||||
active_power_b_sensor_ = active_power_b_sensor;
|
|
||||||
}
|
|
||||||
|
|
||||||
void setup() override {
|
|
||||||
if (this->has_irq_) {
|
|
||||||
auto pin = GPIOPin(this->irq_pin_number_, INPUT);
|
|
||||||
this->irq_pin_ = &pin;
|
|
||||||
this->irq_pin_->setup();
|
|
||||||
}
|
|
||||||
this->set_timeout(100, [this]() {
|
|
||||||
this->ade_write_<uint8_t>(0x0010, 0x04);
|
|
||||||
this->ade_write_<uint8_t>(0x00FE, 0xAD);
|
|
||||||
this->ade_write_<uint16_t>(0x0120, 0x0030);
|
|
||||||
this->is_setup_ = true;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
void dump_config() override;
|
|
||||||
|
|
||||||
void update() override;
|
|
||||||
|
|
||||||
protected:
|
|
||||||
template<typename T> bool ade_write_(uint16_t reg, T value) {
|
|
||||||
std::vector<uint8_t> data;
|
|
||||||
data.push_back(reg >> 8);
|
|
||||||
data.push_back(reg >> 0);
|
|
||||||
for (int i = sizeof(T) - 1; i >= 0; i--)
|
|
||||||
data.push_back(value >> (i * 8));
|
|
||||||
return this->write_bytes_raw(data);
|
|
||||||
}
|
|
||||||
template<typename T> optional<T> ade_read_(uint16_t reg) {
|
|
||||||
uint8_t hi = reg >> 8;
|
|
||||||
uint8_t lo = reg >> 0;
|
|
||||||
if (!this->write_bytes_raw({hi, lo}))
|
|
||||||
return {};
|
|
||||||
auto ret = this->read_bytes_raw<sizeof(T)>();
|
|
||||||
if (!ret.has_value())
|
|
||||||
return {};
|
|
||||||
T result = 0;
|
|
||||||
for (int i = 0, j = sizeof(T) - 1; i < sizeof(T); i++, j--)
|
|
||||||
result |= T((*ret)[i]) << (j * 8);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool has_irq_ = false;
|
|
||||||
uint8_t irq_pin_number_;
|
|
||||||
GPIOPin *irq_pin_{nullptr};
|
|
||||||
bool is_setup_{false};
|
|
||||||
sensor::Sensor *voltage_sensor_{nullptr};
|
|
||||||
sensor::Sensor *current_a_sensor_{nullptr};
|
|
||||||
sensor::Sensor *current_b_sensor_{nullptr};
|
|
||||||
sensor::Sensor *active_power_a_sensor_{nullptr};
|
|
||||||
sensor::Sensor *active_power_b_sensor_{nullptr};
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace ade7953
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,74 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome.components import sensor, i2c
|
|
||||||
from esphome import pins
|
|
||||||
from esphome.const import (
|
|
||||||
CONF_ID,
|
|
||||||
CONF_VOLTAGE,
|
|
||||||
DEVICE_CLASS_CURRENT,
|
|
||||||
DEVICE_CLASS_POWER,
|
|
||||||
DEVICE_CLASS_VOLTAGE,
|
|
||||||
ICON_EMPTY,
|
|
||||||
UNIT_VOLT,
|
|
||||||
UNIT_AMPERE,
|
|
||||||
UNIT_WATT,
|
|
||||||
)
|
|
||||||
|
|
||||||
DEPENDENCIES = ["i2c"]
|
|
||||||
|
|
||||||
ade7953_ns = cg.esphome_ns.namespace("ade7953")
|
|
||||||
ADE7953 = ade7953_ns.class_("ADE7953", cg.PollingComponent, i2c.I2CDevice)
|
|
||||||
|
|
||||||
CONF_IRQ_PIN = "irq_pin"
|
|
||||||
CONF_CURRENT_A = "current_a"
|
|
||||||
CONF_CURRENT_B = "current_b"
|
|
||||||
CONF_ACTIVE_POWER_A = "active_power_a"
|
|
||||||
CONF_ACTIVE_POWER_B = "active_power_b"
|
|
||||||
|
|
||||||
CONFIG_SCHEMA = (
|
|
||||||
cv.Schema(
|
|
||||||
{
|
|
||||||
cv.GenerateID(): cv.declare_id(ADE7953),
|
|
||||||
cv.Optional(CONF_IRQ_PIN): pins.input_pin,
|
|
||||||
cv.Optional(CONF_VOLTAGE): sensor.sensor_schema(
|
|
||||||
UNIT_VOLT, ICON_EMPTY, 1, DEVICE_CLASS_VOLTAGE
|
|
||||||
),
|
|
||||||
cv.Optional(CONF_CURRENT_A): sensor.sensor_schema(
|
|
||||||
UNIT_AMPERE, ICON_EMPTY, 2, DEVICE_CLASS_CURRENT
|
|
||||||
),
|
|
||||||
cv.Optional(CONF_CURRENT_B): sensor.sensor_schema(
|
|
||||||
UNIT_AMPERE, ICON_EMPTY, 2, DEVICE_CLASS_CURRENT
|
|
||||||
),
|
|
||||||
cv.Optional(CONF_ACTIVE_POWER_A): sensor.sensor_schema(
|
|
||||||
UNIT_WATT, ICON_EMPTY, 1, DEVICE_CLASS_POWER
|
|
||||||
),
|
|
||||||
cv.Optional(CONF_ACTIVE_POWER_B): sensor.sensor_schema(
|
|
||||||
UNIT_WATT, ICON_EMPTY, 1, DEVICE_CLASS_POWER
|
|
||||||
),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
.extend(cv.polling_component_schema("60s"))
|
|
||||||
.extend(i2c.i2c_device_schema(0x38))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def to_code(config):
|
|
||||||
var = cg.new_Pvariable(config[CONF_ID])
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
yield i2c.register_i2c_device(var, config)
|
|
||||||
|
|
||||||
if CONF_IRQ_PIN in config:
|
|
||||||
cg.add(var.set_irq_pin(config[CONF_IRQ_PIN]))
|
|
||||||
|
|
||||||
for key in [
|
|
||||||
CONF_VOLTAGE,
|
|
||||||
CONF_CURRENT_A,
|
|
||||||
CONF_CURRENT_B,
|
|
||||||
CONF_ACTIVE_POWER_A,
|
|
||||||
CONF_ACTIVE_POWER_B,
|
|
||||||
]:
|
|
||||||
if key not in config:
|
|
||||||
continue
|
|
||||||
conf = config[key]
|
|
||||||
sens = yield sensor.new_sensor(conf)
|
|
||||||
cg.add(getattr(var, f"set_{key}_sensor")(sens))
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome.components import i2c
|
|
||||||
from esphome.const import CONF_ID
|
|
||||||
|
|
||||||
DEPENDENCIES = ["i2c"]
|
|
||||||
AUTO_LOAD = ["sensor", "voltage_sampler"]
|
|
||||||
MULTI_CONF = True
|
|
||||||
|
|
||||||
ads1115_ns = cg.esphome_ns.namespace("ads1115")
|
|
||||||
ADS1115Component = ads1115_ns.class_("ADS1115Component", cg.Component, i2c.I2CDevice)
|
|
||||||
|
|
||||||
CONF_CONTINUOUS_MODE = "continuous_mode"
|
|
||||||
CONFIG_SCHEMA = (
|
|
||||||
cv.Schema(
|
|
||||||
{
|
|
||||||
cv.GenerateID(): cv.declare_id(ADS1115Component),
|
|
||||||
cv.Optional(CONF_CONTINUOUS_MODE, default=False): cv.boolean,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
.extend(cv.COMPONENT_SCHEMA)
|
|
||||||
.extend(i2c.i2c_device_schema(None))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def to_code(config):
|
|
||||||
var = cg.new_Pvariable(config[CONF_ID])
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
yield i2c.register_i2c_device(var, config)
|
|
||||||
|
|
||||||
cg.add(var.set_continuous_mode(config[CONF_CONTINUOUS_MODE]))
|
|
||||||
@@ -1,169 +0,0 @@
|
|||||||
#include "ads1115.h"
|
|
||||||
#include "esphome/core/log.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace ads1115 {
|
|
||||||
|
|
||||||
static const char *TAG = "ads1115";
|
|
||||||
static const uint8_t ADS1115_REGISTER_CONVERSION = 0x00;
|
|
||||||
static const uint8_t ADS1115_REGISTER_CONFIG = 0x01;
|
|
||||||
|
|
||||||
static const uint8_t ADS1115_DATA_RATE_860_SPS = 0b111;
|
|
||||||
|
|
||||||
void ADS1115Component::setup() {
|
|
||||||
ESP_LOGCONFIG(TAG, "Setting up ADS1115...");
|
|
||||||
uint16_t value;
|
|
||||||
if (!this->read_byte_16(ADS1115_REGISTER_CONVERSION, &value)) {
|
|
||||||
this->mark_failed();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
uint16_t config = 0;
|
|
||||||
// Clear single-shot bit
|
|
||||||
// 0b0xxxxxxxxxxxxxxx
|
|
||||||
config |= 0b0000000000000000;
|
|
||||||
// Setup multiplexer
|
|
||||||
// 0bx000xxxxxxxxxxxx
|
|
||||||
config |= ADS1115_MULTIPLEXER_P0_N1 << 12;
|
|
||||||
|
|
||||||
// Setup Gain
|
|
||||||
// 0bxxxx000xxxxxxxxx
|
|
||||||
config |= ADS1115_GAIN_6P144 << 9;
|
|
||||||
|
|
||||||
if (this->continuous_mode_) {
|
|
||||||
// Set continuous mode
|
|
||||||
// 0bxxxxxxx0xxxxxxxx
|
|
||||||
config |= 0b0000000000000000;
|
|
||||||
} else {
|
|
||||||
// Set singleshot mode
|
|
||||||
// 0bxxxxxxx1xxxxxxxx
|
|
||||||
config |= 0b0000000100000000;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set data rate - 860 samples per second (we're in singleshot mode)
|
|
||||||
// 0bxxxxxxxx100xxxxx
|
|
||||||
config |= ADS1115_DATA_RATE_860_SPS << 5;
|
|
||||||
|
|
||||||
// Set comparator mode - hysteresis
|
|
||||||
// 0bxxxxxxxxxxx0xxxx
|
|
||||||
config |= 0b0000000000000000;
|
|
||||||
|
|
||||||
// Set comparator polarity - active low
|
|
||||||
// 0bxxxxxxxxxxxx0xxx
|
|
||||||
config |= 0b0000000000000000;
|
|
||||||
|
|
||||||
// Set comparator latch enabled - false
|
|
||||||
// 0bxxxxxxxxxxxxx0xx
|
|
||||||
config |= 0b0000000000000000;
|
|
||||||
|
|
||||||
// Set comparator que mode - disabled
|
|
||||||
// 0bxxxxxxxxxxxxxx11
|
|
||||||
config |= 0b0000000000000011;
|
|
||||||
|
|
||||||
if (!this->write_byte_16(ADS1115_REGISTER_CONFIG, config)) {
|
|
||||||
this->mark_failed();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
this->prev_config_ = config;
|
|
||||||
|
|
||||||
for (auto *sensor : this->sensors_) {
|
|
||||||
this->set_interval(sensor->get_name(), sensor->update_interval(),
|
|
||||||
[this, sensor] { this->request_measurement(sensor); });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
void ADS1115Component::dump_config() {
|
|
||||||
ESP_LOGCONFIG(TAG, "Setting up ADS1115...");
|
|
||||||
LOG_I2C_DEVICE(this);
|
|
||||||
if (this->is_failed()) {
|
|
||||||
ESP_LOGE(TAG, "Communication with ADS1115 failed!");
|
|
||||||
}
|
|
||||||
|
|
||||||
for (auto *sensor : this->sensors_) {
|
|
||||||
LOG_SENSOR(" ", "Sensor", sensor);
|
|
||||||
ESP_LOGCONFIG(TAG, " Multiplexer: %u", sensor->get_multiplexer());
|
|
||||||
ESP_LOGCONFIG(TAG, " Gain: %u", sensor->get_gain());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
float ADS1115Component::request_measurement(ADS1115Sensor *sensor) {
|
|
||||||
uint16_t config = this->prev_config_;
|
|
||||||
// Multiplexer
|
|
||||||
// 0bxBBBxxxxxxxxxxxx
|
|
||||||
config &= 0b1000111111111111;
|
|
||||||
config |= (sensor->get_multiplexer() & 0b111) << 12;
|
|
||||||
|
|
||||||
// Gain
|
|
||||||
// 0bxxxxBBBxxxxxxxxx
|
|
||||||
config &= 0b1111000111111111;
|
|
||||||
config |= (sensor->get_gain() & 0b111) << 9;
|
|
||||||
|
|
||||||
if (!this->continuous_mode_) {
|
|
||||||
// Start conversion
|
|
||||||
config |= 0b1000000000000000;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!this->continuous_mode_ || this->prev_config_ != config) {
|
|
||||||
if (!this->write_byte_16(ADS1115_REGISTER_CONFIG, config)) {
|
|
||||||
this->status_set_warning();
|
|
||||||
return NAN;
|
|
||||||
}
|
|
||||||
this->prev_config_ = config;
|
|
||||||
|
|
||||||
// about 1.6 ms with 860 samples per second
|
|
||||||
delay(2);
|
|
||||||
|
|
||||||
uint32_t start = millis();
|
|
||||||
while (this->read_byte_16(ADS1115_REGISTER_CONFIG, &config) && (config >> 15) == 0) {
|
|
||||||
if (millis() - start > 100) {
|
|
||||||
ESP_LOGW(TAG, "Reading ADS1115 timed out");
|
|
||||||
this->status_set_warning();
|
|
||||||
return NAN;
|
|
||||||
}
|
|
||||||
yield();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
uint16_t raw_conversion;
|
|
||||||
if (!this->read_byte_16(ADS1115_REGISTER_CONVERSION, &raw_conversion)) {
|
|
||||||
this->status_set_warning();
|
|
||||||
return NAN;
|
|
||||||
}
|
|
||||||
auto signed_conversion = static_cast<int16_t>(raw_conversion);
|
|
||||||
|
|
||||||
float millivolts;
|
|
||||||
switch (sensor->get_gain()) {
|
|
||||||
case ADS1115_GAIN_6P144:
|
|
||||||
millivolts = signed_conversion * 0.187500f;
|
|
||||||
break;
|
|
||||||
case ADS1115_GAIN_4P096:
|
|
||||||
millivolts = signed_conversion * 0.125000f;
|
|
||||||
break;
|
|
||||||
case ADS1115_GAIN_2P048:
|
|
||||||
millivolts = signed_conversion * 0.062500f;
|
|
||||||
break;
|
|
||||||
case ADS1115_GAIN_1P024:
|
|
||||||
millivolts = signed_conversion * 0.031250f;
|
|
||||||
break;
|
|
||||||
case ADS1115_GAIN_0P512:
|
|
||||||
millivolts = signed_conversion * 0.015625f;
|
|
||||||
break;
|
|
||||||
case ADS1115_GAIN_0P256:
|
|
||||||
millivolts = signed_conversion * 0.007813f;
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
millivolts = NAN;
|
|
||||||
}
|
|
||||||
|
|
||||||
this->status_clear_warning();
|
|
||||||
return millivolts / 1e3f;
|
|
||||||
}
|
|
||||||
|
|
||||||
float ADS1115Sensor::sample() { return this->parent_->request_measurement(this); }
|
|
||||||
void ADS1115Sensor::update() {
|
|
||||||
float v = this->parent_->request_measurement(this);
|
|
||||||
if (!isnan(v)) {
|
|
||||||
ESP_LOGD(TAG, "'%s': Got Voltage=%fV", this->get_name().c_str(), v);
|
|
||||||
this->publish_state(v);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace ads1115
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "esphome/core/component.h"
|
|
||||||
#include "esphome/components/sensor/sensor.h"
|
|
||||||
#include "esphome/components/i2c/i2c.h"
|
|
||||||
#include "esphome/components/voltage_sampler/voltage_sampler.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace ads1115 {
|
|
||||||
|
|
||||||
enum ADS1115Multiplexer {
|
|
||||||
ADS1115_MULTIPLEXER_P0_N1 = 0b000,
|
|
||||||
ADS1115_MULTIPLEXER_P0_N3 = 0b001,
|
|
||||||
ADS1115_MULTIPLEXER_P1_N3 = 0b010,
|
|
||||||
ADS1115_MULTIPLEXER_P2_N3 = 0b011,
|
|
||||||
ADS1115_MULTIPLEXER_P0_NG = 0b100,
|
|
||||||
ADS1115_MULTIPLEXER_P1_NG = 0b101,
|
|
||||||
ADS1115_MULTIPLEXER_P2_NG = 0b110,
|
|
||||||
ADS1115_MULTIPLEXER_P3_NG = 0b111,
|
|
||||||
};
|
|
||||||
|
|
||||||
enum ADS1115Gain {
|
|
||||||
ADS1115_GAIN_6P144 = 0b000,
|
|
||||||
ADS1115_GAIN_4P096 = 0b001,
|
|
||||||
ADS1115_GAIN_2P048 = 0b010,
|
|
||||||
ADS1115_GAIN_1P024 = 0b011,
|
|
||||||
ADS1115_GAIN_0P512 = 0b100,
|
|
||||||
ADS1115_GAIN_0P256 = 0b101,
|
|
||||||
};
|
|
||||||
|
|
||||||
class ADS1115Sensor;
|
|
||||||
|
|
||||||
class ADS1115Component : public Component, public i2c::I2CDevice {
|
|
||||||
public:
|
|
||||||
void register_sensor(ADS1115Sensor *obj) { this->sensors_.push_back(obj); }
|
|
||||||
/// Set up the internal sensor array.
|
|
||||||
void setup() override;
|
|
||||||
void dump_config() override;
|
|
||||||
/// HARDWARE_LATE setup priority
|
|
||||||
float get_setup_priority() const override { return setup_priority::DATA; }
|
|
||||||
void set_continuous_mode(bool continuous_mode) { continuous_mode_ = continuous_mode; }
|
|
||||||
|
|
||||||
/// Helper method to request a measurement from a sensor.
|
|
||||||
float request_measurement(ADS1115Sensor *sensor);
|
|
||||||
|
|
||||||
protected:
|
|
||||||
std::vector<ADS1115Sensor *> sensors_;
|
|
||||||
uint16_t prev_config_{0};
|
|
||||||
bool continuous_mode_;
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Internal holder class that is in instance of Sensor so that the hub can create individual sensors.
|
|
||||||
class ADS1115Sensor : public sensor::Sensor, public PollingComponent, public voltage_sampler::VoltageSampler {
|
|
||||||
public:
|
|
||||||
ADS1115Sensor(ADS1115Component *parent) : parent_(parent) {}
|
|
||||||
void update() override;
|
|
||||||
void set_multiplexer(ADS1115Multiplexer multiplexer) { multiplexer_ = multiplexer; }
|
|
||||||
void set_gain(ADS1115Gain gain) { gain_ = gain; }
|
|
||||||
|
|
||||||
float sample() override;
|
|
||||||
uint8_t get_multiplexer() const { return multiplexer_; }
|
|
||||||
uint8_t get_gain() const { return gain_; }
|
|
||||||
|
|
||||||
protected:
|
|
||||||
ADS1115Component *parent_;
|
|
||||||
ADS1115Multiplexer multiplexer_;
|
|
||||||
ADS1115Gain gain_;
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace ads1115
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,76 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome.components import sensor, voltage_sampler
|
|
||||||
from esphome.const import (
|
|
||||||
CONF_GAIN,
|
|
||||||
CONF_MULTIPLEXER,
|
|
||||||
DEVICE_CLASS_VOLTAGE,
|
|
||||||
ICON_EMPTY,
|
|
||||||
UNIT_VOLT,
|
|
||||||
CONF_ID,
|
|
||||||
)
|
|
||||||
from . import ads1115_ns, ADS1115Component
|
|
||||||
|
|
||||||
DEPENDENCIES = ["ads1115"]
|
|
||||||
|
|
||||||
ADS1115Multiplexer = ads1115_ns.enum("ADS1115Multiplexer")
|
|
||||||
MUX = {
|
|
||||||
"A0_A1": ADS1115Multiplexer.ADS1115_MULTIPLEXER_P0_N1,
|
|
||||||
"A0_A3": ADS1115Multiplexer.ADS1115_MULTIPLEXER_P0_N3,
|
|
||||||
"A1_A3": ADS1115Multiplexer.ADS1115_MULTIPLEXER_P1_N3,
|
|
||||||
"A2_A3": ADS1115Multiplexer.ADS1115_MULTIPLEXER_P2_N3,
|
|
||||||
"A0_GND": ADS1115Multiplexer.ADS1115_MULTIPLEXER_P0_NG,
|
|
||||||
"A1_GND": ADS1115Multiplexer.ADS1115_MULTIPLEXER_P1_NG,
|
|
||||||
"A2_GND": ADS1115Multiplexer.ADS1115_MULTIPLEXER_P2_NG,
|
|
||||||
"A3_GND": ADS1115Multiplexer.ADS1115_MULTIPLEXER_P3_NG,
|
|
||||||
}
|
|
||||||
|
|
||||||
ADS1115Gain = ads1115_ns.enum("ADS1115Gain")
|
|
||||||
GAIN = {
|
|
||||||
"6.144": ADS1115Gain.ADS1115_GAIN_6P144,
|
|
||||||
"4.096": ADS1115Gain.ADS1115_GAIN_4P096,
|
|
||||||
"2.048": ADS1115Gain.ADS1115_GAIN_2P048,
|
|
||||||
"1.024": ADS1115Gain.ADS1115_GAIN_1P024,
|
|
||||||
"0.512": ADS1115Gain.ADS1115_GAIN_0P512,
|
|
||||||
"0.256": ADS1115Gain.ADS1115_GAIN_0P256,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def validate_gain(value):
|
|
||||||
if isinstance(value, float):
|
|
||||||
value = f"{value:0.03f}"
|
|
||||||
elif not isinstance(value, str):
|
|
||||||
raise cv.Invalid(f'invalid gain "{value}"')
|
|
||||||
|
|
||||||
return cv.enum(GAIN)(value)
|
|
||||||
|
|
||||||
|
|
||||||
ADS1115Sensor = ads1115_ns.class_(
|
|
||||||
"ADS1115Sensor", sensor.Sensor, cg.PollingComponent, voltage_sampler.VoltageSampler
|
|
||||||
)
|
|
||||||
|
|
||||||
CONF_ADS1115_ID = "ads1115_id"
|
|
||||||
CONFIG_SCHEMA = (
|
|
||||||
sensor.sensor_schema(UNIT_VOLT, ICON_EMPTY, 3, DEVICE_CLASS_VOLTAGE)
|
|
||||||
.extend(
|
|
||||||
{
|
|
||||||
cv.GenerateID(): cv.declare_id(ADS1115Sensor),
|
|
||||||
cv.GenerateID(CONF_ADS1115_ID): cv.use_id(ADS1115Component),
|
|
||||||
cv.Required(CONF_MULTIPLEXER): cv.enum(MUX, upper=True, space="_"),
|
|
||||||
cv.Required(CONF_GAIN): validate_gain,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
.extend(cv.polling_component_schema("60s"))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def to_code(config):
|
|
||||||
paren = yield cg.get_variable(config[CONF_ADS1115_ID])
|
|
||||||
var = cg.new_Pvariable(config[CONF_ID], paren)
|
|
||||||
yield sensor.register_sensor(var, config)
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
|
|
||||||
cg.add(var.set_multiplexer(config[CONF_MULTIPLEXER]))
|
|
||||||
cg.add(var.set_gain(config[CONF_GAIN]))
|
|
||||||
|
|
||||||
cg.add(paren.register_sensor(var))
|
|
||||||
@@ -1,127 +0,0 @@
|
|||||||
// Implementation based on:
|
|
||||||
// - AHT10: https://github.com/Thinary/AHT10
|
|
||||||
// - Official Datasheet (cn):
|
|
||||||
// http://www.aosong.com/userfiles/files/media/aht10%E8%A7%84%E6%A0%BC%E4%B9%A6v1_1%EF%BC%8820191015%EF%BC%89.pdf
|
|
||||||
// - Unofficial Translated Datasheet (en):
|
|
||||||
// https://wiki.liutyi.info/download/attachments/30507639/Aosong_AHT10_en_draft_0c.pdf
|
|
||||||
//
|
|
||||||
// When configured for humidity, the log 'Components should block for at most 20-30ms in loop().' will be generated in
|
|
||||||
// verbose mode. This is due to technical specs of the sensor and can not be avoided.
|
|
||||||
//
|
|
||||||
// According to the datasheet, the component is supposed to respond in more than 75ms. In fact, it can answer almost
|
|
||||||
// immediately for temperature. But for humidity, it takes >90ms to get a valid data. From experience, we have best
|
|
||||||
// results making successive requests; the current implementation make 3 attemps with a delay of 30ms each time.
|
|
||||||
|
|
||||||
#include "aht10.h"
|
|
||||||
#include "esphome/core/log.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace aht10 {
|
|
||||||
|
|
||||||
static const char *TAG = "aht10";
|
|
||||||
static const uint8_t AHT10_CALIBRATE_CMD[] = {0xE1};
|
|
||||||
static const uint8_t AHT10_MEASURE_CMD[] = {0xAC, 0x33, 0x00};
|
|
||||||
static const uint8_t AHT10_DEFAULT_DELAY = 5; // ms, for calibration and temperature measurement
|
|
||||||
static const uint8_t AHT10_HUMIDITY_DELAY = 30; // ms
|
|
||||||
static const uint8_t AHT10_ATTEMPS = 3; // safety margin, normally 3 attemps are enough: 3*30=90ms
|
|
||||||
|
|
||||||
void AHT10Component::setup() {
|
|
||||||
ESP_LOGCONFIG(TAG, "Setting up AHT10...");
|
|
||||||
|
|
||||||
if (!this->write_bytes(0, AHT10_CALIBRATE_CMD, sizeof(AHT10_CALIBRATE_CMD))) {
|
|
||||||
ESP_LOGE(TAG, "Communication with AHT10 failed!");
|
|
||||||
this->mark_failed();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
uint8_t data;
|
|
||||||
if (!this->read_byte(0, &data, AHT10_DEFAULT_DELAY)) {
|
|
||||||
ESP_LOGD(TAG, "Communication with AHT10 failed!");
|
|
||||||
this->mark_failed();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if ((data & 0x68) != 0x08) { // Bit[6:5] = 0b00, NORMAL mode and Bit[3] = 0b1, CALIBRATED
|
|
||||||
ESP_LOGE(TAG, "AHT10 calibration failed!");
|
|
||||||
this->mark_failed();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
ESP_LOGV(TAG, "AHT10 calibrated");
|
|
||||||
}
|
|
||||||
|
|
||||||
void AHT10Component::update() {
|
|
||||||
if (!this->write_bytes(0, AHT10_MEASURE_CMD, sizeof(AHT10_MEASURE_CMD))) {
|
|
||||||
ESP_LOGE(TAG, "Communication with AHT10 failed!");
|
|
||||||
this->status_set_warning();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
uint8_t data[6];
|
|
||||||
uint8_t delay = AHT10_DEFAULT_DELAY;
|
|
||||||
if (this->humidity_sensor_ != nullptr)
|
|
||||||
delay = AHT10_HUMIDITY_DELAY;
|
|
||||||
for (int i = 0; i < AHT10_ATTEMPS; ++i) {
|
|
||||||
ESP_LOGVV(TAG, "Attemps %u at %6ld", i, millis());
|
|
||||||
if (!this->read_bytes(0, data, 6, delay)) {
|
|
||||||
ESP_LOGD(TAG, "Communication with AHT10 failed, waiting...");
|
|
||||||
} else if ((data[0] & 0x80) == 0x80) { // Bit[7] = 0b1, device is busy
|
|
||||||
ESP_LOGD(TAG, "AHT10 is busy, waiting...");
|
|
||||||
} else if (data[1] == 0x0 && data[2] == 0x0 && (data[3] >> 4) == 0x0) {
|
|
||||||
// Unrealistic humidity (0x0)
|
|
||||||
if (this->humidity_sensor_ == nullptr) {
|
|
||||||
ESP_LOGVV(TAG, "ATH10 Unrealistic humidity (0x0), but humidity is not required");
|
|
||||||
break;
|
|
||||||
} else {
|
|
||||||
ESP_LOGD(TAG, "ATH10 Unrealistic humidity (0x0), retrying...");
|
|
||||||
if (!this->write_bytes(0, AHT10_MEASURE_CMD, sizeof(AHT10_MEASURE_CMD))) {
|
|
||||||
ESP_LOGE(TAG, "Communication with AHT10 failed!");
|
|
||||||
this->status_set_warning();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// data is valid, we can break the loop
|
|
||||||
ESP_LOGVV(TAG, "Answer at %6ld", millis());
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if ((data[0] & 0x80) == 0x80) {
|
|
||||||
ESP_LOGE(TAG, "Measurements reading timed-out!");
|
|
||||||
this->status_set_warning();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
uint32_t raw_temperature = ((data[3] & 0x0F) << 16) | (data[4] << 8) | data[5];
|
|
||||||
uint32_t raw_humidity = ((data[1] << 16) | (data[2] << 8) | data[3]) >> 4;
|
|
||||||
|
|
||||||
float temperature = ((200.0 * (float) raw_temperature) / 1048576.0) - 50.0;
|
|
||||||
float humidity;
|
|
||||||
if (raw_humidity == 0) { // unrealistic value
|
|
||||||
humidity = NAN;
|
|
||||||
} else {
|
|
||||||
humidity = (float) raw_humidity * 100.0 / 1048576.0;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this->temperature_sensor_ != nullptr) {
|
|
||||||
this->temperature_sensor_->publish_state(temperature);
|
|
||||||
}
|
|
||||||
if (this->humidity_sensor_ != nullptr) {
|
|
||||||
if (isnan(humidity))
|
|
||||||
ESP_LOGW(TAG, "Invalid humidity! Sensor reported 0%% Hum");
|
|
||||||
this->humidity_sensor_->publish_state(humidity);
|
|
||||||
}
|
|
||||||
this->status_clear_warning();
|
|
||||||
}
|
|
||||||
|
|
||||||
float AHT10Component::get_setup_priority() const { return setup_priority::DATA; }
|
|
||||||
|
|
||||||
void AHT10Component::dump_config() {
|
|
||||||
ESP_LOGCONFIG(TAG, "AHT10:");
|
|
||||||
LOG_I2C_DEVICE(this);
|
|
||||||
if (this->is_failed()) {
|
|
||||||
ESP_LOGE(TAG, "Communication with AHT10 failed!");
|
|
||||||
}
|
|
||||||
LOG_SENSOR(" ", "Temperature", this->temperature_sensor_);
|
|
||||||
LOG_SENSOR(" ", "Humidity", this->humidity_sensor_);
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace aht10
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "esphome/core/component.h"
|
|
||||||
#include "esphome/components/sensor/sensor.h"
|
|
||||||
#include "esphome/components/i2c/i2c.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace aht10 {
|
|
||||||
|
|
||||||
class AHT10Component : public PollingComponent, public i2c::I2CDevice {
|
|
||||||
public:
|
|
||||||
void setup() override;
|
|
||||||
void update() override;
|
|
||||||
void dump_config() override;
|
|
||||||
float get_setup_priority() const override;
|
|
||||||
|
|
||||||
void set_temperature_sensor(sensor::Sensor *temperature_sensor) { temperature_sensor_ = temperature_sensor; }
|
|
||||||
void set_humidity_sensor(sensor::Sensor *humidity_sensor) { humidity_sensor_ = humidity_sensor; }
|
|
||||||
|
|
||||||
protected:
|
|
||||||
sensor::Sensor *temperature_sensor_;
|
|
||||||
sensor::Sensor *humidity_sensor_;
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace aht10
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome.components import i2c, sensor
|
|
||||||
from esphome.const import (
|
|
||||||
CONF_HUMIDITY,
|
|
||||||
CONF_ID,
|
|
||||||
CONF_TEMPERATURE,
|
|
||||||
DEVICE_CLASS_HUMIDITY,
|
|
||||||
DEVICE_CLASS_TEMPERATURE,
|
|
||||||
ICON_EMPTY,
|
|
||||||
UNIT_CELSIUS,
|
|
||||||
UNIT_PERCENT,
|
|
||||||
)
|
|
||||||
|
|
||||||
DEPENDENCIES = ["i2c"]
|
|
||||||
|
|
||||||
aht10_ns = cg.esphome_ns.namespace("aht10")
|
|
||||||
AHT10Component = aht10_ns.class_("AHT10Component", cg.PollingComponent, i2c.I2CDevice)
|
|
||||||
|
|
||||||
CONFIG_SCHEMA = (
|
|
||||||
cv.Schema(
|
|
||||||
{
|
|
||||||
cv.GenerateID(): cv.declare_id(AHT10Component),
|
|
||||||
cv.Optional(CONF_TEMPERATURE): sensor.sensor_schema(
|
|
||||||
UNIT_CELSIUS, ICON_EMPTY, 2, DEVICE_CLASS_TEMPERATURE
|
|
||||||
),
|
|
||||||
cv.Optional(CONF_HUMIDITY): sensor.sensor_schema(
|
|
||||||
UNIT_PERCENT, ICON_EMPTY, 2, DEVICE_CLASS_HUMIDITY
|
|
||||||
),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
.extend(cv.polling_component_schema("60s"))
|
|
||||||
.extend(i2c.i2c_device_schema(0x38))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def to_code(config):
|
|
||||||
var = cg.new_Pvariable(config[CONF_ID])
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
yield i2c.register_i2c_device(var, config)
|
|
||||||
|
|
||||||
if CONF_TEMPERATURE in config:
|
|
||||||
sens = yield sensor.new_sensor(config[CONF_TEMPERATURE])
|
|
||||||
cg.add(var.set_temperature_sensor(sens))
|
|
||||||
|
|
||||||
if CONF_HUMIDITY in config:
|
|
||||||
sens = yield sensor.new_sensor(config[CONF_HUMIDITY])
|
|
||||||
cg.add(var.set_humidity_sensor(sens))
|
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
// Implementation based on:
|
|
||||||
// - ESPEasy: https://github.com/letscontrolit/ESPEasy/blob/mega/src/_P034_DHT12.ino
|
|
||||||
// - DHT12_sensor_library: https://github.com/xreef/DHT12_sensor_library/blob/master/DHT12.cpp
|
|
||||||
// - Arduino - AM2320: https://github.com/EngDial/AM2320/blob/master/src/AM2320.cpp
|
|
||||||
|
|
||||||
#include "am2320.h"
|
|
||||||
#include "esphome/core/log.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace am2320 {
|
|
||||||
|
|
||||||
static const char *TAG = "am2320";
|
|
||||||
|
|
||||||
// ---=== Calc CRC16 ===---
|
|
||||||
uint16_t crc_16(uint8_t *ptr, uint8_t length) {
|
|
||||||
uint16_t crc = 0xFFFF;
|
|
||||||
uint8_t i;
|
|
||||||
//------------------------------
|
|
||||||
while (length--) {
|
|
||||||
crc ^= *ptr++;
|
|
||||||
for (i = 0; i < 8; i++)
|
|
||||||
if ((crc & 0x01) != 0) {
|
|
||||||
crc >>= 1;
|
|
||||||
crc ^= 0xA001;
|
|
||||||
} else
|
|
||||||
crc >>= 1;
|
|
||||||
}
|
|
||||||
return crc;
|
|
||||||
}
|
|
||||||
|
|
||||||
void AM2320Component::update() {
|
|
||||||
uint8_t data[8];
|
|
||||||
data[0] = 0;
|
|
||||||
data[1] = 4;
|
|
||||||
if (!this->read_data_(data)) {
|
|
||||||
this->status_set_warning();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
float temperature = (((data[4] & 0x7F) << 8) + data[5]) / 10.0;
|
|
||||||
temperature = (data[4] & 0x80) ? -temperature : temperature;
|
|
||||||
float humidity = ((data[2] << 8) + data[3]) / 10.0;
|
|
||||||
|
|
||||||
ESP_LOGD(TAG, "Got temperature=%.1f°C humidity=%.1f%%", temperature, humidity);
|
|
||||||
if (this->temperature_sensor_ != nullptr)
|
|
||||||
this->temperature_sensor_->publish_state(temperature);
|
|
||||||
if (this->humidity_sensor_ != nullptr)
|
|
||||||
this->humidity_sensor_->publish_state(humidity);
|
|
||||||
this->status_clear_warning();
|
|
||||||
}
|
|
||||||
void AM2320Component::setup() {
|
|
||||||
ESP_LOGCONFIG(TAG, "Setting up AM2320...");
|
|
||||||
uint8_t data[8];
|
|
||||||
data[0] = 0;
|
|
||||||
data[1] = 4;
|
|
||||||
if (!this->read_data_(data)) {
|
|
||||||
this->mark_failed();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
void AM2320Component::dump_config() {
|
|
||||||
ESP_LOGD(TAG, "AM2320:");
|
|
||||||
LOG_I2C_DEVICE(this);
|
|
||||||
if (this->is_failed()) {
|
|
||||||
ESP_LOGE(TAG, "Communication with AM2320 failed!");
|
|
||||||
}
|
|
||||||
LOG_SENSOR(" ", "Temperature", this->temperature_sensor_);
|
|
||||||
LOG_SENSOR(" ", "Humidity", this->humidity_sensor_);
|
|
||||||
}
|
|
||||||
float AM2320Component::get_setup_priority() const { return setup_priority::DATA; }
|
|
||||||
|
|
||||||
bool AM2320Component::read_bytes_(uint8_t a_register, uint8_t *data, uint8_t len, uint32_t conversion) {
|
|
||||||
if (!this->write_bytes(a_register, data, 2)) {
|
|
||||||
ESP_LOGW(TAG, "Writing bytes for AM2320 failed!");
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (conversion > 0)
|
|
||||||
delay(conversion);
|
|
||||||
return this->parent_->raw_receive(this->address_, data, len);
|
|
||||||
}
|
|
||||||
|
|
||||||
bool AM2320Component::read_data_(uint8_t *data) {
|
|
||||||
// Wake up
|
|
||||||
this->write_bytes(0, data, 0);
|
|
||||||
|
|
||||||
// Write instruction 3, 2 bytes, get 8 bytes back (2 preamble, 2 bytes temperature, 2 bytes humidity, 2 bytes CRC)
|
|
||||||
if (!this->read_bytes_(3, data, 8, 2)) {
|
|
||||||
ESP_LOGW(TAG, "Updating AM2320 failed!");
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
uint16_t checksum;
|
|
||||||
|
|
||||||
checksum = data[7] << 8;
|
|
||||||
checksum += data[6];
|
|
||||||
|
|
||||||
if (crc_16(data, 6) != checksum) {
|
|
||||||
ESP_LOGW(TAG, "AM2320 Checksum invalid!");
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace am2320
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "esphome/core/component.h"
|
|
||||||
#include "esphome/components/sensor/sensor.h"
|
|
||||||
#include "esphome/components/i2c/i2c.h"
|
|
||||||
|
|
||||||
namespace esphome {
|
|
||||||
namespace am2320 {
|
|
||||||
|
|
||||||
class AM2320Component : public PollingComponent, public i2c::I2CDevice {
|
|
||||||
public:
|
|
||||||
void setup() override;
|
|
||||||
void dump_config() override;
|
|
||||||
float get_setup_priority() const override;
|
|
||||||
void update() override;
|
|
||||||
|
|
||||||
void set_temperature_sensor(sensor::Sensor *temperature_sensor) { temperature_sensor_ = temperature_sensor; }
|
|
||||||
void set_humidity_sensor(sensor::Sensor *humidity_sensor) { humidity_sensor_ = humidity_sensor; }
|
|
||||||
|
|
||||||
protected:
|
|
||||||
bool read_data_(uint8_t *data);
|
|
||||||
bool read_bytes_(uint8_t a_register, uint8_t *data, uint8_t len, uint32_t conversion = 0);
|
|
||||||
|
|
||||||
sensor::Sensor *temperature_sensor_;
|
|
||||||
sensor::Sensor *humidity_sensor_;
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace am2320
|
|
||||||
} // namespace esphome
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
import esphome.codegen as cg
|
|
||||||
import esphome.config_validation as cv
|
|
||||||
from esphome.components import i2c, sensor
|
|
||||||
from esphome.const import (
|
|
||||||
CONF_HUMIDITY,
|
|
||||||
CONF_ID,
|
|
||||||
CONF_TEMPERATURE,
|
|
||||||
DEVICE_CLASS_HUMIDITY,
|
|
||||||
DEVICE_CLASS_TEMPERATURE,
|
|
||||||
UNIT_CELSIUS,
|
|
||||||
ICON_EMPTY,
|
|
||||||
UNIT_PERCENT,
|
|
||||||
)
|
|
||||||
|
|
||||||
DEPENDENCIES = ["i2c"]
|
|
||||||
|
|
||||||
am2320_ns = cg.esphome_ns.namespace("am2320")
|
|
||||||
AM2320Component = am2320_ns.class_(
|
|
||||||
"AM2320Component", cg.PollingComponent, i2c.I2CDevice
|
|
||||||
)
|
|
||||||
|
|
||||||
CONFIG_SCHEMA = (
|
|
||||||
cv.Schema(
|
|
||||||
{
|
|
||||||
cv.GenerateID(): cv.declare_id(AM2320Component),
|
|
||||||
cv.Optional(CONF_TEMPERATURE): sensor.sensor_schema(
|
|
||||||
UNIT_CELSIUS, ICON_EMPTY, 1, DEVICE_CLASS_TEMPERATURE
|
|
||||||
),
|
|
||||||
cv.Optional(CONF_HUMIDITY): sensor.sensor_schema(
|
|
||||||
UNIT_PERCENT, ICON_EMPTY, 1, DEVICE_CLASS_HUMIDITY
|
|
||||||
),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
.extend(cv.polling_component_schema("60s"))
|
|
||||||
.extend(i2c.i2c_device_schema(0x5C))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def to_code(config):
|
|
||||||
var = cg.new_Pvariable(config[CONF_ID])
|
|
||||||
yield cg.register_component(var, config)
|
|
||||||
yield i2c.register_i2c_device(var, config)
|
|
||||||
|
|
||||||
if CONF_TEMPERATURE in config:
|
|
||||||
sens = yield sensor.new_sensor(config[CONF_TEMPERATURE])
|
|
||||||
cg.add(var.set_temperature_sensor(sens))
|
|
||||||
|
|
||||||
if CONF_HUMIDITY in config:
|
|
||||||
sens = yield sensor.new_sensor(config[CONF_HUMIDITY])
|
|
||||||
cg.add(var.set_humidity_sensor(sens))
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user