mirror of https://github.com/esphome/esphome.git
synced 2025-04-21 14:17:20 +00:00

Compare commits
No commits in common. "dev" and "2021.9.0b2" have entirely different histories.
dev ... 2021.9.0b2

91 .clang-tidy
@@ -2,45 +2,32 @@
Checks: >-
  *,
  -abseil-*,
  -altera-*,
  -android-*,
  -boost-*,
  -bugprone-easily-swappable-parameters,
  -bugprone-implicit-widening-of-multiplication-result,
  -bugprone-multi-level-implicit-pointer-conversion,
  -bugprone-branch-clone,
  -bugprone-narrowing-conversions,
  -bugprone-signed-char-misuse,
  -bugprone-switch-missing-default-case,
  -bugprone-too-small-loop-variable,
  -cert-dcl50-cpp,
  -cert-err33-c,
  -cert-err58-cpp,
  -cert-oop57-cpp,
  -cert-str34-c,
  -clang-analyzer-optin.core.EnumCastOutOfRange,
  -clang-analyzer-optin.cplusplus.UninitializedObject,
  -clang-analyzer-osx.*,
  -clang-diagnostic-delete-abstract-non-virtual-dtor,
  -clang-diagnostic-delete-non-abstract-non-virtual-dtor,
  -clang-diagnostic-deprecated-declarations,
  -clang-diagnostic-ignored-optimization-argument,
  -clang-diagnostic-missing-field-initializers,
  -clang-diagnostic-shadow-field,
  -clang-diagnostic-sign-compare,
  -clang-diagnostic-unused-variable,
  -clang-diagnostic-unused-const-variable,
  -clang-diagnostic-unused-parameter,
  -clang-diagnostic-vla-cxx-extension,
  -concurrency-*,
  -cppcoreguidelines-avoid-c-arrays,
  -cppcoreguidelines-avoid-const-or-ref-data-members,
  -cppcoreguidelines-avoid-do-while,
  -cppcoreguidelines-avoid-goto,
  -cppcoreguidelines-avoid-magic-numbers,
  -cppcoreguidelines-init-variables,
  -cppcoreguidelines-macro-to-enum,
  -cppcoreguidelines-macro-usage,
  -cppcoreguidelines-missing-std-forward,
  -cppcoreguidelines-narrowing-conversions,
  -cppcoreguidelines-non-private-member-variables-in-classes,
  -cppcoreguidelines-owning-memory,
  -cppcoreguidelines-prefer-member-initializer,
  -cppcoreguidelines-pro-bounds-array-to-pointer-decay,
  -cppcoreguidelines-pro-bounds-constant-array-index,
  -cppcoreguidelines-pro-bounds-pointer-arithmetic,
@@ -51,10 +38,8 @@ Checks: >-
  -cppcoreguidelines-pro-type-static-cast-downcast,
  -cppcoreguidelines-pro-type-union-access,
  -cppcoreguidelines-pro-type-vararg,
  -cppcoreguidelines-rvalue-reference-param-not-moved,
  -cppcoreguidelines-special-member-functions,
  -cppcoreguidelines-use-default-member-init,
  -cppcoreguidelines-virtual-class-destructor,
  -fuchsia-default-arguments,
  -fuchsia-multiple-inheritance,
  -fuchsia-overloaded-operator,
  -fuchsia-statically-constructed-objects,
@@ -64,7 +49,6 @@ Checks: >-
  -google-explicit-constructor,
  -google-readability-braces-around-statements,
  -google-readability-casting,
  -google-readability-namespace-comments,
  -google-readability-todo,
  -google-runtime-references,
  -hicpp-*,
@@ -73,57 +57,44 @@ Checks: >-
  -llvm-include-order,
  -llvm-qualified-auto,
  -llvmlibc-*,
  -misc-const-correctness,
  -misc-include-cleaner,
  -misc-no-recursion,
  -misc-non-private-member-variables-in-classes,
  -misc-no-recursion,
  -misc-unused-parameters,
  -misc-use-anonymous-namespace,
  -modernize-avoid-bind,
  -modernize-avoid-c-arrays,
  -modernize-concat-nested-namespaces,
  -modernize-macro-to-enum,
  -modernize-return-braced-init-list,
  -modernize-type-traits,
  -modernize-use-auto,
  -modernize-use-constraints,
  -modernize-use-default-member-init,
  -modernize-use-equals-default,
  -modernize-use-nodiscard,
  -modernize-use-nullptr,
  -modernize-use-nodiscard,
  -modernize-use-nullptr,
  -modernize-use-trailing-return-type,
  -mpi-*,
  -objc-*,
  -performance-enum-size,
  -readability-avoid-nested-conditional-operator,
  -readability-container-contains,
  -readability-container-data-pointer,
  -readability-braces-around-statements,
  -readability-const-return-type,
  -readability-convert-member-functions-to-static,
  -readability-else-after-return,
  -readability-function-cognitive-complexity,
  -readability-implicit-bool-conversion,
  -readability-isolate-declaration,
  -readability-magic-numbers,
  -readability-make-member-function-const,
  -readability-named-parameter,
  -readability-redundant-casting,
  -readability-redundant-inline-specifier,
  -readability-qualified-auto,
  -readability-redundant-access-specifiers,
  -readability-redundant-member-init,
  -readability-redundant-string-init,
  -readability-uppercase-literal-suffix,
  -readability-use-anyofallof,
  -warnings-as-errors
WarningsAsErrors: '*'
AnalyzeTemporaryDtors: false
FormatStyle: google
CheckOptions:
  - key: google-readability-braces-around-statements.ShortStatementLines
    value: '1'
  - key: google-readability-function-size.StatementThreshold
    value: '800'
  - key: google-runtime-int.TypeSuffix
    value: '_t'
  - key: llvm-namespace-comment.ShortNamespaceLines
  - key: google-readability-namespace-comments.ShortNamespaceLines
    value: '10'
  - key: llvm-namespace-comment.SpacesBeforeComments
  - key: google-readability-namespace-comments.SpacesBeforeComments
    value: '2'
  - key: modernize-loop-convert.MaxCopySize
    value: '16'
@@ -137,12 +108,6 @@ CheckOptions:
    value: llvm
  - key: modernize-use-nullptr.NullMacros
    value: 'NULL'
  - key: modernize-make-unique.MakeSmartPtrFunction
    value: 'make_unique'
  - key: modernize-make-unique.MakeSmartPtrFunctionHeader
    value: 'esphome/core/helpers.h'
  - key: readability-braces-around-statements.ShortStatementLines
    value: 2
  - key: readability-identifier-naming.LocalVariableCase
    value: 'lower_case'
  - key: readability-identifier-naming.ClassCase
@@ -156,19 +121,15 @@ CheckOptions:
  - key: readability-identifier-naming.StaticConstantCase
    value: 'UPPER_CASE'
  - key: readability-identifier-naming.StaticVariableCase
    value: 'lower_case'
    value: 'UPPER_CASE'
  - key: readability-identifier-naming.GlobalConstantCase
    value: 'UPPER_CASE'
  - key: readability-identifier-naming.ParameterCase
    value: 'lower_case'
  - key: readability-identifier-naming.PrivateMemberCase
    value: 'lower_case'
  - key: readability-identifier-naming.PrivateMemberSuffix
    value: '_'
  - key: readability-identifier-naming.PrivateMethodCase
    value: 'lower_case'
  - key: readability-identifier-naming.PrivateMethodSuffix
    value: '_'
  - key: readability-identifier-naming.PrivateMemberPrefix
    value: 'NO_PRIVATE_MEMBERS_ALWAYS_USE_PROTECTED'
  - key: readability-identifier-naming.PrivateMethodPrefix
    value: 'NO_PRIVATE_METHODS_ALWAYS_USE_PROTECTED'
  - key: readability-identifier-naming.ClassMemberCase
    value: 'lower_case'
  - key: readability-identifier-naming.ClassMemberCase
@@ -189,11 +150,3 @@ CheckOptions:
    value: 'lower_case'
  - key: readability-identifier-naming.VirtualMethodSuffix
    value: ''
  - key: readability-qualified-auto.AddConstToQualified
    value: 0
  - key: readability-identifier-length.MinimumVariableNameLength
    value: 0
  - key: readability-identifier-length.MinimumParameterNameLength
    value: 0
  - key: readability-identifier-length.MinimumLoopCounterNameLength
    value: 0

.devcontainer/devcontainer.json
@@ -1,88 +1,57 @@
{
  "name": "ESPHome Dev",
  "image": "ghcr.io/esphome/esphome-lint:dev",
  "context": "..",
  "dockerFile": "../docker/Dockerfile.dev",
  "postCreateCommand": [
    "script/devcontainer-post-create"
  ],
  "containerEnv": {
    "DEVCONTAINER": "1",
    "PIP_BREAK_SYSTEM_PACKAGES": "1",
    "PIP_ROOT_USER_ACTION": "ignore"
  },
  "runArgs": [
    "--privileged",
    "-e",
    "ESPHOME_DASHBOARD_USE_PING=1"
    // uncomment and edit the path in order to pass through local USB serial to the container
    // , "--device=/dev/ttyACM0"
  ],
  "appPort": 6052,
  // if you are using avahi in the host device, uncomment these to allow the
  // devcontainer to find devices via mdns
  //"mounts": [
  //  "type=bind,source=/dev/bus/usb,target=/dev/bus/usb",
  //  "type=bind,source=/var/run/dbus,target=/var/run/dbus",
  //  "type=bind,source=/var/run/avahi-daemon/socket,target=/var/run/avahi-daemon/socket"
  //],
  "customizations": {
    "vscode": {
      "extensions": [
        // python
        "ms-python.python",
        "ms-python.pylint",
        "ms-python.flake8",
        "charliermarsh.ruff",
        "visualstudioexptteam.vscodeintellicode",
        // yaml
        "redhat.vscode-yaml",
        // cpp
        "ms-vscode.cpptools",
        // editorconfig
        "editorconfig.editorconfig"
      ],
      "settings": {
        "python.languageServer": "Pylance",
        "python.pythonPath": "/usr/bin/python3",
        "pylint.args": [
          "--rcfile=${workspaceFolder}/pyproject.toml"
        ],
        "flake8.args": [
          "--config=${workspaceFolder}/.flake8"
        ],
        "ruff.configuration": "${workspaceFolder}/pyproject.toml",
        "[python]": {
          // VS will say "Value is not accepted" before building the devcontainer, but the warning
          // should go away after the build is completed.
          "editor.defaultFormatter": "charliermarsh.ruff"
        },
        "editor.formatOnPaste": false,
        "editor.formatOnSave": true,
        "editor.formatOnType": true,
        "files.trimTrailingWhitespace": true,
        "terminal.integrated.defaultProfile.linux": "bash",
        "yaml.customTags": [
          "!secret scalar",
          "!lambda scalar",
          "!extend scalar",
          "!remove scalar",
          "!include_dir_named scalar",
          "!include_dir_list scalar",
          "!include_dir_merge_list scalar",
          "!include_dir_merge_named scalar"
        ],
        "files.exclude": {
          "**/.git": true,
          "**/.DS_Store": true,
          "**/*.pyc": {
            "when": "$(basename).py"
          },
          "**/__pycache__": true
        },
        "files.associations": {
          "**/.vscode/*.json": "jsonc"
        },
        "C_Cpp.clang_format_path": "/usr/bin/clang-format-13"
      }
    }
  "extensions": [
    // python
    "ms-python.python",
    "visualstudioexptteam.vscodeintellicode",
    // yaml
    "redhat.vscode-yaml",
    // cpp
    "ms-vscode.cpptools",
    // editorconfig
    "editorconfig.editorconfig",
  ],
  "settings": {
    "python.languageServer": "Pylance",
    "python.pythonPath": "/usr/bin/python3",
    "python.linting.pylintEnabled": true,
    "python.linting.enabled": true,
    "python.formatting.provider": "black",
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true,
    "files.trimTrailingWhitespace": true,
    "terminal.integrated.defaultProfile.linux": "bash",
    "yaml.customTags": [
      "!secret scalar",
      "!lambda scalar",
      "!include_dir_named scalar",
      "!include_dir_list scalar",
      "!include_dir_merge_list scalar",
      "!include_dir_merge_named scalar"
    ],
    "files.exclude": {
      "**/.git": true,
      "**/.DS_Store": true,
      "**/*.pyc": {
        "when": "$(basename).py"
      },
      "**/__pycache__": true
    },
    "files.associations": {
      "**/.vscode/*.json": "jsonc"
    },
    "C_Cpp.clang_format_path": "/usr/bin/clang-format-11",
  }
}

.gitignore
@@ -75,9 +75,6 @@ target/
# pyenv
.python-version

# asdf
.tool-versions

# celery beat schedule file
celerybeat-schedule

.editorconfig
@@ -25,9 +25,10 @@ indent_size = 2
[*.{yaml,yml}]
indent_style = space
indent_size = 2
quote_type = double
quote_type = single

# JSON
[*.json]
indent_style = space
indent_size = 2

3 .gitattributes vendored
@@ -1,3 +0,0 @@
# Normalize line endings to LF in the repository
* text eol=lf
*.png binary

3 .github/FUNDING.yml vendored Normal file
@@ -0,0 +1,3 @@
# These are supported funding model platforms

custom: https://www.nabucasa.com

9 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1,4 +1,3 @@
---
blank_issues_enabled: false
contact_links:
  - name: Issue Tracker
@@ -6,10 +5,8 @@ contact_links:
    about: Please create bug reports in the dedicated issue tracker.
  - name: Feature Request Tracker
    url: https://github.com/esphome/feature-requests
    about: |
      Please create feature requests in the dedicated feature request tracker.
    about: Please create feature requests in the dedicated feature request tracker.
  - name: Frequently Asked Question
    url: https://esphome.io/guides/faq.html
    about: |
      Please view the FAQ for common questions and what
      to include in a bug report.
    about: Please view the FAQ for common questions and what to include in a bug report.

25 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -1,33 +1,30 @@
# What does this implement/fix?
# What does this implement/fix?

<!-- Quick description and explanation of changes -->
Quick description and explanation of changes

## Types of changes

- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Code quality improvements to existing code or addition of tests
- [ ] Other

**Related issue or feature (if applicable):**
**Related issue or feature (if applicable):** fixes <link to issue>

- fixes <link to issue>

**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):**

- esphome/esphome-docs#<esphome-docs PR number goes here>
**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):** esphome/esphome-docs#<esphome-docs PR number goes here>

## Test Environment

- [ ] ESP32
- [ ] ESP32 IDF
- [ ] ESP8266
- [ ] RP2040
- [ ] BK72xx
- [ ] RTL87xx

## Example entry for `config.yaml`:
<!--
Supplying a configuration snippet makes it easier for a maintainer to test
your PR. Furthermore, for new integrations, it gives an impression of how
the configuration would look.
Note: Remove this section if this PR does not have an example entry.
-->

```yaml
# Example config.yaml
@@ -37,6 +34,6 @@
## Checklist:
- [ ] The code change is tested and works locally.
- [ ] Tests have been added to verify that the new code works (under `tests/` folder).

If user exposed functionality or configuration variables are added/changed:
- [ ] Documentation added/updated in [esphome-docs](https://github.com/esphome/esphome-docs).

97 .github/actions/build-image/action.yaml vendored
@@ -1,97 +0,0 @@
name: Build Image
inputs:
  platform:
    description: "Platform to build for"
    required: true
    example: "linux/amd64"
  target:
    description: "Target to build"
    required: true
    example: "docker"
  baseimg:
    description: "Base image type"
    required: true
    example: "docker"
  suffix:
    description: "Suffix to add to tags"
    required: true
  version:
    description: "Version to build"
    required: true
    example: "2023.12.0"
runs:
  using: "composite"
  steps:
    - name: Generate short tags
      id: tags
      shell: bash
      run: |
        output=$(docker/generate_tags.py \
          --tag "${{ inputs.version }}" \
          --suffix "${{ inputs.suffix }}")
        echo $output
        for l in $output; do
          echo $l >> $GITHUB_OUTPUT
        done

    # set cache-to only if dev branch
    - id: cache-to
      shell: bash
      run: |-
        if [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
          echo "value=type=gha,mode=max" >> $GITHUB_OUTPUT
        else
          echo "value=" >> $GITHUB_OUTPUT
        fi

    - name: Build and push to ghcr by digest
      id: build-ghcr
      uses: docker/build-push-action@v6.15.0
      env:
        DOCKER_BUILD_SUMMARY: false
        DOCKER_BUILD_RECORD_UPLOAD: false
      with:
        context: .
        file: ./docker/Dockerfile
        platforms: ${{ inputs.platform }}
        target: ${{ inputs.target }}
        cache-from: type=gha
        cache-to: ${{ steps.cache-to.outputs.value }}
        build-args: |
          BASEIMGTYPE=${{ inputs.baseimg }}
          BUILD_VERSION=${{ inputs.version }}
        outputs: |
          type=image,name=ghcr.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true

    - name: Export ghcr digests
      shell: bash
      run: |
        mkdir -p /tmp/digests/${{ inputs.target }}/ghcr
        digest="${{ steps.build-ghcr.outputs.digest }}"
        touch "/tmp/digests/${{ inputs.target }}/ghcr/${digest#sha256:}"

    - name: Build and push to dockerhub by digest
      id: build-dockerhub
      uses: docker/build-push-action@v6.15.0
      env:
        DOCKER_BUILD_SUMMARY: false
        DOCKER_BUILD_RECORD_UPLOAD: false
      with:
        context: .
        file: ./docker/Dockerfile
        platforms: ${{ inputs.platform }}
        target: ${{ inputs.target }}
        cache-from: type=gha
        cache-to: ${{ steps.cache-to.outputs.value }}
        build-args: |
          BASEIMGTYPE=${{ inputs.baseimg }}
          BUILD_VERSION=${{ inputs.version }}
        outputs: |
          type=image,name=docker.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true

    - name: Export dockerhub digests
      shell: bash
      run: |
        mkdir -p /tmp/digests/${{ inputs.target }}/dockerhub
        digest="${{ steps.build-dockerhub.outputs.digest }}"
        touch "/tmp/digests/${{ inputs.target }}/dockerhub/${digest#sha256:}"

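A note on the "Generate short tags" step above: GitHub Actions turns each `key=value` line appended to the file named by `$GITHUB_OUTPUT` into a step output, which is how `steps.tags.outputs.image_name` becomes available to the later build steps. `docker/generate_tags.py` itself is not part of this diff, so the sketch below is only a hedged illustration of that contract; the flag handling and printed values are assumptions, not the real script.

```python
#!/usr/bin/env python3
# Hedged sketch of the key=value contract relied on by the "Generate short
# tags" step. docker/generate_tags.py is not shown in this diff; the values
# printed here are hypothetical.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--tag", required=True)   # e.g. "2023.12.0"
parser.add_argument("--suffix", default="")   # e.g. "lint"
args = parser.parse_args()

# Each printed "key=value" line is appended to $GITHUB_OUTPUT by the
# workflow's shell loop and becomes steps.tags.outputs.<key>.
print("image_name=esphome/esphome")           # hypothetical image name
major, minor, _patch = args.tag.split(".")
suffix = f"-{args.suffix}" if args.suffix else ""
print(f"tags={args.tag}{suffix},{major}.{minor}{suffix}")
```
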
47 .github/actions/restore-python/action.yml vendored
@@ -1,47 +0,0 @@
name: Restore Python
inputs:
  python-version:
    description: Python version to restore
    required: true
    type: string
  cache-key:
    description: Cache key to use
    required: true
    type: string
outputs:
  python-version:
    description: Python version restored
    value: ${{ steps.python.outputs.python-version }}
runs:
  using: "composite"
  steps:
    - name: Set up Python ${{ inputs.python-version }}
      id: python
      uses: actions/setup-python@v5.5.0
      with:
        python-version: ${{ inputs.python-version }}
    - name: Restore Python virtual environment
      id: cache-venv
      uses: actions/cache/restore@v4.2.3
      with:
        path: venv
        # yamllint disable-line rule:line-length
        key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ inputs.cache-key }}
    - name: Create Python virtual environment
      if: steps.cache-venv.outputs.cache-hit != 'true' && runner.os != 'Windows'
      shell: bash
      run: |
        python -m venv venv
        source venv/bin/activate
        python --version
        pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
        pip install -e .
    - name: Create Python virtual environment
      if: steps.cache-venv.outputs.cache-hit != 'true' && runner.os == 'Windows'
      shell: bash
      run: |
        python -m venv venv
        ./venv/Scripts/activate
        python --version
        pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
        pip install -e .

27 .github/dependabot.yml vendored
@@ -1,32 +1,9 @@
---
version: 2
updates:
  - package-ecosystem: pip
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: daily
      interval: "daily"
    ignore:
      # Hypothesis is only used for testing and is updated quite often
      - dependency-name: hypothesis
  - package-ecosystem: github-actions
    directory: "/"
    schedule:
      interval: daily
    open-pull-requests-limit: 10
    groups:
      docker-actions:
        applies-to: version-updates
        patterns:
          - "docker/setup-qemu-action"
          - "docker/login-action"
          - "docker/setup-buildx-action"
  - package-ecosystem: github-actions
    directory: "/.github/actions/build-image"
    schedule:
      interval: daily
    open-pull-requests-limit: 10
  - package-ecosystem: github-actions
    directory: "/.github/actions/restore-python"
    schedule:
      interval: daily
    open-pull-requests-limit: 10

7 .github/issue-close-app.yml vendored Normal file
@@ -0,0 +1,7 @@
comment: >-
  https://github.com/esphome/esphome/issues/430
issueConfigs:
  - content:
      - "OTHERWISE THE ISSUE WILL BE CLOSED AUTOMATICALLY"

caseInsensitive: false

36 .github/lock.yml vendored Normal file
@@ -0,0 +1,36 @@
# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 7

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: false

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels:
  - keep-open

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
# only: issues

# Optionally, specify configuration settings just for `issues` or `pulls`
# issues:
#   exemptLabels:
#     - help-wanted
#   lockLabel: outdated

# pulls:
#   daysUntilLock: 30

# Repository to extend settings from
# _extends: repo

59 .github/stale.yml vendored Normal file
@@ -0,0 +1,59 @@
# Configuration for probot-stale - https://github.com/probot/stale

# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 60

# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
daysUntilClose: 7

# Only issues or pull requests with all of these labels are checked if stale. Defaults to `[]` (disabled)
onlyLabels: []

# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
exemptLabels:
  - not-stale

# Set to true to ignore issues in a project (defaults to false)
exemptProjects: false

# Set to true to ignore issues in a milestone (defaults to false)
exemptMilestones: true

# Set to true to ignore issues with an assignee (defaults to false)
exemptAssignees: false

# Label to use when marking as stale
staleLabel: stale

# Comment to post when marking as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.

# Comment to post when removing the stale label.
# unmarkComment: >
#   Your comment here.

# Comment to post when closing a stale Issue or Pull Request.
# closeComment: >
#   Your comment here.

# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 10

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls':
# pulls:
#   daysUntilStale: 30
#   markComment: >
#     This pull request has been automatically marked as stale because it has not had
#     recent activity. It will be closed if no further activity occurs. Thank you
#     for your contributions.

# issues:
#   exemptLabels:
#     - confirmed

80 .github/workflows/ci-api-proto.yml vendored
@@ -1,80 +0,0 @@
name: API Proto CI

on:
  pull_request:
    paths:
      - "esphome/components/api/api.proto"
      - "esphome/components/api/api_pb2.cpp"
      - "esphome/components/api/api_pb2.h"
      - "esphome/components/api/api_pb2_service.cpp"
      - "esphome/components/api/api_pb2_service.h"
      - "script/api_protobuf/api_protobuf.py"
      - ".github/workflows/ci-api-proto.yml"

permissions:
  contents: read
  pull-requests: write

jobs:
  check:
    name: Check generated files
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4.1.7
      - name: Set up Python
        uses: actions/setup-python@v5.5.0
        with:
          python-version: "3.11"

      - name: Install apt dependencies
        run: |
          sudo apt update
          sudo apt-cache show protobuf-compiler
          sudo apt install -y protobuf-compiler
          protoc --version
      - name: Install python dependencies
        run: pip install aioesphomeapi -c requirements.txt -r requirements_dev.txt
      - name: Generate files
        run: script/api_protobuf/api_protobuf.py
      - name: Check for changes
        run: |
          if ! git diff --quiet; then
            echo "## Job Failed" | tee -a $GITHUB_STEP_SUMMARY
            echo "You have altered the generated proto files but they do not match what is expected." | tee -a $GITHUB_STEP_SUMMARY
            echo "Please run 'script/api_protobuf/api_protobuf.py' and commit the changes." | tee -a $GITHUB_STEP_SUMMARY
            exit 1
          fi
      - if: failure()
        name: Review PR
        uses: actions/github-script@v7.0.1
        with:
          script: |
            await github.rest.pulls.createReview({
              pull_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              event: 'REQUEST_CHANGES',
              body: 'You have altered the generated proto files but they do not match what is expected.\nPlease run "script/api_protobuf/api_protobuf.py" and commit the changes.'
            })
      - if: success()
        name: Dismiss review
        uses: actions/github-script@v7.0.1
        with:
          script: |
            let reviews = await github.rest.pulls.listReviews({
              pull_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo
            });
            for (let review of reviews.data) {
              if (review.user.login === 'github-actions[bot]' && review.state === 'CHANGES_REQUESTED') {
                await github.rest.pulls.dismissReview({
                  pull_number: context.issue.number,
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  review_id: review.id,
                  message: 'Files now match the expected proto files.'
                });
              }
            }

63 .github/workflows/ci-docker.yml vendored
@@ -1,61 +1,40 @@
---
name: CI for docker images

# Only run when docker paths change

on:
  push:
    branches: [dev, beta, release]
    paths:
      - "docker/**"
      - ".github/workflows/ci-docker.yml"
      - "requirements*.txt"
      - "platformio.ini"
      - "script/platformio_install_deps.py"
      - 'docker/**'
      - '.github/workflows/**'

  pull_request:
    paths:
      - "docker/**"
      - ".github/workflows/ci-docker.yml"
      - "requirements*.txt"
      - "platformio.ini"
      - "script/platformio_install_deps.py"

permissions:
  contents: read
  packages: read

concurrency:
  # yamllint disable-line rule:line-length
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
      - 'docker/**'
      - '.github/workflows/**'

jobs:
  check-docker:
    name: Build docker containers
    runs-on: ${{ matrix.os }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        os: ["ubuntu-latest", "ubuntu-24.04-arm"]
        arch: [amd64, armv7, aarch64]
        build_type: ["ha-addon", "docker", "lint"]
    steps:
      - uses: actions/checkout@v4.1.7
      - name: Set up Python
        uses: actions/setup-python@v5.5.0
        with:
          python-version: "3.9"
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.10.0
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - name: Set TAG
        run: |
          echo "TAG=check" >> $GITHUB_ENV

      - name: Set TAG
        run: |
          echo "TAG=check" >> $GITHUB_ENV

      - name: Run build
        run: |
          docker/build.py \
            --tag "${TAG}" \
            --arch "${{ matrix.os == 'ubuntu-24.04-arm' && 'aarch64' || 'amd64' }}" \
            --build-type "${{ matrix.build_type }}" \
            build
      - name: Run build
        run: |
          docker/build.py \
            --tag "${TAG}" \
            --arch "${{ matrix.arch }}" \
            --build-type "${{ matrix.build_type }}" \
            build

567 .github/workflows/ci.yml vendored
@@ -1,4 +1,5 @@
---
# THESE JOBS ARE COPIED IN release.yml and release-dev.yml
# PLEASE ALSO UPDATE THOSE FILES WHEN CHANGING LINES HERE
name: CI

on:
@@ -6,503 +7,137 @@ on:
    branches: [dev, beta, release]

  pull_request:
    paths:
      - "**"
      - "!.github/workflows/*.yml"
      - "!.github/actions/build-image/*"
      - ".github/workflows/ci.yml"
      - "!.yamllint"
      - "!.github/dependabot.yml"
      - "!docker/**"
  merge_group:

permissions:
  contents: read

env:
  DEFAULT_PYTHON: "3.9"
  PYUPGRADE_TARGET: "--py39-plus"

concurrency:
  # yamllint disable-line rule:line-length
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  common:
    name: Create common environment
    runs-on: ubuntu-24.04
    outputs:
      cache-key: ${{ steps.cache-key.outputs.key }}
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Generate cache-key
        id: cache-key
        run: echo key="${{ hashFiles('requirements.txt', 'requirements_optional.txt', 'requirements_test.txt') }}" >> $GITHUB_OUTPUT
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: actions/setup-python@v5.5.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v4.2.3
        with:
          path: venv
          # yamllint disable-line rule:line-length
          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ steps.cache-key.outputs.key }}
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          python -m venv venv
          . venv/bin/activate
          python --version
          pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
          pip install -e .

  ruff:
    name: Check ruff
    runs-on: ubuntu-24.04
    needs:
      - common
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: Run Ruff
        run: |
          . venv/bin/activate
          ruff format esphome tests
      - name: Suggested changes
        run: script/ci-suggest-changes
        if: always()

  flake8:
    name: Check flake8
    runs-on: ubuntu-24.04
    needs:
      - common
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: Run flake8
        run: |
          . venv/bin/activate
          flake8 esphome
      - name: Suggested changes
        run: script/ci-suggest-changes
        if: always()

  pylint:
    name: Check pylint
    runs-on: ubuntu-24.04
    needs:
      - common
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: Run pylint
        run: |
          . venv/bin/activate
          pylint -f parseable --persistent=n esphome
      - name: Suggested changes
        run: script/ci-suggest-changes
        if: always()

  pyupgrade:
    name: Check pyupgrade
    runs-on: ubuntu-24.04
    needs:
      - common
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: Run pyupgrade
        run: |
          . venv/bin/activate
          pyupgrade ${{ env.PYUPGRADE_TARGET }} `find esphome -name "*.py" -type f`
      - name: Suggested changes
        run: script/ci-suggest-changes
        if: always()

  ci-custom:
    name: Run script/ci-custom
    runs-on: ubuntu-24.04
    needs:
      - common
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: Register matcher
        run: echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
      - name: Run script/ci-custom
        run: |
          . venv/bin/activate
          script/ci-custom.py
          script/build_codeowners.py --check

  pytest:
    name: Run pytest
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - "3.9"
          - "3.10"
          - "3.11"
          - "3.12"
        os:
          - ubuntu-latest
          - macOS-latest
          - windows-latest
        exclude:
          # Minimize CI resource usage
          # by only running the Python version
          # used for docker images on Windows and macOS
          - python-version: "3.12"
            os: windows-latest
          - python-version: "3.10"
            os: windows-latest
          - python-version: "3.9"
            os: windows-latest
          - python-version: "3.12"
            os: macOS-latest
          - python-version: "3.10"
            os: macOS-latest
          - python-version: "3.9"
            os: macOS-latest
    runs-on: ${{ matrix.os }}
    needs:
      - common
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ matrix.python-version }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: Register matcher
        run: echo "::add-matcher::.github/workflows/matchers/pytest.json"
      - name: Run pytest
        if: matrix.os == 'windows-latest'
        run: |
          ./venv/Scripts/activate
          pytest -vv --cov-report=xml --tb=native tests
      - name: Run pytest
        if: matrix.os == 'ubuntu-latest' || matrix.os == 'macOS-latest'
        run: |
          . venv/bin/activate
          pytest -vv --cov-report=xml --tb=native tests
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5.4.2
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

  clang-format:
    name: Check clang-format
    runs-on: ubuntu-24.04
    needs:
      - common
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: Install clang-format
        run: |
          . venv/bin/activate
          pip install clang-format -c requirements_dev.txt
      - name: Run clang-format
        run: |
          . venv/bin/activate
          script/clang-format -i
          git diff-index --quiet HEAD --
      - name: Suggested changes
        run: script/ci-suggest-changes
        if: always()

  clang-tidy:
  ci-with-container:
    name: ${{ matrix.name }}
    runs-on: ubuntu-24.04
    needs:
      - common
      - ruff
      - ci-custom
      - clang-format
      - flake8
      - pylint
      - pytest
      - pyupgrade
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        include:
          - id: clang-format
            name: Run script/clang-format
          - id: clang-tidy
            name: Run script/clang-tidy for ESP8266
            options: --environment esp8266-arduino-tidy --grep USE_ESP8266
            pio_cache_key: tidyesp8266
            name: Run script/clang-tidy 1/4
            split: 1
          - id: clang-tidy
            name: Run script/clang-tidy for ESP32 Arduino 1/4
            options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
            pio_cache_key: tidyesp32
            name: Run script/clang-tidy 2/4
            split: 2
          - id: clang-tidy
            name: Run script/clang-tidy for ESP32 Arduino 2/4
            options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
            pio_cache_key: tidyesp32
            name: Run script/clang-tidy 3/4
            split: 3
          - id: clang-tidy
            name: Run script/clang-tidy for ESP32 Arduino 3/4
            options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
            pio_cache_key: tidyesp32
          - id: clang-tidy
            name: Run script/clang-tidy for ESP32 Arduino 4/4
            options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
            pio_cache_key: tidyesp32
          - id: clang-tidy
            name: Run script/clang-tidy for ESP32 IDF
            options: --environment esp32-idf-tidy --grep USE_ESP_IDF
            pio_cache_key: tidyesp32-idf
            name: Run script/clang-tidy 4/4
            split: 4

    # cpp lint job runs with esphome-lint docker image so that clang-format-*
    # doesn't have to be installed
    container: ghcr.io/esphome/esphome-lint:1.1
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}

      - name: Cache platformio
        if: github.ref == 'refs/heads/dev'
        uses: actions/cache@v4.2.3
        with:
          path: ~/.platformio
          key: platformio-${{ matrix.pio_cache_key }}

      - name: Cache platformio
        if: github.ref != 'refs/heads/dev'
        uses: actions/cache/restore@v4.2.3
        with:
          path: ~/.platformio
          key: platformio-${{ matrix.pio_cache_key }}
      - uses: actions/checkout@v2

      - name: Register problem matchers
        run: |
          echo "::add-matcher::.github/workflows/matchers/gcc.json"
          echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
          echo "::add-matcher::.github/workflows/matchers/gcc.json"

      - name: Run 'pio run --list-targets -e esp32-idf-tidy'
        if: matrix.name == 'Run script/clang-tidy for ESP32 IDF'
      # Also run git-diff-index so that the step is marked as failed on formatting errors,
      # since clang-format doesn't do anything but change files if -i is passed.
      - name: Run clang-format
        run: |
          . venv/bin/activate
          mkdir -p .temp
          pio run --list-targets -e esp32-idf-tidy
          script/clang-format -i
          git diff-index --quiet HEAD --
        if: ${{ matrix.id == 'clang-format' }}

      - name: Run clang-tidy
        run: |
          . venv/bin/activate
          script/clang-tidy --all-headers --fix ${{ matrix.options }}
        env:
          # Also cache libdeps, store them in a ~/.platformio subfolder
          PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
        run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
        if: ${{ matrix.id == 'clang-tidy' }}

      - name: Suggested changes
        run: script/ci-suggest-changes
        # yamllint disable-line rule:line-length
        if: always()

  list-components:
    runs-on: ubuntu-24.04
    needs:
      - common
    if: github.event_name == 'pull_request'
    outputs:
      components: ${{ steps.list-components.outputs.components }}
      count: ${{ steps.list-components.outputs.count }}
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
        with:
          # Fetch enough history so `git merge-base refs/remotes/origin/dev HEAD` works.
          fetch-depth: 500
      - name: Get target branch
        id: target-branch
        run: |
          echo "branch=${{ github.event.pull_request.base.ref }}" >> $GITHUB_OUTPUT
      - name: Fetch ${{ steps.target-branch.outputs.branch }} branch
        run: |
          git -c protocol.version=2 fetch --no-tags --prune --no-recurse-submodules --depth=1 origin +refs/heads/${{ steps.target-branch.outputs.branch }}:refs/remotes/origin/${{ steps.target-branch.outputs.branch }}
          git merge-base refs/remotes/origin/${{ steps.target-branch.outputs.branch }} HEAD
      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: Find changed components
        id: list-components
        run: |
          . venv/bin/activate
          components=$(script/list-components.py --changed --branch ${{ steps.target-branch.outputs.branch }})
          output_components=$(echo "$components" | jq -R -s -c 'split("\n")[:-1] | map(select(length > 0))')
          count=$(echo "$output_components" | jq length)

          echo "components=$output_components" >> $GITHUB_OUTPUT
          echo "count=$count" >> $GITHUB_OUTPUT

          echo "$count Components:"
          echo "$output_components" | jq
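The `list-components` job above turns the newline-separated output of `script/list-components.py` into a compact JSON array plus a count via a jq pipeline, so later jobs can consume it as a matrix. A minimal Python equivalent of that transformation (illustration only, not part of the workflow file; the component names are hypothetical):

```python
import json

# Stand-in for the output of `script/list-components.py --changed ...`.
raw = "api\nsensor\nwifi\n"

# Equivalent of: jq -R -s -c 'split("\n")[:-1] | map(select(length > 0))'
components = [line for line in raw.split("\n")[:-1] if line]

output_components = json.dumps(components, separators=(",", ":"))
count = len(components)  # jq length

print(f"components={output_components}")  # -> components=["api","sensor","wifi"]
print(f"count={count}")                   # -> count=3
```
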
  test-build-components:
    name: Component test ${{ matrix.file }}
    runs-on: ubuntu-24.04
    needs:
      - common
      - list-components
    if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) > 0 && fromJSON(needs.list-components.outputs.count) < 100
  ci:
    # Don't use the esphome-lint docker image because it may contain outdated requirements.
    # This way, all dependencies are cached via the cache action.
    name: ${{ matrix.name }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        file: ${{ fromJson(needs.list-components.outputs.components) }}
    steps:
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install libsdl2-dev
        include:
          - id: ci-custom
            name: Run script/ci-custom
          - id: lint-python
            name: Run script/lint-python
          - id: test
            file: tests/test1.yaml
            name: Test tests/test1.yaml
          - id: test
            file: tests/test2.yaml
            name: Test tests/test2.yaml
          - id: test
            file: tests/test3.yaml
            name: Test tests/test3.yaml
          - id: test
            file: tests/test4.yaml
            name: Test tests/test4.yaml
          - id: test
            file: tests/test5.yaml
            name: Test tests/test5.yaml
          - id: pytest
            name: Run pytest

      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: test_build_components -e config -c ${{ matrix.file }}
        run: |
          . venv/bin/activate
          ./script/test_build_components -e config -c ${{ matrix.file }}
      - name: test_build_components -e compile -c ${{ matrix.file }}
        run: |
          . venv/bin/activate
          ./script/test_build_components -e compile -c ${{ matrix.file }}
          python-version: '3.7'

  test-build-components-splitter:
    name: Split components for testing into 20 groups maximum
    runs-on: ubuntu-24.04
    needs:
      - common
      - list-components
    if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
    outputs:
      matrix: ${{ steps.split.outputs.components }}
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Split components into 20 groups
        id: split
        run: |
          components=$(echo '${{ needs.list-components.outputs.components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
          echo "components=$components" >> $GITHUB_OUTPUT

  test-build-components-split:
    name: Test split components
    runs-on: ubuntu-24.04
    needs:
      - common
      - list-components
      - test-build-components-splitter
    if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
    steps:
      - name: List components
        run: echo ${{ matrix.components }}

      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install libsdl2-dev

      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Restore Python
        uses: ./.github/actions/restore-python
      - name: Cache pip modules
        uses: actions/cache@v1
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}
      - name: Validate config
        run: |
          . venv/bin/activate
          for component in ${{ matrix.components }}; do
            ./script/test_build_components -e config -c $component
          done
      - name: Compile config
        run: |
          . venv/bin/activate
          mkdir build_cache
          export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
          for component in ${{ matrix.components }}; do
            ./script/test_build_components -e compile -c $component
          done
          path: ~/.cache/pip
          key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
          restore-keys: |
            esphome-pip-3.7-

  ci-status:
    name: CI Status
    runs-on: ubuntu-24.04
    needs:
      - common
      - ruff
      - ci-custom
      - clang-format
      - flake8
      - pylint
      - pytest
      - pyupgrade
      - clang-tidy
      - list-components
      - test-build-components
      - test-build-components-splitter
      - test-build-components-split
    if: always()
    steps:
      - name: Success
        if: ${{ !(contains(needs.*.result, 'failure')) }}
        run: exit 0
      - name: Failure
        if: ${{ contains(needs.*.result, 'failure') }}
        env:
          JSON_DOC: ${{ toJSON(needs) }}
      # Use per test platformio cache because tests have different platform versions
      - name: Cache ~/.platformio
        uses: actions/cache@v1
        with:
          path: ~/.platformio
          key: test-home-platformio-${{ matrix.file }}-${{ hashFiles('esphome/core/config.py') }}
          restore-keys: |
            test-home-platformio-${{ matrix.file }}-
        if: ${{ matrix.id == 'test' }}

      - name: Set up python environment
        run: script/setup

      - name: Register problem matchers
        run: |
          echo $JSON_DOC | jq
          exit 1
          echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
          echo "::add-matcher::.github/workflows/matchers/lint-python.json"
          echo "::add-matcher::.github/workflows/matchers/python.json"
          echo "::add-matcher::.github/workflows/matchers/pytest.json"
          echo "::add-matcher::.github/workflows/matchers/gcc.json"

      - name: Lint Custom
        run: |
          script/ci-custom.py
          script/build_codeowners.py --check
        if: ${{ matrix.id == 'ci-custom' }}
      - name: Lint Python
        run: script/lint-python
        if: ${{ matrix.id == 'lint-python' }}

      - run: esphome compile ${{ matrix.file }}
        if: ${{ matrix.id == 'test' }}

      - name: Run pytest
        run: |
          pytest -vv --tb=native tests
        if: ${{ matrix.id == 'pytest' }}

91 .github/workflows/codeql.yml vendored
@@ -1,91 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL Advanced"

on:
  workflow_dispatch:
  schedule:
    - cron: "30 18 * * 4"

jobs:
  analyze:
    name: Analyze (${{ matrix.language }})
    # Runner size impacts CodeQL analysis time. To learn more, please see:
    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
    #   - https://gh.io/supported-runners-and-hardware-resources
    #   - https://gh.io/using-larger-runners (GitHub.com only)
    # Consider using larger runners or machines with greater resources for possible analysis time improvements.
    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
    permissions:
      # required for all workflows
      security-events: write

      # required to fetch internal or private CodeQL packs
      packages: read

      # only required for workflows in private repositories
      actions: read
      contents: read

    strategy:
      fail-fast: false
      matrix:
        include:
          # - language: c-cpp
          #   build-mode: autobuild
          - language: python
            build-mode: none
        # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
        # Use `c-cpp` to analyze code written in C, C++ or both
        # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
        # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
        # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
        # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
        # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
        # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          build-mode: ${{ matrix.build-mode }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.

          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality

      # If the analyze step fails for one of the languages you are analyzing with
      # "We were unable to automatically build your code", modify the matrix above
      # to set the build mode to "manual" for that language. Then modify this step
      # to build your code.
      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
      - if: matrix.build-mode == 'manual'
        shell: bash
        run: |
          echo 'If you are using a "manual" build mode for one or more of the' \
            'languages you are analyzing, replace this with the commands to build' \
            'your code, for example:'
          echo '  make bootstrap'
          echo '  make release'
          exit 1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"

100 .github/workflows/docker-lint-build.yml (vendored, new file)
@@ -0,0 +1,100 @@
name: Build and publish lint docker image

# Only run when docker paths change
on:
  push:
    branches: [dev]
    paths:
      - 'docker/Dockerfile.lint'
      - 'requirements.txt'
      - 'requirements_optional.txt'
      - 'requirements_test.txt'
      - 'platformio.ini'
      - '.github/workflows/docker-lint-build.yml'

jobs:
  deploy-docker:
    name: Build and publish docker containers
    if: github.repository == 'esphome/esphome'
    runs-on: ubuntu-latest
    strategy:
      matrix:
        arch: [amd64, armv7, aarch64]
        build_type: ["lint"]
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - name: Set TAG
        run: |
          echo "TAG=1.1" >> $GITHUB_ENV

      - name: Run build
        run: |
          docker/build.py \
            --tag "${TAG}" \
            --arch "${{ matrix.arch }}" \
            --build-type "${{ matrix.build_type }}" \
            build

      - name: Log in to docker hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Run push
        run: |
          docker/build.py \
            --tag "${TAG}" \
            --arch "${{ matrix.arch }}" \
            --build-type "${{ matrix.build_type }}" \
            push

  deploy-docker-manifest:
    if: github.repository == 'esphome/esphome'
    runs-on: ubuntu-latest
    needs: [deploy-docker]
    strategy:
      matrix:
        build_type: ["lint"]
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - name: Set TAG
        run: |
          echo "TAG=1.1" >> $GITHUB_ENV
      - name: Enable experimental manifest support
        run: |
          mkdir -p ~/.docker
          echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json

      - name: Log in to docker hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Run manifest
        run: |
          docker/build.py \
            --tag "${TAG}" \
            --build-type "${{ matrix.build_type }}" \
            manifest
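The docker/build.py entrypoint used above can also be exercised locally; a sketch, assuming Docker is available and the repository is checked out (tag and arch values are illustrative):

    python3 docker/build.py --tag 1.1 --arch amd64 --build-type lint build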
28 .github/workflows/lock.yml (vendored)
@@ -1,28 +0,0 @@
---
name: Lock

on:
  schedule:
    - cron: "30 0 * * *"
  workflow_dispatch:

permissions:
  issues: write
  pull-requests: write

concurrency:
  group: lock

jobs:
  lock:
    runs-on: ubuntu-latest
    steps:
      - uses: dessant/lock-threads@v5.0.1
        with:
          pr-inactive-days: "1"
          pr-lock-reason: ""
          exclude-any-pr-labels: keep-open

          issue-inactive-days: "7"
          issue-lock-reason: ""
          exclude-any-issue-labels: keep-open
2 .github/workflows/matchers/ci-custom.json (vendored)
@@ -4,7 +4,7 @@
      "owner": "ci-custom",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s+lint:\\s+(.*)$",
          "regexp": "^ERROR (.*):(\\d+):(\\d+) - (.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
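Illustrative sample lines the two regexp variants above would match (file path and message are hypothetical):

    esphome/components/foo/foo.py:10:5: lint: some lint message
    ERROR esphome/components/foo/foo.py:10:5 - some lint message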
2 .github/workflows/matchers/gcc.json (vendored)
@@ -5,7 +5,7 @@
      "severity": "error",
      "pattern": [
        {
          "regexp": "^src/(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
          "regexp": "^(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
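Illustrative compiler output the two regexp variants above would match; the first requires the `src/` prefix, the second does not (paths and messages are hypothetical):

    src/esphome/core/helpers.cpp:120:15: error: expected ';' before '}' token
    esphome/core/helpers.cpp:120:15: warning: unused variable 'x'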
15 .github/workflows/matchers/lint-python.json (vendored)
@@ -1,22 +1,11 @@
{
  "problemMatcher": [
    {
      "owner": "ruff",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*): (Please format this file with the ruff formatter)",
          "file": 1,
          "message": 2
        }
      ]
    },
    {
      "owner": "flake8",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+): ([EFCDNW]\\d{3}.*)$",
          "regexp": "^(.*):(\\d+) - ([EFCDNW]\\d{3}.*)$",
          "file": 1,
          "line": 2,
          "message": 3
@@ -28,7 +17,7 @@
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+): (\\[[EFCRW]\\d{4}\\(.*\\),.*\\].*)$",
          "regexp": "^(.*):(\\d+) - (\\[[EFCRW]\\d{4}\\(.*\\),.*\\].*)$",
          "file": 1,
          "line": 2,
          "message": 3
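Illustrative tool output the patterns above would match (file names and messages are hypothetical):

    esphome/config.py: Please format this file with the ruff formatter
    esphome/config.py:10: E501 line too long                            (flake8, colon variant)
    esphome/config.py:10 - E501 line too long                           (flake8, dash variant)
    esphome/config.py:10: [E1101(no-member), Foo.bar] Instance of 'Foo' has no 'bar' member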
24 .github/workflows/needs-docs.yml (vendored)
@@ -1,24 +0,0 @@
name: Needs Docs

on:
  pull_request:
    types: [labeled, unlabeled]

jobs:
  check:
    name: Check
    runs-on: ubuntu-latest
    steps:
      - name: Check for needs-docs label
        uses: actions/github-script@v7.0.1
        with:
          script: |
            const { data: labels } = await github.rest.issues.listLabelsOnIssue({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number
            });
            const needsDocs = labels.find(label => label.name === 'needs-docs');
            if (needsDocs) {
              core.setFailed('Pull request needs docs');
            }
270 .github/workflows/release.yml (vendored)
@@ -1,4 +1,3 @@
---
name: Publish Release

on:
@@ -8,238 +7,141 @@ on:
  schedule:
    - cron: "0 2 * * *"

permissions:
  contents: read

jobs:
  init:
    name: Initialize build
    runs-on: ubuntu-latest
    outputs:
      tag: ${{ steps.tag.outputs.tag }}
      branch_build: ${{ steps.tag.outputs.branch_build }}
    steps:
      - uses: actions/checkout@v4.1.7
      - uses: actions/checkout@v2
      - name: Get tag
        id: tag
        # yamllint disable rule:line-length
        run: |
          if [[ "${{ github.event_name }}" = "release" ]]; then
            TAG="${{ github.event.release.tag_name}}"
            BRANCH_BUILD="false"
          if [[ "$GITHUB_EVENT_NAME" = "release" ]]; then
            TAG="${GITHUB_REF#refs/tags/}"
          else
            TAG=$(cat esphome/const.py | sed -n -E "s/^__version__\s+=\s+\"(.+)\"$/\1/p")
            today="$(date --utc '+%Y%m%d')"
            TAG="${TAG}${today}"
            BRANCH=${GITHUB_REF#refs/heads/}
            if [[ "$BRANCH" != "dev" ]]; then
              TAG="${TAG}-${BRANCH}"
              BRANCH_BUILD="true"
            else
              BRANCH_BUILD="false"
            fi
          fi
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
          echo "branch_build=${BRANCH_BUILD}" >> $GITHUB_OUTPUT
        # yamllint enable rule:line-length
          echo "::set-output name=tag::${TAG}"
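For orientation, a sketch of what the "Get tag" step computes on a scheduled run, assuming esphome/const.py contains `__version__ = "2021.9.0-dev"` and the date is 2021-09-05 (values are illustrative):

    TAG=$(sed -n -E "s/^__version__\s+=\s+\"(.+)\"$/\1/p" esphome/const.py)  # -> 2021.9.0-dev
    TAG="${TAG}$(date --utc '+%Y%m%d')"                                      # -> 2021.9.0-dev20210905
    # on a non-dev branch "fix-x" the dev-side logic also appends the branch: 2021.9.0-dev20210905-fix-x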
  deploy-pypi:
    name: Build and publish to PyPi
    if: github.repository == 'esphome/esphome' && github.event_name == 'release'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
    steps:
      - uses: actions/checkout@v4.1.7
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v5.5.0
        uses: actions/setup-python@v1
        with:
          python-version: "3.x"
          python-version: '3.x'
      - name: Set up python environment
        env:
          ESPHOME_NO_VENV: 1
        run: script/setup
        run: |
          script/setup
          pip install setuptools wheel twine
      - name: Build
        run: |-
          pip3 install build
          python3 -m build
      - name: Publish
        uses: pypa/gh-action-pypi-publish@v1.12.4
        run: python setup.py sdist bdist_wheel
      - name: Upload
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
        run: twine upload dist/*
  deploy-docker:
    name: Build ESPHome ${{ matrix.platform }}
    name: Build and publish docker containers
    if: github.repository == 'esphome/esphome'
    permissions:
      contents: read
      packages: write
    runs-on: ubuntu-latest
    needs: [init]
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
        arch: [amd64, armv7, aarch64]
        build_type: ["ha-addon", "docker"]
    steps:
      - uses: actions/checkout@v4.1.7
      - name: Set up Python
        uses: actions/setup-python@v5.5.0
        with:
          python-version: "3.9"
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.10.0
      - name: Set up QEMU
        if: matrix.platform != 'linux/amd64'
        uses: docker/setup-qemu-action@v3.6.0
      - name: Run build
        run: |
          docker/build.py \
            --tag "${{ needs.init.outputs.tag }}" \
            --arch "${{ matrix.arch }}" \
            --build-type "${{ matrix.build_type }}" \
            build

      - name: Log in to docker hub
        uses: docker/login-action@v3.4.0
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
        uses: docker/login-action@v3.4.0
        with:
      - name: Log in to docker hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build docker
        uses: ./.github/actions/build-image
        with:
          platform: ${{ matrix.platform }}
          target: docker
          baseimg: docker
          suffix: ""
          version: ${{ needs.init.outputs.tag }}
      - name: Run push
        run: |
          docker/build.py \
            --tag "${{ needs.init.outputs.tag }}" \
            --arch "${{ matrix.arch }}" \
            --build-type "${{ matrix.build_type }}" \
            push

      - name: Build ha-addon
        uses: ./.github/actions/build-image
        with:
          platform: ${{ matrix.platform }}
          target: hassio
          baseimg: hassio
          suffix: "hassio"
          version: ${{ needs.init.outputs.tag }}

      - name: Build lint
        uses: ./.github/actions/build-image
        with:
          platform: ${{ matrix.platform }}
          target: lint
          baseimg: docker
          suffix: lint
          version: ${{ needs.init.outputs.tag }}

      - name: Sanitize platform name
        id: sanitize
        run: |
          echo "${{ matrix.platform }}" | sed 's|/|-|g' > /tmp/platform
          echo name=$(cat /tmp/platform) >> $GITHUB_OUTPUT

      - name: Upload digests
        uses: actions/upload-artifact@v4.6.2
        with:
          name: digests-${{ steps.sanitize.outputs.name }}
          path: /tmp/digests
          retention-days: 1
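Illustrative only: the "Sanitize platform name" step maps a matrix platform to an artifact-safe name, e.g.

    echo "linux/arm64" | sed 's|/|-|g'   # -> linux-arm64, giving artifact name digests-linux-arm64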
  deploy-manifest:
    name: Publish ESPHome ${{ matrix.image.title }} to ${{ matrix.registry }}
    runs-on: ubuntu-latest
    needs:
      - init
      - deploy-docker
  deploy-docker-manifest:
    if: github.repository == 'esphome/esphome'
    permissions:
      contents: read
      packages: write
    runs-on: ubuntu-latest
    needs: [init, deploy-docker]
    strategy:
      fail-fast: false
      matrix:
        image:
          - title: "ha-addon"
            target: "hassio"
            suffix: "hassio"
          - title: "docker"
            target: "docker"
            suffix: ""
          - title: "lint"
            target: "lint"
            suffix: "lint"
        registry:
          - ghcr
          - dockerhub
        build_type: ["ha-addon", "docker"]
    steps:
      - uses: actions/checkout@v4.1.7
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - name: Enable experimental manifest support
        run: |
          mkdir -p ~/.docker
          echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json

      - name: Download digests
        uses: actions/download-artifact@v4.2.1
        with:
          pattern: digests-*
          path: /tmp/digests
          merge-multiple: true

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.10.0

      - name: Log in to docker hub
        if: matrix.registry == 'dockerhub'
        uses: docker/login-action@v3.4.0
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
        if: matrix.registry == 'ghcr'
        uses: docker/login-action@v3.4.0
        with:
      - name: Log in to docker hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate short tags
        id: tags
        run: |
          output=$(docker/generate_tags.py \
            --tag "${{ needs.init.outputs.tag }}" \
            --suffix "${{ matrix.image.suffix }}" \
            --registry "${{ matrix.registry }}")
          echo $output
          for l in $output; do
            echo $l >> $GITHUB_OUTPUT
          done
      - name: Run manifest
        run: |
          docker/build.py \
            --tag "${{ needs.init.outputs.tag }}" \
            --build-type "${{ matrix.build_type }}" \
            manifest

      - name: Create manifest list and push
        working-directory: /tmp/digests/${{ matrix.image.target }}/${{ matrix.registry }}
        run: |
          docker buildx imagetools create $(jq -Rcnr 'inputs | . / "," | map("-t " + .) | join(" ")' <<< "${{ steps.tags.outputs.tags}}") \
            $(printf '${{ steps.tags.outputs.image }}@sha256:%s ' *)
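A sketch of how the jq expression in the final step expands the generated tag list into `-t` flags for `docker buildx imagetools create` (tag values are illustrative):

    jq -Rcnr 'inputs | . / "," | map("-t " + .) | join(" ")' <<< "esphome/esphome:dev,esphome/esphome:latest"
    # -> -t esphome/esphome:dev -t esphome/esphome:latest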
  deploy-ha-addon-repo:
    if: github.repository == 'esphome/esphome' && needs.init.outputs.branch_build == 'false'
  deploy-hassio-repo:
    if: github.repository == 'esphome/esphome' && github.event_name == 'release'
    runs-on: ubuntu-latest
    needs:
      - init
      - deploy-manifest
    needs: [deploy-docker]
    steps:
      - name: Trigger Workflow
        uses: actions/github-script@v7.0.1
        with:
          github-token: ${{ secrets.DEPLOY_HA_ADDON_REPO_TOKEN }}
          script: |
            let description = "ESPHome";
            if (context.eventName == "release") {
              description = ${{ toJSON(github.event.release.body) }};
            }
            github.rest.actions.createWorkflowDispatch({
              owner: "esphome",
              repo: "home-assistant-addon",
              workflow_id: "bump-version.yml",
              ref: "main",
              inputs: {
                version: "${{ needs.init.outputs.tag }}",
                content: description
              }
            })
      - env:
          TOKEN: ${{ secrets.DEPLOY_HASSIO_TOKEN }}
        run: |
          TAG="${GITHUB_REF#refs/tags/}"
          curl \
            -u ":$TOKEN" \
            -X POST \
            -H "Accept: application/vnd.github.v3+json" \
            https://api.github.com/repos/esphome/hassio/actions/workflows/bump-version.yml/dispatches \
            -d "{\"ref\":\"main\",\"inputs\":{\"version\":\"$TAG\"}}"
50 .github/workflows/stale.yml (vendored)
@@ -1,50 +0,0 @@
---
name: Stale

on:
  schedule:
    - cron: "30 0 * * *"
  workflow_dispatch:

permissions:
  issues: write
  pull-requests: write

concurrency:
  group: lock

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v9.1.0
        with:
          days-before-pr-stale: 90
          days-before-pr-close: 7
          days-before-issue-stale: -1
          days-before-issue-close: -1
          remove-stale-when-updated: true
          stale-pr-label: "stale"
          exempt-pr-labels: "not-stale"
          stale-pr-message: >
            There hasn't been any activity on this pull request recently. This
            pull request has been automatically marked as stale because of that
            and will be closed if no further activity occurs within 7 days.
            Thank you for your contributions.

  # Use stale to automatically close issues with a
  # reference to the issue tracker
  close-issues:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v9.1.0
        with:
          days-before-pr-stale: -1
          days-before-pr-close: -1
          days-before-issue-stale: 1
          days-before-issue-close: 1
          remove-stale-when-updated: true
          stale-issue-label: "stale"
          exempt-issue-labels: "not-stale"
          stale-issue-message: >
            https://github.com/esphome/esphome/issues/430
48 .github/workflows/sync-device-classes.yml (vendored)
@@ -1,48 +0,0 @@
---
name: Synchronise Device Classes from Home Assistant

on:
  workflow_dispatch:
  schedule:
    - cron: "45 6 * * *"

jobs:
  sync:
    name: Sync Device Classes
    runs-on: ubuntu-latest
    if: github.repository == 'esphome/esphome'
    steps:
      - name: Checkout
        uses: actions/checkout@v4.1.7

      - name: Checkout Home Assistant
        uses: actions/checkout@v4.1.7
        with:
          repository: home-assistant/core
          path: lib/home-assistant

      - name: Setup Python
        uses: actions/setup-python@v5.5.0
        with:
          python-version: 3.12

      - name: Install Home Assistant
        run: |
          python -m pip install --upgrade pip
          pip install -e lib/home-assistant

      - name: Sync
        run: |
          python ./script/sync-device_class.py

      - name: Commit changes
        uses: peter-evans/create-pull-request@v7.0.8
        with:
          commit-message: "Synchronise Device Classes from Home Assistant"
          committer: esphomebot <esphome@openhomefoundation.org>
          author: esphomebot <esphome@openhomefoundation.org>
          branch: sync/device-classes
          delete-branch: true
          title: "Synchronise Device Classes from Home Assistant"
          body-path: .github/PULL_REQUEST_TEMPLATE.md
          token: ${{ secrets.DEVICE_CLASS_SYNC_TOKEN }}
25 .github/workflows/yaml-lint.yml (vendored)
@@ -1,25 +0,0 @@
---
name: YAML lint

on:
  push:
    branches: [dev, beta, release]
    paths:
      - "**.yaml"
      - "**.yml"
  pull_request:
    paths:
      - "**.yaml"
      - "**.yml"

jobs:
  yamllint:
    name: yamllint
    runs-on: ubuntu-latest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Run yamllint
        uses: frenck/action-yamllint@v1.5.0
        with:
          strict: true
24 .gitignore (vendored)
@@ -13,12 +13,6 @@ __pycache__/
# Intellij Idea
.idea

# Eclipse
.project
.cproject
.pydevproject
.settings/

# Vim
*.swp

@@ -75,9 +69,6 @@ cov.xml
# pyenv
.python-version

# asdf
.tool-versions

# Environments
.env
.venv
@@ -86,7 +77,6 @@ venv/
ENV/
env.bak/
venv.bak/
venv-*/

# mypy
.mypy_cache/
@@ -112,7 +102,10 @@ CMakeLists.txt
.idea/**/dynamic.xml

# CMake
cmake-build-*/
cmake-build-debug/
cmake-build-livingroom8266/
cmake-build-livingroom32/
cmake-build-release/

CMakeCache.txt
CMakeFiles
@@ -134,12 +127,3 @@ tests/.esphome/
/.temp-clang-tidy.cpp
/.temp/
.pio/

sdkconfig.*
!sdkconfig.defaults

.tests/

/components
/managed_components
6 .gitpod.yml (new file)
@@ -0,0 +1,6 @@
ports:
  - port: 6052
    onOpen: open-preview
tasks:
  - before: pyenv local $(pyenv version | grep '^3\.' | cut -d ' ' -f 1) && script/setup
    command: python -m esphome config dashboard
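Illustrative only: if `pyenv version` prints `3.9.2 (set by /workspace/.python-version)`, the `before` pipeline above reduces to:

    pyenv version | grep '^3\.' | cut -d ' ' -f 1   # -> 3.9.2
    pyenv local 3.9.2 && script/setup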
.pre-commit-config.yaml
@@ -1,22 +1,20 @@
---
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.11.0
  - repo: https://github.com/ambv/black
    rev: 20.8b1
    hooks:
      # Run the linter.
      - id: ruff
        args: [--fix]
      # Run the formatter.
      - id: ruff-format
  - repo: https://github.com/PyCQA/flake8
    rev: 7.2.0
      - id: black
        args:
          - --safe
          - --quiet
        files: ^((esphome|script|tests)/.+)?[^/]+\.py$
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.4
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-docstrings==1.7.0
          - flake8-docstrings==1.5.0
          - pydocstyle==5.1.1
        files: ^(esphome|tests)/.+\.py$
  - repo: https://github.com/pre-commit/pre-commit-hooks
@@ -27,24 +25,3 @@ repos:
      - --branch=dev
      - --branch=release
      - --branch=beta
  - repo: https://github.com/asottile/pyupgrade
    rev: v3.15.2
    hooks:
      - id: pyupgrade
        args: [--py39-plus]
  - repo: https://github.com/adrienverge/yamllint.git
    rev: v1.35.1
    hooks:
      - id: yamllint
  - repo: https://github.com/pre-commit/mirrors-clang-format
    rev: v13.0.1
    hooks:
      - id: clang-format
        types_or: [c, c++]
  - repo: local
    hooks:
      - id: pylint
        name: pylint
        entry: python3 script/run-in-env.py pylint
        language: system
        types: [python]
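Either side of this config is driven the same way locally; a sketch using the standard pre-commit CLI:

    pip install pre-commit
    pre-commit install          # run the hooks on every git commit
    pre-commit run --all-files  # run all hooks once across the whole tree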
33 .vscode/tasks.json (vendored)
@@ -2,24 +2,15 @@
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Dashboard",
      "label": "run",
      "type": "shell",
      "command": "${command:python.interpreterPath}",
      "args": [
        "-m",
        "esphome",
        "dashboard",
        "config/"
      ],
      "command": "python3 -m esphome dashboard config/",
      "problemMatcher": []
    },
    {
      "label": "clang-tidy",
      "type": "shell",
      "command": "${command:python.interpreterPath}",
      "args": [
        "./script/clang-tidy"
      ],
      "command": "./script/clang-tidy",
      "problemMatcher": [
        {
          "owner": "clang-tidy",
@@ -36,24 +27,6 @@
          ]
        }
      ]
    },
    {
      "label": "Generate proto files",
      "type": "shell",
      "command": "${command:python.interpreterPath}",
      "args": [
        "./script/api_protobuf/api_protobuf.py"
      ],
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "never",
        "close": true,
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}
19 .yamllint
@@ -1,19 +0,0 @@
---
extends: default

ignore-from-file: .gitignore

rules:
  document-start: disable
  empty-lines:
    level: error
    max: 1
    max-start: 0
    max-end: 1
  indentation:
    level: error
    spaces: 2
    indent-sequences: true
    check-multi-line-strings: false
  line-length: disable
  truthy: disable
375 CODEOWNERS
@@ -6,258 +6,71 @@
# the integration's code owner is automatically notified.

# Core Code
pyproject.toml @esphome/core
setup.py @esphome/core
esphome/*.py @esphome/core
esphome/core/* @esphome/core

# Integrations
esphome/components/a01nyub/* @MrSuicideParrot
esphome/components/a02yyuw/* @TH-Braemer
esphome/components/absolute_humidity/* @DAVe3283
esphome/components/ac_dimmer/* @glmnet
esphome/components/adc/* @esphome/core
esphome/components/adc128s102/* @DeerMaximum
esphome/components/addressable_light/* @justfalter
esphome/components/ade7880/* @kpfleming
esphome/components/ade7953/* @angelnu
esphome/components/ade7953_i2c/* @angelnu
esphome/components/ade7953_spi/* @angelnu
esphome/components/ads1118/* @solomondg1
esphome/components/ags10/* @mak-42
esphome/components/aic3204/* @kbx81
esphome/components/airthings_ble/* @jeromelaban
esphome/components/airthings_wave_base/* @jeromelaban @kpfleming @ncareau
esphome/components/airthings_wave_mini/* @ncareau
esphome/components/airthings_wave_plus/* @jeromelaban
esphome/components/alarm_control_panel/* @grahambrown11 @hwstar
esphome/components/alpha3/* @jan-hofmeier
esphome/components/am2315c/* @swoboda1337
esphome/components/am43/* @buxtronix
esphome/components/am43/cover/* @buxtronix
esphome/components/am43/sensor/* @buxtronix
esphome/components/analog_threshold/* @ianchi
esphome/components/animation/* @syndlex
esphome/components/anova/* @buxtronix
esphome/components/apds9306/* @aodrenah
esphome/components/api/* @OttoWinter
esphome/components/as5600/* @ammmze
esphome/components/as5600/sensor/* @ammmze
esphome/components/as7341/* @mrgnr
esphome/components/async_tcp/* @OttoWinter
esphome/components/at581x/* @X-Ryl669
esphome/components/atc_mithermometer/* @ahpohl
esphome/components/atm90e26/* @danieltwagner
esphome/components/atm90e32/* @circuitsetup @descipher
esphome/components/audio/* @kahrendt
esphome/components/audio_adc/* @kbx81
esphome/components/audio_dac/* @kbx81
esphome/components/axs15231/* @clydebarrow
esphome/components/b_parasite/* @rbaron
esphome/components/ballu/* @bazuchan
esphome/components/bang_bang/* @OttoWinter
esphome/components/bedjet/* @jhansche
esphome/components/bedjet/climate/* @jhansche
esphome/components/bedjet/fan/* @jhansche
esphome/components/bedjet/sensor/* @javawizard @jhansche
esphome/components/beken_spi_led_strip/* @Mat931
esphome/components/bh1750/* @OttoWinter
esphome/components/binary_sensor/* @esphome/core
esphome/components/bk72xx/* @kuba2k2
esphome/components/bl0906/* @athom-tech @jesserockz @tarontop
esphome/components/bl0939/* @ziceva
esphome/components/bl0940/* @tobias-
esphome/components/bl0942/* @dbuezas @dwmw2
esphome/components/ble_client/* @buxtronix @clydebarrow
esphome/components/bluetooth_proxy/* @jesserockz
esphome/components/bme280_base/* @esphome/core
esphome/components/bme280_spi/* @apbodrov
esphome/components/ble_client/* @buxtronix
esphome/components/bme680_bsec/* @trvrnrth
esphome/components/bme68x_bsec2/* @kbx81 @neffs
esphome/components/bme68x_bsec2_i2c/* @kbx81 @neffs
esphome/components/bmi160/* @flaviut
esphome/components/bmp280_base/* @ademuri
esphome/components/bmp280_i2c/* @ademuri
esphome/components/bmp280_spi/* @ademuri
esphome/components/bmp3xx/* @latonita
esphome/components/bmp3xx_base/* @latonita @martgras
esphome/components/bmp3xx_i2c/* @latonita
esphome/components/bmp3xx_spi/* @latonita
esphome/components/bmp581/* @kahrendt
esphome/components/bp1658cj/* @Cossid
esphome/components/bp5758d/* @Cossid
esphome/components/button/* @esphome/core
esphome/components/bytebuffer/* @clydebarrow
esphome/components/canbus/* @danielschramm @mvturnho
esphome/components/cap1188/* @mreditor97
esphome/components/captive_portal/* @OttoWinter
esphome/components/ccs811/* @habbie
esphome/components/cd74hc4067/* @asoehlke
esphome/components/ch422g/* @clydebarrow @jesterret
esphome/components/chsc6x/* @kkosik20
esphome/components/climate/* @esphome/core
esphome/components/climate_ir/* @glmnet
esphome/components/color_temperature/* @jesserockz
esphome/components/combination/* @Cat-Ion @kahrendt
esphome/components/coolix/* @glmnet
esphome/components/copy/* @OttoWinter
esphome/components/cover/* @esphome/core
esphome/components/cs5460a/* @balrog-kun
esphome/components/cse7761/* @berfenger
esphome/components/cst226/* @clydebarrow
esphome/components/cst816/* @clydebarrow
esphome/components/ct_clamp/* @jesserockz
esphome/components/current_based/* @djwmarcx
esphome/components/dac7678/* @NickB1
esphome/components/daikin_arc/* @MagicBear
esphome/components/daikin_brc/* @hagak
esphome/components/dallas_temp/* @ssieb
esphome/components/daly_bms/* @s1lvi0
esphome/components/dashboard_import/* @esphome/core
esphome/components/datetime/* @jesserockz @rfdarter
esphome/components/debug/* @OttoWinter
esphome/components/delonghi/* @grob6000
esphome/components/dfplayer/* @glmnet
esphome/components/dfrobot_sen0395/* @niklasweber
esphome/components/dht/* @OttoWinter
esphome/components/display_menu_base/* @numo68
esphome/components/dps310/* @kbx81
esphome/components/ds1307/* @badbadc0ffee
esphome/components/dsmr/* @glmnet @zuidwijk
esphome/components/duty_time/* @dudanov
esphome/components/ee895/* @Stock-M
esphome/components/ektf2232/touchscreen/* @jesserockz
esphome/components/emc2101/* @ellull
esphome/components/emmeti/* @E440QF
esphome/components/ens160/* @latonita
esphome/components/ens160_base/* @latonita @vincentscode
esphome/components/ens160_i2c/* @latonita
esphome/components/ens160_spi/* @latonita
esphome/components/ens210/* @itn3rd77
esphome/components/es7210/* @kahrendt
esphome/components/es7243e/* @kbx81
esphome/components/es8156/* @kbx81
esphome/components/es8311/* @kahrendt @kroimon
esphome/components/esp32/* @esphome/core
esphome/components/esp32_ble/* @Rapsssito @jesserockz
esphome/components/esp32_ble_client/* @jesserockz
esphome/components/esp32_ble_server/* @Rapsssito @clydebarrow @jesserockz
esphome/components/esp32_camera_web_server/* @ayufan
esphome/components/esp32_can/* @Sympatron
esphome/components/esp32_ble/* @jesserockz
esphome/components/esp32_ble_server/* @jesserockz
esphome/components/esp32_improv/* @jesserockz
esphome/components/esp32_rmt/* @jesserockz
esphome/components/esp32_rmt_led_strip/* @jesserockz
esphome/components/esp8266/* @esphome/core
esphome/components/ethernet_info/* @gtjadsonsantos
esphome/components/event/* @nohat
esphome/components/event_emitter/* @Rapsssito
esphome/components/exposure_notifications/* @OttoWinter
esphome/components/ezo/* @ssieb
esphome/components/ezo_pmp/* @carlos-sarmiento
esphome/components/factory_reset/* @anatoly-savchenkov
esphome/components/fastled_base/* @OttoWinter
esphome/components/feedback/* @ianchi
esphome/components/fingerprint_grow/* @OnFreund @alexborro @loongyh
esphome/components/font/* @clydebarrow @esphome/core
esphome/components/fs3000/* @kahrendt
esphome/components/ft5x06/* @clydebarrow
esphome/components/ft63x6/* @gpambrozio
esphome/components/gcja5/* @gcormier
esphome/components/gdk101/* @Szewcson
esphome/components/fingerprint_grow/* @OnFreund @loongyh
esphome/components/globals/* @esphome/core
esphome/components/gp2y1010au0f/* @zry98
esphome/components/gp8403/* @jesserockz
esphome/components/gpio/* @esphome/core
esphome/components/gpio/one_wire/* @ssieb
esphome/components/gps/* @coogle
esphome/components/graph/* @synco
esphome/components/graphical_display_menu/* @MrMDavidson
esphome/components/gree/* @orestismers
esphome/components/grove_gas_mc_v2/* @YorkshireIoT
esphome/components/grove_tb6612fng/* @max246
esphome/components/growatt_solar/* @leeuwte
esphome/components/gt911/* @clydebarrow @jesserockz
esphome/components/haier/* @paveldn
esphome/components/haier/binary_sensor/* @paveldn
esphome/components/haier/button/* @paveldn
esphome/components/haier/sensor/* @paveldn
esphome/components/haier/switch/* @paveldn
esphome/components/haier/text_sensor/* @paveldn
esphome/components/havells_solar/* @sourabhjaiswal
esphome/components/hbridge/fan/* @WeekendWarrior
esphome/components/hbridge/light/* @DotNetDann
esphome/components/hbridge/switch/* @dwmw2
esphome/components/he60r/* @clydebarrow
esphome/components/heatpumpir/* @rob-deutsch
esphome/components/hitachi_ac424/* @sourabhjaiswal
esphome/components/hm3301/* @freekode
esphome/components/hmac_md5/* @dwmw2
esphome/components/homeassistant/* @OttoWinter @esphome/core
esphome/components/homeassistant/number/* @landonr
esphome/components/homeassistant/switch/* @Links2004
esphome/components/honeywell_hih_i2c/* @Benichou34
esphome/components/honeywellabp/* @RubyBailey
esphome/components/honeywellabp2_i2c/* @jpfaff
esphome/components/host/* @clydebarrow @esphome/core
esphome/components/host/time/* @clydebarrow
esphome/components/homeassistant/* @OttoWinter
esphome/components/hrxl_maxsonar_wr/* @netmikey
esphome/components/hte501/* @Stock-M
esphome/components/http_request/ota/* @oarcher
esphome/components/http_request/update/* @jesserockz
esphome/components/htu31d/* @betterengineering
esphome/components/hydreon_rgxx/* @functionpointer
esphome/components/hyt271/* @Philippe12
esphome/components/i2c/* @esphome/core
esphome/components/i2c_device/* @gabest11
esphome/components/i2s_audio/* @jesserockz
esphome/components/i2s_audio/media_player/* @jesserockz
esphome/components/i2s_audio/microphone/* @jesserockz
esphome/components/i2s_audio/speaker/* @jesserockz @kahrendt
esphome/components/iaqcore/* @yozik04
esphome/components/ili9xxx/* @clydebarrow @nielsnl68
esphome/components/improv_base/* @esphome/core
esphome/components/improv_serial/* @esphome/core
esphome/components/ina226/* @Sergio303 @latonita
esphome/components/ina260/* @mreditor97
esphome/components/ina2xx_base/* @latonita
esphome/components/ina2xx_i2c/* @latonita
esphome/components/ina2xx_spi/* @latonita
esphome/components/improv/* @jesserockz
esphome/components/inkbird_ibsth1_mini/* @fkirill
esphome/components/inkplate6/* @jesserockz
esphome/components/integration/* @OttoWinter
esphome/components/internal_temperature/* @Mat931
esphome/components/interval/* @esphome/core
esphome/components/jsn_sr04t/* @Mafus1
esphome/components/json/* @OttoWinter
esphome/components/kamstrup_kmp/* @cfeenstra1024
esphome/components/key_collector/* @ssieb
esphome/components/key_provider/* @ssieb
esphome/components/kuntze/* @ssieb
esphome/components/lcd_menu/* @numo68
esphome/components/ld2410/* @regevbr @sebcaps
esphome/components/ld2420/* @descipher
esphome/components/ld2450/* @hareeshmu
esphome/components/ledc/* @OttoWinter
esphome/components/libretiny/* @kuba2k2
esphome/components/libretiny_pwm/* @kuba2k2
esphome/components/light/* @esphome/core
esphome/components/lightwaverf/* @max246
esphome/components/lilygo_t5_47/touchscreen/* @jesserockz
esphome/components/lock/* @esphome/core
esphome/components/logger/* @esphome/core
esphome/components/logger/select/* @clydebarrow
esphome/components/ltr390/* @latonita @sjtrny
esphome/components/ltr501/* @latonita
esphome/components/ltr_als_ps/* @latonita
esphome/components/lvgl/* @clydebarrow
esphome/components/m5stack_8angle/* @rnauber
esphome/components/mapping/* @clydebarrow
esphome/components/matrix_keypad/* @ssieb
esphome/components/max17043/* @blacknell
esphome/components/max31865/* @DAVe3283
esphome/components/max44009/* @berfenger
esphome/components/max6956/* @looping40
esphome/components/max7219digit/* @rspaargaren
esphome/components/max9611/* @mckaymatthew
esphome/components/mcp23008/* @jesserockz
esphome/components/mcp23017/* @jesserockz
esphome/components/mcp23s08/* @SenexCrenshaw @jesserockz
@@ -266,142 +79,49 @@ esphome/components/mcp23x08_base/* @jesserockz
esphome/components/mcp23x17_base/* @jesserockz
esphome/components/mcp23xxx_base/* @jesserockz
esphome/components/mcp2515/* @danielschramm @mvturnho
esphome/components/mcp3204/* @rsumner
esphome/components/mcp4461/* @p1ngb4ck
esphome/components/mcp4728/* @berfenger
esphome/components/mcp47a1/* @jesserockz
esphome/components/mcp9600/* @mreditor97
esphome/components/mcp9808/* @k7hpn
esphome/components/md5/* @esphome/core
esphome/components/mdns/* @esphome/core
esphome/components/media_player/* @jesserockz
esphome/components/micro_wake_word/* @jesserockz @kahrendt
esphome/components/micronova/* @jorre05
esphome/components/microphone/* @jesserockz
esphome/components/mics_4514/* @jesserockz
esphome/components/midea/* @dudanov
esphome/components/midea_ir/* @dudanov
esphome/components/mitsubishi/* @RubyBailey
esphome/components/mixer/speaker/* @kahrendt
esphome/components/mlx90393/* @functionpointer
esphome/components/mlx90614/* @jesserockz
esphome/components/mmc5603/* @benhoff
esphome/components/mmc5983/* @agoode
esphome/components/modbus_controller/* @martgras
esphome/components/modbus_controller/binary_sensor/* @martgras
esphome/components/modbus_controller/number/* @martgras
esphome/components/modbus_controller/output/* @martgras
esphome/components/modbus_controller/select/* @martgras @stegm
esphome/components/modbus_controller/sensor/* @martgras
esphome/components/modbus_controller/switch/* @martgras
esphome/components/modbus_controller/text_sensor/* @martgras
esphome/components/mopeka_ble/* @Fabian-Schmidt @spbrogan
esphome/components/mopeka_pro_check/* @spbrogan
esphome/components/mopeka_std_check/* @Fabian-Schmidt
esphome/components/mpl3115a2/* @kbickar
esphome/components/mpu6886/* @fabaff
esphome/components/ms8607/* @e28eta
esphome/components/msa3xx/* @latonita
esphome/components/nau7802/* @cujomalainey
esphome/components/network/* @esphome/core
esphome/components/nextion/* @edwardtfn @senexcrenshaw
esphome/components/nextion/* @senexcrenshaw
esphome/components/nextion/binary_sensor/* @senexcrenshaw
esphome/components/nextion/sensor/* @senexcrenshaw
esphome/components/nextion/switch/* @senexcrenshaw
esphome/components/nextion/text_sensor/* @senexcrenshaw
esphome/components/nfc/* @jesserockz @kbx81
esphome/components/noblex/* @AGalfra
esphome/components/npi19/* @bakerkj
esphome/components/nfc/* @jesserockz
esphome/components/number/* @esphome/core
esphome/components/one_wire/* @ssieb
esphome/components/online_image/* @clydebarrow @guillempages
esphome/components/opentherm/* @olegtarasov
esphome/components/ota/* @esphome/core
esphome/components/output/* @esphome/core
esphome/components/pca6416a/* @Mat931
esphome/components/pca9554/* @clydebarrow @hwstar
esphome/components/pcf85063/* @brogon
esphome/components/pcf8563/* @KoenBreeman
esphome/components/pid/* @OttoWinter
esphome/components/pipsolar/* @andreashergert1984
esphome/components/pm1006/* @habbie
esphome/components/pm2005/* @andrewjswan
esphome/components/pmsa003i/* @sjtrny
esphome/components/pmwcs3/* @SeByDocKy
esphome/components/pn532/* @OttoWinter @jesserockz
esphome/components/pn532_i2c/* @OttoWinter @jesserockz
esphome/components/pn532_spi/* @OttoWinter @jesserockz
esphome/components/pn7150/* @jesserockz @kbx81
esphome/components/pn7150_i2c/* @jesserockz @kbx81
esphome/components/pn7160/* @jesserockz @kbx81
esphome/components/pn7160_i2c/* @jesserockz @kbx81
esphome/components/pn7160_spi/* @jesserockz @kbx81
esphome/components/power_supply/* @esphome/core
esphome/components/preferences/* @esphome/core
esphome/components/psram/* @esphome/core
esphome/components/pulse_meter/* @TrentHouliston @cstaahl @stevebaxter
esphome/components/pulse_meter/* @stevebaxter
esphome/components/pvvx_mithermometer/* @pasiz
esphome/components/pylontech/* @functionpointer
esphome/components/qmp6988/* @andrewpc
esphome/components/qr_code/* @wjtje
esphome/components/qspi_dbi/* @clydebarrow
esphome/components/qwiic_pir/* @kahrendt
esphome/components/radon_eye_ble/* @jeffeb3
esphome/components/radon_eye_rd200/* @jeffeb3
esphome/components/rc522/* @glmnet
esphome/components/rc522_i2c/* @glmnet
esphome/components/rc522_spi/* @glmnet
esphome/components/resampler/speaker/* @kahrendt
esphome/components/restart/* @esphome/core
esphome/components/rf_bridge/* @jesserockz
esphome/components/rgbct/* @jesserockz
esphome/components/rp2040/* @jesserockz
esphome/components/rp2040_pio_led_strip/* @Papa-DMan
esphome/components/rp2040_pwm/* @jesserockz
esphome/components/rpi_dpi_rgb/* @clydebarrow
esphome/components/rtl87xx/* @kuba2k2
esphome/components/rtttl/* @glmnet
esphome/components/safe_mode/* @jsuanet @kbx81 @paulmonigatti
esphome/components/scd4x/* @martgras @sjtrny
esphome/components/script/* @esphome/core
esphome/components/sdl/* @bdm310 @clydebarrow
esphome/components/sdm_meter/* @jesserockz @polyfaces
esphome/components/sdp3x/* @Azimath
esphome/components/seeed_mr24hpc1/* @limengdu
esphome/components/seeed_mr60bha2/* @limengdu
esphome/components/seeed_mr60fda2/* @limengdu
esphome/components/selec_meter/* @sourabhjaiswal
esphome/components/select/* @esphome/core
esphome/components/sen0321/* @notjj
esphome/components/sen21231/* @shreyaskarnik
esphome/components/sen5x/* @martgras
esphome/components/sensirion_common/* @martgras
esphome/components/sensor/* @esphome/core
esphome/components/sfa30/* @ghsensdev
esphome/components/sgp40/* @SenexCrenshaw
esphome/components/sgp4x/* @SenexCrenshaw @martgras
esphome/components/shelly_dimmer/* @edge90 @rnauber
esphome/components/sht3xd/* @mrtoy-me
esphome/components/sht4x/* @sjtrny
esphome/components/shutdown/* @esphome/core @jsuanet
esphome/components/sigma_delta_output/* @Cat-Ion
esphome/components/shutdown/* @esphome/core
esphome/components/sim800l/* @glmnet
esphome/components/sm10bit_base/* @Cossid
esphome/components/sm2135/* @BoukeHaarsma23 @dd32 @matika77
esphome/components/sm2235/* @Cossid
esphome/components/sm2335/* @Cossid
esphome/components/sml/* @alengwenus
esphome/components/smt100/* @piechade
esphome/components/sn74hc165/* @jesserockz
esphome/components/sm2135/* @BoukeHaarsma23
esphome/components/socket/* @esphome/core
esphome/components/sonoff_d1/* @anatoly-savchenkov
esphome/components/speaker/* @jesserockz @kahrendt
esphome/components/speaker/media_player/* @kahrendt @synesthesiam
esphome/components/spi/* @clydebarrow @esphome/core
esphome/components/spi_device/* @clydebarrow
esphome/components/spi_led_strip/* @clydebarrow
esphome/components/sprinkler/* @kbx81
esphome/components/sps30/* @martgras
esphome/components/spi/* @esphome/core
esphome/components/ssd1322_base/* @kbx81
esphome/components/ssd1322_spi/* @kbx81
esphome/components/ssd1325_base/* @kbx81
@@ -413,101 +133,34 @@ esphome/components/ssd1331_base/* @kbx81
esphome/components/ssd1331_spi/* @kbx81
esphome/components/ssd1351_base/* @kbx81
esphome/components/ssd1351_spi/* @kbx81
esphome/components/st7567_base/* @latonita
esphome/components/st7567_i2c/* @latonita
esphome/components/st7567_spi/* @latonita
esphome/components/st7701s/* @clydebarrow
esphome/components/st7735/* @SenexCrenshaw
esphome/components/st7789v/* @kbx81
esphome/components/st7920/* @marsjan155
esphome/components/statsd/* @Links2004
esphome/components/substitutions/* @esphome/core
esphome/components/sun/* @OttoWinter
esphome/components/sun_gtil2/* @Mat931
esphome/components/switch/* @esphome/core
esphome/components/switch/binary_sensor/* @ssieb
esphome/components/t6615/* @tylermenezes
esphome/components/tc74/* @sethgirvan
esphome/components/tca9548a/* @andreashergert1984
esphome/components/tca9555/* @mobrembski
esphome/components/tcl112/* @glmnet
esphome/components/tee501/* @Stock-M
esphome/components/teleinfo/* @0hax
esphome/components/tem3200/* @bakerkj
esphome/components/template/alarm_control_panel/* @grahambrown11 @hwstar
esphome/components/template/datetime/* @rfdarter
esphome/components/template/event/* @nohat
esphome/components/template/fan/* @ssieb
esphome/components/text/* @mauritskorse
esphome/components/thermostat/* @kbx81
esphome/components/time/* @OttoWinter
esphome/components/tlc5947/* @rnauber
esphome/components/tlc5971/* @IJIJI
esphome/components/tm1621/* @Philippe12
esphome/components/tm1637/* @glmnet
esphome/components/tm1638/* @skykingjwc
esphome/components/tm1651/* @freekode
esphome/components/tmp102/* @timsavage
esphome/components/tmp1075/* @sybrenstuvel
esphome/components/tmp117/* @Azimath
esphome/components/tof10120/* @wstrzalka
esphome/components/tormatic/* @ti-mo
esphome/components/toshiba/* @kbx81
esphome/components/touchscreen/* @jesserockz @nielsnl68
esphome/components/tsl2591/* @wjcarpenter
esphome/components/tt21100/* @kroimon
esphome/components/tuya/binary_sensor/* @jesserockz
esphome/components/tuya/climate/* @jesserockz
esphome/components/tuya/number/* @frankiboy1
esphome/components/tuya/select/* @bearpawmaxim
esphome/components/tuya/sensor/* @jesserockz
esphome/components/tuya/switch/* @jesserockz
esphome/components/tuya/text_sensor/* @dentra
esphome/components/uart/* @esphome/core
esphome/components/uart/button/* @ssieb
esphome/components/udp/* @clydebarrow
esphome/components/ufire_ec/* @pvizeli
esphome/components/ufire_ise/* @pvizeli
esphome/components/ultrasonic/* @OttoWinter
esphome/components/update/* @jesserockz
esphome/components/uponor_smatrix/* @kroimon
esphome/components/valve/* @esphome/core
esphome/components/vbus/* @ssieb
esphome/components/veml3235/* @kbx81
esphome/components/veml7700/* @latonita
esphome/components/version/* @esphome/core
esphome/components/voice_assistant/* @jesserockz
esphome/components/wake_on_lan/* @clydebarrow @willwill2will54
esphome/components/watchdog/* @oarcher
esphome/components/waveshare_epaper/* @clydebarrow
esphome/components/web_server_base/* @OttoWinter
esphome/components/web_server_idf/* @dentra
esphome/components/weikai/* @DrCoolZic
esphome/components/weikai_i2c/* @DrCoolZic
esphome/components/weikai_spi/* @DrCoolZic
esphome/components/whirlpool/* @glmnet
esphome/components/whynter/* @aeonsablaze
esphome/components/wiegand/* @ssieb
esphome/components/wireguard/* @droscy @lhoracek @thomas0bernard
esphome/components/wk2132_i2c/* @DrCoolZic
esphome/components/wk2132_spi/* @DrCoolZic
esphome/components/wk2168_i2c/* @DrCoolZic
esphome/components/wk2168_spi/* @DrCoolZic
esphome/components/wk2204_i2c/* @DrCoolZic
esphome/components/wk2204_spi/* @DrCoolZic
esphome/components/wk2212_i2c/* @DrCoolZic
esphome/components/wk2212_spi/* @DrCoolZic
esphome/components/wl_134/* @hobbypunk90
esphome/components/x9c/* @EtienneMD
esphome/components/xgzp68xx/* @gcormier
esphome/components/xiaomi_hhccjcy10/* @fariouche
esphome/components/xiaomi_lywsd02mmc/* @juanluss31
esphome/components/xiaomi_lywsd03mmc/* @ahpohl
esphome/components/xiaomi_mhoc303/* @drug123
esphome/components/xiaomi_mhoc401/* @vevsvevs
esphome/components/xiaomi_rtcgq02lm/* @jesserockz
esphome/components/xl9535/* @mreditor97
esphome/components/xpt2046/touchscreen/* @nielsnl68 @numo68
esphome/components/xxtea/* @clydebarrow
esphome/components/zhlt01/* @cfeenstra1024
esphome/components/zio_ultrasonic/* @kahrendt
esphome/components/xpt2046/* @numo68
CODE_OF_CONDUCT.md
@@ -34,7 +34,7 @@ This Code of Conduct applies both within project spaces and in public spaces whe

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at esphome@openhomefoundation.org. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at esphome@nabucasa.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
CONTRIBUTING.md
@@ -1,14 +1,18 @@
# Contributing to ESPHome [Discord](https://discord.gg/KhAMKrd) [GitHub release](https://GitHub.com/esphome/esphome/releases/)
# Contributing to ESPHome

We welcome contributions to the ESPHome suite of code and documentation!
This Python project is responsible for reading in YAML configuration files and
converting them to C++ code. This code is then converted to a platformio project and compiled
with [esphome-core](https://github.com/esphome/esphome-core), the C++ framework behind the project.

Please read our [contributing guide](https://esphome.io/guides/contributing.html) if you wish to contribute to the
project and be sure to join us on [Discord](https://discord.gg/KhAMKrd).
For a detailed guide, please see https://esphome.io/guides/contributing.html#contributing-to-esphomeyaml

**See also:**
Things to note when contributing:

[Documentation](https://esphome.io) -- [Issues](https://github.com/esphome/issues/issues) -- [Feature requests](https://github.com/esphome/feature-requests/issues)

---

[Open Home Foundation](https://www.openhomefoundation.org/)
- Please test your changes :)
- If a new feature is added or an existing user-facing feature is changed, you should also
  update the [docs](https://github.com/esphome/esphome-docs). See [contributing to esphome-docs](https://esphome.io/guides/contributing.html#contributing-to-esphomedocs)
  for more information.
- Please also update the tests in the `tests/` folder. You can do so by just adding a line in one of the YAML files
  which checks if your new feature compiles correctly.
- Sometimes I will let pull requests linger because I'm not 100% sure about them. Please feel free to ping
  me after some time.
MANIFEST.in
@@ -1,6 +1,7 @@
include LICENSE
include README.md
include requirements.txt
recursive-include esphome *.cpp *.h *.tcc *.c
recursive-include esphome *.py.script
include esphome/dashboard/templates/*.html
recursive-include esphome/dashboard/static *.ico *.js *.css *.woff* LICENSE
recursive-include esphome *.cpp *.h *.tcc
recursive-include esphome LICENSE.txt
15 README.md
@@ -1,16 +1,9 @@
# ESPHome [Discord](https://discord.gg/KhAMKrd) [GitHub release](https://GitHub.com/esphome/esphome/releases/)

<a href="https://esphome.io/">
  <picture>
    <source media="(prefers-color-scheme: dark)" srcset="https://esphome.io/_static/logo-text-on-dark.svg", alt="ESPHome Logo">
    <img src="https://esphome.io/_static/logo-text-on-light.svg" alt="ESPHome Logo">
  </picture>
</a>
[ESPHome Logo](https://esphome.io/)

---
**Documentation:** https://esphome.io/

[Documentation](https://esphome.io) -- [Issues](https://github.com/esphome/issues/issues) -- [Feature requests](https://github.com/esphome/feature-requests/issues)
For issues, please go to [the issue tracker](https://github.com/esphome/issues/issues).

---

[Open Home Foundation](https://www.openhomefoundation.org/)
For feature requests, please see [feature requests](https://github.com/esphome/feature-requests/issues).
@ -1,121 +1,15 @@
# Build these with the build.py script
# Example:
# python3 docker/build.py --tag dev --arch amd64 --build-type docker build

# One of "docker", "hassio"
ARG BASEIMGTYPE=docker

# https://github.com/hassio-addons/addon-debian-base/releases
FROM ghcr.io/hassio-addons/debian-base:7.2.0 AS base-hassio
# https://hub.docker.com/_/debian?tab=tags&page=1&name=bookworm
FROM debian:12.2-slim AS base-docker

FROM base-${BASEIMGTYPE} AS base

ARG TARGETARCH
ARG TARGETVARIANT

# Note that --break-system-packages is used below because
# https://peps.python.org/pep-0668/ added a safety check that prevents
# installing packages with the same name as a system package. This is
# not a problem for us because we are not concerned about overwriting
# system packages because we are running in an isolated container.

RUN \
    apt-get update \
    # Use pinned versions so that we get updates with build caching
    && apt-get install -y --no-install-recommends \
        python3-pip=23.0.1+dfsg-1 \
        python3-setuptools=66.1.1-1+deb12u1 \
        python3-venv=3.11.2-1+b1 \
        python3-wheel=0.38.4-2 \
        iputils-ping=3:20221126-1+deb12u1 \
        git=1:2.39.5-0+deb12u2 \
        curl=7.88.1-10+deb12u12 \
        openssh-client=1:9.2p1-2+deb12u5 \
        python3-cffi=1.15.1-5 \
        libcairo2=1.16.0-7 \
        libmagic1=1:5.44-3 \
        patch=2.7.6-7 \
    && rm -rf \
        /tmp/* \
        /var/{cache,log}/* \
        /var/lib/apt/lists/*

ENV \
    # Fix click python3 lang warning https://click.palletsprojects.com/en/7.x/python3/
    LANG=C.UTF-8 LC_ALL=C.UTF-8 \
    # Store globally installed pio libs in /piolibs
    PLATFORMIO_GLOBALLIB_DIR=/piolibs

RUN \
    pip3 install \
    --break-system-packages --no-cache-dir \
    # Keep platformio version in sync with requirements.txt
    platformio==6.1.18 \
    # Change some platformio settings
    && platformio settings set enable_telemetry No \
    && platformio settings set check_platformio_interval 1000000 \
    && mkdir -p /piolibs

ARG BUILD_FROM=esphome/esphome-base:latest
FROM ${BUILD_FROM}

# First install requirements to leverage caching when requirements don't change
# tmpfs is for https://github.com/rust-lang/cargo/issues/8719
COPY requirements.txt requirements_optional.txt docker/platformio_install_deps.py platformio.ini /
RUN \
    pip3 install --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
    && /platformio_install_deps.py /platformio.ini

COPY requirements.txt requirements_optional.txt /
RUN --mount=type=tmpfs,target=/root/.cargo <<END-OF-RUN
    # Fail on any non-zero status
    set -e

    # install build tools in case wheels are not available
    BUILD_DEPS="
        build-essential=12.9
        python3-dev=3.11.2-1+b1
        zlib1g-dev=1:1.2.13.dfsg-1
        libjpeg-dev=1:2.1.5-2
        libfreetype-dev=2.12.1+dfsg-5+deb12u4
        libssl-dev=3.0.15-1~deb12u1
        libffi-dev=3.4.4-1
        cargo=0.66.0+ds1-1
        pkg-config=1.8.1-1
    "
    LIB_DEPS="
        libtiff6=4.5.0-6+deb12u1
        libopenjp2-7=2.5.0-2+deb12u1
    "
    if [ "$TARGETARCH$TARGETVARIANT" = "arm64" ]
    then
        apt-get update
        apt-get install -y --no-install-recommends $BUILD_DEPS $LIB_DEPS
    fi

    CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse CARGO_HOME=/root/.cargo
    pip3 install --break-system-packages --no-cache-dir -r /requirements.txt -r /requirements_optional.txt

    if [ "$TARGETARCH$TARGETVARIANT" = "arm64" ]
    then
        apt-get remove -y --purge --auto-remove $BUILD_DEPS
        rm -rf /tmp/* /var/{cache,log}/* /var/lib/apt/lists/*
    fi
END-OF-RUN

COPY script/platformio_install_deps.py platformio.ini /
RUN /platformio_install_deps.py /platformio.ini --libraries

# Avoid unsafe git error when container user and file config volume permissions don't match
RUN git config --system --add safe.directory '*'

# ======================= docker-type image =======================
FROM base AS docker

# Copy esphome and install
COPY . /esphome
RUN pip3 install --break-system-packages --no-cache-dir -e /esphome
# Then copy esphome and install
COPY . .
RUN pip3 install --no-cache-dir -e .

# Settings for dashboard
ENV USERNAME="" PASSWORD=""
@ -125,93 +19,12 @@ EXPOSE 6052

# Run healthcheck (heartbeat)
HEALTHCHECK --interval=30s --timeout=30s \
    CMD curl --fail http://localhost:6052/version -A "HealthCheck" || exit 1

COPY docker/docker_entrypoint.sh /entrypoint.sh
CMD curl --fail http://localhost:6052 || exit 1

# The directory the user should mount their configuration files to
VOLUME /config
WORKDIR /config
# Set entrypoint to esphome (via a script) so that the user doesn't have to type 'esphome'
# Set entrypoint to esphome so that the user doesn't have to type 'esphome'
# in every docker command twice
ENTRYPOINT ["/entrypoint.sh"]
ENTRYPOINT ["esphome"]
# When no arguments given, start the dashboard in the workdir
CMD ["dashboard", "/config"]

ARG BUILD_VERSION=dev

# Labels
LABEL \
    org.opencontainers.image.authors="The ESPHome Authors" \
    org.opencontainers.image.title="ESPHome" \
    org.opencontainers.image.description="ESPHome is a system to configure your microcontrollers by simple yet powerful configuration files and control them remotely through Home Automation systems" \
    org.opencontainers.image.url="https://esphome.io/" \
    org.opencontainers.image.documentation="https://esphome.io/" \
    org.opencontainers.image.source="https://github.com/esphome/esphome" \
    org.opencontainers.image.licenses="ESPHome" \
    org.opencontainers.image.version=${BUILD_VERSION}

# ======================= hassio-type image =======================
FROM base AS hassio

RUN \
    apt-get update \
    # Use pinned versions so that we get updates with build caching
    && apt-get install -y --no-install-recommends \
        nginx-light=1.22.1-9+deb12u1 \
    && rm -rf \
        /tmp/* \
        /var/{cache,log}/* \
        /var/lib/apt/lists/*

ARG BUILD_VERSION=dev

# Copy root filesystem
COPY docker/ha-addon-rootfs/ /

# Copy esphome and install
COPY . /esphome
RUN pip3 install --break-system-packages --no-cache-dir -e /esphome

# Labels
LABEL \
    io.hass.name="ESPHome" \
    io.hass.description="ESPHome is a system to configure your microcontrollers by simple yet powerful configuration files and control them remotely through Home Automation systems" \
    io.hass.type="addon" \
    io.hass.version="${BUILD_VERSION}"
# io.hass.arch is inherited from addon-debian-base

# ======================= lint-type image =======================
FROM base AS lint

ENV \
    PLATFORMIO_CORE_DIR=/esphome/.temp/platformio

RUN \
    curl -L https://apt.llvm.org/llvm-snapshot.gpg.key -o /etc/apt/trusted.gpg.d/apt.llvm.org.asc \
    && echo "deb http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-18 main" > /etc/apt/sources.list.d/llvm.sources.list \
    && apt-get update \
    # Use pinned versions so that we get updates with build caching
    && apt-get install -y --no-install-recommends \
        clang-format-13=1:13.0.1-11+b2 \
        patch=2.7.6-7 \
        software-properties-common=0.99.30-4.1~deb12u1 \
        nano=7.2-1+deb12u1 \
        build-essential=12.9 \
        python3-dev=3.11.2-1+b1 \
        clang-tidy-18=1:18.1.8~++20240731024826+3b5b5c1ec4a3-1~exp1~20240731144843.145 \
    && rm -rf \
        /tmp/* \
        /var/{cache,log}/* \
        /var/lib/apt/lists/*

COPY requirements_test.txt /
RUN pip3 install --break-system-packages --no-cache-dir -r /requirements_test.txt

VOLUME ["/esphome"]
WORKDIR /esphome
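For context on the --break-system-packages flag used in the Dockerfile above: PEP 668 lets a distribution mark its system Python as "externally managed", which makes pip refuse system-wide installs. A minimal sketch (not repository code; assumes a Debian bookworm system) that checks for the marker file driving this behavior:

# Minimal sketch (not repository code): detect PEP 668's marker file.
# On Debian bookworm the system Python ships an EXTERNALLY-MANAGED file
# in the stdlib directory; its presence is what forces the
# --break-system-packages flag in the Dockerfile above.
import pathlib
import sysconfig

marker = pathlib.Path(sysconfig.get_path("stdlib")) / "EXTERNALLY-MANAGED"
print("pip will refuse system installs:", marker.exists())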
1 docker/Dockerfile.dev Normal file
@ -0,0 +1 @@
FROM esphome/esphome-lint:1.1
25 docker/Dockerfile.hassio Normal file
@ -0,0 +1,25 @@
ARG BUILD_FROM=esphome/esphome-hassio-base:latest
FROM ${BUILD_FROM}

# First install requirements to leverage caching when requirements don't change
COPY requirements.txt requirements_optional.txt docker/platformio_install_deps.py platformio.ini /
RUN \
    pip3 install --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
    && /platformio_install_deps.py /platformio.ini

# Copy root filesystem
COPY docker/rootfs/ /

# Then copy esphome and install
COPY . /opt/esphome/
RUN pip3 install --no-cache-dir -e /opt/esphome

# Build arguments
ARG BUILD_VERSION=dev

# Labels
LABEL \
    io.hass.name="ESPHome" \
    io.hass.description="Manage and program ESP8266/ESP32 microcontrollers through YAML configuration files" \
    io.hass.type="addon" \
    io.hass.version=${BUILD_VERSION}
10 docker/Dockerfile.lint Normal file
@ -0,0 +1,10 @@
ARG BUILD_FROM=esphome/esphome-lint-base:latest
FROM ${BUILD_FROM}

COPY requirements.txt requirements_optional.txt requirements_test.txt docker/platformio_install_deps.py platformio.ini /
RUN \
    pip3 install --no-cache-dir -r /requirements.txt -r /requirements_optional.txt -r /requirements_test.txt \
    && /platformio_install_deps.py /platformio.ini

VOLUME ["/esphome"]
WORKDIR /esphome
181 docker/build.py
@ -1,91 +1,80 @@
#!/usr/bin/env python3
import argparse
from dataclasses import dataclass
import re
import shlex
import subprocess
import argparse
import platform
import shlex
import re
import sys

CHANNEL_DEV = "dev"
CHANNEL_BETA = "beta"
CHANNEL_RELEASE = "release"

CHANNEL_DEV = 'dev'
CHANNEL_BETA = 'beta'
CHANNEL_RELEASE = 'release'
CHANNELS = [CHANNEL_DEV, CHANNEL_BETA, CHANNEL_RELEASE]

ARCH_AMD64 = "amd64"
ARCH_AARCH64 = "aarch64"
ARCHS = [ARCH_AMD64, ARCH_AARCH64]
ARCH_AMD64 = 'amd64'
ARCH_ARMV7 = 'armv7'
ARCH_AARCH64 = 'aarch64'
ARCHS = [ARCH_AMD64, ARCH_ARMV7, ARCH_AARCH64]

TYPE_DOCKER = "docker"
TYPE_HA_ADDON = "ha-addon"
TYPE_LINT = "lint"
TYPE_DOCKER = 'docker'
TYPE_HA_ADDON = 'ha-addon'
TYPE_LINT = 'lint'
TYPES = [TYPE_DOCKER, TYPE_HA_ADDON, TYPE_LINT]

BASE_VERSION = "3.6.0"

parser = argparse.ArgumentParser()
parser.add_argument(
    "--tag",
    type=str,
    required=True,
    help="The main docker tag to push to. If a version number also adds latest and/or beta tag",
)
parser.add_argument(
    "--arch", choices=ARCHS, required=False, help="The architecture to build for"
)
parser.add_argument(
    "--build-type", choices=TYPES, required=True, help="The type of build to run"
)
parser.add_argument(
    "--dry-run", action="store_true", help="Don't run any commands, just print them"
)
subparsers = parser.add_subparsers(
    help="Action to perform", dest="command", required=True
)
parser.add_argument("--tag", type=str, required=True, help="The main docker tag to push to. If a version number also adds latest and/or beta tag")
parser.add_argument("--arch", choices=ARCHS, required=False, help="The architecture to build for")
parser.add_argument("--build-type", choices=TYPES, required=True, help="The type of build to run")
parser.add_argument("--dry-run", action="store_true", help="Don't run any commands, just print them")
subparsers = parser.add_subparsers(help="Action to perform", dest="command", required=True)
build_parser = subparsers.add_parser("build", help="Build the image")
build_parser.add_argument("--push", help="Also push the images", action="store_true")
build_parser.add_argument(
    "--load", help="Load the docker image locally", action="store_true"
)
manifest_parser = subparsers.add_parser(
    "manifest", help="Create a manifest from already pushed images"
)
push_parser = subparsers.add_parser("push", help="Tag the already built image and push it to docker hub")
manifest_parser = subparsers.add_parser("manifest", help="Create a manifest from already pushed images")

# only lists some possibilities, doesn't have to be perfect
# https://stackoverflow.com/a/45125525
UNAME_TO_ARCH = {
    "x86_64": ARCH_AMD64,
    "aarch64": ARCH_AARCH64,
    "aarch64_be": ARCH_AARCH64,
    "arm": ARCH_ARMV7,
}

@dataclass(frozen=True)
class DockerParams:
    build_from: str
    build_to: str
    manifest_to: str
    baseimgtype: str
    platform: str
    target: str
    dockerfile: str

    @classmethod
    def for_type_arch(cls, build_type, arch):
        prefix = {
            TYPE_DOCKER: "esphome/esphome",
            TYPE_HA_ADDON: "esphome/esphome-hassio",
            TYPE_LINT: "esphome/esphome-lint",
            TYPE_LINT: "esphome/esphome-lint"
        }[build_type]
        build_from = f"ghcr.io/{prefix}-base-{arch}:{BASE_VERSION}"
        build_to = f"{prefix}-{arch}"
        baseimgtype = {
            TYPE_DOCKER: "docker",
            TYPE_HA_ADDON: "hassio",
            TYPE_LINT: "docker",
        }[build_type]
        platform = {
            ARCH_AMD64: "linux/amd64",
            ARCH_AARCH64: "linux/arm64",
        }[arch]
        target = {
            TYPE_DOCKER: "docker",
            TYPE_HA_ADDON: "hassio",
            TYPE_LINT: "lint",
        dockerfile = {
            TYPE_DOCKER: "docker/Dockerfile",
            TYPE_HA_ADDON: "docker/Dockerfile.hassio",
            TYPE_LINT: "docker/Dockerfile.lint",
        }[build_type]
        return cls(
            build_from=build_from,
            build_to=build_to,
            manifest_to=prefix,
            baseimgtype=baseimgtype,
            platform=platform,
            target=target,
            dockerfile=dockerfile
        )

@ -101,12 +90,10 @@ def main():
        sys.exit(1)

    # detect channel from tag
    match = re.match(r"^(\d+\.\d+)(?:\.\d+)?(b\d+)?$", args.tag)
    major_minor_version = None
    match = re.match(r'^\d+\.\d+(?:\.\d+)?(b\d+)?$', args.tag)
    if match is None:
        channel = CHANNEL_DEV
    elif match.group(2) is None:
        major_minor_version = match.group(1)
    elif match.group(1) is None:
        channel = CHANNEL_RELEASE
    else:
        channel = CHANNEL_BETA
@ -121,50 +108,50 @@ def main():
        tags_to_push.append("beta")
        tags_to_push.append("latest")

        # Compatibility with HA tags
        if major_minor_version:
            tags_to_push.append("stable")
            tags_to_push.append(major_minor_version)

    if args.command == "build":
        # 1. pull cache image
        params = DockerParams.for_type_arch(args.build_type, args.arch)
        cache_tag = {
            CHANNEL_DEV: "cache-dev",
            CHANNEL_BETA: "cache-beta",
            CHANNEL_RELEASE: "cache-latest",
            CHANNEL_DEV: "dev",
            CHANNEL_BETA: "beta",
            CHANNEL_RELEASE: "latest",
        }[channel]
        cache_img = f"ghcr.io/{params.build_to}:{cache_tag}"
        run_command("docker", "pull", cache_img, ignore_error=True)

        imgs = [f"{params.build_to}:{tag}" for tag in tags_to_push]
        imgs += [f"ghcr.io/{params.build_to}:{tag}" for tag in tags_to_push]
        # 2. register QEMU binfmt (if not host arch)
        is_native = UNAME_TO_ARCH.get(platform.machine()) == args.arch
        if not is_native:
            run_command(
                "docker", "run", "--rm", "--privileged", "multiarch/qemu-user-static:5.2.0-2",
                "--reset", "-p", "yes"
            )

        # 3. build
        cmd = [
            "docker",
            "buildx",
            "build",
            "--build-arg",
            f"BASEIMGTYPE={params.baseimgtype}",
            "--build-arg",
            f"BUILD_VERSION={args.tag}",
            "--cache-from",
            f"type=registry,ref={cache_img}",
            "--file",
            "docker/Dockerfile",
            "--platform",
            params.platform,
            "--target",
            params.target,
        ]
        run_command(
            "docker", "build",
            "--build-arg", f"BUILD_FROM={params.build_from}",
            "--build-arg", f"BUILD_VERSION={args.tag}",
            "--tag", f"{params.build_to}:{args.tag}",
            "--cache-from", cache_img,
            "--file", params.dockerfile,
            "."
        )
    elif args.command == "push":
        params = DockerParams.for_type_arch(args.build_type, args.arch)
        imgs = [f"{params.build_to}:{tag}" for tag in tags_to_push]
        imgs += [f"ghcr.io/{params.build_to}:{tag}" for tag in tags_to_push]
        src = imgs[0]
        # 1. tag images
        for img in imgs[1:]:
            run_command(
                "docker", "tag", src, img
            )
        # 2. push images
        for img in imgs:
            cmd += ["--tag", img]
        if args.push:
            cmd += ["--push", "--cache-to", f"type=registry,ref={cache_img},mode=max"]
        if args.load:
            cmd += ["--load"]

        run_command(*cmd, ".")
            run_command(
                "docker", "push", img
            )
    elif args.command == "manifest":
        manifest = DockerParams.for_type_arch(args.build_type, ARCH_AMD64).manifest_to

@ -181,7 +168,9 @@ def main():
    run_command(*cmd)
    # 2. Push manifests
    for target in targets:
        run_command("docker", "manifest", "push", target)
        run_command(
            "docker", "manifest", "push", target
        )

if __name__ == "__main__":
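For reference, a small standalone sketch (not repository code) of the tag-to-channel mapping used on the dev side of build.py above; the sample tags are illustrative only:

# Standalone sketch of build.py's tag-to-channel detection (dev side).
# Any tag that doesn't look like a version falls back to the dev channel.
import re

for tag in ("2021.9.0", "2021.9.0b2", "my-branch"):
    match = re.match(r"^(\d+\.\d+)(?:\.\d+)?(b\d+)?$", tag)
    if match is None:
        print(tag, "-> dev channel")  # custom/branch tag
    elif match.group(2) is None:
        print(tag, "-> release channel, stable tag", match.group(1))
    else:
        print(tag, "-> beta channel")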
@ -1,30 +0,0 @@
#!/usr/bin/env bash

# If /cache is mounted, use that as PIO's coredir
# otherwise use path in /config (so that PIO packages aren't downloaded on each compile)

if [[ -d /cache ]]; then
    pio_cache_base=/cache/platformio
else
    pio_cache_base=/config/.esphome/platformio
fi

if [[ ! -d "${pio_cache_base}" ]]; then
    echo "Creating cache directory ${pio_cache_base}"
    echo "You can change this behavior by mounting a directory to the container's /cache directory."
    mkdir -p "${pio_cache_base}"
fi

# we can't set core_dir, because the settings file is stored in `core_dir/appstate.json`
# setting `core_dir` would therefore prevent pio from accessing
export PLATFORMIO_PLATFORMS_DIR="${pio_cache_base}/platforms"
export PLATFORMIO_PACKAGES_DIR="${pio_cache_base}/packages"
export PLATFORMIO_CACHE_DIR="${pio_cache_base}/cache"

# If /build is mounted, use that as the build path
# otherwise use path in /config (so that builds aren't lost on container restart)
if [[ -d /build ]]; then
    export ESPHOME_BUILD_PATH=/build
fi

exec esphome "$@"
@ -1,92 +0,0 @@
#!/usr/bin/env python3
import argparse
import re

CHANNEL_DEV = "dev"
CHANNEL_BETA = "beta"
CHANNEL_RELEASE = "release"

GHCR = "ghcr"
DOCKERHUB = "dockerhub"

parser = argparse.ArgumentParser()
parser.add_argument(
    "--tag",
    type=str,
    required=True,
    help="The main docker tag to push to. If a version number also adds latest and/or beta tag",
)
parser.add_argument(
    "--suffix",
    type=str,
    required=True,
    help="The suffix of the tag.",
)
parser.add_argument(
    "--registry",
    type=str,
    choices=[GHCR, DOCKERHUB],
    required=False,
    action="append",
    help="The registry to build tags for.",
)

def main():
    args = parser.parse_args()

    # detect channel from tag
    match = re.match(r"^(\d+\.\d+)(?:\.\d+)(?:(b\d+)|(-dev\d+))?$", args.tag)
    major_minor_version = None
    if match is None:  # eg 2023.12.0-dev20231109-testbranch
        channel = None  # Ran with custom tag for a branch etc
    elif match.group(3) is not None:  # eg 2023.12.0-dev20231109
        channel = CHANNEL_DEV
    elif match.group(2) is not None:  # eg 2023.12.0b1
        channel = CHANNEL_BETA
    else:  # eg 2023.12.0
        major_minor_version = match.group(1)
        channel = CHANNEL_RELEASE

    tags_to_push = [args.tag]
    if channel == CHANNEL_DEV:
        tags_to_push.append("dev")
    elif channel == CHANNEL_BETA:
        tags_to_push.append("beta")
    elif channel == CHANNEL_RELEASE:
        # Additionally push to beta
        tags_to_push.append("beta")
        tags_to_push.append("latest")

        if major_minor_version:
            tags_to_push.append("stable")
            tags_to_push.append(major_minor_version)

    suffix = f"-{args.suffix}" if args.suffix else ""

    image_name = f"esphome/esphome{suffix}"

    print(f"channel={channel}")

    if args.registry is None:
        args.registry = [GHCR, DOCKERHUB]
    elif len(args.registry) == 1:
        if GHCR in args.registry:
            print(f"image=ghcr.io/{image_name}")
        if DOCKERHUB in args.registry:
            print(f"image=docker.io/{image_name}")

    print(f"image_name={image_name}")

    full_tags = []

    for tag in tags_to_push:
        if GHCR in args.registry:
            full_tags += [f"ghcr.io/{image_name}:{tag}"]
        if DOCKERHUB in args.registry:
            full_tags += [f"docker.io/{image_name}:{tag}"]
    print(f"tags={','.join(full_tags)}")

if __name__ == "__main__":
    main()
@ -1,47 +0,0 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# This file installs the user ESPHome fork if specified.
# The fork must be up to date with the latest ESPHome dev branch
# and have no conflicts.
# This config option only exists in the ESPHome Dev add-on.
# ==============================================================================

declare esphome_fork

if bashio::config.has_value 'esphome_fork'; then
    esphome_fork=$(bashio::config 'esphome_fork')
    # format: [username][/repository]:ref
    if [[ "$esphome_fork" =~ ^(([^/]+)(/([^:]+))?:)?([^:/]+)$ ]]; then
        username="${BASH_REMATCH[2]:-esphome}"
        repository="${BASH_REMATCH[4]:-esphome}"
        ref="${BASH_REMATCH[5]}"
    else
        bashio::exit.nok "Invalid esphome_fork format: $esphome_fork"
    fi
    full_url="https://github.com/${username}/${repository}/archive/${ref}.tar.gz"
    bashio::log.info "Checking forked ESPHome"
    dev_version=$(python3 -c "from esphome.const import __version__; print(__version__)")
    bashio::log.info "Downloading ESPHome from fork '${esphome_fork}' (${full_url})..."
    curl -L -o /tmp/esphome.tar.gz "${full_url}" -qq ||
        bashio::exit.nok "Failed downloading ESPHome fork."
    bashio::log.info "Installing ESPHome from fork '${esphome_fork}' (${full_url})..."
    rm -rf /esphome || bashio::exit.nok "Failed to remove ESPHome."
    mkdir /esphome
    tar -zxf /tmp/esphome.tar.gz -C /esphome --strip-components=1 ||
        bashio::exit.nok "Failed installing ESPHome from fork."
    pip install -U -e /esphome || bashio::exit.nok "Failed installing ESPHome from fork."
    rm -f /tmp/esphome.tar.gz
    fork_version=$(python3 -c "from esphome.const import __version__; print(__version__)")

    if [[ "$fork_version" != "$dev_version" ]]; then
        bashio::log.error "############################"
        bashio::log.error "Uninstalled fork as version does not match"
        bashio::log.error "Update (or ask the author to update) the branch"
        bashio::log.error "This is important as the dev addon and the dev ESPHome"
        bashio::log.error "branch can have changes that are not compatible with old forks"
        bashio::log.error "and get reported as bugs which we cannot solve easily."
        bashio::log.error "############################"
        bashio::exit.nok
    fi
    bashio::log.info "Installed ESPHome from fork '${esphome_fork}' (${full_url})..."
fi
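The `esphome_fork` option above accepts a `[username][/repository]:ref` value, with username and repository defaulting to `esphome`. A standalone illustration (not add-on code; the sample values are hypothetical) of how the same pattern resolves to a download URL:

# Standalone illustration of the esphome_fork format parsed above.
# Username and repository default to "esphome" when omitted.
import re

PATTERN = re.compile(r"^(([^/]+)(/([^:]+))?:)?([^:/]+)$")
for value in ("mybranch", "someuser:mybranch", "someuser/myfork:mybranch"):
    m = PATTERN.match(value)
    username = m.group(2) or "esphome"
    repository = m.group(4) or "esphome"
    ref = m.group(5)
    print(f"{value} -> https://github.com/{username}/{repository}/archive/{ref}.tar.gz")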
@ -1,8 +0,0 @@
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
ssl_session_timeout 10m;
ssl_session_cache shared:SSL:10m;
ssl_session_tickets off;
ssl_stapling on;
ssl_stapling_verify on;
@ -1,3 +0,0 @@
upstream esphome {
    server unix:/var/run/esphome.sock;
}
@ -1 +0,0 @@
Without requirements or design, programming is the art of adding bugs to an empty text file. (Louis Srygley)
@ -1,32 +0,0 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Home Assistant Add-on: ESPHome
# Sends discovery information to Home Assistant.
# ==============================================================================
declare config
declare port

# We only disable it when disabled explicitly
if bashio::config.false 'home_assistant_dashboard_integration';
then
    bashio::log.info "Home Assistant discovery is disabled for this add-on."
    bashio::exit.ok
fi

port=$(bashio::addon.ingress_port)

# Wait for NGINX to become available
bashio::net.wait_for "${port}" "127.0.0.1" 300

config=$(\
    bashio::var.json \
        host "127.0.0.1" \
        port "^${port}" \
)

if bashio::discovery "esphome" "${config}" > /dev/null; then
    bashio::log.info "Successfully sent discovery information to Home Assistant."
else
    bashio::log.error "Discovery message to Home Assistant failed!"
fi
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/discovery/run
@ -1,26 +0,0 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Home Assistant Community Add-on: ESPHome
# Take down the S6 supervision tree when ESPHome dashboard fails
# ==============================================================================
declare exit_code
readonly exit_code_container=$(</run/s6-linux-init-container-results/exitcode)
readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"

bashio::log.info \
    "Service ESPHome dashboard exited with code ${exit_code_service}" \
    "(by signal ${exit_code_signal})"

if [[ "${exit_code_service}" -eq 256 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo $((128 + $exit_code_signal)) > /run/s6-linux-init-container-results/exitcode
    fi
    [[ "${exit_code_signal}" -eq 15 ]] && exec /run/s6/basedir/bin/halt
elif [[ "${exit_code_service}" -ne 0 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo "${exit_code_service}" > /run/s6-linux-init-container-results/exitcode
    fi
    exec /run/s6/basedir/bin/halt
fi
@ -1,53 +0,0 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the ESPHome dashboard
# ==============================================================================
readonly pio_cache_base=/data/cache/platformio

export ESPHOME_IS_HA_ADDON=true
export PLATFORMIO_GLOBALLIB_DIR=/piolibs

# we can't set core_dir, because the settings file is stored in `core_dir/appstate.json`
# setting `core_dir` would therefore prevent pio from accessing
export PLATFORMIO_PLATFORMS_DIR="${pio_cache_base}/platforms"
export PLATFORMIO_PACKAGES_DIR="${pio_cache_base}/packages"
export PLATFORMIO_CACHE_DIR="${pio_cache_base}/cache"

if bashio::config.true 'leave_front_door_open'; then
    export DISABLE_HA_AUTHENTICATION=true
fi

if bashio::config.true 'streamer_mode'; then
    export ESPHOME_STREAMER_MODE=true
fi

if bashio::config.has_value 'relative_url'; then
    export ESPHOME_DASHBOARD_RELATIVE_URL=$(bashio::config 'relative_url')
fi

if bashio::config.has_value 'default_compile_process_limit'; then
    export ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT=$(bashio::config 'default_compile_process_limit')
else
    if grep -q 'Raspberry Pi 3' /proc/cpuinfo; then
        export ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT=1
    fi
fi

mkdir -p "${pio_cache_base}"

mkdir -p /config/esphome

if bashio::fs.directory_exists '/config/esphome/.esphome'; then
    bashio::log.info "Migrating old .esphome directory..."
    if bashio::fs.file_exists '/config/esphome/.esphome/esphome.json'; then
        mv /config/esphome/.esphome/esphome.json /data/esphome.json
    fi
    mkdir -p "/data/storage"
    mv /config/esphome/.esphome/*.json /data/storage/ || true
    rm -rf /config/esphome/.esphome
fi

bashio::log.info "Starting ESPHome dashboard..."
exec esphome dashboard /config/esphome --socket /var/run/esphome.sock --ha-addon
@ -1 +0,0 @@
longrun
@ -1,27 +0,0 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Configures NGINX for use with ESPHome
# ==============================================================================
mkdir -p /var/log/nginx

# Generate Ingress configuration
bashio::var.json \
    interface "$(bashio::addon.ip_address)" \
    port "^$(bashio::addon.ingress_port)" \
    | tempio \
        -template /etc/nginx/templates/ingress.gtpl \
        -out /etc/nginx/servers/ingress.conf

# Generate direct access configuration, if enabled.
if bashio::var.has_value "$(bashio::addon.port 6052)"; then
    bashio::config.require.ssl
    bashio::var.json \
        certfile "$(bashio::config 'certfile')" \
        keyfile "$(bashio::config 'keyfile')" \
        ssl "^$(bashio::config 'ssl')" \
        | tempio \
            -template /etc/nginx/templates/direct.gtpl \
            -out /etc/nginx/servers/direct.conf
fi
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-nginx/run
@ -1,25 +0,0 @@
#!/command/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when NGINX fails
# ==============================================================================
declare exit_code
readonly exit_code_container=$(</run/s6-linux-init-container-results/exitcode)
readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"

bashio::log.info \
    "Service NGINX exited with code ${exit_code_service}" \
    "(by signal ${exit_code_signal})"

if [[ "${exit_code_service}" -eq 256 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo $((128 + $exit_code_signal)) > /run/s6-linux-init-container-results/exitcode
    fi
    [[ "${exit_code_signal}" -eq 15 ]] && exec /run/s6/basedir/bin/halt
elif [[ "${exit_code_service}" -ne 0 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo "${exit_code_service}" > /run/s6-linux-init-container-results/exitcode
    fi
    exec /run/s6/basedir/bin/halt
fi
@ -1 +0,0 @@
longrun
20 docker/platformio_install_deps.py Executable file
@ -0,0 +1,20 @@
#!/usr/bin/env python3
# This script is used in the docker containers to preinstall
# all platformio libraries in the global storage

import configparser
import re
import subprocess
import sys

config = configparser.ConfigParser()
config.read(sys.argv[1])
libs = []
for line in config['common']['lib_deps'].splitlines():
    # Format: '1655@1.0.2 ; TinyGPSPlus (has name conflict)' (includes comment)
    m = re.search(r'([a-zA-Z0-9-_/]+@[0-9\.]+)', line)
    if m is None:
        continue
    libs.append(m.group(1))

subprocess.check_call(['platformio', 'lib', '-g', 'install', *libs])
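The script above relies on a single regex to pull a pinned `name@version` spec out of each `lib_deps` line. A quick standalone demonstration (the sample entries are made up); comment-only lines yield no match and are skipped:

# Standalone demonstration of the lib_deps line parser above.
import re

lines = [
    "AsyncTCP-esphome@1.2.2",
    "1655@1.0.2 ; TinyGPSPlus (has name conflict)",
    "; just a comment",
]
for line in lines:
    m = re.search(r"([a-zA-Z0-9-_/]+@[0-9\.]+)", line)
    print(repr(line), "->", m.group(1) if m else "skipped")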
41 docker/rootfs/etc/cont-init.d/10-requirements.sh Executable file
@ -0,0 +1,41 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# This file checks that all user configuration requirements are met
# ==============================================================================

# Check SSL requirements, if enabled
if bashio::config.true 'ssl'; then
    if ! bashio::config.has_value 'certfile'; then
        bashio::fatal 'SSL is enabled, but no certfile was specified.'
        bashio::exit.nok
    fi

    if ! bashio::config.has_value 'keyfile'; then
        bashio::fatal 'SSL is enabled, but no keyfile was specified.'
        bashio::exit.nok
    fi

    certfile="/ssl/$(bashio::config 'certfile')"
    keyfile="/ssl/$(bashio::config 'keyfile')"

    if ! bashio::fs.file_exists "${certfile}"; then
        if ! bashio::fs.file_exists "${keyfile}"; then
            # Both files are missing, let's print a friendlier error message
            bashio::log.fatal 'You enabled encrypted connections using the "ssl": true option.'
            bashio::log.fatal "However, the SSL files '${certfile}' and '${keyfile}'"
            bashio::log.fatal "were not found. If you're using Hass.io on your local network and don't want"
            bashio::log.fatal 'to encrypt connections to the ESPHome dashboard, you can manually disable'
            bashio::log.fatal 'SSL by setting "ssl" to false.'
            bashio::exit.nok
        fi
        bashio::log.fatal "The configured certfile '${certfile}' was not found."
        bashio::exit.nok
    fi

    if ! bashio::fs.file_exists "/ssl/$(bashio::config 'keyfile')"; then
        bashio::log.fatal "The configured keyfile '${keyfile}' was not found."
        bashio::exit.nok
    fi
fi
34 docker/rootfs/etc/cont-init.d/20-nginx.sh Executable file
@ -0,0 +1,34 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Configures NGINX for use with ESPHome
# ==============================================================================

declare certfile
declare keyfile
declare direct_port
declare ingress_interface
declare ingress_port

mkdir -p /var/log/nginx

direct_port=$(bashio::addon.port 6052)
if bashio::var.has_value "${direct_port}"; then
    if bashio::config.true 'ssl'; then
        certfile=$(bashio::config 'certfile')
        keyfile=$(bashio::config 'keyfile')

        mv /etc/nginx/servers/direct-ssl.disabled /etc/nginx/servers/direct.conf
        sed -i "s/%%certfile%%/${certfile}/g" /etc/nginx/servers/direct.conf
        sed -i "s/%%keyfile%%/${keyfile}/g" /etc/nginx/servers/direct.conf
    else
        mv /etc/nginx/servers/direct.disabled /etc/nginx/servers/direct.conf
    fi

    sed -i "s/%%port%%/${direct_port}/g" /etc/nginx/servers/direct.conf
fi

ingress_port=$(bashio::addon.ingress_port)
ingress_interface=$(bashio::addon.ip_address)
sed -i "s/%%port%%/${ingress_port}/g" /etc/nginx/servers/ingress.conf
sed -i "s/%%interface%%/${ingress_interface}/g" /etc/nginx/servers/ingress.conf
@ -1,9 +1,9 @@
proxy_http_version 1.1;
proxy_ignore_client_abort off;
proxy_read_timeout 86400s;
proxy_redirect off;
proxy_send_timeout 86400s;
proxy_max_temp_file_size 0;
proxy_http_version 1.1;
proxy_ignore_client_abort off;
proxy_read_timeout 86400s;
proxy_redirect off;
proxy_send_timeout 86400s;
proxy_max_temp_file_size 0;

proxy_set_header Accept-Encoding "";
proxy_set_header Connection $connection_upgrade;
@ -1,7 +1,5 @@
root /dev/null;
server_name $hostname;

client_max_body_size 512m;
root /dev/null;
server_name $hostname;

add_header X-Content-Type-Options nosniff;
add_header X-XSS-Protection "1; mode=block";
9 docker/rootfs/etc/nginx/includes/ssl_params.conf Normal file
@ -0,0 +1,9 @@
ssl_protocols TLSv1.2;
ssl_prefer_server_ciphers on;
ssl_ciphers ECDHE-RSA-AES256-GCM-SHA512:DHE-RSA-AES256-GCM-SHA512:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:DHE-RSA-AES256-SHA;
ssl_ecdh_curve secp384r1;
ssl_session_timeout 10m;
ssl_session_cache shared:SSL:10m;
ssl_session_tickets off;
ssl_stapling on;
ssl_stapling_verify on;
@ -2,6 +2,7 @@ daemon off;
user root;
pid /var/run/nginx.pid;
worker_processes 1;
# Hass.io addon log
error_log /proc/1/fd/1 error;
events {
    worker_connections 1024;
@ -9,22 +10,24 @@ events {

http {
    include /etc/nginx/includes/mime.types;

    access_log off;
    default_type application/octet-stream;
    gzip on;
    keepalive_timeout 65;
    sendfile on;
    server_tokens off;

    tcp_nodelay on;
    tcp_nopush on;
    access_log stdout;
    default_type application/octet-stream;
    gzip on;
    keepalive_timeout 65;
    sendfile on;
    server_tokens off;

    map $http_upgrade $connection_upgrade {
        default upgrade;
        '' close;
    }

    include /etc/nginx/includes/upstream.conf;
    # Use Hass.io supervisor as resolver
    resolver 172.30.32.2;

    upstream esphome {
        server unix:/var/run/esphome.sock;
    }

    include /etc/nginx/servers/*.conf;
}
@ -1,26 +1,20 @@
server {
    {{ if not .ssl }}
    listen 6052 default_server;
    {{ else }}
    listen 6052 default_server ssl http2;
    {{ end }}
    listen %%port%% default_server ssl http2;

    include /etc/nginx/includes/server_params.conf;
    include /etc/nginx/includes/proxy_params.conf;

    {{ if .ssl }}
    include /etc/nginx/includes/ssl_params.conf;

    ssl_certificate /ssl/{{ .certfile }};
    ssl_certificate_key /ssl/{{ .keyfile }};
    ssl on;
    ssl_certificate /ssl/%%certfile%%;
    ssl_certificate_key /ssl/%%keyfile%%;

    # Clear Hass.io Ingress header
    proxy_set_header X-Hassio-Ingress "";

    # Redirect http requests to https on the same port.
    # https://rageagainstshell.com/2016/11/redirect-http-to-https-on-the-same-port-in-nginx/
    error_page 497 https://$http_host$request_uri;
    {{ end }}

    # Clear Home Assistant Ingress header
    proxy_set_header X-HA-Ingress "";

    location / {
        proxy_pass http://esphome;
12 docker/rootfs/etc/nginx/servers/direct.disabled Normal file
@ -0,0 +1,12 @@
server {
    listen %%port%% default_server;

    include /etc/nginx/includes/server_params.conf;
    include /etc/nginx/includes/proxy_params.conf;
    # Clear Hass.io Ingress header
    proxy_set_header X-Hassio-Ingress "";

    location / {
        proxy_pass http://esphome;
    }
}
@ -1,16 +1,14 @@
server {
    listen 127.0.0.1:{{ .port }} default_server;
    listen {{ .interface }}:{{ .port }} default_server;
    listen %%interface%%:%%port%% default_server;

    include /etc/nginx/includes/server_params.conf;
    include /etc/nginx/includes/proxy_params.conf;

    # Set Home Assistant Ingress header
    proxy_set_header X-HA-Ingress "YES";
    # Set Hass.io Ingress header
    proxy_set_header X-Hassio-Ingress "YES";

    location / {
        # Only allow from Hass.io supervisor
        allow 172.30.32.2;
        allow 127.0.0.1;
        deny all;

        proxy_pass http://esphome;
9 docker/rootfs/etc/services.d/esphome/finish Executable file
@ -0,0 +1,9 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when ESPHome fails
# ==============================================================================
if -n { s6-test $# -ne 0 }
if -n { s6-test ${1} -eq 256 }

s6-svscanctl -t /var/run/s6/services
26 docker/rootfs/etc/services.d/esphome/run Executable file
@ -0,0 +1,26 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the ESPHome dashboard
# ==============================================================================

export ESPHOME_IS_HASSIO=true

if bashio::config.true 'leave_front_door_open'; then
    export DISABLE_HA_AUTHENTICATION=true
fi

if bashio::config.true 'streamer_mode'; then
    export ESPHOME_STREAMER_MODE=true
fi

if bashio::config.true 'status_use_ping'; then
    export ESPHOME_DASHBOARD_USE_PING=true
fi

if bashio::config.has_value 'relative_url'; then
    export ESPHOME_DASHBOARD_RELATIVE_URL=$(bashio::config 'relative_url')
fi

bashio::log.info "Starting ESPHome dashboard..."
exec esphome dashboard /config/esphome --socket /var/run/esphome.sock --hassio
9 docker/rootfs/etc/services.d/nginx/finish Executable file
@ -0,0 +1,9 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when NGINX fails
# ==============================================================================
if -n { s6-test $# -ne 0 }
if -n { s6-test ${1} -eq 256 }

s6-svscanctl -t /var/run/s6/services
@ -1,11 +1,10 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the NGINX proxy
# ==============================================================================

bashio::log.info "Waiting for ESPHome dashboard to come up..."
bashio::log.info "Waiting for dashboard to come up..."

while [[ ! -S /var/run/esphome.sock ]]; do
    sleep 0.5
|
||||
# PYTHON_ARGCOMPLETE_OK
|
||||
import argparse
|
||||
from datetime import datetime
|
||||
import functools
|
||||
import importlib
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
|
||||
import argcomplete
|
||||
from datetime import datetime
|
||||
|
||||
from esphome import const, writer, yaml_util
|
||||
import esphome.codegen as cg
|
||||
from esphome.config import iter_component_configs, read_config, strip_default_ids
|
||||
from esphome.config import iter_components, read_config, strip_default_ids
|
||||
from esphome.const import (
|
||||
ALLOWED_NAME_CHARS,
|
||||
CONF_BAUD_RATE,
|
||||
CONF_BROKER,
|
||||
CONF_DEASSERT_RTS_DTR,
|
||||
CONF_DISABLED,
|
||||
CONF_ESPHOME,
|
||||
CONF_LEVEL,
|
||||
CONF_LOG_TOPIC,
|
||||
CONF_LOGGER,
|
||||
CONF_MDNS,
|
||||
CONF_MQTT,
|
||||
CONF_NAME,
|
||||
CONF_OTA,
|
||||
CONF_PASSWORD,
|
||||
CONF_PLATFORM,
|
||||
CONF_PLATFORMIO_OPTIONS,
|
||||
CONF_PORT,
|
||||
CONF_SUBSTITUTIONS,
|
||||
CONF_TOPIC,
|
||||
PLATFORM_BK72XX,
|
||||
PLATFORM_ESP32,
|
||||
PLATFORM_ESP8266,
|
||||
PLATFORM_RP2040,
|
||||
PLATFORM_RTL87XX,
|
||||
SECRETS_FILES,
|
||||
CONF_ESPHOME,
|
||||
CONF_PLATFORMIO_OPTIONS,
|
||||
)
|
||||
from esphome.core import CORE, EsphomeError, coroutine
|
||||
from esphome.helpers import get_bool_env, indent, is_ip_address
|
||||
from esphome.log import Fore, color, setup_log
|
||||
from esphome.helpers import indent
|
||||
from esphome.util import (
|
||||
get_serial_ports,
|
||||
list_yaml_files,
|
||||
run_external_command,
|
||||
run_external_process,
|
||||
safe_print,
|
||||
list_yaml_files,
|
||||
get_serial_ports,
|
||||
)
|
||||
from esphome.log import color, setup_log, Fore
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def choose_prompt(options, purpose: str = None):
|
||||
def choose_prompt(options):
|
||||
if not options:
|
||||
raise EsphomeError(
|
||||
"Found no valid options for upload/logging, please make sure relevant "
|
||||
@ -66,11 +44,9 @@ def choose_prompt(options, purpose: str = None):
|
||||
if len(options) == 1:
|
||||
return options[0][1]
|
||||
|
||||
safe_print(
|
||||
f"Found multiple options{f' for {purpose}' if purpose else ''}, please choose one:"
|
||||
)
|
||||
safe_print("Found multiple options, please choose one:")
|
||||
for i, (desc, _) in enumerate(options):
|
||||
safe_print(f" [{i + 1}] {desc}")
|
||||
safe_print(f" [{i+1}] {desc}")
|
||||
|
||||
while True:
|
||||
opt = input("(number): ")
|
||||
@ -87,42 +63,23 @@ def choose_prompt(options, purpose: str = None):
|
||||
return options[opt - 1][1]
|
||||
|
||||
|
||||
def choose_upload_log_host(
|
||||
default, check_default, show_ota, show_mqtt, show_api, purpose: str = None
|
||||
):
|
||||
def choose_upload_log_host(default, check_default, show_ota, show_mqtt, show_api):
|
||||
options = []
|
||||
for port in get_serial_ports():
|
||||
options.append((f"{port.path} ({port.description})", port.path))
|
||||
if default == "SERIAL":
|
||||
return choose_prompt(options, purpose=purpose)
|
||||
if (show_ota and "ota" in CORE.config) or (show_api and "api" in CORE.config):
|
||||
options.append((f"Over The Air ({CORE.address})", CORE.address))
|
||||
if default == "OTA":
|
||||
return CORE.address
|
||||
if (
|
||||
show_mqtt
|
||||
and (mqtt_config := CORE.config.get(CONF_MQTT))
|
||||
and mqtt_logging_enabled(mqtt_config)
|
||||
):
|
||||
options.append((f"MQTT ({mqtt_config[CONF_BROKER]})", "MQTT"))
|
||||
if show_mqtt and "mqtt" in CORE.config:
|
||||
options.append(("MQTT ({})".format(CORE.config["mqtt"][CONF_BROKER]), "MQTT"))
|
||||
if default == "OTA":
|
||||
return "MQTT"
|
||||
if default is not None:
|
||||
return default
|
||||
if check_default is not None and check_default in [opt[1] for opt in options]:
|
||||
return check_default
|
||||
return choose_prompt(options, purpose=purpose)
|
||||
|
||||
|
||||
def mqtt_logging_enabled(mqtt_config):
|
||||
log_topic = mqtt_config[CONF_LOG_TOPIC]
|
||||
if log_topic is None:
|
||||
return False
|
||||
if CONF_TOPIC not in log_topic:
|
||||
return False
|
||||
if log_topic.get(CONF_LEVEL, None) == "NONE":
|
||||
return False
|
||||
return True
|
||||
return choose_prompt(options)
|
||||
|
||||
|
||||
def get_port_type(port):
|
||||
@ -133,18 +90,17 @@ def get_port_type(port):
|
||||
return "NETWORK"
|
||||
|
||||
|
||||
def run_miniterm(config, port, args):
|
||||
def run_miniterm(config, port):
|
||||
import serial
|
||||
|
||||
from esphome import platformio_api
|
||||
|
||||
if CONF_LOGGER not in config:
|
||||
_LOGGER.info("Logger is not enabled. Not starting UART logs.")
|
||||
return 1
|
||||
return
|
||||
baud_rate = config["logger"][CONF_BAUD_RATE]
|
||||
if baud_rate == 0:
|
||||
_LOGGER.info("UART logging is disabled (baud_rate=0). Not starting UART logs.")
|
||||
return 1
|
||||
return
|
||||
_LOGGER.info("Starting log output from %s with baud rate %s", port, baud_rate)
|
||||
|
||||
backtrace_state = False
|
||||
@ -154,40 +110,29 @@ def run_miniterm(config, port, args):
|
||||
|
||||
# We can't set to False by default since it leads to toggling and hence
|
||||
# ESP32 resets on some platforms.
|
||||
if config["logger"][CONF_DEASSERT_RTS_DTR] or args.reset:
|
||||
if config["logger"][CONF_DEASSERT_RTS_DTR]:
|
||||
ser.dtr = False
|
||||
ser.rts = False
|
||||
|
||||
tries = 0
|
||||
while tries < 5:
|
||||
try:
|
||||
with ser:
|
||||
while True:
|
||||
try:
|
||||
raw = ser.readline()
|
||||
except serial.SerialException:
|
||||
_LOGGER.error("Serial port closed!")
|
||||
return 0
|
||||
line = (
|
||||
raw.replace(b"\r", b"")
|
||||
.replace(b"\n", b"")
|
||||
.decode("utf8", "backslashreplace")
|
||||
)
|
||||
time_str = datetime.now().time().strftime("[%H:%M:%S]")
|
||||
message = time_str + line
|
||||
safe_print(message)
|
||||
with ser:
|
||||
while True:
|
||||
try:
|
||||
raw = ser.readline()
|
||||
except serial.SerialException:
|
||||
_LOGGER.error("Serial port closed!")
|
||||
return
|
||||
line = (
|
||||
raw.replace(b"\r", b"")
|
||||
.replace(b"\n", b"")
|
||||
.decode("utf8", "backslashreplace")
|
||||
)
|
||||
time = datetime.now().time().strftime("[%H:%M:%S]")
|
||||
message = time + line
|
||||
safe_print(message)
|
||||
|
||||
backtrace_state = platformio_api.process_stacktrace(
|
||||
config, line, backtrace_state=backtrace_state
|
||||
)
|
||||
except serial.SerialException:
|
||||
tries += 1
|
||||
time.sleep(1)
|
||||
if tries >= 5:
|
||||
_LOGGER.error("Could not connect to serial port %s", port)
|
||||
return 1
|
||||
|
||||
return 0
|
||||
backtrace_state = platformio_api.process_stacktrace(
|
||||
config, line, backtrace_state=backtrace_state
|
||||
)
|
||||
|
||||
|
||||
def wrap_to_code(name, comp):
|
||||
@ -199,8 +144,6 @@ def wrap_to_code(name, comp):
|
||||
if comp.config_schema is not None:
|
||||
conf_str = yaml_util.dump(conf)
|
||||
conf_str = conf_str.replace("//", "")
|
||||
# remove tailing \ to avoid multi-line comment warning
|
||||
conf_str = conf_str.replace("\\\n", "\n")
|
||||
cg.add(cg.LineComment(indent(conf_str)))
|
||||
await coro(conf)
|
||||
|
||||
@ -217,7 +160,7 @@ def write_cpp(config):
|
||||
def generate_cpp_contents(config):
|
||||
_LOGGER.info("Generating C++ source...")
|
||||
|
||||
for name, component, conf in iter_component_configs(CORE.config):
|
||||
for name, component, conf in iter_components(CORE.config):
|
||||
if component.to_code is not None:
|
||||
coro = wrap_to_code(name, component)
|
||||
CORE.add_job(coro, conf)
|
||||
@ -237,39 +180,15 @@ def compile_program(args, config):
|
||||
from esphome import platformio_api
|
||||
|
||||
_LOGGER.info("Compiling app...")
|
||||
rc = platformio_api.run_compile(config, CORE.verbose)
|
||||
if rc != 0:
|
||||
return rc
|
||||
idedata = platformio_api.get_idedata(config)
|
||||
return 0 if idedata is not None else 1
|
||||
return platformio_api.run_compile(config, CORE.verbose)
|
||||
|
||||
|
||||
def upload_using_esptool(config, port, file, speed):
|
||||
from esphome import platformio_api
|
||||
|
||||
first_baudrate = speed or config[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS].get(
|
||||
"upload_speed", os.getenv("ESPHOME_UPLOAD_SPEED", "460800")
|
||||
def upload_using_esptool(config, port):
|
||||
path = CORE.firmware_bin
|
||||
first_baudrate = config[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS].get(
|
||||
"upload_speed", 460800
|
||||
)
|
||||
|
||||
if file is not None:
|
||||
flash_images = [platformio_api.FlashImage(path=file, offset="0x0")]
|
||||
else:
|
||||
idedata = platformio_api.get_idedata(config)
|
||||
|
||||
firmware_offset = "0x10000" if CORE.is_esp32 else "0x0"
|
||||
flash_images = [
|
||||
platformio_api.FlashImage(
|
||||
path=idedata.firmware_bin_path, offset=firmware_offset
|
||||
),
|
||||
*idedata.extra_flash_images,
|
||||
]
|
||||
|
||||
mcu = "esp8266"
|
||||
if CORE.is_esp32:
|
||||
from esphome.components.esp32 import get_esp32_variant
|
||||
|
||||
mcu = get_esp32_variant().lower()
|
||||
|
||||
def run_esptool(baud_rate):
|
||||
cmd = [
|
||||
"esptool.py",
|
||||
@ -279,22 +198,20 @@ def upload_using_esptool(config, port, file, speed):
|
||||
"hard_reset",
|
||||
"--baud",
|
||||
str(baud_rate),
|
||||
"--chip",
|
||||
"esp8266",
|
||||
"--port",
|
||||
port,
|
||||
"--chip",
|
||||
mcu,
|
||||
"write_flash",
|
||||
"-z",
|
||||
"--flash_size",
|
||||
"detect",
|
||||
"0x0",
|
||||
path,
|
||||
]
|
||||
for img in flash_images:
|
||||
cmd += [img.offset, img.path]
|
||||
|
||||
if os.environ.get("ESPHOME_USE_SUBPROCESS") is None:
|
||||
import esptool
|
||||
|
||||
return run_external_command(esptool.main, *cmd) # pylint: disable=no-member
|
||||
# pylint: disable=protected-access
|
||||
return run_external_command(esptool._main, *cmd)
|
||||
|
||||
return run_external_process(*cmd)
|
||||
|
||||
@ -309,88 +226,26 @@ def upload_using_esptool(config, port, file, speed):
|
||||
return run_esptool(115200)
|
||||
|
||||
|
||||
def upload_using_platformio(config, port):
|
||||
from esphome import platformio_api
|
||||
|
||||
upload_args = ["-t", "upload", "-t", "nobuild"]
|
||||
if port is not None:
|
||||
upload_args += ["--upload-port", port]
|
    return platformio_api.run_platformio_cli_run(config, CORE.verbose, *upload_args)


def check_permissions(port):
    if os.name == "posix" and get_port_type(port) == "SERIAL":
        # Check if we can open selected serial port
        if not os.access(port, os.F_OK):
            raise EsphomeError(
                "The selected serial port does not exist. To resolve this issue, "
                "check that the device is connected to this computer with a USB cable and that "
                "the USB cable can be used for data and is not a power-only cable."
            )
        if not (os.access(port, os.R_OK | os.W_OK)):
            raise EsphomeError(
                "You do not have read or write permission on the selected serial port. "
                "To resolve this issue, you can add your user to the dialout group "
                f"by running the following command: sudo usermod -a -G dialout {os.getlogin()}. "
                "You will need to log out & back in or reboot to activate the new group access."
            )


def upload_program(config, args, host):
    try:
        module = importlib.import_module("esphome.components." + CORE.target_platform)
        if getattr(module, "upload_program")(config, args, host):
            return 0
    except AttributeError:
        pass

    # if upload is to a serial port use platformio, otherwise assume ota
    if get_port_type(host) == "SERIAL":
        check_permissions(host)
        if CORE.target_platform in (PLATFORM_ESP32, PLATFORM_ESP8266):
            file = getattr(args, "file", None)
            return upload_using_esptool(config, host, file, args.upload_speed)
        from esphome import platformio_api

        if CORE.target_platform in (PLATFORM_RP2040,):
            return upload_using_platformio(config, args.device)

        if CORE.target_platform in (PLATFORM_BK72XX, PLATFORM_RTL87XX):
            return upload_using_platformio(config, host)

        return 1  # Unknown target platform

    ota_conf = {}
    for ota_item in config.get(CONF_OTA, []):
        if ota_item[CONF_PLATFORM] == CONF_ESPHOME:
            ota_conf = ota_item
            break

    if not ota_conf:
        raise EsphomeError(
            f"Cannot upload Over the Air as the {CONF_OTA} configuration is not present or does not include {CONF_PLATFORM}: {CONF_ESPHOME}"
        )
    if CORE.is_esp8266:
        return upload_using_esptool(config, host)
    return platformio_api.run_upload(config, CORE.verbose, host)

    from esphome import espota2

    remote_port = int(ota_conf[CONF_PORT])
    password = ota_conf.get(CONF_PASSWORD, "")

    if (
        CONF_MQTT in config  # pylint: disable=too-many-boolean-expressions
        and (not args.device or args.device in ("MQTT", "OTA"))
        and (
            ((config[CONF_MDNS][CONF_DISABLED]) and not is_ip_address(CORE.address))
            or get_port_type(host) == "MQTT"
        )
    ):
        from esphome import mqtt

        host = mqtt.get_esphome_device_ip(
            config, args.username, args.password, args.client_id
        )

    if CONF_OTA not in config:
        raise EsphomeError(
            "Cannot upload Over the Air as the config does not include the ota: "
            "component"
        )

    if getattr(args, "file", None) is not None:
        return espota2.run_ota(host, remote_port, password, args.file)

    ota_conf = config[CONF_OTA]
    remote_port = ota_conf[CONF_PORT]
    password = ota_conf[CONF_PASSWORD]
    return espota2.run_ota(host, remote_port, password, CORE.firmware_bin)
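In dev, `ota:` accepts a list of platform entries, and the loop above selects the one whose platform is `esphome`. A standalone sketch of that lookup, with hypothetical config values:

# Illustration only (hypothetical config); mirrors the ota_conf scan above.
config = {"ota": [{"platform": "web_server"}, {"platform": "esphome", "port": 3232}]}
ota_conf = next(
    (item for item in config.get("ota", []) if item["platform"] == "esphome"), {}
)
assert ota_conf["port"] == 3232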
@@ -398,17 +253,10 @@ def show_logs(config, args, port):
    if "logger" not in config:
        raise EsphomeError("Logger is not configured!")
    if get_port_type(port) == "SERIAL":
        check_permissions(port)
        return run_miniterm(config, port, args)
        run_miniterm(config, port)
        return 0
    if get_port_type(port) == "NETWORK" and "api" in config:
        if config[CONF_MDNS][CONF_DISABLED] and CONF_MQTT in config:
            from esphome import mqtt

            port = mqtt.get_esphome_device_ip(
                config, args.username, args.password, args.client_id
            )[0]

        from esphome.components.api.client import run_logs
        from esphome.api.client import run_logs

        return run_logs(config, port)
    if get_port_type(port) == "MQTT" and "mqtt" in config:
@@ -436,17 +284,10 @@ def command_wizard(args):


def command_config(args, config):
    _LOGGER.info("Configuration is valid!")
    if not CORE.verbose:
        config = strip_default_ids(config)
    output = yaml_util.dump(config, args.show_secrets)
    # add the console decoration so the front-end can hide the secrets
    if not args.show_secrets:
        output = re.sub(
            r"(password|key|psk|ssid)\: (.+)", r"\1: \\033[5m\2\\033[6m", output
        )
    if not CORE.quiet:
        safe_print(output)
    _LOGGER.info("Configuration is valid!")
    safe_print(yaml_util.dump(config))
    return 0


@@ -479,7 +320,6 @@ def command_upload(args, config):
        show_ota=True,
        show_mqtt=False,
        show_api=False,
        purpose="uploading",
    )
    exit_code = upload_program(config, args, port)
    if exit_code != 0:
@@ -488,15 +328,6 @@ def command_upload(args, config):
    return 0


def command_discover(args, config):
    if "mqtt" in config:
        from esphome import mqtt

        return mqtt.show_discover(config, args.username, args.password, args.client_id)

    raise EsphomeError("No discover method configured (mqtt)")


def command_logs(args, config):
    port = choose_upload_log_host(
        default=args.device,
@@ -504,7 +335,6 @@ def command_logs(args, config):
        show_ota=False,
        show_mqtt=True,
        show_api=True,
        purpose="logging",
    )
    return show_logs(config, args, port)

@@ -517,22 +347,12 @@ def command_run(args, config):
    if exit_code != 0:
        return exit_code
    _LOGGER.info("Successfully compiled program.")
    if CORE.is_host:
        from esphome.platformio_api import get_idedata

        idedata = get_idedata(config)
        if idedata is None:
            return 1
        program_path = idedata.raw["prog_path"]
        return run_external_process(program_path)

    port = choose_upload_log_host(
        default=args.device,
        check_default=None,
        show_ota=True,
        show_mqtt=False,
        show_api=True,
        purpose="uploading",
    )
    exit_code = upload_program(config, args, port)
    if exit_code != 0:
@@ -546,7 +366,6 @@ def command_run(args, config):
        show_ota=False,
        show_mqtt=True,
        show_api=True,
        purpose="logging",
    )
    return show_logs(config, args, port)
@@ -579,7 +398,7 @@ def command_clean(args, config):
def command_dashboard(args):
    from esphome.dashboard import dashboard

    return dashboard.start_dashboard(args)
    return dashboard.start_web_server(args)


def command_update_all(args):
@@ -596,143 +415,34 @@ def command_update_all(args):
        click.echo(f"{half_line}{middle_text}{half_line}")

    for f in files:
        print(f"Updating {color(Fore.CYAN, f)}")
        print("Updating {}".format(color(Fore.CYAN, f)))
        print("-" * twidth)
        print()
        rc = run_external_process(
            "esphome", "--dashboard", "run", f, "--no-logs", "--device", "OTA"
        )
        if rc == 0:
            print_bar(f"[{color(Fore.BOLD_GREEN, 'SUCCESS')}] {f}")
            print_bar("[{}] {}".format(color(Fore.BOLD_GREEN, "SUCCESS"), f))
            success[f] = True
        else:
            print_bar(f"[{color(Fore.BOLD_RED, 'ERROR')}] {f}")
            print_bar("[{}] {}".format(color(Fore.BOLD_RED, "ERROR"), f))
            success[f] = False

        print()
        print()
        print()

    print_bar(f"[{color(Fore.BOLD_WHITE, 'SUMMARY')}]")
    print_bar("[{}]".format(color(Fore.BOLD_WHITE, "SUMMARY")))
    failed = 0
    for f in files:
        if success[f]:
            print(f"  - {f}: {color(Fore.GREEN, 'SUCCESS')}")
            print("  - {}: {}".format(f, color(Fore.GREEN, "SUCCESS")))
        else:
            print(f"  - {f}: {color(Fore.BOLD_RED, 'FAILED')}")
            print("  - {}: {}".format(f, color(Fore.BOLD_RED, "FAILED")))
            failed += 1
    return failed

def command_idedata(args, config):
    import json

    from esphome import platformio_api

    logging.disable(logging.INFO)
    logging.disable(logging.WARNING)

    idedata = platformio_api.get_idedata(config)
    if idedata is None:
        return 1

    print(json.dumps(idedata.raw, indent=2) + "\n")
    return 0

def command_rename(args, config):
    for c in args.name:
        if c not in ALLOWED_NAME_CHARS:
            print(
                color(
                    Fore.BOLD_RED,
                    f"'{c}' is an invalid character for names. Valid characters are: "
                    f"{ALLOWED_NAME_CHARS} (lowercase, no spaces)",
                )
            )
            return 1
    # Load existing yaml file
    with open(CORE.config_path, mode="r+", encoding="utf-8") as raw_file:
        raw_contents = raw_file.read()

    yaml = yaml_util.load_yaml(CORE.config_path)
    if CONF_ESPHOME not in yaml or CONF_NAME not in yaml[CONF_ESPHOME]:
        print(
            color(Fore.BOLD_RED, "Complex YAML files cannot be automatically renamed.")
        )
        return 1
    old_name = yaml[CONF_ESPHOME][CONF_NAME]
    match = re.match(r"^\$\{?([a-zA-Z0-9_]+)\}?$", old_name)
    if match is None:
        new_raw = re.sub(
            rf"name:\s+[\"']?{old_name}[\"']?",
            f'name: "{args.name}"',
            raw_contents,
        )
    else:
        old_name = yaml[CONF_SUBSTITUTIONS][match.group(1)]
        if (
            len(
                re.findall(
                    rf"^\s+{match.group(1)}:\s+[\"']?{old_name}[\"']?",
                    raw_contents,
                    flags=re.MULTILINE,
                )
            )
            > 1
        ):
            print(color(Fore.BOLD_RED, "Too many matches in YAML to safely rename"))
            return 1

        new_raw = re.sub(
            rf"^(\s+{match.group(1)}):\s+[\"']?{old_name}[\"']?",
            f'\\1: "{args.name}"',
            raw_contents,
            flags=re.MULTILINE,
        )

    new_path = os.path.join(CORE.config_dir, args.name + ".yaml")
    print(
        f"Updating {color(Fore.CYAN, CORE.config_path)} to {color(Fore.CYAN, new_path)}"
    )
    print()

    with open(new_path, mode="w", encoding="utf-8") as new_file:
        new_file.write(new_raw)

    rc = run_external_process("esphome", "config", new_path)
    if rc != 0:
        print(color(Fore.BOLD_RED, "Rename failed. Reverting changes."))
        os.remove(new_path)
        return 1

    cli_args = [
        "run",
        new_path,
        "--no-logs",
        "--device",
        CORE.address,
    ]

    if args.dashboard:
        cli_args.insert(0, "--dashboard")

    try:
        rc = run_external_process("esphome", *cli_args)
    except KeyboardInterrupt:
        rc = 1
    if rc != 0:
        os.remove(new_path)
        return 1

    if CORE.config_path != new_path:
        os.remove(CORE.config_path)

    print(color(Fore.BOLD_GREEN, "SUCCESS"))
    print()
    return 0


PRE_CONFIG_ACTIONS = {
    "wizard": command_wizard,
    "version": command_version,
@@ -750,32 +460,17 @@ POST_CONFIG_ACTIONS = {
    "clean-mqtt": command_clean_mqtt,
    "mqtt-fingerprint": command_mqtt_fingerprint,
    "clean": command_clean,
    "idedata": command_idedata,
    "rename": command_rename,
    "discover": command_discover,
}


def parse_args(argv):
    options_parser = argparse.ArgumentParser(add_help=False)
    options_parser.add_argument(
        "-v",
        "--verbose",
        help="Enable verbose ESPHome logs.",
        action="store_true",
        default=get_bool_env("ESPHOME_VERBOSE"),
        "-v", "--verbose", help="Enable verbose ESPHome logs.", action="store_true"
    )
    options_parser.add_argument(
        "-q", "--quiet", help="Disable all ESPHome logs.", action="store_true"
    )
    options_parser.add_argument(
        "-l",
        "--log-level",
        help="Set the log level.",
        default=os.getenv("ESPHOME_LOG_LEVEL", "INFO"),
        action="store",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
    )
    options_parser.add_argument(
        "--dashboard", help=argparse.SUPPRESS, action="store_true"
    )
@@ -788,16 +483,75 @@ def parse_args(argv):
        metavar=("key", "value"),
    )

    parser = argparse.ArgumentParser(
        description=f"ESPHome {const.__version__}", parents=[options_parser]
    # Keep backward compatibility with the old command line format of
    # esphome <config> <command>.
    #
    # Unfortunately this can't be done by adding another configuration argument to the
    # main config parser, as argparse is greedy when parsing arguments, so in regular
    # usage it'll eat the command as the configuration argument and error out
    # because it can't parse the configuration as a command.
    #
    # Instead, construct an ad-hoc parser for the old format that doesn't actually
    # process the arguments, but parses them enough to let us figure out if the old
    # format is used. In that case, swap the command and configuration in the arguments
    # and continue on with the normal parser (after raising a deprecation warning).
    #
    # Disable argparse's built-in help option and add it manually to prevent this
    # parser from printing the help message for the old format when invoked with -h.
    compat_parser = argparse.ArgumentParser(parents=[options_parser], add_help=False)
    compat_parser.add_argument("-h", "--help")
    compat_parser.add_argument("configuration", nargs="*")
    compat_parser.add_argument(
        "command",
        choices=[
            "config",
            "compile",
            "upload",
            "logs",
            "run",
            "clean-mqtt",
            "wizard",
            "mqtt-fingerprint",
            "version",
            "clean",
            "dashboard",
            "vscode",
            "update-all",
        ],
    )

    parser.add_argument(
        "--version",
        action="version",
        version=f"Version: {const.__version__}",
        help="Print the ESPHome version and exit.",
    )

    # on Python 3.9+ we can simply set exit_on_error=False in the constructor
    def _raise(x):
        raise argparse.ArgumentError(None, x)

    compat_parser.error = _raise

    deprecated_argv_suggestion = None

    if ["dashboard", "config"] == argv[1:3] or ["version"] == argv[1:2]:
        # this is most likely meant in new-style arg format. do not try compat parsing
        pass
    else:
        try:
            result, unparsed = compat_parser.parse_known_args(argv[1:])
            last_option = len(argv) - len(unparsed) - 1 - len(result.configuration)
            unparsed = [
                "--device" if arg in ("--upload-port", "--serial-port") else arg
                for arg in unparsed
            ]
            argv = (
                argv[0:last_option] + [result.command] + result.configuration + unparsed
            )
            deprecated_argv_suggestion = argv
        except argparse.ArgumentError:
            # This is not an old-style command line, so we don't have to do anything.
            pass
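A standalone sketch of the compat trick the comment above describes (simplified, hypothetical subset of commands): parse_known_args tolerates the old `esphome <config> <command>` order, which lets the code detect it and swap the two before re-parsing.

# Illustration only; a tiny stand-in for the real compat parser.
import argparse

compat = argparse.ArgumentParser(add_help=False)
compat.add_argument("configuration", nargs="*")
compat.add_argument("command", choices=["run", "config"])
result, unparsed = compat.parse_known_args(["config.yaml", "run", "--no-logs"])
# Old order detected: rewrite to the new `esphome run config.yaml --no-logs`.
argv = [result.command] + result.configuration + unparsed
assert argv == ["run", "config.yaml", "--no-logs"]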
    # And continue on with regular parsing
    parser = argparse.ArgumentParser(
        description=f"ESPHome v{const.__version__}", parents=[options_parser]
    )
    parser.set_defaults(deprecated_argv_suggestion=deprecated_argv_suggestion)

    mqtt_options = argparse.ArgumentParser(add_help=False)
    mqtt_options.add_argument("--topic", help="Manually set the MQTT topic.")
@@ -816,9 +570,6 @@ def parse_args(argv):
    parser_config.add_argument(
        "configuration", help="Your YAML configuration file(s).", nargs="+"
    )
    parser_config.add_argument(
        "--show-secrets", help="Show secrets in output.", action="store_true"
    )

    parser_compile = subparsers.add_parser(
        "compile", help="Read the configuration and compile a program."
@@ -833,9 +584,7 @@ def parse_args(argv):
    )

    parser_upload = subparsers.add_parser(
        "upload",
        help="Validate the configuration and upload the latest binary.",
        parents=[mqtt_options],
        "upload", help="Validate the configuration and upload the latest binary."
    )
    parser_upload.add_argument(
        "configuration", help="Your YAML configuration file(s).", nargs="+"
@@ -844,19 +593,10 @@ def parse_args(argv):
        "--device",
        help="Manually specify the serial port/address to use, for example /dev/ttyUSB0.",
    )
    parser_upload.add_argument(
        "--upload_speed",
        help="Override the default or configured upload speed.",
    )
    parser_upload.add_argument(
        "--file",
        help="Manually specify the binary file to upload.",
    )

    parser_logs = subparsers.add_parser(
        "logs",
        help="Validate the configuration and show all logs.",
        aliases=["log"],
        parents=[mqtt_options],
    )
    parser_logs.add_argument(
@@ -866,22 +606,6 @@ def parse_args(argv):
        "--device",
        help="Manually specify the serial port/address to use, for example /dev/ttyUSB0.",
    )
    parser_logs.add_argument(
        "--reset",
        "-r",
        action="store_true",
        help="Reset the device before starting serial logs.",
        default=os.getenv("ESPHOME_SERIAL_LOGGING_RESET"),
    )

    parser_discover = subparsers.add_parser(
        "discover",
        help="Validate the configuration and show all discovered devices.",
        parents=[mqtt_options],
    )
    parser_discover.add_argument(
        "configuration", help="Your YAML configuration file.", nargs=1
    )

    parser_run = subparsers.add_parser(
        "run",
@@ -895,20 +619,9 @@ def parse_args(argv):
        "--device",
        help="Manually specify the serial port/address to use, for example /dev/ttyUSB0.",
    )
    parser_run.add_argument(
        "--upload_speed",
        help="Override the default or configured upload speed.",
    )
    parser_run.add_argument(
        "--no-logs", help="Disable starting logs.", action="store_true"
    )
    parser_run.add_argument(
        "--reset",
        "-r",
        action="store_true",
        help="Reset the device before starting serial logs.",
        default=os.getenv("ESPHOME_SERIAL_LOGGING_RESET"),
    )

    parser_clean = subparsers.add_parser(
        "clean-mqtt",
@@ -923,7 +636,10 @@ def parse_args(argv):
        "wizard",
        help="A helpful setup wizard that will guide you through setting up ESPHome.",
    )
    parser_wizard.add_argument("configuration", help="Your YAML configuration file.")
    parser_wizard.add_argument(
        "configuration",
        help="Your YAML configuration file.",
    )

    parser_fingerprint = subparsers.add_parser(
        "mqtt-fingerprint", help="Get the SSL fingerprint from a MQTT broker."
@@ -945,7 +661,8 @@ def parse_args(argv):
        "dashboard", help="Create a simple web server for a dashboard."
    )
    parser_dashboard.add_argument(
        "configuration", help="Your YAML configuration file directory."
        "configuration",
        help="Your YAML configuration file directory.",
    )
    parser_dashboard.add_argument(
        "--port",
@@ -953,12 +670,6 @@ def parse_args(argv):
        type=int,
        default=6052,
    )
    parser_dashboard.add_argument(
        "--address",
        help="The address to bind to.",
        type=str,
        default="0.0.0.0",
    )
    parser_dashboard.add_argument(
        "--username",
        help="The optional username to require for authentication.",
@@ -975,7 +686,7 @@ def parse_args(argv):
        "--open-ui", help="Open the dashboard UI in a browser.", action="store_true"
    )
    parser_dashboard.add_argument(
        "--ha-addon", help=argparse.SUPPRESS, action="store_true"
        "--hassio", help=argparse.SUPPRESS, action="store_true"
    )
    parser_dashboard.add_argument(
        "--socket", help="Make the dashboard serve under a unix socket", type=str
@@ -990,54 +701,28 @@ def parse_args(argv):
        "configuration", help="Your YAML configuration file directories.", nargs="+"
    )

    parser_idedata = subparsers.add_parser("idedata")
    parser_idedata.add_argument(
        "configuration", help="Your YAML configuration file(s).", nargs=1
    )

    parser_rename = subparsers.add_parser(
        "rename",
        help="Rename a device in YAML, compile the binary and upload it.",
    )
    parser_rename.add_argument(
        "configuration", help="Your YAML configuration file.", nargs=1
    )
    parser_rename.add_argument("name", help="The new name for the device.", type=str)

    # Keep backward compatibility with the old command line format of
    # esphome <config> <command>.
    #
    # Unfortunately this can't be done by adding another configuration argument to the
    # main config parser, as argparse is greedy when parsing arguments, so in regular
    # usage it'll eat the command as the configuration argument and error out
    # because it can't parse the configuration as a command.
    #
    # Instead, if parsing using the current format fails, construct an ad-hoc parser
    # that doesn't actually process the arguments, but parses them enough to let us
    # figure out if the old format is used. In that case, swap the command and
    # configuration in the arguments and retry with the normal parser (and raise
    # a deprecation warning).
    arguments = argv[1:]

    argcomplete.autocomplete(parser)
    return parser.parse_args(arguments)
    return parser.parse_args(argv[1:])


def run_esphome(argv):
    args = parse_args(argv)
    CORE.dashboard = args.dashboard

    # Override log level if verbose is set
    if args.verbose:
        args.log_level = "DEBUG"
    elif args.quiet:
        args.log_level = "CRITICAL"
    setup_log(args.verbose, args.quiet)
    if args.deprecated_argv_suggestion is not None and args.command != "vscode":
        _LOGGER.warning(
            "Calling ESPHome with the configuration before the command is deprecated "
            "and will be removed in the future. "
        )
        _LOGGER.warning("Please instead use:")
        _LOGGER.warning("   esphome %s", " ".join(args.deprecated_argv_suggestion[1:]))

    setup_log(
        log_level=args.log_level,
        # Show timestamp for dashboard access logs
        include_timestamp=args.command == "dashboard",
    )
    if sys.version_info < (3, 7, 0):
        _LOGGER.error(
            "You're running ESPHome with Python <3.7. ESPHome is no longer compatible "
            "with this Python version. Please reinstall ESPHome with Python 3.7+"
        )
        return 1

    if args.command in PRE_CONFIG_ACTIONS:
        try:
@@ -1046,19 +731,13 @@ def run_esphome(argv):
            _LOGGER.error(e, exc_info=args.verbose)
            return 1

    _LOGGER.info("ESPHome %s", const.__version__)

    for conf_path in args.configuration:
        if any(os.path.basename(conf_path) == x for x in SECRETS_FILES):
            _LOGGER.warning("Skipping secrets file %s", conf_path)
            continue

        CORE.config_path = conf_path
        CORE.dashboard = args.dashboard

        config = read_config(dict(args.substitution) if args.substitution else {})
        if config is None:
            return 2
            return 1
        CORE.config = config

        if args.command not in POST_CONFIG_ACTIONS:
esphome/api/api_pb2.py (new file, 3997 lines)
File diff suppressed because one or more lines are too long

esphome/api/client.py (new file, 518 lines)
@@ -0,0 +1,518 @@
from datetime import datetime
import functools
import logging
import socket
import threading
import time

# pylint: disable=unused-import
from typing import Optional  # noqa
from google.protobuf import message  # noqa

from esphome import const
import esphome.api.api_pb2 as pb
from esphome.const import CONF_PASSWORD, CONF_PORT
from esphome.core import EsphomeError
from esphome.helpers import resolve_ip_address, indent
from esphome.log import color, Fore
from esphome.util import safe_print

_LOGGER = logging.getLogger(__name__)


class APIConnectionError(EsphomeError):
    pass


MESSAGE_TYPE_TO_PROTO = {
    1: pb.HelloRequest,
    2: pb.HelloResponse,
    3: pb.ConnectRequest,
    4: pb.ConnectResponse,
    5: pb.DisconnectRequest,
    6: pb.DisconnectResponse,
    7: pb.PingRequest,
    8: pb.PingResponse,
    9: pb.DeviceInfoRequest,
    10: pb.DeviceInfoResponse,
    11: pb.ListEntitiesRequest,
    12: pb.ListEntitiesBinarySensorResponse,
    13: pb.ListEntitiesCoverResponse,
    14: pb.ListEntitiesFanResponse,
    15: pb.ListEntitiesLightResponse,
    16: pb.ListEntitiesSensorResponse,
    17: pb.ListEntitiesSwitchResponse,
    18: pb.ListEntitiesTextSensorResponse,
    19: pb.ListEntitiesDoneResponse,
    20: pb.SubscribeStatesRequest,
    21: pb.BinarySensorStateResponse,
    22: pb.CoverStateResponse,
    23: pb.FanStateResponse,
    24: pb.LightStateResponse,
    25: pb.SensorStateResponse,
    26: pb.SwitchStateResponse,
    27: pb.TextSensorStateResponse,
    28: pb.SubscribeLogsRequest,
    29: pb.SubscribeLogsResponse,
    30: pb.CoverCommandRequest,
    31: pb.FanCommandRequest,
    32: pb.LightCommandRequest,
    33: pb.SwitchCommandRequest,
    34: pb.SubscribeServiceCallsRequest,
    35: pb.ServiceCallResponse,
    36: pb.GetTimeRequest,
    37: pb.GetTimeResponse,
}


def _varuint_to_bytes(value):
    if value <= 0x7F:
        return bytes([value])

    ret = bytes()
    while value:
        temp = value & 0x7F
        value >>= 7
        if value:
            ret += bytes([temp | 0x80])
        else:
            ret += bytes([temp])

    return ret


def _bytes_to_varuint(value):
    result = 0
    bitpos = 0
    for val in value:
        result |= (val & 0x7F) << bitpos
        bitpos += 7
        if (val & 0x80) == 0:
            return result
    return None
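A worked example of the little-endian base-128 encoding implemented above (illustration only): the value is emitted low seven bits first, with the continuation bit (0x80) set on every byte except the last.

# Illustration: 300 = 0b10_0101100 -> 0xAC 0x02.
assert _varuint_to_bytes(300) == bytes([0xAC, 0x02])
assert _bytes_to_varuint(bytes([0xAC, 0x02])) == 300
assert _varuint_to_bytes(0x7F) == bytes([0x7F])  # single-byte fast path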
# pylint: disable=too-many-instance-attributes,not-callable
class APIClient(threading.Thread):
    def __init__(self, address, port, password):
        threading.Thread.__init__(self)
        self._address = address  # type: str
        self._port = port  # type: int
        self._password = password  # type: Optional[str]
        self._socket = None  # type: Optional[socket.socket]
        self._socket_open_event = threading.Event()
        self._socket_write_lock = threading.Lock()
        self._connected = False
        self._authenticated = False
        self._message_handlers = []
        self._keepalive = 5
        self._ping_timer = None

        self.on_disconnect = None
        self.on_connect = None
        self.on_login = None
        self.auto_reconnect = False
        self._running_event = threading.Event()
        self._stop_event = threading.Event()

    @property
    def stopped(self):
        return self._stop_event.is_set()

    def _refresh_ping(self):
        if self._ping_timer is not None:
            self._ping_timer.cancel()
            self._ping_timer = None

        def func():
            self._ping_timer = None

            if self._connected:
                try:
                    self.ping()
                except APIConnectionError as err:
                    self._fatal_error(err)
                else:
                    self._refresh_ping()

        self._ping_timer = threading.Timer(self._keepalive, func)
        self._ping_timer.start()

    def _cancel_ping(self):
        if self._ping_timer is not None:
            self._ping_timer.cancel()
            self._ping_timer = None

    def _close_socket(self):
        self._cancel_ping()
        if self._socket is not None:
            self._socket.close()
            self._socket = None
        self._socket_open_event.clear()
        self._connected = False
        self._authenticated = False
        self._message_handlers = []

    def stop(self, force=False):
        if self.stopped:
            raise ValueError

        if self._connected and not force:
            try:
                self.disconnect()
            except APIConnectionError:
                pass
        self._close_socket()

        self._stop_event.set()
        if not force:
            self.join()

    def connect(self):
        if not self._running_event.wait(0.1):
            raise APIConnectionError("You need to call start() first!")

        if self._connected:
            self.disconnect(on_disconnect=False)

        try:
            ip = resolve_ip_address(self._address)
        except EsphomeError as err:
            _LOGGER.warning(
                "Error resolving IP address of %s. Is it connected to WiFi?",
                self._address,
            )
            _LOGGER.warning(
                "(If this error persists, please set a static IP address: "
                "https://esphome.io/components/wifi.html#manual-ips)"
            )
            raise APIConnectionError(err) from err

        _LOGGER.info("Connecting to %s:%s (%s)", self._address, self._port, ip)
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._socket.settimeout(10.0)
        self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        try:
            self._socket.connect((ip, self._port))
        except OSError as err:
            err = APIConnectionError(f"Error connecting to {ip}: {err}")
            self._fatal_error(err)
            raise err
        self._socket.settimeout(0.1)

        self._socket_open_event.set()

        hello = pb.HelloRequest()
        hello.client_info = f"ESPHome v{const.__version__}"
        try:
            resp = self._send_message_await_response(hello, pb.HelloResponse)
        except APIConnectionError as err:
            self._fatal_error(err)
            raise err
        _LOGGER.debug(
            "Successfully connected to %s ('%s' API=%s.%s)",
            self._address,
            resp.server_info,
            resp.api_version_major,
            resp.api_version_minor,
        )
        self._connected = True
        self._refresh_ping()
        if self.on_connect is not None:
            self.on_connect()

    def _check_connected(self):
        if not self._connected:
            err = APIConnectionError("Must be connected!")
            self._fatal_error(err)
            raise err

    def login(self):
        self._check_connected()
        if self._authenticated:
            raise APIConnectionError("Already logged in!")

        connect = pb.ConnectRequest()
        if self._password is not None:
            connect.password = self._password
        resp = self._send_message_await_response(connect, pb.ConnectResponse)
        if resp.invalid_password:
            raise APIConnectionError("Invalid password!")

        self._authenticated = True
        if self.on_login is not None:
            self.on_login()

    def _fatal_error(self, err):
        was_connected = self._connected

        self._close_socket()

        if was_connected and self.on_disconnect is not None:
            self.on_disconnect(err)

    def _write(self, data):  # type: (bytes) -> None
        if self._socket is None:
            raise APIConnectionError("Socket closed")

        # _LOGGER.debug("Write: %s", format_bytes(data))
        with self._socket_write_lock:
            try:
                self._socket.sendall(data)
            except OSError as err:
                err = APIConnectionError(f"Error while writing data: {err}")
                self._fatal_error(err)
                raise err

    def _send_message(self, msg):
        # type: (message.Message) -> None
        for message_type, klass in MESSAGE_TYPE_TO_PROTO.items():
            if isinstance(msg, klass):
                break
        else:
            raise ValueError

        encoded = msg.SerializeToString()
        _LOGGER.debug("Sending %s:\n%s", type(msg), indent(str(msg)))
        req = bytes([0])
        req += _varuint_to_bytes(len(encoded))
        req += _varuint_to_bytes(message_type)
        req += encoded
        self._write(req)
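The frame assembled in _send_message is: a 0x00 preamble byte, a varint payload length, a varint message type, then the serialized protobuf. As an illustration, an empty PingRequest (type 7, zero-length payload) frames to three bytes:

# Illustration only: frame for an empty PingRequest.
frame = bytes([0]) + _varuint_to_bytes(0) + _varuint_to_bytes(7)
assert frame == b"\x00\x00\x07"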
    def _send_message_await_response_complex(
        self, send_msg, do_append, do_stop, timeout=5
    ):
        event = threading.Event()
        responses = []

        def on_message(resp):
            if do_append(resp):
                responses.append(resp)
            if do_stop(resp):
                event.set()

        self._message_handlers.append(on_message)
        self._send_message(send_msg)
        ret = event.wait(timeout)
        try:
            self._message_handlers.remove(on_message)
        except ValueError:
            pass
        if not ret:
            raise APIConnectionError("Timeout while waiting for message response!")
        return responses

    def _send_message_await_response(self, send_msg, response_type, timeout=5):
        def is_response(msg):
            return isinstance(msg, response_type)

        return self._send_message_await_response_complex(
            send_msg, is_response, is_response, timeout
        )[0]

    def device_info(self):
        self._check_connected()
        return self._send_message_await_response(
            pb.DeviceInfoRequest(), pb.DeviceInfoResponse
        )

    def ping(self):
        self._check_connected()
        return self._send_message_await_response(pb.PingRequest(), pb.PingResponse)

    def disconnect(self, on_disconnect=True):
        self._check_connected()

        try:
            self._send_message_await_response(
                pb.DisconnectRequest(), pb.DisconnectResponse
            )
        except APIConnectionError:
            pass
        self._close_socket()

        if self.on_disconnect is not None and on_disconnect:
            self.on_disconnect(None)

    def _check_authenticated(self):
        if not self._authenticated:
            raise APIConnectionError("Must login first!")

    def subscribe_logs(self, on_log, log_level=7, dump_config=False):
        self._check_authenticated()

        def on_msg(msg):
            if isinstance(msg, pb.SubscribeLogsResponse):
                on_log(msg)

        self._message_handlers.append(on_msg)
        req = pb.SubscribeLogsRequest(dump_config=dump_config)
        req.level = log_level
        self._send_message(req)

    def _recv(self, amount):
        ret = bytes()
        if amount == 0:
            return ret

        while len(ret) < amount:
            if self.stopped:
                raise APIConnectionError("Stopped!")
            if not self._socket_open_event.is_set():
                raise APIConnectionError("No socket!")
            try:
                val = self._socket.recv(amount - len(ret))
            except AttributeError as err:
                raise APIConnectionError("Socket was closed") from err
            except socket.timeout:
                continue
            except OSError as err:
                raise APIConnectionError(f"Error while receiving data: {err}") from err
            ret += val
        return ret

    def _recv_varint(self):
        raw = bytes()
        while not raw or raw[-1] & 0x80:
            raw += self._recv(1)
        return _bytes_to_varuint(raw)

    def _run_once(self):
        if not self._socket_open_event.wait(0.1):
            return

        # Preamble
        if self._recv(1)[0] != 0x00:
            raise APIConnectionError("Invalid preamble")

        length = self._recv_varint()
        msg_type = self._recv_varint()

        raw_msg = self._recv(length)
        if msg_type not in MESSAGE_TYPE_TO_PROTO:
            _LOGGER.debug("Skipping message type %s", msg_type)
            return

        msg = MESSAGE_TYPE_TO_PROTO[msg_type]()
        msg.ParseFromString(raw_msg)
        _LOGGER.debug("Got message: %s:\n%s", type(msg), indent(str(msg)))
        for msg_handler in self._message_handlers[:]:
            msg_handler(msg)
        self._handle_internal_messages(msg)

    def run(self):
        self._running_event.set()
        while not self.stopped:
            try:
                self._run_once()
            except APIConnectionError as err:
                if self.stopped:
                    break
                if self._connected:
                    _LOGGER.error("Error while reading incoming messages: %s", err)
                    self._fatal_error(err)
        self._running_event.clear()

    def _handle_internal_messages(self, msg):
        if isinstance(msg, pb.DisconnectRequest):
            self._send_message(pb.DisconnectResponse())
            if self._socket is not None:
                self._socket.close()
                self._socket = None
            self._connected = False
            if self.on_disconnect is not None:
                self.on_disconnect(None)
        elif isinstance(msg, pb.PingRequest):
            self._send_message(pb.PingResponse())
        elif isinstance(msg, pb.GetTimeRequest):
            resp = pb.GetTimeResponse()
            resp.epoch_seconds = int(time.time())
            self._send_message(resp)


def run_logs(config, address):
    conf = config["api"]
    port = conf[CONF_PORT]
    password = conf[CONF_PASSWORD]
    _LOGGER.info("Starting log output from %s using esphome API", address)

    cli = APIClient(address, port, password)
    stopping = False
    retry_timer = []

    has_connects = []

    def try_connect(err, tries=0):
        if stopping:
            return

        if err:
            _LOGGER.warning("Disconnected from API: %s", err)

        while retry_timer:
            retry_timer.pop(0).cancel()

        error = None
        try:
            cli.connect()
            cli.login()
        except APIConnectionError as err2:  # noqa
            error = err2

        if error is None:
            _LOGGER.info("Successfully connected to %s", address)
            return

        wait_time = int(min(1.5 ** min(tries, 100), 30))
        if not has_connects:
            _LOGGER.warning(
                "Initial connection failed. The ESP might not be connected "
                "to WiFi yet (%s). Re-Trying in %s seconds",
                error,
                wait_time,
            )
        else:
            _LOGGER.warning(
                "Couldn't connect to API (%s). Trying to reconnect in %s seconds",
                error,
                wait_time,
            )
        timer = threading.Timer(
            wait_time, functools.partial(try_connect, None, tries + 1)
        )
        timer.start()
        retry_timer.append(timer)

    def on_log(msg):
        time_ = datetime.now().time().strftime("[%H:%M:%S]")
        text = msg.message
        if msg.send_failed:
            text = color(
                Fore.WHITE,
                "(Message skipped because it was too big to fit in "
                "TCP buffer - This is only cosmetic)",
            )
        safe_print(time_ + text)

    def on_login():
        try:
            cli.subscribe_logs(on_log, dump_config=not has_connects)
            has_connects.append(True)
        except APIConnectionError:
            cli.disconnect()

    cli.on_disconnect = try_connect
    cli.on_login = on_login
    cli.start()

    try:
        try_connect(None)
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        stopping = True
        cli.stop(True)
        while retry_timer:
            retry_timer.pop(0).cancel()
    return 0
@@ -1,45 +1,31 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import (
    CONF_ALL,
    CONF_ANY,
    CONF_AUTOMATION_ID,
    CONF_CONDITION,
    CONF_COUNT,
    CONF_ELSE,
    CONF_ID,
    CONF_THEN,
    CONF_TIME,
    CONF_TIMEOUT,
    CONF_TRIGGER_ID,
    CONF_TYPE_ID,
    CONF_UPDATE_INTERVAL,
    CONF_TIME,
)
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
from esphome.jsonschema import jschema_extractor
from esphome.util import Registry


def maybe_simple_id(*validators):
    """Allow a raw ID to be specified in place of a config block.
    If the value that's being validated is a dictionary, it's passed as-is to the specified validators. Otherwise, it's
    wrapped in a dict that looks like ``{"id": <value>}``, and that dict is then handed off to the specified validators.
    """
    return maybe_conf(CONF_ID, *validators)


def maybe_conf(conf, *validators):
    """Allow a raw value to be specified in place of a config block.
    If the value that's being validated is a dictionary, it's passed as-is to the specified validators. Otherwise, it's
    wrapped in a dict that looks like ``{<conf>: <value>}``, and that dict is then handed off to the specified
    validators.
    (This is a general case of ``maybe_simple_id`` that allows the wrapping key to be something other than ``id``.)
    """
    validator = cv.All(*validators)

    @schema_extractor("maybe")
    @jschema_extractor("maybe")
    def validate(value):
        if value == SCHEMA_EXTRACT:
            return (validator, conf)
        # pylint: disable=comparison-with-callable
        if value == jschema_extractor:
            return validator

        if isinstance(value, dict):
            return validator(value)
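As an illustration of the docstring above (hypothetical values): a validator built with maybe_simple_id accepts both spellings below, normalizing the raw form into the dict form before validation.

# Illustration only; mirrors the wrapping described in the docstrings.
short_form = "my_sensor"          # raw value as written in YAML
long_form = {"id": "my_sensor"}   # equivalent config block
# maybe_simple_id wraps short_form into {"id": "my_sensor"} before validating.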
@@ -75,22 +61,12 @@ def validate_potentially_and_condition(value):
    return validate_condition(value)


def validate_potentially_or_condition(value):
    if isinstance(value, list):
        with cv.remove_prepend_path(["or"]):
            return validate_condition({"or": value})
    return validate_condition(value)


DelayAction = cg.esphome_ns.class_("DelayAction", Action, cg.Component)
LambdaAction = cg.esphome_ns.class_("LambdaAction", Action)
IfAction = cg.esphome_ns.class_("IfAction", Action)
WhileAction = cg.esphome_ns.class_("WhileAction", Action)
RepeatAction = cg.esphome_ns.class_("RepeatAction", Action)
WaitUntilAction = cg.esphome_ns.class_("WaitUntilAction", Action, cg.Component)
UpdateComponentAction = cg.esphome_ns.class_("UpdateComponentAction", Action)
SuspendComponentAction = cg.esphome_ns.class_("SuspendComponentAction", Action)
ResumeComponentAction = cg.esphome_ns.class_("ResumeComponentAction", Action)
Automation = cg.esphome_ns.class_("Automation")

LambdaCondition = cg.esphome_ns.class_("LambdaCondition", Condition)
@@ -132,9 +108,11 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
        # This should only happen with invalid configs, but let's have a nice error message.
        return [schema(value)]

    @schema_extractor("automation")
    @jschema_extractor("automation")
    def validator(value):
        if value == SCHEMA_EXTRACT:
            # hack to get the schema
            # pylint: disable=comparison-with-callable
        if value == jschema_extractor:
            return schema

        value = validator_(value)
@@ -160,7 +138,6 @@ AUTOMATION_SCHEMA = cv.Schema(
AndCondition = cg.esphome_ns.class_("AndCondition", Condition)
OrCondition = cg.esphome_ns.class_("OrCondition", Condition)
NotCondition = cg.esphome_ns.class_("NotCondition", Condition)
XorCondition = cg.esphome_ns.class_("XorCondition", Condition)


@register_condition("and", AndCondition, validate_condition_list)
@@ -175,30 +152,12 @@ async def or_condition_to_code(config, condition_id, template_arg, args):
    return cg.new_Pvariable(condition_id, template_arg, conditions)


@register_condition("all", AndCondition, validate_condition_list)
async def all_condition_to_code(config, condition_id, template_arg, args):
    conditions = await build_condition_list(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, conditions)


@register_condition("any", OrCondition, validate_condition_list)
async def any_condition_to_code(config, condition_id, template_arg, args):
    conditions = await build_condition_list(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, conditions)


@register_condition("not", NotCondition, validate_potentially_and_condition)
async def not_condition_to_code(config, condition_id, template_arg, args):
    condition = await build_condition(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, condition)


@register_condition("xor", XorCondition, validate_condition_list)
async def xor_condition_to_code(config, condition_id, template_arg, args):
    conditions = await build_condition_list(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, conditions)


@register_condition("lambda", LambdaCondition, cv.returning_lambda)
async def lambda_condition_to_code(config, condition_id, template_arg, args):
    lambda_ = await cg.process_lambda(config, args, return_type=bool)
@@ -244,21 +203,15 @@ async def delay_action_to_code(config, action_id, template_arg, args):
    IfAction,
    cv.All(
        {
            cv.Exclusive(
                CONF_CONDITION, CONF_CONDITION
            ): validate_potentially_and_condition,
            cv.Exclusive(CONF_ANY, CONF_CONDITION): validate_potentially_or_condition,
            cv.Exclusive(CONF_ALL, CONF_CONDITION): validate_potentially_and_condition,
            cv.Required(CONF_CONDITION): validate_potentially_and_condition,
            cv.Optional(CONF_THEN): validate_action_list,
            cv.Optional(CONF_ELSE): validate_action_list,
        },
        cv.has_at_least_one_key(CONF_THEN, CONF_ELSE),
        cv.has_at_least_one_key(CONF_CONDITION, CONF_ANY, CONF_ALL),
    ),
)
async def if_action_to_code(config, action_id, template_arg, args):
    cond_conf = next(el for el in config if el in (CONF_ANY, CONF_ALL, CONF_CONDITION))
    conditions = await build_condition(config[cond_conf], template_arg, args)
    conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
    var = cg.new_Pvariable(action_id, template_arg, conditions)
    if CONF_THEN in config:
        actions = await build_action_list(config[CONF_THEN], template_arg, args)
@@ -287,45 +240,21 @@ async def while_action_to_code(config, action_id, template_arg, args):
    return var


@register_action(
    "repeat",
    RepeatAction,
    cv.Schema(
def validate_wait_until(value):
    schema = cv.Schema(
        {
            cv.Required(CONF_COUNT): cv.templatable(cv.positive_not_null_int),
            cv.Required(CONF_THEN): validate_action_list,
            cv.Required(CONF_CONDITION): validate_potentially_and_condition,
        }
    ),
)
async def repeat_action_to_code(config, action_id, template_arg, args):
    var = cg.new_Pvariable(action_id, template_arg)
    count_template = await cg.templatable(config[CONF_COUNT], args, cg.uint32)
    cg.add(var.set_count(count_template))
    actions = await build_action_list(
        config[CONF_THEN],
        cg.TemplateArguments(cg.uint32, *template_arg.args),
        [(cg.uint32, "iteration"), *args],
    )
    cg.add(var.add_then(actions))
    return var
    if isinstance(value, dict) and CONF_CONDITION in value:
        return schema(value)
    return validate_wait_until({CONF_CONDITION: value})


_validate_wait_until = cv.maybe_simple_value(
    {
        cv.Required(CONF_CONDITION): validate_potentially_and_condition,
        cv.Optional(CONF_TIMEOUT): cv.templatable(cv.positive_time_period_milliseconds),
    },
    key=CONF_CONDITION,
)


@register_action("wait_until", WaitUntilAction, _validate_wait_until)
@register_action("wait_until", WaitUntilAction, validate_wait_until)
async def wait_until_action_to_code(config, action_id, template_arg, args):
    conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
    var = cg.new_Pvariable(action_id, template_arg, conditions)
    if CONF_TIMEOUT in config:
        template_ = await cg.templatable(config[CONF_TIMEOUT], args, cg.uint32)
        cg.add(var.set_timeout_value(template_))
    await cg.register_component(var, {})
    return var

@@ -350,41 +279,6 @@ async def component_update_action_to_code(config, action_id, template_arg, args)
    return cg.new_Pvariable(action_id, template_arg, comp)


@register_action(
    "component.suspend",
    SuspendComponentAction,
    maybe_simple_id(
        {
            cv.Required(CONF_ID): cv.use_id(cg.PollingComponent),
        }
    ),
)
async def component_suspend_action_to_code(config, action_id, template_arg, args):
    comp = await cg.get_variable(config[CONF_ID])
    return cg.new_Pvariable(action_id, template_arg, comp)


@register_action(
    "component.resume",
    ResumeComponentAction,
    maybe_simple_id(
        {
            cv.Required(CONF_ID): cv.use_id(cg.PollingComponent),
            cv.Optional(CONF_UPDATE_INTERVAL): cv.templatable(
                cv.positive_time_period_milliseconds
            ),
        }
    ),
)
async def component_resume_action_to_code(config, action_id, template_arg, args):
    comp = await cg.get_variable(config[CONF_ID])
    var = cg.new_Pvariable(action_id, template_arg, comp)
    if CONF_UPDATE_INTERVAL in config:
        template_ = await cg.templatable(config[CONF_UPDATE_INTERVAL], args, int)
        cg.add(var.set_update_interval(template_))
    return var


async def build_action(full_config, template_arg, args):
    registry_entry, config = cg.extract_registry_entry_config(
        ACTION_REGISTRY, full_config

esphome/boards.py (new file, 1152 lines)
File diff suppressed because it is too large
@@ -8,86 +8,74 @@
# want to break suddenly due to a rename (this file will get backports for features).

# pylint: disable=unused-import
from esphome.cpp_generator import (  # noqa: F401
    ArrayInitializer,
from esphome.cpp_generator import (  # noqa
    Expression,
    LineComment,
    MockObj,
    MockObjClass,
    Pvariable,
    RawExpression,
    RawStatement,
    Statement,
    StructInitializer,
    TemplateArguments,
    StructInitializer,
    ArrayInitializer,
    safe_exp,
    Statement,
    LineComment,
    progmem_array,
    static_const_array,
    statement,
    variable,
    new_variable,
    Pvariable,
    new_Pvariable,
    add,
    add_build_flag,
    add_define,
    add_global,
    add_library,
    add_platformio_option,
    add_build_flag,
    add_define,
    get_variable,
    get_variable_with_full_id,
    is_template,
    new_Pvariable,
    new_variable,
    process_lambda,
    progmem_array,
    safe_exp,
    statement,
    static_const_array,
    is_template,
    templatable,
    variable,
    with_local_variable,
    MockObj,
    MockObjClass,
)
from esphome.cpp_helpers import (  # noqa: F401
from esphome.cpp_helpers import (  # noqa
    gpio_pin_expression,
    register_component,
    build_registry_entry,
    build_registry_list,
    extract_registry_entry_config,
    gpio_pin_expression,
    past_safe_mode,
    register_component,
    register_parented,
)
from esphome.cpp_types import (  # noqa: F401
    NAN,
    App,
    Application,
    Component,
    ComponentPtr,
    Controller,
    EntityBase,
    EntityCategory,
    ESPTime,
    GPIOPin,
    InternalGPIOPin,
    JsonObject,
    JsonObjectConst,
    Parented,
    PollingComponent,
    arduino_json_ns,
    bool_,
    const_char_ptr,
    double,
    esphome_ns,
    float_,
from esphome.cpp_types import (  # noqa
    global_ns,
    gpio_Flags,
    int16,
    int32,
    int64,
    int_,
    void,
    nullptr,
    optional,
    size_t,
    float_,
    double,
    bool_,
    int_,
    std_ns,
    std_shared_ptr,
    std_string,
    std_string_ref,
    std_vector,
    uint8,
    uint16,
    uint32,
    uint64,
    void,
    int32,
    const_char_ptr,
    NAN,
    esphome_ns,
    App,
    Nameable,
    Component,
    ComponentPtr,
    PollingComponent,
    Application,
    optional,
    arduino_json_ns,
    JsonObject,
    JsonObjectRef,
    JsonObjectConstRef,
    Controller,
    GPIOPin,
)
@@ -1 +0,0 @@
CODEOWNERS = ["@MrSuicideParrot"]
@@ -1,44 +0,0 @@
// Datasheet https://wiki.dfrobot.com/A01NYUB%20Waterproof%20Ultrasonic%20Sensor%20SKU:%20SEN0313

#include "a01nyub.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

namespace esphome {
namespace a01nyub {

static const char *const TAG = "a01nyub.sensor";

void A01nyubComponent::loop() {
  uint8_t data;
  while (this->available() > 0) {
    this->read_byte(&data);
    if (this->buffer_.empty() && (data != 0xff))
      continue;
    buffer_.push_back(data);
    if (this->buffer_.size() == 4)
      this->check_buffer_();
  }
}

void A01nyubComponent::check_buffer_() {
  uint8_t checksum = this->buffer_[0] + this->buffer_[1] + this->buffer_[2];
  if (this->buffer_[3] == checksum) {
    float distance = (this->buffer_[1] << 8) + this->buffer_[2];
    if (distance > 280) {
      float meters = distance / 1000.0;
      ESP_LOGV(TAG, "Distance from sensor: %f mm, %f m", distance, meters);
      this->publish_state(meters);
    } else {
      ESP_LOGW(TAG, "Invalid data read from sensor: %s", format_hex_pretty(this->buffer_).c_str());
    }
  } else {
    ESP_LOGW(TAG, "checksum failed: %02x != %02x", checksum, this->buffer_[3]);
  }
  this->buffer_.clear();
}

void A01nyubComponent::dump_config() { LOG_SENSOR("", "A01nyub Sensor", this); }

}  // namespace a01nyub
}  // namespace esphome
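For clarity, both of these sensors emit four-byte frames: a 0xFF header, the distance high byte, the distance low byte, and a checksum equal to the low eight bits of the sum of the first three bytes. A standalone sketch of that check with illustrative values:

# Illustration only: 4-byte frame check used by check_buffer_() above.
frame = [0xFF, 0x07, 0xA1, (0xFF + 0x07 + 0xA1) & 0xFF]
assert frame[3] == (frame[0] + frame[1] + frame[2]) & 0xFF  # uint8_t wraparound
distance_mm = (frame[1] << 8) | frame[2]
assert distance_mm == 1953  # 0x07A1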
@@ -1,27 +0,0 @@
#pragma once

#include <vector>

#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/uart/uart.h"

namespace esphome {
namespace a01nyub {

class A01nyubComponent : public sensor::Sensor, public Component, public uart::UARTDevice {
 public:
  // Nothing really public.

  // ========== INTERNAL METHODS ==========
  void loop() override;
  void dump_config() override;

 protected:
  void check_buffer_();

  std::vector<uint8_t> buffer_;
};

}  // namespace a01nyub
}  // namespace esphome
@@ -1,41 +0,0 @@
import esphome.codegen as cg
from esphome.components import sensor, uart
from esphome.const import (
    DEVICE_CLASS_DISTANCE,
    ICON_ARROW_EXPAND_VERTICAL,
    STATE_CLASS_MEASUREMENT,
    UNIT_METER,
)

CODEOWNERS = ["@MrSuicideParrot"]
DEPENDENCIES = ["uart"]

a01nyub_ns = cg.esphome_ns.namespace("a01nyub")
A01nyubComponent = a01nyub_ns.class_(
    "A01nyubComponent", sensor.Sensor, cg.Component, uart.UARTDevice
)

CONFIG_SCHEMA = sensor.sensor_schema(
    A01nyubComponent,
    unit_of_measurement=UNIT_METER,
    icon=ICON_ARROW_EXPAND_VERTICAL,
    accuracy_decimals=3,
    state_class=STATE_CLASS_MEASUREMENT,
    device_class=DEVICE_CLASS_DISTANCE,
).extend(uart.UART_DEVICE_SCHEMA)

FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema(
    "a01nyub",
    baud_rate=9600,
    require_tx=False,
    require_rx=True,
    data_bits=8,
    parity=None,
    stop_bits=1,
)


async def to_code(config):
    var = await sensor.new_sensor(config)
    await cg.register_component(var, config)
    await uart.register_uart_device(var, config)
@@ -1 +0,0 @@
CODEOWNERS = ["@TH-Braemer"]
@@ -1,43 +0,0 @@
// Datasheet https://wiki.dfrobot.com/_A02YYUW_Waterproof_Ultrasonic_Sensor_SKU_SEN0311

#include "a02yyuw.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

namespace esphome {
namespace a02yyuw {

static const char *const TAG = "a02yyuw.sensor";

void A02yyuwComponent::loop() {
  uint8_t data;
  while (this->available() > 0) {
    this->read_byte(&data);
    if (this->buffer_.empty() && (data != 0xff))
      continue;
    buffer_.push_back(data);
    if (this->buffer_.size() == 4)
      this->check_buffer_();
  }
}

void A02yyuwComponent::check_buffer_() {
  uint8_t checksum = this->buffer_[0] + this->buffer_[1] + this->buffer_[2];
  if (this->buffer_[3] == checksum) {
    float distance = (this->buffer_[1] << 8) + this->buffer_[2];
    if (distance > 30) {
      ESP_LOGV(TAG, "Distance from sensor: %f mm", distance);
      this->publish_state(distance);
    } else {
      ESP_LOGW(TAG, "Invalid data read from sensor: %s", format_hex_pretty(this->buffer_).c_str());
    }
  } else {
    ESP_LOGW(TAG, "checksum failed: %02x != %02x", checksum, this->buffer_[3]);
  }
  this->buffer_.clear();
}

void A02yyuwComponent::dump_config() { LOG_SENSOR("", "A02yyuw Sensor", this); }

}  // namespace a02yyuw
}  // namespace esphome
@@ -1,27 +0,0 @@
#pragma once

#include <vector>

#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/uart/uart.h"

namespace esphome {
namespace a02yyuw {

class A02yyuwComponent : public sensor::Sensor, public Component, public uart::UARTDevice {
 public:
  // Nothing really public.

  // ========== INTERNAL METHODS ==========
  void loop() override;
  void dump_config() override;

 protected:
  void check_buffer_();

  std::vector<uint8_t> buffer_;
};

}  // namespace a02yyuw
}  // namespace esphome
@@ -1,41 +0,0 @@
import esphome.codegen as cg
from esphome.components import sensor, uart
from esphome.const import (
    DEVICE_CLASS_DISTANCE,
    ICON_ARROW_EXPAND_VERTICAL,
    STATE_CLASS_MEASUREMENT,
    UNIT_MILLIMETER,
)

CODEOWNERS = ["@TH-Braemer"]
DEPENDENCIES = ["uart"]

a02yyuw_ns = cg.esphome_ns.namespace("a02yyuw")
A02yyuwComponent = a02yyuw_ns.class_(
    "A02yyuwComponent", sensor.Sensor, cg.Component, uart.UARTDevice
)

CONFIG_SCHEMA = sensor.sensor_schema(
    A02yyuwComponent,
    unit_of_measurement=UNIT_MILLIMETER,
    icon=ICON_ARROW_EXPAND_VERTICAL,
    accuracy_decimals=0,
    state_class=STATE_CLASS_MEASUREMENT,
    device_class=DEVICE_CLASS_DISTANCE,
).extend(uart.UART_DEVICE_SCHEMA)

FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema(
    "a02yyuw",
    baud_rate=9600,
    require_tx=False,
    require_rx=True,
    data_bits=8,
    parity=None,
    stop_bits=1,
)


async def to_code(config):
    var = await sensor.new_sensor(config)
    await cg.register_component(var, config)
    await uart.register_uart_device(var, config)
Some files were not shown because too many files have changed in this diff.