Mirror of https://github.com/esphome/esphome.git
Synced 2025-08-10 20:29:24 +00:00

Compare commits: pre-commit ... 2025.7.1
42 Commits
1a9f02fa63
7ad1b039f9
e255d73c29
46f5c44b37
9d80889bc9
08a5ba6ef1
28128c65e5
efcad565ee
cd987feb5b
5707389faa
3f78db5c63
de0656a188
90a16ffa89
4182076f64
8c8c08d40c
18e2f41424
bd0fe34b14
37982290f7
02b7db7311
9bc3ff5f53
786cb7ded5
7f01c25782
321f2f87b0
11a051401f
6148dd7e41
42b6939e90
35b3f75f7c
78e8001aa8
84fc6ff71a
16292a9f13
90f0ebb22b
4153380f99
740c0ef9d7
b4521e1d8c
10ca7ed85b
e43efdaaec
9207bf97f3
c13317f807
77d1d0414d
8f42bc6aac
9beb4e2cd4
097aac2183

@@ -1 +0,0 @@
a3cdfc378d28b53b416a1d5bf0ab9077ee18867f0d39436ea8013cf5a4ead87a

.github/actions/restore-python/action.yml (vendored, 2 changed lines)
@@ -41,7 +41,7 @@ runs:
shell: bash
run: |
python -m venv venv
source ./venv/Scripts/activate
./venv/Scripts/activate
python --version
pip install -r requirements.txt -r requirements_test.txt
pip install -e .

.github/workflows/ci-clang-tidy-hash.yml (vendored, 76 changed lines)
@@ -1,76 +0,0 @@
name: Clang-tidy Hash CI

on:
pull_request:
paths:
- ".clang-tidy"
- "platformio.ini"
- "requirements_dev.txt"
- ".clang-tidy.hash"
- "script/clang_tidy_hash.py"
- ".github/workflows/ci-clang-tidy-hash.yml"

permissions:
contents: read
pull-requests: write

jobs:
verify-hash:
name: Verify clang-tidy hash
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4.2.2

- name: Set up Python
uses: actions/setup-python@v5.6.0
with:
python-version: "3.11"

- name: Verify hash
run: |
python script/clang_tidy_hash.py --verify

- if: failure()
name: Show hash details
run: |
python script/clang_tidy_hash.py
echo "## Job Failed" | tee -a $GITHUB_STEP_SUMMARY
echo "You have modified clang-tidy configuration but have not updated the hash." | tee -a $GITHUB_STEP_SUMMARY
echo "Please run 'script/clang_tidy_hash.py --update' and commit the changes." | tee -a $GITHUB_STEP_SUMMARY

- if: failure()
name: Request changes
uses: actions/github-script@v7.0.1
with:
script: |
await github.rest.pulls.createReview({
pull_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
event: 'REQUEST_CHANGES',
body: 'You have modified clang-tidy configuration but have not updated the hash.\nPlease run `script/clang_tidy_hash.py --update` and commit the changes.'
})

- if: success()
name: Dismiss review
uses: actions/github-script@v7.0.1
with:
script: |
let reviews = await github.rest.pulls.listReviews({
pull_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo
});
for (let review of reviews.data) {
if (review.user.login === 'github-actions[bot]' && review.state === 'CHANGES_REQUESTED') {
await github.rest.pulls.dismissReview({
pull_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
review_id: review.id,
message: 'Clang-tidy hash now matches configuration.'
});
}
}

.github/workflows/ci.yml (vendored, 258 changed lines)
@@ -39,7 +39,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Generate cache-key
id: cache-key
run: echo key="${{ hashFiles('requirements.txt', 'requirements_test.txt', '.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
run: echo key="${{ hashFiles('requirements.txt', 'requirements_test.txt') }}" >> $GITHUB_OUTPUT
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -58,16 +58,56 @@ jobs:
python -m venv venv
. venv/bin/activate
python --version
pip install -r requirements.txt -r requirements_test.txt pre-commit
pip install -r requirements.txt -r requirements_test.txt
pip install -e .

ruff:
name: Check ruff
runs-on: ubuntu-24.04
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Run Ruff
run: |
. venv/bin/activate
ruff format esphome tests
- name: Suggested changes
run: script/ci-suggest-changes
if: always()

flake8:
name: Check flake8
runs-on: ubuntu-24.04
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Run flake8
run: |
. venv/bin/activate
flake8 esphome
- name: Suggested changes
run: script/ci-suggest-changes
if: always()

pylint:
name: Check pylint
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.python-linters == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
@@ -89,8 +129,6 @@ jobs:
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.python-linters == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
@@ -166,7 +204,6 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Restore Python
id: restore-python
uses: ./.github/actions/restore-python
with:
python-version: ${{ matrix.python-version }}
@@ -176,7 +213,7 @@ jobs:
- name: Run pytest
if: matrix.os == 'windows-latest'
run: |
. ./venv/Scripts/activate.ps1
./venv/Scripts/activate
pytest -vv --cov-report=xml --tb=native -n auto tests --ignore=tests/integration/
- name: Run pytest
if: matrix.os == 'ubuntu-latest' || matrix.os == 'macOS-latest'
@@ -187,59 +224,12 @@ jobs:
uses: codecov/codecov-action@v5.4.3
with:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Save Python virtual environment cache
if: github.ref == 'refs/heads/dev'
uses: actions/cache/save@v4.2.3
with:
path: venv
key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}

determine-jobs:
name: Determine which jobs to run
runs-on: ubuntu-24.04
needs:
- common
outputs:
integration-tests: ${{ steps.determine.outputs.integration-tests }}
clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
python-linters: ${{ steps.determine.outputs.python-linters }}
changed-components: ${{ steps.determine.outputs.changed-components }}
component-test-count: ${{ steps.determine.outputs.component-test-count }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
with:
# Fetch enough history to find the merge base
fetch-depth: 2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Determine which tests to run
id: determine
env:
GH_TOKEN: ${{ github.token }}
run: |
. venv/bin/activate
output=$(python script/determine-jobs.py)
echo "Test determination output:"
echo "$output" | jq

# Extract individual fields
echo "integration-tests=$(echo "$output" | jq -r '.integration_tests')" >> $GITHUB_OUTPUT
echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT

integration-tests:
name: Run integration tests
runs-on: ubuntu-latest
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.integration-tests == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
@@ -269,32 +259,44 @@ jobs:
. venv/bin/activate
pytest -vv --no-cov --tb=native -n auto tests/integration/

clang-tidy-deps:
name: Clang-tidy dependencies
clang-format:
name: Check clang-format
runs-on: ubuntu-24.04
needs:
- common
- ci-custom
- pytest
- determine-jobs
if: |
always() &&
needs.determine-jobs.outputs.clang-tidy == 'true'
steps:
- run: echo "All clang-tidy dependencies ready"
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Install clang-format
run: |
. venv/bin/activate
pip install clang-format -c requirements_dev.txt
- name: Run clang-format
run: |
. venv/bin/activate
script/clang-format -i
git diff-index --quiet HEAD --
- name: Suggested changes
run: script/ci-suggest-changes
if: always()

clang-tidy:
name: ${{ matrix.name }}
runs-on: ubuntu-24.04
needs:
- clang-tidy-deps
- determine-jobs
if: |
always() &&
needs.determine-jobs.outputs.clang-tidy == 'true' &&
needs.clang-tidy-deps.result == 'success'
env:
GH_TOKEN: ${{ github.token }}
- common
- ruff
- ci-custom
- clang-format
- flake8
- pylint
- pytest
- pyupgrade
strategy:
fail-fast: false
max-parallel: 2
@@ -333,10 +335,6 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2

- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -348,14 +346,14 @@ jobs:
uses: actions/cache@v4.2.3
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
key: platformio-${{ matrix.pio_cache_key }}

- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@v4.2.3
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
key: platformio-${{ matrix.pio_cache_key }}

- name: Register problem matchers
run: |
@@ -369,28 +367,10 @@ jobs:
mkdir -p .temp
pio run --list-targets -e esp32-idf-tidy

- name: Check if full clang-tidy scan needed
id: check_full_scan
run: |
. venv/bin/activate
if python script/clang_tidy_hash.py --check; then
echo "full_scan=true" >> $GITHUB_OUTPUT
echo "reason=hash_changed" >> $GITHUB_OUTPUT
else
echo "full_scan=false" >> $GITHUB_OUTPUT
echo "reason=normal" >> $GITHUB_OUTPUT
fi

- name: Run clang-tidy
run: |
. venv/bin/activate
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
echo "Running FULL clang-tidy scan (hash changed)"
script/clang-tidy --all-headers --fix ${{ matrix.options }} ${{ matrix.ignore_errors && '|| true' || '' }}
else
echo "Running clang-tidy on changed files only"
script/clang-tidy --all-headers --fix --changed ${{ matrix.options }} ${{ matrix.ignore_errors && '|| true' || '' }}
fi
script/clang-tidy --all-headers --fix ${{ matrix.options }} ${{ matrix.ignore_errors && '|| true' || '' }}
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
@@ -400,18 +380,59 @@ jobs:
# yamllint disable-line rule:line-length
if: always()

list-components:
runs-on: ubuntu-24.04
needs:
- common
if: github.event_name == 'pull_request'
outputs:
components: ${{ steps.list-components.outputs.components }}
count: ${{ steps.list-components.outputs.count }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
with:
# Fetch enough history so `git merge-base refs/remotes/origin/dev HEAD` works.
fetch-depth: 500
- name: Get target branch
id: target-branch
run: |
echo "branch=${{ github.event.pull_request.base.ref }}" >> $GITHUB_OUTPUT
- name: Fetch ${{ steps.target-branch.outputs.branch }} branch
run: |
git -c protocol.version=2 fetch --no-tags --prune --no-recurse-submodules --depth=1 origin +refs/heads/${{ steps.target-branch.outputs.branch }}:refs/remotes/origin/${{ steps.target-branch.outputs.branch }}
git merge-base refs/remotes/origin/${{ steps.target-branch.outputs.branch }} HEAD
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Find changed components
id: list-components
run: |
. venv/bin/activate
components=$(script/list-components.py --changed --branch ${{ steps.target-branch.outputs.branch }})
output_components=$(echo "$components" | jq -R -s -c 'split("\n")[:-1] | map(select(length > 0))')
count=$(echo "$output_components" | jq length)

echo "components=$output_components" >> $GITHUB_OUTPUT
echo "count=$count" >> $GITHUB_OUTPUT

echo "$count Components:"
echo "$output_components" | jq

test-build-components:
name: Component test ${{ matrix.file }}
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0 && fromJSON(needs.determine-jobs.outputs.component-test-count) < 100
- list-components
if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) > 0 && fromJSON(needs.list-components.outputs.count) < 100
strategy:
fail-fast: false
max-parallel: 2
matrix:
file: ${{ fromJson(needs.determine-jobs.outputs.changed-components) }}
file: ${{ fromJson(needs.list-components.outputs.components) }}
steps:
- name: Install dependencies
run: |
@@ -439,8 +460,8 @@ jobs:
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
- list-components
if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
outputs:
matrix: ${{ steps.split.outputs.components }}
steps:
@@ -449,7 +470,7 @@ jobs:
- name: Split components into 20 groups
id: split
run: |
components=$(echo '${{ needs.determine-jobs.outputs.changed-components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
components=$(echo '${{ needs.list-components.outputs.components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
echo "components=$components" >> $GITHUB_OUTPUT

test-build-components-split:
@@ -457,9 +478,9 @@ jobs:
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
- list-components
- test-build-components-splitter
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
strategy:
fail-fast: false
max-parallel: 4
@@ -496,43 +517,24 @@ jobs:
./script/test_build_components -e compile -c $component
done

pre-commit-ci-lite:
name: pre-commit.ci lite
runs-on: ubuntu-latest
needs:
- common
if: github.event_name == 'pull_request' && github.base_ref != 'beta' && github.base_ref != 'release'
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- uses: pre-commit/action@v3.0.1
env:
SKIP: pylint,clang-tidy-hash,yamllint
- uses: pre-commit-ci/lite-action@v1.1.0
if: always()

ci-status:
name: CI Status
runs-on: ubuntu-24.04
needs:
- common
- ruff
- ci-custom
- clang-format
- flake8
- pylint
- pytest
- integration-tests
- pyupgrade
- clang-tidy-deps
- clang-tidy
- determine-jobs
- list-components
- test-build-components
- test-build-components-splitter
- test-build-components-split
- pre-commit-ci-lite
if: always()
steps:
- name: Success

@@ -4,14 +4,15 @@

ci:
autoupdate_commit_msg: 'pre-commit: autoupdate'
autoupdate_schedule: off # Disabled until ruff versions are synced between deps and pre-commit
autoupdate_schedule: weekly
autofix_prs: false
# Skip hooks that have issues in pre-commit CI environment
skip: [pylint, clang-tidy-hash, yamllint]
skip: [pylint, yamllint]

repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.3
rev: v0.12.2
hooks:
# Run the linter.
- id: ruff
@@ -55,10 +56,3 @@ repos:
entry: python3 script/run-in-env.py pylint
language: system
types: [python]
- id: clang-tidy-hash
name: Update clang-tidy hash
entry: python script/clang_tidy_hash.py --update-if-changed
language: python
files: ^(\.clang-tidy|platformio\.ini|requirements_dev\.txt)$
pass_filenames: false
additional_dependencies: []

@@ -28,7 +28,7 @@ esphome/components/aic3204/* @kbx81
esphome/components/airthings_ble/* @jeromelaban
esphome/components/airthings_wave_base/* @jeromelaban @kpfleming @ncareau
esphome/components/airthings_wave_mini/* @ncareau
esphome/components/airthings_wave_plus/* @jeromelaban @precurse
esphome/components/airthings_wave_plus/* @jeromelaban
esphome/components/alarm_control_panel/* @grahambrown11 @hwstar
esphome/components/alpha3/* @jan-hofmeier
esphome/components/am2315c/* @swoboda1337

Doxyfile (2 changed lines)
@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.

PROJECT_NUMBER = 2025.8.0-dev
PROJECT_NUMBER = 2025.7.1

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

@@ -1 +1 @@
CODEOWNERS = ["@jeromelaban", "@precurse"]
CODEOWNERS = ["@jeromelaban"]

@@ -73,29 +73,11 @@ void AirthingsWavePlus::dump_config() {
LOG_SENSOR(" ", "Illuminance", this->illuminance_sensor_);
}

void AirthingsWavePlus::setup() {
const char *service_uuid;
const char *characteristic_uuid;
const char *access_control_point_characteristic_uuid;

// Change UUIDs for Wave Radon Gen2
switch (this->wave_device_type_) {
case WaveDeviceType::WAVE_GEN2:
service_uuid = SERVICE_UUID_WAVE_RADON_GEN2;
characteristic_uuid = CHARACTERISTIC_UUID_WAVE_RADON_GEN2;
access_control_point_characteristic_uuid = ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID_WAVE_RADON_GEN2;
break;
default:
// Wave Plus
service_uuid = SERVICE_UUID;
characteristic_uuid = CHARACTERISTIC_UUID;
access_control_point_characteristic_uuid = ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID;
}

this->service_uuid_ = espbt::ESPBTUUID::from_raw(service_uuid);
this->sensors_data_characteristic_uuid_ = espbt::ESPBTUUID::from_raw(characteristic_uuid);
AirthingsWavePlus::AirthingsWavePlus() {
this->service_uuid_ = espbt::ESPBTUUID::from_raw(SERVICE_UUID);
this->sensors_data_characteristic_uuid_ = espbt::ESPBTUUID::from_raw(CHARACTERISTIC_UUID);
this->access_control_point_characteristic_uuid_ =
espbt::ESPBTUUID::from_raw(access_control_point_characteristic_uuid);
espbt::ESPBTUUID::from_raw(ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID);
}

} // namespace airthings_wave_plus

@@ -9,20 +9,13 @@ namespace airthings_wave_plus {

namespace espbt = esphome::esp32_ble_tracker;

enum WaveDeviceType : uint8_t { WAVE_PLUS = 0, WAVE_GEN2 = 1 };

static const char *const SERVICE_UUID = "b42e1c08-ade7-11e4-89d3-123b93f75cba";
static const char *const CHARACTERISTIC_UUID = "b42e2a68-ade7-11e4-89d3-123b93f75cba";
static const char *const ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID = "b42e2d06-ade7-11e4-89d3-123b93f75cba";

static const char *const SERVICE_UUID_WAVE_RADON_GEN2 = "b42e4a8e-ade7-11e4-89d3-123b93f75cba";
static const char *const CHARACTERISTIC_UUID_WAVE_RADON_GEN2 = "b42e4dcc-ade7-11e4-89d3-123b93f75cba";
static const char *const ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID_WAVE_RADON_GEN2 =
"b42e50d8-ade7-11e4-89d3-123b93f75cba";

class AirthingsWavePlus : public airthings_wave_base::AirthingsWaveBase {
public:
void setup() override;
AirthingsWavePlus();

void dump_config() override;

@@ -30,14 +23,12 @@ class AirthingsWavePlus : public airthings_wave_base::AirthingsWaveBase {
void set_radon_long_term(sensor::Sensor *radon_long_term) { radon_long_term_sensor_ = radon_long_term; }
void set_co2(sensor::Sensor *co2) { co2_sensor_ = co2; }
void set_illuminance(sensor::Sensor *illuminance) { illuminance_sensor_ = illuminance; }
void set_device_type(WaveDeviceType wave_device_type) { wave_device_type_ = wave_device_type; }

protected:
bool is_valid_radon_value_(uint16_t radon);
bool is_valid_co2_value_(uint16_t co2);

void read_sensors(uint8_t *raw_value, uint16_t value_len) override;
WaveDeviceType wave_device_type_{WaveDeviceType::WAVE_PLUS};

sensor::Sensor *radon_sensor_{nullptr};
sensor::Sensor *radon_long_term_sensor_{nullptr};

@@ -7,7 +7,6 @@ from esphome.const import (
CONF_ILLUMINANCE,
CONF_RADON,
CONF_RADON_LONG_TERM,
CONF_TVOC,
DEVICE_CLASS_CARBON_DIOXIDE,
DEVICE_CLASS_ILLUMINANCE,
ICON_RADIOACTIVE,
@@ -16,7 +15,6 @@ from esphome.const import (
UNIT_LUX,
UNIT_PARTS_PER_MILLION,
)
from esphome.types import ConfigType

DEPENDENCIES = airthings_wave_base.DEPENDENCIES

@@ -27,59 +25,35 @@ AirthingsWavePlus = airthings_wave_plus_ns.class_(
"AirthingsWavePlus", airthings_wave_base.AirthingsWaveBase
)

CONF_DEVICE_TYPE = "device_type"
WaveDeviceType = airthings_wave_plus_ns.enum("WaveDeviceType")
DEVICE_TYPES = {
"WAVE_PLUS": WaveDeviceType.WAVE_PLUS,
"WAVE_GEN2": WaveDeviceType.WAVE_GEN2,
}


def validate_wave_gen2_config(config: ConfigType) -> ConfigType:
"""Validate that Wave Gen2 devices don't have CO2 or TVOC sensors."""
if config[CONF_DEVICE_TYPE] == "WAVE_GEN2":
if CONF_CO2 in config:
raise cv.Invalid("Wave Gen2 devices do not support CO2 sensor")
# Check for TVOC in the base schema config
if CONF_TVOC in config:
raise cv.Invalid("Wave Gen2 devices do not support TVOC sensor")
return config


CONFIG_SCHEMA = cv.All(
airthings_wave_base.BASE_SCHEMA.extend(
{
cv.GenerateID(): cv.declare_id(AirthingsWavePlus),
cv.Optional(CONF_RADON): sensor.sensor_schema(
unit_of_measurement=UNIT_BECQUEREL_PER_CUBIC_METER,
icon=ICON_RADIOACTIVE,
accuracy_decimals=0,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_RADON_LONG_TERM): sensor.sensor_schema(
unit_of_measurement=UNIT_BECQUEREL_PER_CUBIC_METER,
icon=ICON_RADIOACTIVE,
accuracy_decimals=0,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_CO2): sensor.sensor_schema(
unit_of_measurement=UNIT_PARTS_PER_MILLION,
accuracy_decimals=0,
device_class=DEVICE_CLASS_CARBON_DIOXIDE,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_ILLUMINANCE): sensor.sensor_schema(
unit_of_measurement=UNIT_LUX,
accuracy_decimals=0,
device_class=DEVICE_CLASS_ILLUMINANCE,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_DEVICE_TYPE, default="WAVE_PLUS"): cv.enum(
DEVICE_TYPES, upper=True
),
}
),
validate_wave_gen2_config,
CONFIG_SCHEMA = airthings_wave_base.BASE_SCHEMA.extend(
{
cv.GenerateID(): cv.declare_id(AirthingsWavePlus),
cv.Optional(CONF_RADON): sensor.sensor_schema(
unit_of_measurement=UNIT_BECQUEREL_PER_CUBIC_METER,
icon=ICON_RADIOACTIVE,
accuracy_decimals=0,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_RADON_LONG_TERM): sensor.sensor_schema(
unit_of_measurement=UNIT_BECQUEREL_PER_CUBIC_METER,
icon=ICON_RADIOACTIVE,
accuracy_decimals=0,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_CO2): sensor.sensor_schema(
unit_of_measurement=UNIT_PARTS_PER_MILLION,
accuracy_decimals=0,
device_class=DEVICE_CLASS_CARBON_DIOXIDE,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_ILLUMINANCE): sensor.sensor_schema(
unit_of_measurement=UNIT_LUX,
accuracy_decimals=0,
device_class=DEVICE_CLASS_ILLUMINANCE,
state_class=STATE_CLASS_MEASUREMENT,
),
}
)

@@ -99,4 +73,3 @@ async def to_code(config):
if config_illuminance := config.get(CONF_ILLUMINANCE):
sens = await sensor.new_sensor(config_illuminance)
cg.add(var.set_illuminance(sens))
cg.add(var.set_device_type(config[CONF_DEVICE_TYPE]))

@@ -11,6 +11,15 @@ namespace esphome {
namespace api {

template<typename... X> class TemplatableStringValue : public TemplatableValue<std::string, X...> {
private:
// Helper to convert value to string - handles the case where value is already a string
template<typename T> static std::string value_to_string(T &&val) { return to_string(std::forward<T>(val)); }

// Overloads for string types - needed because std::to_string doesn't support them
static std::string value_to_string(const char *val) { return std::string(val); } // For lambdas returning .c_str()
static std::string value_to_string(const std::string &val) { return val; }
static std::string value_to_string(std::string &&val) { return std::move(val); }

public:
TemplatableStringValue() : TemplatableValue<std::string, X...>() {}

@@ -19,7 +28,7 @@ template<typename... X> class TemplatableStringValue : public TemplatableValue<s

template<typename F, enable_if_t<is_invocable<F, X...>::value, int> = 0>
TemplatableStringValue(F f)
: TemplatableValue<std::string, X...>([f](X... x) -> std::string { return to_string(f(x...)); }) {}
: TemplatableValue<std::string, X...>([f](X... x) -> std::string { return value_to_string(f(x...)); }) {}
};

template<typename... Ts> class TemplatableKeyValuePair {
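
Background for the hunk above: std::to_string() has no overloads for std::string or const char *, so a template lambda that already returns a string could not go through the old to_string(f(x...)) path. The added value_to_string overloads forward numeric results to to_string() and pass string results through unchanged. A minimal standalone sketch of that dispatch (the free functions here are illustrative, not the ESPHome API):

    #include <string>
    #include <utility>

    // Generic case: numeric results go through std::to_string.
    template<typename T> static std::string value_to_string(T &&val) { return std::to_string(std::forward<T>(val)); }

    // String results are passed through; std::to_string has no such overloads.
    static std::string value_to_string(const char *val) { return std::string(val); }
    static std::string value_to_string(const std::string &val) { return val; }
    static std::string value_to_string(std::string &&val) { return std::move(val); }

    int main() {
      std::string a = value_to_string(42);                    // "42" via std::to_string
      std::string b = value_to_string("hello");               // const char * overload
      std::string c = value_to_string(std::string("world"));  // pass-through
      return (a.empty() || b.empty() || c.empty()) ? 1 : 0;
    }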

@@ -3,8 +3,6 @@
#include "esphome/core/component.h"
#include "esphome/components/as3935/as3935.h"
#include "esphome/components/spi/spi.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/binary_sensor/binary_sensor.h"

namespace esphome {
namespace as3935_spi {

@@ -31,7 +31,7 @@ CONFIG_SCHEMA = cv.All(
async def to_code(config):
if CORE.is_esp32 or CORE.is_libretiny:
# https://github.com/ESP32Async/AsyncTCP
cg.add_library("ESP32Async/AsyncTCP", "3.4.4")
cg.add_library("ESP32Async/AsyncTCP", "3.4.5")
elif CORE.is_esp8266:
# https://github.com/ESP32Async/ESPAsyncTCP
cg.add_library("ESP32Async/ESPAsyncTCP", "2.0.0")

@@ -4,6 +4,7 @@
#include "esphome/components/network/ip_address.h"
#include "esphome/core/log.h"
#include "esphome/core/util.h"
#include "esphome/core/helpers.h"

#include <lwip/igmp.h>
#include <lwip/init.h>
@@ -71,7 +72,11 @@ bool E131Component::join_igmp_groups_() {
ip4_addr_t multicast_addr =
network::IPAddress(239, 255, ((universe.first >> 8) & 0xff), ((universe.first >> 0) & 0xff));

auto err = igmp_joingroup(IP4_ADDR_ANY4, &multicast_addr);
err_t err;
{
LwIPLock lock;
err = igmp_joingroup(IP4_ADDR_ANY4, &multicast_addr);
}

if (err) {
ESP_LOGW(TAG, "IGMP join for %d universe of E1.31 failed. Multicast might not work.", universe.first);
@@ -104,6 +109,7 @@ void E131Component::leave_(int universe) {
if (listen_method_ == E131_MULTICAST) {
ip4_addr_t multicast_addr = network::IPAddress(239, 255, ((universe >> 8) & 0xff), ((universe >> 0) & 0xff));

LwIPLock lock;
igmp_leavegroup(IP4_ADDR_ANY4, &multicast_addr);
}

@@ -114,6 +114,7 @@ void ESP32InternalGPIOPin::setup() {
if (flags_ & gpio::FLAG_OUTPUT) {
gpio_set_drive_capability(pin_, drive_strength_);
}
ESP_LOGD(TAG, "rtc: %d", SOC_GPIO_SUPPORT_RTC_INDEPENDENT);
}

void ESP32InternalGPIOPin::pin_mode(gpio::Flags flags) {

@@ -1,4 +1,5 @@
#include "esphome/core/helpers.h"
#include "esphome/core/defines.h"

#ifdef USE_ESP32

@@ -30,6 +31,45 @@ void Mutex::unlock() { xSemaphoreGive(this->handle_); }
IRAM_ATTR InterruptLock::InterruptLock() { portDISABLE_INTERRUPTS(); }
IRAM_ATTR InterruptLock::~InterruptLock() { portENABLE_INTERRUPTS(); }

#ifdef CONFIG_LWIP_TCPIP_CORE_LOCKING
#include "lwip/priv/tcpip_priv.h"
#endif

LwIPLock::LwIPLock() {
#ifdef CONFIG_LWIP_TCPIP_CORE_LOCKING
// When CONFIG_LWIP_TCPIP_CORE_LOCKING is enabled, lwIP uses a global mutex to protect
// its internal state. Any thread can take this lock to safely access lwIP APIs.
//
// sys_thread_tcpip(LWIP_CORE_LOCK_QUERY_HOLDER) returns true if the current thread
// already holds the lwIP core lock. This prevents recursive locking attempts and
// allows nested LwIPLock instances to work correctly.
//
// If we don't already hold the lock, acquire it. This will block until the lock
// is available if another thread currently holds it.
if (!sys_thread_tcpip(LWIP_CORE_LOCK_QUERY_HOLDER)) {
LOCK_TCPIP_CORE();
}
#endif
}

LwIPLock::~LwIPLock() {
#ifdef CONFIG_LWIP_TCPIP_CORE_LOCKING
// Only release the lwIP core lock if this thread currently holds it.
//
// sys_thread_tcpip(LWIP_CORE_LOCK_QUERY_HOLDER) queries lwIP's internal lock
// ownership tracking. It returns true only if the current thread is registered
// as the lock holder.
//
// This check is essential because:
// 1. We may not have acquired the lock in the constructor (if we already held it)
// 2. The lock might have been released by other means between constructor and destructor
// 3. Calling UNLOCK_TCPIP_CORE() without holding the lock causes undefined behavior
if (sys_thread_tcpip(LWIP_CORE_LOCK_QUERY_HOLDER)) {
UNLOCK_TCPIP_CORE();
}
#endif
}

void get_mac_address_raw(uint8_t *mac) { // NOLINT(readability-non-const-parameter)
#if defined(CONFIG_SOC_IEEE802154_SUPPORTED)
// When CONFIG_SOC_IEEE802154_SUPPORTED is defined, esp_efuse_mac_get_default

@@ -22,6 +22,10 @@ void Mutex::unlock() {}
IRAM_ATTR InterruptLock::InterruptLock() { state_ = xt_rsil(15); }
IRAM_ATTR InterruptLock::~InterruptLock() { xt_wsr_ps(state_); }

// ESP8266 doesn't support lwIP core locking, so this is a no-op
LwIPLock::LwIPLock() {}
LwIPLock::~LwIPLock() {}

void get_mac_address_raw(uint8_t *mac) { // NOLINT(readability-non-const-parameter)
wifi_get_macaddr(STATION_IF, mac);
}

@@ -342,11 +342,5 @@ async def to_code(config):

cg.add_define("USE_ETHERNET")

# Disable WiFi when using Ethernet to save memory
if CORE.using_esp_idf:
add_idf_sdkconfig_option("CONFIG_ESP_WIFI_ENABLED", False)
# Also disable WiFi/BT coexistence since WiFi is disabled
add_idf_sdkconfig_option("CONFIG_SW_COEXIST_ENABLE", False)

if CORE.using_arduino:
cg.add_library("WiFi", None)

@@ -420,6 +420,7 @@ network::IPAddresses EthernetComponent::get_ip_addresses() {
}

network::IPAddress EthernetComponent::get_dns_address(uint8_t num) {
LwIPLock lock;
const ip_addr_t *dns_ip = dns_getserver(num);
return dns_ip;
}
@@ -527,6 +528,7 @@ void EthernetComponent::start_connect_() {
ESPHL_ERROR_CHECK(err, "DHCPC set IP info error");

if (this->manual_ip_.has_value()) {
LwIPLock lock;
if (this->manual_ip_->dns1.is_set()) {
ip_addr_t d;
d = this->manual_ip_->dns1;
@@ -559,8 +561,13 @@ bool EthernetComponent::is_connected() { return this->state_ == EthernetComponen
void EthernetComponent::dump_connect_params_() {
esp_netif_ip_info_t ip;
esp_netif_get_ip_info(this->eth_netif_, &ip);
const ip_addr_t *dns_ip1 = dns_getserver(0);
const ip_addr_t *dns_ip2 = dns_getserver(1);
const ip_addr_t *dns_ip1;
const ip_addr_t *dns_ip2;
{
LwIPLock lock;
dns_ip1 = dns_getserver(0);
dns_ip2 = dns_getserver(1);
}

ESP_LOGCONFIG(TAG,
" IP Address: %s\n"

@@ -177,6 +177,10 @@ optional<FanRestoreState> Fan::restore_state_() {
return {};
}
void Fan::save_state_() {
if (this->restore_mode_ == FanRestoreMode::NO_RESTORE) {
return;
}

FanRestoreState state{};
state.state = this->state;
state.oscillating = this->oscillating;

@@ -1,6 +1,6 @@
import esphome.codegen as cg
from esphome.components import i2c, sensor
import esphome.config_validation as cv
from esphome.components import i2c, sensor
from esphome.const import (
CONF_ID,
DEVICE_CLASS_DISTANCE,

@@ -83,7 +83,7 @@ void HttpRequestUpdate::update_task(void *params) {
container.reset(); // Release ownership of the container's shared_ptr

valid = json::parse_json(response, [this_update](JsonObject root) -> bool {
if (!root.containsKey("name") || !root.containsKey("version") || !root.containsKey("builds")) {
if (!root["name"].is<const char *>() || !root["version"].is<const char *>() || !root["builds"].is<JsonArray>()) {
ESP_LOGE(TAG, "Manifest does not contain required fields");
return false;
}
@@ -91,26 +91,26 @@ void HttpRequestUpdate::update_task(void *params) {
this_update->update_info_.latest_version = root["version"].as<std::string>();

for (auto build : root["builds"].as<JsonArray>()) {
if (!build.containsKey("chipFamily")) {
if (!build["chipFamily"].is<const char *>()) {
ESP_LOGE(TAG, "Manifest does not contain required fields");
return false;
}
if (build["chipFamily"] == ESPHOME_VARIANT) {
if (!build.containsKey("ota")) {
if (!build["ota"].is<JsonObject>()) {
ESP_LOGE(TAG, "Manifest does not contain required fields");
return false;
}
auto ota = build["ota"];
if (!ota.containsKey("path") || !ota.containsKey("md5")) {
JsonObject ota = build["ota"].as<JsonObject>();
if (!ota["path"].is<const char *>() || !ota["md5"].is<const char *>()) {
ESP_LOGE(TAG, "Manifest does not contain required fields");
return false;
}
this_update->update_info_.firmware_url = ota["path"].as<std::string>();
this_update->update_info_.md5 = ota["md5"].as<std::string>();

if (ota.containsKey("summary"))
if (ota["summary"].is<const char *>())
this_update->update_info_.summary = ota["summary"].as<std::string>();
if (ota.containsKey("release_url"))
if (ota["release_url"].is<const char *>())
this_update->update_info_.release_url = ota["release_url"].as<std::string>();

return true;

@@ -12,6 +12,6 @@ CONFIG_SCHEMA = cv.All(

@coroutine_with_priority(1.0)
async def to_code(config):
cg.add_library("bblanchon/ArduinoJson", "6.18.5")
cg.add_library("bblanchon/ArduinoJson", "7.4.2")
cg.add_define("USE_JSON")
cg.add_global(json_ns.using)

@@ -1,83 +1,76 @@
#include "json_util.h"
#include "esphome/core/log.h"

// ArduinoJson::Allocator is included via ArduinoJson.h in json_util.h

namespace esphome {
namespace json {

static const char *const TAG = "json";

static std::vector<char> global_json_build_buffer; // NOLINT
static const auto ALLOCATOR = RAMAllocator<uint8_t>(RAMAllocator<uint8_t>::ALLOC_INTERNAL);
// Build an allocator for the JSON Library using the RAMAllocator class
struct SpiRamAllocator : ArduinoJson::Allocator {
void *allocate(size_t size) override { return this->allocator_.allocate(size); }

void deallocate(void *pointer) override {
// ArduinoJson's Allocator interface doesn't provide the size parameter in deallocate.
// RAMAllocator::deallocate() requires the size, which we don't have access to here.
// RAMAllocator::deallocate implementation just calls free() regardless of whether
// the memory was allocated with heap_caps_malloc or malloc.
// This is safe because ESP-IDF's heap implementation internally tracks the memory region
// and routes free() to the appropriate heap.
free(pointer); // NOLINT(cppcoreguidelines-owning-memory,cppcoreguidelines-no-malloc)
}

void *reallocate(void *ptr, size_t new_size) override {
return this->allocator_.reallocate(static_cast<uint8_t *>(ptr), new_size);
}

protected:
RAMAllocator<uint8_t> allocator_{RAMAllocator<uint8_t>(RAMAllocator<uint8_t>::NONE)};
};

std::string build_json(const json_build_t &f) {
// Here we are allocating up to 5kb of memory,
// with the heap size minus 2kb to be safe if less than 5kb
// as we can not have a true dynamic sized document.
// The excess memory is freed below with `shrinkToFit()`
auto free_heap = ALLOCATOR.get_max_free_block_size();
size_t request_size = std::min(free_heap, (size_t) 512);
while (true) {
ESP_LOGV(TAG, "Attempting to allocate %zu bytes for JSON serialization", request_size);
DynamicJsonDocument json_document(request_size);
if (json_document.capacity() == 0) {
ESP_LOGE(TAG, "Could not allocate memory for document! Requested %zu bytes, largest free heap block: %zu bytes",
request_size, free_heap);
return "{}";
}
JsonObject root = json_document.to<JsonObject>();
f(root);
if (json_document.overflowed()) {
if (request_size == free_heap) {
ESP_LOGE(TAG, "Could not allocate memory for document! Overflowed largest free heap block: %zu bytes",
free_heap);
return "{}";
}
request_size = std::min(request_size * 2, free_heap);
continue;
}
json_document.shrinkToFit();
ESP_LOGV(TAG, "Size after shrink %zu bytes", json_document.capacity());
std::string output;
serializeJson(json_document, output);
return output;
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
auto doc_allocator = SpiRamAllocator();
JsonDocument json_document(&doc_allocator);
if (json_document.overflowed()) {
ESP_LOGE(TAG, "Could not allocate memory for JSON document!");
return "{}";
}
JsonObject root = json_document.to<JsonObject>();
f(root);
if (json_document.overflowed()) {
ESP_LOGE(TAG, "Could not allocate memory for JSON document!");
return "{}";
}
std::string output;
serializeJson(json_document, output);
return output;
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
}

bool parse_json(const std::string &data, const json_parse_t &f) {
// Here we are allocating 1.5 times the data size,
// with the heap size minus 2kb to be safe if less than that
// as we can not have a true dynamic sized document.
// The excess memory is freed below with `shrinkToFit()`
auto free_heap = ALLOCATOR.get_max_free_block_size();
size_t request_size = std::min(free_heap, (size_t) (data.size() * 1.5));
while (true) {
DynamicJsonDocument json_document(request_size);
if (json_document.capacity() == 0) {
ESP_LOGE(TAG, "Could not allocate memory for document! Requested %zu bytes, free heap: %zu", request_size,
free_heap);
return false;
}
DeserializationError err = deserializeJson(json_document, data);
json_document.shrinkToFit();
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
auto doc_allocator = SpiRamAllocator();
JsonDocument json_document(&doc_allocator);
if (json_document.overflowed()) {
ESP_LOGE(TAG, "Could not allocate memory for JSON document!");
return false;
}
DeserializationError err = deserializeJson(json_document, data);

JsonObject root = json_document.as<JsonObject>();
JsonObject root = json_document.as<JsonObject>();

if (err == DeserializationError::Ok) {
return f(root);
} else if (err == DeserializationError::NoMemory) {
if (request_size * 2 >= free_heap) {
ESP_LOGE(TAG, "Can not allocate more memory for deserialization. Consider making source string smaller");
return false;
}
ESP_LOGV(TAG, "Increasing memory allocation.");
request_size *= 2;
continue;
} else {
ESP_LOGE(TAG, "Parse error: %s", err.c_str());
return false;
}
};
if (err == DeserializationError::Ok) {
return f(root);
} else if (err == DeserializationError::NoMemory) {
ESP_LOGE(TAG, "Can not allocate more memory for deserialization. Consider making source string smaller");
return false;
}
ESP_LOGE(TAG, "Parse error: %s", err.c_str());
return false;
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
}

} // namespace json
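
For context on the rewrite above: ArduinoJson 7 (see the bump to 7.4.2 in the json component) replaces the fixed-capacity DynamicJsonDocument with an elastic JsonDocument that grows through an ArduinoJson::Allocator, which is why the retry-with-a-larger-buffer loops could be dropped. A minimal sketch of the same pattern outside ESPHome; the MyAllocator name is illustrative, only the ArduinoJson calls are real API:

    #include <ArduinoJson.h>
    #include <cstdlib>
    #include <string>

    // Route ArduinoJson 7 allocations through custom functions (plain malloc/free here).
    struct MyAllocator : ArduinoJson::Allocator {
      void *allocate(size_t size) override { return malloc(size); }
      void deallocate(void *pointer) override { free(pointer); }
      void *reallocate(void *ptr, size_t new_size) override { return realloc(ptr, new_size); }
    };

    std::string make_payload() {
      MyAllocator allocator;
      JsonDocument doc(&allocator);  // elastic document, no capacity argument
      doc["state"] = "ON";
      doc["brightness"] = 200;
      if (doc.overflowed())          // set when an allocation failed
        return "{}";
      std::string output;
      serializeJson(doc, output);
      return output;
    }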

@@ -26,6 +26,10 @@ void Mutex::unlock() { xSemaphoreGive(this->handle_); }
IRAM_ATTR InterruptLock::InterruptLock() { portDISABLE_INTERRUPTS(); }
IRAM_ATTR InterruptLock::~InterruptLock() { portENABLE_INTERRUPTS(); }

// LibreTiny doesn't support lwIP core locking, so this is a no-op
LwIPLock::LwIPLock() {}
LwIPLock::~LwIPLock() {}

void get_mac_address_raw(uint8_t *mac) { // NOLINT(readability-non-const-parameter)
WiFi.macAddress(mac);
}

@@ -9,6 +9,7 @@ namespace light {
// See https://www.home-assistant.io/integrations/light.mqtt/#json-schema for documentation on the schema

void LightJSONSchema::dump_json(LightState &state, JsonObject root) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
if (state.supports_effects())
root["effect"] = state.get_effect_name();

@@ -52,7 +53,7 @@ void LightJSONSchema::dump_json(LightState &state, JsonObject root) {
if (values.get_color_mode() & ColorCapability::BRIGHTNESS)
root["brightness"] = uint8_t(values.get_brightness() * 255);

JsonObject color = root.createNestedObject("color");
JsonObject color = root["color"].to<JsonObject>();
if (values.get_color_mode() & ColorCapability::RGB) {
color["r"] = uint8_t(values.get_color_brightness() * values.get_red() * 255);
color["g"] = uint8_t(values.get_color_brightness() * values.get_green() * 255);
@@ -73,7 +74,7 @@ void LightJSONSchema::dump_json(LightState &state, JsonObject root) {
}

void LightJSONSchema::parse_color_json(LightState &state, LightCall &call, JsonObject root) {
if (root.containsKey("state")) {
if (root["state"].is<const char *>()) {
auto val = parse_on_off(root["state"]);
switch (val) {
case PARSE_ON:
@@ -90,40 +91,40 @@ void LightJSONSchema::parse_color_json(LightState &state, LightCall &call, JsonO
}
}

if (root.containsKey("brightness")) {
if (root["brightness"].is<uint8_t>()) {
call.set_brightness(float(root["brightness"]) / 255.0f);
}

if (root.containsKey("color")) {
if (root["color"].is<JsonObject>()) {
JsonObject color = root["color"];
// HA also encodes brightness information in the r, g, b values, so extract that and set it as color brightness.
float max_rgb = 0.0f;
if (color.containsKey("r")) {
if (color["r"].is<uint8_t>()) {
float r = float(color["r"]) / 255.0f;
max_rgb = fmaxf(max_rgb, r);
call.set_red(r);
}
if (color.containsKey("g")) {
if (color["g"].is<uint8_t>()) {
float g = float(color["g"]) / 255.0f;
max_rgb = fmaxf(max_rgb, g);
call.set_green(g);
}
if (color.containsKey("b")) {
if (color["b"].is<uint8_t>()) {
float b = float(color["b"]) / 255.0f;
max_rgb = fmaxf(max_rgb, b);
call.set_blue(b);
}
if (color.containsKey("r") || color.containsKey("g") || color.containsKey("b")) {
if (color["r"].is<uint8_t>() || color["g"].is<uint8_t>() || color["b"].is<uint8_t>()) {
call.set_color_brightness(max_rgb);
}

if (color.containsKey("c")) {
if (color["c"].is<uint8_t>()) {
call.set_cold_white(float(color["c"]) / 255.0f);
}
if (color.containsKey("w")) {
if (color["w"].is<uint8_t>()) {
// the HA scheme is ambiguous here, the same key is used for white channel in RGBW and warm
// white channel in RGBWW.
if (color.containsKey("c")) {
if (color["c"].is<uint8_t>()) {
call.set_warm_white(float(color["w"]) / 255.0f);
} else {
call.set_white(float(color["w"]) / 255.0f);
@@ -131,11 +132,11 @@ void LightJSONSchema::parse_color_json(LightState &state, LightCall &call, JsonO
}
}

if (root.containsKey("white_value")) { // legacy API
if (root["white_value"].is<uint8_t>()) { // legacy API
call.set_white(float(root["white_value"]) / 255.0f);
}

if (root.containsKey("color_temp")) {
if (root["color_temp"].is<uint16_t>()) {
call.set_color_temperature(float(root["color_temp"]));
}
}
@@ -143,17 +144,17 @@ void LightJSONSchema::parse_color_json(LightState &state, LightCall &call, JsonO
void LightJSONSchema::parse_json(LightState &state, LightCall &call, JsonObject root) {
LightJSONSchema::parse_color_json(state, call, root);

if (root.containsKey("flash")) {
if (root["flash"].is<uint32_t>()) {
auto length = uint32_t(float(root["flash"]) * 1000);
call.set_flash_length(length);
}

if (root.containsKey("transition")) {
if (root["transition"].is<uint16_t>()) {
auto length = uint32_t(float(root["transition"]) * 1000);
call.set_transition_length(length);
}

if (root.containsKey("effect")) {
if (root["effect"].is<const char *>()) {
const char *effect = root["effect"];
call.set_effect(effect);
}
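
The pattern applied throughout this file (and in the http_request and mqtt hunks elsewhere in this comparison) is the ArduinoJson 7 idiom: containsKey() is deprecated, and root["key"].is<T>() both checks presence and verifies the value can be read as T, since a missing key yields a null variant for which is<T>() is false. A small illustration, assuming only the ArduinoJson 7 API shown in the diffs:

    #include <ArduinoJson.h>

    void handle(JsonObject root) {
      // One check replaces the old containsKey("brightness") plus a separate type check:
      // a missing key gives a null JsonVariant, and is<uint8_t>() on null returns false.
      if (root["brightness"].is<uint8_t>()) {
        uint8_t brightness = root["brightness"];
        (void) brightness;
      }
    }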

@@ -1,16 +1,16 @@
import esphome.codegen as cg
from esphome.components import i2c, sensor
import esphome.config_validation as cv
from esphome.components import i2c, sensor
from esphome.const import (
CONF_ID,
CONF_PRESSURE,
CONF_TEMPERATURE,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
ICON_THERMOMETER,
CONF_PRESSURE,
STATE_CLASS_MEASUREMENT,
UNIT_CELSIUS,
UNIT_HECTOPASCAL,
ICON_THERMOMETER,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_PRESSURE,
)

CODEOWNERS = ["@nagisa"]

@@ -264,7 +264,7 @@ class MeterType(WidgetType):
color_start,
color_end,
v[CONF_LOCAL],
size.process(v[CONF_WIDTH]),
await size.process(v[CONF_WIDTH]),
),
)
if t == CONF_IMAGE:

@@ -55,7 +55,8 @@ void MQTTAlarmControlPanelComponent::dump_config() {
}

void MQTTAlarmControlPanelComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
JsonArray supported_features = root.createNestedArray(MQTT_SUPPORTED_FEATURES);
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
JsonArray supported_features = root[MQTT_SUPPORTED_FEATURES].to<JsonArray>();
const uint32_t acp_supported_features = this->alarm_control_panel_->get_supported_features();
if (acp_supported_features & ACP_FEAT_ARM_AWAY) {
supported_features.add("arm_away");

@@ -30,6 +30,7 @@ MQTTBinarySensorComponent::MQTTBinarySensorComponent(binary_sensor::BinarySensor
}

void MQTTBinarySensorComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
if (!this->binary_sensor_->get_device_class().empty())
root[MQTT_DEVICE_CLASS] = this->binary_sensor_->get_device_class();
if (this->binary_sensor_->is_status_binary_sensor())

@@ -31,9 +31,12 @@ void MQTTButtonComponent::dump_config() {
}

void MQTTButtonComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
config.state_topic = false;
if (!this->button_->get_device_class().empty())
if (!this->button_->get_device_class().empty()) {
root[MQTT_DEVICE_CLASS] = this->button_->get_device_class();
}
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
}

std::string MQTTButtonComponent::component_type() const { return "button"; }

@@ -92,6 +92,7 @@ void MQTTClientComponent::send_device_info_() {
std::string topic = "esphome/discover/";
topic.append(App.get_name());

// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
this->publish_json(
topic,
[](JsonObject root) {
@@ -147,6 +148,7 @@ void MQTTClientComponent::send_device_info_() {
#endif
},
2, this->discovery_info_.retain);
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
}

void MQTTClientComponent::dump_config() {
@@ -191,13 +193,17 @@ void MQTTClientComponent::start_dnslookup_() {
this->dns_resolve_error_ = false;
this->dns_resolved_ = false;
ip_addr_t addr;
err_t err;
{
LwIPLock lock;
#if USE_NETWORK_IPV6
err_t err = dns_gethostbyname_addrtype(this->credentials_.address.c_str(), &addr,
MQTTClientComponent::dns_found_callback, this, LWIP_DNS_ADDRTYPE_IPV6_IPV4);
err = dns_gethostbyname_addrtype(this->credentials_.address.c_str(), &addr, MQTTClientComponent::dns_found_callback,
this, LWIP_DNS_ADDRTYPE_IPV6_IPV4);
#else
err_t err = dns_gethostbyname_addrtype(this->credentials_.address.c_str(), &addr,
MQTTClientComponent::dns_found_callback, this, LWIP_DNS_ADDRTYPE_IPV4);
err = dns_gethostbyname_addrtype(this->credentials_.address.c_str(), &addr, MQTTClientComponent::dns_found_callback,
this, LWIP_DNS_ADDRTYPE_IPV4);
#endif /* USE_NETWORK_IPV6 */
}
switch (err) {
case ERR_OK: {
// Got IP immediately
@@ -14,6 +14,7 @@ static const char *const TAG = "mqtt.climate";
|
||||
using namespace esphome::climate;
|
||||
|
||||
void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
auto traits = this->device_->get_traits();
|
||||
// current_temperature_topic
|
||||
if (traits.get_supports_current_temperature()) {
|
||||
@@ -28,7 +29,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
|
||||
// mode_state_topic
|
||||
root[MQTT_MODE_STATE_TOPIC] = this->get_mode_state_topic();
|
||||
// modes
|
||||
JsonArray modes = root.createNestedArray(MQTT_MODES);
|
||||
JsonArray modes = root[MQTT_MODES].to<JsonArray>();
|
||||
// sort array for nice UI in HA
|
||||
if (traits.supports_mode(CLIMATE_MODE_AUTO))
|
||||
modes.add("auto");
|
||||
@@ -89,7 +90,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
|
||||
// preset_mode_state_topic
|
||||
root[MQTT_PRESET_MODE_STATE_TOPIC] = this->get_preset_state_topic();
|
||||
// presets
|
||||
JsonArray presets = root.createNestedArray("preset_modes");
|
||||
JsonArray presets = root["preset_modes"].to<JsonArray>();
|
||||
if (traits.supports_preset(CLIMATE_PRESET_HOME))
|
||||
presets.add("home");
|
||||
if (traits.supports_preset(CLIMATE_PRESET_AWAY))
|
||||
@@ -119,7 +120,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
|
||||
// fan_mode_state_topic
|
||||
root[MQTT_FAN_MODE_STATE_TOPIC] = this->get_fan_mode_state_topic();
|
||||
// fan_modes
|
||||
JsonArray fan_modes = root.createNestedArray("fan_modes");
|
||||
JsonArray fan_modes = root["fan_modes"].to<JsonArray>();
|
||||
if (traits.supports_fan_mode(CLIMATE_FAN_ON))
|
||||
fan_modes.add("on");
|
||||
if (traits.supports_fan_mode(CLIMATE_FAN_OFF))
|
||||
@@ -150,7 +151,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
|
||||
// swing_mode_state_topic
|
||||
root[MQTT_SWING_MODE_STATE_TOPIC] = this->get_swing_mode_state_topic();
|
||||
// swing_modes
|
||||
JsonArray swing_modes = root.createNestedArray("swing_modes");
|
||||
JsonArray swing_modes = root["swing_modes"].to<JsonArray>();
|
||||
if (traits.supports_swing_mode(CLIMATE_SWING_OFF))
|
||||
swing_modes.add("off");
|
||||
if (traits.supports_swing_mode(CLIMATE_SWING_BOTH))
|
||||
@@ -163,6 +164,7 @@ void MQTTClimateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCo
|
||||
|
||||
config.state_topic = false;
|
||||
config.command_topic = false;
|
||||
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
|
||||
}
|
||||
void MQTTClimateComponent::setup() {
|
||||
auto traits = this->device_->get_traits();
|
||||
|
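The array replacements above (the same change appears in the light, select, event, component and web_server hunks further down) follow from the bblanchon/ArduinoJson 6.18.5 → 7.4.2 dependency change in platformio.ini at the end of this diff: createNestedArray()/createNestedObject() no longer exist in ArduinoJson 7, and a member is instead converted in place with to<JsonArray>() or to<JsonObject>(). A minimal sketch, assuming ArduinoJson 7 and purely illustrative key names:

#include <ArduinoJson.h>

void build_discovery(JsonObject root) {
  // ArduinoJson 6 (removed in 7):
  //   JsonArray modes   = root.createNestedArray("modes");
  //   JsonObject device = root.createNestedObject("device");
  // ArduinoJson 7: convert the member to the wanted container, then fill it as before.
  JsonArray modes = root["modes"].to<JsonArray>();
  modes.add("auto");
  modes.add("off");

  JsonObject device = root["device"].to<JsonObject>();
  device["identifiers"] = "aa:bb:cc:dd:ee:ff";
}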
@@ -70,6 +70,7 @@ bool MQTTComponent::send_discovery_() {
|
||||
|
||||
ESP_LOGV(TAG, "'%s': Sending discovery", this->friendly_name().c_str());
|
||||
|
||||
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
return global_mqtt_client->publish_json(
|
||||
this->get_discovery_topic_(discovery_info),
|
||||
[this](JsonObject root) {
|
||||
@@ -155,7 +156,7 @@ bool MQTTComponent::send_discovery_() {
|
||||
}
|
||||
std::string node_area = App.get_area();
|
||||
|
||||
JsonObject device_info = root.createNestedObject(MQTT_DEVICE);
|
||||
JsonObject device_info = root[MQTT_DEVICE].to<JsonObject>();
|
||||
const auto mac = get_mac_address();
|
||||
device_info[MQTT_DEVICE_IDENTIFIERS] = mac;
|
||||
device_info[MQTT_DEVICE_NAME] = node_friendly_name;
|
||||
@@ -192,6 +193,7 @@ bool MQTTComponent::send_discovery_() {
|
||||
device_info[MQTT_DEVICE_CONNECTIONS][0][1] = mac;
|
||||
},
|
||||
this->qos_, discovery_info.retain);
|
||||
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
|
||||
}
|
||||
|
||||
uint8_t MQTTComponent::get_qos() const { return this->qos_; }
|
||||
|
@@ -67,6 +67,7 @@ void MQTTCoverComponent::dump_config() {
|
||||
}
|
||||
}
|
||||
void MQTTCoverComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
if (!this->cover_->get_device_class().empty())
|
||||
root[MQTT_DEVICE_CLASS] = this->cover_->get_device_class();
|
||||
|
||||
|
@@ -20,13 +20,13 @@ MQTTDateComponent::MQTTDateComponent(DateEntity *date) : date_(date) {}
|
||||
void MQTTDateComponent::setup() {
|
||||
this->subscribe_json(this->get_command_topic_(), [this](const std::string &topic, JsonObject root) {
|
||||
auto call = this->date_->make_call();
|
||||
if (root.containsKey("year")) {
|
||||
if (root["year"].is<uint16_t>()) {
|
||||
call.set_year(root["year"]);
|
||||
}
|
||||
if (root.containsKey("month")) {
|
||||
if (root["month"].is<uint8_t>()) {
|
||||
call.set_month(root["month"]);
|
||||
}
|
||||
if (root.containsKey("day")) {
|
||||
if (root["day"].is<uint8_t>()) {
|
||||
call.set_day(root["day"]);
|
||||
}
|
||||
call.perform();
|
||||
@@ -55,6 +55,7 @@ bool MQTTDateComponent::send_initial_state() {
|
||||
}
|
||||
bool MQTTDateComponent::publish_state(uint16_t year, uint8_t month, uint8_t day) {
|
||||
return this->publish_json(this->get_state_topic_(), [year, month, day](JsonObject root) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
root["year"] = year;
|
||||
root["month"] = month;
|
||||
root["day"] = day;
|
||||
|
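The subscribe_json callback above (and the identical date-time and time callbacks later in this diff) drops ArduinoJson 6's containsKey() in favour of ArduinoJson 7's is<T>(), which returns false both when the key is absent and when the stored value cannot be read as the requested type, so one check replaces the old presence test. A small sketch, assuming ArduinoJson 7; the key name is illustrative:

#include <ArduinoJson.h>
#include <cstdint>

void handle_command(JsonObject root) {
  // ArduinoJson 6: if (root.containsKey("year")) { ... }
  // ArduinoJson 7: a missing key or a wrong-typed value both yield false.
  if (root["year"].is<uint16_t>()) {
    uint16_t year = root["year"];
    // ... apply the value ...
  }
}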
@@ -20,22 +20,22 @@ MQTTDateTimeComponent::MQTTDateTimeComponent(DateTimeEntity *datetime) : datetim
|
||||
void MQTTDateTimeComponent::setup() {
|
||||
this->subscribe_json(this->get_command_topic_(), [this](const std::string &topic, JsonObject root) {
|
||||
auto call = this->datetime_->make_call();
|
||||
if (root.containsKey("year")) {
|
||||
if (root["year"].is<uint16_t>()) {
|
||||
call.set_year(root["year"]);
|
||||
}
|
||||
if (root.containsKey("month")) {
|
||||
if (root["month"].is<uint8_t>()) {
|
||||
call.set_month(root["month"]);
|
||||
}
|
||||
if (root.containsKey("day")) {
|
||||
if (root["day"].is<uint8_t>()) {
|
||||
call.set_day(root["day"]);
|
||||
}
|
||||
if (root.containsKey("hour")) {
|
||||
if (root["hour"].is<uint8_t>()) {
|
||||
call.set_hour(root["hour"]);
|
||||
}
|
||||
if (root.containsKey("minute")) {
|
||||
if (root["minute"].is<uint8_t>()) {
|
||||
call.set_minute(root["minute"]);
|
||||
}
|
||||
if (root.containsKey("second")) {
|
||||
if (root["second"].is<uint8_t>()) {
|
||||
call.set_second(root["second"]);
|
||||
}
|
||||
call.perform();
|
||||
@@ -68,6 +68,7 @@ bool MQTTDateTimeComponent::send_initial_state() {
|
||||
bool MQTTDateTimeComponent::publish_state(uint16_t year, uint8_t month, uint8_t day, uint8_t hour, uint8_t minute,
|
||||
uint8_t second) {
|
||||
return this->publish_json(this->get_state_topic_(), [year, month, day, hour, minute, second](JsonObject root) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
root["year"] = year;
|
||||
root["month"] = month;
|
||||
root["day"] = day;
|
||||
|
@@ -16,7 +16,8 @@ using namespace esphome::event;
|
||||
MQTTEventComponent::MQTTEventComponent(event::Event *event) : event_(event) {}
|
||||
|
||||
void MQTTEventComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
JsonArray event_types = root.createNestedArray(MQTT_EVENT_TYPES);
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
JsonArray event_types = root[MQTT_EVENT_TYPES].to<JsonArray>();
|
||||
for (const auto &event_type : this->event_->get_event_types())
|
||||
event_types.add(event_type);
|
||||
|
||||
@@ -40,8 +41,10 @@ void MQTTEventComponent::dump_config() {
|
||||
}
|
||||
|
||||
bool MQTTEventComponent::publish_event_(const std::string &event_type) {
|
||||
return this->publish_json(this->get_state_topic_(),
|
||||
[event_type](JsonObject root) { root[MQTT_EVENT_TYPE] = event_type; });
|
||||
return this->publish_json(this->get_state_topic_(), [event_type](JsonObject root) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
root[MQTT_EVENT_TYPE] = event_type;
|
||||
});
|
||||
}
|
||||
|
||||
std::string MQTTEventComponent::component_type() const { return "event"; }
|
||||
|
@@ -143,6 +143,7 @@ void MQTTFanComponent::dump_config() {
|
||||
bool MQTTFanComponent::send_initial_state() { return this->publish_state(); }
|
||||
|
||||
void MQTTFanComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
if (this->state_->get_traits().supports_direction()) {
|
||||
root[MQTT_DIRECTION_COMMAND_TOPIC] = this->get_direction_command_topic();
|
||||
root[MQTT_DIRECTION_STATE_TOPIC] = this->get_direction_state_topic();
|
||||
|
@@ -32,17 +32,21 @@ void MQTTJSONLightComponent::setup() {
|
||||
MQTTJSONLightComponent::MQTTJSONLightComponent(LightState *state) : state_(state) {}
|
||||
|
||||
bool MQTTJSONLightComponent::publish_state_() {
|
||||
return this->publish_json(this->get_state_topic_(),
|
||||
[this](JsonObject root) { LightJSONSchema::dump_json(*this->state_, root); });
|
||||
return this->publish_json(this->get_state_topic_(), [this](JsonObject root) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
LightJSONSchema::dump_json(*this->state_, root);
|
||||
});
|
||||
}
|
||||
LightState *MQTTJSONLightComponent::get_state() const { return this->state_; }
|
||||
|
||||
void MQTTJSONLightComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
root["schema"] = "json";
|
||||
auto traits = this->state_->get_traits();
|
||||
|
||||
root[MQTT_COLOR_MODE] = true;
|
||||
JsonArray color_modes = root.createNestedArray("supported_color_modes");
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
JsonArray color_modes = root["supported_color_modes"].to<JsonArray>();
|
||||
if (traits.supports_color_mode(ColorMode::ON_OFF))
|
||||
color_modes.add("onoff");
|
||||
if (traits.supports_color_mode(ColorMode::BRIGHTNESS))
|
||||
@@ -67,7 +71,7 @@ void MQTTJSONLightComponent::send_discovery(JsonObject root, mqtt::SendDiscovery
|
||||
|
||||
if (this->state_->supports_effects()) {
|
||||
root["effect"] = true;
|
||||
JsonArray effect_list = root.createNestedArray(MQTT_EFFECT_LIST);
|
||||
JsonArray effect_list = root[MQTT_EFFECT_LIST].to<JsonArray>();
|
||||
for (auto *effect : this->state_->get_effects())
|
||||
effect_list.add(effect->get_name());
|
||||
effect_list.add("None");
|
||||
|
@@ -38,8 +38,10 @@ void MQTTLockComponent::dump_config() {
|
||||
std::string MQTTLockComponent::component_type() const { return "lock"; }
|
||||
const EntityBase *MQTTLockComponent::get_entity() const { return this->lock_; }
|
||||
void MQTTLockComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
if (this->lock_->traits.get_assumed_state())
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
if (this->lock_->traits.get_assumed_state()) {
|
||||
root[MQTT_OPTIMISTIC] = true;
|
||||
}
|
||||
if (this->lock_->traits.get_supports_open())
|
||||
root[MQTT_PAYLOAD_OPEN] = "OPEN";
|
||||
}
|
||||
|
@@ -40,6 +40,7 @@ const EntityBase *MQTTNumberComponent::get_entity() const { return this->number_
|
||||
void MQTTNumberComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
const auto &traits = number_->traits;
|
||||
// https://www.home-assistant.io/integrations/number.mqtt/
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
root[MQTT_MIN] = traits.get_min_value();
|
||||
root[MQTT_MAX] = traits.get_max_value();
|
||||
root[MQTT_STEP] = traits.get_step();
|
||||
|
@@ -35,7 +35,8 @@ const EntityBase *MQTTSelectComponent::get_entity() const { return this->select_
|
||||
void MQTTSelectComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
const auto &traits = select_->traits;
|
||||
// https://www.home-assistant.io/integrations/select.mqtt/
|
||||
JsonArray options = root.createNestedArray(MQTT_OPTIONS);
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
JsonArray options = root[MQTT_OPTIONS].to<JsonArray>();
|
||||
for (const auto &option : traits.get_options())
|
||||
options.add(option);
|
||||
|
||||
|
@@ -44,8 +44,10 @@ void MQTTSensorComponent::set_expire_after(uint32_t expire_after) { this->expire
|
||||
void MQTTSensorComponent::disable_expire_after() { this->expire_after_ = 0; }
|
||||
|
||||
void MQTTSensorComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
if (!this->sensor_->get_device_class().empty())
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
if (!this->sensor_->get_device_class().empty()) {
|
||||
root[MQTT_DEVICE_CLASS] = this->sensor_->get_device_class();
|
||||
}
|
||||
|
||||
if (!this->sensor_->get_unit_of_measurement().empty())
|
||||
root[MQTT_UNIT_OF_MEASUREMENT] = this->sensor_->get_unit_of_measurement();
|
||||
|
@@ -45,8 +45,10 @@ void MQTTSwitchComponent::dump_config() {
|
||||
std::string MQTTSwitchComponent::component_type() const { return "switch"; }
|
||||
const EntityBase *MQTTSwitchComponent::get_entity() const { return this->switch_; }
|
||||
void MQTTSwitchComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
if (this->switch_->assumed_state())
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
if (this->switch_->assumed_state()) {
|
||||
root[MQTT_OPTIMISTIC] = true;
|
||||
}
|
||||
}
|
||||
bool MQTTSwitchComponent::send_initial_state() { return this->publish_state(this->switch_->state); }
|
||||
|
||||
|
@@ -34,6 +34,7 @@ std::string MQTTTextComponent::component_type() const { return "text"; }
|
||||
const EntityBase *MQTTTextComponent::get_entity() const { return this->text_; }
|
||||
|
||||
void MQTTTextComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
switch (this->text_->traits.get_mode()) {
|
||||
case TEXT_MODE_TEXT:
|
||||
root[MQTT_MODE] = "text";
|
||||
|
@@ -15,8 +15,10 @@ using namespace esphome::text_sensor;
|
||||
|
||||
MQTTTextSensor::MQTTTextSensor(TextSensor *sensor) : sensor_(sensor) {}
|
||||
void MQTTTextSensor::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
if (!this->sensor_->get_device_class().empty())
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
if (!this->sensor_->get_device_class().empty()) {
|
||||
root[MQTT_DEVICE_CLASS] = this->sensor_->get_device_class();
|
||||
}
|
||||
config.command_topic = false;
|
||||
}
|
||||
void MQTTTextSensor::setup() {
|
||||
|
@@ -20,13 +20,13 @@ MQTTTimeComponent::MQTTTimeComponent(TimeEntity *time) : time_(time) {}
|
||||
void MQTTTimeComponent::setup() {
|
||||
this->subscribe_json(this->get_command_topic_(), [this](const std::string &topic, JsonObject root) {
|
||||
auto call = this->time_->make_call();
|
||||
if (root.containsKey("hour")) {
|
||||
if (root["hour"].is<uint8_t>()) {
|
||||
call.set_hour(root["hour"]);
|
||||
}
|
||||
if (root.containsKey("minute")) {
|
||||
if (root["minute"].is<uint8_t>()) {
|
||||
call.set_minute(root["minute"]);
|
||||
}
|
||||
if (root.containsKey("second")) {
|
||||
if (root["second"].is<uint8_t>()) {
|
||||
call.set_second(root["second"]);
|
||||
}
|
||||
call.perform();
|
||||
@@ -55,6 +55,7 @@ bool MQTTTimeComponent::send_initial_state() {
|
||||
}
|
||||
bool MQTTTimeComponent::publish_state(uint8_t hour, uint8_t minute, uint8_t second) {
|
||||
return this->publish_json(this->get_state_topic_(), [hour, minute, second](JsonObject root) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
root["hour"] = hour;
|
||||
root["minute"] = minute;
|
||||
root["second"] = second;
|
||||
|
@@ -41,6 +41,7 @@ bool MQTTUpdateComponent::publish_state() {
|
||||
}
|
||||
|
||||
void MQTTUpdateComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
root["schema"] = "json";
|
||||
root[MQTT_PAYLOAD_INSTALL] = "INSTALL";
|
||||
}
|
||||
|
@@ -49,8 +49,10 @@ void MQTTValveComponent::dump_config() {
|
||||
}
|
||||
}
|
||||
void MQTTValveComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
|
||||
if (!this->valve_->get_device_class().empty())
|
||||
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
if (!this->valve_->get_device_class().empty()) {
|
||||
root[MQTT_DEVICE_CLASS] = this->valve_->get_device_class();
|
||||
}
|
||||
|
||||
auto traits = this->valve_->get_traits();
|
||||
if (traits.get_is_assumed_state()) {
|
||||
|
@@ -356,7 +356,7 @@ void MS8607Component::read_humidity_(float temperature_float) {
|
||||
|
||||
// map 16 bit humidity value into range [-6%, 118%]
|
||||
float const humidity_partial = double(humidity) / (1 << 16);
|
||||
float const humidity_percentage = lerp(humidity_partial, -6.0, 118.0);
|
||||
float const humidity_percentage = std::lerp(-6.0, 118.0, humidity_partial);
|
||||
float const compensated_humidity_percentage =
|
||||
humidity_percentage + (20 - temperature_float) * MS8607_H_TEMP_COEFFICIENT;
|
||||
ESP_LOGD(TAG, "Compensated for temperature, humidity=%.2f%%", compensated_humidity_percentage);
|
||||
|
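This hunk (like the servo and OpenTherm output hunks later in the diff) swaps ESPHome's own lerp(completion, start, end) helper for C++20 std::lerp(a, b, t), where the interpolation factor moves from the first argument to the last. A minimal sketch of the same mapping, detached from the sensor code:

#include <cmath>  // std::lerp, C++20

// Map a 0..1 fraction onto the MS8607 humidity range [-6 %, 118 %].
float to_humidity_percent(float partial) {
  // Old helper style: lerp(partial, -6.0f, 118.0f)  -- factor first.
  // std::lerp style: endpoints first, factor last.
  return std::lerp(-6.0f, 118.0f, partial);
}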
@@ -2,7 +2,7 @@ import logging
|
||||
|
||||
from esphome import automation
|
||||
import esphome.codegen as cg
|
||||
from esphome.components.const import CONF_REQUEST_HEADERS
|
||||
from esphome.components.const import CONF_BYTE_ORDER, CONF_REQUEST_HEADERS
|
||||
from esphome.components.http_request import CONF_HTTP_REQUEST_ID, HttpRequestComponent
|
||||
from esphome.components.image import (
|
||||
CONF_INVERT_ALPHA,
|
||||
@@ -11,6 +11,7 @@ from esphome.components.image import (
|
||||
Image_,
|
||||
get_image_type_enum,
|
||||
get_transparency_enum,
|
||||
validate_settings,
|
||||
)
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
@@ -161,6 +162,7 @@ CONFIG_SCHEMA = cv.Schema(
|
||||
rp2040_arduino=cv.Version(0, 0, 0),
|
||||
host=cv.Version(0, 0, 0),
|
||||
),
|
||||
validate_settings,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -213,6 +215,7 @@ async def to_code(config):
|
||||
get_image_type_enum(config[CONF_TYPE]),
|
||||
transparent,
|
||||
config[CONF_BUFFER_SIZE],
|
||||
config.get(CONF_BYTE_ORDER) != "LITTLE_ENDIAN",
|
||||
)
|
||||
await cg.register_component(var, config)
|
||||
await cg.register_parented(var, config[CONF_HTTP_REQUEST_ID])
|
||||
|
@@ -35,14 +35,15 @@ inline bool is_color_on(const Color &color) {
|
||||
}
|
||||
|
||||
OnlineImage::OnlineImage(const std::string &url, int width, int height, ImageFormat format, ImageType type,
|
||||
image::Transparency transparency, uint32_t download_buffer_size)
|
||||
image::Transparency transparency, uint32_t download_buffer_size, bool is_big_endian)
|
||||
: Image(nullptr, 0, 0, type, transparency),
|
||||
buffer_(nullptr),
|
||||
download_buffer_(download_buffer_size),
|
||||
download_buffer_initial_size_(download_buffer_size),
|
||||
format_(format),
|
||||
fixed_width_(width),
|
||||
fixed_height_(height) {
|
||||
fixed_height_(height),
|
||||
is_big_endian_(is_big_endian) {
|
||||
this->set_url(url);
|
||||
}
|
||||
|
||||
@@ -296,7 +297,7 @@ void OnlineImage::draw_pixel_(int x, int y, Color color) {
|
||||
break;
|
||||
}
|
||||
case ImageType::IMAGE_TYPE_GRAYSCALE: {
|
||||
uint8_t gray = static_cast<uint8_t>(0.2125 * color.r + 0.7154 * color.g + 0.0721 * color.b);
|
||||
auto gray = static_cast<uint8_t>(0.2125 * color.r + 0.7154 * color.g + 0.0721 * color.b);
|
||||
if (this->transparency_ == image::TRANSPARENCY_CHROMA_KEY) {
|
||||
if (gray == 1) {
|
||||
gray = 0;
|
||||
@@ -314,8 +315,13 @@ void OnlineImage::draw_pixel_(int x, int y, Color color) {
|
||||
case ImageType::IMAGE_TYPE_RGB565: {
|
||||
this->map_chroma_key(color);
|
||||
uint16_t col565 = display::ColorUtil::color_to_565(color);
|
||||
this->buffer_[pos + 0] = static_cast<uint8_t>((col565 >> 8) & 0xFF);
|
||||
this->buffer_[pos + 1] = static_cast<uint8_t>(col565 & 0xFF);
|
||||
if (this->is_big_endian_) {
|
||||
this->buffer_[pos + 0] = static_cast<uint8_t>((col565 >> 8) & 0xFF);
|
||||
this->buffer_[pos + 1] = static_cast<uint8_t>(col565 & 0xFF);
|
||||
} else {
|
||||
this->buffer_[pos + 0] = static_cast<uint8_t>(col565 & 0xFF);
|
||||
this->buffer_[pos + 1] = static_cast<uint8_t>((col565 >> 8) & 0xFF);
|
||||
}
|
||||
if (this->transparency_ == image::TRANSPARENCY_ALPHA_CHANNEL) {
|
||||
this->buffer_[pos + 2] = color.w;
|
||||
}
|
||||
|
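The online_image hunks above thread a new is_big_endian flag through the constructor so the 16-bit RGB565 buffer can be written in either byte order instead of always high-byte-first. A standalone sketch of the packing; the function and variable names are illustrative:

#include <cstdint>

// Pack 8-bit RGB into RGB565 and store the two bytes in the requested order.
void store_rgb565(uint8_t *dst, uint8_t r, uint8_t g, uint8_t b, bool big_endian) {
  uint16_t col565 = static_cast<uint16_t>(((r & 0xF8) << 8) | ((g & 0xFC) << 3) | (b >> 3));
  if (big_endian) {
    dst[0] = static_cast<uint8_t>(col565 >> 8);    // high byte first
    dst[1] = static_cast<uint8_t>(col565 & 0xFF);
  } else {
    dst[0] = static_cast<uint8_t>(col565 & 0xFF);  // low byte first
    dst[1] = static_cast<uint8_t>(col565 >> 8);
  }
}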
@@ -50,7 +50,7 @@ class OnlineImage : public PollingComponent,
|
||||
* @param buffer_size Size of the buffer used to download the image.
|
||||
*/
|
||||
OnlineImage(const std::string &url, int width, int height, ImageFormat format, image::ImageType type,
|
||||
image::Transparency transparency, uint32_t buffer_size);
|
||||
image::Transparency transparency, uint32_t buffer_size, bool is_big_endian);
|
||||
|
||||
void draw(int x, int y, display::Display *display, Color color_on, Color color_off) override;
|
||||
|
||||
@@ -164,6 +164,11 @@ class OnlineImage : public PollingComponent,
|
||||
const int fixed_width_;
|
||||
/** height requested on configuration, or 0 if none specified. */
|
||||
const int fixed_height_;
|
||||
/**
|
||||
* Whether the image is stored in big-endian format.
|
||||
* This is used to determine how to store 16 bit colors in the buffer.
|
||||
*/
|
||||
bool is_big_endian_;
|
||||
/**
|
||||
* Actual width of the current image. If fixed_width_ is specified,
|
||||
* this will be equal to it; otherwise it will be set once the decoding
|
||||
|
@@ -10,7 +10,7 @@ void opentherm::OpenthermOutput::write_state(float state) {
|
||||
ESP_LOGD(TAG, "Received state: %.2f. Min value: %.2f, max value: %.2f", state, min_value_, max_value_);
|
||||
this->state = state < 0.003 && this->zero_means_zero_
|
||||
? 0.0
|
||||
: clamp(lerp(state, min_value_, max_value_), min_value_, max_value_);
|
||||
: clamp(std::lerp(min_value_, max_value_, state), min_value_, max_value_);
|
||||
this->has_state_ = true;
|
||||
ESP_LOGD(TAG, "Output %s set to %.2f", this->id_, this->state);
|
||||
}
|
||||
|
@@ -1,7 +1,11 @@
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import i2c, sensor
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import DEVICE_CLASS_ILLUMINANCE, STATE_CLASS_MEASUREMENT, UNIT_LUX
|
||||
from esphome.components import i2c, sensor
|
||||
from esphome.const import (
|
||||
DEVICE_CLASS_ILLUMINANCE,
|
||||
STATE_CLASS_MEASUREMENT,
|
||||
UNIT_LUX,
|
||||
)
|
||||
|
||||
DEPENDENCIES = ["i2c"]
|
||||
CODEOWNERS = ["@ccutrer"]
|
||||
|
@@ -44,6 +44,10 @@ void Mutex::unlock() {}
|
||||
IRAM_ATTR InterruptLock::InterruptLock() { state_ = save_and_disable_interrupts(); }
|
||||
IRAM_ATTR InterruptLock::~InterruptLock() { restore_interrupts(state_); }
|
||||
|
||||
// RP2040 doesn't support lwIP core locking, so this is a no-op
|
||||
LwIPLock::LwIPLock() {}
|
||||
LwIPLock::~LwIPLock() {}
|
||||
|
||||
void get_mac_address_raw(uint8_t *mac) { // NOLINT(readability-non-const-parameter)
|
||||
#ifdef USE_WIFI
|
||||
WiFi.macAddress(mac);
|
||||
|
@@ -88,9 +88,9 @@ void Servo::internal_write(float value) {
|
||||
value = clamp(value, -1.0f, 1.0f);
|
||||
float level;
|
||||
if (value < 0.0) {
|
||||
level = lerp(-value, this->idle_level_, this->min_level_);
|
||||
level = std::lerp(this->idle_level_, this->min_level_, -value);
|
||||
} else {
|
||||
level = lerp(value, this->idle_level_, this->max_level_);
|
||||
level = std::lerp(this->idle_level_, this->max_level_, value);
|
||||
}
|
||||
this->output_->set_level(level);
|
||||
this->current_value_ = value;
|
||||
|
@@ -5,8 +5,13 @@ from esphome.config_helpers import Extend, Remove, merge_config
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS
|
||||
from esphome.yaml_util import ESPHomeDataBase, make_data_base
|
||||
|
||||
from .jinja import Jinja, JinjaStr, TemplateError, TemplateRuntimeError, has_jinja
|
||||
from .jinja import (
|
||||
Jinja,
|
||||
JinjaStr,
|
||||
has_jinja,
|
||||
TemplateError,
|
||||
TemplateRuntimeError,
|
||||
)
|
||||
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
@@ -1,7 +1,6 @@
|
||||
import logging
|
||||
import math
|
||||
import re
|
||||
|
||||
import jinja2 as jinja
|
||||
from jinja2.nativetypes import NativeEnvironment
|
||||
|
||||
|
@@ -74,13 +74,14 @@ def validate_local(config: ConfigType) -> ConfigType:
|
||||
return config
|
||||
|
||||
|
||||
def validate_ota_removed(config: ConfigType) -> ConfigType:
|
||||
# Only raise error if OTA is explicitly enabled (True)
|
||||
# If it's False or not specified, we can safely ignore it
|
||||
if config.get(CONF_OTA):
|
||||
def validate_ota(config: ConfigType) -> ConfigType:
|
||||
# The OTA option only accepts False to explicitly disable OTA for web_server
|
||||
# IMPORTANT: Setting ota: false ONLY affects the web_server component
|
||||
# The captive_portal component will still be able to perform OTA updates
|
||||
if CONF_OTA in config and config[CONF_OTA] is not False:
|
||||
raise cv.Invalid(
|
||||
f"The '{CONF_OTA}' option has been removed from 'web_server'. "
|
||||
f"Please use the new OTA platform structure instead:\n\n"
|
||||
f"The '{CONF_OTA}' option in 'web_server' only accepts 'false' to disable OTA. "
|
||||
f"To enable OTA, please use the new OTA platform structure instead:\n\n"
|
||||
f"ota:\n"
|
||||
f" - platform: web_server\n\n"
|
||||
f"See https://esphome.io/components/ota for more information."
|
||||
@@ -185,7 +186,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
web_server_base.WebServerBase
|
||||
),
|
||||
cv.Optional(CONF_INCLUDE_INTERNAL, default=False): cv.boolean,
|
||||
cv.Optional(CONF_OTA, default=False): cv.boolean,
|
||||
cv.Optional(CONF_OTA): cv.boolean,
|
||||
cv.Optional(CONF_LOG, default=True): cv.boolean,
|
||||
cv.Optional(CONF_LOCAL): cv.boolean,
|
||||
cv.Optional(CONF_SORTING_GROUPS): cv.ensure_list(sorting_group),
|
||||
@@ -203,7 +204,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
default_url,
|
||||
validate_local,
|
||||
validate_sorting_groups,
|
||||
validate_ota_removed,
|
||||
validate_ota,
|
||||
)
|
||||
|
||||
|
||||
@@ -288,7 +289,11 @@ async def to_code(config):
|
||||
cg.add(var.set_css_url(config[CONF_CSS_URL]))
|
||||
cg.add(var.set_js_url(config[CONF_JS_URL]))
|
||||
# OTA is now handled by the web_server OTA platform
|
||||
# The CONF_OTA option is kept only for backwards compatibility validation
|
||||
# The CONF_OTA option is kept to allow explicitly disabling OTA for web_server
|
||||
# IMPORTANT: This ONLY affects the web_server component, NOT captive_portal
|
||||
# Captive portal will still be able to perform OTA updates even when this is set
|
||||
if config.get(CONF_OTA) is False:
|
||||
cg.add_define("USE_WEBSERVER_OTA_DISABLED")
|
||||
cg.add(var.set_expose_log(config[CONF_LOG]))
|
||||
if config[CONF_ENABLE_PRIVATE_NETWORK_ACCESS]:
|
||||
cg.add_define("USE_WEBSERVER_PRIVATE_NETWORK_ACCESS")
|
||||
|
@@ -5,6 +5,10 @@
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
#ifdef USE_CAPTIVE_PORTAL
|
||||
#include "esphome/components/captive_portal/captive_portal.h"
|
||||
#endif
|
||||
|
||||
#ifdef USE_ARDUINO
|
||||
#ifdef USE_ESP8266
|
||||
#include <Updater.h>
|
||||
@@ -25,7 +29,22 @@ class OTARequestHandler : public AsyncWebHandler {
|
||||
void handleUpload(AsyncWebServerRequest *request, const String &filename, size_t index, uint8_t *data, size_t len,
|
||||
bool final) override;
|
||||
bool canHandle(AsyncWebServerRequest *request) const override {
|
||||
return request->url() == "/update" && request->method() == HTTP_POST;
|
||||
// Check if this is an OTA update request
|
||||
bool is_ota_request = request->url() == "/update" && request->method() == HTTP_POST;
|
||||
|
||||
#if defined(USE_WEBSERVER_OTA_DISABLED) && defined(USE_CAPTIVE_PORTAL)
|
||||
// IMPORTANT: USE_WEBSERVER_OTA_DISABLED only disables OTA for the web_server component
|
||||
// Captive portal can still perform OTA updates - check if request is from active captive portal
|
||||
// Note: global_captive_portal is the standard way components communicate in ESPHome
|
||||
return is_ota_request && captive_portal::global_captive_portal != nullptr &&
|
||||
captive_portal::global_captive_portal->is_active();
|
||||
#elif defined(USE_WEBSERVER_OTA_DISABLED)
|
||||
// OTA disabled for web_server and no captive portal compiled in
|
||||
return false;
|
||||
#else
|
||||
// OTA enabled for web_server
|
||||
return is_ota_request;
|
||||
#endif
|
||||
}
|
||||
|
||||
// NOLINTNEXTLINE(readability-identifier-naming)
|
||||
@@ -152,7 +171,7 @@ void OTARequestHandler::handleUpload(AsyncWebServerRequest *request, const Strin
|
||||
|
||||
// Finalize
|
||||
if (final) {
|
||||
ESP_LOGD(TAG, "OTA final chunk: index=%u, len=%u, total_read=%u, contentLength=%u", index, len,
|
||||
ESP_LOGD(TAG, "OTA final chunk: index=%zu, len=%zu, total_read=%u, contentLength=%zu", index, len,
|
||||
this->ota_read_length_, request->contentLength());
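The log-format change above switches the size_t-typed arguments (the chunk index, len and the request content length) from %u to %zu, the portable printf conversion for size_t; %u only happens to work on targets where size_t and unsigned int have the same width. A tiny illustration:

#include <cstdio>

void log_chunk(size_t index, size_t len) {
  // %zu always matches size_t, regardless of whether it is 32- or 64-bit.
  std::printf("chunk: index=%zu, len=%zu\n", index, len);
}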
|
||||
|
||||
// For Arduino framework, the Update library tracks expected size from firmware header
|
||||
|
@@ -268,10 +268,10 @@ std::string WebServer::get_config_json() {
|
||||
return json::build_json([this](JsonObject root) {
|
||||
root["title"] = App.get_friendly_name().empty() ? App.get_name() : App.get_friendly_name();
|
||||
root["comment"] = App.get_comment();
|
||||
#ifdef USE_WEBSERVER_OTA
|
||||
root["ota"] = true; // web_server OTA platform is configured
|
||||
#if defined(USE_WEBSERVER_OTA_DISABLED) || !defined(USE_WEBSERVER_OTA)
|
||||
root["ota"] = false; // Note: USE_WEBSERVER_OTA_DISABLED only affects web_server, not captive_portal
|
||||
#else
|
||||
root["ota"] = false;
|
||||
root["ota"] = true;
|
||||
#endif
|
||||
root["log"] = this->expose_log_;
|
||||
root["lang"] = "en";
|
||||
@@ -792,7 +792,7 @@ std::string WebServer::light_json(light::LightState *obj, JsonDetail start_confi
|
||||
|
||||
light::LightJSONSchema::dump_json(*obj, root);
|
||||
if (start_config == DETAIL_ALL) {
|
||||
JsonArray opt = root.createNestedArray("effects");
|
||||
JsonArray opt = root["effects"].to<JsonArray>();
|
||||
opt.add("None");
|
||||
for (auto const &option : obj->get_effects()) {
|
||||
opt.add(option->get_name());
|
||||
@@ -1238,7 +1238,7 @@ std::string WebServer::select_json(select::Select *obj, const std::string &value
|
||||
return json::build_json([this, obj, value, start_config](JsonObject root) {
|
||||
set_json_icon_state_value(root, obj, "select-" + obj->get_object_id(), value, value, start_config);
|
||||
if (start_config == DETAIL_ALL) {
|
||||
JsonArray opt = root.createNestedArray("option");
|
||||
JsonArray opt = root["option"].to<JsonArray>();
|
||||
for (auto &option : obj->traits.get_options()) {
|
||||
opt.add(option);
|
||||
}
|
||||
@@ -1322,6 +1322,7 @@ std::string WebServer::climate_all_json_generator(WebServer *web_server, void *s
|
||||
return web_server->climate_json((climate::Climate *) (source), DETAIL_ALL);
|
||||
}
|
||||
std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_config) {
|
||||
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
return json::build_json([this, obj, start_config](JsonObject root) {
|
||||
set_json_id(root, obj, "climate-" + obj->get_object_id(), start_config);
|
||||
const auto traits = obj->get_traits();
|
||||
@@ -1330,32 +1331,32 @@ std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_conf
|
||||
char buf[16];
|
||||
|
||||
if (start_config == DETAIL_ALL) {
|
||||
JsonArray opt = root.createNestedArray("modes");
|
||||
JsonArray opt = root["modes"].to<JsonArray>();
|
||||
for (climate::ClimateMode m : traits.get_supported_modes())
|
||||
opt.add(PSTR_LOCAL(climate::climate_mode_to_string(m)));
|
||||
if (!traits.get_supported_custom_fan_modes().empty()) {
|
||||
JsonArray opt = root.createNestedArray("fan_modes");
|
||||
JsonArray opt = root["fan_modes"].to<JsonArray>();
|
||||
for (climate::ClimateFanMode m : traits.get_supported_fan_modes())
|
||||
opt.add(PSTR_LOCAL(climate::climate_fan_mode_to_string(m)));
|
||||
}
|
||||
|
||||
if (!traits.get_supported_custom_fan_modes().empty()) {
|
||||
JsonArray opt = root.createNestedArray("custom_fan_modes");
|
||||
JsonArray opt = root["custom_fan_modes"].to<JsonArray>();
|
||||
for (auto const &custom_fan_mode : traits.get_supported_custom_fan_modes())
|
||||
opt.add(custom_fan_mode);
|
||||
}
|
||||
if (traits.get_supports_swing_modes()) {
|
||||
JsonArray opt = root.createNestedArray("swing_modes");
|
||||
JsonArray opt = root["swing_modes"].to<JsonArray>();
|
||||
for (auto swing_mode : traits.get_supported_swing_modes())
|
||||
opt.add(PSTR_LOCAL(climate::climate_swing_mode_to_string(swing_mode)));
|
||||
}
|
||||
if (traits.get_supports_presets() && obj->preset.has_value()) {
|
||||
JsonArray opt = root.createNestedArray("presets");
|
||||
JsonArray opt = root["presets"].to<JsonArray>();
|
||||
for (climate::ClimatePreset m : traits.get_supported_presets())
|
||||
opt.add(PSTR_LOCAL(climate::climate_preset_to_string(m)));
|
||||
}
|
||||
if (!traits.get_supported_custom_presets().empty() && obj->custom_preset.has_value()) {
|
||||
JsonArray opt = root.createNestedArray("custom_presets");
|
||||
JsonArray opt = root["custom_presets"].to<JsonArray>();
|
||||
for (auto const &custom_preset : traits.get_supported_custom_presets())
|
||||
opt.add(custom_preset);
|
||||
}
|
||||
@@ -1407,6 +1408,7 @@ std::string WebServer::climate_json(climate::Climate *obj, JsonDetail start_conf
|
||||
root["state"] = root["target_temperature"];
|
||||
}
|
||||
});
|
||||
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -1635,7 +1637,7 @@ std::string WebServer::event_json(event::Event *obj, const std::string &event_ty
|
||||
root["event_type"] = event_type;
|
||||
}
|
||||
if (start_config == DETAIL_ALL) {
|
||||
JsonArray event_types = root.createNestedArray("event_types");
|
||||
JsonArray event_types = root["event_types"].to<JsonArray>();
|
||||
for (auto const &event_type : obj->get_event_types()) {
|
||||
event_types.add(event_type);
|
||||
}
|
||||
@@ -1682,6 +1684,7 @@ std::string WebServer::update_all_json_generator(WebServer *web_server, void *so
|
||||
return web_server->update_json((update::UpdateEntity *) (source), DETAIL_STATE);
|
||||
}
|
||||
std::string WebServer::update_json(update::UpdateEntity *obj, JsonDetail start_config) {
|
||||
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
return json::build_json([this, obj, start_config](JsonObject root) {
|
||||
set_json_id(root, obj, "update-" + obj->get_object_id(), start_config);
|
||||
root["value"] = obj->update_info.latest_version;
|
||||
@@ -1707,6 +1710,7 @@ std::string WebServer::update_json(update::UpdateEntity *obj, JsonDetail start_c
|
||||
this->add_sorting_info_(root, obj);
|
||||
}
|
||||
});
|
||||
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
|
||||
}
|
||||
#endif
|
||||
|
||||
|
@@ -78,7 +78,7 @@ enum JsonDetail { DETAIL_ALL, DETAIL_STATE };
|
||||
This is because only minimal changes were made to the ESPAsyncWebServer lib_dep, it was undesirable to put deferred
|
||||
update logic into that library. We need one deferred queue per connection so instead of one AsyncEventSource with
|
||||
multiple clients, we have multiple event sources with one client each. This is slightly awkward which is why it's
|
||||
implemented in a more straightforward way for ESP-IDF. Arduino platform will eventually go away and this workaround
|
||||
implemented in a more straightforward way for ESP-IDF. Arudino platform will eventually go away and this workaround
|
||||
can be forgotten.
|
||||
*/
|
||||
#ifdef USE_ARDUINO
|
||||
|
@@ -192,7 +192,9 @@ void WebServer::handle_index_request(AsyncWebServerRequest *request) {
|
||||
|
||||
stream->print(F("</tbody></table><p>See <a href=\"https://esphome.io/web-api/index.html\">ESPHome Web API</a> for "
|
||||
"REST API documentation.</p>"));
|
||||
#ifdef USE_WEBSERVER_OTA
|
||||
#if defined(USE_WEBSERVER_OTA) && !defined(USE_WEBSERVER_OTA_DISABLED)
|
||||
// Show OTA form only if web_server OTA is not explicitly disabled
|
||||
// Note: USE_WEBSERVER_OTA_DISABLED only affects web_server, not captive_portal
|
||||
stream->print(F("<h2>OTA Update</h2><form method=\"POST\" action=\"/update\" enctype=\"multipart/form-data\"><input "
|
||||
"type=\"file\" name=\"update\"><input type=\"submit\" value=\"Update\"></form>"));
|
||||
#endif
|
||||
|
@@ -40,4 +40,4 @@ async def to_code(config):
|
||||
if CORE.is_esp8266:
|
||||
cg.add_library("ESP8266WiFi", None)
|
||||
# https://github.com/ESP32Async/ESPAsyncWebServer/blob/main/library.json
|
||||
cg.add_library("ESP32Async/ESPAsyncWebServer", "3.7.8")
|
||||
cg.add_library("ESP32Async/ESPAsyncWebServer", "3.7.10")
|
||||
|
@@ -389,10 +389,12 @@ AsyncEventSourceResponse::AsyncEventSourceResponse(const AsyncWebServerRequest *
|
||||
|
||||
#ifdef USE_WEBSERVER_SORTING
|
||||
for (auto &group : ws->sorting_groups_) {
|
||||
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
message = json::build_json([group](JsonObject root) {
|
||||
root["name"] = group.second.name;
|
||||
root["sorting_weight"] = group.second.weight;
|
||||
});
|
||||
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
|
||||
|
||||
// a (very) large number of these should be able to be queued initially without defer
|
||||
// since the only thing in the send buffer at this point is the initial ping/config
|
||||
|
@@ -20,10 +20,6 @@
|
||||
#include "lwip/dns.h"
|
||||
#include "lwip/err.h"
|
||||
|
||||
#ifdef CONFIG_LWIP_TCPIP_CORE_LOCKING
|
||||
#include "lwip/priv/tcpip_priv.h"
|
||||
#endif
|
||||
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/hal.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
@@ -295,25 +291,16 @@ bool WiFiComponent::wifi_sta_ip_config_(optional<ManualIP> manual_ip) {
|
||||
}
|
||||
|
||||
if (!manual_ip.has_value()) {
|
||||
// sntp_servermode_dhcp lwip/sntp.c (Required to lock TCPIP core functionality!)
|
||||
// https://github.com/esphome/issues/issues/6591
|
||||
// https://github.com/espressif/arduino-esp32/issues/10526
|
||||
#ifdef CONFIG_LWIP_TCPIP_CORE_LOCKING
|
||||
if (!sys_thread_tcpip(LWIP_CORE_LOCK_QUERY_HOLDER)) {
|
||||
LOCK_TCPIP_CORE();
|
||||
// sntp_servermode_dhcp lwip/sntp.c (Required to lock TCPIP core functionality!)
|
||||
// https://github.com/esphome/issues/issues/6591
|
||||
// https://github.com/espressif/arduino-esp32/issues/10526
|
||||
{
|
||||
LwIPLock lock;
|
||||
// lwIP starts the SNTP client if it gets an SNTP server from DHCP. We don't need the time, and more importantly,
|
||||
// the built-in SNTP client has a memory leak in certain situations. Disable this feature.
|
||||
// https://github.com/esphome/issues/issues/2299
|
||||
sntp_servermode_dhcp(false);
|
||||
}
|
||||
#endif
|
||||
|
||||
// lwIP starts the SNTP client if it gets an SNTP server from DHCP. We don't need the time, and more importantly,
|
||||
// the built-in SNTP client has a memory leak in certain situations. Disable this feature.
|
||||
// https://github.com/esphome/issues/issues/2299
|
||||
sntp_servermode_dhcp(false);
|
||||
|
||||
#ifdef CONFIG_LWIP_TCPIP_CORE_LOCKING
|
||||
if (sys_thread_tcpip(LWIP_CORE_LOCK_QUERY_HOLDER)) {
|
||||
UNLOCK_TCPIP_CORE();
|
||||
}
|
||||
#endif
|
||||
|
||||
// No manual IP is set; use DHCP client
|
||||
if (dhcp_status != ESP_NETIF_DHCP_STARTED) {
|
||||
|
@@ -1055,7 +1055,6 @@ def float_with_unit(quantity, regex_suffix, optional_unit=False):
|
||||
return validator
|
||||
|
||||
|
||||
bps = float_with_unit("bits per second", "(bps|bits/s|bit/s)?")
|
||||
frequency = float_with_unit("frequency", "(Hz|HZ|hz)?")
|
||||
resistance = float_with_unit("resistance", "(Ω|Ω|ohm|Ohm|OHM)?")
|
||||
current = float_with_unit("current", "(a|A|amp|Amp|amps|Amps|ampere|Ampere)?")
|
||||
|
@@ -4,7 +4,7 @@ from enum import Enum
|
||||
|
||||
from esphome.enum import StrEnum
|
||||
|
||||
__version__ = "2025.8.0-dev"
|
||||
__version__ = "2025.7.1"
|
||||
|
||||
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
|
||||
VALID_SUBSTITUTIONS_CHARACTERS = (
|
||||
|
@@ -309,6 +309,12 @@ void Application::disable_component_loop_(Component *component) {
|
||||
if (this->in_loop_ && i == this->current_loop_index_) {
|
||||
// Decrement so we'll process the swapped component next
|
||||
this->current_loop_index_--;
|
||||
// Update the loop start time to current time so the swapped component
|
||||
// gets correct timing instead of inheriting stale timing.
|
||||
// This prevents integer underflow in timing calculations by ensuring
|
||||
// the swapped component starts with a fresh timing reference, avoiding
|
||||
// errors caused by stale or wrapped timing values.
|
||||
this->loop_component_start_time_ = millis();
|
||||
}
|
||||
}
|
||||
return;
|
||||
|
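The comment added above explains why disable_component_loop_() resets the loop start time after swapping the current entry out of the active partition. A generic sketch of the same swap-remove-while-iterating pattern, with illustrative names rather than the Application class's actual members:

#include <cstdint>
#include <utility>
#include <vector>

// Deactivate items[i] by swapping it behind the active range [0, active_end).
void deactivate(std::vector<int> &items, size_t &active_end, size_t &current_index,
                size_t i, uint32_t &loop_start_time, uint32_t now) {
  std::swap(items[i], items[active_end - 1]);  // disabled item leaves the active range
  active_end--;
  if (i == current_index) {
    // The element swapped into slot i has not run yet: step the index back so the
    // enclosing loop (whose increment restores it) visits that slot next, and give
    // it a fresh timing reference instead of the disabled element's elapsed time.
    current_index--;
    loop_start_time = now;
  }
}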
@@ -264,6 +264,7 @@ void Component::set_retry(uint32_t initial_wait_time, uint8_t max_attempts, std:
|
||||
bool Component::is_failed() const { return (this->component_state_ & COMPONENT_STATE_MASK) == COMPONENT_STATE_FAILED; }
|
||||
bool Component::is_ready() const {
|
||||
return (this->component_state_ & COMPONENT_STATE_MASK) == COMPONENT_STATE_LOOP ||
|
||||
(this->component_state_ & COMPONENT_STATE_MASK) == COMPONENT_STATE_LOOP_DONE ||
|
||||
(this->component_state_ & COMPONENT_STATE_MASK) == COMPONENT_STATE_SETUP;
|
||||
}
|
||||
bool Component::can_proceed() { return true; }
|
||||
|
@@ -258,9 +258,7 @@ std::string format_hex(const uint8_t *data, size_t length) {
|
||||
std::string format_hex(const std::vector<uint8_t> &data) { return format_hex(data.data(), data.size()); }
|
||||
|
||||
static char format_hex_pretty_char(uint8_t v) { return v >= 10 ? 'A' + (v - 10) : '0' + v; }
|
||||
|
||||
// Shared implementation for uint8_t and string hex formatting
|
||||
static std::string format_hex_pretty_uint8(const uint8_t *data, size_t length, char separator, bool show_length) {
|
||||
std::string format_hex_pretty(const uint8_t *data, size_t length, char separator, bool show_length) {
|
||||
if (data == nullptr || length == 0)
|
||||
return "";
|
||||
std::string ret;
|
||||
@@ -276,10 +274,6 @@ static std::string format_hex_pretty_uint8(const uint8_t *data, size_t length, c
|
||||
return ret + " (" + std::to_string(length) + ")";
|
||||
return ret;
|
||||
}
|
||||
|
||||
std::string format_hex_pretty(const uint8_t *data, size_t length, char separator, bool show_length) {
|
||||
return format_hex_pretty_uint8(data, length, separator, show_length);
|
||||
}
|
||||
std::string format_hex_pretty(const std::vector<uint8_t> &data, char separator, bool show_length) {
|
||||
return format_hex_pretty(data.data(), data.size(), separator, show_length);
|
||||
}
|
||||
@@ -306,7 +300,20 @@ std::string format_hex_pretty(const std::vector<uint16_t> &data, char separator,
|
||||
return format_hex_pretty(data.data(), data.size(), separator, show_length);
|
||||
}
|
||||
std::string format_hex_pretty(const std::string &data, char separator, bool show_length) {
|
||||
return format_hex_pretty_uint8(reinterpret_cast<const uint8_t *>(data.data()), data.length(), separator, show_length);
|
||||
if (data.empty())
|
||||
return "";
|
||||
std::string ret;
|
||||
uint8_t multiple = separator ? 3 : 2; // 3 if separator is not \0, 2 otherwise
|
||||
ret.resize(multiple * data.length() - (separator ? 1 : 0));
|
||||
for (size_t i = 0; i < data.length(); i++) {
|
||||
ret[multiple * i] = format_hex_pretty_char((data[i] & 0xF0) >> 4);
|
||||
ret[multiple * i + 1] = format_hex_pretty_char(data[i] & 0x0F);
|
||||
if (separator && i != data.length() - 1)
|
||||
ret[multiple * i + 2] = separator;
|
||||
}
|
||||
if (show_length && data.length() > 4)
|
||||
return ret + " (" + std::to_string(data.length()) + ")";
|
||||
return ret;
|
||||
}
|
||||
|
||||
std::string format_bin(const uint8_t *data, size_t length) {
|
||||
|
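Both variants of the std::string overload shown above produce the same output shape: two uppercase hex digits per input byte, an optional single-character separator between bytes, and a trailing " (N)" length suffix once the input is longer than four bytes. A small sketch of that shape, assuming a '.' separator and the length suffix enabled as in the hunk:

#include <cstdint>
#include <string>
#include <vector>

// Expected shape of the pretty-printed output:
//   {0xDE, 0xAD, 0xBE, 0xEF, 0x01} -> "DE.AD.BE.EF.01 (5)"
//   {0xDE, 0xAD}                   -> "DE.AD"   (no length suffix for <= 4 bytes)
std::string hex_pretty(const std::vector<uint8_t> &data, char separator = '.') {
  static const char *digits = "0123456789ABCDEF";
  std::string ret;
  for (size_t i = 0; i < data.size(); i++) {
    if (i != 0 && separator)
      ret += separator;
    ret += digits[(data[i] & 0xF0) >> 4];
    ret += digits[data[i] & 0x0F];
  }
  if (data.size() > 4)
    ret += " (" + std::to_string(data.size()) + ")";
  return ret;
}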
@@ -683,6 +683,23 @@ class InterruptLock {
|
||||
#endif
|
||||
};
|
||||
|
||||
/** Helper class to lock the lwIP TCPIP core when making lwIP API calls from non-TCPIP threads.
|
||||
*
|
||||
* This is needed on multi-threaded platforms (ESP32) when CONFIG_LWIP_TCPIP_CORE_LOCKING is enabled.
|
||||
* It ensures thread-safe access to lwIP APIs.
|
||||
*
|
||||
* @note This follows the same pattern as InterruptLock - platform-specific implementations in helpers.cpp
|
||||
*/
|
||||
class LwIPLock {
|
||||
public:
|
||||
LwIPLock();
|
||||
~LwIPLock();
|
||||
|
||||
// Delete copy constructor and copy assignment operator to prevent accidental copying
|
||||
LwIPLock(const LwIPLock &) = delete;
|
||||
LwIPLock &operator=(const LwIPLock &) = delete;
|
||||
};
|
||||
|
||||
/** Helper class to request `loop()` to be called as fast as possible.
|
||||
*
|
||||
* Usually the ESPHome main loop runs at 60 Hz, sleeping in between invocations of `loop()` if necessary. When a higher
|
||||
|
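LwIPLock declared above is the RAII guard used in the MQTT DNS-lookup and Wi-Fi SNTP hunks earlier in this diff: construction acquires the lwIP TCPIP core lock where the platform needs it (ESP32 with CONFIG_LWIP_TCPIP_CORE_LOCKING), destruction releases it, and on single-threaded platforms such as RP2040 both are no-ops. A minimal usage sketch, assuming the esphome/core/helpers.h declaration from this hunk:

#include "esphome/core/helpers.h"  // LwIPLock

void do_lwip_work() {
  {
    esphome::LwIPLock lock;  // hold the TCPIP core lock only for the lwIP calls
    // ... call lwIP APIs that require core locking here,
    //     e.g. dns_gethostbyname(...) or sntp_servermode_dhcp(false) ...
  }  // lock released when the scope ends
  // continue with work that does not touch lwIP
}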
@@ -78,6 +78,8 @@ def run_platformio_cli(*args, **kwargs) -> str | int:
|
||||
os.environ.setdefault(
|
||||
"PLATFORMIO_LIBDEPS_DIR", os.path.abspath(CORE.relative_piolibdeps_path())
|
||||
)
|
||||
# Suppress Python syntax warnings from third-party scripts during compilation
|
||||
os.environ.setdefault("PYTHONWARNINGS", "ignore::SyntaxWarning")
|
||||
cmd = ["platformio"] + list(args)
|
||||
|
||||
if not CORE.verbose:
|
||||
|
@@ -147,6 +147,13 @@ class RedirectText:
|
||||
continue
|
||||
|
||||
self._write_color_replace(line)
|
||||
# Check for flash size error and provide helpful guidance
|
||||
if (
|
||||
"Error: The program size" in line
|
||||
and "is greater than maximum allowed" in line
|
||||
and (help_msg := get_esp32_arduino_flash_error_help())
|
||||
):
|
||||
self._write_color_replace(help_msg)
|
||||
else:
|
||||
self._write_color_replace(s)
|
||||
|
||||
@@ -309,3 +316,34 @@ def get_serial_ports() -> list[SerialPort]:
|
||||
|
||||
result.sort(key=lambda x: x.path)
|
||||
return result
|
||||
|
||||
|
||||
def get_esp32_arduino_flash_error_help() -> str | None:
|
||||
"""Returns helpful message when ESP32 with Arduino runs out of flash space."""
|
||||
from esphome.core import CORE
|
||||
|
||||
if not (CORE.is_esp32 and CORE.using_arduino):
|
||||
return None
|
||||
|
||||
from esphome.log import AnsiFore, color
|
||||
|
||||
return (
|
||||
"\n"
|
||||
+ color(
|
||||
AnsiFore.YELLOW,
|
||||
"💡 TIP: Your ESP32 with Arduino framework has run out of flash space.\n",
|
||||
)
|
||||
+ "\n"
|
||||
+ "To fix this, switch to the ESP-IDF framework which is more memory efficient:\n"
|
||||
+ "\n"
|
||||
+ "1. In your YAML configuration, modify the framework section:\n"
|
||||
+ "\n"
|
||||
+ " esp32:\n"
|
||||
+ " framework:\n"
|
||||
+ " type: esp-idf\n"
|
||||
+ "\n"
|
||||
+ "2. Clean build files and compile again\n"
|
||||
+ "\n"
|
||||
+ "Note: ESP-IDF uses less flash space and provides better performance.\n"
|
||||
+ "Some Arduino-specific libraries may need alternatives.\n\n"
|
||||
)
|
||||
|
@@ -162,6 +162,9 @@ def get_ini_content():
|
||||
# Sort to avoid changing build unflags order
|
||||
CORE.add_platformio_option("build_unflags", sorted(CORE.build_unflags))
|
||||
|
||||
# Add extra script for C++ flags
|
||||
CORE.add_platformio_option("extra_scripts", [f"pre:{CXX_FLAGS_FILE_NAME}"])
|
||||
|
||||
content = "[platformio]\n"
|
||||
content += f"description = ESPHome {__version__}\n"
|
||||
|
||||
@@ -222,6 +225,9 @@ def write_platformio_project():
|
||||
write_gitignore()
|
||||
write_platformio_ini(content)
|
||||
|
||||
# Write extra script for C++ specific flags
|
||||
write_cxx_flags_script()
|
||||
|
||||
|
||||
DEFINES_H_FORMAT = ESPHOME_H_FORMAT = """\
|
||||
#pragma once
|
||||
@@ -394,3 +400,20 @@ def write_gitignore():
|
||||
if not os.path.isfile(path):
|
||||
with open(file=path, mode="w", encoding="utf-8") as f:
|
||||
f.write(GITIGNORE_CONTENT)
|
||||
|
||||
|
||||
CXX_FLAGS_FILE_NAME = "cxx_flags.py"
|
||||
CXX_FLAGS_FILE_CONTENTS = """# Auto-generated ESPHome script for C++ specific compiler flags
|
||||
Import("env")
|
||||
|
||||
# Add C++ specific flags
|
||||
"""
|
||||
|
||||
|
||||
def write_cxx_flags_script() -> None:
|
||||
path = CORE.relative_build_path(CXX_FLAGS_FILE_NAME)
|
||||
contents = CXX_FLAGS_FILE_CONTENTS
|
||||
if not CORE.is_host:
|
||||
contents += 'env.Append(CXXFLAGS=["-Wno-volatile"])'
|
||||
contents += "\n"
|
||||
write_file_if_changed(path, contents)
|
||||
|
@@ -1,6 +1,5 @@
|
||||
from __future__ import annotations
|
||||
|
||||
# ruff format -- again
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
@@ -35,7 +35,7 @@ build_flags =
|
||||
lib_deps =
|
||||
esphome/noise-c@0.1.10 ; api
|
||||
improv/Improv@1.2.4 ; improv_serial / esp32_improv
|
||||
bblanchon/ArduinoJson@6.18.5 ; json
|
||||
bblanchon/ArduinoJson@7.4.2 ; json
|
||||
wjtje/qr-code-generator-library@1.7.0 ; qr_code
|
||||
functionpointer/arduino-MLX90393@1.0.2 ; mlx90393
|
||||
pavlodn/HaierProtocol@0.9.31 ; haier
|
||||
@@ -235,7 +235,7 @@ build_flags =
|
||||
-DUSE_ZEPHYR
|
||||
-DUSE_NRF52
|
||||
lib_deps =
|
||||
bblanchon/ArduinoJson@7.0.0 ; json
|
||||
bblanchon/ArduinoJson@7.4.2 ; json
|
||||
wjtje/qr-code-generator-library@1.7.0 ; qr_code
|
||||
pavlodn/HaierProtocol@0.9.31 ; haier
|
||||
functionpointer/arduino-MLX90393@1.0.2 ; mlx90393
|
||||
|
@@ -13,7 +13,7 @@ platformio==6.1.18 # When updating platformio, also update /docker/Dockerfile
|
||||
esptool==4.9.0
|
||||
click==8.1.7
|
||||
esphome-dashboard==20250514.0
|
||||
aioesphomeapi==35.0.1
|
||||
aioesphomeapi==34.2.1
|
||||
zeroconf==0.147.0
|
||||
puremagic==1.30
|
||||
ruamel.yaml==0.18.14 # dashboard_import
|
||||
|
@@ -1,6 +1,6 @@
|
||||
pylint==3.3.7
|
||||
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
|
||||
ruff==0.12.3 # also change in .pre-commit-config.yaml when updating
|
||||
ruff==0.12.2 # also change in .pre-commit-config.yaml when updating
|
||||
pyupgrade==3.20.0 # also change in .pre-commit-config.yaml when updating
|
||||
pre-commit
|
||||
|
||||
|
@@ -270,7 +270,7 @@ def lint_newline(fname):
|
||||
return "File contains Windows newline. Please set your editor to Unix newline mode."
|
||||
|
||||
|
||||
@lint_content_check(exclude=["*.svg", ".clang-tidy.hash"])
|
||||
@lint_content_check(exclude=["*.svg"])
|
||||
def lint_end_newline(fname, content):
|
||||
if content and not content.endswith("\n"):
|
||||
return "File does not end with a newline, please add an empty line at the end of the file."
|
||||
|
@@ -22,7 +22,6 @@ from helpers import (
|
||||
git_ls_files,
|
||||
load_idedata,
|
||||
print_error_for_file,
|
||||
print_file_list,
|
||||
root_path,
|
||||
temp_header_file,
|
||||
)
|
||||
@@ -219,14 +218,13 @@ def main():
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
idedata = load_idedata(args.environment)
|
||||
options = clang_options(idedata)
|
||||
|
||||
files = []
|
||||
for path in git_ls_files(["*.cpp"]):
|
||||
files.append(os.path.relpath(path, os.getcwd()))
|
||||
|
||||
# Print initial file count if it's large
|
||||
if len(files) > 50:
|
||||
print(f"Found {len(files)} total files to process")
|
||||
|
||||
if args.files:
|
||||
# Match against files specified on command-line
|
||||
file_name_re = re.compile("|".join(args.files))
|
||||
@@ -242,28 +240,10 @@ def main():
|
||||
|
||||
if args.split_num:
|
||||
files = split_list(files, args.split_num)[args.split_at - 1]
|
||||
print(f"Split {args.split_at}/{args.split_num}: checking {len(files)} files")
|
||||
|
||||
# Print file count before adding header file
|
||||
print(f"\nTotal files to check: {len(files)}")
|
||||
|
||||
# Early exit if no files to check
|
||||
if not files:
|
||||
print("No files to check - exiting early")
|
||||
return 0
|
||||
|
||||
# Only build header file if we have actual files to check
|
||||
if args.all_headers and args.split_at in (None, 1):
|
||||
build_all_include()
|
||||
files.insert(0, temp_header_file)
|
||||
print(f"Added all-include header file, new total: {len(files)}")
|
||||
|
||||
# Print final file list before loading idedata
|
||||
print_file_list(files, "Final files to process:")
|
||||
|
||||
# Load idedata and options only if we have files to check
|
||||
idedata = load_idedata(args.environment)
|
||||
options = clang_options(idedata)
|
||||
|
||||
tmpdir = None
|
||||
if args.fix:
|
||||
|
@@ -1,188 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Calculate and manage hash for clang-tidy configuration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import hashlib
|
||||
from pathlib import Path
|
||||
import re
|
||||
import sys
|
||||
|
||||
# Add the script directory to path to import helpers
|
||||
script_dir = Path(__file__).parent
|
||||
sys.path.insert(0, str(script_dir))
|
||||
|
||||
|
||||
def read_file_lines(path: Path) -> list[str]:
|
||||
"""Read lines from a file."""
|
||||
with open(path) as f:
|
||||
return f.readlines()
|
||||
|
||||
|
||||
def parse_requirement_line(line: str) -> tuple[str, str] | None:
|
||||
"""Parse a requirement line and return (package, original_line) or None.
|
||||
|
||||
Handles formats like:
|
||||
- package==1.2.3
|
||||
- package==1.2.3 # comment
|
||||
- package>=1.2.3,<2.0.0
|
||||
"""
|
||||
original_line = line.strip()
|
||||
|
||||
# Extract the part before any comment for parsing
|
||||
parse_line = line
|
||||
if "#" in parse_line:
|
||||
        parse_line = parse_line[: parse_line.index("#")]

    parse_line = parse_line.strip()
    if not parse_line:
        return None

    # Use regex to extract package name
    # This matches package names followed by version operators
    match = re.match(r"^([a-zA-Z0-9_-]+)(==|>=|<=|>|<|!=|~=)(.+)$", parse_line)
    if match:
        return (match.group(1), original_line)  # Return package name and original line

    return None


def get_clang_tidy_version_from_requirements() -> str:
    """Get clang-tidy version from requirements_dev.txt"""
    requirements_path = Path(__file__).parent.parent / "requirements_dev.txt"
    lines = read_file_lines(requirements_path)

    for line in lines:
        parsed = parse_requirement_line(line)
        if parsed and parsed[0] == "clang-tidy":
            # Return the original line (preserves comments)
            return parsed[1]

    return "clang-tidy version not found"


def extract_platformio_flags() -> str:
    """Extract clang-tidy related flags from platformio.ini"""
    flags: list[str] = []
    in_clangtidy_section = False

    platformio_path = Path(__file__).parent.parent / "platformio.ini"
    lines = read_file_lines(platformio_path)
    for line in lines:
        line = line.strip()
        if line.startswith("[flags:clangtidy]"):
            in_clangtidy_section = True
            continue
        elif line.startswith("[") and in_clangtidy_section:
            break
        elif in_clangtidy_section and line and not line.startswith("#"):
            flags.append(line)

    return "\n".join(sorted(flags))


def read_file_bytes(path: Path) -> bytes:
    """Read bytes from a file."""
    with open(path, "rb") as f:
        return f.read()


def calculate_clang_tidy_hash() -> str:
    """Calculate hash of clang-tidy configuration and version"""
    hasher = hashlib.sha256()

    # Hash .clang-tidy file
    clang_tidy_path = Path(__file__).parent.parent / ".clang-tidy"
    content = read_file_bytes(clang_tidy_path)
    hasher.update(content)

    # Hash clang-tidy version from requirements_dev.txt
    version = get_clang_tidy_version_from_requirements()
    hasher.update(version.encode())

    # Hash relevant platformio.ini sections
    pio_flags = extract_platformio_flags()
    hasher.update(pio_flags.encode())

    return hasher.hexdigest()


def read_stored_hash() -> str | None:
    """Read the stored hash from file"""
    hash_file = Path(__file__).parent.parent / ".clang-tidy.hash"
    if hash_file.exists():
        lines = read_file_lines(hash_file)
        return lines[0].strip() if lines else None
    return None


def write_file_content(path: Path, content: str) -> None:
    """Write content to a file."""
    with open(path, "w") as f:
        f.write(content)


def write_hash(hash_value: str) -> None:
    """Write hash to file"""
    hash_file = Path(__file__).parent.parent / ".clang-tidy.hash"
    write_file_content(hash_file, hash_value)


def main() -> None:
    parser = argparse.ArgumentParser(description="Manage clang-tidy configuration hash")
    parser.add_argument(
        "--check",
        action="store_true",
        help="Check if full scan needed (exit 0 if needed)",
    )
    parser.add_argument("--update", action="store_true", help="Update the hash file")
    parser.add_argument(
        "--update-if-changed",
        action="store_true",
        help="Update hash only if configuration changed (for pre-commit)",
    )
    parser.add_argument(
        "--verify", action="store_true", help="Verify hash matches (for CI)"
    )

    args = parser.parse_args()

    current_hash = calculate_clang_tidy_hash()
    stored_hash = read_stored_hash()

    if args.check:
        # Exit 0 if full scan needed (hash changed or no hash file)
        sys.exit(0 if current_hash != stored_hash else 1)

    elif args.update:
        write_hash(current_hash)
        print(f"Hash updated: {current_hash}")

    elif args.update_if_changed:
        if current_hash != stored_hash:
            write_hash(current_hash)
            print(f"Clang-tidy hash updated: {current_hash}")
            # Exit 0 so pre-commit can stage the file
            sys.exit(0)
        else:
            print("Clang-tidy hash unchanged")
            sys.exit(0)

    elif args.verify:
        if current_hash != stored_hash:
            print("ERROR: Clang-tidy configuration has changed but hash not updated!")
            print(f"Expected: {current_hash}")
            print(f"Found: {stored_hash}")
            print("\nPlease run: script/clang_tidy_hash.py --update")
            sys.exit(1)
        print("Hash verification passed")

    else:
        print(f"Current hash: {current_hash}")
        print(f"Stored hash: {stored_hash}")
        print(f"Match: {current_hash == stored_hash}")


if __name__ == "__main__":
    main()
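Note that the "--check" mode above communicates purely through its exit code: 0 means the configuration hash changed and a full clang-tidy scan is needed, 1 means it is unchanged. A minimal sketch of how a caller might consume that convention, assuming it is run from the repository root (the CI helper below does essentially the same thing via subprocess):

    import subprocess
    import sys

    # Exit code 0 from --check means "configuration changed, full scan needed".
    result = subprocess.run(
        [sys.executable, "script/clang_tidy_hash.py", "--check"], check=False
    )
    needs_full_scan = result.returncode == 0
    print(f"Full clang-tidy scan needed: {needs_full_scan}")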
@@ -1,245 +0,0 @@
#!/usr/bin/env python3
"""Determine which CI jobs should run based on changed files.

This script is a centralized way to determine which CI jobs need to run based on
what files have changed. It outputs JSON with the following structure:

{
    "integration_tests": true/false,
    "clang_tidy": true/false,
    "clang_format": true/false,
    "python_linters": true/false,
    "changed_components": ["component1", "component2", ...],
    "component_test_count": 5
}

The CI workflow uses this information to:
- Skip or run integration tests
- Skip or run clang-tidy (and whether to do a full scan)
- Skip or run clang-format
- Skip or run Python linters (ruff, flake8, pylint, pyupgrade)
- Determine which components to test individually
- Decide how to split component tests (if there are many)

Usage:
    python script/determine-jobs.py [-b BRANCH]

Options:
    -b, --branch BRANCH  Branch to compare against (default: dev)
"""

from __future__ import annotations

import argparse
import json
import os
from pathlib import Path
import subprocess
import sys
from typing import Any

from helpers import (
    CPP_FILE_EXTENSIONS,
    ESPHOME_COMPONENTS_PATH,
    PYTHON_FILE_EXTENSIONS,
    changed_files,
    get_all_dependencies,
    get_components_from_integration_fixtures,
    parse_list_components_output,
    root_path,
)


def should_run_integration_tests(branch: str | None = None) -> bool:
    """Determine if integration tests should run based on changed files.

    This function is used by the CI workflow to intelligently skip integration tests when they're
    not needed, saving significant CI time and resources.

    Integration tests will run when ANY of the following conditions are met:

    1. Core C++ files changed (esphome/core/*)
       - Any .cpp, .h, .tcc files in the core directory
       - These files contain fundamental functionality used throughout ESPHome
       - Examples: esphome/core/component.cpp, esphome/core/application.h

    2. Core Python files changed (esphome/core/*.py)
       - Only .py files in the esphome/core/ directory
       - These are core Python files that affect the entire system
       - Examples: esphome/core/config.py, esphome/core/__init__.py
       - NOT included: esphome/*.py, esphome/dashboard/*.py, esphome/components/*/*.py

    3. Integration test files changed
       - Any file in tests/integration/ directory
       - This includes test files themselves and fixture YAML files
       - Examples: tests/integration/test_api.py, tests/integration/fixtures/api.yaml

    4. Components used by integration tests (or their dependencies) changed
       - The function parses all YAML files in tests/integration/fixtures/
       - Extracts which components are used in integration tests
       - Recursively finds all dependencies of those components
       - If any of these components have changes, tests must run
       - Example: If api.yaml uses 'sensor' and 'api' components, and 'api' depends on 'socket',
         then changes to sensor/, api/, or socket/ components trigger tests

    Args:
        branch: Branch to compare against. If None, uses default.

    Returns:
        True if integration tests should run, False otherwise.
    """
    files = changed_files(branch)

    # Check if any core files changed (esphome/core/*)
    for file in files:
        if file.startswith("esphome/core/"):
            return True

    # Check if any integration test files changed
    if any("tests/integration" in file for file in files):
        return True

    # Get all components used in integration tests and their dependencies
    fixture_components = get_components_from_integration_fixtures()
    all_required_components = get_all_dependencies(fixture_components)

    # Check if any required components changed
    for file in files:
        if file.startswith(ESPHOME_COMPONENTS_PATH):
            parts = file.split("/")
            if len(parts) >= 3:
                component = parts[2]
                if component in all_required_components:
                    return True

    return False


def should_run_clang_tidy(branch: str | None = None) -> bool:
    """Determine if clang-tidy should run based on changed files.

    This function is used by the CI workflow to intelligently skip clang-tidy checks when they're
    not needed, saving significant CI time and resources.

    Clang-tidy will run when ANY of the following conditions are met:

    1. Clang-tidy configuration changed
       - The hash of .clang-tidy configuration file has changed
       - The hash includes the .clang-tidy file, clang-tidy version from requirements_dev.txt,
         and relevant platformio.ini sections
       - When configuration changes, a full scan is needed to ensure all code complies
         with the new rules
       - Detected by script/clang_tidy_hash.py --check returning exit code 0

    2. Any C++ source files changed
       - Any file with C++ extensions: .cpp, .h, .hpp, .cc, .cxx, .c, .tcc
       - Includes files anywhere in the repository, not just in esphome/
       - This ensures all C++ code is checked, including tests, examples, etc.
       - Examples: esphome/core/component.cpp, tests/custom/my_component.h

    If the hash check fails for any reason, clang-tidy runs as a safety measure to ensure
    code quality is maintained.

    Args:
        branch: Branch to compare against. If None, uses default.

    Returns:
        True if clang-tidy should run, False otherwise.
    """
    # First check if clang-tidy configuration changed (full scan needed)
    try:
        result = subprocess.run(
            [os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"],
            capture_output=True,
            check=False,
        )
        # Exit 0 means hash changed (full scan needed)
        if result.returncode == 0:
            return True
    except Exception:
        # If hash check fails, run clang-tidy to be safe
        return True

    return _any_changed_file_endswith(branch, CPP_FILE_EXTENSIONS)


def should_run_clang_format(branch: str | None = None) -> bool:
    """Determine if clang-format should run based on changed files.

    This function is used by the CI workflow to skip clang-format checks when no C++ files
    have changed, saving CI time and resources.

    Clang-format will run when any C++ source files have changed.

    Args:
        branch: Branch to compare against. If None, uses default.

    Returns:
        True if clang-format should run, False otherwise.
    """
    return _any_changed_file_endswith(branch, CPP_FILE_EXTENSIONS)


def should_run_python_linters(branch: str | None = None) -> bool:
    """Determine if Python linters (ruff, flake8, pylint, pyupgrade) should run based on changed files.

    This function is used by the CI workflow to skip Python linting checks when no Python files
    have changed, saving CI time and resources.

    Python linters will run when any Python source files have changed.

    Args:
        branch: Branch to compare against. If None, uses default.

    Returns:
        True if Python linters should run, False otherwise.
    """
    return _any_changed_file_endswith(branch, PYTHON_FILE_EXTENSIONS)


def _any_changed_file_endswith(branch: str | None, extensions: tuple[str, ...]) -> bool:
    """Check if a changed file ends with any of the specified extensions."""
    return any(file.endswith(extensions) for file in changed_files(branch))


def main() -> None:
    """Main function that determines which CI jobs to run."""
    parser = argparse.ArgumentParser(
        description="Determine which CI jobs should run based on changed files"
    )
    parser.add_argument(
        "-b", "--branch", help="Branch to compare changed files against"
    )
    args = parser.parse_args()

    # Determine what should run
    run_integration = should_run_integration_tests(args.branch)
    run_clang_tidy = should_run_clang_tidy(args.branch)
    run_clang_format = should_run_clang_format(args.branch)
    run_python_linters = should_run_python_linters(args.branch)

    # Get changed components using list-components.py for exact compatibility
    script_path = Path(__file__).parent / "list-components.py"
    cmd = [sys.executable, str(script_path), "--changed"]
    if args.branch:
        cmd.extend(["-b", args.branch])

    result = subprocess.run(cmd, capture_output=True, text=True, check=True)
    changed_components = parse_list_components_output(result.stdout)

    # Build output
    output: dict[str, Any] = {
        "integration_tests": run_integration,
        "clang_tidy": run_clang_tidy,
        "clang_format": run_clang_format,
        "python_linters": run_python_linters,
        "changed_components": changed_components,
        "component_test_count": len(changed_components),
    }

    # Output as JSON
    print(json.dumps(output))


if __name__ == "__main__":
    main()
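The JSON this script prints is consumed by the CI workflow (typically exposed as a job output there). Purely as an illustration of the output contract described in the module docstring, a small Python sketch of invoking the script from the repository root and reading its fields:

    import json
    import subprocess
    import sys

    # Run the job-selection script and parse its single-line JSON output.
    proc = subprocess.run(
        [sys.executable, "script/determine-jobs.py", "-b", "dev"],
        capture_output=True, text=True, check=True,
    )
    jobs = json.loads(proc.stdout)
    if jobs["clang_tidy"]:
        print("clang-tidy job will run")
    print(f"{jobs['component_test_count']} components to test: {jobs['changed_components']}")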
@@ -1,14 +1,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import cache
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
import colorama
|
||||
|
||||
@@ -17,42 +11,14 @@ basepath = os.path.join(root_path, "esphome")
|
||||
temp_folder = os.path.join(root_path, ".temp")
|
||||
temp_header_file = os.path.join(temp_folder, "all-include.cpp")
|
||||
|
||||
# C++ file extensions used for clang-tidy and clang-format checks
|
||||
CPP_FILE_EXTENSIONS = (".cpp", ".h", ".hpp", ".cc", ".cxx", ".c", ".tcc")
|
||||
|
||||
# Python file extensions
|
||||
PYTHON_FILE_EXTENSIONS = (".py", ".pyi")
|
||||
|
||||
# YAML file extensions
|
||||
YAML_FILE_EXTENSIONS = (".yaml", ".yml")
|
||||
|
||||
# Component path prefix
|
||||
ESPHOME_COMPONENTS_PATH = "esphome/components/"
|
||||
|
||||
|
||||
def parse_list_components_output(output: str) -> list[str]:
|
||||
"""Parse the output from list-components.py script.
|
||||
|
||||
The script outputs one component name per line.
|
||||
|
||||
Args:
|
||||
output: The stdout from list-components.py
|
||||
|
||||
Returns:
|
||||
List of component names, or empty list if no output
|
||||
"""
|
||||
if not output or not output.strip():
|
||||
return []
|
||||
return [c.strip() for c in output.strip().split("\n") if c.strip()]
|
||||
|
||||
|
||||
def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str:
|
||||
def styled(color, msg, reset=True):
|
||||
prefix = "".join(color) if isinstance(color, tuple) else color
|
||||
suffix = colorama.Style.RESET_ALL if reset else ""
|
||||
return prefix + msg + suffix
|
||||
|
||||
|
||||
def print_error_for_file(file: str, body: str | None) -> None:
|
||||
def print_error_for_file(file, body):
|
||||
print(
|
||||
styled(colorama.Fore.GREEN, "### File ")
|
||||
+ styled((colorama.Fore.GREEN, colorama.Style.BRIGHT), file)
|
||||
@@ -63,22 +29,17 @@ def print_error_for_file(file: str, body: str | None) -> None:
|
||||
print()
|
||||
|
||||
|
||||
def build_all_include() -> None:
|
||||
def build_all_include():
|
||||
# Build a cpp file that includes all header files in this repo.
|
||||
# Otherwise header-only integrations would not be tested by clang-tidy
|
||||
|
||||
# Use git ls-files to find all .h files in the esphome directory
|
||||
# This is much faster than walking the filesystem
|
||||
cmd = ["git", "ls-files", "esphome/**/*.h"]
|
||||
proc = subprocess.run(cmd, capture_output=True, text=True, check=True)
|
||||
|
||||
# Process git output - git already returns paths relative to repo root
|
||||
headers = [
|
||||
f'#include "{include_p}"'
|
||||
for line in proc.stdout.strip().split("\n")
|
||||
if (include_p := line.replace(os.path.sep, "/"))
|
||||
]
|
||||
|
||||
headers = []
|
||||
for path in walk_files(basepath):
|
||||
filetypes = (".h",)
|
||||
ext = os.path.splitext(path)[1]
|
||||
if ext in filetypes:
|
||||
path = os.path.relpath(path, root_path)
|
||||
include_p = path.replace(os.path.sep, "/")
|
||||
headers.append(f'#include "{include_p}"')
|
||||
headers.sort()
|
||||
headers.append("")
|
||||
content = "\n".join(headers)
|
||||
@@ -87,87 +48,29 @@ def build_all_include() -> None:
|
||||
p.write_text(content, encoding="utf-8")
|
||||
|
||||
|
||||
def get_output(*args: str) -> str:
|
||||
def walk_files(path):
|
||||
for root, _, files in os.walk(path):
|
||||
for name in files:
|
||||
yield os.path.join(root, name)
|
||||
|
||||
|
||||
def get_output(*args):
|
||||
with subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
|
||||
output, _ = proc.communicate()
|
||||
return output.decode("utf-8")
|
||||
|
||||
|
||||
def get_err(*args: str) -> str:
|
||||
def get_err(*args):
|
||||
with subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
|
||||
_, err = proc.communicate()
|
||||
return err.decode("utf-8")
|
||||
|
||||
|
||||
def splitlines_no_ends(string: str) -> list[str]:
|
||||
def splitlines_no_ends(string):
|
||||
return [s.strip() for s in string.splitlines()]
|
||||
|
||||
|
||||
def _get_pr_number_from_github_env() -> str | None:
|
||||
"""Extract PR number from GitHub environment variables.
|
||||
|
||||
Returns:
|
||||
PR number as string, or None if not found
|
||||
"""
|
||||
# First try parsing GITHUB_REF (fastest)
|
||||
github_ref = os.environ.get("GITHUB_REF", "")
|
||||
if "/pull/" in github_ref:
|
||||
return github_ref.split("/pull/")[1].split("/")[0]
|
||||
|
||||
# Fallback to GitHub event file
|
||||
github_event_path = os.environ.get("GITHUB_EVENT_PATH")
|
||||
if github_event_path and os.path.exists(github_event_path):
|
||||
with open(github_event_path) as f:
|
||||
event_data = json.load(f)
|
||||
pr_data = event_data.get("pull_request", {})
|
||||
if pr_number := pr_data.get("number"):
|
||||
return str(pr_number)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@cache
|
||||
def _get_changed_files_github_actions() -> list[str] | None:
|
||||
"""Get changed files in GitHub Actions environment.
|
||||
|
||||
Returns:
|
||||
List of changed files, or None if should fall back to git method
|
||||
"""
|
||||
event_name = os.environ.get("GITHUB_EVENT_NAME")
|
||||
|
||||
# For pull requests
|
||||
if event_name == "pull_request":
|
||||
pr_number = _get_pr_number_from_github_env()
|
||||
if pr_number:
|
||||
# Use GitHub CLI to get changed files directly
|
||||
cmd = ["gh", "pr", "diff", pr_number, "--name-only"]
|
||||
return _get_changed_files_from_command(cmd)
|
||||
|
||||
# For pushes (including squash-and-merge)
|
||||
elif event_name == "push":
|
||||
# For push events, we want to check what changed in this commit
|
||||
try:
|
||||
# Get the changed files in the last commit
|
||||
return _get_changed_files_from_command(
|
||||
["git", "diff", "HEAD~1..HEAD", "--name-only"]
|
||||
)
|
||||
except: # noqa: E722
|
||||
# Fall back to the original method if this fails
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def changed_files(branch: str | None = None) -> list[str]:
|
||||
# In GitHub Actions, we can use the API to get changed files more efficiently
|
||||
if os.environ.get("GITHUB_ACTIONS") == "true":
|
||||
github_files = _get_changed_files_github_actions()
|
||||
if github_files is not None:
|
||||
return github_files
|
||||
|
||||
# Original implementation for local development
|
||||
if not branch: # Treat None and empty string the same
|
||||
branch = "dev"
|
||||
def changed_files(branch="dev"):
|
||||
check_remotes = ["upstream", "origin"]
|
||||
check_remotes.extend(splitlines_no_ends(get_output("git", "remote")))
|
||||
for remote in check_remotes:
|
||||
@@ -180,165 +83,25 @@ def changed_files(branch: str | None = None) -> list[str]:
|
||||
pass
|
||||
else:
|
||||
raise ValueError("Git not configured")
|
||||
return _get_changed_files_from_command(["git", "diff", merge_base, "--name-only"])
|
||||
command = ["git", "diff", merge_base, "--name-only"]
|
||||
changed = splitlines_no_ends(get_output(*command))
|
||||
changed = [os.path.relpath(f, os.getcwd()) for f in changed]
|
||||
changed.sort()
|
||||
return changed
|
||||
|
||||
|
||||
def _get_changed_files_from_command(command: list[str]) -> list[str]:
|
||||
"""Run a git command to get changed files and return them as a list."""
|
||||
proc = subprocess.run(command, capture_output=True, text=True, check=False)
|
||||
if proc.returncode != 0:
|
||||
raise Exception(f"Command failed: {' '.join(command)}\nstderr: {proc.stderr}")
|
||||
|
||||
changed_files = splitlines_no_ends(proc.stdout)
|
||||
changed_files = [os.path.relpath(f, os.getcwd()) for f in changed_files if f]
|
||||
changed_files.sort()
|
||||
return changed_files
|
||||
|
||||
|
||||
def get_changed_components() -> list[str] | None:
|
||||
"""Get list of changed components using list-components.py script.
|
||||
|
||||
This function:
|
||||
1. First checks if any core C++/header files (esphome/core/*.{cpp,h,hpp,cc,cxx,c}) changed - if so, returns None
|
||||
2. Otherwise delegates to ./script/list-components.py --changed which:
|
||||
- Analyzes all changed files
|
||||
- Determines which components are affected (including dependencies)
|
||||
- Returns a list of component names that need to be checked
|
||||
|
||||
Returns:
|
||||
- None: Core C++/header files changed, need full scan
|
||||
- Empty list: No components changed (only non-component files changed)
|
||||
- List of strings: Names of components that need checking (e.g., ["wifi", "mqtt"])
|
||||
"""
|
||||
# Check if any core C++ or header files changed first
|
||||
def filter_changed(files):
|
||||
changed = changed_files()
|
||||
core_cpp_changed = any(
|
||||
f.startswith("esphome/core/")
|
||||
and f.endswith(CPP_FILE_EXTENSIONS[:-1]) # Exclude .tcc for core files
|
||||
for f in changed
|
||||
)
|
||||
if core_cpp_changed:
|
||||
print("Core C++/header files changed - will run full clang-tidy scan")
|
||||
return None
|
||||
|
||||
# Use list-components.py to get changed components
|
||||
script_path = os.path.join(root_path, "script", "list-components.py")
|
||||
cmd = [script_path, "--changed"]
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
cmd, capture_output=True, text=True, check=True, close_fds=False
|
||||
)
|
||||
return parse_list_components_output(result.stdout)
|
||||
except subprocess.CalledProcessError:
|
||||
# If the script fails, fall back to full scan
|
||||
print("Could not determine changed components - will run full clang-tidy scan")
|
||||
return None
|
||||
|
||||
|
||||
def _filter_changed_ci(files: list[str]) -> list[str]:
|
||||
"""Filter files based on changed components in CI environment.
|
||||
|
||||
This function implements intelligent filtering to reduce CI runtime by only
|
||||
checking files that could be affected by the changes. It handles three scenarios:
|
||||
|
||||
1. Core C++/header files changed (returns None from get_changed_components):
|
||||
- Triggered when any C++/header file in esphome/core/ is modified
|
||||
- Action: Check ALL files (full scan)
|
||||
- Reason: Core C++/header files are used throughout the codebase
|
||||
|
||||
2. No components changed (returns empty list from get_changed_components):
|
||||
- Triggered when only non-component files changed (e.g., scripts, configs)
|
||||
- Action: Check only the specific non-component files that changed
|
||||
- Example: If only script/clang-tidy changed, only check that file
|
||||
|
||||
3. Specific components changed (returns list of component names):
|
||||
- Component detection done by: ./script/list-components.py --changed
|
||||
- That script analyzes which components are affected by the changed files
|
||||
INCLUDING their dependencies
|
||||
- Action: Check ALL files in each component that list-components.py identifies
|
||||
- Example: If wifi.cpp changed, list-components.py might return ["wifi", "network"]
|
||||
if network depends on wifi. We then check ALL files in both
|
||||
esphome/components/wifi/ and esphome/components/network/
|
||||
- Reason: Component files often have interdependencies (headers, base classes)
|
||||
|
||||
Args:
|
||||
files: List of all files that clang-tidy would normally check
|
||||
|
||||
Returns:
|
||||
Filtered list of files to check
|
||||
"""
|
||||
components = get_changed_components()
|
||||
if components is None:
|
||||
# Scenario 1: Core files changed or couldn't determine components
|
||||
# Action: Return all files for full scan
|
||||
return files
|
||||
|
||||
if not components:
|
||||
# Scenario 2: No components changed - only non-component files changed
|
||||
# Action: Check only the specific non-component files that changed
|
||||
changed = changed_files()
|
||||
files = [
|
||||
f
|
||||
for f in files
|
||||
if f in changed and not f.startswith(ESPHOME_COMPONENTS_PATH)
|
||||
]
|
||||
if not files:
|
||||
print("No files changed")
|
||||
return files
|
||||
|
||||
# Scenario 3: Specific components changed
|
||||
# Action: Check ALL files in each changed component
|
||||
# Convert component list to set for O(1) lookups
|
||||
component_set = set(components)
|
||||
print(f"Changed components: {', '.join(sorted(components))}")
|
||||
|
||||
# The 'files' parameter contains ALL files in the codebase that clang-tidy would check.
|
||||
# We filter this down to only files in the changed components.
|
||||
# We check ALL files in each changed component (not just the changed files)
|
||||
# because changes in one file can affect other files in the same component.
|
||||
filtered_files = []
|
||||
for f in files:
|
||||
if f.startswith(ESPHOME_COMPONENTS_PATH):
|
||||
# Check if file belongs to any of the changed components
|
||||
parts = f.split("/")
|
||||
if len(parts) >= 3 and parts[2] in component_set:
|
||||
filtered_files.append(f)
|
||||
|
||||
return filtered_files
|
||||
|
||||
|
||||
def _filter_changed_local(files: list[str]) -> list[str]:
|
||||
"""Filter files based on git changes for local development.
|
||||
|
||||
Args:
|
||||
files: List of all files to filter
|
||||
|
||||
Returns:
|
||||
Filtered list of files to check
|
||||
"""
|
||||
# For local development, just check changed files directly
|
||||
changed = changed_files()
|
||||
return [f for f in files if f in changed]
|
||||
|
||||
|
||||
def filter_changed(files: list[str]) -> list[str]:
|
||||
"""Filter files to only those that changed or are in changed components.
|
||||
|
||||
Args:
|
||||
files: List of files to filter
|
||||
"""
|
||||
# When running from CI, use component-based filtering
|
||||
if os.environ.get("GITHUB_ACTIONS") == "true":
|
||||
files = _filter_changed_ci(files)
|
||||
else:
|
||||
files = _filter_changed_local(files)
|
||||
|
||||
print_file_list(files, "Files to check after filtering:")
|
||||
files = [f for f in files if f in changed]
|
||||
print("Changed files:")
|
||||
if not files:
|
||||
print(" No changed files!")
|
||||
for c in files:
|
||||
print(f" {c}")
|
||||
return files
|
||||
|
||||
|
||||
def filter_grep(files: list[str], value: str) -> list[str]:
|
||||
def filter_grep(files, value):
|
||||
matched = []
|
||||
for file in files:
|
||||
with open(file, encoding="utf-8") as handle:
|
||||
@@ -348,7 +111,7 @@ def filter_grep(files: list[str], value: str) -> list[str]:
|
||||
return matched
|
||||
|
||||
|
||||
def git_ls_files(patterns: list[str] | None = None) -> dict[str, int]:
|
||||
def git_ls_files(patterns=None):
|
||||
command = ["git", "ls-files", "-s"]
|
||||
if patterns is not None:
|
||||
command.extend(patterns)
|
||||
@@ -358,10 +121,7 @@ def git_ls_files(patterns: list[str] | None = None) -> dict[str, int]:
|
||||
return {s[3].strip(): int(s[0]) for s in lines}
|
||||
|
||||
|
||||
def load_idedata(environment: str) -> dict[str, Any]:
|
||||
start_time = time.time()
|
||||
print(f"Loading IDE data for environment '{environment}'...")
|
||||
|
||||
def load_idedata(environment):
|
||||
platformio_ini = Path(root_path) / "platformio.ini"
|
||||
temp_idedata = Path(temp_folder) / f"idedata-{environment}.json"
|
||||
changed = False
|
||||
@@ -382,10 +142,7 @@ def load_idedata(environment: str) -> dict[str, Any]:
|
||||
changed = True
|
||||
|
||||
if not changed:
|
||||
data = json.loads(temp_idedata.read_text())
|
||||
elapsed = time.time() - start_time
|
||||
print(f"IDE data loaded from cache in {elapsed:.2f} seconds")
|
||||
return data
|
||||
return json.loads(temp_idedata.read_text())
|
||||
|
||||
# ensure temp directory exists before running pio, as it writes sdkconfig to it
|
||||
Path(temp_folder).mkdir(exist_ok=True)
|
||||
@@ -401,9 +158,6 @@ def load_idedata(environment: str) -> dict[str, Any]:
|
||||
match = re.search(r'{\s*".*}', stdout.decode("utf-8"))
|
||||
data = json.loads(match.group())
|
||||
temp_idedata.write_text(json.dumps(data, indent=2) + "\n")
|
||||
|
||||
elapsed = time.time() - start_time
|
||||
print(f"IDE data generated and cached in {elapsed:.2f} seconds")
|
||||
return data
|
||||
|
||||
|
||||
@@ -442,29 +196,6 @@ def get_binary(name: str, version: str) -> str:
|
||||
raise
|
||||
|
||||
|
||||
def print_file_list(
|
||||
files: list[str], title: str = "Files:", max_files: int = 20
|
||||
) -> None:
|
||||
"""Print a list of files with optional truncation for large lists.
|
||||
|
||||
Args:
|
||||
files: List of file paths to print
|
||||
title: Title to print before the list
|
||||
max_files: Maximum number of files to show before truncating (default: 20)
|
||||
"""
|
||||
print(title)
|
||||
if not files:
|
||||
print(" No files to check!")
|
||||
elif len(files) <= max_files:
|
||||
for f in sorted(files):
|
||||
print(f" {f}")
|
||||
else:
|
||||
sorted_files = sorted(files)
|
||||
for f in sorted_files[:10]:
|
||||
print(f" {f}")
|
||||
print(f" ... and {len(files) - 10} more files")
|
||||
|
||||
|
||||
def get_usable_cpu_count() -> int:
|
||||
"""Return the number of CPUs that can be used for processes.
|
||||
|
||||
@@ -474,83 +205,3 @@ def get_usable_cpu_count() -> int:
|
||||
return (
|
||||
os.process_cpu_count() if hasattr(os, "process_cpu_count") else os.cpu_count()
|
||||
)
|
||||
|
||||
|
||||
def get_all_dependencies(component_names: set[str]) -> set[str]:
|
||||
"""Get all dependencies for a set of components.
|
||||
|
||||
Args:
|
||||
component_names: Set of component names to get dependencies for
|
||||
|
||||
Returns:
|
||||
Set of all components including dependencies and auto-loaded components
|
||||
"""
|
||||
from esphome.const import KEY_CORE
|
||||
from esphome.core import CORE
|
||||
from esphome.loader import get_component
|
||||
|
||||
all_components: set[str] = set(component_names)
|
||||
|
||||
# Reset CORE to ensure clean state
|
||||
CORE.reset()
|
||||
|
||||
# Set up fake config path for component loading
|
||||
root = Path(__file__).parent.parent
|
||||
CORE.config_path = str(root)
|
||||
CORE.data[KEY_CORE] = {}
|
||||
|
||||
# Keep finding dependencies until no new ones are found
|
||||
while True:
|
||||
new_components: set[str] = set()
|
||||
|
||||
for comp_name in all_components:
|
||||
comp = get_component(comp_name)
|
||||
if not comp:
|
||||
continue
|
||||
|
||||
# Add dependencies (extract component name before '.')
|
||||
new_components.update(dep.split(".")[0] for dep in comp.dependencies)
|
||||
|
||||
# Add auto_load components
|
||||
new_components.update(comp.auto_load)
|
||||
|
||||
# Check if we found any new components
|
||||
new_components -= all_components
|
||||
if not new_components:
|
||||
break
|
||||
|
||||
all_components.update(new_components)
|
||||
|
||||
return all_components
|
||||
|
||||
|
||||
def get_components_from_integration_fixtures() -> set[str]:
|
||||
"""Extract all components used in integration test fixtures.
|
||||
|
||||
Returns:
|
||||
Set of component names used in integration test fixtures
|
||||
"""
|
||||
import yaml
|
||||
|
||||
components: set[str] = set()
|
||||
fixtures_dir = Path(__file__).parent.parent / "tests" / "integration" / "fixtures"
|
||||
|
||||
for yaml_file in fixtures_dir.glob("*.yaml"):
|
||||
with open(yaml_file) as f:
|
||||
config: dict[str, any] | None = yaml.safe_load(f)
|
||||
if not config:
|
||||
continue
|
||||
|
||||
# Add all top-level component keys
|
||||
components.update(config.keys())
|
||||
|
||||
# Add platform components (e.g., output.template)
|
||||
for value in config.values():
|
||||
if not isinstance(value, list):
|
||||
continue
|
||||
|
||||
for item in value:
|
||||
if isinstance(item, dict) and "platform" in item:
|
||||
components.add(item["platform"])
|
||||
|
||||
return components
|
||||
|
@@ -20,12 +20,6 @@ def filter_component_files(str):
    return str.startswith("esphome/components/") | str.startswith("tests/components/")


def get_all_component_files() -> list[str]:
    """Get all component files from git."""
    files = git_ls_files()
    return list(filter(filter_component_files, files))


def extract_component_names_array_from_files_array(files):
    components = []
    for file in files:
@@ -171,20 +165,17 @@ def main():
    if args.branch and not args.changed:
        parser.error("--branch requires --changed")

    if args.changed:
        # When --changed is passed, only get the changed files
        changed = changed_files(args.branch)
        files = git_ls_files()
        files = filter(filter_component_files, files)

        # If any base test file(s) changed, there's no need to filter out components
        if any("tests/test_build_components" in file for file in changed):
            # Need to get all component files
            files = get_all_component_files()
    if args.changed:
        if args.branch:
            changed = changed_files(args.branch)
        else:
            # Only look at changed component files
            files = [f for f in changed if filter_component_files(f)]
    else:
        # Get all component files
        files = get_all_component_files()
        changed = changed_files()
        # If any base test file(s) changed, there's no need to filter out components
        if not any("tests/test_build_components" in file for file in changed):
            files = [f for f in files if f in changed]

    for c in get_components(files, args.changed):
        print(c)
@@ -8,31 +8,31 @@ from esphome.types import ConfigType

def test_web_server_ota_true_fails_validation() -> None:
    """Test that web_server with ota: true fails validation with helpful message."""
    from esphome.components.web_server import validate_ota_removed
    from esphome.components.web_server import validate_ota

    # Config with ota: true should fail
    config: ConfigType = {"ota": True}

    with pytest.raises(cv.Invalid) as exc_info:
        validate_ota_removed(config)
        validate_ota(config)

    # Check error message contains migration instructions
    error_msg = str(exc_info.value)
    assert "has been removed from 'web_server'" in error_msg
    assert "only accepts 'false' to disable OTA" in error_msg
    assert "platform: web_server" in error_msg
    assert "ota:" in error_msg


def test_web_server_ota_false_passes_validation() -> None:
    """Test that web_server with ota: false passes validation."""
    from esphome.components.web_server import validate_ota_removed
    from esphome.components.web_server import validate_ota

    # Config with ota: false should pass
    config: ConfigType = {"ota": False}
    result = validate_ota_removed(config)
    result = validate_ota(config)
    assert result == config

    # Config without ota should also pass
    config: ConfigType = {}
    result = validate_ota_removed(config)
    result = validate_ota(config)
    assert result == config
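The hunk above only renames the validator under test between validate_ota_removed and validate_ota; the behavior being asserted stays the same. Purely as an illustrative sketch of the kind of validator these tests exercise (not the actual ESPHome implementation, and the wording is taken from the assertions above):

    # Illustrative only - not the real web_server validator.
    import esphome.config_validation as cv

    def validate_ota(config):
        # 'ota: true' is rejected with a migration hint; 'false' or absent passes through.
        if config.get("ota") is True:
            raise cv.Invalid(
                "The 'ota' option has been removed from 'web_server'; it only accepts "
                "'false' to disable OTA. Use an 'ota:' block with 'platform: web_server' instead."
            )
        return config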
tests/components/captive_portal/test.bk72xx-ard.yaml (new file, 1 line)
@@ -0,0 +1 @@
<<: !include common.yaml
@@ -928,6 +928,12 @@ lvgl:
      angle_range: 360
      rotation: !lambda return 2700;
      indicators:
        - tick_style:
            start_value: 0
            end_value: 60
            color_start: 0x0000bd
            color_end: 0xbd0000
            width: !lambda return 1;
        - line:
            opa: 50%
            id: minute_hand
@@ -5,14 +5,12 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
from collections.abc import AsyncGenerator, Callable, Generator
|
||||
from contextlib import AbstractAsyncContextManager, asynccontextmanager
|
||||
import fcntl
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
import platform
|
||||
import signal
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import TextIO
|
||||
@@ -52,66 +50,6 @@ if platform.system() == "Windows":
|
||||
import pty # not available on Windows
|
||||
|
||||
|
||||
def _get_platformio_env(cache_dir: Path) -> dict[str, str]:
|
||||
"""Get environment variables for PlatformIO with shared cache."""
|
||||
env = os.environ.copy()
|
||||
env["PLATFORMIO_CORE_DIR"] = str(cache_dir)
|
||||
env["PLATFORMIO_CACHE_DIR"] = str(cache_dir / ".cache")
|
||||
env["PLATFORMIO_LIBDEPS_DIR"] = str(cache_dir / "libdeps")
|
||||
return env
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def shared_platformio_cache() -> Generator[Path]:
|
||||
"""Initialize a shared PlatformIO cache for all integration tests."""
|
||||
# Use a dedicated directory for integration tests to avoid conflicts
|
||||
test_cache_dir = Path.home() / ".esphome-integration-tests"
|
||||
cache_dir = test_cache_dir / "platformio"
|
||||
|
||||
# Use a lock file in the home directory to ensure only one process initializes the cache
|
||||
# This is needed when running with pytest-xdist
|
||||
# The lock file must be in a directory that already exists to avoid race conditions
|
||||
lock_file = Path.home() / ".esphome-integration-tests-init.lock"
|
||||
|
||||
# Always acquire the lock to ensure cache is ready before proceeding
|
||||
with open(lock_file, "w") as lock_fd:
|
||||
fcntl.flock(lock_fd.fileno(), fcntl.LOCK_EX)
|
||||
|
||||
# Check if cache needs initialization while holding the lock
|
||||
if not cache_dir.exists() or not any(cache_dir.iterdir()):
|
||||
# Create the test cache directory if it doesn't exist
|
||||
test_cache_dir.mkdir(exist_ok=True)
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
# Create a basic host config
|
||||
init_dir = Path(tmpdir)
|
||||
config_path = init_dir / "cache_init.yaml"
|
||||
config_path.write_text("""esphome:
|
||||
name: cache-init
|
||||
host:
|
||||
api:
|
||||
encryption:
|
||||
key: "IIevImVI42I0FGos5nLqFK91jrJehrgidI0ArwMLr8w="
|
||||
logger:
|
||||
""")
|
||||
|
||||
# Run compilation to populate the cache
|
||||
# We must succeed here to avoid race conditions where multiple
|
||||
# tests try to populate the same cache directory simultaneously
|
||||
env = _get_platformio_env(cache_dir)
|
||||
|
||||
subprocess.run(
|
||||
["esphome", "compile", str(config_path)],
|
||||
check=True,
|
||||
cwd=init_dir,
|
||||
env=env,
|
||||
)
|
||||
|
||||
# Lock is held until here, ensuring cache is fully populated before any test proceeds
|
||||
|
||||
yield cache_dir
|
||||
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def enable_aioesphomeapi_debug_logging():
|
||||
"""Enable debug logging for aioesphomeapi to help diagnose connection issues."""
|
||||
@@ -223,14 +161,22 @@ async def write_yaml_config(
|
||||
@pytest_asyncio.fixture
|
||||
async def compile_esphome(
|
||||
integration_test_dir: Path,
|
||||
shared_platformio_cache: Path,
|
||||
) -> AsyncGenerator[CompileFunction]:
|
||||
"""Compile an ESPHome configuration and return the binary path."""
|
||||
|
||||
async def _compile(config_path: Path) -> Path:
|
||||
# Use the shared PlatformIO cache for faster compilation
|
||||
# This avoids re-downloading dependencies for each test
|
||||
env = _get_platformio_env(shared_platformio_cache)
|
||||
# Create a unique PlatformIO directory for this test to avoid race conditions
|
||||
platformio_dir = integration_test_dir / ".platformio"
|
||||
platformio_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Create cache directory as well
|
||||
platformio_cache_dir = platformio_dir / ".cache"
|
||||
platformio_cache_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Set up environment with isolated PlatformIO directories
|
||||
env = os.environ.copy()
|
||||
env["PLATFORMIO_CORE_DIR"] = str(platformio_dir)
|
||||
env["PLATFORMIO_CACHE_DIR"] = str(platformio_cache_dir)
|
||||
|
||||
# Retry compilation up to 3 times if we get a segfault
|
||||
max_retries = 3
|
||||
|
tests/integration/fixtures/api_string_lambda.yaml (new file, 64 lines)
@@ -0,0 +1,64 @@
esphome:
  name: api-string-lambda-test
host:

api:
  actions:
    # Service that tests string lambda functionality
    - action: test_string_lambda
      variables:
        input_string: string
      then:
        # Log the input to verify service was called
        - logger.log:
            format: "Service called with string: %s"
            args: [input_string.c_str()]

        # This is the key test - using a lambda that returns x.c_str()
        # where x is already a string. This would fail to compile in 2025.7.0b5
        # with "no matching function for call to 'to_string(std::string)'"
        # This is the exact case from issue #9539
        - homeassistant.tag_scanned: !lambda 'return input_string.c_str();'

        # Also test with homeassistant.event to verify our fix works with data fields
        - homeassistant.event:
            event: esphome.test_string_lambda
            data:
              value: !lambda 'return input_string.c_str();'

    # Service that tests int lambda functionality
    - action: test_int_lambda
      variables:
        input_number: int
      then:
        # Log the input to verify service was called
        - logger.log:
            format: "Service called with int: %d"
            args: [input_number]

        # Test that int lambdas still work correctly with to_string
        # The TemplatableStringValue should automatically convert int to string
        - homeassistant.event:
            event: esphome.test_int_lambda
            data:
              value: !lambda 'return input_number;'

    # Service that tests float lambda functionality
    - action: test_float_lambda
      variables:
        input_float: float
      then:
        # Log the input to verify service was called
        - logger.log:
            format: "Service called with float: %.2f"
            args: [input_float]

        # Test that float lambdas still work correctly with to_string
        # The TemplatableStringValue should automatically convert float to string
        - homeassistant.event:
            event: esphome.test_float_lambda
            data:
              value: !lambda 'return input_float;'

logger:
  level: DEBUG
@@ -1,7 +1,7 @@
from esphome import automation
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import CONF_COMPONENTS, CONF_ID, CONF_NAME
from esphome.const import CONF_COMPONENTS, CONF_ID, CONF_NAME, CONF_UPDATE_INTERVAL

CODEOWNERS = ["@esphome/tests"]

@@ -10,10 +10,15 @@ LoopTestComponent = loop_test_component_ns.class_("LoopTestComponent", cg.Compon
LoopTestISRComponent = loop_test_component_ns.class_(
    "LoopTestISRComponent", cg.Component
)
LoopTestUpdateComponent = loop_test_component_ns.class_(
    "LoopTestUpdateComponent", cg.PollingComponent
)

CONF_DISABLE_AFTER = "disable_after"
CONF_TEST_REDUNDANT_OPERATIONS = "test_redundant_operations"
CONF_ISR_COMPONENTS = "isr_components"
CONF_UPDATE_COMPONENTS = "update_components"
CONF_DISABLE_LOOP_AFTER = "disable_loop_after"

COMPONENT_CONFIG_SCHEMA = cv.Schema(
    {
@@ -31,11 +36,23 @@ ISR_COMPONENT_CONFIG_SCHEMA = cv.Schema(
    }
)

UPDATE_COMPONENT_CONFIG_SCHEMA = cv.Schema(
    {
        cv.GenerateID(): cv.declare_id(LoopTestUpdateComponent),
        cv.Required(CONF_NAME): cv.string,
        cv.Optional(CONF_DISABLE_LOOP_AFTER, default=0): cv.int_,
        cv.Optional(CONF_UPDATE_INTERVAL, default="1s"): cv.update_interval,
    }
)

CONFIG_SCHEMA = cv.Schema(
    {
        cv.GenerateID(): cv.declare_id(LoopTestComponent),
        cv.Required(CONF_COMPONENTS): cv.ensure_list(COMPONENT_CONFIG_SCHEMA),
        cv.Optional(CONF_ISR_COMPONENTS): cv.ensure_list(ISR_COMPONENT_CONFIG_SCHEMA),
        cv.Optional(CONF_UPDATE_COMPONENTS): cv.ensure_list(
            UPDATE_COMPONENT_CONFIG_SCHEMA
        ),
    }
).extend(cv.COMPONENT_SCHEMA)

@@ -94,3 +111,12 @@ async def to_code(config):
        var = cg.new_Pvariable(isr_config[CONF_ID])
        await cg.register_component(var, isr_config)
        cg.add(var.set_name(isr_config[CONF_NAME]))

    # Create update test components
    for update_config in config.get(CONF_UPDATE_COMPONENTS, []):
        var = cg.new_Pvariable(update_config[CONF_ID])
        await cg.register_component(var, update_config)

        cg.add(var.set_name(update_config[CONF_NAME]))
        cg.add(var.set_disable_loop_after(update_config[CONF_DISABLE_LOOP_AFTER]))
        cg.add(var.set_update_interval(update_config[CONF_UPDATE_INTERVAL]))
@@ -39,5 +39,29 @@ void LoopTestComponent::service_disable() {
  this->disable_loop();
}

// LoopTestUpdateComponent implementation
void LoopTestUpdateComponent::setup() {
  ESP_LOGI(TAG, "[%s] LoopTestUpdateComponent setup called", this->name_.c_str());
}

void LoopTestUpdateComponent::loop() {
  this->loop_count_++;
  ESP_LOGI(TAG, "[%s] LoopTestUpdateComponent loop count: %d", this->name_.c_str(), this->loop_count_);

  // Disable loop after specified count to test component.update when loop is disabled
  if (this->disable_loop_after_ > 0 && this->loop_count_ == this->disable_loop_after_) {
    ESP_LOGI(TAG, "[%s] Disabling loop after %d iterations", this->name_.c_str(), this->disable_loop_after_);
    this->disable_loop();
  }
}

void LoopTestUpdateComponent::update() {
  this->update_count_++;
  // Check if loop is disabled by testing component state
  bool loop_disabled = this->component_state_ == COMPONENT_STATE_LOOP_DONE;
  ESP_LOGI(TAG, "[%s] LoopTestUpdateComponent update() called, count: %d, loop_disabled: %s", this->name_.c_str(),
           this->update_count_, loop_disabled ? "YES" : "NO");
}

}  // namespace loop_test_component
}  // namespace esphome
@@ -4,6 +4,7 @@
#include "esphome/core/log.h"
#include "esphome/core/application.h"
#include "esphome/core/automation.h"
#include "esphome/core/helpers.h"

namespace esphome {
namespace loop_test_component {
@@ -54,5 +55,29 @@ template<typename... Ts> class DisableAction : public Action<Ts...> {
  LoopTestComponent *parent_;
};

// Component with update() method to test component.update action
class LoopTestUpdateComponent : public PollingComponent {
 public:
  LoopTestUpdateComponent() : PollingComponent(1000) {}  // Default 1s update interval

  void set_name(const std::string &name) { this->name_ = name; }
  void set_disable_loop_after(int count) { this->disable_loop_after_ = count; }

  void setup() override;
  void loop() override;
  void update() override;

  int get_update_count() const { return this->update_count_; }
  int get_loop_count() const { return this->loop_count_; }

  float get_setup_priority() const override { return setup_priority::DATA; }

 protected:
  std::string name_;
  int loop_count_{0};
  int update_count_{0};
  int disable_loop_after_{0};
};

}  // namespace loop_test_component
}  // namespace esphome
@@ -40,6 +40,13 @@ loop_test_component:
    - id: isr_test
      name: "isr_test"

  # Update test component to test component.update when loop is disabled
  update_components:
    - id: update_test_component
      name: "update_test"
      disable_loop_after: 3  # Disable loop after 3 iterations
      update_interval: 0.1s  # Fast update interval for testing

# Interval to re-enable the self_disable_10 component after some time
interval:
  - interval: 0.5s
@@ -51,3 +58,28 @@ interval:
        - logger.log: "Re-enabling self_disable_10 via service"
        - loop_test_component.enable:
            id: self_disable_10

  # Test component.update on a component with disabled loop
  - interval: 0.1s
    then:
      - lambda: |-
          static bool manual_update_done = false;
          if (!manual_update_done &&
              id(update_test_component).get_loop_count() == 3 &&
              id(update_test_component).get_update_count() >= 3) {
            ESP_LOGI("main", "Manually calling component.update on update_test_component with disabled loop");
            manual_update_done = true;
          }
      - if:
          condition:
            lambda: |-
              static bool manual_update_triggered = false;
              if (!manual_update_triggered &&
                  id(update_test_component).get_loop_count() == 3 &&
                  id(update_test_component).get_update_count() >= 3) {
                manual_update_triggered = true;
                return true;
              }
              return false;
          then:
            - component.update: update_test_component
tests/integration/test_api_string_lambda.py (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
"""Integration test for TemplatableStringValue with string lambdas."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import re
|
||||
|
||||
import pytest
|
||||
|
||||
from .types import APIClientConnectedFactory, RunCompiledFunction
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_string_lambda(
|
||||
yaml_config: str,
|
||||
run_compiled: RunCompiledFunction,
|
||||
api_client_connected: APIClientConnectedFactory,
|
||||
) -> None:
|
||||
"""Test TemplatableStringValue works with lambdas that return different types."""
|
||||
loop = asyncio.get_running_loop()
|
||||
|
||||
# Track log messages for all three service calls
|
||||
string_called_future = loop.create_future()
|
||||
int_called_future = loop.create_future()
|
||||
float_called_future = loop.create_future()
|
||||
|
||||
# Patterns to match in logs - confirms the lambdas compiled and executed
|
||||
string_pattern = re.compile(r"Service called with string: STRING_FROM_LAMBDA")
|
||||
int_pattern = re.compile(r"Service called with int: 42")
|
||||
float_pattern = re.compile(r"Service called with float: 3\.14")
|
||||
|
||||
def check_output(line: str) -> None:
|
||||
"""Check log output for expected messages."""
|
||||
if not string_called_future.done() and string_pattern.search(line):
|
||||
string_called_future.set_result(True)
|
||||
if not int_called_future.done() and int_pattern.search(line):
|
||||
int_called_future.set_result(True)
|
||||
if not float_called_future.done() and float_pattern.search(line):
|
||||
float_called_future.set_result(True)
|
||||
|
||||
# Run with log monitoring
|
||||
async with (
|
||||
run_compiled(yaml_config, line_callback=check_output),
|
||||
api_client_connected() as client,
|
||||
):
|
||||
# Verify device info
|
||||
device_info = await client.device_info()
|
||||
assert device_info is not None
|
||||
assert device_info.name == "api-string-lambda-test"
|
||||
|
||||
# List services to find our test services
|
||||
_, services = await client.list_entities_services()
|
||||
|
||||
# Find all test services
|
||||
string_service = next(
|
||||
(s for s in services if s.name == "test_string_lambda"), None
|
||||
)
|
||||
assert string_service is not None, "test_string_lambda service not found"
|
||||
|
||||
int_service = next((s for s in services if s.name == "test_int_lambda"), None)
|
||||
assert int_service is not None, "test_int_lambda service not found"
|
||||
|
||||
float_service = next(
|
||||
(s for s in services if s.name == "test_float_lambda"), None
|
||||
)
|
||||
assert float_service is not None, "test_float_lambda service not found"
|
||||
|
||||
# Execute all three services to test different lambda return types
|
||||
client.execute_service(string_service, {"input_string": "STRING_FROM_LAMBDA"})
|
||||
client.execute_service(int_service, {"input_number": 42})
|
||||
client.execute_service(float_service, {"input_float": 3.14})
|
||||
|
||||
# Wait for all service log messages
|
||||
# This confirms the lambdas compiled successfully and executed
|
||||
try:
|
||||
await asyncio.wait_for(
|
||||
asyncio.gather(
|
||||
string_called_future, int_called_future, float_called_future
|
||||
),
|
||||
timeout=5.0,
|
||||
)
|
||||
except TimeoutError:
|
||||
pytest.fail(
|
||||
"One or more service log messages not received - lambda may have failed to compile or execute"
|
||||
)
|
@@ -45,11 +45,18 @@ async def test_loop_disable_enable(
|
||||
isr_component_disabled = asyncio.Event()
|
||||
isr_component_re_enabled = asyncio.Event()
|
||||
isr_component_pure_re_enabled = asyncio.Event()
|
||||
# Events for update component testing
|
||||
update_component_loop_disabled = asyncio.Event()
|
||||
update_component_manual_update_called = asyncio.Event()
|
||||
|
||||
# Track loop counts for components
|
||||
self_disable_10_counts: list[int] = []
|
||||
normal_component_counts: list[int] = []
|
||||
isr_component_counts: list[int] = []
|
||||
# Track update component behavior
|
||||
update_component_loop_count = 0
|
||||
update_component_update_count = 0
|
||||
update_component_manual_update_count = 0
|
||||
|
||||
def on_log_line(line: str) -> None:
|
||||
"""Process each log line from the process output."""
|
||||
@@ -59,6 +66,7 @@ async def test_loop_disable_enable(
|
||||
if (
|
||||
"loop_test_component" not in clean_line
|
||||
and "loop_test_isr_component" not in clean_line
|
||||
and "Manually calling component.update" not in clean_line
|
||||
):
|
||||
return
|
||||
|
||||
@@ -112,6 +120,23 @@ async def test_loop_disable_enable(
|
||||
elif "Running after pure ISR re-enable!" in clean_line:
|
||||
isr_component_pure_re_enabled.set()
|
||||
|
||||
# Update component events
|
||||
elif "[update_test]" in clean_line:
|
||||
if "LoopTestUpdateComponent loop count:" in clean_line:
|
||||
nonlocal update_component_loop_count
|
||||
update_component_loop_count = int(
|
||||
clean_line.split("LoopTestUpdateComponent loop count: ")[1]
|
||||
)
|
||||
elif "LoopTestUpdateComponent update() called" in clean_line:
|
||||
nonlocal update_component_update_count
|
||||
update_component_update_count += 1
|
||||
if "Manually calling component.update" in " ".join(log_messages[-5:]):
|
||||
nonlocal update_component_manual_update_count
|
||||
update_component_manual_update_count += 1
|
||||
update_component_manual_update_called.set()
|
||||
elif "Disabling loop after" in clean_line:
|
||||
update_component_loop_disabled.set()
|
||||
|
||||
# Write, compile and run the ESPHome device with log callback
|
||||
async with (
|
||||
run_compiled(yaml_config, line_callback=on_log_line),
|
||||
@@ -205,3 +230,28 @@ async def test_loop_disable_enable(
|
||||
assert final_count > 10, (
|
||||
f"Component didn't run after pure ISR enable: got {final_count} counts total"
|
||||
)
|
||||
|
||||
# Test component.update functionality when loop is disabled
|
||||
# Wait for update component to disable its loop
|
||||
try:
|
||||
await asyncio.wait_for(update_component_loop_disabled.wait(), timeout=3.0)
|
||||
except asyncio.TimeoutError:
|
||||
pytest.fail("Update component did not disable its loop within 3 seconds")
|
||||
|
||||
# Verify it ran exactly 3 loops before disabling
|
||||
assert update_component_loop_count == 3, (
|
||||
f"Expected 3 loop iterations before disable, got {update_component_loop_count}"
|
||||
)
|
||||
|
||||
# Wait for manual component.update to be called
|
||||
try:
|
||||
await asyncio.wait_for(
|
||||
update_component_manual_update_called.wait(), timeout=5.0
|
||||
)
|
||||
except asyncio.TimeoutError:
|
||||
pytest.fail("Manual component.update was not called within 5 seconds")
|
||||
|
||||
# The key test: verify that manual component.update worked after loop was disabled
|
||||
assert update_component_manual_update_count >= 1, (
|
||||
"component.update did not fire after loop was disabled"
|
||||
)
|
||||
|
Some files were not shown because too many files have changed in this diff.