Mirror of https://github.com/home-assistant/core.git (synced 2025-09-20 18:39:40 +00:00)

Compare commits: hassfest-e... to media-sour... (86 commits)
Commit SHAs (86):

38869c37e9 29f650565b f5535db24c e40ecdfb00 2f4c69bbd5 dd0f6a702b
5ba580bc25 c13002bdd5 75d22191a0 58d6549f1c 1fcc6df1fd 9bf467e6d1
d877d6d93f d2b255ba92 1509c429d6 af9717c1cd 49e75c9cf8 c97f16a96d
a3a4433d62 f832002afd dbc7f2b43c 1cd3a1eede 7d6e0d44b0 2bb6d745ca
beb9d7856c 6a4c8a550a 7d23752a3f c2b2a78db5 0fb6bbee59 d93e0a105a
ab1619c0b4 70df7b8503 0e2c2ad355 4c26718739 96034e1525 df1302fc1c
5a5b639aa4 e9fbe2227f 82b57568a0 be692ab2fd 24c04cceee 97077898bb
08485f4e09 b64d60fce4 3690497e1f b87e581cde f1c55ee7e2 9f17a82acf
3955391cda d9a757c7e6 aa1ec944c0 88c3b6a9f5 ada73953f6 42e9b9a0bc
ec6a052ff5 c91d64e04d 0ac7cb311d 3472020812 dcd09523a6 a5bfdc697b
dbb29a7c7d 124a63d846 3de701a9ab bfe1dd65b3 71bf5e14cc 6d231c2c99
b93072865b 14ebb6cd74 2ddbcd560e c5ff7ed1c9 c4bea5616c 17fe147726
9fae4e7e1f 0cebca498c 521ff62aae fd1df5ad88 91e7a35a07 09381abf46
3713c03c07 bd8ddd7cd8 f0dc1f927b 984590c6d1 d324021a3f 1f4c0b3e9b
69893aba4b b9dcf89b37
.github/workflows/builder.yml (vendored, 42 changes)
@@ -27,12 +27,12 @@ jobs:
       publish: ${{ steps.version.outputs.publish }}
     steps:
       - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          fetch-depth: 0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

@@ -69,7 +69,7 @@ jobs:
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: translations
          path: translations.tar.gz
@@ -90,11 +90,11 @@ jobs:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Download nightly wheels of frontend
        if: needs.init.outputs.channel == 'dev'
-       uses: dawidd6/action-download-artifact@v11
+       uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

      - name: Download nightly wheels of intents
        if: needs.init.outputs.channel == 'dev'
-       uses: dawidd6/action-download-artifact@v11
+       uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: OHF-Voice/intents-package
@@ -116,7 +116,7 @@ jobs:

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        if: needs.init.outputs.channel == 'dev'
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
          sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

      - name: Download translations
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: translations

@@ -190,12 +190,13 @@ jobs:
          echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

+     # home-assistant/builder doesn't support sha pinning
      - name: Build base image
        uses: home-assistant/builder@2025.03.0
        with:
@@ -242,7 +243,7 @@ jobs:
          - green
     steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set build additional args
        run: |
@@ -256,12 +257,13 @@ jobs:
          fi

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

+     # home-assistant/builder doesn't support sha pinning
      - name: Build base image
        uses: home-assistant/builder@2025.03.0
        with:
@@ -279,7 +281,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize git
        uses: home-assistant/actions/helpers/git-init@master
@@ -321,23 +323,23 @@ jobs:
        registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
     steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Cosign
-       uses: sigstore/cosign-installer@v3.9.2
+       uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2
        with:
          cosign-release: "v2.2.3"

      - name: Login to DockerHub
        if: matrix.registry == 'docker.io/homeassistant'
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        if: matrix.registry == 'ghcr.io/home-assistant'
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -454,15 +456,15 @@ jobs:
     if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
     steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

      - name: Download translations
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: translations

@@ -480,7 +482,7 @@ jobs:
          python -m build

      - name: Upload package to PyPI
-       uses: pypa/gh-action-pypi-publish@v1.13.0
+       uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
        with:
          skip-existing: true
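Every third-party action above is repinned from a floating version tag to an immutable commit SHA, with the human-readable tag kept as a trailing comment; the same change repeats through the remaining workflow files below. Only home-assistant/builder (and, later, home-assistant/wheels) stay on tags, which the added comments attribute to those actions not supporting SHA pinning. As a rough sketch of where such pinned lines come from, the snippet below resolves a tag to its commit with git ls-remote; pin_action is a hypothetical helper, not part of this repository:

import subprocess

def pin_action(action: str, tag: str) -> str:
    """Return a workflow `uses:` line pinned to the commit behind `tag`."""
    refs = [f"refs/tags/{tag}", f"refs/tags/{tag}^{{}}"]
    out = subprocess.run(
        ["git", "ls-remote", f"https://github.com/{action}", *refs],
        capture_output=True, text=True, check=True,
    ).stdout
    rows = [line.split("\t") for line in out.splitlines()]
    # Annotated tags also list a peeled "^{}" entry pointing at the actual commit.
    sha = next((s for s, ref in rows if ref.endswith("^{}")), rows[0][0])
    return f"uses: {action}@{sha} # {tag}"

print(pin_action("actions/checkout", "v5.0.0"))
# uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0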
.github/workflows/ci.yaml (vendored, 168 changes)
@@ -98,7 +98,7 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Generate partial Python venv restore key
        id: generate_python_cache_key
        run: |
@@ -120,7 +120,7 @@ jobs:
        run: |
          echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT
      - name: Filter for core changes
-       uses: dorny/paths-filter@v3.0.2
+       uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
        id: core
        with:
          filters: .core_files.yaml
@@ -135,7 +135,7 @@ jobs:
          echo "Result:"
          cat .integration_paths.yaml
      - name: Filter for integration changes
-       uses: dorny/paths-filter@v3.0.2
+       uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
        id: integrations
        with:
          filters: .integration_paths.yaml
@@ -254,16 +254,16 @@ jobs:
      - info
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          key: >-
@@ -279,7 +279,7 @@ jobs:
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
@@ -300,16 +300,16 @@ jobs:
      - pre-commit
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -340,16 +340,16 @@ jobs:
      - pre-commit
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -380,16 +380,16 @@ jobs:
      - pre-commit
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -470,7 +470,7 @@ jobs:
        - script/hassfest/docker/Dockerfile
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -489,10 +489,10 @@ jobs:
        python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
@@ -505,7 +505,7 @@ jobs:
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          key: >-
@@ -513,7 +513,7 @@ jobs:
            needs.info.outputs.python_cache_key }}
      - name: Restore uv wheel cache
        if: steps.cache-venv.outputs.cache-hit != 'true'
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.UV_CACHE_DIR }}
          key: >-
@@ -585,7 +585,7 @@ jobs:
          python --version
          uv pip freeze >> pip_freeze.txt
      - name: Upload pip_freeze artifact
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pip-freeze-${{ matrix.python-version }}
          path: pip_freeze.txt
@@ -631,16 +631,16 @@ jobs:
            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            libturbojpeg
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -664,16 +664,16 @@ jobs:
      - base
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -698,9 +698,9 @@ jobs:
        && github.event_name == 'pull_request'
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Dependency review
-       uses: actions/dependency-review-action@v4.7.3
+       uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
        with:
          license-check: false # We use our own license audit checks

@@ -721,16 +721,16 @@ jobs:
        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -742,7 +742,7 @@ jobs:
          . venv/bin/activate
          python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
      - name: Upload licenses
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
          path: licenses-${{ matrix.python-version }}.json
@@ -764,16 +764,16 @@ jobs:
      - base
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -811,16 +811,16 @@ jobs:
      - base
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -856,10 +856,10 @@ jobs:
      - base
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
@@ -872,7 +872,7 @@ jobs:
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -880,7 +880,7 @@ jobs:
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Restore mypy cache
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: .mypy_cache
          key: >-
@@ -947,16 +947,16 @@ jobs:
            libturbojpeg \
            libgammu-dev
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -968,7 +968,7 @@ jobs:
          . venv/bin/activate
          python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
      - name: Upload pytest_buckets
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest_buckets
          path: pytest_buckets.txt
@@ -1022,16 +1022,16 @@ jobs:
            libgammu-dev \
            libxml2-utils
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1045,7 +1045,7 @@ jobs:
        run: |
          echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
      - name: Download pytest_buckets
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: pytest_buckets
      - name: Compile English translations
@@ -1084,14 +1084,14 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1104,7 +1104,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
          path: junit.xml
@@ -1169,16 +1169,16 @@ jobs:
            libmariadb-dev-compat \
            libxml2-utils
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1237,7 +1237,7 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1245,7 +1245,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1259,7 +1259,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-mariadb-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1325,16 +1325,16 @@ jobs:
          sudo apt-get -y install \
            postgresql-server-dev-14
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1394,7 +1394,7 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1402,7 +1402,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1416,7 +1416,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-postgres-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1437,14 +1437,14 @@ jobs:
     timeout-minutes: 10
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Download all coverage artifacts
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'true'
-       uses: codecov/codecov-action@v5.5.1
+       uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
        with:
          fail_ci_if_error: true
          flags: full-suite
@@ -1498,16 +1498,16 @@ jobs:
            libgammu-dev \
            libxml2-utils
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1563,14 +1563,14 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1583,7 +1583,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
          path: junit.xml
@@ -1601,14 +1601,14 @@ jobs:
     timeout-minutes: 10
     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Download all coverage artifacts
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'false'
-       uses: codecov/codecov-action@v5.5.1
+       uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -1628,11 +1628,11 @@ jobs:
     timeout-minutes: 10
     steps:
      - name: Download all coverage artifacts
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          pattern: test-results-*
      - name: Upload test results to Codecov
-       uses: codecov/test-results-action@v1
+       uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
        with:
          fail_ci_if_error: true
          verbose: true
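Several steps in this file hand values to later steps by appending key=value lines to the file named by $GITHUB_OUTPUT (for example the apt cache key echoed above), which then surface as steps.<id>.outputs.<key>. A minimal sketch of that contract, with a temporary file standing in for the runner-provided path; the key value shown is illustrative only:

import os, tempfile

# The runner normally sets GITHUB_OUTPUT; a temp file stands in for it here.
os.environ["GITHUB_OUTPUT"] = tempfile.mkstemp()[1]

# Equivalent of: echo "key=..." >> $GITHUB_OUTPUT  (illustrative value)
with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as fh:
    fh.write("key=24.04-apt-1-2025.9\n")

print(open(os.environ["GITHUB_OUTPUT"], encoding="utf-8").read())
# -> key=24.04-apt-1-2025.9  (exposed to later steps as steps.<id>.outputs.key)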
.github/workflows/codeql.yml (vendored, 6 changes)
@@ -21,14 +21,14 @@ jobs:

     steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize CodeQL
-       uses: github/codeql-action/init@v3.30.3
+       uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          languages: python

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@v3.30.3
+       uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          category: "/language:python"
(unnamed workflow file: AI-based duplicate-issue detection)

@@ -16,7 +16,7 @@ jobs:
     steps:
      - name: Check if integration label was added and extract details
        id: extract
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            // Debug: Log the event payload
@@ -113,7 +113,7 @@ jobs:
      - name: Fetch similar issues
        id: fetch_similar
        if: steps.extract.outputs.should_continue == 'true'
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
          CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
@@ -231,7 +231,7 @@ jobs:
      - name: Detect duplicates using AI
        id: ai_detection
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-       uses: actions/ai-inference@v2.0.1
+       uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
        with:
          model: openai/gpt-4o
          system-prompt: |
@@ -280,7 +280,7 @@ jobs:
      - name: Post duplicate detection results
        id: post_results
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
          SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
(unnamed workflow file: issue language detection)

@@ -16,7 +16,7 @@ jobs:
     steps:
      - name: Check issue language
        id: detect_language
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          ISSUE_NUMBER: ${{ github.event.issue.number }}
          ISSUE_TITLE: ${{ github.event.issue.title }}
@@ -57,7 +57,7 @@ jobs:
      - name: Detect language using AI
        id: ai_language_detection
        if: steps.detect_language.outputs.should_continue == 'true'
-       uses: actions/ai-inference@v2.0.1
+       uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
        with:
          model: openai/gpt-4o-mini
          system-prompt: |
@@ -90,7 +90,7 @@ jobs:

      - name: Process non-English issues
        if: steps.detect_language.outputs.should_continue == 'true'
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
          ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
.github/workflows/lock.yml (vendored, 2 changes)
@@ -10,7 +10,7 @@ jobs:
     if: github.repository_owner == 'home-assistant'
     runs-on: ubuntu-latest
     steps:
-     - uses: dessant/lock-threads@v5.0.1
+     - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"
.github/workflows/restrict-task-creation.yml (vendored, 2 changes)
@@ -12,7 +12,7 @@ jobs:
     if: github.event.issue.type.name == 'Task'
     steps:
      - name: Check if user is authorized
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            const issueAuthor = context.payload.issue.user.login;
.github/workflows/stale.yml (vendored, 6 changes)
@@ -17,7 +17,7 @@ jobs:
      # - No PRs marked as no-stale
      # - No issues (-1)
      - name: 60 days stale PRs policy
-       uses: actions/stale@v10.0.0
+       uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
      # - No issues marked as no-stale or help-wanted
      # - No PRs (-1)
      - name: 90 days stale issues
-       uses: actions/stale@v10.0.0
+       uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ steps.token.outputs.token }}
          days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
      # - No Issues marked as no-stale or help-wanted
      # - No PRs (-1)
      - name: Needs more information stale issues policy
-       uses: actions/stale@v10.0.0
+       uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ steps.token.outputs.token }}
          only-labels: "needs-more-information"
.github/workflows/translations.yml (vendored, 4 changes)
@@ -19,10 +19,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (vendored, 32 changes)
@@ -32,11 +32,11 @@ jobs:
       architectures: ${{ steps.info.outputs.architectures }}
     steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
@@ -91,7 +91,7 @@ jobs:
          ) > build_constraints.txt

      - name: Upload env_file
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: env_file
          path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
          overwrite: true

      - name: Upload build_constraints
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: build_constraints
          path: ./build_constraints.txt
          overwrite: true

      - name: Upload requirements_diff
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: requirements_diff
          path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
          python -m script.gen_requirements_all ci

      - name: Upload requirements_all_wheels
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: requirements_all_wheels
          path: ./requirements_all_wheels_*.txt
@@ -135,20 +135,20 @@ jobs:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Download env_file
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: env_file

      - name: Download build_constraints
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: build_constraints

      - name: Download requirements_diff
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: requirements_diff

@@ -158,6 +158,7 @@ jobs:
          sed -i "/uv/d" requirements.txt
          sed -i "/uv/d" requirements_diff.txt

+     # home-assistant/wheels doesn't support sha pinning
      - name: Build wheels
        uses: home-assistant/wheels@2025.07.0
        with:
@@ -184,25 +185,25 @@ jobs:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Download env_file
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: env_file

      - name: Download build_constraints
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: build_constraints

      - name: Download requirements_diff
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: requirements_diff

      - name: Download requirements_all_wheels
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: requirements_all_wheels

@@ -218,6 +219,7 @@ jobs:
          sed -i "/uv/d" requirements.txt
          sed -i "/uv/d" requirements_diff.txt

+     # home-assistant/wheels doesn't support sha pinning
      - name: Build wheels
        uses: home-assistant/wheels@2025.07.0
        with:
CODEOWNERS (generated, 4 changes)
@@ -442,8 +442,6 @@ build.json @home-assistant/supervisor
 /tests/components/energyzero/ @klaasnicolaas
 /homeassistant/components/enigma2/ @autinerd
 /tests/components/enigma2/ @autinerd
 /homeassistant/components/enocean/ @bdurrer
 /tests/components/enocean/ @bdurrer
 /homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /homeassistant/components/entur_public_transport/ @hfurubotten
@@ -970,6 +968,8 @@ build.json @home-assistant/supervisor
 /tests/components/moat/ @bdraco
 /homeassistant/components/mobile_app/ @home-assistant/core
 /tests/components/mobile_app/ @home-assistant/core
 /homeassistant/components/modbus/ @janiversen
 /tests/components/modbus/ @janiversen
 /homeassistant/components/modem_callerid/ @tkdrob
 /tests/components/modem_callerid/ @tkdrob
 /homeassistant/components/modern_forms/ @wonderslug
homeassistant/components/accuweather/__init__.py

@@ -2,21 +2,23 @@

 from __future__ import annotations

+import asyncio
 import logging

 from accuweather import AccuWeather

 from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
-from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
+from homeassistant.const import CONF_API_KEY, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
+from .const import DOMAIN
 from .coordinator import (
     AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherData,
+    AccuWeatherHourlyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -28,7 +30,6 @@ PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
 async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool:
     """Set up AccuWeather as config entry."""
     api_key: str = entry.data[CONF_API_KEY]
-    name: str = entry.data[CONF_NAME]

     location_key = entry.unique_id

@@ -41,26 +42,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
         hass,
         entry,
         accuweather,
-        name,
-        "observation",
-        UPDATE_INTERVAL_OBSERVATION,
     )

     coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
         hass,
         entry,
         accuweather,
-        name,
-        "daily forecast",
-        UPDATE_INTERVAL_DAILY_FORECAST,
     )
+    coordinator_hourly_forecast = AccuWeatherHourlyForecastDataUpdateCoordinator(
+        hass,
+        entry,
+        accuweather,
+    )

-    await coordinator_observation.async_config_entry_first_refresh()
-    await coordinator_daily_forecast.async_config_entry_first_refresh()
+    await asyncio.gather(
+        coordinator_observation.async_config_entry_first_refresh(),
+        coordinator_daily_forecast.async_config_entry_first_refresh(),
+        coordinator_hourly_forecast.async_config_entry_first_refresh(),
+    )

     entry.runtime_data = AccuWeatherData(
         coordinator_observation=coordinator_observation,
         coordinator_daily_forecast=coordinator_daily_forecast,
+        coordinator_hourly_forecast=coordinator_hourly_forecast,
     )

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
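The setup refactor above replaces two sequential first refreshes with one asyncio.gather over three coordinators, so the initial API calls run concurrently instead of back to back. A standalone sketch of the same pattern, with hypothetical refresh coroutines rather than Home Assistant code:

import asyncio

async def first_refresh(name: str, delay: float) -> str:
    await asyncio.sleep(delay)  # stands in for one AccuWeather API call
    return name

async def main() -> None:
    # Sequential awaits cost roughly the sum of the delays;
    # gather (as in the new code) costs roughly the maximum.
    results = await asyncio.gather(
        first_refresh("observation", 0.3),
        first_refresh("daily forecast", 0.3),
        first_refresh("hourly forecast", 0.3),
    )
    print(results)  # ['observation', 'daily forecast', 'hourly forecast']

asyncio.run(main())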
homeassistant/components/accuweather/const.py

@@ -71,3 +71,4 @@ POLLEN_CATEGORY_MAP = {
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
homeassistant/components/accuweather/coordinator.py

@@ -3,6 +3,7 @@
 from __future__ import annotations

 from asyncio import timeout
+from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from datetime import timedelta
 import logging
@@ -12,6 +13,7 @@ from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExcee
 from aiohttp.client_exceptions import ClientConnectorError

 from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_NAME
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.update_coordinator import (
@@ -20,7 +22,13 @@ from homeassistant.helpers.update_coordinator import (
     UpdateFailed,
 )

-from .const import DOMAIN, MANUFACTURER
+from .const import (
+    DOMAIN,
+    MANUFACTURER,
+    UPDATE_INTERVAL_DAILY_FORECAST,
+    UPDATE_INTERVAL_HOURLY_FORECAST,
+    UPDATE_INTERVAL_OBSERVATION,
+)

 EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError)

@@ -33,6 +41,7 @@ class AccuWeatherData:

     coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
     coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
+    coordinator_hourly_forecast: AccuWeatherHourlyForecastDataUpdateCoordinator


 type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
@@ -48,13 +57,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
-        coordinator_type: str,
-        update_interval: timedelta,
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -65,8 +72,8 @@ class AccuWeatherObservationDataUpdateCoordinator(
             hass,
             _LOGGER,
             config_entry=config_entry,
-            name=f"{name} ({coordinator_type})",
-            update_interval=update_interval,
+            name=f"{name} (observation)",
+            update_interval=UPDATE_INTERVAL_OBSERVATION,
         )

     async def _async_update_data(self) -> dict[str, Any]:
@@ -86,23 +93,25 @@ class AccuWeatherObservationDataUpdateCoordinator(
         return result


-class AccuWeatherDailyForecastDataUpdateCoordinator(
+class AccuWeatherForecastDataUpdateCoordinator(
     TimestampDataUpdateCoordinator[list[dict[str, Any]]]
 ):
-    """Class to manage fetching AccuWeather data API."""
+    """Base class for AccuWeather forecast."""

     def __init__(
         self,
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
         coordinator_type: str,
         update_interval: timedelta,
+        fetch_method: Callable[..., Awaitable[list[dict[str, Any]]]],
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        self._fetch_method = fetch_method
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -118,12 +127,10 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
         )

     async def _async_update_data(self) -> list[dict[str, Any]]:
-        """Update data via library."""
+        """Update forecast data via library."""
         try:
             async with timeout(10):
-                result = await self.accuweather.async_get_daily_forecast(
-                    language=self.hass.config.language
-                )
+                result = await self._fetch_method(language=self.hass.config.language)
         except EXCEPTIONS as error:
             raise UpdateFailed(
                 translation_domain=DOMAIN,
@@ -132,10 +139,53 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
             ) from error

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

         return result


+class AccuWeatherDailyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for daily forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "daily forecast",
+            UPDATE_INTERVAL_DAILY_FORECAST,
+            fetch_method=accuweather.async_get_daily_forecast,
+        )
+
+
+class AccuWeatherHourlyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for hourly forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "hourly forecast",
+            UPDATE_INTERVAL_HOURLY_FORECAST,
+            fetch_method=accuweather.async_get_hourly_forecast,
+        )
+
+
 def _get_device_info(location_key: str, name: str) -> DeviceInfo:
     """Get device info."""
     return DeviceInfo(
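The refactor above turns the daily coordinator into a generic AccuWeatherForecastDataUpdateCoordinator that receives its API call as an injected fetch_method callable; the daily and hourly subclasses differ only in the label, interval, and bound method they pass to super().__init__. A stripped-down sketch of that injection pattern, with hypothetical names and no Home Assistant imports:

import asyncio
from collections.abc import Awaitable, Callable

class ForecastCoordinator:
    """Base: owns the update logic, delegates the actual fetch to a callable."""

    def __init__(self, fetch_method: Callable[..., Awaitable[list[dict]]]) -> None:
        self._fetch_method = fetch_method

    async def update(self) -> list[dict]:
        # One shared implementation; only the injected callable differs.
        return await self._fetch_method(language="en")

class FakeClient:
    """Stand-in for the AccuWeather client, exposing two fetchers."""

    async def get_daily(self, language: str) -> list[dict]:
        return [{"kind": "daily", "language": language}]

    async def get_hourly(self, language: str) -> list[dict]:
        return [{"kind": "hourly", "language": language}]

client = FakeClient()
daily = ForecastCoordinator(fetch_method=client.get_daily)
hourly = ForecastCoordinator(fetch_method=client.get_hourly)
print(asyncio.run(daily.update()))   # [{'kind': 'daily', 'language': 'en'}]
print(asyncio.run(hourly.update()))  # [{'kind': 'hourly', 'language': 'en'}]

Passing a bound method instead of subclassing the fetch itself keeps the retry, timeout, and error-translation logic in exactly one place.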
homeassistant/components/accuweather/weather.py

@@ -45,6 +45,7 @@ from .coordinator import (
     AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherData,
+    AccuWeatherHourlyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -64,6 +65,7 @@ class AccuWeatherEntity(
     CoordinatorWeatherEntity[
         AccuWeatherObservationDataUpdateCoordinator,
         AccuWeatherDailyForecastDataUpdateCoordinator,
+        AccuWeatherHourlyForecastDataUpdateCoordinator,
     ]
 ):
     """Define an AccuWeather entity."""
@@ -76,6 +78,7 @@ class AccuWeatherEntity(
         super().__init__(
             observation_coordinator=accuweather_data.coordinator_observation,
             daily_coordinator=accuweather_data.coordinator_daily_forecast,
+            hourly_coordinator=accuweather_data.coordinator_hourly_forecast,
         )

         self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
@@ -86,10 +89,13 @@ class AccuWeatherEntity(
         self._attr_unique_id = accuweather_data.coordinator_observation.location_key
         self._attr_attribution = ATTRIBUTION
         self._attr_device_info = accuweather_data.coordinator_observation.device_info
-        self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY
+        self._attr_supported_features = (
+            WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY
+        )

         self.observation_coordinator = accuweather_data.coordinator_observation
         self.daily_coordinator = accuweather_data.coordinator_daily_forecast
+        self.hourly_coordinator = accuweather_data.coordinator_hourly_forecast

     @property
     def condition(self) -> str | None:
@@ -207,3 +213,32 @@ class AccuWeatherEntity(
             }
             for item in self.daily_coordinator.data
         ]

+    @callback
+    def _async_forecast_hourly(self) -> list[Forecast] | None:
+        """Return the hourly forecast in native units."""
+        return [
+            {
+                ATTR_FORECAST_TIME: utc_from_timestamp(
+                    item["EpochDateTime"]
+                ).isoformat(),
+                ATTR_FORECAST_CLOUD_COVERAGE: item["CloudCover"],
+                ATTR_FORECAST_HUMIDITY: item["RelativeHumidity"],
+                ATTR_FORECAST_NATIVE_TEMP: item["Temperature"][ATTR_VALUE],
+                ATTR_FORECAST_NATIVE_APPARENT_TEMP: item["RealFeelTemperature"][
+                    ATTR_VALUE
+                ],
+                ATTR_FORECAST_NATIVE_PRECIPITATION: item["TotalLiquid"][ATTR_VALUE],
+                ATTR_FORECAST_PRECIPITATION_PROBABILITY: item[
+                    "PrecipitationProbability"
+                ],
+                ATTR_FORECAST_NATIVE_WIND_SPEED: item["Wind"][ATTR_SPEED][ATTR_VALUE],
+                ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: item["WindGust"][ATTR_SPEED][
+                    ATTR_VALUE
+                ],
+                ATTR_FORECAST_UV_INDEX: item["UVIndex"],
+                ATTR_FORECAST_WIND_BEARING: item["Wind"][ATTR_DIRECTION]["Degrees"],
+                ATTR_FORECAST_CONDITION: CONDITION_MAP.get(item["WeatherIcon"]),
+            }
+            for item in self.hourly_coordinator.data
+        ]
@@ -3,10 +3,8 @@

import logging
from typing import Any

from aiohttp import web
import voluptuous as vol

from homeassistant.components.http import KEY_HASS, HomeAssistantView
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
from homeassistant.core import (
@@ -28,7 +26,6 @@ from .const import (
    ATTR_STRUCTURE,
    ATTR_TASK_NAME,
    DATA_COMPONENT,
    DATA_IMAGES,
    DATA_PREFERENCES,
    DOMAIN,
    SERVICE_GENERATE_DATA,
@@ -42,7 +39,6 @@ from .task import (
    GenDataTaskResult,
    GenImageTask,
    GenImageTaskResult,
    ImageData,
    async_generate_data,
    async_generate_image,
)
@@ -55,7 +51,6 @@ __all__ = [
    "GenDataTaskResult",
    "GenImageTask",
    "GenImageTaskResult",
    "ImageData",
    "async_generate_data",
    "async_generate_image",
    "async_setup",
@@ -94,10 +89,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
    hass.data[DATA_COMPONENT] = entity_component
    hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
    hass.data[DATA_IMAGES] = {}
    await hass.data[DATA_PREFERENCES].async_load()
    async_setup_http(hass)
    hass.http.register_view(ImageView)
    hass.services.async_register(
        DOMAIN,
        SERVICE_GENERATE_DATA,
@@ -209,28 +202,3 @@ class AITaskPreferences:
    def as_dict(self) -> dict[str, str | None]:
        """Get the current preferences."""
        return {key: getattr(self, key) for key in self.KEYS}


class ImageView(HomeAssistantView):
    """View to generated images."""

    url = f"/api/{DOMAIN}/images/{{filename}}"
    name = f"api:{DOMAIN}/images"

    async def get(
        self,
        request: web.Request,
        filename: str,
    ) -> web.Response:
        """Serve image."""
        hass = request.app[KEY_HASS]
        image_storage = hass.data[DATA_IMAGES]
        image_data = image_storage.get(filename)

        if image_data is None:
            raise web.HTTPNotFound

        return web.Response(
            body=image_data.data,
            content_type=image_data.mime_type,
        )
@@ -8,19 +8,19 @@ from typing import TYPE_CHECKING, Final
from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
    from homeassistant.components.media_source import local_source
    from homeassistant.helpers.entity_component import EntityComponent

    from . import AITaskPreferences
    from .entity import AITaskEntity
    from .task import ImageData

DOMAIN = "ai_task"
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
DATA_IMAGES: HassKey[dict[str, ImageData]] = HassKey(f"{DOMAIN}_images")
DATA_MEDIA_SOURCE: HassKey[local_source.LocalSource] = HassKey(f"{DOMAIN}_media_source")

IMAGE_DIR: Final = "image"
IMAGE_EXPIRY_TIME = 60 * 60  # 1 hour
MAX_IMAGES = 20

SERVICE_GENERATE_DATA = "generate_data"
SERVICE_GENERATE_IMAGE = "generate_image"
@@ -1,7 +1,7 @@
{
  "domain": "ai_task",
  "name": "AI Task",
  "after_dependencies": ["camera", "http"],
  "after_dependencies": ["camera"],
  "codeowners": ["@home-assistant/core"],
  "dependencies": ["conversation", "media_source"],
  "documentation": "https://www.home-assistant.io/integrations/ai_task",
@@ -2,89 +2,22 @@

from __future__ import annotations

from datetime import timedelta
import logging

from homeassistant.components.http.auth import async_sign_path
from homeassistant.components.media_player import BrowseError, MediaClass
from homeassistant.components.media_source import (
    BrowseMediaSource,
    MediaSource,
    MediaSourceItem,
    PlayMedia,
    Unresolvable,
)
from homeassistant.components.media_source import MediaSource, local_source
from homeassistant.core import HomeAssistant

from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME

_LOGGER = logging.getLogger(__name__)
from .const import DATA_MEDIA_SOURCE, DOMAIN, IMAGE_DIR


async def async_get_media_source(hass: HomeAssistant) -> ImageMediaSource:
    """Set up image media source."""
    _LOGGER.debug("Setting up image media source")
    return ImageMediaSource(hass)
async def async_get_media_source(hass: HomeAssistant) -> MediaSource:
    """Set up local media source."""
    media_dir = hass.config.path(f"{DOMAIN}/{IMAGE_DIR}")


class ImageMediaSource(MediaSource):
    """Provide images as media sources."""

    name: str = "AI Generated Images"

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize ImageMediaSource."""
        super().__init__(DOMAIN)
        self.hass = hass

    async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
        """Resolve media to a url."""
        image_storage = self.hass.data[DATA_IMAGES]
        image = image_storage.get(item.identifier)

        if image is None:
            raise Unresolvable(f"Could not resolve media item: {item.identifier}")

        return PlayMedia(
            async_sign_path(
                self.hass,
                f"/api/{DOMAIN}/images/{item.identifier}",
                timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
            ),
            image.mime_type,
        )

    async def async_browse_media(
        self,
        item: MediaSourceItem,
    ) -> BrowseMediaSource:
        """Return media."""
        if item.identifier:
            raise BrowseError("Unknown item")

        image_storage = self.hass.data[DATA_IMAGES]

        children = [
            BrowseMediaSource(
                domain=DOMAIN,
                identifier=filename,
                media_class=MediaClass.IMAGE,
                media_content_type=image.mime_type,
                title=image.title or filename,
                can_play=True,
                can_expand=False,
            )
            for filename, image in image_storage.items()
        ]

        return BrowseMediaSource(
            domain=DOMAIN,
            identifier=None,
            media_class=MediaClass.APP,
            media_content_type="",
            title="AI Generated Images",
            can_play=False,
            can_expand=True,
            children_media_class=MediaClass.IMAGE,
            children=children,
        )
    hass.data[DATA_MEDIA_SOURCE] = source = local_source.LocalSource(
        hass,
        DOMAIN,
        "AI Generated Images",
        {IMAGE_DIR: media_dir},
        False,
        f"/{DOMAIN}",
    )
    return source
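With the custom ImageMediaSource gone, generated images are ordinary files under the config directory and the stock local_source.LocalSource handles browsing and resolving. A hedged usage sketch; the media-source://ai_task/image/... identifier layout is an assumption based on the {IMAGE_DIR: media_dir} mapping above:

# Sketch: resolving a generated image to a playable URL via the standard helper.
from homeassistant.components import media_source


async def resolve_generated_image(hass, filename: str) -> str:
    # The identifier format below is assumed from the LocalSource mapping.
    play_media = await media_source.async_resolve_media(
        hass, f"media-source://ai_task/image/{filename}", None
    )
    return play_media.url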
@@ -4,7 +4,7 @@ from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timedelta
from functools import partial
import io
import mimetypes
from pathlib import Path
import tempfile
@@ -18,16 +18,15 @@ from homeassistant.core import HomeAssistant, ServiceResponse, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
from homeassistant.helpers.event import async_call_later
from homeassistant.util import RE_SANITIZE_FILENAME, slugify

from .const import (
    DATA_COMPONENT,
    DATA_IMAGES,
    DATA_MEDIA_SOURCE,
    DATA_PREFERENCES,
    DOMAIN,
    IMAGE_DIR,
    IMAGE_EXPIRY_TIME,
    MAX_IMAGES,
    AITaskEntityFeature,
)

@@ -157,24 +156,6 @@ async def async_generate_data(
    )


def _cleanup_images(image_storage: dict[str, ImageData], num_to_remove: int) -> None:
    """Remove old images to keep the storage size under the limit."""
    if num_to_remove <= 0:
        return

    if num_to_remove >= len(image_storage):
        image_storage.clear()
        return

    sorted_images = sorted(
        image_storage.items(),
        key=lambda item: item[1].timestamp,
    )

    for filename, _ in sorted_images[:num_to_remove]:
        image_storage.pop(filename, None)


async def async_generate_image(
    hass: HomeAssistant,
    *,
@@ -224,36 +205,34 @@
    if service_result.get("revised_prompt") is None:
        service_result["revised_prompt"] = instructions

    image_storage = hass.data[DATA_IMAGES]

    if len(image_storage) + 1 > MAX_IMAGES:
        _cleanup_images(image_storage, len(image_storage) + 1 - MAX_IMAGES)
    source = hass.data[DATA_MEDIA_SOURCE]

    current_time = datetime.now()
    ext = mimetypes.guess_extension(task_result.mime_type, False) or ".png"
    sanitized_task_name = RE_SANITIZE_FILENAME.sub("", slugify(task_name))
    filename = f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}"

    image_storage[filename] = ImageData(
        data=image_data,
        timestamp=int(current_time.timestamp()),
        mime_type=task_result.mime_type,
        title=service_result["revised_prompt"],
    image_file = ImageData(
        filename=f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}",
        file=io.BytesIO(image_data),
        content_type=task_result.mime_type,
    )

    def _purge_image(filename: str, now: datetime) -> None:
        """Remove image from storage."""
        image_storage.pop(filename, None)
    target_folder = media_source.MediaSourceItem.from_uri(
        hass, f"media-source://{DOMAIN}/{IMAGE_DIR}", None
    )

    if IMAGE_EXPIRY_TIME > 0:
        async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))
    service_result["media_source_id"] = await source.async_upload_media(
        target_folder, image_file
    )

    item = media_source.MediaSourceItem.from_uri(
        hass, service_result["media_source_id"], None
    )
    service_result["url"] = async_sign_path(
        hass,
        f"/api/{DOMAIN}/images/{filename}",
        timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
        (await source.async_resolve_media(item)).url,
        timedelta(seconds=IMAGE_EXPIRY_TIME),
    )
    service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"

    return service_result

@@ -358,20 +337,8 @@ class GenImageTaskResult:

@dataclass(slots=True)
class ImageData:
    """Image data for stored generated images."""
    """Implementation of media_source.local_source.UploadedFile protocol."""

    data: bytes
    """Raw image data."""

    timestamp: int
    """Timestamp when the image was generated, as a Unix timestamp."""

    mime_type: str
    """MIME type of the image."""

    title: str
    """Title of the image, usually the prompt used to generate it."""

    def __str__(self) -> str:
        """Return image data as a string."""
        return f"<ImageData {self.title}: {id(self)}>"
    filename: str
    file: io.IOBase
    content_type: str
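ImageData is repurposed here from an in-memory cache record into a structural match for the uploader's expected shape. A hedged sketch of that duck typing; the actual protocol lives in media_source.local_source, and UploadedFileLike below is an illustrative stand-in, not the real name:

# Sketch: a Protocol capturing the three fields the uploader reads.
import io
from dataclasses import dataclass
from typing import Protocol


class UploadedFileLike(Protocol):
    filename: str
    file: io.IOBase
    content_type: str


@dataclass(slots=True)
class ImageData:
    filename: str
    file: io.IOBase
    content_type: str


def takes_upload(upload: UploadedFileLike) -> str:
    # Any object with the right fields satisfies the protocol structurally.
    return upload.filename


takes_upload(ImageData("a.png", io.BytesIO(b"\x89PNG"), "image/png"))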
@@ -3,7 +3,6 @@
from __future__ import annotations

from genie_partner_sdk.client import AladdinConnectClient
from genie_partner_sdk.model import GarageDoor

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
@@ -36,22 +35,7 @@ async def async_setup_entry(
        api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
    )

    sdk_doors = await client.get_doors()

    # Convert SDK GarageDoor objects to integration GarageDoor objects
    doors = [
        GarageDoor(
            {
                "device_id": door.device_id,
                "door_number": door.door_number,
                "name": door.name,
                "status": door.status,
                "link_status": door.link_status,
                "battery_level": door.battery_level,
            }
        )
        for door in sdk_doors
    ]
    doors = await client.get_doors()

    entry.runtime_data = {
        door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)
@@ -41,4 +41,10 @@ class AladdinConnectCoordinator(DataUpdateCoordinator[GarageDoor]):
    async def _async_update_data(self) -> GarageDoor:
        """Fetch data from the Aladdin Connect API."""
        await self.client.update_door(self.data.device_id, self.data.door_number)
        self.data.status = self.client.get_door_status(
            self.data.device_id, self.data.door_number
        )
        self.data.battery_level = self.client.get_battery_status(
            self.data.device_id, self.data.door_number
        )
        return self.data
@@ -49,7 +49,9 @@ class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
    @property
    def is_closed(self) -> bool | None:
        """Update is closed attribute."""
        return self.coordinator.data.status == "closed"
        if (status := self.coordinator.data.status) is None:
            return None
        return status == "closed"

    @property
    def is_closing(self) -> bool | None:
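The is_closed change above makes the unknown state explicit: with the updated SDK, status can now be None, and comparing None == "closed" would have reported the door as open rather than unknown. The same guard pattern in isolation (a standalone illustration, not the real entity):

# Minimal illustration of the tri-state property pattern.
class Door:
    def __init__(self, status: str | None) -> None:
        self.status = status

    @property
    def is_closed(self) -> bool | None:
        # None propagates "unknown" instead of silently mapping to "open".
        if (status := self.status) is None:
            return None
        return status == "closed"


assert Door(None).is_closed is None
assert Door("closed").is_closed is True
assert Door("open").is_closed is False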
@@ -12,5 +12,5 @@
  "documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "requirements": ["genie-partner-sdk==1.0.10"]
  "requirements": ["genie-partner-sdk==1.0.11"]
}
@@ -33,9 +33,11 @@ from homeassistant.const import (
)
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.storage import STORAGE_DIR
from homeassistant.helpers.typing import ConfigType

from .const import (
    CONF_ADB_SERVER_IP,
@@ -46,10 +48,12 @@ from .const import (
    DEFAULT_ADB_SERVER_PORT,
    DEVICE_ANDROIDTV,
    DEVICE_FIRETV,
    DOMAIN,
    PROP_ETHMAC,
    PROP_WIFIMAC,
    SIGNAL_CONFIG_ENTITY,
)
from .services import async_setup_services

ADB_PYTHON_EXCEPTIONS: tuple = (
    AdbTimeoutError,
@@ -63,6 +67,8 @@ ADB_PYTHON_EXCEPTIONS: tuple = (
)
ADB_TCP_EXCEPTIONS: tuple = (ConnectionResetError, RuntimeError)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]
RELOAD_OPTIONS = [CONF_STATE_DETECTION_RULES]

@@ -188,6 +194,12 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return True


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Android TV / Fire TV integration."""
    async_setup_services(hass)
    return True


async def async_setup_entry(hass: HomeAssistant, entry: AndroidTVConfigEntry) -> bool:
    """Set up Android Debug Bridge platform."""

@@ -8,7 +8,6 @@ import logging

from androidtv.constants import APPS, KEYS
from androidtv.setup_async import AndroidTVAsync, FireTVAsync
import voluptuous as vol

from homeassistant.components import persistent_notification
from homeassistant.components.media_player import (
@@ -17,9 +16,7 @@ from homeassistant.components.media_player import (
    MediaPlayerEntityFeature,
    MediaPlayerState,
)
from homeassistant.const import ATTR_COMMAND
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.dt import utcnow
@@ -39,19 +36,10 @@ from .const import (
    SIGNAL_CONFIG_ENTITY,
)
from .entity import AndroidTVEntity, adb_decorator
from .services import ATTR_ADB_RESPONSE, ATTR_HDMI_INPUT, SERVICE_LEARN_SENDEVENT

_LOGGER = logging.getLogger(__name__)

ATTR_ADB_RESPONSE = "adb_response"
ATTR_DEVICE_PATH = "device_path"
ATTR_HDMI_INPUT = "hdmi_input"
ATTR_LOCAL_PATH = "local_path"

SERVICE_ADB_COMMAND = "adb_command"
SERVICE_DOWNLOAD = "download"
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
SERVICE_UPLOAD = "upload"

# Translate from `AndroidTV` / `FireTV` reported state to HA state.
ANDROIDTV_STATES = {
    "off": MediaPlayerState.OFF,
@@ -77,32 +65,6 @@ async def async_setup_entry(
        ]
    )

    platform = entity_platform.async_get_current_platform()
    platform.async_register_entity_service(
        SERVICE_ADB_COMMAND,
        {vol.Required(ATTR_COMMAND): cv.string},
        "adb_command",
    )
    platform.async_register_entity_service(
        SERVICE_LEARN_SENDEVENT, None, "learn_sendevent"
    )
    platform.async_register_entity_service(
        SERVICE_DOWNLOAD,
        {
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        "service_download",
    )
    platform.async_register_entity_service(
        SERVICE_UPLOAD,
        {
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        "service_upload",
    )


class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
    """Representation of an Android or Fire TV device."""

66 homeassistant/components/androidtv/services.py Normal file
@@ -0,0 +1,66 @@
"""Services for Android/Fire TV devices."""

from __future__ import annotations

import voluptuous as vol

from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.const import ATTR_COMMAND
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, service

from .const import DOMAIN

ATTR_ADB_RESPONSE = "adb_response"
ATTR_DEVICE_PATH = "device_path"
ATTR_HDMI_INPUT = "hdmi_input"
ATTR_LOCAL_PATH = "local_path"

SERVICE_ADB_COMMAND = "adb_command"
SERVICE_DOWNLOAD = "download"
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
SERVICE_UPLOAD = "upload"


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register the Android TV / Fire TV services."""

    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_ADB_COMMAND,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={vol.Required(ATTR_COMMAND): cv.string},
        func="adb_command",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_LEARN_SENDEVENT,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema=None,
        func="learn_sendevent",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_DOWNLOAD,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        func="service_download",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_UPLOAD,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        func="service_upload",
    )
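The new module registers the entity services once at integration setup instead of per media_player platform load. From an automation's point of view nothing changes; a hedged sketch of calling one of them (the entity ID is illustrative):

# Sketch: invoking the relocated service; caller-facing behavior is unchanged.
async def send_home(hass) -> None:
    await hass.services.async_call(
        "androidtv",
        "adb_command",
        {"entity_id": "media_player.fire_tv_living_room", "command": "HOME"},
        blocking=True,
    )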
@@ -16,7 +16,7 @@ from .coordinator import (
    AOSmithStatusCoordinator,
)

PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WATER_HEATER]
PLATFORMS: list[Platform] = [Platform.SELECT, Platform.SENSOR, Platform.WATER_HEATER]


async def async_setup_entry(hass: HomeAssistant, entry: AOSmithConfigEntry) -> bool:
@@ -1,5 +1,10 @@
{
  "entity": {
    "select": {
      "hot_water_plus_level": {
        "default": "mdi:water-plus"
      }
    },
    "sensor": {
      "hot_water_availability": {
        "default": "mdi:water-thermometer"

70 homeassistant/components/aosmith/select.py Normal file
@@ -0,0 +1,70 @@
"""The select platform for the A. O. Smith integration."""

from homeassistant.components.select import SelectEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AOSmithConfigEntry
from .coordinator import AOSmithStatusCoordinator
from .entity import AOSmithStatusEntity

HWP_LEVEL_HA_TO_AOSMITH = {
    "off": 0,
    "level1": 1,
    "level2": 2,
    "level3": 3,
}
HWP_LEVEL_AOSMITH_TO_HA = {value: key for key, value in HWP_LEVEL_HA_TO_AOSMITH.items()}


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AOSmithConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up A. O. Smith select platform."""
    data = entry.runtime_data

    async_add_entities(
        AOSmithHotWaterPlusSelectEntity(data.status_coordinator, device.junction_id)
        for device in data.status_coordinator.data.values()
        if device.supports_hot_water_plus
    )


class AOSmithHotWaterPlusSelectEntity(AOSmithStatusEntity, SelectEntity):
    """Class for the Hot Water+ select entity."""

    _attr_translation_key = "hot_water_plus_level"
    _attr_options = list(HWP_LEVEL_HA_TO_AOSMITH)

    def __init__(self, coordinator: AOSmithStatusCoordinator, junction_id: str) -> None:
        """Initialize the entity."""
        super().__init__(coordinator, junction_id)
        self._attr_unique_id = f"hot_water_plus_level_{junction_id}"

    @property
    def suggested_object_id(self) -> str | None:
        """Override the suggested object id to make '+' get converted to 'plus' in the entity id."""
        return "hot_water_plus_level"

    @property
    def current_option(self) -> str | None:
        """Return the current Hot Water+ mode."""
        hot_water_plus_level = self.device.status.hot_water_plus_level
        return (
            None
            if hot_water_plus_level is None
            else HWP_LEVEL_AOSMITH_TO_HA.get(hot_water_plus_level)
        )

    async def async_select_option(self, option: str) -> None:
        """Set the Hot Water+ mode."""
        aosmith_hwp_level = HWP_LEVEL_HA_TO_AOSMITH[option]
        await self.client.update_mode(
            junction_id=self.junction_id,
            mode=self.device.status.current_mode,
            hot_water_plus_level=aosmith_hwp_level,
        )

        await self.coordinator.async_request_refresh()
@@ -26,6 +26,17 @@
    }
  },
  "entity": {
    "select": {
      "hot_water_plus_level": {
        "name": "Hot Water+ level",
        "state": {
          "off": "[%key:common::state::off%]",
          "level1": "Level 1",
          "level2": "Level 2",
          "level3": "Level 3"
        }
      }
    },
    "sensor": {
      "hot_water_availability": {
        "name": "Hot water availability"
@@ -7,5 +7,5 @@
  "iot_class": "local_polling",
  "loggers": ["apcaccess"],
  "quality_scale": "platinum",
  "requirements": ["aioapcaccess==0.4.2"]
  "requirements": ["aioapcaccess==1.0.0"]
}
@@ -395,6 +395,7 @@ SENSORS: dict[str, SensorEntityDescription] = {
    "upsmode": SensorEntityDescription(
        key="upsmode",
        translation_key="ups_mode",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    "upsname": SensorEntityDescription(
        key="upsname",

BIN homeassistant/components/assist_pipeline/acknowledge.mp3 Normal file
Binary file not shown.
@@ -1,5 +1,7 @@
"""Constants for the Assist pipeline integration."""

from pathlib import Path

DOMAIN = "assist_pipeline"

DATA_CONFIG = f"{DOMAIN}.config"
@@ -23,3 +25,5 @@ SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK)  # 10 ms @ 16Khz
BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS  # 16-bit

OPTION_PREFERRED = "preferred"

ACKNOWLEDGE_PATH = Path(__file__).parent / "acknowledge.mp3"
@@ -23,7 +23,12 @@ from homeassistant.components import conversation, stt, tts, wake_word, websocke
from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, intent
from homeassistant.helpers import (
    chat_session,
    device_registry as dr,
    entity_registry as er,
    intent,
)
from homeassistant.helpers.collection import (
    CHANGE_UPDATED,
    CollectionError,
@@ -45,6 +50,7 @@ from homeassistant.util.limited_size_dict import LimitedSizeDict

from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .const import (
    ACKNOWLEDGE_PATH,
    BYTES_PER_CHUNK,
    CONF_DEBUG_RECORDING_DIR,
    DATA_CONFIG,
@@ -113,6 +119,7 @@ PIPELINE_FIELDS: VolDictType = {
    vol.Required("wake_word_entity"): vol.Any(str, None),
    vol.Required("wake_word_id"): vol.Any(str, None),
    vol.Optional("prefer_local_intents"): bool,
    vol.Optional("acknowledge_media_id"): str,
}

STORED_PIPELINE_RUNS = 10
@@ -1066,8 +1073,11 @@ class PipelineRun:
        intent_input: str,
        conversation_id: str,
        conversation_extra_system_prompt: str | None,
    ) -> str:
        """Run intent recognition portion of pipeline. Returns text to speak."""
    ) -> tuple[str, bool]:
        """Run intent recognition portion of pipeline.

        Returns (speech, all_targets_in_satellite_area).
        """
        if self.intent_agent is None or self._conversation_data is None:
            raise RuntimeError("Recognize intent was not prepared")

@@ -1116,6 +1126,7 @@ class PipelineRun:

        agent_id = self.intent_agent.id
        processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
        all_targets_in_satellite_area = False
        intent_response: intent.IntentResponse | None = None
        if not processed_locally and not self._intent_agent_only:
            # Sentence triggers override conversation agent
@@ -1290,6 +1301,17 @@ class PipelineRun:
            if tts_input_stream and self._streamed_response_text:
                tts_input_stream.put_nowait(None)

            if agent_id == conversation.HOME_ASSISTANT_AGENT:
                # Check if all targeted entities were in the same area as
                # the satellite device.
                # If so, the satellite should respond with an acknowledge beep
                # instead of a full response.
                all_targets_in_satellite_area = (
                    self._get_all_targets_in_satellite_area(
                        conversation_result.response, self._device_id
                    )
                )

        except Exception as src_error:
            _LOGGER.exception("Unexpected error during intent recognition")
            raise IntentRecognitionError(
@@ -1312,7 +1334,45 @@ class PipelineRun:
        if conversation_result.continue_conversation:
            self._conversation_data.continue_conversation_agent = agent_id

        return speech
        return (speech, all_targets_in_satellite_area)

    def _get_all_targets_in_satellite_area(
        self, intent_response: intent.IntentResponse, device_id: str | None
    ) -> bool:
        """Return true if all targeted entities were in the same area as the device."""
        if (
            (intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
            or (not intent_response.matched_states)
            or (not device_id)
        ):
            return False

        device_registry = dr.async_get(self.hass)

        if (not (device := device_registry.async_get(device_id))) or (
            not device.area_id
        ):
            return False

        entity_registry = er.async_get(self.hass)
        for state in intent_response.matched_states:
            entity = entity_registry.async_get(state.entity_id)
            if not entity:
                return False

            if (entity_area_id := entity.area_id) is None:
                if (entity.device_id is None) or (
                    (entity_device := device_registry.async_get(entity.device_id))
                    is None
                ):
                    return False

                entity_area_id = entity_device.area_id

            if entity_area_id != device.area_id:
                return False

        return True

    async def prepare_text_to_speech(self) -> None:
        """Prepare text-to-speech."""
@@ -1350,7 +1410,9 @@ class PipelineRun:
            ),
        ) from err

    async def text_to_speech(self, tts_input: str) -> None:
    async def text_to_speech(
        self, tts_input: str, override_media_path: Path | None = None
    ) -> None:
        """Run text-to-speech portion of pipeline."""
        assert self.tts_stream is not None

@@ -1362,11 +1424,14 @@ class PipelineRun:
                    "language": self.pipeline.tts_language,
                    "voice": self.pipeline.tts_voice,
                    "tts_input": tts_input,
                    "acknowledge_override": override_media_path is not None,
                },
            )
        )

        if not self._streamed_response_text:
        if override_media_path:
            self.tts_stream.async_override_result(override_media_path)
        elif not self._streamed_response_text:
            self.tts_stream.async_set_message(tts_input)

        tts_output = {
@@ -1664,16 +1729,20 @@ class PipelineInput:

            if self.run.end_stage != PipelineStage.STT:
                tts_input = self.tts_input
                all_targets_in_satellite_area = False

                if current_stage == PipelineStage.INTENT:
                    # intent-recognition
                    assert intent_input is not None
                    tts_input = await self.run.recognize_intent(
                    (
                        tts_input,
                        all_targets_in_satellite_area,
                    ) = await self.run.recognize_intent(
                        intent_input,
                        self.session.conversation_id,
                        self.conversation_extra_system_prompt,
                    )
                    if tts_input.strip():
                    if all_targets_in_satellite_area or tts_input.strip():
                        current_stage = PipelineStage.TTS
                    else:
                        # Skip TTS
@@ -1682,8 +1751,14 @@ class PipelineInput:
            if self.run.end_stage != PipelineStage.INTENT:
                # text-to-speech
                if current_stage == PipelineStage.TTS:
                    assert tts_input is not None
                    await self.run.text_to_speech(tts_input)
                    if all_targets_in_satellite_area:
                        # Use acknowledge media instead of full response
                        await self.run.text_to_speech(
                            tts_input or "", override_media_path=ACKNOWLEDGE_PATH
                        )
                    else:
                        assert tts_input is not None
                        await self.run.text_to_speech(tts_input)

        except PipelineError as err:
            self.run.process_event(
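Taken together, the pipeline changes implement a short-circuit: when the local agent handled the intent and every targeted entity sits in the satellite's own area, the satellite plays acknowledge.mp3 instead of speaking the full response. A condensed sketch of that decision with plain data instead of registries (simplified; the real logic is _get_all_targets_in_satellite_area above):

# Sketch of the acknowledge-beep decision, registry lookups replaced by plain data.
def should_acknowledge(
    handled_locally: bool,
    satellite_area: str | None,
    target_areas: list[str | None],
) -> bool:
    # Beep only when the local agent acted and every target shares the
    # satellite's area; any unknown area falls back to a spoken response.
    if not handled_locally or satellite_area is None or not target_areas:
        return False
    return all(area == satellite_area for area in target_areas)


assert should_acknowledge(True, "kitchen", ["kitchen", "kitchen"])
assert not should_acknowledge(True, "kitchen", ["kitchen", None])
assert not should_acknowledge(False, "kitchen", ["kitchen"])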
@@ -3,6 +3,7 @@
from __future__ import annotations

from collections.abc import Iterable
from dataclasses import replace

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory, Platform
@@ -64,15 +65,36 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
        translation_key="pipeline",
        entity_category=EntityCategory.CONFIG,
    )

    _attr_should_poll = False
    _attr_current_option = OPTION_PREFERRED
    _attr_options = [OPTION_PREFERRED]

    def __init__(self, hass: HomeAssistant, domain: str, unique_id_prefix: str) -> None:
    def __init__(
        self,
        hass: HomeAssistant,
        domain: str,
        unique_id_prefix: str,
        index: int = 0,
    ) -> None:
        """Initialize a pipeline selector."""
        if index < 1:
            # Keep compatibility
            key_suffix = ""
            placeholder = ""
        else:
            key_suffix = f"_{index + 1}"
            placeholder = f" {index + 1}"

        self.entity_description = replace(
            self.entity_description,
            key=f"pipeline{key_suffix}",
            translation_placeholders={"index": placeholder},
        )

        self._domain = domain
        self._unique_id_prefix = unique_id_prefix
        self._attr_unique_id = f"{unique_id_prefix}-pipeline"
        self._attr_unique_id = f"{unique_id_prefix}-{self.entity_description.key}"
        self.hass = hass
        self._update_options()

@@ -7,7 +7,7 @@
    },
    "select": {
      "pipeline": {
        "name": "Assistant",
        "name": "Assistant{index}",
        "state": {
          "preferred": "Preferred"
        }
@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
    "tmp_backups/*.tar",
    "OZW_Log.txt",
    "tts/*",
    "ai_task/*",
]

EXCLUDE_DATABASE_FROM_BACKUP = [
@@ -14,15 +14,15 @@
    },
    "automatic_backup_failed_addons": {
      "title": "Not all add-ons could be included in automatic backup",
      "description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
      "description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
    },
    "automatic_backup_failed_agents_addons_folders": {
      "title": "Automatic backup was created with errors",
      "description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the core and supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
      "description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
    },
    "automatic_backup_failed_folders": {
      "title": "Not all folders could be included in automatic backup",
      "description": "Folders {failed_folders} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
      "description": "Folders {failed_folders} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
    }
  },
  "services": {
@@ -18,9 +18,9 @@
    "bleak==1.0.1",
    "bleak-retry-connector==4.4.3",
    "bluetooth-adapters==2.1.0",
    "bluetooth-auto-recovery==1.5.2",
    "bluetooth-auto-recovery==1.5.3",
    "bluetooth-data-tools==1.28.2",
    "dbus-fast==2.44.3",
    "habluetooth==5.6.2"
    "habluetooth==5.6.4"
  ]
}
@@ -1,4 +1,9 @@
{
  "entity_component": {
    "_": {
      "default": "mdi:forum-outline"
    }
  },
  "services": {
    "process": {
      "service": "mdi:message-processing"
@@ -4,7 +4,7 @@
  "codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"],
  "dependencies": ["http", "intent"],
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "system",
  "integration_type": "entity",
  "quality_scale": "internal",
  "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.3"]
}
@@ -152,24 +152,28 @@ ECOWITT_SENSORS_MAPPING: Final = {
        native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
        device_class=SensorDeviceClass.PRECIPITATION,
        state_class=SensorStateClass.TOTAL_INCREASING,
        suggested_display_precision=1,
    ),
    EcoWittSensorTypes.RAIN_COUNT_INCHES: SensorEntityDescription(
        key="RAIN_COUNT_INCHES",
        native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
        device_class=SensorDeviceClass.PRECIPITATION,
        state_class=SensorStateClass.TOTAL_INCREASING,
        suggested_display_precision=2,
    ),
    EcoWittSensorTypes.RAIN_RATE_MM: SensorEntityDescription(
        key="RAIN_RATE_MM",
        native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
        suggested_display_precision=1,
    ),
    EcoWittSensorTypes.RAIN_RATE_INCHES: SensorEntityDescription(
        key="RAIN_RATE_INCHES",
        native_unit_of_measurement=UnitOfVolumetricFlux.INCHES_PER_HOUR,
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
        suggested_display_precision=2,
    ),
    EcoWittSensorTypes.LIGHTNING_DISTANCE_KM: SensorEntityDescription(
        key="LIGHTNING_DISTANCE_KM",
@@ -14,7 +14,11 @@ from elkm1_lib.util import pretty_const
from elkm1_lib.zones import Zone
import voluptuous as vol

from homeassistant.components.sensor import SensorEntity
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import EntityCategory, UnitOfElectricPotential
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@@ -32,6 +36,16 @@ SERVICE_SENSOR_ZONE_BYPASS = "sensor_zone_bypass"
SERVICE_SENSOR_ZONE_TRIGGER = "sensor_zone_trigger"
UNDEFINED_TEMPERATURE = -40

_DEVICE_CLASS_MAP: dict[ZoneType, SensorDeviceClass] = {
    ZoneType.TEMPERATURE: SensorDeviceClass.TEMPERATURE,
    ZoneType.ANALOG_ZONE: SensorDeviceClass.VOLTAGE,
}

_STATE_CLASS_MAP: dict[ZoneType, SensorStateClass] = {
    ZoneType.TEMPERATURE: SensorStateClass.MEASUREMENT,
    ZoneType.ANALOG_ZONE: SensorStateClass.MEASUREMENT,
}

ELK_SET_COUNTER_SERVICE_SCHEMA: VolDictType = {
    vol.Required(ATTR_VALUE): vol.All(vol.Coerce(int), vol.Range(0, 65535))
}
@@ -248,6 +262,16 @@ class ElkZone(ElkSensor):
            return self._temperature_unit
        return None

    @property
    def device_class(self) -> SensorDeviceClass | None:
        """Return the device class of the sensor."""
        return _DEVICE_CLASS_MAP.get(self._element.definition)

    @property
    def state_class(self) -> SensorStateClass | None:
        """Return the state class of the sensor."""
        return _STATE_CLASS_MAP.get(self._element.definition)

    @property
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit of measurement."""
@@ -1,7 +1,7 @@
{
  "domain": "enocean",
  "name": "EnOcean",
  "codeowners": ["@bdurrer"],
  "codeowners": [],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/enocean",
  "iot_class": "local_push",
@@ -127,27 +127,39 @@ class EsphomeAssistSatellite(
            available_wake_words=[], active_wake_words=[], max_active_wake_words=1
        )

    @property
    def pipeline_entity_id(self) -> str | None:
        """Return the entity ID of the pipeline to use for the next conversation."""
        assert self._entry_data.device_info is not None
        self._active_pipeline_index = 0

    def _get_entity_id(self, suffix: str) -> str | None:
        """Return the entity id for pipeline select, etc."""
        if self._entry_data.device_info is None:
            return None

        ent_reg = er.async_get(self.hass)
        return ent_reg.async_get_entity_id(
            Platform.SELECT,
            DOMAIN,
            f"{self._entry_data.device_info.mac_address}-pipeline",
            f"{self._entry_data.device_info.mac_address}-{suffix}",
        )

    @property
    def pipeline_entity_id(self) -> str | None:
        """Return the entity ID of the primary pipeline to use for the next conversation."""
        return self.get_pipeline_entity(self._active_pipeline_index)

    def get_pipeline_entity(self, index: int) -> str | None:
        """Return the entity ID of a pipeline by index."""
        id_suffix = "" if index < 1 else f"_{index + 1}"
        return self._get_entity_id(f"pipeline{id_suffix}")

    def get_wake_word_entity(self, index: int) -> str | None:
        """Return the entity ID of a wake word by index."""
        id_suffix = "" if index < 1 else f"_{index + 1}"
        return self._get_entity_id(f"wake_word{id_suffix}")

    @property
    def vad_sensitivity_entity_id(self) -> str | None:
        """Return the entity ID of the VAD sensitivity to use for the next conversation."""
        assert self._entry_data.device_info is not None
        ent_reg = er.async_get(self.hass)
        return ent_reg.async_get_entity_id(
            Platform.SELECT,
            DOMAIN,
            f"{self._entry_data.device_info.mac_address}-vad_sensitivity",
        )
        return self._get_entity_id("vad_sensitivity")

    @callback
    def async_get_configuration(
@@ -235,6 +247,7 @@ class EsphomeAssistSatellite(
            )
        )

        assert self._attr_supported_features is not None
        if feature_flags & VoiceAssistantFeature.ANNOUNCE:
            # Device supports announcements
            self._attr_supported_features |= (
@@ -257,8 +270,8 @@ class EsphomeAssistSatellite(

        # Update wake word select when config is updated
        self.async_on_remove(
            self._entry_data.async_register_assist_satellite_set_wake_word_callback(
                self.async_set_wake_word
            self._entry_data.async_register_assist_satellite_set_wake_words_callback(
                self.async_set_wake_words
            )
        )

@@ -482,8 +495,31 @@ class EsphomeAssistSatellite(
            # ANNOUNCEMENT format from media player
            self._update_tts_format()

        # Run the pipeline
        _LOGGER.debug("Running pipeline from %s to %s", start_stage, end_stage)
        # Run the appropriate pipeline.
        self._active_pipeline_index = 0

        maybe_pipeline_index = 0
        while True:
            if not (ww_entity_id := self.get_wake_word_entity(maybe_pipeline_index)):
                break

            if not (ww_state := self.hass.states.get(ww_entity_id)):
                continue

            if ww_state.state == wake_word_phrase:
                # First match
                self._active_pipeline_index = maybe_pipeline_index
                break

            # Try next wake word select
            maybe_pipeline_index += 1

        _LOGGER.debug(
            "Running pipeline %s from %s to %s",
            self._active_pipeline_index + 1,
            start_stage,
            end_stage,
        )
        self._pipeline_task = self.config_entry.async_create_background_task(
            self.hass,
            self.async_accept_pipeline_from_satellite(
@@ -514,6 +550,7 @@ class EsphomeAssistSatellite(
    def handle_pipeline_finished(self) -> None:
        """Handle when pipeline has finished running."""
        self._stop_udp_server()
        self._active_pipeline_index = 0
        _LOGGER.debug("Pipeline finished")

    def handle_timer_event(
@@ -542,15 +579,15 @@ class EsphomeAssistSatellite(
        self.tts_response_finished()

    @callback
    def async_set_wake_word(self, wake_word_id: str) -> None:
        """Set active wake word and update config on satellite."""
        self._satellite_config.active_wake_words = [wake_word_id]
    def async_set_wake_words(self, wake_word_ids: list[str]) -> None:
        """Set active wake words and update config on satellite."""
        self._satellite_config.active_wake_words = wake_word_ids
        self.config_entry.async_create_background_task(
            self.hass,
            self.async_set_configuration(self._satellite_config),
            "esphome_voice_assistant_set_config",
        )
        _LOGGER.debug("Setting active wake word: %s", wake_word_id)
        _LOGGER.debug("Setting active wake word(s): %s", wake_word_ids)

    def _update_tts_format(self) -> None:
        """Update the TTS format from the first media player."""
@@ -25,3 +25,5 @@ PROJECT_URLS = {
# ESPHome always uses .0 for the changelog URL
STABLE_BLE_URL_VERSION = f"{STABLE_BLE_VERSION.major}.{STABLE_BLE_VERSION.minor}.0"
DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_URL_VERSION}.html"

NO_WAKE_WORD: Final[str] = "no_wake_word"
@@ -177,9 +177,10 @@ class RuntimeEntryData:
    assist_satellite_config_update_callbacks: list[
        Callable[[AssistSatelliteConfiguration], None]
    ] = field(default_factory=list)
    assist_satellite_set_wake_word_callbacks: list[Callable[[str], None]] = field(
        default_factory=list
    assist_satellite_set_wake_words_callbacks: list[Callable[[list[str]], None]] = (
        field(default_factory=list)
    )
    assist_satellite_wake_words: dict[int, str] = field(default_factory=dict)
    device_id_to_name: dict[int, str] = field(default_factory=dict)
    entity_removal_callbacks: dict[EntityInfoKey, list[CALLBACK_TYPE]] = field(
        default_factory=dict
@@ -501,19 +502,28 @@ class RuntimeEntryData:
            callback_(config)

    @callback
    def async_register_assist_satellite_set_wake_word_callback(
    def async_register_assist_satellite_set_wake_words_callback(
        self,
        callback_: Callable[[str], None],
        callback_: Callable[[list[str]], None],
    ) -> CALLBACK_TYPE:
        """Register to receive callbacks when the Assist satellite's wake word is set."""
        self.assist_satellite_set_wake_word_callbacks.append(callback_)
        return partial(self.assist_satellite_set_wake_word_callbacks.remove, callback_)
        self.assist_satellite_set_wake_words_callbacks.append(callback_)
        return partial(self.assist_satellite_set_wake_words_callbacks.remove, callback_)

    @callback
    def async_assist_satellite_set_wake_word(self, wake_word_id: str) -> None:
        """Notify listeners that the Assist satellite wake word has been set."""
        for callback_ in self.assist_satellite_set_wake_word_callbacks.copy():
            callback_(wake_word_id)
    def async_assist_satellite_set_wake_word(
        self, wake_word_index: int, wake_word_id: str | None
    ) -> None:
        """Notify listeners that the Assist satellite wake words have been set."""
        if wake_word_id:
            self.assist_satellite_wake_words[wake_word_index] = wake_word_id
        else:
            self.assist_satellite_wake_words.pop(wake_word_index, None)

        wake_word_ids = list(self.assist_satellite_wake_words.values())

        for callback_ in self.assist_satellite_set_wake_words_callbacks.copy():
            callback_(wake_word_ids)

    @callback
    def async_register_entity_removal_callback(
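The entry data now tracks one wake word per select index and fans the combined list out to subscribers, so clearing one slot (passing None) removes just that entry. A small standalone sketch of the index-keyed bookkeeping (illustrative names, not the real dataclass):

# Sketch: per-index wake word bookkeeping feeding list-based callbacks.
from collections.abc import Callable


class WakeWordBook:
    def __init__(self) -> None:
        self._by_index: dict[int, str] = {}
        self._callbacks: list[Callable[[list[str]], None]] = []

    def set(self, index: int, wake_word_id: str | None) -> None:
        # None clears a single slot; any other value replaces it.
        if wake_word_id:
            self._by_index[index] = wake_word_id
        else:
            self._by_index.pop(index, None)
        ids = list(self._by_index.values())
        for callback_ in self._callbacks:
            callback_(ids)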
@@ -9,11 +9,17 @@
    "pipeline": {
      "default": "mdi:filter-outline"
    },
    "pipeline_2": {
      "default": "mdi:filter-outline"
    },
    "vad_sensitivity": {
      "default": "mdi:volume-high"
    },
    "wake_word": {
      "default": "mdi:microphone"
    },
    "wake_word_2": {
      "default": "mdi:microphone"
    }
  }
}
@@ -17,7 +17,7 @@
  "mqtt": ["esphome/discover/#"],
  "quality_scale": "platinum",
  "requirements": [
    "aioesphomeapi==40.1.0",
    "aioesphomeapi==40.2.0",
    "esphome-dashboard-api==1.3.0",
    "bleak-esphome==3.3.0"
  ],
@@ -2,6 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import replace
|
||||
|
||||
from aioesphomeapi import EntityInfo, SelectInfo, SelectState
|
||||
|
||||
from homeassistant.components.assist_pipeline.select import (
|
||||
@@ -15,7 +17,7 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import restore_state
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DOMAIN, NO_WAKE_WORD
|
||||
from .entity import (
|
||||
EsphomeAssistEntity,
|
||||
EsphomeEntity,
|
||||
@@ -50,9 +52,11 @@ async def async_setup_entry(
|
||||
):
|
||||
async_add_entities(
|
||||
[
|
||||
EsphomeAssistPipelineSelect(hass, entry_data),
|
||||
EsphomeAssistPipelineSelect(hass, entry_data, index=0),
|
||||
EsphomeAssistPipelineSelect(hass, entry_data, index=1),
|
||||
EsphomeVadSensitivitySelect(hass, entry_data),
|
||||
EsphomeAssistSatelliteWakeWordSelect(entry_data),
|
||||
EsphomeAssistSatelliteWakeWordSelect(entry_data, index=0),
|
||||
EsphomeAssistSatelliteWakeWordSelect(entry_data, index=1),
|
||||
]
|
||||
)
|
||||
|
||||
@@ -84,10 +88,14 @@ class EsphomeSelect(EsphomeEntity[SelectInfo, SelectState], SelectEntity):
|
||||
class EsphomeAssistPipelineSelect(EsphomeAssistEntity, AssistPipelineSelect):
|
||||
"""Pipeline selector for esphome devices."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry_data: RuntimeEntryData) -> None:
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, entry_data: RuntimeEntryData, index: int = 0
|
||||
) -> None:
|
||||
"""Initialize a pipeline selector."""
|
||||
EsphomeAssistEntity.__init__(self, entry_data)
|
||||
AssistPipelineSelect.__init__(self, hass, DOMAIN, self._device_info.mac_address)
|
||||
AssistPipelineSelect.__init__(
|
||||
self, hass, DOMAIN, self._device_info.mac_address, index=index
|
||||
)
|
||||
|
||||
|
||||
class EsphomeVadSensitivitySelect(EsphomeAssistEntity, VadSensitivitySelect):

@@ -109,28 +117,47 @@ class EsphomeAssistSatelliteWakeWordSelect(
        translation_key="wake_word",
        entity_category=EntityCategory.CONFIG,
    )
    _attr_current_option: str | None = None
    _attr_options: list[str] = []

    def __init__(self, entry_data: RuntimeEntryData) -> None:
    _attr_current_option: str | None = None
    _attr_options: list[str] = [NO_WAKE_WORD]

    def __init__(self, entry_data: RuntimeEntryData, index: int = 0) -> None:
        """Initialize a wake word selector."""
        if index < 1:
            # Keep compatibility
            key_suffix = ""
            placeholder = ""
        else:
            key_suffix = f"_{index + 1}"
            placeholder = f" {index + 1}"

        self.entity_description = replace(
            self.entity_description,
            key=f"wake_word{key_suffix}",
            translation_placeholders={"index": placeholder},
        )

        EsphomeAssistEntity.__init__(self, entry_data)

        unique_id_prefix = self._device_info.mac_address
        self._attr_unique_id = f"{unique_id_prefix}-wake_word"
        self._attr_unique_id = f"{unique_id_prefix}-{self.entity_description.key}"

        # name -> id
        self._wake_words: dict[str, str] = {}
        self._wake_word_index = index

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return bool(self._attr_options)
        return len(self._attr_options) > 1  # more than just NO_WAKE_WORD

    async def async_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""
        await super().async_added_to_hass()

        if last_state := await self.async_get_last_state():
            self._attr_current_option = last_state.state

        # Update options when config is updated
        self.async_on_remove(
            self._entry_data.async_register_assist_satellite_config_updated_callback(

@@ -140,33 +167,49 @@ class EsphomeAssistSatelliteWakeWordSelect(

    async def async_select_option(self, option: str) -> None:
        """Select an option."""
        if wake_word_id := self._wake_words.get(option):
            # _attr_current_option will be updated on
            # async_satellite_config_updated after the device sets the wake
            # word.
            self._entry_data.async_assist_satellite_set_wake_word(wake_word_id)
        self._attr_current_option = option
        self.async_write_ha_state()

        wake_word_id = self._wake_words.get(option)
        self._entry_data.async_assist_satellite_set_wake_word(
            self._wake_word_index, wake_word_id
        )

    def async_satellite_config_updated(
        self, config: AssistSatelliteConfiguration
    ) -> None:
        """Update options with available wake words."""
        if (not config.available_wake_words) or (config.max_active_wake_words < 1):
            self._attr_current_option = None
            # No wake words
            self._wake_words.clear()
            self._attr_current_option = NO_WAKE_WORD
            self._attr_options = [NO_WAKE_WORD]
            self._entry_data.assist_satellite_wake_words.pop(
                self._wake_word_index, None
            )
            self.async_write_ha_state()
            return

        self._wake_words = {w.wake_word: w.id for w in config.available_wake_words}
        self._attr_options = sorted(self._wake_words)
        self._attr_options = [NO_WAKE_WORD, *sorted(self._wake_words)]

        if config.active_wake_words:
            # Select first active wake word
            wake_word_id = config.active_wake_words[0]
            for wake_word in config.available_wake_words:
                if wake_word.id == wake_word_id:
                    self._attr_current_option = wake_word.wake_word
        else:
            # Select first available wake word
            self._attr_current_option = config.available_wake_words[0].wake_word
        option = self._attr_current_option
        if (
            (option is None)
            or ((wake_word_id := self._wake_words.get(option)) is None)
            or (wake_word_id not in config.active_wake_words)
        ):
            option = NO_WAKE_WORD

        self._attr_current_option = option
        self.async_write_ha_state()

        # Keep entry data in sync
        if wake_word_id := self._wake_words.get(option):
            self._entry_data.assist_satellite_wake_words[self._wake_word_index] = (
                wake_word_id
            )
        else:
            self._entry_data.assist_satellite_wake_words.pop(
                self._wake_word_index, None
            )
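Aside, on the wake word change above: the selector now always exposes a NO_WAKE_WORD sentinel and falls back to it whenever the restored option no longer maps to an active wake word. A minimal standalone sketch of that resolution rule (function and argument names here are illustrative, not the integration's API):

NO_WAKE_WORD = "No wake word"

def resolve_option(
    current: str | None,
    name_to_id: dict[str, str],
    active_ids: list[str],
) -> str:
    """Return the option to display, falling back to the sentinel."""
    if current is None:
        return NO_WAKE_WORD
    wake_word_id = name_to_id.get(current)
    if wake_word_id is None or wake_word_id not in active_ids:
        return NO_WAKE_WORD
    return current

assert resolve_option("Okay Nabu", {"Okay Nabu": "okay_nabu"}, ["okay_nabu"]) == "Okay Nabu"
assert resolve_option("Okay Nabu", {"Okay Nabu": "okay_nabu"}, []) == NO_WAKE_WORD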
@@ -12,7 +12,7 @@
    "mqtt_missing_mac": "Missing MAC address in MQTT properties.",
    "mqtt_missing_api": "Missing API port in MQTT properties.",
    "mqtt_missing_ip": "Missing IP address in MQTT properties.",
    "mqtt_missing_payload": "Missing MQTT Payload.",
    "mqtt_missing_payload": "Missing MQTT payload.",
    "name_conflict_migrated": "The configuration for `{name}` has been migrated to a new device with MAC address `{mac}` from `{existing_mac}`.",
    "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
    "reauth_unique_id_changed": "**Re-authentication of `{name}` was aborted** because the address `{host}` points to a different device: `{unexpected_device_name}` (MAC: `{unexpected_mac}`) instead of the expected one (MAC: `{expected_mac}`).",

@@ -91,7 +91,7 @@
      "subscribe_logs": "Subscribe to logs from the device."
    },
    "data_description": {
      "allow_service_calls": "When enabled, ESPHome devices can perform Home Assistant actions, such as calling services or sending events. Only enable this if you trust the device.",
      "allow_service_calls": "When enabled, ESPHome devices can perform Home Assistant actions or send events. Only enable this if you trust the device.",
      "subscribe_logs": "When enabled, the device will send logs to Home Assistant and you can view them in the logs panel."
    }
  }

@@ -119,8 +119,9 @@
      }
    },
    "wake_word": {
      "name": "Wake word",
      "name": "Wake word{index}",
      "state": {
        "no_wake_word": "No wake word",
        "okay_nabu": "Okay Nabu"
      }
    }

@@ -153,7 +154,7 @@
      "description": "To improve Bluetooth reliability and performance, we highly recommend updating {name} with ESPHome {version} or later. When updating the device from ESPHome earlier than 2022.12.0, it is recommended to use a serial cable instead of an over-the-air update to take advantage of the new partition scheme."
    },
    "api_password_deprecated": {
      "title": "API Password deprecated on {name}",
      "title": "API password deprecated on {name}",
      "description": "The API password for ESPHome is deprecated and the use of an API encryption key is recommended instead.\n\nRemove the API password and add an encryption key to your ESPHome device to resolve this issue."
    },
    "service_calls_not_allowed": {

@@ -192,10 +193,10 @@
      "message": "Error communicating with the device {device_name}: {error}"
    },
    "error_compiling": {
      "message": "Error compiling {configuration}; Try again in ESPHome dashboard for more information."
      "message": "Error compiling {configuration}. Try again in ESPHome dashboard for more information."
    },
    "error_uploading": {
      "message": "Error during OTA (Over-The-Air) of {configuration}; Try again in ESPHome dashboard for more information."
      "message": "Error during OTA (Over-The-Air) update of {configuration}. Try again in ESPHome dashboard for more information."
    },
    "ota_in_progress": {
      "message": "An OTA (Over-The-Air) update is already in progress for {configuration}."
@@ -14,13 +14,7 @@ from homeassistant.components.climate import (
    HVACAction,
    HVACMode,
)
from homeassistant.components.modbus import (
    CALL_TYPE_REGISTER_HOLDING,
    CALL_TYPE_REGISTER_INPUT,
    DEFAULT_HUB,
    ModbusHub,
    get_hub,
)
from homeassistant.components.modbus import ModbusHub, get_hub
from homeassistant.const import (
    ATTR_TEMPERATURE,
    CONF_NAME,

@@ -33,7 +27,13 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

# These constants are not offered by modbus, because modbus does not have
# an official API.
CALL_TYPE_REGISTER_HOLDING = "holding"
CALL_TYPE_REGISTER_INPUT = "input"
CALL_TYPE_WRITE_REGISTER = "write_register"
DEFAULT_HUB = "modbus_hub"

CONF_HUB = "hub"

PLATFORM_SCHEMA = CLIMATE_PLATFORM_SCHEMA.extend(
@@ -37,6 +37,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
    FlexitSensorEntityDescription(
        key="outside_air_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        translation_key="outside_air_temperature",
        value_fn=lambda data: data.outside_air_temperature,

@@ -44,6 +45,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
    FlexitSensorEntityDescription(
        key="supply_air_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        translation_key="supply_air_temperature",
        value_fn=lambda data: data.supply_air_temperature,

@@ -51,6 +53,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
    FlexitSensorEntityDescription(
        key="exhaust_air_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        translation_key="exhaust_air_temperature",
        value_fn=lambda data: data.exhaust_air_temperature,

@@ -58,6 +61,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
    FlexitSensorEntityDescription(
        key="extract_air_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        translation_key="extract_air_temperature",
        value_fn=lambda data: data.extract_air_temperature,

@@ -65,6 +69,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
    FlexitSensorEntityDescription(
        key="room_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        translation_key="room_temperature",
        value_fn=lambda data: data.room_temperature,
@@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20250903.3"]
  "requirements": ["home-assistant-frontend==20250903.5"]
}

@@ -6,5 +6,5 @@
  "dependencies": ["network"],
  "documentation": "https://www.home-assistant.io/integrations/govee_light_local",
  "iot_class": "local_push",
  "requirements": ["govee-local-api==2.1.0"]
  "requirements": ["govee-local-api==2.2.0"]
}

@@ -37,14 +37,14 @@
    },
    "issue_addon_detached_addon_missing": {
      "title": "Missing repository for an installed add-on",
      "description": "Repository for add-on {addon} is missing. This means it will not get updates, and backups may not be restored correctly as the supervisor may not be able to build/download the resources required.\n\nPlease check the [add-on's documentation]({addon_url}) for installation instructions and add the repository to the store."
      "description": "Repository for add-on {addon} is missing. This means it will not get updates, and backups may not be restored correctly as the Home Assistant Supervisor may not be able to build/download the resources required.\n\nPlease check the [add-on's documentation]({addon_url}) for installation instructions and add the repository to the store."
    },
    "issue_addon_detached_addon_removed": {
      "title": "Installed add-on has been removed from repository",
      "fix_flow": {
        "step": {
          "addon_execute_remove": {
            "description": "Add-on {addon} has been removed from the repository it was installed from. This means it will not get updates, and backups may not be restored correctly as the supervisor may not be able to build/download the resources required.\n\nSelecting **Submit** will uninstall this deprecated add-on. Alternatively, you can check [Home Assistant help]({help_url}) and the [community forum]({community_url}) for alternatives to migrate to."
            "description": "Add-on {addon} has been removed from the repository it was installed from. This means it will not get updates, and backups may not be restored correctly as the Home Assistant Supervisor may not be able to build/download the resources required.\n\nSelecting **Submit** will uninstall this deprecated add-on. Alternatively, you can check [Home Assistant help]({help_url}) and the [community forum]({community_url}) for alternatives to migrate to."
          }
        },
        "abort": {

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/holiday",
  "iot_class": "local_polling",
  "requirements": ["holidays==0.79", "babel==2.15.0"]
  "requirements": ["holidays==0.80", "babel==2.15.0"]
}

@@ -12,13 +12,3 @@ async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationSe
        authorize_url=OAUTH2_AUTHORIZE,
        token_url=OAUTH2_TOKEN,
    )


async def async_get_description_placeholders(hass: HomeAssistant) -> dict[str, str]:
    """Return description placeholders for the credentials dialog."""
    return {
        "developer_dashboard_url": "https://developer.home-connect.com/",
        "applications_url": "https://developer.home-connect.com/applications",
        "register_application_url": "https://developer.home-connect.com/application/add",
        "redirect_url": "https://my.home-assistant.io/redirect/oauth",
    }
@@ -659,17 +659,3 @@ class HomeConnectCoordinator(
        )

        return False

    async def reset_execution_tracker(self, appliance_ha_id: str) -> None:
        """Reset the execution tracker for a specific appliance."""
        self._execution_tracker.pop(appliance_ha_id, None)
        appliance_info = await self.client.get_specific_appliance(appliance_ha_id)

        appliance_data = await self._get_appliance_data(
            appliance_info, self.data.get(appliance_info.ha_id)
        )
        self.data[appliance_ha_id].update(appliance_data)
        for listener, context in self._special_listeners.values():
            if EventKey.BSH_COMMON_APPLIANCE_DEPAIRED not in context:
                listener()
        self._call_all_event_listeners_for_appliance(appliance_ha_id)

@@ -1,60 +0,0 @@
"""Repairs flows for Home Connect."""

from typing import cast

import voluptuous as vol

from homeassistant import data_entry_flow
from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir

from .coordinator import HomeConnectConfigEntry


class EnableApplianceUpdatesFlow(RepairsFlow):
    """Handler for enabling appliance's updates after being refreshed too many times."""

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Handle the first step of a fix flow."""
        return await self.async_step_confirm()

    async def async_step_confirm(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Handle the confirm step of a fix flow."""
        if user_input is not None:
            assert self.data
            entry = self.hass.config_entries.async_get_entry(
                cast(str, self.data["entry_id"])
            )
            assert entry
            entry = cast(HomeConnectConfigEntry, entry)
            await entry.runtime_data.reset_execution_tracker(
                cast(str, self.data["appliance_ha_id"])
            )
            return self.async_create_entry(data={})

        issue_registry = ir.async_get(self.hass)
        description_placeholders = None
        if issue := issue_registry.async_get_issue(self.handler, self.issue_id):
            description_placeholders = issue.translation_placeholders

        return self.async_show_form(
            step_id="confirm",
            data_schema=vol.Schema({}),
            description_placeholders=description_placeholders,
        )


async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> RepairsFlow:
    """Create flow."""
    if issue_id.startswith("home_connect_too_many_connected_paired_events"):
        return EnableApplianceUpdatesFlow()
    return ConfirmRepairFlow()
@@ -57,7 +57,10 @@ from .utils import IidTuple, unique_id_to_iids

RETRY_INTERVAL = 60  # seconds
MAX_POLL_FAILURES_TO_DECLARE_UNAVAILABLE = 3

# HomeKit accessories have varying limits on how many characteristics
# they can handle per request. Since we don't know each device's specific limit,
# we batch requests to a conservative size to avoid overwhelming any device.
MAX_CHARACTERISTICS_PER_REQUEST = 49

BLE_AVAILABILITY_CHECK_INTERVAL = 1800  # seconds

@@ -326,16 +329,20 @@ class HKDevice:
        )
        entry.async_on_unload(self._async_cancel_subscription_timer)

        if transport != Transport.BLE:
            # Although async_populate_accessories_state fetched the accessory database,
            # the /accessories endpoint may return cached values from the accessory's
            # perspective. For example, Ecobee thermostats may report stale temperature
            # values (like 100°C) in their /accessories response after restarting.
            # We need to explicitly poll characteristics to get fresh sensor readings
            # before processing the entity map and creating devices.
            # Use poll_all=True since entities haven't registered their characteristics yet.
            await self.async_update(poll_all=True)

        await self.async_process_entity_map()

        if transport != Transport.BLE:
            # When Home Assistant starts, we restore the accessory map from storage
            # which contains characteristic values from when HA was last running.
            # These values are stale and may be incorrect (e.g., Ecobee thermostats
            # report 100°C when restarting). We need to poll for fresh values before
            # creating entities. Use poll_all=True since entities haven't registered
            # their characteristics yet.
            await self.async_update(poll_all=True)
            # Start regular polling after entity map is processed
            self._async_start_polling()

        # If everything is up to date, we can create the entities

@@ -938,20 +945,26 @@ class HKDevice:
        async with self._polling_lock:
            _LOGGER.debug("Starting HomeKit device update: %s", self.unique_id)

            try:
                new_values_dict = await self.get_characteristics(to_poll)
            except AccessoryNotFoundError:
                # Not only did the connection fail, but also the accessory is not
                # visible on the network.
                self.async_set_available_state(False)
                return
            except (AccessoryDisconnectedError, EncryptionError):
                # Temporary connection failure. Device may still be available but our
                # connection was dropped or we are reconnecting
                self._poll_failures += 1
                if self._poll_failures >= MAX_POLL_FAILURES_TO_DECLARE_UNAVAILABLE:
            new_values_dict: dict[tuple[int, int], dict[str, Any]] = {}
            to_poll_list = list(to_poll)

            for i in range(0, len(to_poll_list), MAX_CHARACTERISTICS_PER_REQUEST):
                batch = to_poll_list[i : i + MAX_CHARACTERISTICS_PER_REQUEST]
                try:
                    batch_values = await self.get_characteristics(batch)
                    new_values_dict.update(batch_values)
                except AccessoryNotFoundError:
                    # Not only did the connection fail, but also the accessory is not
                    # visible on the network.
                    self.async_set_available_state(False)
                    return
                    return
                except (AccessoryDisconnectedError, EncryptionError):
                    # Temporary connection failure. Device may still be available but our
                    # connection was dropped or we are reconnecting
                    self._poll_failures += 1
                    if self._poll_failures >= MAX_POLL_FAILURES_TO_DECLARE_UNAVAILABLE:
                        self.async_set_available_state(False)
                        return

            self._poll_failures = 0
            self.process_new_events(new_values_dict)
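The batching change above keeps any single request under a conservative per-request limit. A minimal standalone sketch of the same chunking pattern (pure Python; the accessory request is stubbed with a dict lookup):

BATCH_SIZE = 49  # conservative per-request limit, per the comment above

def batched(items: list, size: int):
    """Yield fixed-size chunks; the last chunk may be shorter."""
    for i in range(0, len(items), size):
        yield items[i : i + size]

merged: dict[int, str] = {}
fake_results = {1: "a", 2: "b", 3: "c"}
for chunk in batched(list(fake_results), 2):
    # in the integration this would be one awaited accessory request per chunk
    merged.update({k: fake_results[k] for k in chunk})
assert merged == fake_results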
@@ -14,6 +14,6 @@
  "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
  "iot_class": "local_push",
  "loggers": ["aiohomekit", "commentjson"],
  "requirements": ["aiohomekit==3.2.15"],
  "requirements": ["aiohomekit==3.2.17"],
  "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
}
@@ -21,6 +21,21 @@ from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID, CONF_PIN

from .const import DOMAIN, LOGGER

BLUETOOTH_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PIN): str,
    }
)

USER_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_ADDRESS): str,
        vol.Required(CONF_PIN): str,
    }
)

REAUTH_SCHEMA = BLUETOOTH_SCHEMA


def _is_supported(discovery_info: BluetoothServiceInfo):
    """Check if device is supported."""

@@ -78,6 +93,10 @@ class HusqvarnaAutomowerBleConfigFlow(ConfigFlow, domain=DOMAIN):
        if not _is_supported(discovery_info):
            return self.async_abort(reason="no_devices_found")

        self.context["title_placeholders"] = {
            "name": discovery_info.name,
            "address": discovery_info.address,
        }
        self.address = discovery_info.address
        await self.async_set_unique_id(self.address)
        self._abort_if_unique_id_configured()

@@ -100,12 +119,7 @@ class HusqvarnaAutomowerBleConfigFlow(ConfigFlow, domain=DOMAIN):
        return self.async_show_form(
            step_id="bluetooth_confirm",
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(
                    {
                        vol.Required(CONF_PIN): str,
                    },
                ),
                user_input,
                BLUETOOTH_SCHEMA, user_input
            ),
            description_placeholders={"name": self.mower_name or self.address},
            errors=errors,

@@ -129,15 +143,7 @@ class HusqvarnaAutomowerBleConfigFlow(ConfigFlow, domain=DOMAIN):

        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(
                    {
                        vol.Required(CONF_ADDRESS): str,
                        vol.Required(CONF_PIN): str,
                    },
                ),
                user_input,
            ),
            data_schema=self.add_suggested_values_to_schema(USER_SCHEMA, user_input),
            errors=errors,
        )

@@ -184,7 +190,24 @@ class HusqvarnaAutomowerBleConfigFlow(ConfigFlow, domain=DOMAIN):

        title = await self.probe_mower(device)
        if title is None:
            return self.async_abort(reason="cannot_connect")
            if self.source == SOURCE_BLUETOOTH:
                return self.async_show_form(
                    step_id="bluetooth_confirm",
                    data_schema=BLUETOOTH_SCHEMA,
                    description_placeholders={"name": self.address},
                    errors={"base": "cannot_connect"},
                )
            return self.async_show_form(
                step_id="user",
                data_schema=self.add_suggested_values_to_schema(
                    USER_SCHEMA,
                    {
                        CONF_ADDRESS: self.address,
                        CONF_PIN: self.pin,
                    },
                ),
                errors={"base": "cannot_connect"},
            )
        self.mower_name = title

        try:

@@ -209,11 +232,7 @@ class HusqvarnaAutomowerBleConfigFlow(ConfigFlow, domain=DOMAIN):
        if self.source == SOURCE_BLUETOOTH:
            return self.async_show_form(
                step_id="bluetooth_confirm",
                data_schema=vol.Schema(
                    {
                        vol.Required(CONF_PIN): str,
                    },
                ),
                data_schema=BLUETOOTH_SCHEMA,
                description_placeholders={
                    "name": self.mower_name or self.address
                },

@@ -230,13 +249,7 @@ class HusqvarnaAutomowerBleConfigFlow(ConfigFlow, domain=DOMAIN):
        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(
                    {
                        vol.Required(CONF_ADDRESS): str,
                        vol.Required(CONF_PIN): str,
                    },
                ),
                suggested_values,
                USER_SCHEMA, suggested_values
            ),
            errors=errors,
        )

@@ -312,12 +325,7 @@ class HusqvarnaAutomowerBleConfigFlow(ConfigFlow, domain=DOMAIN):
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(
                    {
                        vol.Required(CONF_PIN): str,
                    },
                ),
                {CONF_PIN: self.pin},
                REAUTH_SCHEMA, {CONF_PIN: self.pin}
            ),
            description_placeholders={"name": self.mower_name},
            errors=errors,
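The refactor above hoists the repeated inline schemas to module level so every step reuses one definition. A small sketch of the same idea with the real voluptuous library (key names here are illustrative, not the integration's constants):

import voluptuous as vol

# Defined once, reused by several flow steps instead of rebuilding inline.
PIN_SCHEMA = vol.Schema({vol.Required("pin"): str})
ADDRESS_PIN_SCHEMA = PIN_SCHEMA.extend({vol.Required("address"): str})

validated = ADDRESS_PIN_SCHEMA({"address": "AA:BB:CC:DD:EE:FF", "pin": "1234"})
assert validated["pin"] == "1234"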
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/icloud",
  "iot_class": "cloud_polling",
  "loggers": ["keyrings.alt", "pyicloud"],
  "requirements": ["pyicloud==1.0.0"]
  "requirements": ["pyicloud==2.0.3"]
}

@@ -6,7 +6,7 @@
      "description": "Enter your credentials",
      "data": {
        "username": "[%key:common::config_flow::data::email%]",
        "password": "App-specific password",
        "password": "Main Password (MFA)",
        "with_family": "With family"
      }
    },

@@ -14,7 +14,8 @@
      "title": "[%key:common::config_flow::title::reauth%]",
      "description": "Your previously entered password for {username} is no longer working. Update your password to keep using this integration.",
      "data": {
        "password": "App-specific password"
        "username": "[%key:common::config_flow::data::email%]",
        "password": "[%key:component::icloud::config::step::user::data::password%]"
      }
    },
    "trusted_device": {

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["pyiskra"],
  "requirements": ["pyiskra==0.1.26"]
  "requirements": ["pyiskra==0.1.27"]
}
@@ -31,6 +31,21 @@
      "cycle_delay": {
        "default": "mdi:timer-outline"
      },
      "globe_brightness": {
        "default": "mdi:lightbulb-question",
        "state": {
          "low": "mdi:lightbulb-on-30",
          "medium": "mdi:lightbulb-on-50",
          "high": "mdi:lightbulb-on"
        }
      },
      "globe_light": {
        "state": {
          "off": "mdi:lightbulb-off",
          "on": "mdi:lightbulb-on",
          "auto": "mdi:lightbulb-auto"
        }
      },
      "meal_insert_size": {
        "default": "mdi:scale"
      }
@@ -7,7 +7,7 @@ from dataclasses import dataclass
from typing import Any, Generic, TypeVar

from pylitterbot import FeederRobot, LitterRobot, LitterRobot4, Robot
from pylitterbot.robot.litterrobot4 import BrightnessLevel
from pylitterbot.robot.litterrobot4 import BrightnessLevel, NightLightMode

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory, UnitOfTime

@@ -32,35 +32,73 @@ class RobotSelectEntityDescription(
    select_fn: Callable[[_WhiskerEntityT, str], Coroutine[Any, Any, bool]]


ROBOT_SELECT_MAP: dict[type[Robot], RobotSelectEntityDescription] = {
    LitterRobot: RobotSelectEntityDescription[LitterRobot, int](  # type: ignore[type-abstract] # only used for isinstance check
        key="cycle_delay",
        translation_key="cycle_delay",
        unit_of_measurement=UnitOfTime.MINUTES,
        current_fn=lambda robot: robot.clean_cycle_wait_time_minutes,
        options_fn=lambda robot: robot.VALID_WAIT_TIMES,
        select_fn=lambda robot, opt: robot.set_wait_time(int(opt)),
    ),
    LitterRobot4: RobotSelectEntityDescription[LitterRobot4, str](
        key="panel_brightness",
        translation_key="brightness_level",
        current_fn=(
            lambda robot: bri.name.lower()
            if (bri := robot.panel_brightness) is not None
            else None
        ),
        options_fn=lambda _: [level.name.lower() for level in BrightnessLevel],
        select_fn=(
            lambda robot, opt: robot.set_panel_brightness(BrightnessLevel[opt.upper()])
ROBOT_SELECT_MAP: dict[type[Robot], tuple[RobotSelectEntityDescription, ...]] = {
    LitterRobot: (
        RobotSelectEntityDescription[LitterRobot, int](  # type: ignore[type-abstract] # only used for isinstance check
            key="cycle_delay",
            translation_key="cycle_delay",
            unit_of_measurement=UnitOfTime.MINUTES,
            current_fn=lambda robot: robot.clean_cycle_wait_time_minutes,
            options_fn=lambda robot: robot.VALID_WAIT_TIMES,
            select_fn=lambda robot, opt: robot.set_wait_time(int(opt)),
        ),
    ),
    FeederRobot: RobotSelectEntityDescription[FeederRobot, float](
        key="meal_insert_size",
        translation_key="meal_insert_size",
        unit_of_measurement="cups",
        current_fn=lambda robot: robot.meal_insert_size,
        options_fn=lambda robot: robot.VALID_MEAL_INSERT_SIZES,
        select_fn=lambda robot, opt: robot.set_meal_insert_size(float(opt)),
    LitterRobot4: (
        RobotSelectEntityDescription[LitterRobot4, str](
            key="globe_brightness",
            translation_key="globe_brightness",
            current_fn=(
                lambda robot: bri.name.lower()
                if (bri := robot.night_light_level) is not None
                else None
            ),
            options_fn=lambda _: [level.name.lower() for level in BrightnessLevel],
            select_fn=(
                lambda robot, opt: robot.set_night_light_brightness(
                    BrightnessLevel[opt.upper()]
                )
            ),
        ),
        RobotSelectEntityDescription[LitterRobot4, str](
            key="globe_light",
            translation_key="globe_light",
            current_fn=(
                lambda robot: mode.name.lower()
                if (mode := robot.night_light_mode) is not None
                else None
            ),
            options_fn=lambda _: [mode.name.lower() for mode in NightLightMode],
            select_fn=(
                lambda robot, opt: robot.set_night_light_mode(
                    NightLightMode[opt.upper()]
                )
            ),
        ),
        RobotSelectEntityDescription[LitterRobot4, str](
            key="panel_brightness",
            translation_key="brightness_level",
            current_fn=(
                lambda robot: bri.name.lower()
                if (bri := robot.panel_brightness) is not None
                else None
            ),
            options_fn=lambda _: [level.name.lower() for level in BrightnessLevel],
            select_fn=(
                lambda robot, opt: robot.set_panel_brightness(
                    BrightnessLevel[opt.upper()]
                )
            ),
        ),
    ),
    FeederRobot: (
        RobotSelectEntityDescription[FeederRobot, float](
            key="meal_insert_size",
            translation_key="meal_insert_size",
            unit_of_measurement="cups",
            current_fn=lambda robot: robot.meal_insert_size,
            options_fn=lambda robot: robot.VALID_MEAL_INSERT_SIZES,
            select_fn=lambda robot, opt: robot.set_meal_insert_size(float(opt)),
        ),
    ),
}

@@ -77,8 +115,9 @@ async def async_setup_entry(
            robot=robot, coordinator=coordinator, description=description
        )
        for robot in coordinator.account.robots
        for robot_type, description in ROBOT_SELECT_MAP.items()
        for robot_type, descriptions in ROBOT_SELECT_MAP.items()
        if isinstance(robot, robot_type)
        for description in descriptions
    )
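The map now pairs each robot class with a tuple of descriptions, and a single comprehension creates entities via isinstance dispatch. A self-contained sketch of that pattern (toy classes, not the pylitterbot ones):

from dataclasses import dataclass

@dataclass(frozen=True)
class Description:
    key: str

class Robot: ...
class LitterRobot(Robot): ...
class FeederRobot(Robot): ...

# One robot class can now map to several entity descriptions.
SELECT_MAP: dict[type[Robot], tuple[Description, ...]] = {
    LitterRobot: (Description("cycle_delay"),),
    FeederRobot: (Description("meal_insert_size"),),
}

robots: list[Robot] = [LitterRobot(), FeederRobot()]
entities = [
    (robot, description)
    for robot in robots
    for robot_type, descriptions in SELECT_MAP.items()
    if isinstance(robot, robot_type)
    for description in descriptions
]
assert [d.key for _, d in entities] == ["cycle_delay", "meal_insert_size"]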
@@ -144,6 +144,22 @@
      "cycle_delay": {
        "name": "Clean cycle wait time minutes"
      },
      "globe_brightness": {
        "name": "Globe brightness",
        "state": {
          "low": "[%key:common::state::low%]",
          "medium": "[%key:common::state::medium%]",
          "high": "[%key:common::state::high%]"
        }
      },
      "globe_light": {
        "name": "Globe light",
        "state": {
          "auto": "[%key:common::state::auto%]",
          "off": "[%key:common::state::off%]",
          "on": "[%key:common::state::on%]"
        }
      },
      "meal_insert_size": {
        "name": "Meal insert size"
      },

@@ -157,6 +173,9 @@
      }
    },
    "switch": {
      "gravity_mode": {
        "name": "Gravity mode"
      },
      "night_light_mode": {
        "name": "Night light mode"
      },

@@ -6,7 +6,7 @@ from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any, Generic

from pylitterbot import FeederRobot, LitterRobot
from pylitterbot import FeederRobot, LitterRobot, Robot

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory

@@ -26,20 +26,30 @@ class RobotSwitchEntityDescription(SwitchEntityDescription, Generic[_WhiskerEnti
    value_fn: Callable[[_WhiskerEntityT], bool]


ROBOT_SWITCHES = [
    RobotSwitchEntityDescription[LitterRobot | FeederRobot](
        key="night_light_mode_enabled",
        translation_key="night_light_mode",
        set_fn=lambda robot, value: robot.set_night_light(value),
        value_fn=lambda robot: robot.night_light_mode_enabled,
SWITCH_MAP: dict[type[Robot], tuple[RobotSwitchEntityDescription, ...]] = {
    FeederRobot: (
        RobotSwitchEntityDescription[FeederRobot](
            key="gravity_mode",
            translation_key="gravity_mode",
            set_fn=lambda robot, value: robot.set_gravity_mode(value),
            value_fn=lambda robot: robot.gravity_mode_enabled,
        ),
    ),
    RobotSwitchEntityDescription[LitterRobot | FeederRobot](
        key="panel_lock_enabled",
        translation_key="panel_lockout",
        set_fn=lambda robot, value: robot.set_panel_lockout(value),
        value_fn=lambda robot: robot.panel_lock_enabled,
    Robot: (  # type: ignore[type-abstract] # only used for isinstance check
        RobotSwitchEntityDescription[LitterRobot | FeederRobot](
            key="night_light_mode_enabled",
            translation_key="night_light_mode",
            set_fn=lambda robot, value: robot.set_night_light(value),
            value_fn=lambda robot: robot.night_light_mode_enabled,
        ),
        RobotSwitchEntityDescription[LitterRobot | FeederRobot](
            key="panel_lock_enabled",
            translation_key="panel_lockout",
            set_fn=lambda robot, value: robot.set_panel_lockout(value),
            value_fn=lambda robot: robot.panel_lock_enabled,
        ),
    ),
]
}


async def async_setup_entry(

@@ -51,9 +61,10 @@ async def async_setup_entry(
    coordinator = entry.runtime_data
    async_add_entities(
        RobotSwitchEntity(robot=robot, coordinator=coordinator, description=description)
        for description in ROBOT_SWITCHES
        for robot in coordinator.account.robots
        if isinstance(robot, (LitterRobot, FeederRobot))
        for robot_type, entity_descriptions in SWITCH_MAP.items()
        if isinstance(robot, robot_type)
        for description in entity_descriptions
    )
@@ -132,7 +132,7 @@ class LocalTodoListEntity(TodoListEntity):
        self._store = store
        self._calendar = calendar
        self._calendar_lock = asyncio.Lock()
        self._attr_name = name.capitalize()
        self._attr_name = name
        self._attr_unique_id = unique_id

    def _new_todo_store(self) -> TodoStore:
@@ -5,8 +5,9 @@ from __future__ import annotations
from collections.abc import Callable, Mapping
from typing import Any

from homeassistant.components.sensor import ATTR_STATE_CLASS
from homeassistant.components.sensor import ATTR_STATE_CLASS, NON_NUMERIC_DEVICE_CLASSES
from homeassistant.const import (
    ATTR_DEVICE_CLASS,
    ATTR_DEVICE_ID,
    ATTR_DOMAIN,
    ATTR_ENTITY_ID,

@@ -28,7 +29,13 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.util.event_type import EventType

from .const import ALWAYS_CONTINUOUS_DOMAINS, AUTOMATION_EVENTS, BUILT_IN_EVENTS, DOMAIN
from .const import (
    ALWAYS_CONTINUOUS_DOMAINS,
    AUTOMATION_EVENTS,
    BUILT_IN_EVENTS,
    DOMAIN,
    SENSOR_DOMAIN,
)
from .models import LogbookConfig


@@ -38,8 +45,10 @@ def async_filter_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[st
    return [
        entity_id
        for entity_id in entity_ids
        if split_entity_id(entity_id)[0] not in ALWAYS_CONTINUOUS_DOMAINS
        and not is_sensor_continuous(hass, ent_reg, entity_id)
        if (domain := split_entity_id(entity_id)[0]) not in ALWAYS_CONTINUOUS_DOMAINS
        and not (
            domain == SENSOR_DOMAIN and is_sensor_continuous(hass, ent_reg, entity_id)
        )
    ]


@@ -214,6 +223,10 @@ def async_subscribe_events(
    )


def _device_class_is_numeric(device_class: str | None) -> bool:
    return device_class is not None and device_class not in NON_NUMERIC_DEVICE_CLASSES


def is_sensor_continuous(
    hass: HomeAssistant, ent_reg: er.EntityRegistry, entity_id: str
) -> bool:

@@ -233,7 +246,11 @@ def is_sensor_continuous(
    # has a unit_of_measurement or state_class, and filter if
    # it does
    if (state := hass.states.get(entity_id)) and (attributes := state.attributes):
        return ATTR_UNIT_OF_MEASUREMENT in attributes or ATTR_STATE_CLASS in attributes
        return (
            ATTR_UNIT_OF_MEASUREMENT in attributes
            or ATTR_STATE_CLASS in attributes
            or _device_class_is_numeric(attributes.get(ATTR_DEVICE_CLASS))
        )
    # If it's not in the state machine, we need to check
    # the entity registry to see if it's a sensor
    # filter with a state class. We do not check

@@ -243,8 +260,10 @@ def is_sensor_continuous(
    # the state machine will always have the state.
    return bool(
        (entry := ent_reg.async_get(entity_id))
        and entry.capabilities
        and entry.capabilities.get(ATTR_STATE_CLASS)
        and (
            (entry.capabilities and entry.capabilities.get(ATTR_STATE_CLASS))
            or _device_class_is_numeric(entry.device_class)
        )
    )


@@ -258,6 +277,12 @@ def _is_state_filtered(new_state: State, old_state: State) -> bool:
        new_state.state == old_state.state
        or new_state.last_changed != new_state.last_updated
        or new_state.domain in ALWAYS_CONTINUOUS_DOMAINS
        or ATTR_UNIT_OF_MEASUREMENT in new_state.attributes
        or ATTR_STATE_CLASS in new_state.attributes
        or (
            new_state.domain == SENSOR_DOMAIN
            and (
                ATTR_UNIT_OF_MEASUREMENT in new_state.attributes
                or ATTR_STATE_CLASS in new_state.attributes
                or _device_class_is_numeric(new_state.attributes.get(ATTR_DEVICE_CLASS))
            )
        )
    )
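The logbook change above filters sensors whose device class implies a numeric, continuously changing value. A standalone sketch of that predicate (the set below is an illustrative subset, not the real NON_NUMERIC_DEVICE_CLASSES constant):

NON_NUMERIC_DEVICE_CLASSES = {"enum", "timestamp", "date"}  # illustrative subset

def device_class_is_numeric(device_class: str | None) -> bool:
    """Treat any known device class outside the non-numeric set as numeric."""
    return device_class is not None and device_class not in NON_NUMERIC_DEVICE_CLASSES

# A temperature sensor counts as continuous (filtered from the logbook),
# while an enum sensor still produces logbook entries.
assert device_class_is_numeric("temperature")
assert not device_class_is_numeric("enum")
assert not device_class_is_numeric(None)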
@@ -1,5 +1,6 @@
"""Support for Lutron Caseta shades."""

from enum import Enum
from typing import Any

from homeassistant.components.cover import (

@@ -17,6 +18,14 @@ from .entity import LutronCasetaUpdatableEntity
from .models import LutronCasetaConfigEntry


class ShadeMovementDirection(Enum):
    """Enum for shade movement direction."""

    OPENING = "opening"
    CLOSING = "closing"
    STOPPED = "stopped"


class LutronCasetaShade(LutronCasetaUpdatableEntity, CoverEntity):
    """Representation of a Lutron shade with open/close functionality."""

@@ -27,6 +36,8 @@ class LutronCasetaShade(LutronCasetaUpdatableEntity, CoverEntity):
        | CoverEntityFeature.SET_POSITION
    )
    _attr_device_class = CoverDeviceClass.SHADE
    _previous_position: int | None = None
    _movement_direction: ShadeMovementDirection | None = None

    @property
    def is_closed(self) -> bool:

@@ -38,19 +49,50 @@ class LutronCasetaShade(LutronCasetaUpdatableEntity, CoverEntity):
        """Return the current position of cover."""
        return self._device["current_state"]

    def _handle_bridge_update(self) -> None:
        """Handle updated data from the bridge and track movement direction."""
        current_position = self.current_cover_position

        # Track movement direction based on position changes or endpoint status
        if self._previous_position is not None:
            if current_position > self._previous_position or current_position >= 100:
                # Moving up or at fully open
                self._movement_direction = ShadeMovementDirection.OPENING
            elif current_position < self._previous_position or current_position <= 0:
                # Moving down or at fully closed
                self._movement_direction = ShadeMovementDirection.CLOSING
            else:
                # Stopped
                self._movement_direction = ShadeMovementDirection.STOPPED

        self._previous_position = current_position
        super()._handle_bridge_update()

    async def async_close_cover(self, **kwargs: Any) -> None:
        """Close the cover."""
        await self._smartbridge.lower_cover(self.device_id)
        # Use set_value to avoid the stuttering issue
        await self._smartbridge.set_value(self.device_id, 0)
        await self.async_update()
        self.async_write_ha_state()

    async def async_stop_cover(self, **kwargs: Any) -> None:
        """Stop the cover."""
        # Send appropriate directional command before stop to ensure it works correctly
        # Use tracked direction if moving, otherwise use position-based heuristic
        if self._movement_direction == ShadeMovementDirection.OPENING or (
            self._movement_direction in (ShadeMovementDirection.STOPPED, None)
            and self.current_cover_position >= 50
        ):
            await self._smartbridge.raise_cover(self.device_id)
        else:
            await self._smartbridge.lower_cover(self.device_id)

        await self._smartbridge.stop_cover(self.device_id)

    async def async_open_cover(self, **kwargs: Any) -> None:
        """Open the cover."""
        await self._smartbridge.raise_cover(self.device_id)
        # Use set_value to avoid the stuttering issue
        await self._smartbridge.set_value(self.device_id, 100)
        await self.async_update()
        self.async_write_ha_state()
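The shade change infers direction purely from successive position reports. A self-contained sketch of that classification rule (a toy function mirroring the heuristic, not the entity code itself):

from enum import Enum

class Direction(Enum):
    OPENING = "opening"
    CLOSING = "closing"
    STOPPED = "stopped"

def infer_direction(previous: int | None, current: int) -> Direction | None:
    """Classify movement from two successive position reports (0-100)."""
    if previous is None:
        return None  # first report: no history yet
    if current > previous or current >= 100:
        return Direction.OPENING
    if current < previous or current <= 0:
        return Direction.CLOSING
    return Direction.STOPPED

assert infer_direction(None, 40) is None
assert infer_direction(40, 60) is Direction.OPENING
assert infer_direction(60, 60) is Direction.STOPPED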
@@ -65,7 +65,11 @@ class LutronCasetaEntity(Entity):

    async def async_added_to_hass(self) -> None:
        """Register callbacks."""
        self._smartbridge.add_subscriber(self.device_id, self.async_write_ha_state)
        self._smartbridge.add_subscriber(self.device_id, self._handle_bridge_update)

    def _handle_bridge_update(self) -> None:
        """Handle updated data from the bridge."""
        self.async_write_ha_state()

    def _handle_none_serial(self, serial: str | int | None) -> str | int:
        """Handle None serial returned by RA3 and QSX processors."""
@@ -46,7 +46,14 @@ class UploadedFile(Protocol):

async def async_get_media_source(hass: HomeAssistant) -> LocalSource:
    """Set up local media source."""
    return LocalSource(hass, DOMAIN, "My media", hass.config.media_dirs, "/media")
    return LocalSource(
        hass,
        DOMAIN,
        "My media",
        hass.config.media_dirs,
        not hass.config.media_dirs_set_default,
        "/media",
    )


class LocalSource(MediaSource):

@@ -58,6 +65,7 @@ class LocalSource(MediaSource):
        domain: str,
        name: str,
        media_dirs: dict[str, str],
        error_missing_media_dir: bool,
        url_prefix: str,
    ) -> None:
        """Initialize local source."""

@@ -65,6 +73,7 @@ class LocalSource(MediaSource):
        self.hass = hass
        self.name = name
        self.media_dirs = media_dirs
        self.error_missing_media_dir = error_missing_media_dir
        self.url_prefix = url_prefix

    @callback

@@ -133,14 +142,13 @@ class LocalSource(MediaSource):

        def _do_move() -> None:
            """Move file to target."""
            if not target_dir.is_dir():
                raise PathNotSupportedError("Target is not an existing directory")

            target_path = target_dir / uploaded_file.filename

            try:
                target_path = target_dir / uploaded_file.filename

                target_path.relative_to(target_dir)
                raise_if_invalid_path(str(target_path))

                target_dir.mkdir(parents=True, exist_ok=True)
            except ValueError as err:
                raise PathNotSupportedError("Invalid path") from err

@@ -211,9 +219,23 @@ class LocalSource(MediaSource):
        full_path = Path(self.media_dirs[source_dir_id], location)

        if not full_path.exists():
            if location == "":
            if location != "":
                raise BrowseError("Path does not exist.")

            if self.error_missing_media_dir:
                raise BrowseError("Media directory does not exist.")
            raise BrowseError("Path does not exist.")

            # If a media dir does not exist, return an empty folder
            # It will be created when uploading files
            return BrowseMediaSource(
                domain=self.domain,
                identifier=source_dir_id,
                media_class=MediaClass.DIRECTORY,
                media_content_type=None,
                title=self.name,
                can_play=False,
                can_expand=True,
            )

        if not full_path.is_dir():
            raise BrowseError("Path is not a directory.")
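The upload path check above relies on pathlib semantics: joining an absolute filename discards the target directory, and relative_to then rejects it. A runnable sketch of that guard (a simplified stand-in for the integration's _do_move logic):

from pathlib import Path

def safe_target(target_dir: Path, filename: str) -> Path:
    """Resolve an upload target and reject filenames that escape target_dir."""
    target_path = target_dir / filename
    # If filename is absolute, joining discards target_dir entirely and
    # relative_to raises ValueError; ".." segments are handled separately
    # in the integration by raise_if_invalid_path.
    target_path.relative_to(target_dir)
    return target_path

assert safe_target(Path("/media/music"), "song.mp3") == Path("/media/music/song.mp3")
try:
    safe_target(Path("/media/music"), "/etc/passwd")
except ValueError:
    pass  # absolute path rejected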
@@ -2,12 +2,13 @@

from __future__ import annotations

import asyncio.timeouts
import asyncio
from collections.abc import Callable
from dataclasses import dataclass
from datetime import timedelta
import logging

from aiohttp import ClientResponseError
from pymiele import MieleAction, MieleAPI, MieleDevice

from homeassistant.config_entries import ConfigEntry

@@ -66,7 +67,22 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
        self.devices = devices
        actions = {}
        for device_id in devices:
            actions_json = await self.api.get_actions(device_id)
            try:
                actions_json = await self.api.get_actions(device_id)
            except ClientResponseError as err:
                _LOGGER.debug(
                    "Error fetching actions for device %s: Status: %s, Message: %s",
                    device_id,
                    err.status,
                    err.message,
                )
                actions_json = {}
            except TimeoutError:
                _LOGGER.debug(
                    "Timeout fetching actions for device %s",
                    device_id,
                )
                actions_json = {}
            actions[device_id] = MieleAction(actions_json)
        return MieleCoordinatorData(devices=devices, actions=actions)
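The coordinator change above isolates per-device failures so one unreachable appliance no longer aborts the whole refresh. A self-contained sketch of that degrade-gracefully loop (the fetch is stubbed):

import asyncio

async def fetch_actions(device_id: str) -> dict:
    if device_id == "bad":
        raise TimeoutError
    return {"power": "on"}

async def fetch_all(device_ids: list[str]) -> dict[str, dict]:
    """One failing device degrades to an empty action set instead of
    failing the whole coordinator refresh."""
    actions: dict[str, dict] = {}
    for device_id in device_ids:
        try:
            actions[device_id] = await fetch_actions(device_id)
        except TimeoutError:
            actions[device_id] = {}  # degrade gracefully, keep refreshing
    return actions

assert asyncio.run(fetch_all(["good", "bad"])) == {"good": {"power": "on"}, "bad": {}}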
@@ -16,6 +16,7 @@ from homeassistant.components.sensor import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    ATTR_ENTITY_ID,
    ATTR_UNIT_OF_MEASUREMENT,
    CONF_NAME,
    CONF_TYPE,

@@ -278,13 +279,18 @@ class MinMaxSensor(SensorEntity):
    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:
        """Return the state attributes of the sensor."""
        attributes: dict[str, list[str] | str | None] = {
            ATTR_ENTITY_ID: self._entity_ids
        }

        if self._sensor_type == "min":
            return {ATTR_MIN_ENTITY_ID: self.min_entity_id}
        if self._sensor_type == "max":
            return {ATTR_MAX_ENTITY_ID: self.max_entity_id}
        if self._sensor_type == "last":
            return {ATTR_LAST_ENTITY_ID: self.last_entity_id}
        return None
            attributes[ATTR_MIN_ENTITY_ID] = self.min_entity_id
        elif self._sensor_type == "max":
            attributes[ATTR_MAX_ENTITY_ID] = self.max_entity_id
        elif self._sensor_type == "last":
            attributes[ATTR_LAST_ENTITY_ID] = self.last_entity_id

        return attributes

    @callback
    def _async_min_max_sensor_state_listener(
@@ -1,9 +1,9 @@
{
  "domain": "modbus",
  "name": "Modbus",
  "codeowners": [],
  "codeowners": ["@janiversen"],
  "documentation": "https://www.home-assistant.io/integrations/modbus",
  "iot_class": "local_polling",
  "loggers": ["pymodbus"],
  "requirements": ["pymodbus==3.11.1"]
  "requirements": ["pymodbus==3.11.2"]
}

@@ -253,7 +253,6 @@ class ModbusHub:
        self._client: (
            AsyncModbusSerialClient | AsyncModbusTcpClient | AsyncModbusUdpClient | None
        ) = None
        self._lock = asyncio.Lock()
        self.event_connected = asyncio.Event()
        self.hass = hass
        self.name = client_config[CONF_NAME]

@@ -362,16 +361,13 @@ class ModbusHub:
        if not self._connect_task.done():
            self._connect_task.cancel()

        async with self._lock:
            if self._client:
                try:
                    self._client.close()
                except ModbusException as exception_error:
                    self._log_error(str(exception_error))
                del self._client
                self._client = None
            message = f"modbus {self.name} communication closed"
            _LOGGER.info(message)
        if self._client:
            try:
                self._client.close()
            except ModbusException as exception_error:
                self._log_error(str(exception_error))
            self._client = None
        _LOGGER.info(f"modbus {self.name} communication closed")

    async def low_level_pb_call(
        self, slave: int | None, address: int, value: int | list[int], use_call: str

@@ -417,11 +413,9 @@ class ModbusHub:
        use_call: str,
    ) -> ModbusPDU | None:
        """Convert async to sync pymodbus call."""
        async with self._lock:
            if not self._client:
                return None
            result = await self.low_level_pb_call(unit, address, value, use_call)
            if self._msg_wait:
                # small delay until next request/response
                await asyncio.sleep(self._msg_wait)
            return result
        if not self._client:
            return None
        result = await self.low_level_pb_call(unit, address, value, use_call)
        if self._msg_wait:
            await asyncio.sleep(self._msg_wait)
        return result
@@ -36,6 +36,7 @@ class MotionMountPresets(MotionMountEntity, SelectEntity):

    _attr_should_poll = True
    _attr_translation_key = "motionmount_preset"
    _name_to_index: dict[str, int]

    def __init__(
        self,

@@ -50,8 +51,12 @@ class MotionMountPresets(MotionMountEntity, SelectEntity):

    def _update_options(self, presets: list[motionmount.Preset]) -> None:
        """Convert presets to select options."""
        options = [f"{preset.index}: {preset.name}" for preset in presets]
        options.insert(0, WALL_PRESET_NAME)
        # Ordered list of options (wall first, then presets)
        options = [WALL_PRESET_NAME] + [preset.name for preset in presets]

        # Build mapping name → index (wall = 0)
        self._name_to_index = {WALL_PRESET_NAME: 0}
        self._name_to_index.update({preset.name: preset.index for preset in presets})

        self._attr_options = options

@@ -123,7 +128,10 @@ class MotionMountPresets(MotionMountEntity, SelectEntity):

    async def async_select_option(self, option: str) -> None:
        """Set the new option."""
        index = int(option[:1])
        index = self._name_to_index.get(option)
        if index is None:
            raise HomeAssistantError(f"Unknown preset selected: {option}")

        try:
            await self.mm.go_to_preset(index)
        except (TimeoutError, socket.gaierror) as ex:
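The preset change replaces parsing the index out of the option label with an explicit name-to-index mapping. A small self-contained sketch of that design (toy data, not the MotionMount API):

WALL = "Wall"

def build_options(presets: dict[str, int]) -> tuple[list[str], dict[str, int]]:
    """Options show clean names; a side mapping recovers the device index."""
    name_to_index = {WALL: 0, **presets}
    return [WALL, *presets], name_to_index

options, name_to_index = build_options({"Movie": 3, "Night": 7})
assert options == ["Wall", "Movie", "Night"]
assert name_to_index["Movie"] == 3
# Unknown selections resolve to None instead of crashing on int(option[:1])
assert name_to_index.get("Gone") is None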
@@ -83,7 +83,7 @@
    "motionmount_preset": {
      "name": "Preset",
      "state": {
        "0_wall": "0: Wall"
        "0_wall": "Wall"
      }
    }
  }

@@ -669,7 +669,7 @@
      "direction_value_template": "Direction value template"
    },
    "data_description": {
      "direction_command_topic": "The MQTT topic to publish commands to change the fan direction payload, either `forward` or `reverse`. Use the direction command template to customize the payload. [Learn more.]({url}#direction_command_topic)",
      "direction_command_topic": "The MQTT topic to publish commands to change the fan direction. The payload will be either `forward` or `reverse` and can be customized using the direction command template. [Learn more.]({url}#direction_command_topic)",
      "direction_command_template": "A [template](https://www.home-assistant.io/docs/configuration/templating/#using-command-templates-with-mqtt) to compose the payload to be published at the direction command topic. The template variable `value` will be either `forward` or `reverse`.",
      "direction_state_topic": "The MQTT topic subscribed to receive fan direction state. Accepted state payloads are `forward` or `reverse`. [Learn more.]({url}#direction_state_topic)",
      "direction_value_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract fan direction state value. The template should return either `forward` or `reverse`. When the template returns an empty string, the direction will be ignored."

@@ -43,8 +43,6 @@ LOG_NAME = "Tag"

TAG = "tag"

PARALLEL_UPDATES = 0

DISCOVERY_SCHEMA = MQTT_BASE_SCHEMA.extend(
    {
        vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
@@ -106,7 +106,10 @@ class NtfyEventEntity(NtfyBaseEntity, EventEntity):
                return
            except NtfyForbiddenError:
                if self._attr_available:
                    _LOGGER.error("Failed to subscribe to topic. Topic is protected")
                    _LOGGER.error(
                        "Failed to subscribe to topic %s. Topic is protected",
                        self.topic,
                    )
                    self._attr_available = False
                    ir.async_create_issue(
                        self.hass,

@@ -147,6 +147,9 @@
    "volume": {
      "default": "mdi:car-coolant-level"
    },
    "volume_flow_rate": {
      "default": "mdi:pipe-valve"
    },
    "volume_storage": {
      "default": "mdi:storage-tank"
    },
@@ -100,6 +100,7 @@ OVERKIZ_DEVICE_TO_PLATFORM: dict[UIClass | UIWidget, Platform | None] = {
    UIWidget.ATLANTIC_PASS_APC_HEATING_AND_COOLING_ZONE: Platform.CLIMATE,  # widgetName, uiClass is HeatingSystem (not supported)
    UIWidget.ATLANTIC_PASS_APC_HEATING_ZONE: Platform.CLIMATE,  # widgetName, uiClass is HeatingSystem (not supported)
    UIWidget.ATLANTIC_PASS_APC_ZONE_CONTROL: Platform.CLIMATE,  # widgetName, uiClass is HeatingSystem (not supported)
    UIWidget.DISCRETE_EXTERIOR_HEATING: Platform.SWITCH,  # widgetName, uiClass is ExteriorHeatingSystem (not supported)
    UIWidget.DOMESTIC_HOT_WATER_PRODUCTION: Platform.WATER_HEATER,  # widgetName, uiClass is WaterHeatingSystem (not supported)
    UIWidget.DOMESTIC_HOT_WATER_TANK: Platform.SWITCH,  # widgetName, uiClass is WaterHeatingSystem (not supported)
    UIWidget.EVO_HOME_CONTROLLER: Platform.CLIMATE,  # widgetName, uiClass is EvoHome (not supported)

@@ -100,6 +100,15 @@ SWITCH_DESCRIPTIONS: list[OverkizSwitchDescription] = [
        ),
        entity_category=EntityCategory.CONFIG,
    ),
    OverkizSwitchDescription(
        key=UIWidget.DISCRETE_EXTERIOR_HEATING,
        turn_on=OverkizCommand.ON,
        turn_off=OverkizCommand.OFF,
        icon="mdi:radiator",
        is_on=lambda select_state: (
            select_state(OverkizState.CORE_ON_OFF) == OverkizCommandParam.ON
        ),
    ),
]

SUPPORTED_DEVICES = {
@@ -21,12 +21,16 @@ from pyprusalink.types import InvalidAuth, PrusaLinkError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

# Allow automations using homeassistant.update_entity to collect
# rapidly-changing metrics.
_MINIMUM_REFRESH_INTERVAL = 1.0

T = TypeVar("T", PrinterStatus, LegacyPrinterStatus, JobInfo)

@@ -49,6 +53,9 @@ class PrusaLinkUpdateCoordinator(DataUpdateCoordinator[T], ABC):
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=self._get_update_interval(None),
            request_refresh_debouncer=Debouncer(
                hass, _LOGGER, cooldown=_MINIMUM_REFRESH_INTERVAL, immediate=True
            ),
        )

    async def _async_update_data(self) -> T:
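The debouncer added above coalesces manual refresh requests to at most one per cooldown window. A toy synchronous sketch of that throttle, only to illustrate the idea (Home Assistant's Debouncer helper is async and richer than this):

import time

class CooldownDebouncer:
    """Run at most once per `cooldown` seconds; extra calls are dropped."""

    def __init__(self, cooldown: float) -> None:
        self.cooldown = cooldown
        self._last_run = float("-inf")

    def maybe_run(self, func) -> bool:
        now = time.monotonic()
        if now - self._last_run < self.cooldown:
            return False  # still cooling down; request coalesced
        self._last_run = now
        func()
        return True

debouncer = CooldownDebouncer(cooldown=1.0)
assert debouncer.maybe_run(lambda: None) is True
assert debouncer.maybe_run(lambda: None) is False  # within the 1 s window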
@@ -43,7 +43,7 @@ SENSOR_FIELDS_TO_RETRIEVE = [
    "voc",
]

UPDATE_INTERVAL = timedelta(minutes=2)
UPDATE_INTERVAL = timedelta(minutes=5)


type PurpleAirConfigEntry = ConfigEntry[PurpleAirDataUpdateCoordinator]

@@ -66,7 +66,7 @@ class RadioMediaSource(MediaSource):
        # Register "click" with Radio Browser
        await radios.station_click(uuid=station.uuid)

        return PlayMedia(station.url, mime_type)
        return PlayMedia(station.url_resolved, mime_type)

    async def async_browse_media(
        self,

@@ -134,7 +134,7 @@ class RadioMediaSource(MediaSource):
    ) -> list[BrowseMediaSource]:
        """Handle browsing radio stations by country."""
        category, _, country_code = (item.identifier or "").partition("/")
        if country_code:
        if category == "country" and country_code:
            stations = await radios.stations(
                filter_by=FilterBy.COUNTRY_CODE_EXACT,
                filter_term=country_code,

@@ -185,7 +185,7 @@ class RadioMediaSource(MediaSource):
        return [
            BrowseMediaSource(
                domain=DOMAIN,
                identifier=f"language/{language.code}",
                identifier=f"language/{language.name.lower()}",
                media_class=MediaClass.DIRECTORY,
                media_content_type=MediaType.MUSIC,
                title=language.name,
@@ -35,6 +35,7 @@ from . import RoborockConfigEntry
 from .const import (
     CONF_BASE_URL,
     CONF_ENTRY_CODE,
+    CONF_SHOW_BACKGROUND,
     CONF_USER_DATA,
     DEFAULT_DRAWABLES,
     DOMAIN,
@@ -215,6 +216,7 @@ class RoborockOptionsFlowHandler(OptionsFlowWithReload):
     ) -> ConfigFlowResult:
         """Manage the map object drawable options."""
         if user_input is not None:
+            self.options[CONF_SHOW_BACKGROUND] = user_input.pop(CONF_SHOW_BACKGROUND)
             self.options.setdefault(DRAWABLES, {}).update(user_input)
             return self.async_create_entry(title="", data=self.options)
         data_schema = {}
@@ -227,6 +229,12 @@ class RoborockOptionsFlowHandler(OptionsFlowWithReload):
                 ),
             )
         ] = bool
+        data_schema[
+            vol.Required(
+                CONF_SHOW_BACKGROUND,
+                default=self.config_entry.options.get(CONF_SHOW_BACKGROUND, False),
+            )
+        ] = bool
         return self.async_show_form(
             step_id=DRAWABLES,
             data_schema=vol.Schema(data_schema),
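Keying the schema dict with `vol.Required(..., default=...)` markers and wrapping it in `vol.Schema` is standard voluptuous usage; validation fills in the default when the key is absent. A self-contained sketch (option names illustrative):

```python
import voluptuous as vol

# Defaults typically come from previously saved options.
saved_options = {"show_background": True}

data_schema = {
    vol.Required(
        "show_background",
        default=saved_options.get("show_background", False),
    ): bool
}
schema = vol.Schema(data_schema)

print(schema({}))                          # {'show_background': True} (default applied)
print(schema({"show_background": False}))  # {'show_background': False}
```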
@@ -10,6 +10,7 @@ DOMAIN = "roborock"
 CONF_ENTRY_CODE = "code"
 CONF_BASE_URL = "base_url"
 CONF_USER_DATA = "user_data"
+CONF_SHOW_BACKGROUND = "show_background"

 # Option Flow steps
 DRAWABLES = "drawables"
@@ -26,7 +26,7 @@ from roborock.version_1_apis.roborock_local_client_v1 import RoborockLocalClient
 from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1
 from roborock.version_a01_apis import RoborockClientA01
 from roborock.web_api import RoborockApiClient
-from vacuum_map_parser_base.config.color import ColorsPalette
+from vacuum_map_parser_base.config.color import ColorsPalette, SupportedColor
 from vacuum_map_parser_base.config.image_config import ImageConfig
 from vacuum_map_parser_base.config.size import Size, Sizes
 from vacuum_map_parser_base.map_data import MapData
@@ -44,6 +44,7 @@ from homeassistant.util import dt as dt_util, slugify

 from .const import (
     A01_UPDATE_INTERVAL,
+    CONF_SHOW_BACKGROUND,
     DEFAULT_DRAWABLES,
     DOMAIN,
     DRAWABLES,
@@ -146,8 +147,11 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
             for drawable, default_value in DEFAULT_DRAWABLES.items()
             if config_entry.options.get(DRAWABLES, {}).get(drawable, default_value)
         ]
+        colors = ColorsPalette()
+        if not config_entry.options.get(CONF_SHOW_BACKGROUND, False):
+            colors = ColorsPalette({SupportedColor.MAP_OUTSIDE: (0, 0, 0, 0)})
         self.map_parser = RoborockMapDataParser(
-            ColorsPalette(),
+            colors,
            Sizes(
                {
                    k: v * MAP_SCALE
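Hiding the background works by painting the area outside the map with the fully transparent RGBA colour `(0, 0, 0, 0)`; with alpha 0 the RGB components are irrelevant. A small Pillow sketch of the same idea (Pillow is used here only for illustration; the integration itself relies on `vacuum_map_parser_base`):

```python
from PIL import Image

# Alpha 0 -> fully transparent; alpha 255 -> opaque.
TRANSPARENT = (0, 0, 0, 0)
OPAQUE_BLUE = (30, 60, 200, 255)

img = Image.new("RGBA", (4, 4), TRANSPARENT)  # "no background"
img.putpixel((1, 1), OPAQUE_BLUE)             # draw map content on top

print(img.getpixel((0, 0)))  # (0, 0, 0, 0) -> shows whatever is behind the map
print(img.getpixel((1, 1)))  # (30, 60, 200, 255)
```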
@@ -60,7 +60,8 @@
         "room_names": "Room names",
         "vacuum_position": "Vacuum position",
         "virtual_walls": "Virtual walls",
-        "zones": "Zones"
+        "zones": "Zones",
+        "show_background": "Show background"
       },
       "data_description": {
         "charger": "Show the charger on the map.",
@@ -79,7 +80,8 @@
         "room_names": "Show room names on the map.",
         "vacuum_position": "Show the vacuum position on the map.",
         "virtual_walls": "Show virtual walls on the map.",
-        "zones": "Show zones on the map."
+        "zones": "Show zones on the map.",
+        "show_background": "Add a background to the map."
       }
     }
   }
@@ -361,25 +361,30 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def _is_valid_suggested_unit(self, suggested_unit_of_measurement: str) -> bool:
         """Validate the suggested unit.

-        Validate that a unit converter exists for the sensor's device class and that the
-        unit converter supports both the native and the suggested units of measurement.
+        Validate that the native unit of measurement can be converted to the
+        suggested unit of measurement, either because they are the same or
+        because a unit converter supports both.
         """
-        # Make sure we can convert the units
-        if self.native_unit_of_measurement != suggested_unit_of_measurement and (
-            (unit_converter := UNIT_CONVERTERS.get(self.device_class)) is None
-            or self.__native_unit_of_measurement_compat
-            not in unit_converter.VALID_UNITS
-            or suggested_unit_of_measurement not in unit_converter.VALID_UNITS
-        ):
-            if not self._invalid_suggested_unit_of_measurement_reported:
-                self._invalid_suggested_unit_of_measurement_reported = True
-                raise ValueError(
-                    f"Entity {type(self)} suggest an incorrect "
-                    f"unit of measurement: {suggested_unit_of_measurement}."
-                )
-            return False
+        # No need to check the unit converter if the units are the same
+        if self.native_unit_of_measurement == suggested_unit_of_measurement:
+            return True

-        return True
+        # Make sure there is a unit converter and it supports both units
+        if (
+            (unit_converter := UNIT_CONVERTERS.get(self.device_class))
+            and self.__native_unit_of_measurement_compat in unit_converter.VALID_UNITS
+            and suggested_unit_of_measurement in unit_converter.VALID_UNITS
+        ):
+            return True
+
+        # Report invalid suggested unit only once per entity
+        if not self._invalid_suggested_unit_of_measurement_reported:
+            self._invalid_suggested_unit_of_measurement_reported = True
+            raise ValueError(
+                f"Entity {type(self)} suggest an incorrect "
+                f"unit of measurement: {suggested_unit_of_measurement}."
+            )
+        return False

     def _get_initial_suggested_unit(self) -> str | UndefinedType:
         """Return the initial unit."""
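The refactor replaces one compound negated condition with three labelled early returns; the set of accepted and rejected inputs is unchanged. A standalone sketch of the same control flow (the converter table below is a dummy stand-in for `UNIT_CONVERTERS`):

```python
# Dummy stand-in for the real UNIT_CONVERTERS table.
UNIT_CONVERTERS = {"temperature": {"°C", "°F", "K"}}


def is_valid_suggested_unit(device_class: str, native: str, suggested: str) -> bool:
    # Same unit: trivially convertible, no converter needed.
    if native == suggested:
        return True

    # Otherwise a converter must exist and support both units.
    valid_units = UNIT_CONVERTERS.get(device_class)
    if valid_units and native in valid_units and suggested in valid_units:
        return True

    # Everything else is invalid (the real code also reports this once).
    return False


assert is_valid_suggested_unit("temperature", "°C", "°F")
assert is_valid_suggested_unit("humidity", "%", "%")  # same unit, no converter
assert not is_valid_suggested_unit("humidity", "%", "°C")
```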
@@ -169,6 +169,9 @@
     "volume": {
       "default": "mdi:car-coolant-level"
     },
+    "volume_flow_rate": {
+      "default": "mdi:pipe-valve"
+    },
     "volume_storage": {
       "default": "mdi:storage-tank"
     },
@@ -67,6 +67,7 @@ from .utils import (
     get_http_port,
     get_rpc_scripts_event_types,
     get_ws_context,
+    remove_empty_sub_devices,
     remove_stale_blu_trv_devices,
 )

@@ -223,6 +224,7 @@ async def _async_setup_block_entry(
         await hass.config_entries.async_forward_entry_setups(
             entry, runtime_data.platforms
         )
+        remove_empty_sub_devices(hass, entry)
     elif (
         sleep_period is None
         or device_entry is None
@@ -334,6 +336,7 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry)
             hass,
             entry,
         )
+        remove_empty_sub_devices(hass, entry)
     elif (
         sleep_period is None
         or device_entry is None
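`remove_empty_sub_devices` runs only after the platforms have been forwarded, i.e. once entities exist to judge emptiness by. The general shape of such a cleanup, on a toy data model (purely illustrative, not the Home Assistant registry API):

```python
# Toy model of "remove empty sub-devices": drop sub-devices that no longer
# have any entities attached.
devices = {
    "shelly-main": ["sensor.power"],
    "shelly-em1:1": [],  # orphaned after a device config change
    "shelly-em1:2": ["sensor.energy"],
}

for name in [dev for dev, entities in devices.items() if not entities]:
    devices.pop(name)

print(devices)  # {'shelly-main': ['sensor.power'], 'shelly-em1:2': ['sensor.energy']}
```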
@@ -11,6 +11,7 @@ from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY_G3, RPC_GENERA
 from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError

 from homeassistant.components.button import (
+    DOMAIN as BUTTON_PLATFORM,
     ButtonDeviceClass,
     ButtonEntity,
     ButtonEntityDescription,
@@ -26,7 +27,14 @@ from homeassistant.util import slugify
 from .const import DOMAIN, LOGGER, SHELLY_GAS_MODELS
 from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
 from .entity import get_entity_block_device_info, get_entity_rpc_device_info
-from .utils import get_blu_trv_device_info, get_device_entry_gen, get_rpc_key_ids
+from .utils import (
+    async_remove_orphaned_entities,
+    get_blu_trv_device_info,
+    get_device_entry_gen,
+    get_rpc_entity_name,
+    get_rpc_key_ids,
+    get_virtual_component_ids,
+)

 PARALLEL_UPDATES = 0

@@ -87,6 +95,13 @@ BLU_TRV_BUTTONS: Final[list[ShellyButtonDescription]] = [
     ),
 ]

+VIRTUAL_BUTTONS: Final[list[ShellyButtonDescription]] = [
+    ShellyButtonDescription[ShellyRpcCoordinator](
+        key="button",
+        press_action="single_push",
+    )
+]
+

 @callback
 def async_migrate_unique_ids(
@@ -138,7 +153,7 @@ async def async_setup_entry(
         hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator)
     )

-    entities: list[ShellyButton | ShellyBluTrvButton] = []
+    entities: list[ShellyButton | ShellyBluTrvButton | ShellyVirtualButton] = []

     entities.extend(
         ShellyButton(coordinator, button)
@@ -146,10 +161,20 @@ async def async_setup_entry(
         if button.supported(coordinator)
     )

-    if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
-        if TYPE_CHECKING:
-            assert isinstance(coordinator, ShellyRpcCoordinator)
+    if not isinstance(coordinator, ShellyRpcCoordinator):
+        async_add_entities(entities)
+        return
+
+    # add virtual buttons
+    if virtual_button_ids := get_rpc_key_ids(coordinator.device.status, "button"):
+        entities.extend(
+            ShellyVirtualButton(coordinator, button, id_)
+            for id_ in virtual_button_ids
+            for button in VIRTUAL_BUTTONS
+        )
+
+    # add BLU TRV buttons
+    if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
         entities.extend(
             ShellyBluTrvButton(coordinator, button, id_)
             for id_ in blutrv_key_ids
@@ -159,6 +184,19 @@ async def async_setup_entry(

     async_add_entities(entities)

+    # the user can remove virtual components from the device configuration, so
+    # we need to remove orphaned entities
+    virtual_button_component_ids = get_virtual_component_ids(
+        coordinator.device.config, BUTTON_PLATFORM
+    )
+    async_remove_orphaned_entities(
+        hass,
+        config_entry.entry_id,
+        coordinator.mac,
+        BUTTON_PLATFORM,
+        virtual_button_component_ids,
+    )
+

 class ShellyBaseButton(
     CoordinatorEntity[ShellyRpcCoordinator | ShellyBlockCoordinator], ButtonEntity
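Shelly RPC status keys follow a `component:id` pattern (for example `button:200`), so collecting the ids for one component type reduces to splitting keys on `:`. A hedged sketch of what a helper like `get_rpc_key_ids` plausibly does (the status shape is assumed for illustration):

```python
# Shelly RPC status keyed by "<component>:<id>" (shape assumed for illustration).
status = {
    "button:200": {},
    "button:201": {},
    "blutrv:200": {"current_C": 21.5},
    "sys": {},
}


def get_key_ids(status: dict, component: str) -> list[int]:
    """Collect instance ids for one component type, e.g. 'button' -> [200, 201]."""
    ids = []
    for key in status:
        name, _, instance = key.partition(":")
        if name == component and instance:
            ids.append(int(instance))
    return ids


print(get_key_ids(status, "button"))  # [200, 201]
```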
@@ -273,3 +311,32 @@ class ShellyBluTrvButton(ShellyBaseButton):
         assert method is not None

         await method(self._id)
+
+
+class ShellyVirtualButton(ShellyBaseButton):
+    """Defines a Shelly virtual component button."""
+
+    def __init__(
+        self,
+        coordinator: ShellyRpcCoordinator,
+        description: ShellyButtonDescription,
+        _id: int,
+    ) -> None:
+        """Initialize Shelly virtual component button."""
+        super().__init__(coordinator, description)
+
+        self._attr_unique_id = f"{coordinator.mac}-{description.key}:{_id}"
+        self._attr_device_info = get_entity_rpc_device_info(coordinator)
+        self._attr_name = get_rpc_entity_name(
+            coordinator.device, f"{description.key}:{_id}"
+        )
+        self._id = _id
+
+    async def _press_method(self) -> None:
+        """Press method."""
+        if TYPE_CHECKING:
+            assert isinstance(self.coordinator, ShellyRpcCoordinator)
+
+        await self.coordinator.device.button_trigger(
+            self._id, self.entity_description.press_action
+        )
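A virtual button press thus ends up as a single call carrying the component id and the configured `press_action`. A toy sketch of that dispatch (the device class below is a stand-in; only the call shape of `button_trigger` is taken from the diff):

```python
import asyncio


class FakeRpcDevice:
    """Stand-in for the aioshelly RPC device; only button_trigger's
    (component_id, event) call shape is taken from the diff above."""

    async def button_trigger(self, component_id: int, event: str) -> None:
        print(f"trigger virtual button {component_id} with event {event!r}")


async def main() -> None:
    device = FakeRpcDevice()
    # Mirrors ShellyVirtualButton._press_method with press_action="single_push".
    await device.button_trigger(200, "single_push")


asyncio.run(main())
```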
Some files were not shown because too many files have changed in this diff.