Mirror of https://github.com/home-assistant/core.git
Synced 2025-12-10 18:08:46 +00:00

Compare commits (58 commits): hassfest-e… ... standardiz…
| Author | SHA1 | Date |
|---|---|---|
| | 05ec051bf9 | |
| | d93e0a105a | |
| | ab1619c0b4 | |
| | 70df7b8503 | |
| | 0e2c2ad355 | |
| | 4c26718739 | |
| | 96034e1525 | |
| | df1302fc1c | |
| | 5a5b639aa4 | |
| | e9fbe2227f | |
| | 82b57568a0 | |
| | be692ab2fd | |
| | 24c04cceee | |
| | 97077898bb | |
| | 08485f4e09 | |
| | b64d60fce4 | |
| | 3690497e1f | |
| | b87e581cde | |
| | f1c55ee7e2 | |
| | 9f17a82acf | |
| | 3955391cda | |
| | d9a757c7e6 | |
| | aa1ec944c0 | |
| | 88c3b6a9f5 | |
| | ada73953f6 | |
| | 42e9b9a0bc | |
| | ec6a052ff5 | |
| | c91d64e04d | |
| | 0ac7cb311d | |
| | 3472020812 | |
| | dcd09523a6 | |
| | a5bfdc697b | |
| | dbb29a7c7d | |
| | 124a63d846 | |
| | 3de701a9ab | |
| | bfe1dd65b3 | |
| | 71bf5e14cc | |
| | 6d231c2c99 | |
| | b93072865b | |
| | 14ebb6cd74 | |
| | 2ddbcd560e | |
| | c5ff7ed1c9 | |
| | c4bea5616c | |
| | 17fe147726 | |
| | 9fae4e7e1f | |
| | 0cebca498c | |
| | 521ff62aae | |
| | fd1df5ad88 | |
| | 91e7a35a07 | |
| | 09381abf46 | |
| | 3713c03c07 | |
| | bd8ddd7cd8 | |
| | f0dc1f927b | |
| | 984590c6d1 | |
| | d324021a3f | |
| | 1f4c0b3e9b | |
| | 69893aba4b | |
| | b9dcf89b37 | |
.github/workflows/builder.yml (vendored): 44 lines changed

@@ -27,12 +27,12 @@ jobs:
      publish: ${{ steps.version.outputs.publish }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          fetch-depth: 0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

@@ -69,7 +69,7 @@ jobs:
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: translations
          path: translations.tar.gz
@@ -90,11 +90,11 @@ jobs:
      arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Download nightly wheels of frontend
        if: needs.init.outputs.channel == 'dev'
-       uses: dawidd6/action-download-artifact@v11
+       uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

      - name: Download nightly wheels of intents
        if: needs.init.outputs.channel == 'dev'
-       uses: dawidd6/action-download-artifact@v11
+       uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: OHF-Voice/intents-package
@@ -116,7 +116,7 @@ jobs:

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        if: needs.init.outputs.channel == 'dev'
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
        sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

      - name: Download translations
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: translations

@@ -190,14 +190,14 @@ jobs:
        echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build base image
-       uses: home-assistant/builder@2025.03.0
+       uses: home-assistant/builder@71885366c80f6ead6ae8c364b61d910e0dc5addc # 2025.03.0
        with:
          args: |
            $BUILD_ARGS \
@@ -242,7 +242,7 @@ jobs:
      - green
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set build additional args
        run: |
@@ -256,14 +256,14 @@ jobs:
        fi

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build base image
-       uses: home-assistant/builder@2025.03.0
+       uses: home-assistant/builder@71885366c80f6ead6ae8c364b61d910e0dc5addc # 2025.03.0
        with:
          args: |
            $BUILD_ARGS \
@@ -279,7 +279,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize git
        uses: home-assistant/actions/helpers/git-init@master
@@ -321,23 +321,23 @@ jobs:
      registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Cosign
-       uses: sigstore/cosign-installer@v3.9.2
+       uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2
        with:
          cosign-release: "v2.2.3"

      - name: Login to DockerHub
        if: matrix.registry == 'docker.io/homeassistant'
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        if: matrix.registry == 'ghcr.io/home-assistant'
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -454,15 +454,15 @@ jobs:
    if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

      - name: Download translations
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: translations

@@ -480,7 +480,7 @@ jobs:
        python -m build

      - name: Upload package to PyPI
-       uses: pypa/gh-action-pypi-publish@v1.13.0
+       uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
        with:
          skip-existing: true
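The common thread in these workflow diffs is pinning every third-party action to a full commit SHA while keeping the human-readable tag as a trailing comment. A tag like `v5.0.0` can be retargeted at different code after the fact; a 40-character SHA cannot, so pinning protects the build against a compromised or moved action. The sketch below is a minimal, hypothetical audit script (not part of this repository; the regex and file layout are assumptions) that flags `uses:` references which still point at a tag or branch:

```python
# check_pins.py - illustrative sketch of an action-pinning audit, not a
# script from home-assistant/core. Flags `uses:` refs that are not a full
# 40-character commit SHA.
import re
import sys
from pathlib import Path

# Matches "uses: owner/repo@ref"; the trailing "# vX.Y.Z" comment is
# whitespace-separated, so it is not captured as part of the ref.
USES_RE = re.compile(r"^\s*-?\s*uses:\s*([\w.-]+/[\w./-]+)@(\S+)")
FULL_SHA_RE = re.compile(r"^[0-9a-f]{40}$")


def unpinned(workflow_dir: str = ".github/workflows") -> list[str]:
    """Return 'file:line owner/repo@ref' entries whose ref is not a SHA."""
    problems: list[str] = []
    for path in Path(workflow_dir).glob("*.y*ml"):
        for lineno, line in enumerate(path.read_text().splitlines(), start=1):
            match = USES_RE.match(line)
            # Local actions (./path) have no @ref and never match the regex.
            if match and not FULL_SHA_RE.match(match.group(2)):
                problems.append(f"{path}:{lineno} {match.group(1)}@{match.group(2)}")
    return problems


if __name__ == "__main__":
    found = unpinned()
    print("\n".join(found) or "all actions pinned")
    sys.exit(1 if found else 0)
```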
.github/workflows/ci.yaml (vendored): 168 lines changed

@@ -98,7 +98,7 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Generate partial Python venv restore key
        id: generate_python_cache_key
        run: |
@@ -120,7 +120,7 @@ jobs:
        run: |
          echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT
      - name: Filter for core changes
-       uses: dorny/paths-filter@v3.0.2
+       uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
        id: core
        with:
          filters: .core_files.yaml
@@ -135,7 +135,7 @@ jobs:
          echo "Result:"
          cat .integration_paths.yaml
      - name: Filter for integration changes
-       uses: dorny/paths-filter@v3.0.2
+       uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
        id: integrations
        with:
          filters: .integration_paths.yaml
@@ -254,16 +254,16 @@ jobs:
      - info
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          key: >-
@@ -279,7 +279,7 @@ jobs:
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
@@ -300,16 +300,16 @@ jobs:
      - pre-commit
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
          needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -340,16 +340,16 @@ jobs:
      - pre-commit
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
          needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -380,16 +380,16 @@ jobs:
      - pre-commit
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
          needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -470,7 +470,7 @@ jobs:
      - script/hassfest/docker/Dockerfile
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -489,10 +489,10 @@ jobs:
        python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
@@ -505,7 +505,7 @@ jobs:
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          key: >-
@@ -513,7 +513,7 @@ jobs:
          needs.info.outputs.python_cache_key }}
      - name: Restore uv wheel cache
        if: steps.cache-venv.outputs.cache-hit != 'true'
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.UV_CACHE_DIR }}
          key: >-
@@ -585,7 +585,7 @@ jobs:
          python --version
          uv pip freeze >> pip_freeze.txt
      - name: Upload pip_freeze artifact
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pip-freeze-${{ matrix.python-version }}
          path: pip_freeze.txt
@@ -631,16 +631,16 @@ jobs:
            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            libturbojpeg
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -664,16 +664,16 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -698,9 +698,9 @@ jobs:
        && github.event_name == 'pull_request'
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Dependency review
-       uses: actions/dependency-review-action@v4.7.3
+       uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
        with:
          license-check: false # We use our own license audit checks

@@ -721,16 +721,16 @@ jobs:
        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -742,7 +742,7 @@ jobs:
          . venv/bin/activate
          python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
      - name: Upload licenses
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
          path: licenses-${{ matrix.python-version }}.json
@@ -764,16 +764,16 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -811,16 +811,16 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -856,10 +856,10 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
@@ -872,7 +872,7 @@ jobs:
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -880,7 +880,7 @@ jobs:
          ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
          needs.info.outputs.python_cache_key }}
      - name: Restore mypy cache
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: .mypy_cache
          key: >-
@@ -947,16 +947,16 @@ jobs:
            libturbojpeg \
            libgammu-dev
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -968,7 +968,7 @@ jobs:
          . venv/bin/activate
          python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
      - name: Upload pytest_buckets
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest_buckets
          path: pytest_buckets.txt
@@ -1022,16 +1022,16 @@ jobs:
            libgammu-dev \
            libxml2-utils
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1045,7 +1045,7 @@ jobs:
        run: |
          echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
      - name: Download pytest_buckets
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: pytest_buckets
      - name: Compile English translations
@@ -1084,14 +1084,14 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1104,7 +1104,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
          path: junit.xml
@@ -1169,16 +1169,16 @@ jobs:
            libmariadb-dev-compat \
            libxml2-utils
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1237,7 +1237,7 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1245,7 +1245,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1259,7 +1259,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-mariadb-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1325,16 +1325,16 @@ jobs:
          sudo apt-get -y install \
            postgresql-server-dev-14
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1394,7 +1394,7 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1402,7 +1402,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1416,7 +1416,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-postgres-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1437,14 +1437,14 @@ jobs:
    timeout-minutes: 10
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Download all coverage artifacts
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'true'
-       uses: codecov/codecov-action@v5.5.1
+       uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
        with:
          fail_ci_if_error: true
          flags: full-suite
@@ -1498,16 +1498,16 @@ jobs:
            libgammu-dev \
            libxml2-utils
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1563,14 +1563,14 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1583,7 +1583,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
          path: junit.xml
@@ -1601,14 +1601,14 @@ jobs:
    timeout-minutes: 10
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Download all coverage artifacts
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'false'
-       uses: codecov/codecov-action@v5.5.1
+       uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -1628,11 +1628,11 @@ jobs:
    timeout-minutes: 10
    steps:
      - name: Download all coverage artifacts
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          pattern: test-results-*
      - name: Upload test results to Codecov
-       uses: codecov/test-results-action@v1
+       uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
        with:
          fail_ci_if_error: true
          verbose: true
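A detail worth noting in the ci.yaml hunks: every venv cache key is a composite of runner OS, architecture, Python version, and a generated cache key, so the cache invalidates whenever any of those inputs changes. A rough Python analogue of how such a composite key behaves (illustrative names only, not repository code):

```python
# Sketch of the composite cache-key idea used throughout ci.yaml: the key
# concatenates every input that should invalidate the cache.
from hashlib import sha256


def cache_key(os_name: str, arch: str, python: str, requirements: str) -> str:
    """Mimic a key shaped like '<os>-<arch>-<python>-<hash of requirements>'."""
    req_hash = sha256(requirements.encode()).hexdigest()[:16]
    return f"{os_name}-{arch}-{python}-{req_hash}"


base = cache_key("Linux", "X64", "3.13.0", "aiohttp==3.10\n")
# Same inputs -> same key -> cache hit.
assert base == cache_key("Linux", "X64", "3.13.0", "aiohttp==3.10\n")
# Bumping a requirement changes the key -> cache miss -> fresh venv build.
assert base != cache_key("Linux", "X64", "3.13.0", "aiohttp==3.11\n")
print(base)
```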
.github/workflows/codeql.yml (vendored): 6 lines changed

@@ -21,14 +21,14 @@ jobs:
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize CodeQL
-       uses: github/codeql-action/init@v3.30.3
+       uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          languages: python

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@v3.30.3
+       uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          category: "/language:python"

Duplicate-issue detection workflow (file name not preserved in this view):

@@ -16,7 +16,7 @@ jobs:
    steps:
      - name: Check if integration label was added and extract details
        id: extract
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            // Debug: Log the event payload
@@ -113,7 +113,7 @@ jobs:
      - name: Fetch similar issues
        id: fetch_similar
        if: steps.extract.outputs.should_continue == 'true'
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
          CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
@@ -231,7 +231,7 @@ jobs:
      - name: Detect duplicates using AI
        id: ai_detection
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-       uses: actions/ai-inference@v2.0.1
+       uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
        with:
          model: openai/gpt-4o
          system-prompt: |
@@ -280,7 +280,7 @@ jobs:
      - name: Post duplicate detection results
        id: post_results
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
          SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}

Issue language-detection workflow (file name not preserved in this view):

@@ -16,7 +16,7 @@ jobs:
    steps:
      - name: Check issue language
        id: detect_language
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          ISSUE_NUMBER: ${{ github.event.issue.number }}
          ISSUE_TITLE: ${{ github.event.issue.title }}
@@ -57,7 +57,7 @@ jobs:
      - name: Detect language using AI
        id: ai_language_detection
        if: steps.detect_language.outputs.should_continue == 'true'
-       uses: actions/ai-inference@v2.0.1
+       uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
        with:
          model: openai/gpt-4o-mini
          system-prompt: |
@@ -90,7 +90,7 @@ jobs:

      - name: Process non-English issues
        if: steps.detect_language.outputs.should_continue == 'true'
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
          ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
.github/workflows/lock.yml (vendored): 2 lines changed

@@ -10,7 +10,7 @@ jobs:
    if: github.repository_owner == 'home-assistant'
    runs-on: ubuntu-latest
    steps:
-     - uses: dessant/lock-threads@v5.0.1
+     - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"
.github/workflows/restrict-task-creation.yml (vendored): 2 lines changed

@@ -12,7 +12,7 @@ jobs:
    if: github.event.issue.type.name == 'Task'
    steps:
      - name: Check if user is authorized
-       uses: actions/github-script@v8
+       uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            const issueAuthor = context.payload.issue.user.login;
.github/workflows/stale.yml (vendored): 6 lines changed

@@ -17,7 +17,7 @@ jobs:
      # - No PRs marked as no-stale
      # - No issues (-1)
      - name: 60 days stale PRs policy
-       uses: actions/stale@v10.0.0
+       uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
      # - No issues marked as no-stale or help-wanted
      # - No PRs (-1)
      - name: 90 days stale issues
-       uses: actions/stale@v10.0.0
+       uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ steps.token.outputs.token }}
          days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
      # - No Issues marked as no-stale or help-wanted
      # - No PRs (-1)
      - name: Needs more information stale issues policy
-       uses: actions/stale@v10.0.0
+       uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ steps.token.outputs.token }}
          only-labels: "needs-more-information"
.github/workflows/translations.yml (vendored): 4 lines changed

@@ -19,10 +19,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (vendored): 32 lines changed

@@ -32,11 +32,11 @@ jobs:
      architectures: ${{ steps.info.outputs.architectures }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
@@ -91,7 +91,7 @@ jobs:
        ) > build_constraints.txt

      - name: Upload env_file
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: env_file
          path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
          overwrite: true

      - name: Upload build_constraints
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: build_constraints
          path: ./build_constraints.txt
          overwrite: true

      - name: Upload requirements_diff
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: requirements_diff
          path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
          python -m script.gen_requirements_all ci

      - name: Upload requirements_all_wheels
-       uses: actions/upload-artifact@v4.6.2
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: requirements_all_wheels
          path: ./requirements_all_wheels_*.txt
@@ -135,20 +135,20 @@ jobs:
      arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Download env_file
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: env_file

      - name: Download build_constraints
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: build_constraints

      - name: Download requirements_diff
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: requirements_diff

@@ -158,6 +158,7 @@ jobs:
        sed -i "/uv/d" requirements.txt
        sed -i "/uv/d" requirements_diff.txt

+     # home-assistant/wheels doesn't support sha pinning
      - name: Build wheels
        uses: home-assistant/wheels@2025.07.0
        with:
@@ -184,25 +185,25 @@ jobs:
      arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Download env_file
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: env_file

      - name: Download build_constraints
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: build_constraints

      - name: Download requirements_diff
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: requirements_diff

      - name: Download requirements_all_wheels
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: requirements_all_wheels

@@ -218,6 +219,7 @@ jobs:
        sed -i "/uv/d" requirements.txt
        sed -i "/uv/d" requirements_diff.txt

+     # home-assistant/wheels doesn't support sha pinning
      - name: Build wheels
        uses: home-assistant/wheels@2025.07.0
        with:
CODEOWNERS (generated): 4 lines changed

@@ -442,8 +442,6 @@ build.json @home-assistant/supervisor
 /tests/components/energyzero/ @klaasnicolaas
 /homeassistant/components/enigma2/ @autinerd
 /tests/components/enigma2/ @autinerd
-/homeassistant/components/enocean/ @bdurrer
-/tests/components/enocean/ @bdurrer
 /homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /homeassistant/components/entur_public_transport/ @hfurubotten
@@ -970,6 +968,8 @@ build.json @home-assistant/supervisor
 /tests/components/moat/ @bdraco
 /homeassistant/components/mobile_app/ @home-assistant/core
 /tests/components/mobile_app/ @home-assistant/core
+/homeassistant/components/modbus/ @janiversen
+/tests/components/modbus/ @janiversen
 /homeassistant/components/modem_callerid/ @tkdrob
 /tests/components/modem_callerid/ @tkdrob
 /homeassistant/components/modern_forms/ @wonderslug
homeassistant/components/accuweather/__init__.py:

@@ -2,21 +2,23 @@

 from __future__ import annotations

+import asyncio
 import logging

 from accuweather import AccuWeather

 from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
-from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
+from homeassistant.const import CONF_API_KEY, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
+from .const import DOMAIN
 from .coordinator import (
     AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherData,
+    AccuWeatherHourlyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -28,7 +30,6 @@ PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
 async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool:
     """Set up AccuWeather as config entry."""
     api_key: str = entry.data[CONF_API_KEY]
-    name: str = entry.data[CONF_NAME]

     location_key = entry.unique_id

@@ -41,26 +42,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
         hass,
         entry,
         accuweather,
-        name,
-        "observation",
-        UPDATE_INTERVAL_OBSERVATION,
     )

     coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
         hass,
         entry,
         accuweather,
-        name,
-        "daily forecast",
-        UPDATE_INTERVAL_DAILY_FORECAST,
     )
+    coordinator_hourly_forecast = AccuWeatherHourlyForecastDataUpdateCoordinator(
+        hass,
+        entry,
+        accuweather,
+    )

-    await coordinator_observation.async_config_entry_first_refresh()
-    await coordinator_daily_forecast.async_config_entry_first_refresh()
+    await asyncio.gather(
+        coordinator_observation.async_config_entry_first_refresh(),
+        coordinator_daily_forecast.async_config_entry_first_refresh(),
+        coordinator_hourly_forecast.async_config_entry_first_refresh(),
+    )

     entry.runtime_data = AccuWeatherData(
         coordinator_observation=coordinator_observation,
         coordinator_daily_forecast=coordinator_daily_forecast,
+        coordinator_hourly_forecast=coordinator_hourly_forecast,
     )

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
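Besides wiring in the new hourly coordinator, the setup path switches from awaiting each coordinator's first refresh in sequence to running all three concurrently with `asyncio.gather`, so startup time is bounded by the slowest request rather than the sum of all three. A standalone sketch of the difference, using stand-in coroutines rather than the AccuWeather client:

```python
import asyncio
import time


async def first_refresh(name: str, delay: float) -> str:
    """Stand-in for a coordinator's async_config_entry_first_refresh()."""
    await asyncio.sleep(delay)
    return name


async def main() -> None:
    start = time.perf_counter()
    # Sequential awaits: total time is the sum of the delays (~0.6 s).
    for name, delay in [("observation", 0.2), ("daily", 0.2), ("hourly", 0.2)]:
        await first_refresh(name, delay)
    print(f"sequential: {time.perf_counter() - start:.2f}s")

    start = time.perf_counter()
    # asyncio.gather: the requests overlap, total is the slowest one (~0.2 s).
    await asyncio.gather(
        first_refresh("observation", 0.2),
        first_refresh("daily", 0.2),
        first_refresh("hourly", 0.2),
    )
    print(f"gather: {time.perf_counter() - start:.2f}s")


asyncio.run(main())
```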
homeassistant/components/accuweather/const.py:

@@ -71,3 +71,4 @@ POLLEN_CATEGORY_MAP = {
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
homeassistant/components/accuweather/coordinator.py:

@@ -3,6 +3,7 @@
 from __future__ import annotations

 from asyncio import timeout
+from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from datetime import timedelta
 import logging
@@ -12,6 +13,7 @@ from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExcee
 from aiohttp.client_exceptions import ClientConnectorError

 from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_NAME
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.update_coordinator import (
@@ -20,7 +22,13 @@ from homeassistant.helpers.update_coordinator import (
     UpdateFailed,
 )

-from .const import DOMAIN, MANUFACTURER
+from .const import (
+    DOMAIN,
+    MANUFACTURER,
+    UPDATE_INTERVAL_DAILY_FORECAST,
+    UPDATE_INTERVAL_HOURLY_FORECAST,
+    UPDATE_INTERVAL_OBSERVATION,
+)

 EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError)

@@ -33,6 +41,7 @@ class AccuWeatherData:

     coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
     coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
+    coordinator_hourly_forecast: AccuWeatherHourlyForecastDataUpdateCoordinator


 type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
@@ -48,13 +57,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
-        coordinator_type: str,
-        update_interval: timedelta,
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -65,8 +72,8 @@ class AccuWeatherObservationDataUpdateCoordinator(
             hass,
             _LOGGER,
             config_entry=config_entry,
-            name=f"{name} ({coordinator_type})",
-            update_interval=update_interval,
+            name=f"{name} (observation)",
+            update_interval=UPDATE_INTERVAL_OBSERVATION,
         )

     async def _async_update_data(self) -> dict[str, Any]:
@@ -86,23 +93,25 @@ class AccuWeatherObservationDataUpdateCoordinator(
         return result


-class AccuWeatherDailyForecastDataUpdateCoordinator(
+class AccuWeatherForecastDataUpdateCoordinator(
     TimestampDataUpdateCoordinator[list[dict[str, Any]]]
 ):
-    """Class to manage fetching AccuWeather data API."""
+    """Base class for AccuWeather forecast."""

     def __init__(
         self,
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
         coordinator_type: str,
         update_interval: timedelta,
+        fetch_method: Callable[..., Awaitable[list[dict[str, Any]]]],
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        self._fetch_method = fetch_method
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -118,12 +127,10 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
         )

     async def _async_update_data(self) -> list[dict[str, Any]]:
-        """Update data via library."""
+        """Update forecast data via library."""
         try:
             async with timeout(10):
-                result = await self.accuweather.async_get_daily_forecast(
-                    language=self.hass.config.language
-                )
+                result = await self._fetch_method(language=self.hass.config.language)
         except EXCEPTIONS as error:
             raise UpdateFailed(
                 translation_domain=DOMAIN,
@@ -132,10 +139,53 @@
             ) from error

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

         return result


+class AccuWeatherDailyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for daily forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "daily forecast",
+            UPDATE_INTERVAL_DAILY_FORECAST,
+            fetch_method=accuweather.async_get_daily_forecast,
+        )
+
+
+class AccuWeatherHourlyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for hourly forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "hourly forecast",
+            UPDATE_INTERVAL_HOURLY_FORECAST,
+            fetch_method=accuweather.async_get_hourly_forecast,
+        )
+
+
 def _get_device_info(location_key: str, name: str) -> DeviceInfo:
     """Get device info."""
     return DeviceInfo(
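The refactor collapses the daily and hourly coordinators into one base class that receives its API call as an injected `fetch_method` callable, so `_async_update_data` is written once and each subclass only supplies its label, interval, and bound client method. A reduced sketch of that pattern outside Home Assistant (all names here are illustrative stand-ins, not the integration's actual classes):

```python
import asyncio
from collections.abc import Awaitable, Callable


class ForecastCoordinator:
    """Base class: polling logic is shared, the fetch call is injected."""

    def __init__(
        self,
        label: str,
        fetch_method: Callable[..., Awaitable[list[dict]]],
    ) -> None:
        self._label = label
        self._fetch_method = fetch_method

    async def update(self, language: str) -> list[dict]:
        # One implementation serves every forecast type.
        return await self._fetch_method(language=language)


class FakeClient:
    """Stand-in for the AccuWeather client."""

    async def async_get_daily_forecast(self, language: str) -> list[dict]:
        return [{"kind": "daily", "language": language}]

    async def async_get_hourly_forecast(self, language: str) -> list[dict]:
        return [{"kind": "hourly", "language": language}]


async def demo() -> None:
    client = FakeClient()
    # Each subclass-equivalent just binds a different client method.
    daily = ForecastCoordinator("daily forecast", client.async_get_daily_forecast)
    hourly = ForecastCoordinator("hourly forecast", client.async_get_hourly_forecast)
    print(await daily.update("en"), await hourly.update("en"))


asyncio.run(demo())
```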
@@ -45,6 +45,7 @@ from .coordinator import (
    AccuWeatherConfigEntry,
    AccuWeatherDailyForecastDataUpdateCoordinator,
    AccuWeatherData,
    AccuWeatherHourlyForecastDataUpdateCoordinator,
    AccuWeatherObservationDataUpdateCoordinator,
)

@@ -64,6 +65,7 @@ class AccuWeatherEntity(
    CoordinatorWeatherEntity[
        AccuWeatherObservationDataUpdateCoordinator,
        AccuWeatherDailyForecastDataUpdateCoordinator,
        AccuWeatherHourlyForecastDataUpdateCoordinator,
    ]
):
    """Define an AccuWeather entity."""
@@ -76,6 +78,7 @@ class AccuWeatherEntity(
        super().__init__(
            observation_coordinator=accuweather_data.coordinator_observation,
            daily_coordinator=accuweather_data.coordinator_daily_forecast,
            hourly_coordinator=accuweather_data.coordinator_hourly_forecast,
        )

        self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
@@ -86,10 +89,13 @@ class AccuWeatherEntity(
        self._attr_unique_id = accuweather_data.coordinator_observation.location_key
        self._attr_attribution = ATTRIBUTION
        self._attr_device_info = accuweather_data.coordinator_observation.device_info
        self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY
        self._attr_supported_features = (
            WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY
        )

        self.observation_coordinator = accuweather_data.coordinator_observation
        self.daily_coordinator = accuweather_data.coordinator_daily_forecast
        self.hourly_coordinator = accuweather_data.coordinator_hourly_forecast

    @property
    def condition(self) -> str | None:
@@ -207,3 +213,32 @@ class AccuWeatherEntity(
            }
            for item in self.daily_coordinator.data
        ]

    @callback
    def _async_forecast_hourly(self) -> list[Forecast] | None:
        """Return the hourly forecast in native units."""
        return [
            {
                ATTR_FORECAST_TIME: utc_from_timestamp(
                    item["EpochDateTime"]
                ).isoformat(),
                ATTR_FORECAST_CLOUD_COVERAGE: item["CloudCover"],
                ATTR_FORECAST_HUMIDITY: item["RelativeHumidity"],
                ATTR_FORECAST_NATIVE_TEMP: item["Temperature"][ATTR_VALUE],
                ATTR_FORECAST_NATIVE_APPARENT_TEMP: item["RealFeelTemperature"][
                    ATTR_VALUE
                ],
                ATTR_FORECAST_NATIVE_PRECIPITATION: item["TotalLiquid"][ATTR_VALUE],
                ATTR_FORECAST_PRECIPITATION_PROBABILITY: item[
                    "PrecipitationProbability"
                ],
                ATTR_FORECAST_NATIVE_WIND_SPEED: item["Wind"][ATTR_SPEED][ATTR_VALUE],
                ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: item["WindGust"][ATTR_SPEED][
                    ATTR_VALUE
                ],
                ATTR_FORECAST_UV_INDEX: item["UVIndex"],
                ATTR_FORECAST_WIND_BEARING: item["Wind"][ATTR_DIRECTION]["Degrees"],
                ATTR_FORECAST_CONDITION: CONDITION_MAP.get(item["WeatherIcon"]),
            }
            for item in self.hourly_coordinator.data
        ]
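Each hourly item is a plain dict from the AccuWeather API, and the comprehension above flattens nested "Value" payloads into Forecast keys. A reduced illustration with invented numbers (the field names follow the hunk):

item = {
    "EpochDateTime": 1700000000,
    "Temperature": {"Value": 12.3},
    "Wind": {"Speed": {"Value": 14.0}, "Direction": {"Degrees": 270}},
}

# Same flattening as _async_forecast_hourly, reduced to three fields.
forecast = {
    "datetime": item["EpochDateTime"],
    "native_temperature": item["Temperature"]["Value"],
    "wind_bearing": item["Wind"]["Direction"]["Degrees"],
}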
@@ -3,10 +3,8 @@
import logging
from typing import Any

from aiohttp import web
import voluptuous as vol

from homeassistant.components.http import KEY_HASS, HomeAssistantView
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
from homeassistant.core import (
@@ -28,7 +26,6 @@ from .const import (
    ATTR_STRUCTURE,
    ATTR_TASK_NAME,
    DATA_COMPONENT,
    DATA_IMAGES,
    DATA_PREFERENCES,
    DOMAIN,
    SERVICE_GENERATE_DATA,
@@ -42,7 +39,6 @@ from .task import (
    GenDataTaskResult,
    GenImageTask,
    GenImageTaskResult,
    ImageData,
    async_generate_data,
    async_generate_image,
)
@@ -55,7 +51,6 @@ __all__ = [
    "GenDataTaskResult",
    "GenImageTask",
    "GenImageTaskResult",
    "ImageData",
    "async_generate_data",
    "async_generate_image",
    "async_setup",
@@ -94,10 +89,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
    hass.data[DATA_COMPONENT] = entity_component
    hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
    hass.data[DATA_IMAGES] = {}
    await hass.data[DATA_PREFERENCES].async_load()
    async_setup_http(hass)
    hass.http.register_view(ImageView)
    hass.services.async_register(
        DOMAIN,
        SERVICE_GENERATE_DATA,
@@ -209,28 +202,3 @@ class AITaskPreferences:
    def as_dict(self) -> dict[str, str | None]:
        """Get the current preferences."""
        return {key: getattr(self, key) for key in self.KEYS}


class ImageView(HomeAssistantView):
    """View to generated images."""

    url = f"/api/{DOMAIN}/images/{{filename}}"
    name = f"api:{DOMAIN}/images"

    async def get(
        self,
        request: web.Request,
        filename: str,
    ) -> web.Response:
        """Serve image."""
        hass = request.app[KEY_HASS]
        image_storage = hass.data[DATA_IMAGES]
        image_data = image_storage.get(filename)

        if image_data is None:
            raise web.HTTPNotFound

        return web.Response(
            body=image_data.data,
            content_type=image_data.mime_type,
        )

@@ -8,19 +8,19 @@ from typing import TYPE_CHECKING, Final
from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
    from homeassistant.components.media_source import local_source
    from homeassistant.helpers.entity_component import EntityComponent

    from . import AITaskPreferences
    from .entity import AITaskEntity
    from .task import ImageData

DOMAIN = "ai_task"
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
DATA_IMAGES: HassKey[dict[str, ImageData]] = HassKey(f"{DOMAIN}_images")
DATA_MEDIA_SOURCE: HassKey[local_source.LocalSource] = HassKey(f"{DOMAIN}_media_source")

IMAGE_DIR: Final = "image"
IMAGE_EXPIRY_TIME = 60 * 60  # 1 hour
MAX_IMAGES = 20

SERVICE_GENERATE_DATA = "generate_data"
SERVICE_GENERATE_IMAGE = "generate_image"
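DATA_MEDIA_SOURCE is a HassKey, so hass.data lookups through it are statically typed as LocalSource rather than Any. A rough standalone sketch of the idea (HA's real HassKey/HassDict achieve this with typing overloads; the cast below only imitates that):

from typing import Generic, TypeVar, cast

_T = TypeVar("_T")


class DemoKey(str, Generic[_T]):
    """Stand-in for HassKey: a str subclass that carries its value type."""


DATA_COUNTER: DemoKey[int] = DemoKey("demo_counter")

data: dict[str, object] = {}
data[DATA_COUNTER] = 41
counter = cast(int, data[DATA_COUNTER]) + 1  # imitates HassDict's typed lookup
print(counter)  # 42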
@@ -1,7 +1,7 @@
{
  "domain": "ai_task",
  "name": "AI Task",
  "after_dependencies": ["camera", "http"],
  "after_dependencies": ["camera"],
  "codeowners": ["@home-assistant/core"],
  "dependencies": ["conversation", "media_source"],
  "documentation": "https://www.home-assistant.io/integrations/ai_task",

@@ -2,89 +2,21 @@

from __future__ import annotations

from datetime import timedelta
import logging

from homeassistant.components.http.auth import async_sign_path
from homeassistant.components.media_player import BrowseError, MediaClass
from homeassistant.components.media_source import (
    BrowseMediaSource,
    MediaSource,
    MediaSourceItem,
    PlayMedia,
    Unresolvable,
)
from homeassistant.components.media_source import MediaSource, local_source
from homeassistant.core import HomeAssistant

from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME

_LOGGER = logging.getLogger(__name__)
from .const import DATA_MEDIA_SOURCE, DOMAIN, IMAGE_DIR


async def async_get_media_source(hass: HomeAssistant) -> ImageMediaSource:
    """Set up image media source."""
    _LOGGER.debug("Setting up image media source")
    return ImageMediaSource(hass)
async def async_get_media_source(hass: HomeAssistant) -> MediaSource:
    """Set up local media source."""
    media_dir = hass.config.path(f"{DOMAIN}/{IMAGE_DIR}")


class ImageMediaSource(MediaSource):
    """Provide images as media sources."""

    name: str = "AI Generated Images"

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize ImageMediaSource."""
        super().__init__(DOMAIN)
        self.hass = hass

    async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
        """Resolve media to a url."""
        image_storage = self.hass.data[DATA_IMAGES]
        image = image_storage.get(item.identifier)

        if image is None:
            raise Unresolvable(f"Could not resolve media item: {item.identifier}")

        return PlayMedia(
            async_sign_path(
                self.hass,
                f"/api/{DOMAIN}/images/{item.identifier}",
                timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
            ),
            image.mime_type,
        )

    async def async_browse_media(
        self,
        item: MediaSourceItem,
    ) -> BrowseMediaSource:
        """Return media."""
        if item.identifier:
            raise BrowseError("Unknown item")

        image_storage = self.hass.data[DATA_IMAGES]

        children = [
            BrowseMediaSource(
                domain=DOMAIN,
                identifier=filename,
                media_class=MediaClass.IMAGE,
                media_content_type=image.mime_type,
                title=image.title or filename,
                can_play=True,
                can_expand=False,
            )
            for filename, image in image_storage.items()
        ]

        return BrowseMediaSource(
            domain=DOMAIN,
            identifier=None,
            media_class=MediaClass.APP,
            media_content_type="",
            title="AI Generated Images",
            can_play=False,
            can_expand=True,
            children_media_class=MediaClass.IMAGE,
            children=children,
        )
    hass.data[DATA_MEDIA_SOURCE] = source = local_source.LocalSource(
        hass,
        DOMAIN,
        "AI Generated Images",
        {IMAGE_DIR: media_dir},
        f"/{DOMAIN}",
    )
    return source
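After this rewrite the integration no longer implements browsing or resolving itself; everything is delegated to a LocalSource rooted at <config>/ai_task/image. A hedged sketch of what a consumer does with it (the URI shape follows the registration above; the filename is invented):

from homeassistant.components import media_source
from homeassistant.core import HomeAssistant


async def resolve_generated_image(hass: HomeAssistant, filename: str) -> str:
    """Resolve an AI Task image to a playable URL (illustrative helper)."""
    uri = f"media-source://ai_task/image/{filename}"
    media = await media_source.async_resolve_media(hass, uri, None)
    return media.url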
@@ -2,9 +2,10 @@

from __future__ import annotations

from contextlib import AsyncExitStack, asynccontextmanager
from dataclasses import dataclass
from datetime import datetime, timedelta
from functools import partial
import io
import mimetypes
from pathlib import Path
import tempfile
@@ -14,20 +15,19 @@ import voluptuous as vol

from homeassistant.components import camera, conversation, media_source
from homeassistant.components.http.auth import async_sign_path
from homeassistant.core import HomeAssistant, ServiceResponse, callback
from homeassistant.core import HomeAssistant, ServiceResponse
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
from homeassistant.helpers.event import async_call_later
from homeassistant.util import RE_SANITIZE_FILENAME, slugify

from .const import (
    DATA_COMPONENT,
    DATA_IMAGES,
    DATA_MEDIA_SOURCE,
    DATA_PREFERENCES,
    DOMAIN,
    IMAGE_DIR,
    IMAGE_EXPIRY_TIME,
    MAX_IMAGES,
    AITaskEntityFeature,
)

@@ -43,45 +43,21 @@ def _save_camera_snapshot(image: camera.Image) -> Path:
    return Path(temp_file.name)


@asynccontextmanager
async def _resolve_attachments(
    hass: HomeAssistant,
    session: ChatSession,
    attachments: list[dict] | None = None,
) -> list[conversation.Attachment]:
    """Resolve attachments for a task."""
    resolved_attachments: list[conversation.Attachment] = []
    created_files: list[Path] = []
    async with AsyncExitStack() as stack:
        resolved_attachments: list[conversation.Attachment] = []

    for attachment in attachments or []:
        media_content_id = attachment["media_content_id"]

        # Special case for camera media sources
        if media_content_id.startswith("media-source://camera/"):
            # Extract entity_id from the media content ID
            entity_id = media_content_id.removeprefix("media-source://camera/")

            # Get snapshot from camera
            image = await camera.async_get_image(hass, entity_id)

            temp_filename = await hass.async_add_executor_job(
                _save_camera_snapshot, image
        for attachment in attachments or []:
            media_content_id = attachment["media_content_id"]
            media = await stack.enter_async_context(
                media_source.async_resolve_with_path(hass, media_content_id, None)
            )
            created_files.append(temp_filename)

            resolved_attachments.append(
                conversation.Attachment(
                    media_content_id=media_content_id,
                    mime_type=image.content_type,
                    path=temp_filename,
                )
            )
        else:
            # Handle regular media sources
            media = await media_source.async_resolve_media(hass, media_content_id, None)
            if media.path is None:
                raise HomeAssistantError(
                    "Only local attachments are currently supported"
                )
            resolved_attachments.append(
                conversation.Attachment(
                    media_content_id=media_content_id,
@@ -90,22 +66,7 @@ async def _resolve_attachments(
                )
            )

    if not created_files:
        return resolved_attachments

    def cleanup_files() -> None:
        """Cleanup temporary files."""
        for file in created_files:
            file.unlink(missing_ok=True)

    @callback
    def cleanup_files_callback() -> None:
        """Cleanup temporary files."""
        hass.async_add_executor_job(cleanup_files)

    session.async_on_cleanup(cleanup_files_callback)

    return resolved_attachments
        yield resolved_attachments


async def async_generate_data(
@@ -143,36 +104,19 @@
    )

    with async_get_chat_session(hass) as session:
        resolved_attachments = await _resolve_attachments(hass, session, attachments)

        return await entity.internal_async_generate_data(
            session,
            GenDataTask(
                name=task_name,
                instructions=instructions,
                structure=structure,
                attachments=resolved_attachments or None,
                llm_api=llm_api,
            ),
        )


def _cleanup_images(image_storage: dict[str, ImageData], num_to_remove: int) -> None:
    """Remove old images to keep the storage size under the limit."""
    if num_to_remove <= 0:
        return

    if num_to_remove >= len(image_storage):
        image_storage.clear()
        return

    sorted_images = sorted(
        image_storage.items(),
        key=lambda item: item[1].timestamp,
    )

    for filename, _ in sorted_images[:num_to_remove]:
        image_storage.pop(filename, None)
        async with _resolve_attachments(
            hass, session, attachments
        ) as resolved_attachments:
            return await entity.internal_async_generate_data(
                session,
                GenDataTask(
                    name=task_name,
                    instructions=instructions,
                    structure=structure,
                    attachments=resolved_attachments or None,
                    llm_api=llm_api,
                ),
            )


async def async_generate_image(
@@ -208,52 +152,51 @@
    )

    with async_get_chat_session(hass) as session:
        resolved_attachments = await _resolve_attachments(hass, session, attachments)

        task_result = await entity.internal_async_generate_image(
            session,
            GenImageTask(
                name=task_name,
                instructions=instructions,
                attachments=resolved_attachments or None,
            ),
        )
        async with _resolve_attachments(
            hass, session, attachments
        ) as resolved_attachments:
            task_result = await entity.internal_async_generate_image(
                session,
                GenImageTask(
                    name=task_name,
                    instructions=instructions,
                    attachments=resolved_attachments or None,
                ),
            )

    service_result = task_result.as_dict()
    image_data = service_result.pop("image_data")
    if service_result.get("revised_prompt") is None:
        service_result["revised_prompt"] = instructions

    image_storage = hass.data[DATA_IMAGES]

    if len(image_storage) + 1 > MAX_IMAGES:
        _cleanup_images(image_storage, len(image_storage) + 1 - MAX_IMAGES)
    source = hass.data[DATA_MEDIA_SOURCE]

    current_time = datetime.now()
    ext = mimetypes.guess_extension(task_result.mime_type, False) or ".png"
    sanitized_task_name = RE_SANITIZE_FILENAME.sub("", slugify(task_name))
    filename = f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}"

    image_storage[filename] = ImageData(
        data=image_data,
        timestamp=int(current_time.timestamp()),
        mime_type=task_result.mime_type,
        title=service_result["revised_prompt"],
    image_file = ImageData(
        filename=f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}",
        file=io.BytesIO(image_data),
        content_type=task_result.mime_type,
    )

    def _purge_image(filename: str, now: datetime) -> None:
        """Remove image from storage."""
        image_storage.pop(filename, None)
    target_folder = media_source.MediaSourceItem.from_uri(
        hass, f"media-source://{DOMAIN}/{IMAGE_DIR}", None
    )

    if IMAGE_EXPIRY_TIME > 0:
        async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))
    service_result["media_source_id"] = await source.async_upload_media(
        target_folder, image_file
    )

    item = media_source.MediaSourceItem.from_uri(
        hass, service_result["media_source_id"], None
    )
    service_result["url"] = async_sign_path(
        hass,
        f"/api/{DOMAIN}/images/{filename}",
        timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
        (await source.async_resolve_media(item)).url,
        timedelta(seconds=IMAGE_EXPIRY_TIME),
    )
    service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"

    return service_result

@@ -358,20 +301,8 @@ class GenImageTaskResult:

@dataclass(slots=True)
class ImageData:
    """Image data for stored generated images."""
    """Implementation of media_source.local_source.UploadedFile protocol."""

    data: bytes
    """Raw image data."""

    timestamp: int
    """Timestamp when the image was generated, as a Unix timestamp."""

    mime_type: str
    """MIME type of the image."""

    title: str
    """Title of the image, usually the prompt used to generate it."""

    def __str__(self) -> str:
        """Return image data as a string."""
        return f"<ImageData {self.title}: {id(self)}>"
    filename: str
    file: io.IOBase
    content_type: str
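The reshaped ImageData now just satisfies the upload protocol that LocalSource.async_upload_media expects: a filename, a binary stream, and a content type. Constructing one looks like this (values invented for illustration):

import io

from homeassistant.components.ai_task import ImageData

image_file = ImageData(
    filename="2024-01-01_120000_example.png",  # hypothetical name
    file=io.BytesIO(b"\x89PNG\r\n\x1a\n"),  # fake, truncated PNG payload
    content_type="image/png",
)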
@@ -3,7 +3,6 @@
from __future__ import annotations

from genie_partner_sdk.client import AladdinConnectClient
from genie_partner_sdk.model import GarageDoor

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
@@ -36,22 +35,7 @@ async def async_setup_entry(
        api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
    )

    sdk_doors = await client.get_doors()

    # Convert SDK GarageDoor objects to integration GarageDoor objects
    doors = [
        GarageDoor(
            {
                "device_id": door.device_id,
                "door_number": door.door_number,
                "name": door.name,
                "status": door.status,
                "link_status": door.link_status,
                "battery_level": door.battery_level,
            }
        )
        for door in sdk_doors
    ]
    doors = await client.get_doors()

    entry.runtime_data = {
        door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)

@@ -41,4 +41,10 @@ class AladdinConnectCoordinator(DataUpdateCoordinator[GarageDoor]):
    async def _async_update_data(self) -> GarageDoor:
        """Fetch data from the Aladdin Connect API."""
        await self.client.update_door(self.data.device_id, self.data.door_number)
        self.data.status = self.client.get_door_status(
            self.data.device_id, self.data.door_number
        )
        self.data.battery_level = self.client.get_battery_status(
            self.data.device_id, self.data.door_number
        )
        return self.data

@@ -49,7 +49,9 @@ class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
    @property
    def is_closed(self) -> bool | None:
        """Update is closed attribute."""
        return self.coordinator.data.status == "closed"
        if (status := self.coordinator.data.status) is None:
            return None
        return status == "closed"

    @property
    def is_closing(self) -> bool | None:

@@ -12,5 +12,5 @@
  "documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "requirements": ["genie-partner-sdk==1.0.10"]
  "requirements": ["genie-partner-sdk==1.0.11"]
}
@@ -33,9 +33,11 @@ from homeassistant.const import (
)
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.storage import STORAGE_DIR
from homeassistant.helpers.typing import ConfigType

from .const import (
    CONF_ADB_SERVER_IP,
@@ -46,10 +48,12 @@ from .const import (
    DEFAULT_ADB_SERVER_PORT,
    DEVICE_ANDROIDTV,
    DEVICE_FIRETV,
    DOMAIN,
    PROP_ETHMAC,
    PROP_WIFIMAC,
    SIGNAL_CONFIG_ENTITY,
)
from .services import async_setup_services

ADB_PYTHON_EXCEPTIONS: tuple = (
    AdbTimeoutError,
@@ -63,6 +67,8 @@ ADB_PYTHON_EXCEPTIONS: tuple = (
)
ADB_TCP_EXCEPTIONS: tuple = (ConnectionResetError, RuntimeError)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]
RELOAD_OPTIONS = [CONF_STATE_DETECTION_RULES]

@@ -188,6 +194,12 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return True


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Android TV / Fire TV integration."""
    async_setup_services(hass)
    return True


async def async_setup_entry(hass: HomeAssistant, entry: AndroidTVConfigEntry) -> bool:
    """Set up Android Debug Bridge platform."""


@@ -8,7 +8,6 @@ import logging

from androidtv.constants import APPS, KEYS
from androidtv.setup_async import AndroidTVAsync, FireTVAsync
import voluptuous as vol

from homeassistant.components import persistent_notification
from homeassistant.components.media_player import (
@@ -17,9 +16,7 @@ from homeassistant.components.media_player import (
    MediaPlayerEntityFeature,
    MediaPlayerState,
)
from homeassistant.const import ATTR_COMMAND
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.dt import utcnow
@@ -39,19 +36,10 @@ from .const import (
    SIGNAL_CONFIG_ENTITY,
)
from .entity import AndroidTVEntity, adb_decorator
from .services import ATTR_ADB_RESPONSE, ATTR_HDMI_INPUT, SERVICE_LEARN_SENDEVENT

_LOGGER = logging.getLogger(__name__)

ATTR_ADB_RESPONSE = "adb_response"
ATTR_DEVICE_PATH = "device_path"
ATTR_HDMI_INPUT = "hdmi_input"
ATTR_LOCAL_PATH = "local_path"

SERVICE_ADB_COMMAND = "adb_command"
SERVICE_DOWNLOAD = "download"
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
SERVICE_UPLOAD = "upload"

# Translate from `AndroidTV` / `FireTV` reported state to HA state.
ANDROIDTV_STATES = {
    "off": MediaPlayerState.OFF,
@@ -77,32 +65,6 @@ async def async_setup_entry(
        ]
    )

    platform = entity_platform.async_get_current_platform()
    platform.async_register_entity_service(
        SERVICE_ADB_COMMAND,
        {vol.Required(ATTR_COMMAND): cv.string},
        "adb_command",
    )
    platform.async_register_entity_service(
        SERVICE_LEARN_SENDEVENT, None, "learn_sendevent"
    )
    platform.async_register_entity_service(
        SERVICE_DOWNLOAD,
        {
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        "service_download",
    )
    platform.async_register_entity_service(
        SERVICE_UPLOAD,
        {
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        "service_upload",
    )


class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
    """Representation of an Android or Fire TV device."""

66
homeassistant/components/androidtv/services.py
Normal file
@@ -0,0 +1,66 @@
"""Services for Android/Fire TV devices."""

from __future__ import annotations

import voluptuous as vol

from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.const import ATTR_COMMAND
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, service

from .const import DOMAIN

ATTR_ADB_RESPONSE = "adb_response"
ATTR_DEVICE_PATH = "device_path"
ATTR_HDMI_INPUT = "hdmi_input"
ATTR_LOCAL_PATH = "local_path"

SERVICE_ADB_COMMAND = "adb_command"
SERVICE_DOWNLOAD = "download"
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
SERVICE_UPLOAD = "upload"


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register the Android TV / Fire TV services."""

    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_ADB_COMMAND,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={vol.Required(ATTR_COMMAND): cv.string},
        func="adb_command",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_LEARN_SENDEVENT,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema=None,
        func="learn_sendevent",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_DOWNLOAD,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        func="service_download",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_UPLOAD,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        func="service_upload",
    )
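Registration moved from the media_player platform module into this dedicated services module, but the service surface is unchanged. Calling it from a script or test should still look like this (the entity id is hypothetical):

from homeassistant.core import HomeAssistant


async def send_home_key(hass: HomeAssistant) -> None:
    """Invoke the relocated adb_command service."""
    await hass.services.async_call(
        "androidtv",
        "adb_command",
        {"command": "HOME"},
        target={"entity_id": "media_player.fire_tv_living_room"},
        blocking=True,
    )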
@@ -16,7 +16,7 @@ from .coordinator import (
    AOSmithStatusCoordinator,
)

PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WATER_HEATER]
PLATFORMS: list[Platform] = [Platform.SELECT, Platform.SENSOR, Platform.WATER_HEATER]


async def async_setup_entry(hass: HomeAssistant, entry: AOSmithConfigEntry) -> bool:

@@ -1,5 +1,10 @@
{
  "entity": {
    "select": {
      "hot_water_plus_level": {
        "default": "mdi:water-plus"
      }
    },
    "sensor": {
      "hot_water_availability": {
        "default": "mdi:water-thermometer"
70
homeassistant/components/aosmith/select.py
Normal file
@@ -0,0 +1,70 @@
"""The select platform for the A. O. Smith integration."""

from homeassistant.components.select import SelectEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AOSmithConfigEntry
from .coordinator import AOSmithStatusCoordinator
from .entity import AOSmithStatusEntity

HWP_LEVEL_HA_TO_AOSMITH = {
    "off": 0,
    "level1": 1,
    "level2": 2,
    "level3": 3,
}
HWP_LEVEL_AOSMITH_TO_HA = {value: key for key, value in HWP_LEVEL_HA_TO_AOSMITH.items()}


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AOSmithConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up A. O. Smith select platform."""
    data = entry.runtime_data

    async_add_entities(
        AOSmithHotWaterPlusSelectEntity(data.status_coordinator, device.junction_id)
        for device in data.status_coordinator.data.values()
        if device.supports_hot_water_plus
    )


class AOSmithHotWaterPlusSelectEntity(AOSmithStatusEntity, SelectEntity):
    """Class for the Hot Water+ select entity."""

    _attr_translation_key = "hot_water_plus_level"
    _attr_options = list(HWP_LEVEL_HA_TO_AOSMITH)

    def __init__(self, coordinator: AOSmithStatusCoordinator, junction_id: str) -> None:
        """Initialize the entity."""
        super().__init__(coordinator, junction_id)
        self._attr_unique_id = f"hot_water_plus_level_{junction_id}"

    @property
    def suggested_object_id(self) -> str | None:
        """Override the suggested object id to make '+' get converted to 'plus' in the entity id."""
        return "hot_water_plus_level"

    @property
    def current_option(self) -> str | None:
        """Return the current Hot Water+ mode."""
        hot_water_plus_level = self.device.status.hot_water_plus_level
        return (
            None
            if hot_water_plus_level is None
            else HWP_LEVEL_AOSMITH_TO_HA.get(hot_water_plus_level)
        )

    async def async_select_option(self, option: str) -> None:
        """Set the Hot Water+ mode."""
        aosmith_hwp_level = HWP_LEVEL_HA_TO_AOSMITH[option]
        await self.client.update_mode(
            junction_id=self.junction_id,
            mode=self.device.status.current_mode,
            hot_water_plus_level=aosmith_hwp_level,
        )

        await self.coordinator.async_request_refresh()
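The two level dicts form an exact bijection, so a select option survives the round trip to the API and back, while unknown API values surface as None (shown as unknown in HA). A quick standalone check of that property:

HWP_LEVEL_HA_TO_AOSMITH = {"off": 0, "level1": 1, "level2": 2, "level3": 3}
HWP_LEVEL_AOSMITH_TO_HA = {v: k for k, v in HWP_LEVEL_HA_TO_AOSMITH.items()}

# Round trip: HA option -> API level -> HA option.
assert HWP_LEVEL_AOSMITH_TO_HA[HWP_LEVEL_HA_TO_AOSMITH["level2"]] == "level2"
# Unknown API values fall back to None via .get().
assert HWP_LEVEL_AOSMITH_TO_HA.get(7) is None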
@@ -26,6 +26,17 @@
      }
    },
    "entity": {
      "select": {
        "hot_water_plus_level": {
          "name": "Hot Water+ level",
          "state": {
            "off": "[%key:common::state::off%]",
            "level1": "Level 1",
            "level2": "Level 2",
            "level3": "Level 3"
          }
        }
      },
      "sensor": {
        "hot_water_availability": {
          "name": "Hot water availability"

@@ -7,5 +7,5 @@
  "iot_class": "local_polling",
  "loggers": ["apcaccess"],
  "quality_scale": "platinum",
  "requirements": ["aioapcaccess==0.4.2"]
  "requirements": ["aioapcaccess==1.0.0"]
}

@@ -395,6 +395,7 @@ SENSORS: dict[str, SensorEntityDescription] = {
    "upsmode": SensorEntityDescription(
        key="upsmode",
        translation_key="ups_mode",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    "upsname": SensorEntityDescription(
        key="upsname",
BIN
homeassistant/components/assist_pipeline/acknowledge.mp3
Normal file
Binary file not shown.
@@ -1,5 +1,7 @@
"""Constants for the Assist pipeline integration."""

from pathlib import Path

DOMAIN = "assist_pipeline"

DATA_CONFIG = f"{DOMAIN}.config"
@@ -23,3 +25,5 @@ SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK)  # 10 ms @ 16Khz
BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS  # 16-bit

OPTION_PREFERRED = "preferred"

ACKNOWLEDGE_PATH = Path(__file__).parent / "acknowledge.mp3"

@@ -23,7 +23,12 @@ from homeassistant.components import conversation, stt, tts, wake_word, websocke
from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, intent
from homeassistant.helpers import (
    chat_session,
    device_registry as dr,
    entity_registry as er,
    intent,
)
from homeassistant.helpers.collection import (
    CHANGE_UPDATED,
    CollectionError,
@@ -45,6 +50,7 @@ from homeassistant.util.limited_size_dict import LimitedSizeDict

from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .const import (
    ACKNOWLEDGE_PATH,
    BYTES_PER_CHUNK,
    CONF_DEBUG_RECORDING_DIR,
    DATA_CONFIG,
@@ -113,6 +119,7 @@ PIPELINE_FIELDS: VolDictType = {
    vol.Required("wake_word_entity"): vol.Any(str, None),
    vol.Required("wake_word_id"): vol.Any(str, None),
    vol.Optional("prefer_local_intents"): bool,
    vol.Optional("acknowledge_media_id"): str,
}

STORED_PIPELINE_RUNS = 10
@@ -1066,8 +1073,11 @@ class PipelineRun:
        intent_input: str,
        conversation_id: str,
        conversation_extra_system_prompt: str | None,
    ) -> str:
        """Run intent recognition portion of pipeline. Returns text to speak."""
    ) -> tuple[str, bool]:
        """Run intent recognition portion of pipeline.

        Returns (speech, all_targets_in_satellite_area).
        """
        if self.intent_agent is None or self._conversation_data is None:
            raise RuntimeError("Recognize intent was not prepared")

@@ -1116,6 +1126,7 @@ class PipelineRun:

        agent_id = self.intent_agent.id
        processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
        all_targets_in_satellite_area = False
        intent_response: intent.IntentResponse | None = None
        if not processed_locally and not self._intent_agent_only:
            # Sentence triggers override conversation agent
@@ -1290,6 +1301,17 @@ class PipelineRun:
                if tts_input_stream and self._streamed_response_text:
                    tts_input_stream.put_nowait(None)

            if agent_id == conversation.HOME_ASSISTANT_AGENT:
                # Check if all targeted entities were in the same area as
                # the satellite device.
                # If so, the satellite should respond with an acknowledge beep
                # instead of a full response.
                all_targets_in_satellite_area = (
                    self._get_all_targets_in_satellite_area(
                        conversation_result.response, self._device_id
                    )
                )

        except Exception as src_error:
            _LOGGER.exception("Unexpected error during intent recognition")
            raise IntentRecognitionError(
@@ -1312,7 +1334,45 @@ class PipelineRun:
        if conversation_result.continue_conversation:
            self._conversation_data.continue_conversation_agent = agent_id

        return speech
        return (speech, all_targets_in_satellite_area)

    def _get_all_targets_in_satellite_area(
        self, intent_response: intent.IntentResponse, device_id: str | None
    ) -> bool:
        """Return true if all targeted entities were in the same area as the device."""
        if (
            (intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
            or (not intent_response.matched_states)
            or (not device_id)
        ):
            return False

        device_registry = dr.async_get(self.hass)

        if (not (device := device_registry.async_get(device_id))) or (
            not device.area_id
        ):
            return False

        entity_registry = er.async_get(self.hass)
        for state in intent_response.matched_states:
            entity = entity_registry.async_get(state.entity_id)
            if not entity:
                return False

            if (entity_area_id := entity.area_id) is None:
                if (entity.device_id is None) or (
                    (entity_device := device_registry.async_get(entity.device_id))
                    is None
                ):
                    return False

                entity_area_id = entity_device.area_id

            if entity_area_id != device.area_id:
                return False

        return True

    async def prepare_text_to_speech(self) -> None:
        """Prepare text-to-speech."""
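Stripped of registry plumbing, the rule the new helper encodes is small: acknowledge with a beep only when the intent succeeded and every matched entity resolves to the satellite's own area. A registry-free restatement over plain values, for illustration only:

def should_acknowledge(
    action_done: bool, satellite_area: str | None, entity_areas: list[str | None]
) -> bool:
    """Mirror _get_all_targets_in_satellite_area over plain values."""
    if not action_done or satellite_area is None or not entity_areas:
        return False
    return all(area == satellite_area for area in entity_areas)


assert should_acknowledge(True, "kitchen", ["kitchen", "kitchen"])
assert not should_acknowledge(True, "kitchen", ["kitchen", None])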
@@ -1350,7 +1410,9 @@ class PipelineRun:
            ),
        ) from err

    async def text_to_speech(self, tts_input: str) -> None:
    async def text_to_speech(
        self, tts_input: str, override_media_path: Path | None = None
    ) -> None:
        """Run text-to-speech portion of pipeline."""
        assert self.tts_stream is not None

@@ -1362,11 +1424,14 @@ class PipelineRun:
                    "language": self.pipeline.tts_language,
                    "voice": self.pipeline.tts_voice,
                    "tts_input": tts_input,
                    "acknowledge_override": override_media_path is not None,
                },
            )
        )

        if not self._streamed_response_text:
        if override_media_path:
            self.tts_stream.async_override_result(override_media_path)
        elif not self._streamed_response_text:
            self.tts_stream.async_set_message(tts_input)

        tts_output = {
@@ -1664,16 +1729,20 @@ class PipelineInput:

            if self.run.end_stage != PipelineStage.STT:
                tts_input = self.tts_input
                all_targets_in_satellite_area = False

                if current_stage == PipelineStage.INTENT:
                    # intent-recognition
                    assert intent_input is not None
                    tts_input = await self.run.recognize_intent(
                    (
                        tts_input,
                        all_targets_in_satellite_area,
                    ) = await self.run.recognize_intent(
                        intent_input,
                        self.session.conversation_id,
                        self.conversation_extra_system_prompt,
                    )
                    if tts_input.strip():
                    if all_targets_in_satellite_area or tts_input.strip():
                        current_stage = PipelineStage.TTS
                    else:
                        # Skip TTS
@@ -1682,8 +1751,14 @@ class PipelineInput:
            if self.run.end_stage != PipelineStage.INTENT:
                # text-to-speech
                if current_stage == PipelineStage.TTS:
                    assert tts_input is not None
                    await self.run.text_to_speech(tts_input)
                    if all_targets_in_satellite_area:
                        # Use acknowledge media instead of full response
                        await self.run.text_to_speech(
                            tts_input or "", override_media_path=ACKNOWLEDGE_PATH
                        )
                    else:
                        assert tts_input is not None
                        await self.run.text_to_speech(tts_input)

        except PipelineError as err:
            self.run.process_event(

@@ -3,6 +3,7 @@

from __future__ import annotations

from collections.abc import Iterable
from dataclasses import replace

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory, Platform
@@ -64,15 +65,36 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
        translation_key="pipeline",
        entity_category=EntityCategory.CONFIG,
    )

    _attr_should_poll = False
    _attr_current_option = OPTION_PREFERRED
    _attr_options = [OPTION_PREFERRED]

    def __init__(self, hass: HomeAssistant, domain: str, unique_id_prefix: str) -> None:
    def __init__(
        self,
        hass: HomeAssistant,
        domain: str,
        unique_id_prefix: str,
        index: int = 0,
    ) -> None:
        """Initialize a pipeline selector."""
        if index < 1:
            # Keep compatibility
            key_suffix = ""
            placeholder = ""
        else:
            key_suffix = f"_{index + 1}"
            placeholder = f" {index + 1}"

        self.entity_description = replace(
            self.entity_description,
            key=f"pipeline{key_suffix}",
            translation_placeholders={"index": placeholder},
        )

        self._domain = domain
        self._unique_id_prefix = unique_id_prefix
        self._attr_unique_id = f"{unique_id_prefix}-pipeline"
        self._attr_unique_id = f"{unique_id_prefix}-{self.entity_description.key}"
        self.hass = hass
        self._update_options()

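Index 0 keeps the legacy key and unique id so existing entities are not re-created, while higher indexes get _2, _3, ... suffixes. The naming scheme in isolation:

def pipeline_key(index: int) -> str:
    """Reproduce the key scheme from AssistPipelineSelect.__init__."""
    return "pipeline" if index < 1 else f"pipeline_{index + 1}"


assert pipeline_key(0) == "pipeline"  # unchanged, for compatibility
assert pipeline_key(1) == "pipeline_2"  # second selector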
@@ -7,7 +7,7 @@
    },
    "select": {
      "pipeline": {
        "name": "Assistant",
        "name": "Assistant{index}",
        "state": {
          "preferred": "Preferred"
        }

@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
    "tmp_backups/*.tar",
    "OZW_Log.txt",
    "tts/*",
    "ai_task/*",
]

EXCLUDE_DATABASE_FROM_BACKUP = [

@@ -18,9 +18,9 @@
    "bleak==1.0.1",
    "bleak-retry-connector==4.4.3",
    "bluetooth-adapters==2.1.0",
    "bluetooth-auto-recovery==1.5.2",
    "bluetooth-auto-recovery==1.5.3",
    "bluetooth-data-tools==1.28.2",
    "dbus-fast==2.44.3",
    "habluetooth==5.6.2"
    "habluetooth==5.6.4"
  ]
}

@@ -3,6 +3,10 @@
from __future__ import annotations

import asyncio
from contextlib import asynccontextmanager
import mimetypes
from pathlib import Path
import tempfile

from homeassistant.components.media_player import BrowseError, MediaClass
from homeassistant.components.media_source import (
@@ -17,7 +21,7 @@ from homeassistant.const import ATTR_FRIENDLY_NAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

from . import Camera, _async_stream_endpoint_url
from . import Camera, Image, _async_stream_endpoint_url, async_get_image
from .const import DATA_COMPONENT, DOMAIN, StreamType


@@ -84,6 +88,30 @@ class CameraMediaSource(MediaSource):

        return PlayMedia(url, FORMAT_CONTENT_TYPE[HLS_PROVIDER])

    @asynccontextmanager
    async def async_resolve_with_path(self, item: MediaSourceItem) -> PlayMedia:
        """Resolve to playable item with path."""
        media = await self.async_resolve_media(item)
        entity_id = item.identifier
        image = await async_get_image(self.hass, entity_id)
        media.path = await self.hass.async_add_executor_job(
            self._save_camera_snapshot, image
        )

        yield media

        await self.hass.async_add_executor_job(media.path.unlink)

    def _save_camera_snapshot(self, image: Image) -> Path:
        """Save camera snapshot to temp file."""
        with tempfile.NamedTemporaryFile(
            mode="wb",
            suffix=mimetypes.guess_extension(image.content_type, False),
            delete=False,
        ) as temp_file:
            temp_file.write(image.content)
            return Path(temp_file.name)

    async def async_browse_media(
        self,
        item: MediaSourceItem,
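Because async_resolve_with_path is an async context manager, the snapshot's temp file lives exactly as long as the async with body; the unlink after the yield replaces the manual cleanup the old ai_task code did. A usage sketch, assuming the component-level wrapper seen in the ai_task hunk (media_source.async_resolve_with_path) mirrors this method:

from homeassistant.components import media_source
from homeassistant.core import HomeAssistant


async def read_attachment(hass: HomeAssistant, media_content_id: str) -> bytes:
    """Illustrative consumer: media.path is valid only inside the block."""
    async with media_source.async_resolve_with_path(
        hass, media_content_id, None
    ) as media:
        # Temp snapshot exists here; it is unlinked when the block exits.
        return await hass.async_add_executor_job(media.path.read_bytes)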
@@ -14,7 +14,11 @@ from elkm1_lib.util import pretty_const
from elkm1_lib.zones import Zone
import voluptuous as vol

from homeassistant.components.sensor import SensorEntity
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import EntityCategory, UnitOfElectricPotential
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@@ -32,6 +36,16 @@ SERVICE_SENSOR_ZONE_BYPASS = "sensor_zone_bypass"
SERVICE_SENSOR_ZONE_TRIGGER = "sensor_zone_trigger"
UNDEFINED_TEMPERATURE = -40

_DEVICE_CLASS_MAP: dict[ZoneType, SensorDeviceClass] = {
    ZoneType.TEMPERATURE: SensorDeviceClass.TEMPERATURE,
    ZoneType.ANALOG_ZONE: SensorDeviceClass.VOLTAGE,
}

_STATE_CLASS_MAP: dict[ZoneType, SensorStateClass] = {
    ZoneType.TEMPERATURE: SensorStateClass.MEASUREMENT,
    ZoneType.ANALOG_ZONE: SensorStateClass.MEASUREMENT,
}

ELK_SET_COUNTER_SERVICE_SCHEMA: VolDictType = {
    vol.Required(ATTR_VALUE): vol.All(vol.Coerce(int), vol.Range(0, 65535))
}
@@ -248,6 +262,16 @@ class ElkZone(ElkSensor):
            return self._temperature_unit
        return None

    @property
    def device_class(self) -> SensorDeviceClass | None:
        """Return the device class of the sensor."""
        return _DEVICE_CLASS_MAP.get(self._element.definition)

    @property
    def state_class(self) -> SensorStateClass | None:
        """Return the state class of the sensor."""
        return _STATE_CLASS_MAP.get(self._element.definition)

    @property
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit of measurement."""

@@ -1,7 +1,7 @@
{
  "domain": "enocean",
  "name": "EnOcean",
  "codeowners": ["@bdurrer"],
  "codeowners": [],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/enocean",
  "iot_class": "local_push",

@@ -127,27 +127,39 @@ class EsphomeAssistSatellite(
            available_wake_words=[], active_wake_words=[], max_active_wake_words=1
        )

    @property
    def pipeline_entity_id(self) -> str | None:
        """Return the entity ID of the pipeline to use for the next conversation."""
        assert self._entry_data.device_info is not None
        self._active_pipeline_index = 0

    def _get_entity_id(self, suffix: str) -> str | None:
        """Return the entity id for pipeline select, etc."""
        if self._entry_data.device_info is None:
            return None

        ent_reg = er.async_get(self.hass)
        return ent_reg.async_get_entity_id(
            Platform.SELECT,
            DOMAIN,
            f"{self._entry_data.device_info.mac_address}-pipeline",
            f"{self._entry_data.device_info.mac_address}-{suffix}",
        )

    @property
    def pipeline_entity_id(self) -> str | None:
        """Return the entity ID of the primary pipeline to use for the next conversation."""
        return self.get_pipeline_entity(self._active_pipeline_index)

    def get_pipeline_entity(self, index: int) -> str | None:
        """Return the entity ID of a pipeline by index."""
        id_suffix = "" if index < 1 else f"_{index + 1}"
        return self._get_entity_id(f"pipeline{id_suffix}")

    def get_wake_word_entity(self, index: int) -> str | None:
        """Return the entity ID of a wake word by index."""
        id_suffix = "" if index < 1 else f"_{index + 1}"
        return self._get_entity_id(f"wake_word{id_suffix}")

    @property
    def vad_sensitivity_entity_id(self) -> str | None:
        """Return the entity ID of the VAD sensitivity to use for the next conversation."""
        assert self._entry_data.device_info is not None
        ent_reg = er.async_get(self.hass)
        return ent_reg.async_get_entity_id(
            Platform.SELECT,
            DOMAIN,
            f"{self._entry_data.device_info.mac_address}-vad_sensitivity",
        )
        return self._get_entity_id("vad_sensitivity")

    @callback
    def async_get_configuration(
@@ -235,6 +247,7 @@ class EsphomeAssistSatellite(
            )
        )

        assert self._attr_supported_features is not None
        if feature_flags & VoiceAssistantFeature.ANNOUNCE:
            # Device supports announcements
            self._attr_supported_features |= (
@@ -257,8 +270,8 @@ class EsphomeAssistSatellite(

        # Update wake word select when config is updated
        self.async_on_remove(
            self._entry_data.async_register_assist_satellite_set_wake_word_callback(
                self.async_set_wake_word
            self._entry_data.async_register_assist_satellite_set_wake_words_callback(
                self.async_set_wake_words
            )
        )

@@ -482,8 +495,31 @@ class EsphomeAssistSatellite(
            # ANNOUNCEMENT format from media player
            self._update_tts_format()

        # Run the pipeline
        _LOGGER.debug("Running pipeline from %s to %s", start_stage, end_stage)
        # Run the appropriate pipeline.
        self._active_pipeline_index = 0

        maybe_pipeline_index = 0
        while True:
            if not (ww_entity_id := self.get_wake_word_entity(maybe_pipeline_index)):
                break

            if not (ww_state := self.hass.states.get(ww_entity_id)):
                continue

            if ww_state.state == wake_word_phrase:
                # First match
                self._active_pipeline_index = maybe_pipeline_index
                break

            # Try next wake word select
            maybe_pipeline_index += 1

        _LOGGER.debug(
            "Running pipeline %s from %s to %s",
            self._active_pipeline_index + 1,
            start_stage,
            end_stage,
        )
        self._pipeline_task = self.config_entry.async_create_background_task(
            self.hass,
            self.async_accept_pipeline_from_satellite(
@@ -514,6 +550,7 @@ class EsphomeAssistSatellite(
    def handle_pipeline_finished(self) -> None:
        """Handle when pipeline has finished running."""
        self._stop_udp_server()
        self._active_pipeline_index = 0
        _LOGGER.debug("Pipeline finished")

    def handle_timer_event(
@@ -542,15 +579,15 @@ class EsphomeAssistSatellite(
        self.tts_response_finished()

    @callback
    def async_set_wake_word(self, wake_word_id: str) -> None:
        """Set active wake word and update config on satellite."""
        self._satellite_config.active_wake_words = [wake_word_id]
    def async_set_wake_words(self, wake_word_ids: list[str]) -> None:
        """Set active wake words and update config on satellite."""
        self._satellite_config.active_wake_words = wake_word_ids
        self.config_entry.async_create_background_task(
            self.hass,
            self.async_set_configuration(self._satellite_config),
            "esphome_voice_assistant_set_config",
        )
        _LOGGER.debug("Setting active wake word: %s", wake_word_id)
        _LOGGER.debug("Setting active wake word(s): %s", wake_word_ids)

    def _update_tts_format(self) -> None:
        """Update the TTS format from the first media player."""

@@ -25,3 +25,5 @@ PROJECT_URLS = {
# ESPHome always uses .0 for the changelog URL
STABLE_BLE_URL_VERSION = f"{STABLE_BLE_VERSION.major}.{STABLE_BLE_VERSION.minor}.0"
DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_URL_VERSION}.html"

NO_WAKE_WORD: Final[str] = "no_wake_word"
@@ -177,9 +177,10 @@ class RuntimeEntryData:
    assist_satellite_config_update_callbacks: list[
        Callable[[AssistSatelliteConfiguration], None]
    ] = field(default_factory=list)
    assist_satellite_set_wake_word_callbacks: list[Callable[[str], None]] = field(
        default_factory=list
    assist_satellite_set_wake_words_callbacks: list[Callable[[list[str]], None]] = (
        field(default_factory=list)
    )
    assist_satellite_wake_words: dict[int, str] = field(default_factory=dict)
    device_id_to_name: dict[int, str] = field(default_factory=dict)
    entity_removal_callbacks: dict[EntityInfoKey, list[CALLBACK_TYPE]] = field(
        default_factory=dict
@@ -501,19 +502,28 @@ class RuntimeEntryData:
            callback_(config)

    @callback
    def async_register_assist_satellite_set_wake_word_callback(
    def async_register_assist_satellite_set_wake_words_callback(
        self,
        callback_: Callable[[str], None],
        callback_: Callable[[list[str]], None],
    ) -> CALLBACK_TYPE:
        """Register to receive callbacks when the Assist satellite's wake word is set."""
        self.assist_satellite_set_wake_word_callbacks.append(callback_)
        return partial(self.assist_satellite_set_wake_word_callbacks.remove, callback_)
        self.assist_satellite_set_wake_words_callbacks.append(callback_)
        return partial(self.assist_satellite_set_wake_words_callbacks.remove, callback_)

    @callback
    def async_assist_satellite_set_wake_word(self, wake_word_id: str) -> None:
        """Notify listeners that the Assist satellite wake word has been set."""
        for callback_ in self.assist_satellite_set_wake_word_callbacks.copy():
            callback_(wake_word_id)
    def async_assist_satellite_set_wake_word(
        self, wake_word_index: int, wake_word_id: str | None
    ) -> None:
        """Notify listeners that the Assist satellite wake words have been set."""
        if wake_word_id:
            self.assist_satellite_wake_words[wake_word_index] = wake_word_id
        else:
            self.assist_satellite_wake_words.pop(wake_word_index, None)

        wake_word_ids = list(self.assist_satellite_wake_words.values())

        for callback_ in self.assist_satellite_set_wake_words_callbacks.copy():
            callback_(wake_word_ids)

    @callback
    def async_register_entity_removal_callback(
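Each wake word select owns one slot in a sparse index-to-id dict; clearing a slot drops the entry, and listeners always receive the flattened list of active ids. The bookkeeping in isolation:

assist_satellite_wake_words: dict[int, str] = {}


def set_wake_word(index: int, wake_word_id: str | None) -> list[str]:
    """Mirror async_assist_satellite_set_wake_word's bookkeeping."""
    if wake_word_id:
        assist_satellite_wake_words[index] = wake_word_id
    else:
        assist_satellite_wake_words.pop(index, None)
    return list(assist_satellite_wake_words.values())


assert set_wake_word(0, "okay_nabu") == ["okay_nabu"]
assert set_wake_word(1, "hey_jarvis") == ["okay_nabu", "hey_jarvis"]
assert set_wake_word(0, None) == ["hey_jarvis"]  # slot cleared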
@@ -9,11 +9,17 @@
|
||||
"pipeline": {
|
||||
"default": "mdi:filter-outline"
|
||||
},
|
||||
"pipeline_2": {
|
||||
"default": "mdi:filter-outline"
|
||||
},
|
||||
"vad_sensitivity": {
|
||||
"default": "mdi:volume-high"
|
||||
},
|
||||
"wake_word": {
|
||||
"default": "mdi:microphone"
|
||||
},
|
||||
"wake_word_2": {
|
||||
"default": "mdi:microphone"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import replace
|
||||
|
||||
from aioesphomeapi import EntityInfo, SelectInfo, SelectState
|
||||
|
||||
from homeassistant.components.assist_pipeline.select import (
|
||||
@@ -15,7 +17,7 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import restore_state
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DOMAIN, NO_WAKE_WORD
|
||||
from .entity import (
|
||||
EsphomeAssistEntity,
EsphomeEntity,
@@ -50,9 +52,11 @@ async def async_setup_entry(
):
async_add_entities(
[
EsphomeAssistPipelineSelect(hass, entry_data),
EsphomeAssistPipelineSelect(hass, entry_data, index=0),
EsphomeAssistPipelineSelect(hass, entry_data, index=1),
EsphomeVadSensitivitySelect(hass, entry_data),
EsphomeAssistSatelliteWakeWordSelect(entry_data),
EsphomeAssistSatelliteWakeWordSelect(entry_data, index=0),
EsphomeAssistSatelliteWakeWordSelect(entry_data, index=1),
]
)

@@ -84,10 +88,14 @@ class EsphomeSelect(EsphomeEntity[SelectInfo, SelectState], SelectEntity):
class EsphomeAssistPipelineSelect(EsphomeAssistEntity, AssistPipelineSelect):
"""Pipeline selector for esphome devices."""

def __init__(self, hass: HomeAssistant, entry_data: RuntimeEntryData) -> None:
def __init__(
self, hass: HomeAssistant, entry_data: RuntimeEntryData, index: int = 0
) -> None:
"""Initialize a pipeline selector."""
EsphomeAssistEntity.__init__(self, entry_data)
AssistPipelineSelect.__init__(self, hass, DOMAIN, self._device_info.mac_address)
AssistPipelineSelect.__init__(
self, hass, DOMAIN, self._device_info.mac_address, index=index
)


class EsphomeVadSensitivitySelect(EsphomeAssistEntity, VadSensitivitySelect):
@@ -109,28 +117,47 @@ class EsphomeAssistSatelliteWakeWordSelect(
translation_key="wake_word",
entity_category=EntityCategory.CONFIG,
)
_attr_current_option: str | None = None
_attr_options: list[str] = []

def __init__(self, entry_data: RuntimeEntryData) -> None:
_attr_current_option: str | None = None
_attr_options: list[str] = [NO_WAKE_WORD]

def __init__(self, entry_data: RuntimeEntryData, index: int = 0) -> None:
"""Initialize a wake word selector."""
if index < 1:
# Keep compatibility
key_suffix = ""
placeholder = ""
else:
key_suffix = f"_{index + 1}"
placeholder = f" {index + 1}"

self.entity_description = replace(
self.entity_description,
key=f"wake_word{key_suffix}",
translation_placeholders={"index": placeholder},
)

EsphomeAssistEntity.__init__(self, entry_data)

unique_id_prefix = self._device_info.mac_address
self._attr_unique_id = f"{unique_id_prefix}-wake_word"
self._attr_unique_id = f"{unique_id_prefix}-{self.entity_description.key}"

# name -> id
self._wake_words: dict[str, str] = {}
self._wake_word_index = index

@property
def available(self) -> bool:
"""Return if entity is available."""
return bool(self._attr_options)
return len(self._attr_options) > 1 # more than just NO_WAKE_WORD

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()

if last_state := await self.async_get_last_state():
self._attr_current_option = last_state.state

# Update options when config is updated
self.async_on_remove(
self._entry_data.async_register_assist_satellite_config_updated_callback(
@@ -140,33 +167,49 @@ class EsphomeAssistSatelliteWakeWordSelect(

async def async_select_option(self, option: str) -> None:
"""Select an option."""
if wake_word_id := self._wake_words.get(option):
# _attr_current_option will be updated on
# async_satellite_config_updated after the device sets the wake
# word.
self._entry_data.async_assist_satellite_set_wake_word(wake_word_id)
self._attr_current_option = option
self.async_write_ha_state()

wake_word_id = self._wake_words.get(option)
self._entry_data.async_assist_satellite_set_wake_word(
self._wake_word_index, wake_word_id
)

def async_satellite_config_updated(
self, config: AssistSatelliteConfiguration
) -> None:
"""Update options with available wake words."""
if (not config.available_wake_words) or (config.max_active_wake_words < 1):
self._attr_current_option = None
# No wake words
self._wake_words.clear()
self._attr_current_option = NO_WAKE_WORD
self._attr_options = [NO_WAKE_WORD]
self._entry_data.assist_satellite_wake_words.pop(
self._wake_word_index, None
)
self.async_write_ha_state()
return

self._wake_words = {w.wake_word: w.id for w in config.available_wake_words}
self._attr_options = sorted(self._wake_words)
self._attr_options = [NO_WAKE_WORD, *sorted(self._wake_words)]

if config.active_wake_words:
# Select first active wake word
wake_word_id = config.active_wake_words[0]
for wake_word in config.available_wake_words:
if wake_word.id == wake_word_id:
self._attr_current_option = wake_word.wake_word
else:
# Select first available wake word
self._attr_current_option = config.available_wake_words[0].wake_word
option = self._attr_current_option
if (
(option is None)
or ((wake_word_id := self._wake_words.get(option)) is None)
or (wake_word_id not in config.active_wake_words)
):
option = NO_WAKE_WORD

self._attr_current_option = option
self.async_write_ha_state()

# Keep entry data in sync
if wake_word_id := self._wake_words.get(option):
self._entry_data.assist_satellite_wake_words[self._wake_word_index] = (
wake_word_id
)
else:
self._entry_data.assist_satellite_wake_words.pop(
self._wake_word_index, None
)

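The rework above keeps a NO_WAKE_WORD sentinel in the option list so the select always has a valid current option, and only reports the stored option when it maps to an active wake word id. A minimal self-contained sketch of that sentinel pattern (the class and data here are illustrative, not the integration's API):

NO_WAKE_WORD = "no_wake_word"

class WakeWordOptions:
    """Sketch: a select whose option list always contains a sentinel."""

    def __init__(self) -> None:
        self.options = [NO_WAKE_WORD]  # sentinel keeps the select non-empty

    @property
    def available(self) -> bool:
        # Available only when a real wake word exists beyond the sentinel
        return len(self.options) > 1

    def update(self, wake_words: dict[str, str], active_ids: list[str]) -> str:
        """Return the option to display after a config update."""
        self.options = [NO_WAKE_WORD, *sorted(wake_words)]
        # Fall back to the sentinel unless a name maps to an active id
        for name, ww_id in wake_words.items():
            if ww_id in active_ids:
                return name
        return NO_WAKE_WORD
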
@@ -119,8 +119,9 @@
}
},
"wake_word": {
"name": "Wake word",
"name": "Wake word{index}",
"state": {
"no_wake_word": "No wake word",
"okay_nabu": "Okay Nabu"
}
}

@@ -37,6 +37,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
FlexitSensorEntityDescription(
key="outside_air_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
translation_key="outside_air_temperature",
value_fn=lambda data: data.outside_air_temperature,
@@ -44,6 +45,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
FlexitSensorEntityDescription(
key="supply_air_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
translation_key="supply_air_temperature",
value_fn=lambda data: data.supply_air_temperature,
@@ -51,6 +53,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
FlexitSensorEntityDescription(
key="exhaust_air_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
translation_key="exhaust_air_temperature",
value_fn=lambda data: data.exhaust_air_temperature,
@@ -58,6 +61,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
FlexitSensorEntityDescription(
key="extract_air_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
translation_key="extract_air_temperature",
value_fn=lambda data: data.extract_air_temperature,
@@ -65,6 +69,7 @@ SENSOR_TYPES: tuple[FlexitSensorEntityDescription, ...] = (
FlexitSensorEntityDescription(
key="room_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
translation_key="room_temperature",
value_fn=lambda data: data.room_temperature,

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250903.3"]
"requirements": ["home-assistant-frontend==20250903.5"]
}

@@ -57,7 +57,10 @@ from .utils import IidTuple, unique_id_to_iids

RETRY_INTERVAL = 60 # seconds
MAX_POLL_FAILURES_TO_DECLARE_UNAVAILABLE = 3

# HomeKit accessories have varying limits on how many characteristics
# they can handle per request. Since we don't know each device's specific limit,
# we batch requests to a conservative size to avoid overwhelming any device.
MAX_CHARACTERISTICS_PER_REQUEST = 49

BLE_AVAILABILITY_CHECK_INTERVAL = 1800 # seconds

@@ -326,16 +329,20 @@ class HKDevice:
)
entry.async_on_unload(self._async_cancel_subscription_timer)

if transport != Transport.BLE:
# Although async_populate_accessories_state fetched the accessory database,
# the /accessories endpoint may return cached values from the accessory's
# perspective. For example, Ecobee thermostats may report stale temperature
# values (like 100°C) in their /accessories response after restarting.
# We need to explicitly poll characteristics to get fresh sensor readings
# before processing the entity map and creating devices.
# Use poll_all=True since entities haven't registered their characteristics yet.
await self.async_update(poll_all=True)

await self.async_process_entity_map()

if transport != Transport.BLE:
# When Home Assistant starts, we restore the accessory map from storage
# which contains characteristic values from when HA was last running.
# These values are stale and may be incorrect (e.g., Ecobee thermostats
# report 100°C when restarting). We need to poll for fresh values before
# creating entities. Use poll_all=True since entities haven't registered
# their characteristics yet.
await self.async_update(poll_all=True)
# Start regular polling after entity map is processed
self._async_start_polling()

# If everything is up to date, we can create the entities
@@ -938,20 +945,26 @@ class HKDevice:
async with self._polling_lock:
_LOGGER.debug("Starting HomeKit device update: %s", self.unique_id)

try:
new_values_dict = await self.get_characteristics(to_poll)
except AccessoryNotFoundError:
# Not only did the connection fail, but also the accessory is not
# visible on the network.
self.async_set_available_state(False)
return
except (AccessoryDisconnectedError, EncryptionError):
# Temporary connection failure. Device may still be available but our
# connection was dropped or we are reconnecting
self._poll_failures += 1
if self._poll_failures >= MAX_POLL_FAILURES_TO_DECLARE_UNAVAILABLE:
new_values_dict: dict[tuple[int, int], dict[str, Any]] = {}
to_poll_list = list(to_poll)

for i in range(0, len(to_poll_list), MAX_CHARACTERISTICS_PER_REQUEST):
batch = to_poll_list[i : i + MAX_CHARACTERISTICS_PER_REQUEST]
try:
batch_values = await self.get_characteristics(batch)
new_values_dict.update(batch_values)
except AccessoryNotFoundError:
# Not only did the connection fail, but also the accessory is not
# visible on the network.
self.async_set_available_state(False)
return
return
except (AccessoryDisconnectedError, EncryptionError):
# Temporary connection failure. Device may still be available but our
# connection was dropped or we are reconnecting
self._poll_failures += 1
if self._poll_failures >= MAX_POLL_FAILURES_TO_DECLARE_UNAVAILABLE:
self.async_set_available_state(False)
return

self._poll_failures = 0
self.process_new_events(new_values_dict)

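The hunk above splits one large characteristic poll into fixed-size requests so accessories with small per-request limits are not overwhelmed. A condensed sketch of that batching pattern, with a hypothetical get_characteristics coroutine standing in for the aiohomekit call:

from typing import Any, Iterable

MAX_PER_REQUEST = 49  # conservative limit, matching the constant above

async def poll_in_batches(
    get_characteristics, to_poll: Iterable[tuple[int, int]]
) -> dict[tuple[int, int], dict[str, Any]]:
    """Poll characteristics in conservative fixed-size batches."""
    results: dict[tuple[int, int], dict[str, Any]] = {}
    items = list(to_poll)
    for i in range(0, len(items), MAX_PER_REQUEST):
        # Each slice becomes one request; results are merged as they arrive
        results.update(await get_characteristics(items[i : i + MAX_PER_REQUEST]))
    return results
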
@@ -14,6 +14,6 @@
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"iot_class": "local_push",
"loggers": ["aiohomekit", "commentjson"],
"requirements": ["aiohomekit==3.2.15"],
"requirements": ["aiohomekit==3.2.16"],
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
}

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyiskra"],
"requirements": ["pyiskra==0.1.26"]
"requirements": ["pyiskra==0.1.27"]
}

@@ -31,6 +31,21 @@
"cycle_delay": {
"default": "mdi:timer-outline"
},
"globe_brightness": {
"default": "mdi:lightbulb-question",
"state": {
"low": "mdi:lightbulb-on-30",
"medium": "mdi:lightbulb-on-50",
"high": "mdi:lightbulb-on"
}
},
"globe_light": {
"state": {
"off": "mdi:lightbulb-off",
"on": "mdi:lightbulb-on",
"auto": "mdi:lightbulb-auto"
}
},
"meal_insert_size": {
"default": "mdi:scale"
}

@@ -7,7 +7,7 @@ from dataclasses import dataclass
from typing import Any, Generic, TypeVar

from pylitterbot import FeederRobot, LitterRobot, LitterRobot4, Robot
from pylitterbot.robot.litterrobot4 import BrightnessLevel
from pylitterbot.robot.litterrobot4 import BrightnessLevel, NightLightMode

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory, UnitOfTime
@@ -32,35 +32,73 @@ class RobotSelectEntityDescription(
select_fn: Callable[[_WhiskerEntityT, str], Coroutine[Any, Any, bool]]


ROBOT_SELECT_MAP: dict[type[Robot], RobotSelectEntityDescription] = {
LitterRobot: RobotSelectEntityDescription[LitterRobot, int]( # type: ignore[type-abstract] # only used for isinstance check
key="cycle_delay",
translation_key="cycle_delay",
unit_of_measurement=UnitOfTime.MINUTES,
current_fn=lambda robot: robot.clean_cycle_wait_time_minutes,
options_fn=lambda robot: robot.VALID_WAIT_TIMES,
select_fn=lambda robot, opt: robot.set_wait_time(int(opt)),
),
LitterRobot4: RobotSelectEntityDescription[LitterRobot4, str](
key="panel_brightness",
translation_key="brightness_level",
current_fn=(
lambda robot: bri.name.lower()
if (bri := robot.panel_brightness) is not None
else None
),
options_fn=lambda _: [level.name.lower() for level in BrightnessLevel],
select_fn=(
lambda robot, opt: robot.set_panel_brightness(BrightnessLevel[opt.upper()])
ROBOT_SELECT_MAP: dict[type[Robot], tuple[RobotSelectEntityDescription, ...]] = {
LitterRobot: (
RobotSelectEntityDescription[LitterRobot, int]( # type: ignore[type-abstract] # only used for isinstance check
key="cycle_delay",
translation_key="cycle_delay",
unit_of_measurement=UnitOfTime.MINUTES,
current_fn=lambda robot: robot.clean_cycle_wait_time_minutes,
options_fn=lambda robot: robot.VALID_WAIT_TIMES,
select_fn=lambda robot, opt: robot.set_wait_time(int(opt)),
),
),
FeederRobot: RobotSelectEntityDescription[FeederRobot, float](
key="meal_insert_size",
translation_key="meal_insert_size",
unit_of_measurement="cups",
current_fn=lambda robot: robot.meal_insert_size,
options_fn=lambda robot: robot.VALID_MEAL_INSERT_SIZES,
select_fn=lambda robot, opt: robot.set_meal_insert_size(float(opt)),
LitterRobot4: (
RobotSelectEntityDescription[LitterRobot4, str](
key="globe_brightness",
translation_key="globe_brightness",
current_fn=(
lambda robot: bri.name.lower()
if (bri := robot.night_light_level) is not None
else None
),
options_fn=lambda _: [level.name.lower() for level in BrightnessLevel],
select_fn=(
lambda robot, opt: robot.set_night_light_brightness(
BrightnessLevel[opt.upper()]
)
),
),
RobotSelectEntityDescription[LitterRobot4, str](
key="globe_light",
translation_key="globe_light",
current_fn=(
lambda robot: mode.name.lower()
if (mode := robot.night_light_mode) is not None
else None
),
options_fn=lambda _: [mode.name.lower() for mode in NightLightMode],
select_fn=(
lambda robot, opt: robot.set_night_light_mode(
NightLightMode[opt.upper()]
)
),
),
RobotSelectEntityDescription[LitterRobot4, str](
key="panel_brightness",
translation_key="brightness_level",
current_fn=(
lambda robot: bri.name.lower()
if (bri := robot.panel_brightness) is not None
else None
),
options_fn=lambda _: [level.name.lower() for level in BrightnessLevel],
select_fn=(
lambda robot, opt: robot.set_panel_brightness(
BrightnessLevel[opt.upper()]
)
),
),
),
FeederRobot: (
RobotSelectEntityDescription[FeederRobot, float](
key="meal_insert_size",
translation_key="meal_insert_size",
unit_of_measurement="cups",
current_fn=lambda robot: robot.meal_insert_size,
options_fn=lambda robot: robot.VALID_MEAL_INSERT_SIZES,
select_fn=lambda robot, opt: robot.set_meal_insert_size(float(opt)),
),
),
}

@@ -77,8 +115,9 @@ async def async_setup_entry(
robot=robot, coordinator=coordinator, description=description
)
for robot in coordinator.account.robots
for robot_type, description in ROBOT_SELECT_MAP.items()
for robot_type, descriptions in ROBOT_SELECT_MAP.items()
if isinstance(robot, robot_type)
for description in descriptions
)



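The map above changes from one description per robot type to a tuple of descriptions, dispatched by isinstance so subclasses inherit entries keyed on their base classes. A self-contained sketch of that dispatch shape (classes and keys are illustrative only):

class Robot: ...
class LitterRobot4(Robot): ...
class FeederRobot(Robot): ...

SELECT_MAP: dict[type[Robot], tuple[str, ...]] = {
    LitterRobot4: ("globe_brightness", "globe_light", "panel_brightness"),
    FeederRobot: ("meal_insert_size",),
}

def descriptions_for(robot: Robot) -> list[str]:
    # A robot matching several keys (via subclassing) collects all of them
    return [
        description
        for robot_type, descriptions in SELECT_MAP.items()
        if isinstance(robot, robot_type)
        for description in descriptions
    ]

assert descriptions_for(FeederRobot()) == ["meal_insert_size"]
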
@@ -144,6 +144,22 @@
"cycle_delay": {
"name": "Clean cycle wait time minutes"
},
"globe_brightness": {
"name": "Globe brightness",
"state": {
"low": "[%key:common::state::low%]",
"medium": "[%key:common::state::medium%]",
"high": "[%key:common::state::high%]"
}
},
"globe_light": {
"name": "Globe light",
"state": {
"auto": "[%key:common::state::auto%]",
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
},
"meal_insert_size": {
"name": "Meal insert size"
},
@@ -157,6 +173,9 @@
}
},
"switch": {
"gravity_mode": {
"name": "Gravity mode"
},
"night_light_mode": {
"name": "Night light mode"
},

@@ -6,7 +6,7 @@ from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any, Generic

from pylitterbot import FeederRobot, LitterRobot
from pylitterbot import FeederRobot, LitterRobot, Robot

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory
@@ -26,20 +26,30 @@ class RobotSwitchEntityDescription(SwitchEntityDescription, Generic[_WhiskerEnti
value_fn: Callable[[_WhiskerEntityT], bool]


ROBOT_SWITCHES = [
RobotSwitchEntityDescription[LitterRobot | FeederRobot](
key="night_light_mode_enabled",
translation_key="night_light_mode",
set_fn=lambda robot, value: robot.set_night_light(value),
value_fn=lambda robot: robot.night_light_mode_enabled,
SWITCH_MAP: dict[type[Robot], tuple[RobotSwitchEntityDescription, ...]] = {
FeederRobot: (
RobotSwitchEntityDescription[FeederRobot](
key="gravity_mode",
translation_key="gravity_mode",
set_fn=lambda robot, value: robot.set_gravity_mode(value),
value_fn=lambda robot: robot.gravity_mode_enabled,
),
),
RobotSwitchEntityDescription[LitterRobot | FeederRobot](
key="panel_lock_enabled",
translation_key="panel_lockout",
set_fn=lambda robot, value: robot.set_panel_lockout(value),
value_fn=lambda robot: robot.panel_lock_enabled,
Robot: ( # type: ignore[type-abstract] # only used for isinstance check
RobotSwitchEntityDescription[LitterRobot | FeederRobot](
key="night_light_mode_enabled",
translation_key="night_light_mode",
set_fn=lambda robot, value: robot.set_night_light(value),
value_fn=lambda robot: robot.night_light_mode_enabled,
),
RobotSwitchEntityDescription[LitterRobot | FeederRobot](
key="panel_lock_enabled",
translation_key="panel_lockout",
set_fn=lambda robot, value: robot.set_panel_lockout(value),
value_fn=lambda robot: robot.panel_lock_enabled,
),
),
]
}


async def async_setup_entry(
@@ -51,9 +61,10 @@ async def async_setup_entry(
coordinator = entry.runtime_data
async_add_entities(
RobotSwitchEntity(robot=robot, coordinator=coordinator, description=description)
for description in ROBOT_SWITCHES
for robot in coordinator.account.robots
if isinstance(robot, (LitterRobot, FeederRobot))
for robot_type, entity_descriptions in SWITCH_MAP.items()
if isinstance(robot, robot_type)
for description in entity_descriptions
)



@@ -5,8 +5,9 @@ from __future__ import annotations
from collections.abc import Callable, Mapping
from typing import Any

from homeassistant.components.sensor import ATTR_STATE_CLASS
from homeassistant.components.sensor import ATTR_STATE_CLASS, NON_NUMERIC_DEVICE_CLASSES
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_DEVICE_ID,
ATTR_DOMAIN,
ATTR_ENTITY_ID,
@@ -28,7 +29,13 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.util.event_type import EventType

from .const import ALWAYS_CONTINUOUS_DOMAINS, AUTOMATION_EVENTS, BUILT_IN_EVENTS, DOMAIN
from .const import (
ALWAYS_CONTINUOUS_DOMAINS,
AUTOMATION_EVENTS,
BUILT_IN_EVENTS,
DOMAIN,
SENSOR_DOMAIN,
)
from .models import LogbookConfig


@@ -38,8 +45,10 @@ def async_filter_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[st
return [
entity_id
for entity_id in entity_ids
if split_entity_id(entity_id)[0] not in ALWAYS_CONTINUOUS_DOMAINS
and not is_sensor_continuous(hass, ent_reg, entity_id)
if (domain := split_entity_id(entity_id)[0]) not in ALWAYS_CONTINUOUS_DOMAINS
and not (
domain == SENSOR_DOMAIN and is_sensor_continuous(hass, ent_reg, entity_id)
)
]


@@ -214,6 +223,10 @@ def async_subscribe_events(
)


def _device_class_is_numeric(device_class: str | None) -> bool:
return device_class is not None and device_class not in NON_NUMERIC_DEVICE_CLASSES


def is_sensor_continuous(
hass: HomeAssistant, ent_reg: er.EntityRegistry, entity_id: str
) -> bool:
@@ -233,7 +246,11 @@ def is_sensor_continuous(
# has a unit_of_measurement or state_class, and filter if
# it does
if (state := hass.states.get(entity_id)) and (attributes := state.attributes):
return ATTR_UNIT_OF_MEASUREMENT in attributes or ATTR_STATE_CLASS in attributes
return (
ATTR_UNIT_OF_MEASUREMENT in attributes
or ATTR_STATE_CLASS in attributes
or _device_class_is_numeric(attributes.get(ATTR_DEVICE_CLASS))
)
# If it's not in the state machine, we need to check
# the entity registry to see if it's a sensor
# filter with a state class. We do not check
@@ -243,8 +260,10 @@ def is_sensor_continuous(
# the state machine will always have the state.
return bool(
(entry := ent_reg.async_get(entity_id))
and entry.capabilities
and entry.capabilities.get(ATTR_STATE_CLASS)
and (
(entry.capabilities and entry.capabilities.get(ATTR_STATE_CLASS))
or _device_class_is_numeric(entry.device_class)
)
)


@@ -258,6 +277,12 @@ def _is_state_filtered(new_state: State, old_state: State) -> bool:
new_state.state == old_state.state
or new_state.last_changed != new_state.last_updated
or new_state.domain in ALWAYS_CONTINUOUS_DOMAINS
or ATTR_UNIT_OF_MEASUREMENT in new_state.attributes
or ATTR_STATE_CLASS in new_state.attributes
or (
new_state.domain == SENSOR_DOMAIN
and (
ATTR_UNIT_OF_MEASUREMENT in new_state.attributes
or ATTR_STATE_CLASS in new_state.attributes
or _device_class_is_numeric(new_state.attributes.get(ATTR_DEVICE_CLASS))
)
)
)

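The logbook change above widens the "continuous sensor" test: a sensor is now filtered if it has a unit, a state class, or a numeric device class, and the check is restricted to the sensor domain. A small sketch of that predicate (the device-class set here is an illustrative subset, not the real constant):

NON_NUMERIC_DEVICE_CLASSES = {"date", "enum", "timestamp"}  # illustrative subset

def device_class_is_numeric(device_class: str | None) -> bool:
    return device_class is not None and device_class not in NON_NUMERIC_DEVICE_CLASSES

def is_continuous(attributes: dict) -> bool:
    """A state is continuous (and filtered) if any numeric marker is present."""
    return (
        "unit_of_measurement" in attributes
        or "state_class" in attributes
        or device_class_is_numeric(attributes.get("device_class"))
    )
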
@@ -3,6 +3,7 @@
from __future__ import annotations

from collections.abc import Callable
from contextlib import asynccontextmanager
from typing import Any, Protocol

import voluptuous as vol
@@ -197,6 +198,30 @@ async def async_resolve_media(
return await item.async_resolve()


@asynccontextmanager
async def async_resolve_with_path(
hass: HomeAssistant, media_content_id: str, target_media_player: str | None
) -> PlayMedia:
"""Get info to play media."""
if DOMAIN not in hass.data:
raise Unresolvable("Media Source not loaded")

try:
item = _get_media_item(hass, media_content_id, target_media_player)
except ValueError as err:
raise Unresolvable(
translation_domain=DOMAIN,
translation_key="resolve_media_failed",
translation_placeholders={
"media_content_id": str(media_content_id),
"error": str(err),
},
) from err

async with item.async_resolve_with_path() as media:
yield media


@websocket_api.websocket_command(
{
vol.Required("type"): "media_source/browse_media",

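The new async_resolve_with_path entry point uses an async context manager so a media source can keep a resource (for example a temporary file backing the path) alive while the caller consumes it. A sketch of that flow under those assumptions (item.async_resolve() stands in for the real resolution call):

from contextlib import asynccontextmanager

@asynccontextmanager
async def resolve_with_path(item):
    """Yield resolved media whose local path stays valid in the with-block."""
    media = await item.async_resolve()  # normal resolution first
    if media.path is None:
        raise ValueError("resolved media has no local path")
    try:
        yield media  # path remains usable for the duration of the block
    finally:
        pass  # a source could clean up temporary files here
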
@@ -133,14 +133,13 @@ class LocalSource(MediaSource):

def _do_move() -> None:
"""Move file to target."""
if not target_dir.is_dir():
raise PathNotSupportedError("Target is not an existing directory")

target_path = target_dir / uploaded_file.filename

try:
target_path = target_dir / uploaded_file.filename

target_path.relative_to(target_dir)
raise_if_invalid_path(str(target_path))

target_dir.mkdir(parents=True, exist_ok=True)
except ValueError as err:
raise PathNotSupportedError("Invalid path") from err


@@ -2,13 +2,15 @@

from __future__ import annotations

from contextlib import asynccontextmanager
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any

from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType
from homeassistant.core import HomeAssistant, callback

from .const import MEDIA_SOURCE_DATA, URI_SCHEME, URI_SCHEME_REGEX
from .const import DOMAIN, MEDIA_SOURCE_DATA, URI_SCHEME, URI_SCHEME_REGEX
from .error import Unresolvable

if TYPE_CHECKING:
from pathlib import Path
@@ -103,6 +105,12 @@ class MediaSourceItem:
assert self.domain is not None
return self.hass.data[MEDIA_SOURCE_DATA][self.domain]

@asynccontextmanager
async def async_resolve_with_path(self) -> PlayMedia:
"""Resolve to playable item with path."""
async with self.async_media_source().async_resolve_with_path(self) as media:
yield media

@classmethod
def from_uri(
cls, hass: HomeAssistant, uri: str, target_media_player: str | None
@@ -132,6 +140,23 @@ class MediaSource:
"""Resolve a media item to a playable item."""
raise NotImplementedError

@asynccontextmanager
async def async_resolve_with_path(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve to playable item with path."""
item = await self.async_resolve_media(item)

if item.path is None:
raise Unresolvable(
translation_domain=DOMAIN,
# TODO translations
translation_key="resolve_media_path_failed",
translation_placeholders={
"media_content_id": item.media_source_id,
},
)

yield item

async def async_browse_media(self, item: MediaSourceItem) -> BrowseMediaSource:
"""Browse media."""
raise NotImplementedError

@@ -16,6 +16,7 @@ from homeassistant.components.sensor import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_UNIT_OF_MEASUREMENT,
CONF_NAME,
CONF_TYPE,
@@ -278,13 +279,18 @@ class MinMaxSensor(SensorEntity):
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the state attributes of the sensor."""
attributes: dict[str, list[str] | str | None] = {
ATTR_ENTITY_ID: self._entity_ids
}

if self._sensor_type == "min":
return {ATTR_MIN_ENTITY_ID: self.min_entity_id}
if self._sensor_type == "max":
return {ATTR_MAX_ENTITY_ID: self.max_entity_id}
if self._sensor_type == "last":
return {ATTR_LAST_ENTITY_ID: self.last_entity_id}
return None
attributes[ATTR_MIN_ENTITY_ID] = self.min_entity_id
elif self._sensor_type == "max":
attributes[ATTR_MAX_ENTITY_ID] = self.max_entity_id
elif self._sensor_type == "last":
attributes[ATTR_LAST_ENTITY_ID] = self.last_entity_id

return attributes

@callback
def _async_min_max_sensor_state_listener(

@@ -1,9 +1,9 @@
{
"domain": "modbus",
"name": "Modbus",
"codeowners": [],
"codeowners": ["@janiversen"],
"documentation": "https://www.home-assistant.io/integrations/modbus",
"iot_class": "local_polling",
"loggers": ["pymodbus"],
"requirements": ["pymodbus==3.11.1"]
"requirements": ["pymodbus==3.11.2"]
}

@@ -253,7 +253,6 @@ class ModbusHub:
self._client: (
AsyncModbusSerialClient | AsyncModbusTcpClient | AsyncModbusUdpClient | None
) = None
self._lock = asyncio.Lock()
self.event_connected = asyncio.Event()
self.hass = hass
self.name = client_config[CONF_NAME]
@@ -362,16 +361,13 @@ class ModbusHub:
if not self._connect_task.done():
self._connect_task.cancel()

async with self._lock:
if self._client:
try:
self._client.close()
except ModbusException as exception_error:
self._log_error(str(exception_error))
del self._client
self._client = None
message = f"modbus {self.name} communication closed"
_LOGGER.info(message)
if self._client:
try:
self._client.close()
except ModbusException as exception_error:
self._log_error(str(exception_error))
self._client = None
_LOGGER.info(f"modbus {self.name} communication closed")

async def low_level_pb_call(
self, slave: int | None, address: int, value: int | list[int], use_call: str
@@ -417,11 +413,9 @@ class ModbusHub:
use_call: str,
) -> ModbusPDU | None:
"""Convert async to sync pymodbus call."""
async with self._lock:
if not self._client:
return None
result = await self.low_level_pb_call(unit, address, value, use_call)
if self._msg_wait:
# small delay until next request/response
await asyncio.sleep(self._msg_wait)
return result
if not self._client:
return None
result = await self.low_level_pb_call(unit, address, value, use_call)
if self._msg_wait:
await asyncio.sleep(self._msg_wait)
return result

@@ -36,6 +36,7 @@ class MotionMountPresets(MotionMountEntity, SelectEntity):

_attr_should_poll = True
_attr_translation_key = "motionmount_preset"
_name_to_index: dict[str, int]

def __init__(
self,
@@ -50,8 +51,12 @@ class MotionMountPresets(MotionMountEntity, SelectEntity):

def _update_options(self, presets: list[motionmount.Preset]) -> None:
"""Convert presets to select options."""
options = [f"{preset.index}: {preset.name}" for preset in presets]
options.insert(0, WALL_PRESET_NAME)
# Ordered list of options (wall first, then presets)
options = [WALL_PRESET_NAME] + [preset.name for preset in presets]

# Build mapping name → index (wall = 0)
self._name_to_index = {WALL_PRESET_NAME: 0}
self._name_to_index.update({preset.name: preset.index for preset in presets})

self._attr_options = options

@@ -123,7 +128,10 @@ class MotionMountPresets(MotionMountEntity, SelectEntity):

async def async_select_option(self, option: str) -> None:
"""Set the new option."""
index = int(option[:1])
index = self._name_to_index.get(option)
if index is None:
raise HomeAssistantError(f"Unknown preset selected: {option}")

try:
await self.mm.go_to_preset(index)
except (TimeoutError, socket.gaierror) as ex:

@@ -83,7 +83,7 @@
"motionmount_preset": {
"name": "Preset",
"state": {
"0_wall": "0: Wall"
"0_wall": "Wall"
}
}
}

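The select fix above replaces parsing the index out of the option label (int(option[:1]), which could misread any preset index beyond a single digit) with an explicit name-to-index mapping. A small sketch of that lookup, with illustrative preset data:

WALL_PRESET_NAME = "Wall"

presets = {"Movie": 1, "Gaming": 2}  # preset name -> device index
name_to_index = {WALL_PRESET_NAME: 0, **presets}

def select(option: str) -> int:
    """Resolve a displayed option name to a device preset index."""
    index = name_to_index.get(option)
    if index is None:
        raise ValueError(f"Unknown preset selected: {option}")
    return index
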
@@ -147,6 +147,9 @@
"volume": {
"default": "mdi:car-coolant-level"
},
"volume_flow_rate": {
"default": "mdi:pipe-valve"
},
"volume_storage": {
"default": "mdi:storage-tank"
},

@@ -100,6 +100,7 @@ OVERKIZ_DEVICE_TO_PLATFORM: dict[UIClass | UIWidget, Platform | None] = {
UIWidget.ATLANTIC_PASS_APC_HEATING_AND_COOLING_ZONE: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported)
UIWidget.ATLANTIC_PASS_APC_HEATING_ZONE: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported)
UIWidget.ATLANTIC_PASS_APC_ZONE_CONTROL: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported)
UIWidget.DISCRETE_EXTERIOR_HEATING: Platform.SWITCH, # widgetName, uiClass is ExteriorHeatingSystem (not supported)
UIWidget.DOMESTIC_HOT_WATER_PRODUCTION: Platform.WATER_HEATER, # widgetName, uiClass is WaterHeatingSystem (not supported)
UIWidget.DOMESTIC_HOT_WATER_TANK: Platform.SWITCH, # widgetName, uiClass is WaterHeatingSystem (not supported)
UIWidget.EVO_HOME_CONTROLLER: Platform.CLIMATE, # widgetName, uiClass is EvoHome (not supported)

@@ -100,6 +100,15 @@ SWITCH_DESCRIPTIONS: list[OverkizSwitchDescription] = [
),
entity_category=EntityCategory.CONFIG,
),
OverkizSwitchDescription(
key=UIWidget.DISCRETE_EXTERIOR_HEATING,
turn_on=OverkizCommand.ON,
turn_off=OverkizCommand.OFF,
icon="mdi:radiator",
is_on=lambda select_state: (
select_state(OverkizState.CORE_ON_OFF) == OverkizCommandParam.ON
),
),
]

SUPPORTED_DEVICES = {

@@ -66,7 +66,7 @@ class RadioMediaSource(MediaSource):
# Register "click" with Radio Browser
await radios.station_click(uuid=station.uuid)

return PlayMedia(station.url, mime_type)
return PlayMedia(station.url_resolved, mime_type)

async def async_browse_media(
self,

|
||||
from .const import (
|
||||
CONF_BASE_URL,
|
||||
CONF_ENTRY_CODE,
|
||||
CONF_SHOW_BACKGROUND,
|
||||
CONF_USER_DATA,
|
||||
DEFAULT_DRAWABLES,
|
||||
DOMAIN,
|
||||
@@ -215,6 +216,7 @@ class RoborockOptionsFlowHandler(OptionsFlowWithReload):
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the map object drawable options."""
|
||||
if user_input is not None:
|
||||
self.options[CONF_SHOW_BACKGROUND] = user_input.pop(CONF_SHOW_BACKGROUND)
|
||||
self.options.setdefault(DRAWABLES, {}).update(user_input)
|
||||
return self.async_create_entry(title="", data=self.options)
|
||||
data_schema = {}
|
||||
@@ -227,6 +229,12 @@ class RoborockOptionsFlowHandler(OptionsFlowWithReload):
|
||||
),
|
||||
)
|
||||
] = bool
|
||||
data_schema[
|
||||
vol.Required(
|
||||
CONF_SHOW_BACKGROUND,
|
||||
default=self.config_entry.options.get(CONF_SHOW_BACKGROUND, False),
|
||||
)
|
||||
] = bool
|
||||
return self.async_show_form(
|
||||
step_id=DRAWABLES,
|
||||
data_schema=vol.Schema(data_schema),
|
||||
|
||||
@@ -10,6 +10,7 @@ DOMAIN = "roborock"
|
||||
CONF_ENTRY_CODE = "code"
|
||||
CONF_BASE_URL = "base_url"
|
||||
CONF_USER_DATA = "user_data"
|
||||
CONF_SHOW_BACKGROUND = "show_background"
|
||||
|
||||
# Option Flow steps
|
||||
DRAWABLES = "drawables"
|
||||
|
||||
@@ -26,7 +26,7 @@ from roborock.version_1_apis.roborock_local_client_v1 import RoborockLocalClient
|
||||
from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1
|
||||
from roborock.version_a01_apis import RoborockClientA01
|
||||
from roborock.web_api import RoborockApiClient
|
||||
from vacuum_map_parser_base.config.color import ColorsPalette
|
||||
from vacuum_map_parser_base.config.color import ColorsPalette, SupportedColor
|
||||
from vacuum_map_parser_base.config.image_config import ImageConfig
|
||||
from vacuum_map_parser_base.config.size import Size, Sizes
|
||||
from vacuum_map_parser_base.map_data import MapData
|
||||
@@ -44,6 +44,7 @@ from homeassistant.util import dt as dt_util, slugify
|
||||
|
||||
from .const import (
|
||||
A01_UPDATE_INTERVAL,
|
||||
CONF_SHOW_BACKGROUND,
|
||||
DEFAULT_DRAWABLES,
|
||||
DOMAIN,
|
||||
DRAWABLES,
|
||||
@@ -146,8 +147,11 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
for drawable, default_value in DEFAULT_DRAWABLES.items()
|
||||
if config_entry.options.get(DRAWABLES, {}).get(drawable, default_value)
|
||||
]
|
||||
colors = ColorsPalette()
|
||||
if not config_entry.options.get(CONF_SHOW_BACKGROUND, False):
|
||||
colors = ColorsPalette({SupportedColor.MAP_OUTSIDE: (0, 0, 0, 0)})
|
||||
self.map_parser = RoborockMapDataParser(
|
||||
ColorsPalette(),
|
||||
colors,
|
||||
Sizes(
|
||||
{
|
||||
k: v * MAP_SCALE
|
||||
|
||||
@@ -60,7 +60,8 @@
"room_names": "Room names",
"vacuum_position": "Vacuum position",
"virtual_walls": "Virtual walls",
"zones": "Zones"
"zones": "Zones",
"show_background": "Show background"
},
"data_description": {
"charger": "Show the charger on the map.",
@@ -79,7 +80,8 @@
"room_names": "Show room names on the map.",
"vacuum_position": "Show the vacuum position on the map.",
"virtual_walls": "Show virtual walls on the map.",
"zones": "Show zones on the map."
"zones": "Show zones on the map.",
"show_background": "Add a background to the map."
}
}
}

@@ -361,25 +361,30 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
def _is_valid_suggested_unit(self, suggested_unit_of_measurement: str) -> bool:
"""Validate the suggested unit.

Validate that a unit converter exists for the sensor's device class and that the
unit converter supports both the native and the suggested units of measurement.
Validate that the native unit of measurement can be converted to the
suggested unit of measurement, either because they are the same or
because a unit converter supports both.
"""
# Make sure we can convert the units
if self.native_unit_of_measurement != suggested_unit_of_measurement and (
(unit_converter := UNIT_CONVERTERS.get(self.device_class)) is None
or self.__native_unit_of_measurement_compat
not in unit_converter.VALID_UNITS
or suggested_unit_of_measurement not in unit_converter.VALID_UNITS
):
if not self._invalid_suggested_unit_of_measurement_reported:
self._invalid_suggested_unit_of_measurement_reported = True
raise ValueError(
f"Entity {type(self)} suggests an incorrect "
f"unit of measurement: {suggested_unit_of_measurement}."
)
return False
# No need to check the unit converter if the units are the same
if self.native_unit_of_measurement == suggested_unit_of_measurement:
return True

return True
# Make sure there is a unit converter and it supports both units
if (
(unit_converter := UNIT_CONVERTERS.get(self.device_class))
and self.__native_unit_of_measurement_compat in unit_converter.VALID_UNITS
and suggested_unit_of_measurement in unit_converter.VALID_UNITS
):
return True

# Report invalid suggested unit only once per entity
if not self._invalid_suggested_unit_of_measurement_reported:
self._invalid_suggested_unit_of_measurement_reported = True
raise ValueError(
f"Entity {type(self)} suggests an incorrect "
f"unit of measurement: {suggested_unit_of_measurement}."
)
return False

def _get_initial_suggested_unit(self) -> str | UndefinedType:
"""Return the initial unit."""

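The refactor above makes the validation order explicit: identical units short-circuit to valid, otherwise a converter must recognize both units. A condensed sketch of that check (UNIT_CONVERTERS is assumed to map device classes to converter classes exposing a VALID_UNITS set):

def is_valid_suggested_unit(native_unit, suggested_unit, converter) -> bool:
    """Sketch of the two-step unit validation introduced above."""
    if native_unit == suggested_unit:
        return True  # no conversion needed, always valid
    # Otherwise a converter must support both the native and suggested units
    return (
        converter is not None
        and native_unit in converter.VALID_UNITS
        and suggested_unit in converter.VALID_UNITS
    )
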
@@ -169,6 +169,9 @@
"volume": {
"default": "mdi:car-coolant-level"
},
"volume_flow_rate": {
"default": "mdi:pipe-valve"
},
"volume_storage": {
"default": "mdi:storage-tank"
},

@@ -11,6 +11,7 @@ from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY_G3, RPC_GENERA
from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError

from homeassistant.components.button import (
DOMAIN as BUTTON_PLATFORM,
ButtonDeviceClass,
ButtonEntity,
ButtonEntityDescription,
@@ -26,7 +27,14 @@ from homeassistant.util import slugify
from .const import DOMAIN, LOGGER, SHELLY_GAS_MODELS
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
from .entity import get_entity_block_device_info, get_entity_rpc_device_info
from .utils import get_blu_trv_device_info, get_device_entry_gen, get_rpc_key_ids
from .utils import (
async_remove_orphaned_entities,
get_blu_trv_device_info,
get_device_entry_gen,
get_rpc_entity_name,
get_rpc_key_ids,
get_virtual_component_ids,
)

PARALLEL_UPDATES = 0

@@ -87,6 +95,13 @@ BLU_TRV_BUTTONS: Final[list[ShellyButtonDescription]] = [
),
]

VIRTUAL_BUTTONS: Final[list[ShellyButtonDescription]] = [
ShellyButtonDescription[ShellyRpcCoordinator](
key="button",
press_action="single_push",
)
]


@callback
def async_migrate_unique_ids(
@@ -138,7 +153,7 @@ async def async_setup_entry(
hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator)
)

entities: list[ShellyButton | ShellyBluTrvButton] = []
entities: list[ShellyButton | ShellyBluTrvButton | ShellyVirtualButton] = []

entities.extend(
ShellyButton(coordinator, button)
@@ -146,10 +161,20 @@ async def async_setup_entry(
if button.supported(coordinator)
)

if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
if TYPE_CHECKING:
assert isinstance(coordinator, ShellyRpcCoordinator)
if not isinstance(coordinator, ShellyRpcCoordinator):
async_add_entities(entities)
return

# add virtual buttons
if virtual_button_ids := get_rpc_key_ids(coordinator.device.status, "button"):
entities.extend(
ShellyVirtualButton(coordinator, button, id_)
for id_ in virtual_button_ids
for button in VIRTUAL_BUTTONS
)

# add BLU TRV buttons
if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
entities.extend(
ShellyBluTrvButton(coordinator, button, id_)
for id_ in blutrv_key_ids
@@ -159,6 +184,19 @@ async def async_setup_entry(

async_add_entities(entities)

# the user can remove virtual components from the device configuration, so
# we need to remove orphaned entities
virtual_button_component_ids = get_virtual_component_ids(
coordinator.device.config, BUTTON_PLATFORM
)
async_remove_orphaned_entities(
hass,
config_entry.entry_id,
coordinator.mac,
BUTTON_PLATFORM,
virtual_button_component_ids,
)


class ShellyBaseButton(
CoordinatorEntity[ShellyRpcCoordinator | ShellyBlockCoordinator], ButtonEntity
@@ -273,3 +311,32 @@ class ShellyBluTrvButton(ShellyBaseButton):
assert method is not None

await method(self._id)


class ShellyVirtualButton(ShellyBaseButton):
"""Defines a Shelly virtual component button."""

def __init__(
self,
coordinator: ShellyRpcCoordinator,
description: ShellyButtonDescription,
_id: int,
) -> None:
"""Initialize Shelly virtual component button."""
super().__init__(coordinator, description)

self._attr_unique_id = f"{coordinator.mac}-{description.key}:{_id}"
self._attr_device_info = get_entity_rpc_device_info(coordinator)
self._attr_name = get_rpc_entity_name(
coordinator.device, f"{description.key}:{_id}"
)
self._id = _id

async def _press_method(self) -> None:
"""Press method."""
if TYPE_CHECKING:
assert isinstance(self.coordinator, ShellyRpcCoordinator)

await self.coordinator.device.button_trigger(
self._id, self.entity_description.press_action
)

@@ -265,9 +265,10 @@ DEVICES_WITHOUT_FIRMWARE_CHANGELOG = (

CONF_GEN = "gen"

VIRTUAL_COMPONENTS = ("boolean", "enum", "input", "number", "text")
VIRTUAL_COMPONENTS = ("boolean", "button", "enum", "input", "number", "text")
VIRTUAL_COMPONENTS_MAP = {
"binary_sensor": {"types": ["boolean"], "modes": ["label"]},
"button": {"types": ["button"], "modes": ["button"]},
"number": {"types": ["number"], "modes": ["field", "slider"]},
"select": {"types": ["enum"], "modes": ["dropdown"]},
"sensor": {"types": ["enum", "number", "text"], "modes": ["label"]},

@@ -631,6 +631,11 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]):
"""Handle device events."""
events: list[dict[str, Any]] = event_data["events"]
for event in events:
# filter out button events as they are triggered by button entities
component = event.get("component")
if component is not None and component.startswith("button"):
continue

event_type = event.get("event")
if event_type is None:
continue

|
||||
):
|
||||
return
|
||||
|
||||
device_id = devices[0].id
|
||||
entities = er.async_entries_for_device(entity_reg, device_id, True)
|
||||
for entity in entities:
|
||||
if not entity.entity_id.startswith(platform):
|
||||
continue
|
||||
if key_suffix is not None and key_suffix not in entity.unique_id:
|
||||
continue
|
||||
# we are looking for the component ID, e.g. boolean:201, em1data:1
|
||||
if not (match := COMPONENT_ID_PATTERN.search(entity.unique_id)):
|
||||
continue
|
||||
for device in devices:
|
||||
entities = er.async_entries_for_device(entity_reg, device.id, True)
|
||||
for entity in entities:
|
||||
if not entity.entity_id.startswith(platform):
|
||||
continue
|
||||
if key_suffix is not None and key_suffix not in entity.unique_id:
|
||||
continue
|
||||
# we are looking for the component ID, e.g. boolean:201, em1data:1
|
||||
if not (match := COMPONENT_ID_PATTERN.search(entity.unique_id)):
|
||||
continue
|
||||
|
||||
key = match.group()
|
||||
if key not in keys:
|
||||
orphaned_entities.append(entity.unique_id.split("-", 1)[1])
|
||||
key = match.group()
|
||||
if key not in keys:
|
||||
orphaned_entities.append(entity.unique_id.split("-", 1)[1])
|
||||
|
||||
if orphaned_entities:
|
||||
async_remove_shelly_rpc_entities(hass, platform, mac, orphaned_entities)
|
||||
|
||||
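The loop fix above collects orphans across every registry device tied to the config entry instead of only the first match. A sketch of that shape, with hypothetical helpers standing in for the registry calls:

def collect_orphans(devices, entries_for_device, live_keys) -> list[str]:
    """Gather unique_ids of entities whose component no longer exists."""
    orphaned: list[str] = []
    for device in devices:  # previously only devices[0] was scanned
        for entry in entries_for_device(device.id):
            if entry["component_id"] not in live_keys:
                orphaned.append(entry["unique_id"])
    return orphaned
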
@@ -31,9 +31,6 @@ from homeassistant.helpers.system_info import async_get_system_info

from .common import async_build_source_set

UPNP_SERVER_MIN_PORT = 40000
UPNP_SERVER_MAX_PORT = 40100

_LOGGER = logging.getLogger(__name__)


@@ -95,26 +92,17 @@ async def _async_find_next_available_port(
) -> tuple[int, socket.socket]:
"""Get a free TCP port."""
family = socket.AF_INET if is_ipv4_address(source) else socket.AF_INET6
# We use an ExitStack to ensure the socket is closed if we fail to find a port.
with ExitStack() as stack:
test_socket = stack.enter_context(socket.socket(family, socket.SOCK_STREAM))
test_socket = socket.socket(family, socket.SOCK_STREAM)
try:
test_socket.setblocking(False)
test_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

for port in range(UPNP_SERVER_MIN_PORT, UPNP_SERVER_MAX_PORT):
addr = (source[0], port, *source[2:])
try:
test_socket.bind(addr)
except OSError:
if port == UPNP_SERVER_MAX_PORT - 1:
raise
else:
# The socket will be dealt with by the caller, so we detach it from the stack
# before returning it to prevent it from being closed.
stack.pop_all()
return port, test_socket

raise RuntimeError("unreachable")
addr = (source[0], 0, *source[2:])
test_socket.bind(addr)
port = test_socket.getsockname()[1]
except BaseException:
test_socket.close()
raise
return port, test_socket


class Server:

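The change above drops the fixed 40000-40100 probe loop: binding to port 0 lets the kernel assign a free ephemeral port in one call. A minimal sketch of that idiom:

import socket

def find_free_port(host: str = "127.0.0.1") -> tuple[int, socket.socket]:
    """Bind to port 0 and return the OS-assigned port plus the bound socket."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((host, 0))  # port 0 -> kernel picks a free port
        return sock.getsockname()[1], sock
    except BaseException:
        sock.close()  # only close on failure; the caller owns the socket
        raise
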
@@ -28,6 +28,7 @@ KNOWN_BRANDS: dict[str | None, str] = {
"Apple Inc.": "apple",
"Aqara": "aqara_gateway",
"eero": "eero",
"GL.iNET Inc.": "glinet",
"Google Inc.": "google",
"HomeAssistant": "homeassistant",
"Home Assistant": "homeassistant",

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/tibber",
"iot_class": "cloud_polling",
"loggers": ["tibber"],
"requirements": ["pyTibber==0.31.7"]
"requirements": ["pyTibber==0.32.0"]
}

@@ -377,7 +377,6 @@ class TibberSensorElPrice(TibberSensor):
"app_nickname": None,
"grid_company": None,
"estimated_annual_consumption": None,
"price_level": None,
"max_price": None,
"avg_price": None,
"min_price": None,
@@ -405,16 +404,16 @@ class TibberSensorElPrice(TibberSensor):
await self._fetch_data()

elif (
self._tibber_home.current_price_total
self._tibber_home.price_total
and self._last_updated
and self._last_updated.hour == now.hour
and now - self._last_updated < timedelta(minutes=15)
and self._tibber_home.last_data_timestamp
):
return

res = self._tibber_home.current_price_data()
self._attr_native_value, price_level, self._last_updated, price_rank = res
self._attr_extra_state_attributes["price_level"] = price_level
self._attr_native_value, self._last_updated, price_rank = res
self._attr_extra_state_attributes["intraday_price_ranking"] = price_rank

attrs = self._tibber_home.current_attributes()

@@ -50,7 +50,6 @@ async def __get_prices(call: ServiceCall) -> ServiceResponse:
{
"start_time": starts_at,
"price": price,
"level": tibber_home.price_level.get(starts_at),
}
for starts_at, price in tibber_home.price_total.items()
]

@@ -18,7 +18,6 @@ import secrets
from time import monotonic
from typing import Any, Final, Generic, Protocol, TypeVar

import aiofiles
from aiohttp import web
import mutagen
from mutagen.id3 import ID3, TextFrame as ID3Text
@@ -591,13 +590,9 @@ class ResultStream:

if not needs_conversion:
# Read file directly (no conversion)
async with aiofiles.open(self._override_media_path, "rb") as media_file:
while True:
chunk = await media_file.read(FFMPEG_CHUNK_SIZE)
if not chunk:
break
yield chunk

yield await self.hass.async_add_executor_job(
self._override_media_path.read_bytes
)
return

# Use ffmpeg to convert audio to preferred format

@@ -331,6 +331,7 @@ class DPCode(StrEnum):
SMOKE_SENSOR_STATE = "smoke_sensor_state"
SMOKE_SENSOR_STATUS = "smoke_sensor_status"
SMOKE_SENSOR_VALUE = "smoke_sensor_value"
SNOOZE = "snooze"
SOS = "sos" # Emergency State
SOS_STATE = "sos_state" # Emergency mode
SPEED = "speed" # Speed level
@@ -371,6 +372,7 @@ class DPCode(StrEnum):
SWITCH_MODE7 = "switch_mode7"
SWITCH_MODE8 = "switch_mode8"
SWITCH_MODE9 = "switch_mode9"
SWITCH_MUSIC = "switch_music"
SWITCH_NIGHT_LIGHT = "switch_night_light"
SWITCH_SAVE_ENERGY = "switch_save_energy"
SWITCH_SOUND = "switch_sound" # Voice switch
@@ -384,6 +386,7 @@ class DPCode(StrEnum):
SWITCH_VERTICAL = "switch_vertical" # Vertical swing flap switch
SWITCH_VOICE = "switch_voice" # Voice switch
TARGET_DIS_CLOSEST = "target_dis_closest" # Closest target distance
TDS_IN = "tds_in" # Total dissolved solids
TEMP = "temp" # Temperature setting
TEMP_BOILING_C = "temp_boiling_c"
TEMP_BOILING_F = "temp_boiling_f"
@@ -424,6 +427,7 @@ class DPCode(StrEnum):
TOTAL_POWER = "total_power"
TOTAL_TIME = "total_time"
TVOC = "tvoc"
UP_DOWN = "up_down"
UPPER_TEMP = "upper_temp"
UPPER_TEMP_F = "upper_temp_f"
UV = "uv" # UV sterilization

@@ -126,7 +126,7 @@ class TuyaEntity(Entity):
return None

def get_dptype(
self, dpcode: DPCode | None, prefer_function: bool = False
self, dpcode: DPCode | None, *, prefer_function: bool = False
) -> DPType | None:
"""Find a matching DPCode data type available for this device."""
if dpcode is None:

@@ -73,6 +73,15 @@ class TuyaLightEntityDescription(LightEntityDescription):


LIGHTS: dict[str, tuple[TuyaLightEntityDescription, ...]] = {
# White noise machine
"bzyd": (
TuyaLightEntityDescription(
key=DPCode.SWITCH_LED,
name=None,
color_mode=DPCode.WORK_MODE,
color_data=DPCode.COLOUR_DATA,
),
),
# Curtain Switch
# https://developer.tuya.com/en/docs/iot/category-clkg?id=Kaiuz0gitil39
"clkg": (
@@ -531,7 +540,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity):

if (
dpcode := get_dpcode(self.device, description.color_data)
) and self.get_dptype(dpcode) == DPType.JSON:
) and self.get_dptype(dpcode, prefer_function=True) == DPType.JSON:
self._color_data_dpcode = dpcode
color_modes.add(ColorMode.HS)
if dpcode in self.device.function:

@@ -65,6 +65,14 @@ NUMBERS: dict[str, tuple[NumberEntityDescription, ...]] = {
entity_category=EntityCategory.CONFIG,
),
),
# White noise machine
"bzyd": (
NumberEntityDescription(
key=DPCode.VOLUME_SET,
translation_key="volume",
entity_category=EntityCategory.CONFIG,
),
),
# CO2 Detector
# https://developer.tuya.com/en/docs/iot/categoryco2bj?id=Kaiuz3wes7yuy
"co2bj": (

@@ -254,6 +254,11 @@ SELECTS: dict[str, tuple[SelectEntityDescription, ...]] = {
translation_key="desk_level",
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.UP_DOWN,
translation_key="desk_up_down",
entity_category=EntityCategory.CONFIG,
),
),
# Smart Camera
# https://developer.tuya.com/en/docs/iot/categorysp?id=Kaiuz35leyo12

@@ -1161,6 +1161,21 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
state_class=SensorStateClass.MEASUREMENT,
),
),
# Water tester
"szjcy": (
TuyaSensorEntityDescription(
key=DPCode.TDS_IN,
translation_key="total_dissolved_solids",
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
translation_key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Fingerbot
"szjqr": BATTERY_SENSORS,
# IoT Switch

@@ -534,6 +534,14 @@
"level_4": "Level 4"
}
},
"desk_up_down": {
"name": "Up/Down",
"state": {
"up": "Up",
"down": "Down",
"stop": "Stop"
}
},
"inverter_work_mode": {
"name": "Inverter work mode",
"state": {
@@ -812,6 +820,9 @@
},
"supply_frequency": {
"name": "Supply frequency"
},
"total_dissolved_solids": {
"name": "Total dissolved solids"
}
},
"switch": {
@@ -973,6 +984,12 @@
},
"output_power_limit": {
"name": "Output power limit"
},
"music": {
"name": "Music"
},
"snooze": {
"name": "Snooze"
}
},
"valve": {

@@ -37,6 +37,31 @@ SWITCHES: dict[str, tuple[SwitchEntityDescription, ...]] = {
entity_category=EntityCategory.CONFIG,
),
),
# White noise machine
"bzyd": (
SwitchEntityDescription(
key=DPCode.SWITCH,
name=None,
),
SwitchEntityDescription(
key=DPCode.CHILD_LOCK,
translation_key="child_lock",
icon="mdi:account-lock",
entity_category=EntityCategory.CONFIG,
),
SwitchEntityDescription(
key=DPCode.SWITCH_MUSIC,
translation_key="music",
icon="mdi:music",
entity_category=EntityCategory.CONFIG,
),
SwitchEntityDescription(
key=DPCode.SNOOZE,
translation_key="snooze",
icon="mdi:alarm-snooze",
entity_category=EntityCategory.CONFIG,
),
),
# Curtain
# https://developer.tuya.com/en/docs/iot/f?id=K9gf46o5mtfyc
"cl": (

homeassistant/components/twitch/coordinator.py
@@ -79,6 +79,7 @@ class TwitchCoordinator(DataUpdateCoordinator[dict[str, TwitchUpdate]]):
        if not (user := await first(self.twitch.get_users())):
            raise UpdateFailed("Logged in user not found")
        self.current_user = user
        self.users.append(self.current_user)  # Add current_user to users list.

    async def _async_update_data(self) -> dict[str, TwitchUpdate]:
        await self.session.async_ensure_token_valid()
@@ -95,6 +96,8 @@ class TwitchCoordinator(DataUpdateCoordinator[dict[str, TwitchUpdate]]):
                user_id=self.current_user.id, first=100
            )
        }
        async for s in self.twitch.get_streams(user_id=[self.current_user.id]):
            streams.update({s.user_id: s})
        follows: dict[str, FollowedChannel] = {
            f.broadcaster_id: f
            async for f in await self.twitch.get_followed_channels(
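The second Twitch hunk merges live-stream results into the dict built just above it, keyed by user id, so a fetched stream for the logged-in user overwrites any earlier entry. A small sketch of the pattern with a hypothetical async source (no Twitch API involved):

import asyncio
from typing import AsyncIterator

async def get_streams(user_ids: list[str]) -> AsyncIterator[dict]:
    """Hypothetical stand-in for the API call: yields stream records."""
    for uid in user_ids:
        yield {"user_id": uid, "title": f"live:{uid}"}

async def main() -> None:
    streams: dict[str, dict] = {"123": {"user_id": "123", "title": "stale"}}
    # Same shape as the hunk: later async results overwrite earlier entries.
    async for s in get_streams(["123"]):
        streams.update({s["user_id"]: s})
    print(streams["123"]["title"])  # live:123

asyncio.run(main())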
homeassistant/components/verisure/alarm_control_panel.py
@@ -67,8 +67,13 @@ class VerisureAlarm(
        )
        LOGGER.debug("Verisure set arm state %s", state)
        result = None
        attempts = 0
        while result is None:
            await asyncio.sleep(0.5)
            if attempts == 30:
                break
            if attempts > 1:
                await asyncio.sleep(0.5)
            attempts += 1
            transaction = await self.hass.async_add_executor_job(
                self.coordinator.verisure.request,
                self.coordinator.verisure.poll_arm_state(
@@ -81,8 +86,10 @@ class VerisureAlarm(
                .get("armStateChangePollResult", {})
                .get("result")
            )

            await self.coordinator.async_refresh()
            LOGGER.debug("Result is %s", result)
            if result == "OK":
                self._attr_alarm_state = ALARM_STATE_TO_HA.get(state)
                self.async_write_ha_state()

    async def async_alarm_disarm(self, code: str | None = None) -> None:
        """Send disarm command."""
@@ -108,16 +115,20 @@ class VerisureAlarm(
                "ARMED_AWAY", self.coordinator.verisure.arm_away(code)
            )

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
    def _update_alarm_attributes(self) -> None:
        """Update alarm state and changed by from coordinator data."""
        self._attr_alarm_state = ALARM_STATE_TO_HA.get(
            self.coordinator.data["alarm"]["statusType"]
        )
        self._attr_changed_by = self.coordinator.data["alarm"].get("name")

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._update_alarm_attributes()
        super()._handle_coordinator_update()

    async def async_added_to_hass(self) -> None:
        """When entity is added to hass."""
        await super().async_added_to_hass()
        self._handle_coordinator_update()
        self._update_alarm_attributes()
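Both Verisure polling hunks replace an unbounded wait with a capped retry loop: at most 30 attempts, sleeping 0.5 s between retries after the first, then giving up. A minimal standalone sketch of that loop shape follows; poll_once is a hypothetical stand-in for the executor-job transaction.

import asyncio
import random

async def poll_once() -> str | None:
    """Hypothetical poll: returns a result string once the backend settles."""
    return "OK" if random.random() < 0.2 else None

async def wait_for_result(max_attempts: int = 30) -> str | None:
    result: str | None = None
    attempts = 0
    while result is None:
        if attempts == max_attempts:
            break  # give up instead of polling forever
        if attempts > 1:
            await asyncio.sleep(0.5)  # back off between retries
        attempts += 1
        result = await poll_once()
    return result

print(asyncio.run(wait_for_result()))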
homeassistant/components/verisure/lock.py
@@ -10,7 +10,7 @@ from verisure import Error as VerisureError
from homeassistant.components.lock import LockEntity, LockState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_CODE
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import (
    AddConfigEntryEntitiesCallback,
@@ -70,7 +70,9 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt
        self._attr_unique_id = serial_number

        self.serial_number = serial_number
        self._state: str | None = None
        self._attr_is_locked = None
        self._attr_changed_by = None
        self._changed_method: str | None = None

    @property
    def device_info(self) -> DeviceInfo:
@@ -92,20 +94,6 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt
            super().available and self.serial_number in self.coordinator.data["locks"]
        )

    @property
    def changed_by(self) -> str | None:
        """Last change triggered by."""
        return (
            self.coordinator.data["locks"][self.serial_number]
            .get("user", {})
            .get("name")
        )

    @property
    def changed_method(self) -> str:
        """Last change method."""
        return self.coordinator.data["locks"][self.serial_number]["lockMethod"]

    @property
    def code_format(self) -> str:
        """Return the configured code format."""
@@ -115,16 +103,9 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt
        return f"^\\d{{{digits}}}$"

    @property
    def is_locked(self) -> bool:
        """Return true if lock is locked."""
        return (
            self.coordinator.data["locks"][self.serial_number]["lockStatus"] == "LOCKED"
        )

    @property
    def extra_state_attributes(self) -> dict[str, str]:
    def extra_state_attributes(self) -> dict[str, str | None]:
        """Return the state attributes."""
        return {"method": self.changed_method}
        return {"method": self._changed_method}

    async def async_unlock(self, **kwargs: Any) -> None:
        """Send unlock command."""
@@ -154,7 +135,7 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt
        target_state = "LOCKED" if state == LockState.LOCKED else "UNLOCKED"
        lock_status = None
        attempts = 0
        while lock_status != "OK":
        while lock_status is None:
            if attempts == 30:
                break
            if attempts > 1:
@@ -172,8 +153,10 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt
                .get("doorLockStateChangePollResult", {})
                .get("result")
            )
            LOGGER.debug("Lock status is %s", lock_status)
            if lock_status == "OK":
                self._state = state
                self._attr_is_locked = state == LockState.LOCKED
                self.async_write_ha_state()

    def disable_autolock(self) -> None:
        """Disable autolock on a doorlock."""
@@ -196,3 +179,21 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt
            LOGGER.debug("Enabling autolock on %s", self.serial_number)
        except VerisureError as ex:
            LOGGER.error("Could not enable autolock, %s", ex)

    def _update_lock_attributes(self) -> None:
        """Update lock state, changed by, and method from coordinator data."""
        lock_data = self.coordinator.data["locks"][self.serial_number]
        self._attr_is_locked = lock_data["lockStatus"] == "LOCKED"
        self._attr_changed_by = lock_data.get("user", {}).get("name")
        self._changed_method = lock_data["lockMethod"]

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._update_lock_attributes()
        super()._handle_coordinator_update()

    async def async_added_to_hass(self) -> None:
        """When entity is added to hass."""
        await super().async_added_to_hass()
        self._update_lock_attributes()
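The lock refactor swaps per-read properties (changed_by, changed_method, is_locked) for cached _attr_* values refreshed in one place whenever the coordinator pushes data, and seeds them in async_added_to_hass. A compact sketch of that shape outside Home Assistant; the Coordinator and Lock classes here are hypothetical simplifications of CoordinatorEntity, not the integration's real base classes.

class Coordinator:
    """Hypothetical stand-in: holds the latest data fetched from the cloud."""
    def __init__(self) -> None:
        self.data = {"locks": {"SN1": {"lockStatus": "LOCKED", "lockMethod": "CODE"}}}

class Lock:
    def __init__(self, coordinator: Coordinator, serial: str) -> None:
        self.coordinator = coordinator
        self.serial = serial
        self._attr_is_locked: bool | None = None
        self._changed_method: str | None = None

    def _update_lock_attributes(self) -> None:
        # One helper derives every cached attribute from coordinator data.
        lock_data = self.coordinator.data["locks"][self.serial]
        self._attr_is_locked = lock_data["lockStatus"] == "LOCKED"
        self._changed_method = lock_data["lockMethod"]

    def handle_coordinator_update(self) -> None:
        self._update_lock_attributes()  # refresh the cache on every push

    def added(self) -> None:
        self._update_lock_attributes()  # seed state before the first push

lock = Lock(Coordinator(), "SN1")
lock.added()
print(lock._attr_is_locked, lock._changed_method)  # True CODE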
homeassistant/components/verisure/switch.py
@@ -99,4 +99,4 @@ class VerisureSmartplug(CoordinatorEntity[VerisureDataUpdateCoordinator], Switch
        )
        self._state = state
        self._change_timestamp = monotonic()
        await self.coordinator.async_request_refresh()
        self.async_write_ha_state()
homeassistant/components/waterfurnace/manifest.json
@@ -6,5 +6,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["waterfurnace"],
  "quality_scale": "legacy",
  "requirements": ["waterfurnace==1.1.0"]
  "requirements": ["waterfurnace==1.2.0"]
}
Some files were not shown because too many files have changed in this diff.