Mirror of https://github.com/home-assistant/core.git, synced 2025-09-20 02:19:36 +00:00

Compare commits: 199 commits, add-deprec ... standardiz
Commits in this comparison (author avatars and dates were not captured in this mirror view; SHA1s listed in order):

05ec051bf9
d93e0a105a
ab1619c0b4
70df7b8503
0e2c2ad355
4c26718739
96034e1525
df1302fc1c
5a5b639aa4
e9fbe2227f
82b57568a0
be692ab2fd
24c04cceee
97077898bb
08485f4e09
b64d60fce4
3690497e1f
b87e581cde
f1c55ee7e2
9f17a82acf
3955391cda
d9a757c7e6
aa1ec944c0
88c3b6a9f5
ada73953f6
42e9b9a0bc
ec6a052ff5
c91d64e04d
0ac7cb311d
3472020812
dcd09523a6
a5bfdc697b
dbb29a7c7d
124a63d846
3de701a9ab
bfe1dd65b3
71bf5e14cc
6d231c2c99
b93072865b
14ebb6cd74
2ddbcd560e
c5ff7ed1c9
c4bea5616c
17fe147726
9fae4e7e1f
0cebca498c
521ff62aae
fd1df5ad88
91e7a35a07
09381abf46
3713c03c07
bd8ddd7cd8
f0dc1f927b
984590c6d1
d324021a3f
1f4c0b3e9b
69893aba4b
b9dcf89b37
54fd55a1c6
cc64fa639d
84140ba414
d1726b84c8
4724ecbc38
85afe87b5e
5960179844
9f8f7d2fde
4c22264b13
baf4382724
8263ea4a4a
8412581be4
207c848438
2b61601fd7
ee506e6c14
8003a49571
e438b11afb
64ba43703c
1d214ae120
68d987f866
299cc5e40c
2c3456177e
1ef90180cc
4c1364dfd1
09a44a6a30
63303bdcde
59cd24f54b
82b9fead39
a879e36e9b
b12c458188
4985f9a5a1
ae70ca7cba
66d1cf8af7
27c0df3da8
5413131885
0acd77e60a
c5d552dc4a
4f045b45ac
4ad29161bd
fea7f537a8
442b6e9cca
531b67101d
596a3fc879
0e8295604e
9cfdb99e76
b1a6e403fb
937d3e4a96
a368ad4ab5
254694b024
bcfa7a7383
ab7081d26a
4762c64c25
1b99ffe61b
d5132e8ea9
1bcf3cfbb2
d4d912ef55
393826635b
9edd5c35e0
e8c1d3dc3c
46463ea4f8
88e6b0c8d9
2ed92c720f
7389f23d9a
a0cef80cf2
343b17788f
50349e49f1
42d0415a86
1428b41a25
e65b4292b2
2fc2bb97fc
40da606177
d613b69e4e
3c5d09e114
9c54cc369b
f91e4090f9
2cdf0b74d5
86750ae5c3
c1eb492616
ac154c020c
0e23eb9ebd
8367930f42
d71b1246cf
4ad664a652
002493c3e1
720ecde568
4c548830b4
214925e10a
885256299f
e73c670025
e3c0cfd1e2
46c38f185c
9082637133
0a35fd0ea4
c4649fc068
b496637bdd
7d471f9624
83f3b3e3eb
4592d6370a
ccef31a37a
6a482b1a3e
6f00f8a920
ceeeb22040
2cda0817b2
881a0bd1fa
dba6f419c9
fad8d4fca2
1663ad1adb
6a8152bc7f
e5edccd56f
480527eb68
1475108f1c
a1e68336fc
07392e3ff7
3e39f77e92
a12617645b
723476457e
7053727426
0f4ce58f28
2c72cd3832
e8d5615e54
7a332d489d
504421e257
a0ace3b082
aea055b444
5b107349a1
46fa98e0b2
96e66009e5
ad14a66187
777ac97acb
af07ab4752
74b731528d
c361c32407
75c1eddaf9
715aba3aca
285619e913
eaf400f3b7
3d79a73110
6271765eaf
9e73ff06d2
36edfd8c04
a750cfcac6
.github/workflows/builder.yml (44 changed lines)

@@ -27,12 +27,12 @@ jobs:
      publish: ${{ steps.version.outputs.publish }}
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          fetch-depth: 0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

@@ -69,7 +69,7 @@ jobs:
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: translations
          path: translations.tar.gz
@@ -90,11 +90,11 @@ jobs:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Download nightly wheels of frontend
        if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v11
+        uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

      - name: Download nightly wheels of intents
        if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v11
+        uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: OHF-Voice/intents-package
@@ -116,7 +116,7 @@ jobs:

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
          sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

      - name: Download translations
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: translations

@@ -190,14 +190,14 @@ jobs:
          echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.5.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build base image
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@71885366c80f6ead6ae8c364b61d910e0dc5addc # 2025.03.0
        with:
          args: |
            $BUILD_ARGS \
@@ -242,7 +242,7 @@ jobs:
          - green
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set build additional args
        run: |
@@ -256,14 +256,14 @@ jobs:
          fi

      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.5.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build base image
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@71885366c80f6ead6ae8c364b61d910e0dc5addc # 2025.03.0
        with:
          args: |
            $BUILD_ARGS \
@@ -279,7 +279,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize git
        uses: home-assistant/actions/helpers/git-init@master
@@ -321,23 +321,23 @@ jobs:
        registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.9.2
+        uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2
        with:
          cosign-release: "v2.2.3"

      - name: Login to DockerHub
        if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@v3.5.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@v3.5.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -454,15 +454,15 @@ jobs:
    if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

      - name: Download translations
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: translations

@@ -480,7 +480,7 @@ jobs:
          python -m build

      - name: Upload package to PyPI
-        uses: pypa/gh-action-pypi-publish@v1.13.0
+        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
        with:
          skip-existing: true
.github/workflows/ci.yaml (294 changed lines)

@@ -37,7 +37,7 @@ on:
        type: boolean

env:
-  CACHE_VERSION: 7
+  CACHE_VERSION: 8
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
  HA_SHORT_VERSION: "2025.10"
@@ -61,6 +61,9 @@ env:
  POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
  PRE_COMMIT_CACHE: ~/.cache/pre-commit
  UV_CACHE_DIR: /tmp/uv-cache
+  APT_CACHE_BASE: /home/runner/work/apt
+  APT_CACHE_DIR: /home/runner/work/apt/cache
+  APT_LIST_CACHE_DIR: /home/runner/work/apt/lists
  SQLALCHEMY_WARN_20: 1
  PYTHONASYNCIODEBUG: 1
  HASS_CI: 1
@@ -78,6 +81,7 @@ jobs:
      core: ${{ steps.core.outputs.changes }}
      integrations_glob: ${{ steps.info.outputs.integrations_glob }}
      integrations: ${{ steps.integrations.outputs.changes }}
+      apt_cache_key: ${{ steps.generate_apt_cache_key.outputs.key }}
      pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
      python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
      requirements: ${{ steps.core.outputs.requirements }}
@@ -94,7 +98,7 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Generate partial Python venv restore key
        id: generate_python_cache_key
        run: |
@@ -111,8 +115,12 @@ jobs:
        run: >-
          echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{
          hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
+      - name: Generate partial apt restore key
+        id: generate_apt_cache_key
+        run: |
+          echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT
      - name: Filter for core changes
-        uses: dorny/paths-filter@v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
        id: core
        with:
          filters: .core_files.yaml
@@ -127,7 +135,7 @@ jobs:
          echo "Result:"
          cat .integration_paths.yaml
      - name: Filter for integration changes
-        uses: dorny/paths-filter@v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
        id: integrations
        with:
          filters: .integration_paths.yaml
@@ -246,16 +254,16 @@ jobs:
      - info
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          key: >-
@@ -271,7 +279,7 @@ jobs:
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
@@ -292,16 +300,16 @@ jobs:
      - pre-commit
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -310,7 +318,7 @@ jobs:
          needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -332,16 +340,16 @@ jobs:
      - pre-commit
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -350,7 +358,7 @@ jobs:
          needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -372,16 +380,16 @@ jobs:
      - pre-commit
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -390,7 +398,7 @@ jobs:
          needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -462,7 +470,7 @@ jobs:
          - script/hassfest/docker/Dockerfile
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -481,10 +489,10 @@ jobs:
        python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
@@ -497,7 +505,7 @@ jobs:
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          key: >-
@@ -505,7 +513,7 @@ jobs:
          needs.info.outputs.python_cache_key }}
      - name: Restore uv wheel cache
        if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.UV_CACHE_DIR }}
          key: >-
@@ -515,16 +523,36 @@ jobs:
          ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
          env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
          env.HA_SHORT_VERSION }}-
+      - name: Restore apt cache
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        id: cache-apt
+        uses: actions/cache@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
      - name: Install additional OS dependencies
        if: steps.cache-venv.outputs.cache-hit != 'true'
        timeout-minutes: 10
        run: |
          sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
+            mkdir -p ${{ env.APT_CACHE_DIR }}
+            mkdir -p ${{ env.APT_LIST_CACHE_DIR }}
+          fi
+
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
          sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            bluez \
            ffmpeg \
            libturbojpeg \
            libxml2-utils \
            libavcodec-dev \
            libavdevice-dev \
            libavfilter-dev \
@@ -534,6 +562,10 @@ jobs:
            libswresample-dev \
            libswscale-dev \
            libudev-dev
+
+          if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
+            sudo chmod -R 755 ${{ env.APT_CACHE_BASE }}
+          fi
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
@@ -553,7 +585,7 @@ jobs:
          python --version
          uv pip freeze >> pip_freeze.txt
      - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pip-freeze-${{ matrix.python-version }}
          path: pip_freeze.txt
@@ -578,24 +610,37 @@ jobs:
      - info
      - base
    steps:
+      - name: Restore apt cache
+        uses: actions/cache/restore@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          fail-on-cache-miss: true
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
      - name: Install additional OS dependencies
        timeout-minutes: 10
        run: |
          sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
          sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            libturbojpeg
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -619,16 +664,16 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -653,9 +698,9 @@ jobs:
      && github.event_name == 'pull_request'
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Dependency review
-        uses: actions/dependency-review-action@v4.7.3
+        uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
        with:
          license-check: false # We use our own license audit checks

@@ -676,16 +721,16 @@ jobs:
        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -697,7 +742,7 @@ jobs:
          . venv/bin/activate
          python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
      - name: Upload licenses
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
          path: licenses-${{ matrix.python-version }}.json
@@ -719,16 +764,16 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -766,16 +811,16 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -811,10 +856,10 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
@@ -827,7 +872,7 @@ jobs:
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -835,7 +880,7 @@ jobs:
          ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
          needs.info.outputs.python_cache_key }}
      - name: Restore mypy cache
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: .mypy_cache
          key: >-
@@ -878,27 +923,40 @@ jobs:
      - mypy
    name: Split tests for full run
    steps:
+      - name: Restore apt cache
+        uses: actions/cache/restore@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          fail-on-cache-miss: true
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
      - name: Install additional OS dependencies
        timeout-minutes: 10
        run: |
          sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
          sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            bluez \
            ffmpeg \
            libturbojpeg \
            libgammu-dev
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -910,7 +968,7 @@ jobs:
          . venv/bin/activate
          python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
      - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest_buckets
          path: pytest_buckets.txt
@@ -939,28 +997,41 @@ jobs:
    name: >-
      Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
    steps:
+      - name: Restore apt cache
+        uses: actions/cache/restore@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          fail-on-cache-miss: true
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
      - name: Install additional OS dependencies
        timeout-minutes: 10
        run: |
          sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
          sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            bluez \
            ffmpeg \
            libturbojpeg \
            libgammu-dev \
            libxml2-utils
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -974,7 +1045,7 @@ jobs:
        run: |
          echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
      - name: Download pytest_buckets
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: pytest_buckets
      - name: Compile English translations
@@ -1013,14 +1084,14 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1033,7 +1104,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
          path: junit.xml
@@ -1073,28 +1144,41 @@ jobs:
    name: >-
      Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
    steps:
+      - name: Restore apt cache
+        uses: actions/cache/restore@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          fail-on-cache-miss: true
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
      - name: Install additional OS dependencies
        timeout-minutes: 10
        run: |
          sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
          sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            bluez \
            ffmpeg \
            libturbojpeg \
            libmariadb-dev-compat \
            libxml2-utils
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1153,7 +1237,7 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1161,7 +1245,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1175,7 +1259,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-mariadb-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1214,12 +1298,25 @@ jobs:
    name: >-
      Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
    steps:
+      - name: Restore apt cache
+        uses: actions/cache/restore@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          fail-on-cache-miss: true
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
      - name: Install additional OS dependencies
        timeout-minutes: 10
        run: |
          sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
          sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            bluez \
            ffmpeg \
            libturbojpeg \
@@ -1228,16 +1325,16 @@ jobs:
          sudo apt-get -y install \
            postgresql-server-dev-14
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1297,7 +1394,7 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1305,7 +1402,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1319,7 +1416,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-postgres-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1340,14 +1437,14 @@ jobs:
    timeout-minutes: 10
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Download all coverage artifacts
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.5.1
+        uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
        with:
          fail_ci_if_error: true
          flags: full-suite
@@ -1376,28 +1473,41 @@ jobs:
    name: >-
      Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
    steps:
+      - name: Restore apt cache
+        uses: actions/cache/restore@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          fail-on-cache-miss: true
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
      - name: Install additional OS dependencies
        timeout-minutes: 10
        run: |
          sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
          sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            bluez \
            ffmpeg \
            libturbojpeg \
            libgammu-dev \
            libxml2-utils
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1453,14 +1563,14 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1473,7 +1583,7 @@ jobs:
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
          path: junit.xml
@@ -1491,14 +1601,14 @@ jobs:
    timeout-minutes: 10
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Download all coverage artifacts
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.5.1
+        uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -1518,11 +1628,11 @@ jobs:
    timeout-minutes: 10
    steps:
      - name: Download all coverage artifacts
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          pattern: test-results-*
      - name: Upload test results to Codecov
-        uses: codecov/test-results-action@v1
+        uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
        with:
          fail_ci_if_error: true
          verbose: true
.github/workflows/codeql.yml (6 changed lines)

@@ -21,14 +21,14 @@ jobs:

    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.30.1
+        uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          languages: python

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.30.1
+        uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          category: "/language:python"
(workflow file header not captured in this mirror view)

@@ -16,7 +16,7 @@ jobs:
    steps:
      - name: Check if integration label was added and extract details
        id: extract
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            // Debug: Log the event payload
@@ -113,7 +113,7 @@ jobs:
      - name: Fetch similar issues
        id: fetch_similar
        if: steps.extract.outputs.should_continue == 'true'
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
          CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
@@ -231,7 +231,7 @@ jobs:
      - name: Detect duplicates using AI
        id: ai_detection
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/ai-inference@v2.0.1
+        uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
        with:
          model: openai/gpt-4o
          system-prompt: |
@@ -280,7 +280,7 @@ jobs:
      - name: Post duplicate detection results
        id: post_results
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
          SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
(workflow file header not captured in this mirror view)

@@ -16,7 +16,7 @@ jobs:
    steps:
      - name: Check issue language
        id: detect_language
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          ISSUE_NUMBER: ${{ github.event.issue.number }}
          ISSUE_TITLE: ${{ github.event.issue.title }}
@@ -57,7 +57,7 @@ jobs:
      - name: Detect language using AI
        id: ai_language_detection
        if: steps.detect_language.outputs.should_continue == 'true'
-        uses: actions/ai-inference@v2.0.1
+        uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
        with:
          model: openai/gpt-4o-mini
          system-prompt: |
@@ -90,7 +90,7 @@ jobs:

      - name: Process non-English issues
        if: steps.detect_language.outputs.should_continue == 'true'
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
          ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
.github/workflows/lock.yml (2 changed lines)

@@ -10,7 +10,7 @@ jobs:
    if: github.repository_owner == 'home-assistant'
    runs-on: ubuntu-latest
    steps:
-      - uses: dessant/lock-threads@v5.0.1
+      - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"
.github/workflows/restrict-task-creation.yml (2 changed lines)

@@ -12,7 +12,7 @@ jobs:
    if: github.event.issue.type.name == 'Task'
    steps:
      - name: Check if user is authorized
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            const issueAuthor = context.payload.issue.user.login;
.github/workflows/stale.yml (6 changed lines)

@@ -17,7 +17,7 @@ jobs:
      # - No PRs marked as no-stale
      # - No issues (-1)
      - name: 60 days stale PRs policy
-        uses: actions/stale@v10.0.0
+        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
      # - No issues marked as no-stale or help-wanted
      # - No PRs (-1)
      - name: 90 days stale issues
-        uses: actions/stale@v10.0.0
+        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ steps.token.outputs.token }}
          days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
      # - No Issues marked as no-stale or help-wanted
      # - No PRs (-1)
      - name: Needs more information stale issues policy
-        uses: actions/stale@v10.0.0
+        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ steps.token.outputs.token }}
          only-labels: "needs-more-information"
.github/workflows/translations.yml (4 changed lines)

@@ -19,10 +19,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (32 changed lines)

@@ -32,11 +32,11 @@ jobs:
      architectures: ${{ steps.info.outputs.architectures }}
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
@@ -91,7 +91,7 @@ jobs:
          ) > build_constraints.txt

      - name: Upload env_file
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: env_file
          path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
          overwrite: true

      - name: Upload build_constraints
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: build_constraints
          path: ./build_constraints.txt
          overwrite: true

      - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: requirements_diff
          path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
          python -m script.gen_requirements_all ci

      - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: requirements_all_wheels
          path: ./requirements_all_wheels_*.txt
@@ -135,20 +135,20 @@ jobs:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Download env_file
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: env_file

      - name: Download build_constraints
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: build_constraints

      - name: Download requirements_diff
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: requirements_diff

@@ -158,6 +158,7 @@ jobs:
          sed -i "/uv/d" requirements.txt
          sed -i "/uv/d" requirements_diff.txt

+      # home-assistant/wheels doesn't support sha pinning
      - name: Build wheels
        uses: home-assistant/wheels@2025.07.0
        with:
@@ -184,25 +185,25 @@ jobs:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Download env_file
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: env_file

      - name: Download build_constraints
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: build_constraints

      - name: Download requirements_diff
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: requirements_diff

      - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: requirements_all_wheels

@@ -218,6 +219,7 @@ jobs:
          sed -i "/uv/d" requirements.txt
          sed -i "/uv/d" requirements_diff.txt

+      # home-assistant/wheels doesn't support sha pinning
      - name: Build wheels
        uses: home-assistant/wheels@2025.07.0
        with:
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.12.1
+    rev: v0.13.0
     hooks:
       - id: ruff-check
        args:
@@ -169,6 +169,7 @@ homeassistant.components.dnsip.*
 homeassistant.components.doorbird.*
 homeassistant.components.dormakaba_dkey.*
 homeassistant.components.downloader.*
+homeassistant.components.droplet.*
 homeassistant.components.dsmr.*
 homeassistant.components.duckdns.*
 homeassistant.components.dunehd.*
@@ -401,6 +402,7 @@ homeassistant.components.person.*
 homeassistant.components.pi_hole.*
 homeassistant.components.ping.*
 homeassistant.components.plugwise.*
+homeassistant.components.portainer.*
 homeassistant.components.powerfox.*
 homeassistant.components.powerwall.*
 homeassistant.components.private_ble_device.*
CODEOWNERS (generated)
@@ -377,6 +377,8 @@ build.json @home-assistant/supervisor
 /tests/components/dremel_3d_printer/ @tkdrob
 /homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
 /tests/components/drop_connect/ @ChandlerSystems @pfrazer
+/homeassistant/components/droplet/ @sarahseidman
+/tests/components/droplet/ @sarahseidman
 /homeassistant/components/dsmr/ @Robbie1221
 /tests/components/dsmr/ @Robbie1221
 /homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
@@ -440,8 +442,6 @@ build.json @home-assistant/supervisor
 /tests/components/energyzero/ @klaasnicolaas
 /homeassistant/components/enigma2/ @autinerd
 /tests/components/enigma2/ @autinerd
-/homeassistant/components/enocean/ @bdurrer
-/tests/components/enocean/ @bdurrer
 /homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /homeassistant/components/entur_public_transport/ @hfurubotten
@@ -968,6 +968,8 @@ build.json @home-assistant/supervisor
 /tests/components/moat/ @bdraco
 /homeassistant/components/mobile_app/ @home-assistant/core
 /tests/components/mobile_app/ @home-assistant/core
+/homeassistant/components/modbus/ @janiversen
+/tests/components/modbus/ @janiversen
 /homeassistant/components/modem_callerid/ @tkdrob
 /tests/components/modem_callerid/ @tkdrob
 /homeassistant/components/modern_forms/ @wonderslug
@@ -1189,6 +1191,8 @@ build.json @home-assistant/supervisor
 /tests/components/pooldose/ @lmaertin
 /homeassistant/components/poolsense/ @haemishkyd
 /tests/components/poolsense/ @haemishkyd
+/homeassistant/components/portainer/ @erwindouna
+/tests/components/portainer/ @erwindouna
 /homeassistant/components/powerfox/ @klaasnicolaas
 /tests/components/powerfox/ @klaasnicolaas
 /homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
@@ -6,7 +6,6 @@
     "google_assistant_sdk",
     "google_cloud",
     "google_drive",
-    "google_gemini",
     "google_generative_ai_conversation",
     "google_mail",
     "google_maps",
@@ -2,21 +2,23 @@

 from __future__ import annotations

+import asyncio
 import logging

 from accuweather import AccuWeather

 from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
-from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
+from homeassistant.const import CONF_API_KEY, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
+from .const import DOMAIN
 from .coordinator import (
     AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherData,
+    AccuWeatherHourlyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )
@@ -28,7 +30,6 @@ PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
 async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool:
     """Set up AccuWeather as config entry."""
     api_key: str = entry.data[CONF_API_KEY]
-    name: str = entry.data[CONF_NAME]

     location_key = entry.unique_id

@@ -41,26 +42,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
         hass,
         entry,
         accuweather,
-        name,
-        "observation",
-        UPDATE_INTERVAL_OBSERVATION,
     )

     coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
         hass,
         entry,
         accuweather,
-        name,
-        "daily forecast",
-        UPDATE_INTERVAL_DAILY_FORECAST,
     )
+    coordinator_hourly_forecast = AccuWeatherHourlyForecastDataUpdateCoordinator(
+        hass,
+        entry,
+        accuweather,
+    )

-    await coordinator_observation.async_config_entry_first_refresh()
-    await coordinator_daily_forecast.async_config_entry_first_refresh()
+    await asyncio.gather(
+        coordinator_observation.async_config_entry_first_refresh(),
+        coordinator_daily_forecast.async_config_entry_first_refresh(),
+        coordinator_hourly_forecast.async_config_entry_first_refresh(),
+    )

     entry.runtime_data = AccuWeatherData(
         coordinator_observation=coordinator_observation,
         coordinator_daily_forecast=coordinator_daily_forecast,
+        coordinator_hourly_forecast=coordinator_hourly_forecast,
     )

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -50,6 +50,7 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
                 await self.async_set_unique_id(
                     accuweather.location_key, raise_on_progress=False
                 )
+                self._abort_if_unique_id_configured()

                 return self.async_create_entry(
                     title=user_input[CONF_NAME], data=user_input
@@ -69,5 +69,6 @@ POLLEN_CATEGORY_MAP = {
     4: "very_high",
     5: "extreme",
 }
-UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
+UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from asyncio import timeout
+from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from datetime import timedelta
 import logging
@@ -12,6 +13,7 @@ from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExcee
 from aiohttp.client_exceptions import ClientConnectorError

 from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_NAME
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.update_coordinator import (
@@ -20,7 +22,13 @@ from homeassistant.helpers.update_coordinator import (
     UpdateFailed,
 )

-from .const import DOMAIN, MANUFACTURER
+from .const import (
+    DOMAIN,
+    MANUFACTURER,
+    UPDATE_INTERVAL_DAILY_FORECAST,
+    UPDATE_INTERVAL_HOURLY_FORECAST,
+    UPDATE_INTERVAL_OBSERVATION,
+)

 EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError)

@@ -33,6 +41,7 @@ class AccuWeatherData:

     coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
     coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
+    coordinator_hourly_forecast: AccuWeatherHourlyForecastDataUpdateCoordinator


 type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
@@ -48,13 +57,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
-        coordinator_type: str,
-        update_interval: timedelta,
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -65,8 +72,8 @@ class AccuWeatherObservationDataUpdateCoordinator(
             hass,
             _LOGGER,
             config_entry=config_entry,
-            name=f"{name} ({coordinator_type})",
-            update_interval=update_interval,
+            name=f"{name} (observation)",
+            update_interval=UPDATE_INTERVAL_OBSERVATION,
         )

     async def _async_update_data(self) -> dict[str, Any]:
@@ -86,23 +93,25 @@ class AccuWeatherObservationDataUpdateCoordinator(
         return result


-class AccuWeatherDailyForecastDataUpdateCoordinator(
+class AccuWeatherForecastDataUpdateCoordinator(
     TimestampDataUpdateCoordinator[list[dict[str, Any]]]
 ):
-    """Class to manage fetching AccuWeather data API."""
+    """Base class for AccuWeather forecast."""

     def __init__(
         self,
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
         coordinator_type: str,
         update_interval: timedelta,
+        fetch_method: Callable[..., Awaitable[list[dict[str, Any]]]],
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        self._fetch_method = fetch_method
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -118,12 +127,10 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
         )

     async def _async_update_data(self) -> list[dict[str, Any]]:
-        """Update data via library."""
+        """Update forecast data via library."""
         try:
             async with timeout(10):
-                result = await self.accuweather.async_get_daily_forecast(
-                    language=self.hass.config.language
-                )
+                result = await self._fetch_method(language=self.hass.config.language)
         except EXCEPTIONS as error:
             raise UpdateFailed(
                 translation_domain=DOMAIN,
@@ -132,10 +139,53 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
             ) from error

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

         return result


+class AccuWeatherDailyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for daily forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "daily forecast",
+            UPDATE_INTERVAL_DAILY_FORECAST,
+            fetch_method=accuweather.async_get_daily_forecast,
+        )
+
+
+class AccuWeatherHourlyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for hourly forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "hourly forecast",
+            UPDATE_INTERVAL_HOURLY_FORECAST,
+            fetch_method=accuweather.async_get_hourly_forecast,
+        )
+
+
 def _get_device_info(location_key: str, name: str) -> DeviceInfo:
     """Get device info."""
     return DeviceInfo(
@@ -7,6 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.2.0"],
-  "single_config_entry": true
+  "requirements": ["accuweather==4.2.1"]
 }
@@ -17,6 +17,9 @@
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
       "requests_exceeded": "The allowed number of requests to the AccuWeather API has been exceeded. You have to wait or change the API key."
     },
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_location%]"
+    }
   },
   "entity": {
@@ -45,6 +45,7 @@ from .coordinator import (
     AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherData,
+    AccuWeatherHourlyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -64,6 +65,7 @@ class AccuWeatherEntity(
     CoordinatorWeatherEntity[
         AccuWeatherObservationDataUpdateCoordinator,
         AccuWeatherDailyForecastDataUpdateCoordinator,
+        AccuWeatherHourlyForecastDataUpdateCoordinator,
     ]
 ):
     """Define an AccuWeather entity."""
@@ -76,6 +78,7 @@ class AccuWeatherEntity(
         super().__init__(
             observation_coordinator=accuweather_data.coordinator_observation,
             daily_coordinator=accuweather_data.coordinator_daily_forecast,
+            hourly_coordinator=accuweather_data.coordinator_hourly_forecast,
         )

         self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
@@ -86,10 +89,13 @@ class AccuWeatherEntity(
         self._attr_unique_id = accuweather_data.coordinator_observation.location_key
         self._attr_attribution = ATTRIBUTION
         self._attr_device_info = accuweather_data.coordinator_observation.device_info
-        self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY
+        self._attr_supported_features = (
+            WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY
+        )

         self.observation_coordinator = accuweather_data.coordinator_observation
         self.daily_coordinator = accuweather_data.coordinator_daily_forecast
+        self.hourly_coordinator = accuweather_data.coordinator_hourly_forecast

     @property
     def condition(self) -> str | None:
@@ -207,3 +213,32 @@ class AccuWeatherEntity(
             }
             for item in self.daily_coordinator.data
         ]
+
+    @callback
+    def _async_forecast_hourly(self) -> list[Forecast] | None:
+        """Return the hourly forecast in native units."""
+        return [
+            {
+                ATTR_FORECAST_TIME: utc_from_timestamp(
+                    item["EpochDateTime"]
+                ).isoformat(),
+                ATTR_FORECAST_CLOUD_COVERAGE: item["CloudCover"],
+                ATTR_FORECAST_HUMIDITY: item["RelativeHumidity"],
+                ATTR_FORECAST_NATIVE_TEMP: item["Temperature"][ATTR_VALUE],
+                ATTR_FORECAST_NATIVE_APPARENT_TEMP: item["RealFeelTemperature"][
+                    ATTR_VALUE
+                ],
+                ATTR_FORECAST_NATIVE_PRECIPITATION: item["TotalLiquid"][ATTR_VALUE],
+                ATTR_FORECAST_PRECIPITATION_PROBABILITY: item[
+                    "PrecipitationProbability"
+                ],
+                ATTR_FORECAST_NATIVE_WIND_SPEED: item["Wind"][ATTR_SPEED][ATTR_VALUE],
+                ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: item["WindGust"][ATTR_SPEED][
+                    ATTR_VALUE
+                ],
+                ATTR_FORECAST_UV_INDEX: item["UVIndex"],
+                ATTR_FORECAST_WIND_BEARING: item["Wind"][ATTR_DIRECTION]["Degrees"],
+                ATTR_FORECAST_CONDITION: CONDITION_MAP.get(item["WeatherIcon"]),
+            }
+            for item in self.hourly_coordinator.data
+        ]
@@ -3,10 +3,8 @@
 import logging
 from typing import Any

-from aiohttp import web
 import voluptuous as vol

-from homeassistant.components.http import KEY_HASS, HomeAssistantView
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
 from homeassistant.core import (
@@ -28,7 +26,6 @@ from .const import (
     ATTR_STRUCTURE,
     ATTR_TASK_NAME,
     DATA_COMPONENT,
-    DATA_IMAGES,
     DATA_PREFERENCES,
     DOMAIN,
     SERVICE_GENERATE_DATA,
@@ -42,7 +39,6 @@ from .task import (
     GenDataTaskResult,
     GenImageTask,
     GenImageTaskResult,
-    ImageData,
     async_generate_data,
     async_generate_image,
 )
@@ -55,7 +51,6 @@ __all__ = [
     "GenDataTaskResult",
     "GenImageTask",
     "GenImageTaskResult",
-    "ImageData",
     "async_generate_data",
     "async_generate_image",
     "async_setup",
@@ -94,10 +89,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
     hass.data[DATA_COMPONENT] = entity_component
     hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
-    hass.data[DATA_IMAGES] = {}
     await hass.data[DATA_PREFERENCES].async_load()
     async_setup_http(hass)
-    hass.http.register_view(ImageView)
     hass.services.async_register(
         DOMAIN,
         SERVICE_GENERATE_DATA,
@@ -126,7 +119,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         schema=vol.Schema(
             {
                 vol.Required(ATTR_TASK_NAME): cv.string,
-                vol.Required(ATTR_ENTITY_ID): cv.entity_id,
+                vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
                 vol.Required(ATTR_INSTRUCTIONS): cv.string,
                 vol.Optional(ATTR_ATTACHMENTS): vol.All(
                     cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
@@ -163,9 +156,10 @@ async def async_service_generate_image(call: ServiceCall) -> ServiceResponse:
 class AITaskPreferences:
     """AI Task preferences."""

-    KEYS = ("gen_data_entity_id",)
+    KEYS = ("gen_data_entity_id", "gen_image_entity_id")

     gen_data_entity_id: str | None = None
+    gen_image_entity_id: str | None = None

     def __init__(self, hass: HomeAssistant) -> None:
         """Initialize the preferences."""
@@ -179,17 +173,21 @@ class AITaskPreferences:
         if data is None:
             return
         for key in self.KEYS:
-            setattr(self, key, data[key])
+            setattr(self, key, data.get(key))

     @callback
     def async_set_preferences(
         self,
         *,
         gen_data_entity_id: str | None | UndefinedType = UNDEFINED,
+        gen_image_entity_id: str | None | UndefinedType = UNDEFINED,
     ) -> None:
         """Set the preferences."""
         changed = False
-        for key, value in (("gen_data_entity_id", gen_data_entity_id),):
+        for key, value in (
+            ("gen_data_entity_id", gen_data_entity_id),
+            ("gen_image_entity_id", gen_image_entity_id),
+        ):
             if value is not UNDEFINED:
                 if getattr(self, key) != value:
                     setattr(self, key, value)
@@ -204,28 +202,3 @@ class AITaskPreferences:
     def as_dict(self) -> dict[str, str | None]:
         """Get the current preferences."""
         return {key: getattr(self, key) for key in self.KEYS}
-
-
-class ImageView(HomeAssistantView):
-    """View to generated images."""
-
-    url = f"/api/{DOMAIN}/images/{{filename}}"
-    name = f"api:{DOMAIN}/images"
-
-    async def get(
-        self,
-        request: web.Request,
-        filename: str,
-    ) -> web.Response:
-        """Serve image."""
-        hass = request.app[KEY_HASS]
-        image_storage = hass.data[DATA_IMAGES]
-        image_data = image_storage.get(filename)
-
-        if image_data is None:
-            raise web.HTTPNotFound
-
-        return web.Response(
-            body=image_data.data,
-            content_type=image_data.mime_type,
-        )
@@ -8,19 +8,19 @@ from typing import TYPE_CHECKING, Final
 from homeassistant.util.hass_dict import HassKey

 if TYPE_CHECKING:
+    from homeassistant.components.media_source import local_source
     from homeassistant.helpers.entity_component import EntityComponent

     from . import AITaskPreferences
     from .entity import AITaskEntity
-    from .task import ImageData

 DOMAIN = "ai_task"
 DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
 DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
-DATA_IMAGES: HassKey[dict[str, ImageData]] = HassKey(f"{DOMAIN}_images")
+DATA_MEDIA_SOURCE: HassKey[local_source.LocalSource] = HassKey(f"{DOMAIN}_media_source")

+IMAGE_DIR: Final = "image"
 IMAGE_EXPIRY_TIME = 60 * 60  # 1 hour
-MAX_IMAGES = 20

 SERVICE_GENERATE_DATA = "generate_data"
 SERVICE_GENERATE_IMAGE = "generate_image"
@@ -60,6 +60,10 @@ class AITaskEntity(RestoreEntity):
         task: GenDataTask | GenImageTask,
     ) -> AsyncGenerator[ChatLog]:
         """Context manager used to manage the ChatLog used during an AI Task."""
+        user_llm_hass_api: llm.API | None = None
+        if isinstance(task, GenDataTask):
+            user_llm_hass_api = task.llm_api
+
         # pylint: disable-next=contextmanager-generator-missing-cleanup
         with (
             async_get_chat_log(
@@ -77,6 +81,7 @@ class AITaskEntity(RestoreEntity):
                 device_id=None,
             ),
             user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
+            user_llm_hass_api=user_llm_hass_api,
         )

         chat_log.async_add_user_content(
@@ -37,6 +37,7 @@ def websocket_get_preferences(
     {
         vol.Required("type"): "ai_task/preferences/set",
         vol.Optional("gen_data_entity_id"): vol.Any(str, None),
+        vol.Optional("gen_image_entity_id"): vol.Any(str, None),
     }
 )
 @websocket_api.require_admin
@@ -1,7 +1,7 @@
 {
   "domain": "ai_task",
   "name": "AI Task",
-  "after_dependencies": ["camera", "http"],
+  "after_dependencies": ["camera"],
   "codeowners": ["@home-assistant/core"],
   "dependencies": ["conversation", "media_source"],
   "documentation": "https://www.home-assistant.io/integrations/ai_task",
@@ -2,89 +2,21 @@

 from __future__ import annotations

-from datetime import timedelta
-import logging
-
-from homeassistant.components.http.auth import async_sign_path
-from homeassistant.components.media_player import BrowseError, MediaClass
-from homeassistant.components.media_source import (
-    BrowseMediaSource,
-    MediaSource,
-    MediaSourceItem,
-    PlayMedia,
-    Unresolvable,
-)
+from homeassistant.components.media_source import MediaSource, local_source
 from homeassistant.core import HomeAssistant

-from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME
-
-_LOGGER = logging.getLogger(__name__)
+from .const import DATA_MEDIA_SOURCE, DOMAIN, IMAGE_DIR


-async def async_get_media_source(hass: HomeAssistant) -> ImageMediaSource:
-    """Set up image media source."""
-    _LOGGER.debug("Setting up image media source")
-    return ImageMediaSource(hass)
-
-
-class ImageMediaSource(MediaSource):
-    """Provide images as media sources."""
-
-    name: str = "AI Generated Images"
-
-    def __init__(self, hass: HomeAssistant) -> None:
-        """Initialize ImageMediaSource."""
-        super().__init__(DOMAIN)
-        self.hass = hass
-
-    async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
-        """Resolve media to a url."""
-        image_storage = self.hass.data[DATA_IMAGES]
-        image = image_storage.get(item.identifier)
-
-        if image is None:
-            raise Unresolvable(f"Could not resolve media item: {item.identifier}")
-
-        return PlayMedia(
-            async_sign_path(
-                self.hass,
-                f"/api/{DOMAIN}/images/{item.identifier}",
-                timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
-            ),
-            image.mime_type,
-        )
-
-    async def async_browse_media(
-        self,
-        item: MediaSourceItem,
-    ) -> BrowseMediaSource:
-        """Return media."""
-        if item.identifier:
-            raise BrowseError("Unknown item")
-
-        image_storage = self.hass.data[DATA_IMAGES]
-
-        children = [
-            BrowseMediaSource(
-                domain=DOMAIN,
-                identifier=filename,
-                media_class=MediaClass.IMAGE,
-                media_content_type=image.mime_type,
-                title=image.title or filename,
-                can_play=True,
-                can_expand=False,
-            )
-            for filename, image in image_storage.items()
-        ]
-
-        return BrowseMediaSource(
-            domain=DOMAIN,
-            identifier=None,
-            media_class=MediaClass.APP,
-            media_content_type="",
-            title="AI Generated Images",
-            can_play=False,
-            can_expand=True,
-            children_media_class=MediaClass.IMAGE,
-            children=children,
-        )
+async def async_get_media_source(hass: HomeAssistant) -> MediaSource:
+    """Set up local media source."""
+    media_dir = hass.config.path(f"{DOMAIN}/{IMAGE_DIR}")
+
+    hass.data[DATA_MEDIA_SOURCE] = source = local_source.LocalSource(
+        hass,
+        DOMAIN,
+        "AI Generated Images",
+        {IMAGE_DIR: media_dir},
+        f"/{DOMAIN}",
+    )
+    return source
@@ -2,9 +2,10 @@

 from __future__ import annotations

+from contextlib import AsyncExitStack, asynccontextmanager
 from dataclasses import dataclass
 from datetime import datetime, timedelta
-from functools import partial
+import io
 import mimetypes
 from pathlib import Path
 import tempfile
@@ -14,20 +15,19 @@ import voluptuous as vol

 from homeassistant.components import camera, conversation, media_source
 from homeassistant.components.http.auth import async_sign_path
-from homeassistant.core import HomeAssistant, ServiceResponse, callback
+from homeassistant.core import HomeAssistant, ServiceResponse
 from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers import llm
 from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
-from homeassistant.helpers.event import async_call_later
-from homeassistant.helpers.network import get_url
 from homeassistant.util import RE_SANITIZE_FILENAME, slugify

 from .const import (
     DATA_COMPONENT,
-    DATA_IMAGES,
+    DATA_MEDIA_SOURCE,
     DATA_PREFERENCES,
     DOMAIN,
+    IMAGE_DIR,
     IMAGE_EXPIRY_TIME,
-    MAX_IMAGES,
     AITaskEntityFeature,
 )
@@ -43,45 +43,21 @@ def _save_camera_snapshot(image: camera.Image) -> Path:
     return Path(temp_file.name)


+@asynccontextmanager
 async def _resolve_attachments(
     hass: HomeAssistant,
     session: ChatSession,
     attachments: list[dict] | None = None,
 ) -> list[conversation.Attachment]:
     """Resolve attachments for a task."""
-    resolved_attachments: list[conversation.Attachment] = []
-    created_files: list[Path] = []
-
-    for attachment in attachments or []:
-        media_content_id = attachment["media_content_id"]
-
-        # Special case for camera media sources
-        if media_content_id.startswith("media-source://camera/"):
-            # Extract entity_id from the media content ID
-            entity_id = media_content_id.removeprefix("media-source://camera/")
-
-            # Get snapshot from camera
-            image = await camera.async_get_image(hass, entity_id)
-
-            temp_filename = await hass.async_add_executor_job(
-                _save_camera_snapshot, image
-            )
-            created_files.append(temp_filename)
-
-            resolved_attachments.append(
-                conversation.Attachment(
-                    media_content_id=media_content_id,
-                    mime_type=image.content_type,
-                    path=temp_filename,
-                )
-            )
-        else:
-            # Handle regular media sources
-            media = await media_source.async_resolve_media(hass, media_content_id, None)
-            if media.path is None:
-                raise HomeAssistantError(
-                    "Only local attachments are currently supported"
-                )
+    async with AsyncExitStack() as stack:
+        resolved_attachments: list[conversation.Attachment] = []
+
+        for attachment in attachments or []:
+            media_content_id = attachment["media_content_id"]
+            media = await stack.enter_async_context(
+                media_source.async_resolve_with_path(hass, media_content_id, None)
+            )
             resolved_attachments.append(
                 conversation.Attachment(
                     media_content_id=media_content_id,
@@ -90,22 +66,7 @@ async def _resolve_attachments(
                 )
             )

-    if not created_files:
-        return resolved_attachments
-
-    def cleanup_files() -> None:
-        """Cleanup temporary files."""
-        for file in created_files:
-            file.unlink(missing_ok=True)
-
-    @callback
-    def cleanup_files_callback() -> None:
-        """Cleanup temporary files."""
-        hass.async_add_executor_job(cleanup_files)
-
-    session.async_on_cleanup(cleanup_files_callback)
-
-    return resolved_attachments
+        yield resolved_attachments


 async def async_generate_data(
@@ -116,6 +77,7 @@ async def async_generate_data(
     instructions: str,
     structure: vol.Schema | None = None,
     attachments: list[dict] | None = None,
+    llm_api: llm.API | None = None,
 ) -> GenDataTaskResult:
     """Run a data generation task in the AI Task integration."""
     if entity_id is None:
@@ -142,46 +104,36 @@ async def async_generate_data(
     )

     with async_get_chat_session(hass) as session:
-        resolved_attachments = await _resolve_attachments(hass, session, attachments)
-
-        return await entity.internal_async_generate_data(
-            session,
-            GenDataTask(
-                name=task_name,
-                instructions=instructions,
-                structure=structure,
-                attachments=resolved_attachments or None,
-            ),
-        )
-
-
-def _cleanup_images(image_storage: dict[str, ImageData], num_to_remove: int) -> None:
-    """Remove old images to keep the storage size under the limit."""
-    if num_to_remove <= 0:
-        return
-
-    if num_to_remove >= len(image_storage):
-        image_storage.clear()
-        return
-
-    sorted_images = sorted(
-        image_storage.items(),
-        key=lambda item: item[1].timestamp,
-    )
-
-    for filename, _ in sorted_images[:num_to_remove]:
-        image_storage.pop(filename, None)
+        async with _resolve_attachments(
+            hass, session, attachments
+        ) as resolved_attachments:
+            return await entity.internal_async_generate_data(
+                session,
+                GenDataTask(
+                    name=task_name,
+                    instructions=instructions,
+                    structure=structure,
+                    attachments=resolved_attachments or None,
+                    llm_api=llm_api,
+                ),
+            )


 async def async_generate_image(
     hass: HomeAssistant,
     *,
     task_name: str,
-    entity_id: str,
+    entity_id: str | None = None,
     instructions: str,
     attachments: list[dict] | None = None,
 ) -> ServiceResponse:
     """Run an image generation task in the AI Task integration."""
+    if entity_id is None:
+        entity_id = hass.data[DATA_PREFERENCES].gen_image_entity_id
+
+    if entity_id is None:
+        raise HomeAssistantError("No entity_id provided and no preferred entity set")
+
     entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
     if entity is None:
         raise HomeAssistantError(f"AI Task entity {entity_id} not found")
@@ -200,52 +152,51 @@ async def async_generate_image(
     )

     with async_get_chat_session(hass) as session:
-        resolved_attachments = await _resolve_attachments(hass, session, attachments)
-
-        task_result = await entity.internal_async_generate_image(
-            session,
-            GenImageTask(
-                name=task_name,
-                instructions=instructions,
-                attachments=resolved_attachments or None,
-            ),
-        )
+        async with _resolve_attachments(
+            hass, session, attachments
+        ) as resolved_attachments:
+            task_result = await entity.internal_async_generate_image(
+                session,
+                GenImageTask(
+                    name=task_name,
+                    instructions=instructions,
+                    attachments=resolved_attachments or None,
+                ),
+            )

     service_result = task_result.as_dict()
     image_data = service_result.pop("image_data")
     if service_result.get("revised_prompt") is None:
         service_result["revised_prompt"] = instructions

-    image_storage = hass.data[DATA_IMAGES]
-
-    if len(image_storage) + 1 > MAX_IMAGES:
-        _cleanup_images(image_storage, len(image_storage) + 1 - MAX_IMAGES)
+    source = hass.data[DATA_MEDIA_SOURCE]

     current_time = datetime.now()
     ext = mimetypes.guess_extension(task_result.mime_type, False) or ".png"
     sanitized_task_name = RE_SANITIZE_FILENAME.sub("", slugify(task_name))
-    filename = f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}"

-    image_storage[filename] = ImageData(
-        data=image_data,
-        timestamp=int(current_time.timestamp()),
-        mime_type=task_result.mime_type,
-        title=service_result["revised_prompt"],
+    image_file = ImageData(
+        filename=f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}",
+        file=io.BytesIO(image_data),
+        content_type=task_result.mime_type,
     )

-    def _purge_image(filename: str, now: datetime) -> None:
-        """Remove image from storage."""
-        image_storage.pop(filename, None)
+    target_folder = media_source.MediaSourceItem.from_uri(
+        hass, f"media-source://{DOMAIN}/{IMAGE_DIR}", None
+    )

-    if IMAGE_EXPIRY_TIME > 0:
-        async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))
+    service_result["media_source_id"] = await source.async_upload_media(
+        target_folder, image_file
+    )

-    service_result["url"] = get_url(hass) + async_sign_path(
+    item = media_source.MediaSourceItem.from_uri(
+        hass, service_result["media_source_id"], None
+    )
+    service_result["url"] = async_sign_path(
         hass,
-        f"/api/{DOMAIN}/images/{filename}",
-        timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
+        (await source.async_resolve_media(item)).url,
+        timedelta(seconds=IMAGE_EXPIRY_TIME),
     )
-    service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"

     return service_result
@@ -266,6 +217,9 @@ class GenDataTask:
     attachments: list[conversation.Attachment] | None = None
     """List of attachments to go along the instructions."""

+    llm_api: llm.API | None = None
+    """API to provide to the LLM."""
+
     def __str__(self) -> str:
         """Return task as a string."""
         return f"<GenDataTask {self.name}: {id(self)}>"
@@ -347,20 +301,8 @@ class GenImageTaskResult:

 @dataclass(slots=True)
 class ImageData:
-    """Image data for stored generated images."""
+    """Implementation of media_source.local_source.UploadedFile protocol."""

-    data: bytes
-    """Raw image data."""
-
-    timestamp: int
-    """Timestamp when the image was generated, as a Unix timestamp."""
-
-    mime_type: str
-    """MIME type of the image."""
-
-    title: str
-    """Title of the image, usually the prompt used to generate it."""
-
-    def __str__(self) -> str:
-        """Return image data as a string."""
-        return f"<ImageData {self.title}: {id(self)}>"
+    filename: str
+    file: io.IOBase
+    content_type: str
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone",
   "iot_class": "local_polling",
   "loggers": ["aioairzone"],
-  "requirements": ["aioairzone==1.0.0"]
+  "requirements": ["aioairzone==1.0.1"]
 }
@@ -3,7 +3,6 @@
 from __future__ import annotations

 from genie_partner_sdk.client import AladdinConnectClient
-from genie_partner_sdk.model import GarageDoor

 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
@@ -36,22 +35,7 @@ async def async_setup_entry(
         api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
     )

-    sdk_doors = await client.get_doors()
-
-    # Convert SDK GarageDoor objects to integration GarageDoor objects
-    doors = [
-        GarageDoor(
-            {
-                "device_id": door.device_id,
-                "door_number": door.door_number,
-                "name": door.name,
-                "status": door.status,
-                "link_status": door.link_status,
-                "battery_level": door.battery_level,
-            }
-        )
-        for door in sdk_doors
-    ]
+    doors = await client.get_doors()

     entry.runtime_data = {
         door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)
@@ -41,4 +41,10 @@ class AladdinConnectCoordinator(DataUpdateCoordinator[GarageDoor]):
     async def _async_update_data(self) -> GarageDoor:
         """Fetch data from the Aladdin Connect API."""
         await self.client.update_door(self.data.device_id, self.data.door_number)
+        self.data.status = self.client.get_door_status(
+            self.data.device_id, self.data.door_number
+        )
+        self.data.battery_level = self.client.get_battery_status(
+            self.data.device_id, self.data.door_number
+        )
         return self.data
@@ -49,7 +49,9 @@ class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
     @property
     def is_closed(self) -> bool | None:
         """Update is closed attribute."""
-        return self.coordinator.data.status == "closed"
+        if (status := self.coordinator.data.status) is None:
+            return None
+        return status == "closed"

     @property
     def is_closing(self) -> bool | None:
@@ -4,8 +4,13 @@
   "codeowners": ["@swcloudgenie"],
   "config_flow": true,
   "dependencies": ["application_credentials"],
+  "dhcp": [
+    {
+      "hostname": "gdocntl-*"
+    }
+  ],
   "documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
   "integration_type": "hub",
   "iot_class": "cloud_polling",
-  "requirements": ["genie-partner-sdk==1.0.10"]
+  "requirements": ["genie-partner-sdk==1.0.11"]
 }
@@ -7,6 +7,9 @@
     "reauth_confirm": {
       "title": "[%key:common::config_flow::title::reauth%]",
      "description": "Aladdin Connect needs to re-authenticate your account"
     },
+    "oauth_discovery": {
+      "description": "Home Assistant has found an Aladdin Connect device on your network. Press **Submit** to continue setting up Aladdin Connect."
+    }
   },
   "abort": {
@@ -61,7 +61,7 @@ ALARM_SERVICE_SCHEMA: Final = make_entity_service_schema(


 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
-    """Track states and offer events for sensors."""
+    """Set up the alarm control panel component."""
     component = hass.data[DATA_COMPONENT] = EntityComponent[AlarmControlPanelEntity](
         _LOGGER, DOMAIN, hass, SCAN_INTERVAL
     )
@@ -5,7 +5,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers import aiohttp_client, config_validation as cv
 from homeassistant.helpers.typing import ConfigType

-from .const import _LOGGER, CONF_LOGIN_DATA, COUNTRY_DOMAINS, DOMAIN
+from .const import _LOGGER, CONF_LOGIN_DATA, CONF_SITE, COUNTRY_DOMAINS, DOMAIN
 from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator
 from .services import async_setup_services

@@ -42,7 +42,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bo

 async def async_migrate_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
     """Migrate old entry."""
-    if entry.version == 1 and entry.minor_version == 1:
+
+    if entry.version == 1 and entry.minor_version < 3:
+        if CONF_SITE in entry.data:
+            # Site in data (wrong place), just move to login data
+            new_data = entry.data.copy()
+            new_data[CONF_LOGIN_DATA][CONF_SITE] = new_data[CONF_SITE]
+            new_data.pop(CONF_SITE)
+            hass.config_entries.async_update_entry(
+                entry, data=new_data, version=1, minor_version=3
+            )
+            return True
+
+        if CONF_SITE in entry.data[CONF_LOGIN_DATA]:
+            # Site is there, just update version to avoid future migrations
+            hass.config_entries.async_update_entry(entry, version=1, minor_version=3)
+            return True
+
         _LOGGER.debug(
             "Migrating from version %s.%s", entry.version, entry.minor_version
         )
@@ -53,10 +69,10 @@ async def async_migrate_entry(hass: HomeAssistant, entry: AmazonConfigEntry) ->

         # Add site to login data
         new_data = entry.data.copy()
-        new_data[CONF_LOGIN_DATA]["site"] = f"https://www.amazon.{domain}"
+        new_data[CONF_LOGIN_DATA][CONF_SITE] = f"https://www.amazon.{domain}"

         hass.config_entries.async_update_entry(
-            entry, data=new_data, version=1, minor_version=2
+            entry, data=new_data, version=1, minor_version=3
         )

         _LOGGER.info(
@@ -52,7 +52,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Alexa Devices."""

     VERSION = 1
-    MINOR_VERSION = 2
+    MINOR_VERSION = 3

     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
@@ -107,7 +107,9 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):

         if user_input is not None:
             try:
-                await validate_input(self.hass, {**reauth_entry.data, **user_input})
+                data = await validate_input(
+                    self.hass, {**reauth_entry.data, **user_input}
+                )
             except CannotConnect:
                 errors["base"] = "cannot_connect"
             except (CannotAuthenticate, TypeError):
@@ -119,8 +121,9 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 reauth_entry,
                 data={
                     CONF_USERNAME: entry_data[CONF_USERNAME],
-                    CONF_PASSWORD: entry_data[CONF_PASSWORD],
+                    CONF_PASSWORD: user_input[CONF_PASSWORD],
                     CONF_CODE: user_input[CONF_CODE],
+                    CONF_LOGIN_DATA: data,
                 },
             )
|
@@ -6,6 +6,7 @@ _LOGGER = logging.getLogger(__package__)
|
||||
|
||||
DOMAIN = "alexa_devices"
|
||||
CONF_LOGIN_DATA = "login_data"
|
||||
CONF_SITE = "site"
|
||||
|
||||
DEFAULT_DOMAIN = "com"
|
||||
COUNTRY_DOMAINS = {
|
||||
|
@@ -14,6 +14,7 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
+from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
@@ -48,12 +49,13 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
             entry.data[CONF_PASSWORD],
             entry.data[CONF_LOGIN_DATA],
         )
+        self.previous_devices: set[str] = set()

     async def _async_update_data(self) -> dict[str, AmazonDevice]:
         """Update device data."""
         try:
             await self.api.login_mode_stored_data()
-            return await self.api.get_devices_data()
+            data = await self.api.get_devices_data()
         except CannotConnect as err:
             raise UpdateFailed(
                 translation_domain=DOMAIN,
@@ -72,3 +74,31 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
                 translation_key="invalid_auth",
                 translation_placeholders={"error": repr(err)},
             ) from err
+        else:
+            current_devices = set(data.keys())
+            if stale_devices := self.previous_devices - current_devices:
+                await self._async_remove_device_stale(stale_devices)
+
+            self.previous_devices = current_devices
+            return data
+
+    async def _async_remove_device_stale(
+        self,
+        stale_devices: set[str],
+    ) -> None:
+        """Remove stale device."""
+        device_registry = dr.async_get(self.hass)
+
+        for serial_num in stale_devices:
+            _LOGGER.debug(
+                "Detected change in devices: serial %s removed",
+                serial_num,
+            )
+            device = device_registry.async_get_device(
+                identifiers={(DOMAIN, serial_num)}
+            )
+            if device:
+                device_registry.async_update_device(
+                    device_id=device.id,
+                    remove_config_entry_id=self.config_entry.entry_id,
+                )
@@ -64,9 +64,7 @@ rules:
   repair-issues:
     status: exempt
     comment: no known use cases for repair issues or flows, yet
-  stale-devices:
-    status: todo
-    comment: automate the cleanup process
+  stale-devices: done

   # Platinum
   async-dependency: done
@@ -33,9 +33,11 @@ from homeassistant.const import (
 )
 from homeassistant.core import Event, HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.device_registry import format_mac
 from homeassistant.helpers.dispatcher import async_dispatcher_send
 from homeassistant.helpers.storage import STORAGE_DIR
+from homeassistant.helpers.typing import ConfigType

 from .const import (
     CONF_ADB_SERVER_IP,
@@ -46,10 +48,12 @@ from .const import (
     DEFAULT_ADB_SERVER_PORT,
     DEVICE_ANDROIDTV,
     DEVICE_FIRETV,
     DOMAIN,
     PROP_ETHMAC,
     PROP_WIFIMAC,
     SIGNAL_CONFIG_ENTITY,
 )
+from .services import async_setup_services

 ADB_PYTHON_EXCEPTIONS: tuple = (
     AdbTimeoutError,
@@ -63,6 +67,8 @@ ADB_PYTHON_EXCEPTIONS: tuple = (
 )
 ADB_TCP_EXCEPTIONS: tuple = (ConnectionResetError, RuntimeError)

+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
+
 PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]
 RELOAD_OPTIONS = [CONF_STATE_DETECTION_RULES]

@@ -188,6 +194,12 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     return True


+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up the Android TV / Fire TV integration."""
+    async_setup_services(hass)
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: AndroidTVConfigEntry) -> bool:
     """Set up Android Debug Bridge platform."""
@@ -8,7 +8,6 @@ import logging

 from androidtv.constants import APPS, KEYS
 from androidtv.setup_async import AndroidTVAsync, FireTVAsync
-import voluptuous as vol

 from homeassistant.components import persistent_notification
 from homeassistant.components.media_player import (
@@ -17,9 +16,7 @@ from homeassistant.components.media_player import (
     MediaPlayerEntityFeature,
     MediaPlayerState,
 )
-from homeassistant.const import ATTR_COMMAND
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import config_validation as cv, entity_platform
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.util.dt import utcnow
@@ -39,19 +36,10 @@ from .const import (
     SIGNAL_CONFIG_ENTITY,
 )
 from .entity import AndroidTVEntity, adb_decorator
+from .services import ATTR_ADB_RESPONSE, ATTR_HDMI_INPUT, SERVICE_LEARN_SENDEVENT

 _LOGGER = logging.getLogger(__name__)

-ATTR_ADB_RESPONSE = "adb_response"
-ATTR_DEVICE_PATH = "device_path"
-ATTR_HDMI_INPUT = "hdmi_input"
-ATTR_LOCAL_PATH = "local_path"
-
-SERVICE_ADB_COMMAND = "adb_command"
-SERVICE_DOWNLOAD = "download"
-SERVICE_LEARN_SENDEVENT = "learn_sendevent"
-SERVICE_UPLOAD = "upload"
-
 # Translate from `AndroidTV` / `FireTV` reported state to HA state.
 ANDROIDTV_STATES = {
     "off": MediaPlayerState.OFF,
@@ -77,32 +65,6 @@ async def async_setup_entry(
         ]
     )

-    platform = entity_platform.async_get_current_platform()
-    platform.async_register_entity_service(
-        SERVICE_ADB_COMMAND,
-        {vol.Required(ATTR_COMMAND): cv.string},
-        "adb_command",
-    )
-    platform.async_register_entity_service(
-        SERVICE_LEARN_SENDEVENT, None, "learn_sendevent"
-    )
-    platform.async_register_entity_service(
-        SERVICE_DOWNLOAD,
-        {
-            vol.Required(ATTR_DEVICE_PATH): cv.string,
-            vol.Required(ATTR_LOCAL_PATH): cv.string,
-        },
-        "service_download",
-    )
-    platform.async_register_entity_service(
-        SERVICE_UPLOAD,
-        {
-            vol.Required(ATTR_DEVICE_PATH): cv.string,
-            vol.Required(ATTR_LOCAL_PATH): cv.string,
-        },
-        "service_upload",
-    )
-

 class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
     """Representation of an Android or Fire TV device."""
homeassistant/components/androidtv/services.py (new file)
@@ -0,0 +1,66 @@
+"""Services for Android/Fire TV devices."""
+
+from __future__ import annotations
+
+import voluptuous as vol
+
+from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
+from homeassistant.const import ATTR_COMMAND
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import config_validation as cv, service
+
+from .const import DOMAIN
+
+ATTR_ADB_RESPONSE = "adb_response"
+ATTR_DEVICE_PATH = "device_path"
+ATTR_HDMI_INPUT = "hdmi_input"
+ATTR_LOCAL_PATH = "local_path"
+
+SERVICE_ADB_COMMAND = "adb_command"
+SERVICE_DOWNLOAD = "download"
+SERVICE_LEARN_SENDEVENT = "learn_sendevent"
+SERVICE_UPLOAD = "upload"
+
+
+@callback
+def async_setup_services(hass: HomeAssistant) -> None:
+    """Register the Android TV / Fire TV services."""
+
+    service.async_register_platform_entity_service(
+        hass,
+        DOMAIN,
+        SERVICE_ADB_COMMAND,
+        entity_domain=MEDIA_PLAYER_DOMAIN,
+        schema={vol.Required(ATTR_COMMAND): cv.string},
+        func="adb_command",
+    )
+    service.async_register_platform_entity_service(
+        hass,
+        DOMAIN,
+        SERVICE_LEARN_SENDEVENT,
+        entity_domain=MEDIA_PLAYER_DOMAIN,
+        schema=None,
+        func="learn_sendevent",
+    )
+    service.async_register_platform_entity_service(
+        hass,
+        DOMAIN,
+        SERVICE_DOWNLOAD,
+        entity_domain=MEDIA_PLAYER_DOMAIN,
+        schema={
+            vol.Required(ATTR_DEVICE_PATH): cv.string,
+            vol.Required(ATTR_LOCAL_PATH): cv.string,
+        },
+        func="service_download",
+    )
+    service.async_register_platform_entity_service(
+        hass,
+        DOMAIN,
+        SERVICE_UPLOAD,
+        entity_domain=MEDIA_PLAYER_DOMAIN,
+        schema={
+            vol.Required(ATTR_DEVICE_PATH): cv.string,
+            vol.Required(ATTR_LOCAL_PATH): cv.string,
+        },
+        func="service_upload",
+    )
@@ -37,7 +37,7 @@ from .helpers import AndroidTVRemoteConfigEntry, create_api, get_enable_ime

 _LOGGER = logging.getLogger(__name__)

-APPS_NEW_ID = "NewApp"
+APPS_NEW_ID = "add_new"
 CONF_APP_DELETE = "app_delete"
 CONF_APP_ID = "app_id"

@@ -287,7 +287,9 @@ class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload):
                 {
                     vol.Optional(CONF_APPS): SelectSelector(
                         SelectSelectorConfig(
-                            options=apps, mode=SelectSelectorMode.DROPDOWN
+                            options=apps,
+                            mode=SelectSelectorMode.DROPDOWN,
+                            translation_key="apps",
                         )
                     ),
                     vol.Required(
@@ -7,6 +7,7 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["androidtvremote2"],
+  "quality_scale": "platinum",
   "requirements": ["androidtvremote2==0.2.3"],
   "zeroconf": ["_androidtvremote2._tcp.local."]
 }
homeassistant/components/androidtv_remote/quality_scale.yaml (new file)
@@ -0,0 +1,78 @@
+rules:
+  # Bronze
+  action-setup:
+    status: exempt
+    comment: No integration-specific service actions are defined.
+  appropriate-polling:
+    status: exempt
+    comment: This is a push-based integration.
+  brands: done
+  common-modules: done
+  config-flow-test-coverage: done
+  config-flow: done
+  dependency-transparency: done
+  docs-actions: done
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: done
+  entity-event-setup: done
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+
+  # Silver
+  action-exceptions: done
+  config-entry-unloading: done
+  docs-configuration-parameters: done
+  docs-installation-parameters: done
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: done
+  parallel-updates: done
+  reauthentication-flow: done
+  test-coverage: done
+
+  # Gold
+  devices: done
+  diagnostics: done
+  discovery-update-info: done
+  discovery: done
+  docs-data-update: done
+  docs-examples: done
+  docs-known-limitations: done
+  docs-supported-devices: done
+  docs-supported-functions: done
+  docs-troubleshooting: done
+  docs-use-cases: done
+  dynamic-devices:
+    status: exempt
+    comment: The integration is configured on a per-device basis, so there are no dynamic devices to add.
+  entity-category:
+    status: exempt
+    comment: All entities are primary and do not require a specific category.
+  entity-device-class: done
+  entity-disabled-by-default:
+    status: exempt
+    comment: The integration provides only primary entities that should be enabled.
+  entity-translations: done
+  exception-translations: done
+  icon-translations:
+    status: exempt
+    comment: Icons are provided by the entity's device class, and no state-based icons are needed.
+  reconfiguration-flow: done
+  repair-issues:
+    status: exempt
+    comment: The integration uses the reauth flow for authentication issues, and no other repairable issues have been identified.
+  stale-devices:
+    status: exempt
+    comment: The integration manages a single device per config entry. Stale device removal is handled by removing the config entry.
+
+  # Platinum
+  async-dependency: done
+  inject-websession:
+    status: exempt
+    comment: The underlying library does not use HTTP for communication.
+  strict-typing: done
@@ -92,5 +92,12 @@
     "invalid_media_type": {
       "message": "Invalid media type: {media_type}"
     }
-  }
+  },
+  "selector": {
+    "apps": {
+      "options": {
+        "add_new": "Add new"
+      }
+    }
+  }
 }
@@ -16,7 +16,7 @@ from .coordinator import (
AOSmithStatusCoordinator,
)

PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WATER_HEATER]
PLATFORMS: list[Platform] = [Platform.SELECT, Platform.SENSOR, Platform.WATER_HEATER]


async def async_setup_entry(hass: HomeAssistant, entry: AOSmithConfigEntry) -> bool:
@@ -1,5 +1,10 @@
{
"entity": {
"select": {
"hot_water_plus_level": {
"default": "mdi:water-plus"
}
},
"sensor": {
"hot_water_availability": {
"default": "mdi:water-thermometer"
homeassistant/components/aosmith/select.py (new file, 70 lines)
@@ -0,0 +1,70 @@
"""The select platform for the A. O. Smith integration."""

from homeassistant.components.select import SelectEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AOSmithConfigEntry
from .coordinator import AOSmithStatusCoordinator
from .entity import AOSmithStatusEntity

HWP_LEVEL_HA_TO_AOSMITH = {
    "off": 0,
    "level1": 1,
    "level2": 2,
    "level3": 3,
}
HWP_LEVEL_AOSMITH_TO_HA = {value: key for key, value in HWP_LEVEL_HA_TO_AOSMITH.items()}


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AOSmithConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up A. O. Smith select platform."""
    data = entry.runtime_data

    async_add_entities(
        AOSmithHotWaterPlusSelectEntity(data.status_coordinator, device.junction_id)
        for device in data.status_coordinator.data.values()
        if device.supports_hot_water_plus
    )


class AOSmithHotWaterPlusSelectEntity(AOSmithStatusEntity, SelectEntity):
    """Class for the Hot Water+ select entity."""

    _attr_translation_key = "hot_water_plus_level"
    _attr_options = list(HWP_LEVEL_HA_TO_AOSMITH)

    def __init__(self, coordinator: AOSmithStatusCoordinator, junction_id: str) -> None:
        """Initialize the entity."""
        super().__init__(coordinator, junction_id)
        self._attr_unique_id = f"hot_water_plus_level_{junction_id}"

    @property
    def suggested_object_id(self) -> str | None:
        """Override the suggested object id to make '+' get converted to 'plus' in the entity id."""
        return "hot_water_plus_level"

    @property
    def current_option(self) -> str | None:
        """Return the current Hot Water+ mode."""
        hot_water_plus_level = self.device.status.hot_water_plus_level
        return (
            None
            if hot_water_plus_level is None
            else HWP_LEVEL_AOSMITH_TO_HA.get(hot_water_plus_level)
        )

    async def async_select_option(self, option: str) -> None:
        """Set the Hot Water+ mode."""
        aosmith_hwp_level = HWP_LEVEL_HA_TO_AOSMITH[option]
        await self.client.update_mode(
            junction_id=self.junction_id,
            mode=self.device.status.current_mode,
            hot_water_plus_level=aosmith_hwp_level,
        )

        await self.coordinator.async_request_refresh()
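Note: the entity above keeps two small dicts to translate between Home Assistant option strings and the integer levels the A. O. Smith API expects. A minimal standalone sketch of that round-trip (names copied from the file; the `level2` probe value is illustrative):

    HWP_LEVEL_HA_TO_AOSMITH = {"off": 0, "level1": 1, "level2": 2, "level3": 3}
    HWP_LEVEL_AOSMITH_TO_HA = {v: k for k, v in HWP_LEVEL_HA_TO_AOSMITH.items()}

    # The round-trip is lossless for known levels...
    assert HWP_LEVEL_AOSMITH_TO_HA[HWP_LEVEL_HA_TO_AOSMITH["level2"]] == "level2"
    # ...and unknown levels from the API map to None, so current_option degrades gracefully.
    assert HWP_LEVEL_AOSMITH_TO_HA.get(99) is None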
@@ -26,6 +26,17 @@
}
},
"entity": {
"select": {
"hot_water_plus_level": {
"name": "Hot Water+ level",
"state": {
"off": "[%key:common::state::off%]",
"level1": "Level 1",
"level2": "Level 2",
"level3": "Level 3"
}
}
},
"sensor": {
"hot_water_availability": {
"name": "Hot water availability"
@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["apcaccess"],
"quality_scale": "platinum",
"requirements": ["aioapcaccess==0.4.2"]
"requirements": ["aioapcaccess==1.0.0"]
}
@@ -395,6 +395,7 @@ SENSORS: dict[str, SensorEntityDescription] = {
"upsmode": SensorEntityDescription(
key="upsmode",
translation_key="ups_mode",
entity_category=EntityCategory.DIAGNOSTIC,
),
"upsname": SensorEntityDescription(
key="upsname",
@@ -103,6 +103,7 @@ async def async_pipeline_from_audio_stream(
wake_word_settings: WakeWordSettings | None = None,
audio_settings: AudioSettings | None = None,
device_id: str | None = None,
satellite_id: str | None = None,
start_stage: PipelineStage = PipelineStage.STT,
end_stage: PipelineStage = PipelineStage.TTS,
conversation_extra_system_prompt: str | None = None,
@@ -115,6 +116,7 @@ async def async_pipeline_from_audio_stream(
pipeline_input = PipelineInput(
session=session,
device_id=device_id,
satellite_id=satellite_id,
stt_metadata=stt_metadata,
stt_stream=stt_stream,
wake_word_phrase=wake_word_phrase,
homeassistant/components/assist_pipeline/acknowledge.mp3 (new binary file, not shown)
@@ -1,5 +1,7 @@
"""Constants for the Assist pipeline integration."""

from pathlib import Path

DOMAIN = "assist_pipeline"

DATA_CONFIG = f"{DOMAIN}.config"
@@ -23,3 +25,5 @@ SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK)  # 10 ms @ 16Khz
BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS  # 16-bit

OPTION_PREFERRED = "preferred"

ACKNOWLEDGE_PATH = Path(__file__).parent / "acknowledge.mp3"
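Note: ACKNOWLEDGE_PATH resolves the bundled MP3 relative to the module file itself, so it works regardless of working directory or install location. The same pathlib pattern in isolation (file name illustrative):

    from pathlib import Path

    # Resolve a data file shipped next to this module, independent of CWD.
    MEDIA_PATH = Path(__file__).parent / "acknowledge.mp3"
    print(MEDIA_PATH.is_file())  # True once the file ships with the package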
@@ -23,7 +23,12 @@ from homeassistant.components import conversation, stt, tts, wake_word, websocke
from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, intent
from homeassistant.helpers import (
chat_session,
device_registry as dr,
entity_registry as er,
intent,
)
from homeassistant.helpers.collection import (
CHANGE_UPDATED,
CollectionError,
@@ -45,6 +50,7 @@ from homeassistant.util.limited_size_dict import LimitedSizeDict

from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .const import (
ACKNOWLEDGE_PATH,
BYTES_PER_CHUNK,
CONF_DEBUG_RECORDING_DIR,
DATA_CONFIG,
@@ -113,6 +119,7 @@ PIPELINE_FIELDS: VolDictType = {
vol.Required("wake_word_entity"): vol.Any(str, None),
vol.Required("wake_word_id"): vol.Any(str, None),
vol.Optional("prefer_local_intents"): bool,
vol.Optional("acknowledge_media_id"): str,
}

STORED_PIPELINE_RUNS = 10
@@ -583,6 +590,9 @@ class PipelineRun:
_device_id: str | None = None
"""Optional device id set during run start."""

_satellite_id: str | None = None
"""Optional satellite id set during run start."""

_conversation_data: PipelineConversationData | None = None
"""Data tied to the conversation ID."""

@@ -636,9 +646,12 @@ class PipelineRun:
return
pipeline_data.pipeline_debug[self.pipeline.id][self.id].events.append(event)

def start(self, conversation_id: str, device_id: str | None) -> None:
def start(
self, conversation_id: str, device_id: str | None, satellite_id: str | None
) -> None:
"""Emit run start event."""
self._device_id = device_id
self._satellite_id = satellite_id
self._start_debug_recording_thread()

data: dict[str, Any] = {
@@ -646,6 +659,8 @@ class PipelineRun:
"language": self.language,
"conversation_id": conversation_id,
}
if satellite_id is not None:
data["satellite_id"] = satellite_id
if self.runner_data is not None:
data["runner_data"] = self.runner_data
if self.tts_stream:
@@ -1057,10 +1072,12 @@
self,
intent_input: str,
conversation_id: str,
device_id: str | None,
conversation_extra_system_prompt: str | None,
) -> str:
"""Run intent recognition portion of pipeline. Returns text to speak."""
) -> tuple[str, bool]:
"""Run intent recognition portion of pipeline.

Returns (speech, all_targets_in_satellite_area).
"""
if self.intent_agent is None or self._conversation_data is None:
raise RuntimeError("Recognize intent was not prepared")

@@ -1088,7 +1105,8 @@
"language": input_language,
"intent_input": intent_input,
"conversation_id": conversation_id,
"device_id": device_id,
"device_id": self._device_id,
"satellite_id": self._satellite_id,
"prefer_local_intents": self.pipeline.prefer_local_intents,
},
)
@@ -1099,7 +1117,8 @@
text=intent_input,
context=self.context,
conversation_id=conversation_id,
device_id=device_id,
device_id=self._device_id,
satellite_id=self._satellite_id,
language=input_language,
agent_id=self.intent_agent.id,
extra_system_prompt=conversation_extra_system_prompt,
@@ -1107,6 +1126,7 @@

agent_id = self.intent_agent.id
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
all_targets_in_satellite_area = False
intent_response: intent.IntentResponse | None = None
if not processed_locally and not self._intent_agent_only:
# Sentence triggers override conversation agent
@@ -1269,6 +1289,7 @@
text=user_input.text,
conversation_id=user_input.conversation_id,
device_id=user_input.device_id,
satellite_id=user_input.satellite_id,
context=user_input.context,
language=user_input.language,
agent_id=user_input.agent_id,
@@ -1280,6 +1301,17 @@
if tts_input_stream and self._streamed_response_text:
tts_input_stream.put_nowait(None)

if agent_id == conversation.HOME_ASSISTANT_AGENT:
# Check if all targeted entities were in the same area as
# the satellite device.
# If so, the satellite should respond with an acknowledge beep
# instead of a full response.
all_targets_in_satellite_area = (
self._get_all_targets_in_satellite_area(
conversation_result.response, self._device_id
)
)

except Exception as src_error:
_LOGGER.exception("Unexpected error during intent recognition")
raise IntentRecognitionError(
@@ -1302,7 +1334,45 @@
if conversation_result.continue_conversation:
self._conversation_data.continue_conversation_agent = agent_id

return speech
return (speech, all_targets_in_satellite_area)

def _get_all_targets_in_satellite_area(
self, intent_response: intent.IntentResponse, device_id: str | None
) -> bool:
"""Return true if all targeted entities were in the same area as the device."""
if (
(intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
or (not intent_response.matched_states)
or (not device_id)
):
return False

device_registry = dr.async_get(self.hass)

if (not (device := device_registry.async_get(device_id))) or (
not device.area_id
):
return False

entity_registry = er.async_get(self.hass)
for state in intent_response.matched_states:
entity = entity_registry.async_get(state.entity_id)
if not entity:
return False

if (entity_area_id := entity.area_id) is None:
if (entity.device_id is None) or (
(entity_device := device_registry.async_get(entity.device_id))
is None
):
return False

entity_area_id = entity_device.area_id

if entity_area_id != device.area_id:
return False

return True

async def prepare_text_to_speech(self) -> None:
"""Prepare text-to-speech."""
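Note: the area check above prefers an entity's own area and falls back to the area of the entity's parent device; only when every matched target resolves to the satellite's area does the pipeline answer with the acknowledge sound instead of full speech. A minimal standalone sketch of that fallback rule (plain dicts stand in for the HA registries):

    def target_area(entity: dict, devices: dict[str, dict]) -> str | None:
        # Prefer the entity's own area; fall back to its parent device's area.
        if entity.get("area_id") is not None:
            return entity["area_id"]
        device = devices.get(entity.get("device_id") or "")
        return device.get("area_id") if device else None

    devices = {"dev1": {"area_id": "kitchen"}}
    assert target_area({"area_id": None, "device_id": "dev1"}, devices) == "kitchen"
    assert target_area({"area_id": "hall"}, devices) == "hall"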
@@ -1340,7 +1410,9 @@
),
) from err

async def text_to_speech(self, tts_input: str) -> None:
async def text_to_speech(
self, tts_input: str, override_media_path: Path | None = None
) -> None:
"""Run text-to-speech portion of pipeline."""
assert self.tts_stream is not None

@@ -1352,11 +1424,14 @@
"language": self.pipeline.tts_language,
"voice": self.pipeline.tts_voice,
"tts_input": tts_input,
"acknowledge_override": override_media_path is not None,
},
)
)

if not self._streamed_response_text:
if override_media_path:
self.tts_stream.async_override_result(override_media_path)
elif not self._streamed_response_text:
self.tts_stream.async_set_message(tts_input)

tts_output = {
@@ -1567,10 +1642,15 @@ class PipelineInput:
device_id: str | None = None
"""Identifier of the device that is processing the input/output of the pipeline."""

satellite_id: str | None = None
"""Identifier of the satellite that is processing the input/output of the pipeline."""

async def execute(self) -> None:
"""Run pipeline."""
self.run.start(
conversation_id=self.session.conversation_id, device_id=self.device_id
conversation_id=self.session.conversation_id,
device_id=self.device_id,
satellite_id=self.satellite_id,
)
current_stage: PipelineStage | None = self.run.start_stage
stt_audio_buffer: list[EnhancedAudioChunk] = []
@@ -1649,17 +1729,20 @@

if self.run.end_stage != PipelineStage.STT:
tts_input = self.tts_input
all_targets_in_satellite_area = False

if current_stage == PipelineStage.INTENT:
# intent-recognition
assert intent_input is not None
tts_input = await self.run.recognize_intent(
(
tts_input,
all_targets_in_satellite_area,
) = await self.run.recognize_intent(
intent_input,
self.session.conversation_id,
self.device_id,
self.conversation_extra_system_prompt,
)
if tts_input.strip():
if all_targets_in_satellite_area or tts_input.strip():
current_stage = PipelineStage.TTS
else:
# Skip TTS
@@ -1668,8 +1751,14 @@
if self.run.end_stage != PipelineStage.INTENT:
# text-to-speech
if current_stage == PipelineStage.TTS:
assert tts_input is not None
await self.run.text_to_speech(tts_input)
if all_targets_in_satellite_area:
# Use acknowledge media instead of full response
await self.run.text_to_speech(
tts_input or "", override_media_path=ACKNOWLEDGE_PATH
)
else:
assert tts_input is not None
await self.run.text_to_speech(tts_input)

except PipelineError as err:
self.run.process_event(
@@ -3,6 +3,7 @@
from __future__ import annotations

from collections.abc import Iterable
from dataclasses import replace

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory, Platform
@@ -64,15 +65,36 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
translation_key="pipeline",
entity_category=EntityCategory.CONFIG,
)

_attr_should_poll = False
_attr_current_option = OPTION_PREFERRED
_attr_options = [OPTION_PREFERRED]

def __init__(self, hass: HomeAssistant, domain: str, unique_id_prefix: str) -> None:
def __init__(
self,
hass: HomeAssistant,
domain: str,
unique_id_prefix: str,
index: int = 0,
) -> None:
"""Initialize a pipeline selector."""
if index < 1:
# Keep compatibility
key_suffix = ""
placeholder = ""
else:
key_suffix = f"_{index + 1}"
placeholder = f" {index + 1}"

self.entity_description = replace(
self.entity_description,
key=f"pipeline{key_suffix}",
translation_placeholders={"index": placeholder},
)

self._domain = domain
self._unique_id_prefix = unique_id_prefix
self._attr_unique_id = f"{unique_id_prefix}-pipeline"
self._attr_unique_id = f"{unique_id_prefix}-{self.entity_description.key}"
self.hass = hass
self._update_options()

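Note: the constructor above clones the class-level SelectEntityDescription with dataclasses.replace, so each indexed selector gets its own key and placeholder without mutating the description shared by every instance. A tiny sketch of that pattern (Desc is a hypothetical stand-in for the frozen description class):

    from dataclasses import dataclass, replace

    @dataclass(frozen=True)
    class Desc:
        key: str
        translation_placeholders: dict | None = None

    base = Desc(key="pipeline")
    second = replace(base, key="pipeline_2", translation_placeholders={"index": " 2"})
    assert base.key == "pipeline" and second.key == "pipeline_2"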
@@ -7,7 +7,7 @@
},
"select": {
"pipeline": {
"name": "Assistant",
"name": "Assistant{index}",
"state": {
"preferred": "Preferred"
}
@@ -522,6 +522,7 @@ class AssistSatelliteEntity(entity.Entity):
pipeline_id=self._resolve_pipeline(),
conversation_id=session.conversation_id,
device_id=device_id,
satellite_id=self.entity_id,
tts_audio_output=self.tts_options,
wake_word_phrase=wake_word_phrase,
audio_settings=AudioSettings(
@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
"tmp_backups/*.tar",
"OZW_Log.txt",
"tts/*",
"ai_task/*",
]

EXCLUDE_DATABASE_FROM_BACKUP = [
@@ -497,16 +497,18 @@ class BayesianBinarySensor(BinarySensorEntity):
_LOGGER.debug(
(
"Observation for entity '%s' returned None, it will not be used"
" for Bayesian updating"
" for updating Bayesian sensor '%s'"
),
observation.entity_id,
self.entity_id,
)
continue
_LOGGER.debug(
(
"Observation for template entity returned None rather than a valid"
" boolean, it will not be used for Bayesian updating"
" boolean, it will not be used for updating Bayesian sensor '%s'"
),
self.entity_id,
)
# the prior has been updated and is now the posterior
return prior
@@ -57,6 +57,7 @@ from .api import (
_get_manager,
async_address_present,
async_ble_device_from_address,
async_current_scanners,
async_discovered_service_info,
async_get_advertisement_callback,
async_get_fallback_availability_interval,
@@ -114,6 +115,7 @@ __all__ = [
"HomeAssistantRemoteScanner",
"async_address_present",
"async_ble_device_from_address",
"async_current_scanners",
"async_discovered_service_info",
"async_get_advertisement_callback",
"async_get_fallback_availability_interval",
@@ -66,6 +66,22 @@ def async_scanner_count(hass: HomeAssistant, connectable: bool = True) -> int:
return _get_manager(hass).async_scanner_count(connectable)


@hass_callback
def async_current_scanners(hass: HomeAssistant) -> list[BaseHaScanner]:
"""Return the list of currently active scanners.

This method returns a list of all active Bluetooth scanners registered
with Home Assistant, including both connectable and non-connectable scanners.

Args:
hass: Home Assistant instance

Returns:
List of all active scanner instances
"""
return _get_manager(hass).async_current_scanners()


@hass_callback
def async_discovered_service_info(
hass: HomeAssistant, connectable: bool = True
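Note: a minimal sketch of how a caller might use the new helper, assuming it is re-exported from the bluetooth component as the __all__ change above indicates (source and connectable are attributes habluetooth scanners expose):

    from homeassistant.components import bluetooth

    def log_scanner_sources(hass) -> None:
        # Every active scanner: local or remote, connectable or not.
        for scanner in bluetooth.async_current_scanners(hass):
            print(scanner.source, scanner.connectable)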
@@ -8,8 +8,19 @@ import itertools
import logging

from bleak_retry_connector import BleakSlotManager
from bluetooth_adapters import BluetoothAdapters
from habluetooth import BaseHaRemoteScanner, BaseHaScanner, BluetoothManager
from bluetooth_adapters import (
ADAPTER_TYPE,
BluetoothAdapters,
adapter_human_name,
adapter_model,
)
from habluetooth import (
BaseHaRemoteScanner,
BaseHaScanner,
BluetoothManager,
BluetoothScanningMode,
HaScanner,
)

from homeassistant import config_entries
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, EVENT_LOGGING_CHANGED
@@ -19,8 +30,9 @@ from homeassistant.core import (
HomeAssistant,
callback as hass_callback,
)
from homeassistant.helpers import discovery_flow
from homeassistant.helpers import discovery_flow, issue_registry as ir
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util.package import is_docker_env

from .const import (
CONF_SOURCE,
@@ -314,3 +326,97 @@ class HomeAssistantBluetoothManager(BluetoothManager):
        address = discovery_key.key
        _LOGGER.debug("Rediscover address %s", address)
        self.async_rediscover_address(address)

    def on_scanner_start(self, scanner: BaseHaScanner) -> None:
        """Handle when a scanner starts.

        Create or delete repair issues for local adapters based on degraded mode.
        """
        super().on_scanner_start(scanner)

        # Only handle repair issues for local adapters (HaScanner instances)
        if not isinstance(scanner, HaScanner):
            return
        self.async_check_degraded_mode(scanner)
        self.async_check_scanning_mode(scanner)

    @hass_callback
    def async_check_scanning_mode(self, scanner: HaScanner) -> None:
        """Check if the scanner is running in passive mode when active mode is requested."""
        passive_mode_issue_id = f"bluetooth_adapter_passive_mode_{scanner.source}"

        # Check if scanner is NOT in passive mode when active mode was requested
        if not (
            scanner.requested_mode is BluetoothScanningMode.ACTIVE
            and scanner.current_mode is BluetoothScanningMode.PASSIVE
        ):
            # Delete passive mode issue if it exists and we're not in passive fallback
            ir.async_delete_issue(self.hass, DOMAIN, passive_mode_issue_id)
            return

        # Create repair issue for passive mode fallback
        adapter_name = adapter_human_name(
            scanner.adapter, scanner.mac_address or "00:00:00:00:00:00"
        )
        adapter_details = self._bluetooth_adapters.adapters.get(scanner.adapter)
        model = adapter_model(adapter_details) if adapter_details else None

        # Determine adapter type for specific instructions
        # Default to USB for any other type or unknown
        if adapter_details and adapter_details.get(ADAPTER_TYPE) == "uart":
            translation_key = "bluetooth_adapter_passive_mode_uart"
        else:
            translation_key = "bluetooth_adapter_passive_mode_usb"

        ir.async_create_issue(
            self.hass,
            DOMAIN,
            passive_mode_issue_id,
            is_fixable=False,  # Requires a reboot or unplug
            severity=ir.IssueSeverity.WARNING,
            translation_key=translation_key,
            translation_placeholders={
                "adapter": adapter_name,
                "model": model or "Unknown",
            },
        )

    @hass_callback
    def async_check_degraded_mode(self, scanner: HaScanner) -> None:
        """Check if we are in degraded mode and create/delete repair issues."""
        issue_id = f"bluetooth_adapter_missing_permissions_{scanner.source}"

        # Delete any existing issue if not in degraded mode
        if not self.is_operating_degraded():
            ir.async_delete_issue(self.hass, DOMAIN, issue_id)
            return

        # Only create repair issues for Docker-based installations where users
        # can fix permissions. This includes: Home Assistant Supervised,
        # Home Assistant Container, and third-party containers
        if not is_docker_env():
            return

        # Create repair issue for degraded mode in Docker (including Supervised)
        adapter_name = adapter_human_name(
            scanner.adapter, scanner.mac_address or "00:00:00:00:00:00"
        )

        # Try to get adapter details from the bluetooth adapters
        adapter_details = self._bluetooth_adapters.adapters.get(scanner.adapter)
        model = adapter_model(adapter_details) if adapter_details else None

        ir.async_create_issue(
            self.hass,
            DOMAIN,
            issue_id,
            is_fixable=False,  # Not fixable from within HA - requires
            # container restart with new permissions
            severity=ir.IssueSeverity.WARNING,
            translation_key="bluetooth_adapter_missing_permissions",
            translation_placeholders={
                "adapter": adapter_name,
                "model": model or "Unknown",
                "docs_url": "https://www.home-assistant.io/integrations/bluetooth/#additional-details-for-container",
            },
        )
@@ -18,9 +18,9 @@
"bleak==1.0.1",
"bleak-retry-connector==4.4.3",
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.2",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.2",
"dbus-fast==2.44.3",
"habluetooth==5.6.0"
"habluetooth==5.6.4"
]
}
@@ -38,5 +38,19 @@
"remote_adapters_not_supported": "Bluetooth configuration for remote adapters is not supported.",
"local_adapters_no_passive_support": "Local Bluetooth adapters that do not support passive scanning cannot be configured."
}
},
"issues": {
"bluetooth_adapter_missing_permissions": {
"title": "Bluetooth adapter requires additional permissions",
"description": "The Bluetooth adapter **{adapter}** ({model}) is operating in degraded mode because your container needs additional permissions to fully access Bluetooth hardware.\n\nPlease follow the instructions in our documentation to add the required permissions:\n[Bluetooth permissions for Docker]({docs_url})\n\nAfter adding these permissions, restart your Home Assistant container for the changes to take effect."
},
"bluetooth_adapter_passive_mode_usb": {
"title": "Bluetooth USB adapter requires manual power cycle",
"description": "The Bluetooth adapter **{adapter}** ({model}) is stuck in passive scanning mode despite requesting active scanning mode. **Automatic recovery was attempted but failed.** This is likely a kernel, firmware, or operating system issue, and the adapter requires a manual power cycle to recover.\n\nIn passive mode, the adapter can only receive advertisements but cannot request additional data from devices, which will affect device discovery and functionality.\n\n**Manual intervention required:**\n1. **Unplug the USB adapter**\n2. Wait 5 seconds\n3. **Plug it back in**\n4. Wait for Home Assistant to detect the adapter\n\nIf the issue persists after power cycling:\n- Try a different USB port\n- Check for kernel/firmware updates\n- Consider using a different Bluetooth adapter"
},
"bluetooth_adapter_passive_mode_uart": {
"title": "Bluetooth adapter requires system power cycle",
"description": "The Bluetooth adapter **{adapter}** ({model}) is stuck in passive scanning mode despite requesting active scanning mode. **Automatic recovery was attempted but failed.** This is likely a kernel, firmware, or operating system issue, and the system requires a complete power cycle to recover the adapter.\n\nIn passive mode, the adapter can only receive advertisements but cannot request additional data from devices, which will affect device discovery and functionality.\n\n**Manual intervention required:**\n1. **Shut down the system completely** (not just a reboot)\n2. **Remove power** (unplug or turn off at the switch)\n3. Wait 10 seconds\n4. Restore power and boot the system\n\nIf the issue persists after power cycling:\n- Check for kernel/firmware updates\n- The onboard Bluetooth adapter may have hardware issues"
}
}
}
@@ -18,8 +18,10 @@ async def async_get_config_entry_diagnostics(
coordinator = config_entry.runtime_data

device_info = await coordinator.client.get_system_info()
command_list = await coordinator.client.get_command_list()

return {
"remote_command_list": command_list,
"config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT),
"device_info": async_redact_data(device_info, TO_REDACT),
}
@@ -2,28 +2,40 @@

from __future__ import annotations

import logging

from brother import Brother, SnmpError

from homeassistant.components.snmp import async_get_snmp_engine
from homeassistant.const import CONF_HOST, CONF_TYPE, Platform
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

from .const import DOMAIN
from .const import (
CONF_COMMUNITY,
DEFAULT_COMMUNITY,
DEFAULT_PORT,
DOMAIN,
SECTION_ADVANCED_SETTINGS,
)
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

PLATFORMS = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
"""Set up Brother from a config entry."""
host = entry.data[CONF_HOST]
port = entry.data[SECTION_ADVANCED_SETTINGS][CONF_PORT]
community = entry.data[SECTION_ADVANCED_SETTINGS][CONF_COMMUNITY]
printer_type = entry.data[CONF_TYPE]

snmp_engine = await async_get_snmp_engine(hass)
try:
brother = await Brother.create(
host, printer_type=printer_type, snmp_engine=snmp_engine
host, port, community, printer_type=printer_type, snmp_engine=snmp_engine
)
except (ConnectionError, SnmpError, TimeoutError) as error:
raise ConfigEntryNotReady(
@@ -48,3 +60,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
"""Migrate an old entry."""
if entry.version == 1 and entry.minor_version < 2:
new_data = entry.data.copy()
new_data[SECTION_ADVANCED_SETTINGS] = {
CONF_PORT: DEFAULT_PORT,
CONF_COMMUNITY: DEFAULT_COMMUNITY,
}
hass.config_entries.async_update_entry(entry, data=new_data, minor_version=2)

_LOGGER.info(
"Migration to configuration version %s.%s successful",
entry.version,
entry.minor_version,
)

return True
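Note: a small sketch of what the migration above does to a stored entry's data when moving from version 1.1 to 1.2 (dict literals stand in for the config entry; the host value is illustrative):

    # Before migration (version 1, minor_version 1):
    old_data = {"host": "192.168.1.10", "type": "laser"}

    # After migration (minor_version 2): SNMP defaults are grouped under the new section.
    new_data = {
        **old_data,
        "advanced_settings": {"port": 161, "community": "public"},
    }
    assert new_data["advanced_settings"]["port"] == 161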
@@ -9,21 +9,65 @@ import voluptuous as vol

from homeassistant.components.snmp import async_get_snmp_engine
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_TYPE
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import section
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.util.network import is_host_valid

from .const import DOMAIN, PRINTER_TYPES
from .const import (
CONF_COMMUNITY,
DEFAULT_COMMUNITY,
DEFAULT_PORT,
DOMAIN,
PRINTER_TYPES,
SECTION_ADVANCED_SETTINGS,
)

DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
},
),
{"collapsed": True},
),
}
)
ZEROCONF_SCHEMA = vol.Schema(
{
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
},
),
{"collapsed": True},
),
}
)
RECONFIGURE_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
},
),
{"collapsed": True},
),
}
)
RECONFIGURE_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str})


async def validate_input(
@@ -35,7 +79,12 @@ async def validate_input(

snmp_engine = await async_get_snmp_engine(hass)

brother = await Brother.create(user_input[CONF_HOST], snmp_engine=snmp_engine)
brother = await Brother.create(
user_input[CONF_HOST],
user_input[SECTION_ADVANCED_SETTINGS][CONF_PORT],
user_input[SECTION_ADVANCED_SETTINGS][CONF_COMMUNITY],
snmp_engine=snmp_engine,
)
await brother.async_update()

if expected_mac is not None and brother.serial.lower() != expected_mac:
@@ -48,6 +97,7 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Brother Printer."""

VERSION = 1
MINOR_VERSION = 2

def __init__(self) -> None:
"""Initialize."""
@@ -126,13 +176,11 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
title = f"{self.brother.model} {self.brother.serial}"
return self.async_create_entry(
title=title,
data={CONF_HOST: self.host, CONF_TYPE: user_input[CONF_TYPE]},
data={CONF_HOST: self.host, **user_input},
)
return self.async_show_form(
step_id="zeroconf_confirm",
data_schema=vol.Schema(
{vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES)}
),
data_schema=ZEROCONF_SCHEMA,
description_placeholders={
"serial_number": self.brother.serial,
"model": self.brother.model,
@@ -160,7 +208,7 @@
else:
return self.async_update_reload_and_abort(
entry,
data_updates={CONF_HOST: user_input[CONF_HOST]},
data_updates=user_input,
)

return self.async_show_form(
@@ -10,3 +10,10 @@ DOMAIN: Final = "brother"
|
||||
PRINTER_TYPES: Final = ["laser", "ink"]
|
||||
|
||||
UPDATE_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
SECTION_ADVANCED_SETTINGS = "advanced_settings"
|
||||
|
||||
CONF_COMMUNITY = "community"
|
||||
|
||||
DEFAULT_COMMUNITY = "public"
|
||||
DEFAULT_PORT = 161
|
||||
|
@@ -8,7 +8,21 @@
"type": "Type of the printer"
},
"data_description": {
"host": "The hostname or IP address of the Brother printer to control."
"host": "The hostname or IP address of the Brother printer to control.",
"type": "Brother printer type: ink or laser."
},
"sections": {
"advanced_settings": {
"name": "Advanced settings",
"data": {
"port": "[%key:common::config_flow::data::port%]",
"community": "SNMP Community"
},
"data_description": {
"port": "The SNMP port of the Brother printer.",
"community": "A simple password for devices to communicate to each other."
}
}
}
},
"zeroconf_confirm": {
@@ -16,6 +30,22 @@
"title": "Discovered Brother Printer",
"data": {
"type": "[%key:component::brother::config::step::user::data::type%]"
},
"data_description": {
"type": "[%key:component::brother::config::step::user::data_description::type%]"
},
"sections": {
"advanced_settings": {
"name": "Advanced settings",
"data": {
"port": "[%key:common::config_flow::data::port%]",
"community": "SNMP Community"
},
"data_description": {
"port": "The SNMP port of the Brother printer.",
"community": "A simple password for devices to communicate to each other."
}
}
}
},
"reconfigure": {
@@ -25,6 +55,19 @@
},
"data_description": {
"host": "[%key:component::brother::config::step::user::data_description::host%]"
},
"sections": {
"advanced_settings": {
"name": "Advanced settings",
"data": {
"port": "[%key:common::config_flow::data::port%]",
"community": "SNMP Community"
},
"data_description": {
"port": "The SNMP port of the Brother printer.",
"community": "A simple password for devices to communicate to each other."
}
}
}
}
},
@@ -3,6 +3,10 @@
from __future__ import annotations

import asyncio
from contextlib import asynccontextmanager
import mimetypes
from pathlib import Path
import tempfile

from homeassistant.components.media_player import BrowseError, MediaClass
from homeassistant.components.media_source import (
@@ -17,7 +21,7 @@ from homeassistant.const import ATTR_FRIENDLY_NAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

from . import Camera, _async_stream_endpoint_url
from . import Camera, Image, _async_stream_endpoint_url, async_get_image
from .const import DATA_COMPONENT, DOMAIN, StreamType


@@ -84,6 +88,30 @@ class CameraMediaSource(MediaSource):

return PlayMedia(url, FORMAT_CONTENT_TYPE[HLS_PROVIDER])

@asynccontextmanager
async def async_resolve_with_path(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve to playable item with path."""
media = await self.async_resolve_media(item)
entity_id = item.identifier
image = await async_get_image(self.hass, entity_id)
media.path = await self.hass.async_add_executor_job(
self._save_camera_snapshot, image
)

yield media

await self.hass.async_add_executor_job(media.path.unlink)

def _save_camera_snapshot(self, image: Image) -> Path:
"""Save camera snapshot to temp file."""
with tempfile.NamedTemporaryFile(
mode="wb",
suffix=mimetypes.guess_extension(image.content_type, False),
delete=False,
) as temp_file:
temp_file.write(image.content)
return Path(temp_file.name)

async def async_browse_media(
self,
item: MediaSourceItem,
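Note: a minimal sketch of how a caller could consume the new context manager so the temporary snapshot is always removed (send_snapshot and upload are hypothetical; source is a CameraMediaSource and item a MediaSourceItem):

    async def send_snapshot(source, item, upload):
        async with source.async_resolve_with_path(item) as media:
            # media.path points at the temp snapshot only inside this block.
            upload(media.path)
        # On exit the context manager has unlinked the temp file.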
@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.1.0"],
"requirements": ["hass-nabucasa==1.1.1"],
"single_config_entry": true
}
@@ -71,6 +71,7 @@ async def async_converse(
language: str | None = None,
agent_id: str | None = None,
device_id: str | None = None,
satellite_id: str | None = None,
extra_system_prompt: str | None = None,
) -> ConversationResult:
"""Process text and get intent."""
@@ -97,6 +98,7 @@ async def async_converse(
context=context,
conversation_id=conversation_id,
device_id=device_id,
satellite_id=satellite_id,
language=language,
agent_id=agent_id,
extra_system_prompt=extra_system_prompt,
@@ -507,14 +507,18 @@ class ChatLog:
async def async_provide_llm_data(
self,
llm_context: llm.LLMContext,
user_llm_hass_api: str | list[str] | None = None,
user_llm_hass_api: str | list[str] | llm.API | None = None,
user_llm_prompt: str | None = None,
user_extra_system_prompt: str | None = None,
) -> None:
"""Set the LLM system prompt."""
llm_api: llm.APIInstance | None = None

if user_llm_hass_api:
if user_llm_hass_api is None:
pass
elif isinstance(user_llm_hass_api, llm.API):
llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)
else:
try:
llm_api = await llm.async_get_api(
self.hass,
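Note: the widened user_llm_hass_api parameter now accepts an id string, a list of ids, or a ready llm.API object, and the method dispatches on type. A tiny standalone sketch of that dispatch shape (Api and lookup are hypothetical stand-ins):

    class Api:
        """Stand-in for llm.API."""

    async def resolve(api_or_id, lookup):
        # None -> no API; API object -> use directly; otherwise treat as id(s).
        if api_or_id is None:
            return None
        if isinstance(api_or_id, Api):
            return api_or_id
        return await lookup(api_or_id)  # hypothetical id-based lookup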
@@ -470,6 +470,7 @@ class DefaultAgent(ConversationEntity):
language,
assistant=DOMAIN,
device_id=user_input.device_id,
satellite_id=user_input.satellite_id,
conversation_agent_id=user_input.agent_id,
)
except intent.MatchFailedError as match_error:
@@ -201,6 +201,7 @@ async def websocket_hass_agent_debug(
context=connection.context(msg),
conversation_id=None,
device_id=msg.get("device_id"),
satellite_id=None,
language=msg.get("language", hass.config.language),
agent_id=agent.entity_id,
)
@@ -37,6 +37,9 @@ class ConversationInput:
device_id: str | None
"""Unique identifier for the device."""

satellite_id: str | None
"""Unique identifier for the satellite."""

language: str
"""Language of the request."""

@@ -53,6 +56,7 @@ class ConversationInput:
"context": self.context.as_dict(),
"conversation_id": self.conversation_id,
"device_id": self.device_id,
"satellite_id": self.satellite_id,
"language": self.language,
"agent_id": self.agent_id,
"extra_system_prompt": self.extra_system_prompt,
@@ -100,6 +100,7 @@ async def async_attach_trigger(
entity_name: entity["value"] for entity_name, entity in details.items()
},
"device_id": user_input.device_id,
"satellite_id": user_input.satellite_id,
"user_input": user_input.as_dict(),
}

@@ -99,7 +99,7 @@ T = TypeVar(


@dataclass(frozen=True, kw_only=True)
class DeconzSensorDescription(Generic[T], SensorEntityDescription):
class DeconzSensorDescription(SensorEntityDescription, Generic[T]):
"""Class describing deCONZ binary sensor entities."""

instance_check: type[T] | None = None
@@ -6,5 +6,6 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/derivative",
"integration_type": "helper",
"iot_class": "calculated"
"iot_class": "calculated",
"quality_scale": "internal"
}
@@ -19,8 +19,10 @@ from homeassistant.config_entries import (
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.helpers.typing import VolDictType

@@ -103,6 +105,43 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN):
"""Initialize the DoorBird config flow."""
self.discovery_schema: vol.Schema | None = None

async def _async_verify_existing_device_for_discovery(
self,
existing_entry: ConfigEntry,
host: str,
macaddress: str,
) -> None:
"""Verify discovered device matches existing entry before updating IP.

This method performs the following verification steps:
1. Ensures that the stored credentials work before updating the entry.
2. Verifies that the device at the discovered IP address has the expected MAC address.
"""
info, errors = await self._async_validate_or_error(
{
**existing_entry.data,
CONF_HOST: host,
}
)

if errors:
_LOGGER.debug(
"Cannot validate DoorBird at %s with existing credentials: %s",
host,
errors,
)
raise AbortFlow("cannot_connect")

# Verify the MAC address matches what was advertised
if format_mac(info["mac_addr"]) != format_mac(macaddress):
_LOGGER.debug(
"DoorBird at %s reports MAC %s but zeroconf advertised %s, ignoring",
host,
info["mac_addr"],
macaddress,
)
raise AbortFlow("wrong_device")

async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
@@ -172,7 +211,22 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN):

await self.async_set_unique_id(macaddress)
host = discovery_info.host
self._abort_if_unique_id_configured(updates={CONF_HOST: host})

# Check if we have an existing entry for this MAC
existing_entry = self.hass.config_entries.async_entry_for_domain_unique_id(
DOMAIN, macaddress
)

if existing_entry:
# Check if the host is actually changing
if existing_entry.data.get(CONF_HOST) != host:
await self._async_verify_existing_device_for_discovery(
existing_entry, host, macaddress
)

# All checks passed or no change needed, abort
# if already configured with potential IP update
self._abort_if_unique_id_configured(updates={CONF_HOST: host})

self._async_abort_entries_match({CONF_HOST: host})

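Note: the verification above compares MACs only after normalizing both sides with format_mac, since zeroconf and the device API may report different separators or case:

    from homeassistant.helpers.device_registry import format_mac

    # Both spellings normalize to the same canonical form.
    assert format_mac("1C:CA:E3:AA:BB:CC") == format_mac("1ccae3aabbcc")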
@@ -49,6 +49,8 @@
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"link_local_address": "Link local addresses are not supported",
"not_doorbird_device": "This device is not a DoorBird",
"not_ipv4_address": "Only IPv4 addresses are supported",
"wrong_device": "Device MAC address does not match",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
},
"flow_title": "{name} ({host})",
homeassistant/components/droplet/__init__.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""The Droplet integration."""

from __future__ import annotations

import logging

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import DropletConfigEntry, DropletDataCoordinator

PLATFORMS: list[Platform] = [
    Platform.SENSOR,
]

logger = logging.getLogger(__name__)


async def async_setup_entry(
    hass: HomeAssistant, config_entry: DropletConfigEntry
) -> bool:
    """Set up Droplet from a config entry."""

    droplet_coordinator = DropletDataCoordinator(hass, config_entry)
    await droplet_coordinator.async_config_entry_first_refresh()
    config_entry.runtime_data = droplet_coordinator
    await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)

    return True


async def async_unload_entry(
    hass: HomeAssistant, config_entry: DropletConfigEntry
) -> bool:
    """Unload a config entry."""

    return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
homeassistant/components/droplet/config_flow.py (new file, 118 lines)
@@ -0,0 +1,118 @@
"""Config flow for Droplet integration."""

from __future__ import annotations

from typing import Any

from pydroplet.droplet import DropletConnection, DropletDiscovery
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_CODE, CONF_DEVICE_ID, CONF_IP_ADDRESS, CONF_PORT
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import DOMAIN


class DropletConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle Droplet config flow."""

    _droplet_discovery: DropletDiscovery

    async def async_step_zeroconf(
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle zeroconf discovery."""
        self._droplet_discovery = DropletDiscovery(
            discovery_info.host,
            discovery_info.port,
            discovery_info.name,
        )
        if not self._droplet_discovery.is_valid():
            return self.async_abort(reason="invalid_discovery_info")

        # In this case, device ID was part of the zeroconf discovery info
        device_id: str = await self._droplet_discovery.get_device_id()
        await self.async_set_unique_id(device_id)

        self._abort_if_unique_id_configured(
            updates={CONF_IP_ADDRESS: self._droplet_discovery.host},
        )

        self.context.update({"title_placeholders": {"name": device_id}})
        return await self.async_step_confirm()

    async def async_step_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm the setup."""
        errors: dict[str, str] = {}
        device_id: str = await self._droplet_discovery.get_device_id()
        if user_input is not None:
            # Test if we can connect before returning
            session = async_get_clientsession(self.hass)
            if await self._droplet_discovery.try_connect(
                session, user_input[CONF_CODE]
            ):
                device_data = {
                    CONF_IP_ADDRESS: self._droplet_discovery.host,
                    CONF_PORT: self._droplet_discovery.port,
                    CONF_DEVICE_ID: device_id,
                    CONF_CODE: user_input[CONF_CODE],
                }

                return self.async_create_entry(
                    title=device_id,
                    data=device_data,
                )
            errors["base"] = "cannot_connect"
        return self.async_show_form(
            step_id="confirm",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_CODE): str,
                }
            ),
            description_placeholders={
                "device_name": device_id,
            },
            errors=errors,
        )

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""
        errors: dict[str, str] = {}
        if user_input is not None:
            self._droplet_discovery = DropletDiscovery(
                user_input[CONF_IP_ADDRESS], DropletConnection.DEFAULT_PORT, ""
            )
            session = async_get_clientsession(self.hass)
            if await self._droplet_discovery.try_connect(
                session, user_input[CONF_CODE]
            ) and (device_id := await self._droplet_discovery.get_device_id()):
                device_data = {
                    CONF_IP_ADDRESS: self._droplet_discovery.host,
                    CONF_PORT: self._droplet_discovery.port,
                    CONF_DEVICE_ID: device_id,
                    CONF_CODE: user_input[CONF_CODE],
                }
                await self.async_set_unique_id(device_id, raise_on_progress=False)
                self._abort_if_unique_id_configured(
                    description_placeholders={CONF_DEVICE_ID: device_id},
                )

                return self.async_create_entry(
                    title=device_id,
                    data=device_data,
                )
            errors["base"] = "cannot_connect"
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_CODE): str}
            ),
            errors=errors,
        )
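Note: outside a config flow, the same pydroplet calls used above can sanity-check a device (a sketch assuming the DropletDiscovery API exactly as shown in this file; host and code are placeholders):

    import aiohttp
    from pydroplet.droplet import DropletConnection, DropletDiscovery

    async def check_droplet(host: str, code: str) -> str | None:
        # Returns the device id when the pairing code works, else None.
        discovery = DropletDiscovery(host, DropletConnection.DEFAULT_PORT, "")
        async with aiohttp.ClientSession() as session:
            if await discovery.try_connect(session, code):
                return await discovery.get_device_id()
        return None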
homeassistant/components/droplet/const.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""Constants for the droplet integration."""

CONNECT_DELAY = 5

DOMAIN = "droplet"
DEVICE_NAME = "Droplet"

KEY_CURRENT_FLOW_RATE = "current_flow_rate"
KEY_VOLUME = "volume"
KEY_SIGNAL_QUALITY = "signal_quality"
KEY_SERVER_CONNECTIVITY = "server_connectivity"
homeassistant/components/droplet/coordinator.py (new file, 84 lines)
@@ -0,0 +1,84 @@
"""Droplet device data update coordinator object."""

from __future__ import annotations

import asyncio
import logging
import time

from pydroplet.droplet import Droplet

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_CODE, CONF_IP_ADDRESS, CONF_PORT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONNECT_DELAY, DOMAIN

VERSION_TIMEOUT = 5

_LOGGER = logging.getLogger(__name__)

TIMEOUT = 1

type DropletConfigEntry = ConfigEntry[DropletDataCoordinator]


class DropletDataCoordinator(DataUpdateCoordinator[None]):
    """Droplet device object."""

    config_entry: DropletConfigEntry

    def __init__(self, hass: HomeAssistant, entry: DropletConfigEntry) -> None:
        """Initialize the device."""
        super().__init__(
            hass, _LOGGER, config_entry=entry, name=f"{DOMAIN}-{entry.unique_id}"
        )
        self.droplet = Droplet(
            host=entry.data[CONF_IP_ADDRESS],
            port=entry.data[CONF_PORT],
            token=entry.data[CONF_CODE],
            session=async_get_clientsession(self.hass),
            logger=_LOGGER,
        )
        assert entry.unique_id is not None
        self.unique_id = entry.unique_id

    async def _async_setup(self) -> None:
        if not await self.setup():
            raise ConfigEntryNotReady("Device is offline")

        # Droplet should send its metadata within 5 seconds
        end = time.time() + VERSION_TIMEOUT
        while not self.droplet.version_info_available():
            await asyncio.sleep(TIMEOUT)
            if time.time() > end:
                _LOGGER.warning("Failed to get version info from Droplet")
                return

    async def _async_update_data(self) -> None:
        if not self.droplet.connected:
            raise UpdateFailed(
                translation_domain=DOMAIN, translation_key="connection_error"
            )

    async def setup(self) -> bool:
        """Set up droplet client."""
        self.config_entry.async_on_unload(self.droplet.stop_listening)
        self.config_entry.async_create_background_task(
            self.hass,
            self.droplet.listen_forever(CONNECT_DELAY, self.async_set_updated_data),
            "droplet-listen",
        )
        end = time.time() + CONNECT_DELAY
        while time.time() < end:
            if self.droplet.connected:
                return True
            await asyncio.sleep(TIMEOUT)
        return False

    def get_availability(self) -> bool:
        """Retrieve Droplet's availability status."""
        return self.droplet.get_availability()
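Note: setup() above waits for the push connection with a bounded poll loop rather than an event. The same pattern in isolation (a generic asyncio sketch, not the Droplet code):

    import asyncio
    import time

    async def wait_until(predicate, timeout: float, interval: float = 1.0) -> bool:
        # Poll `predicate` until it returns True or `timeout` seconds elapse.
        end = time.time() + timeout
        while time.time() < end:
            if predicate():
                return True
            await asyncio.sleep(interval)
        return False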
homeassistant/components/droplet/icons.json (new file, 15 lines)
@@ -0,0 +1,15 @@
{
  "entity": {
    "sensor": {
      "current_flow_rate": {
        "default": "mdi:chart-line"
      },
      "server_connectivity": {
        "default": "mdi:web"
      },
      "signal_quality": {
        "default": "mdi:waveform"
      }
    }
  }
}
homeassistant/components/droplet/manifest.json (new file, 11 lines)
@@ -0,0 +1,11 @@
{
  "domain": "droplet",
  "name": "Droplet",
  "codeowners": ["@sarahseidman"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/droplet",
  "iot_class": "local_push",
  "quality_scale": "bronze",
  "requirements": ["pydroplet==2.3.2"],
  "zeroconf": ["_droplet._tcp.local."]
}
homeassistant/components/droplet/quality_scale.yaml (new file, 72 lines)
@@ -0,0 +1,72 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      No custom actions defined
  appropriate-polling:
    status: exempt
    comment: |
      No polling
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      No custom actions are defined.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      No custom actions are defined.
  config-entry-unloading: done
  docs-configuration-parameters: todo
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: todo
  reauthentication-flow: todo
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info: done
  discovery: done
  docs-data-update: done
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: done
  docs-troubleshooting: todo
  docs-use-cases: done
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: todo
  entity-translations: done
  exception-translations: todo
  icon-translations: done
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices: todo

  # Platinum
  async-dependency: todo
  inject-websession: done
  strict-typing: done
homeassistant/components/droplet/sensor.py (new file, 131 lines)
@@ -0,0 +1,131 @@
"""Support for Droplet."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime

from pydroplet.droplet import Droplet

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import EntityCategory, UnitOfVolume, UnitOfVolumeFlowRate
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import (
    DOMAIN,
    KEY_CURRENT_FLOW_RATE,
    KEY_SERVER_CONNECTIVITY,
    KEY_SIGNAL_QUALITY,
    KEY_VOLUME,
)
from .coordinator import DropletConfigEntry, DropletDataCoordinator

ML_L_CONVERSION = 1000


@dataclass(kw_only=True, frozen=True)
class DropletSensorEntityDescription(SensorEntityDescription):
    """Describes Droplet sensor entity."""

    value_fn: Callable[[Droplet], float | str | None]
    last_reset_fn: Callable[[Droplet], datetime | None] = lambda _: None


SENSORS: list[DropletSensorEntityDescription] = [
    DropletSensorEntityDescription(
        key=KEY_CURRENT_FLOW_RATE,
        translation_key=KEY_CURRENT_FLOW_RATE,
        device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
        native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE,
        suggested_unit_of_measurement=UnitOfVolumeFlowRate.GALLONS_PER_MINUTE,
        suggested_display_precision=2,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda device: device.get_flow_rate(),
    ),
    DropletSensorEntityDescription(
        key=KEY_VOLUME,
        device_class=SensorDeviceClass.WATER,
        native_unit_of_measurement=UnitOfVolume.LITERS,
        suggested_unit_of_measurement=UnitOfVolume.GALLONS,
        suggested_display_precision=2,
        state_class=SensorStateClass.TOTAL,
        value_fn=lambda device: device.get_volume_delta() / ML_L_CONVERSION,
        last_reset_fn=lambda device: device.get_volume_last_fetched(),
    ),
    DropletSensorEntityDescription(
        key=KEY_SERVER_CONNECTIVITY,
        translation_key=KEY_SERVER_CONNECTIVITY,
        device_class=SensorDeviceClass.ENUM,
        options=["connected", "connecting", "disconnected"],
        value_fn=lambda device: device.get_server_status(),
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    DropletSensorEntityDescription(
        key=KEY_SIGNAL_QUALITY,
        translation_key=KEY_SIGNAL_QUALITY,
        device_class=SensorDeviceClass.ENUM,
        options=["no_signal", "weak_signal", "strong_signal"],
        value_fn=lambda device: device.get_signal_quality(),
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
]


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: DropletConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Droplet sensors from config entry."""
    coordinator = config_entry.runtime_data
    async_add_entities([DropletSensor(coordinator, sensor) for sensor in SENSORS])


class DropletSensor(CoordinatorEntity[DropletDataCoordinator], SensorEntity):
    """Representation of a Droplet."""

    entity_description: DropletSensorEntityDescription
    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: DropletDataCoordinator,
        entity_description: DropletSensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
        self.entity_description = entity_description

        unique_id = coordinator.config_entry.unique_id
        self._attr_unique_id = f"{unique_id}_{entity_description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self.coordinator.unique_id)},
            manufacturer=self.coordinator.droplet.get_manufacturer(),
            model=self.coordinator.droplet.get_model(),
            sw_version=self.coordinator.droplet.get_fw_version(),
            serial_number=self.coordinator.droplet.get_sn(),
        )

    @property
    def available(self) -> bool:
        """Get Droplet's availability."""
        return self.coordinator.get_availability()

    @property
    def native_value(self) -> float | str | None:
        """Return the value reported by the sensor."""
        return self.entity_description.value_fn(self.coordinator.droplet)

    @property
    def last_reset(self) -> datetime | None:
        """Return the last reset of the sensor, if applicable."""
        return self.entity_description.last_reset_fn(self.coordinator.droplet)
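Note: the sensors here are description-driven — each entry in SENSORS pairs static metadata with a value_fn, so the single DropletSensor class serves all of them. As an illustration only (the key and the pydroplet accessor below are hypothetical, not part of this diff; UnitOfTemperature would be imported from homeassistant.const alongside the other unit enums), one more sensor would be a single added entry:

    # Hypothetical entry; "water_temperature" and get_water_temperature()
    # are illustrative names, not part of pydroplet or this PR.
    DropletSensorEntityDescription(
        key="water_temperature",
        translation_key="water_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda device: device.get_water_temperature(),
    ),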
homeassistant/components/droplet/strings.json (new file, 46 lines)
@@ -0,0 +1,46 @@
{
  "config": {
    "step": {
      "user": {
        "title": "Configure Droplet integration",
        "description": "Manually enter Droplet's connection details.",
        "data": {
          "ip_address": "[%key:common::config_flow::data::ip%]",
          "code": "Pairing code"
        },
        "data_description": {
          "ip_address": "Droplet's IP address",
          "code": "Code from the Droplet app"
        }
      },
      "confirm": {
        "title": "Confirm association",
        "description": "Enter pairing code to connect to {device_name}.",
        "data": {
          "code": "[%key:component::droplet::config::step::user::data::code%]"
        },
        "data_description": {
          "code": "[%key:component::droplet::config::step::user::data_description::code%]"
        }
      }
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
    }
  },
  "entity": {
    "sensor": {
      "server_connectivity": { "name": "Server status" },
      "signal_quality": { "name": "Signal quality" },
      "current_flow_rate": { "name": "Flow rate" }
    }
  },
  "exceptions": {
    "connection_error": {
      "message": "Disconnected from Droplet"
    }
  }
}
homeassistant/components/ecowitt/sensor.py
@@ -230,6 +230,17 @@ ECOWITT_SENSORS_MAPPING: Final = {
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
+    EcoWittSensorTypes.PM1: SensorEntityDescription(
+        key="PM1",
+        device_class=SensorDeviceClass.PM1,
+        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
+    EcoWittSensorTypes.PM4: SensorEntityDescription(
+        key="PM4",
+        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
 }
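Note the asymmetry above: the PM1 entry carries device_class=SensorDeviceClass.PM1, while the PM4 entry sets no device class — at the time of this change SensorDeviceClass offered no PM4 member, so PM4 is exposed as a plain µg/m³ measurement. A quick way to see this (illustrative check, not part of the diff):

    from homeassistant.components.sensor import SensorDeviceClass

    print(SensorDeviceClass.PM1)              # the pm1 device class exists
    print(hasattr(SensorDeviceClass, "PM4"))  # False at the time of this change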
homeassistant/components/elkm1/sensor.py
@@ -14,7 +14,11 @@ from elkm1_lib.util import pretty_const
 from elkm1_lib.zones import Zone
 import voluptuous as vol

-from homeassistant.components.sensor import SensorEntity
+from homeassistant.components.sensor import (
+    SensorDeviceClass,
+    SensorEntity,
+    SensorStateClass,
+)
 from homeassistant.const import EntityCategory, UnitOfElectricPotential
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
@@ -32,6 +36,16 @@ SERVICE_SENSOR_ZONE_BYPASS = "sensor_zone_bypass"
 SERVICE_SENSOR_ZONE_TRIGGER = "sensor_zone_trigger"
 UNDEFINED_TEMPERATURE = -40

+_DEVICE_CLASS_MAP: dict[ZoneType, SensorDeviceClass] = {
+    ZoneType.TEMPERATURE: SensorDeviceClass.TEMPERATURE,
+    ZoneType.ANALOG_ZONE: SensorDeviceClass.VOLTAGE,
+}
+
+_STATE_CLASS_MAP: dict[ZoneType, SensorStateClass] = {
+    ZoneType.TEMPERATURE: SensorStateClass.MEASUREMENT,
+    ZoneType.ANALOG_ZONE: SensorStateClass.MEASUREMENT,
+}
+
 ELK_SET_COUNTER_SERVICE_SCHEMA: VolDictType = {
     vol.Required(ATTR_VALUE): vol.All(vol.Coerce(int), vol.Range(0, 65535))
 }
@@ -248,6 +262,16 @@ class ElkZone(ElkSensor):
             return self._temperature_unit
         return None

+    @property
+    def device_class(self) -> SensorDeviceClass | None:
+        """Return the device class of the sensor."""
+        return _DEVICE_CLASS_MAP.get(self._element.definition)
+
+    @property
+    def state_class(self) -> SensorStateClass | None:
+        """Return the state class of the sensor."""
+        return _STATE_CLASS_MAP.get(self._element.definition)
+
     @property
     def native_unit_of_measurement(self) -> str | None:
         """Return the unit of measurement."""
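Note: the two module-level maps replace what would otherwise be per-zone if/elif chains with dict lookups, and dict.get returns None for unmapped zone types — exactly what the `SensorDeviceClass | None` / `SensorStateClass | None` property signatures allow. A standalone restatement of the pattern (the enum below is a stand-in for elkm1_lib's ZoneType, not the real class):

    from enum import Enum


    class ZoneType(Enum):  # stand-in for elkm1_lib's ZoneType
        TEMPERATURE = 1
        ANALOG_ZONE = 2
        BURGLAR_ENTRY_EXIT_1 = 3


    DEVICE_CLASS_MAP = {
        ZoneType.TEMPERATURE: "temperature",
        ZoneType.ANALOG_ZONE: "voltage",
    }

    # Unmapped zone types simply yield None; no conditional chain needed.
    print(DEVICE_CLASS_MAP.get(ZoneType.TEMPERATURE))           # temperature
    print(DEVICE_CLASS_MAP.get(ZoneType.BURGLAR_ENTRY_EXIT_1))  # None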
homeassistant/components/energy/sensor.py
@@ -78,7 +78,7 @@ class SourceAdapter:
     """Adapter to allow sources and their flows to be used as sensors."""

     source_type: Literal["grid", "gas", "water"]
-    flow_type: Literal["flow_from", "flow_to", None]
+    flow_type: Literal["flow_from", "flow_to"] | None
     stat_energy_key: Literal["stat_energy_from", "stat_energy_to"]
     total_money_key: Literal["stat_cost", "stat_compensation"]
     name_suffix: str
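Note: the two annotations are equivalent to a type checker — PEP 586 defines Literal[None] as a synonym for None — so this change is purely a standardization on the union spelling. A minimal demonstration:

    from typing import Literal

    # Equivalent for type checkers: Literal["a", None] == Literal["a"] | None
    FlowTypeOld = Literal["flow_from", "flow_to", None]
    FlowTypeNew = Literal["flow_from", "flow_to"] | None

    flow: FlowTypeNew = None  # accepted under either spelling
    flow = "flow_from"        # likewise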
homeassistant/components/enocean/manifest.json
@@ -1,7 +1,7 @@
 {
   "domain": "enocean",
   "name": "EnOcean",
-  "codeowners": ["@bdurrer"],
+  "codeowners": [],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/enocean",
   "iot_class": "local_push",
homeassistant/components/esphome/assist_satellite.py
@@ -127,27 +127,39 @@
             available_wake_words=[], active_wake_words=[], max_active_wake_words=1
         )
+        self._active_pipeline_index = 0

-    @property
-    def pipeline_entity_id(self) -> str | None:
-        """Return the entity ID of the pipeline to use for the next conversation."""
-        assert self._entry_data.device_info is not None
+    def _get_entity_id(self, suffix: str) -> str | None:
+        """Return the entity id for pipeline select, etc."""
+        if self._entry_data.device_info is None:
+            return None
+
         ent_reg = er.async_get(self.hass)
         return ent_reg.async_get_entity_id(
             Platform.SELECT,
             DOMAIN,
-            f"{self._entry_data.device_info.mac_address}-pipeline",
+            f"{self._entry_data.device_info.mac_address}-{suffix}",
         )

+    @property
+    def pipeline_entity_id(self) -> str | None:
+        """Return the entity ID of the primary pipeline to use for the next conversation."""
+        return self.get_pipeline_entity(self._active_pipeline_index)
+
+    def get_pipeline_entity(self, index: int) -> str | None:
+        """Return the entity ID of a pipeline by index."""
+        id_suffix = "" if index < 1 else f"_{index + 1}"
+        return self._get_entity_id(f"pipeline{id_suffix}")
+
+    def get_wake_word_entity(self, index: int) -> str | None:
+        """Return the entity ID of a wake word by index."""
+        id_suffix = "" if index < 1 else f"_{index + 1}"
+        return self._get_entity_id(f"wake_word{id_suffix}")
+
     @property
     def vad_sensitivity_entity_id(self) -> str | None:
         """Return the entity ID of the VAD sensitivity to use for the next conversation."""
-        assert self._entry_data.device_info is not None
-        ent_reg = er.async_get(self.hass)
-        return ent_reg.async_get_entity_id(
-            Platform.SELECT,
-            DOMAIN,
-            f"{self._entry_data.device_info.mac_address}-vad_sensitivity",
-        )
+        return self._get_entity_id("vad_sensitivity")

     @callback
     def async_get_configuration(
@@ -235,6 +247,7 @@ class EsphomeAssistSatellite(
             )
         )

+        assert self._attr_supported_features is not None
         if feature_flags & VoiceAssistantFeature.ANNOUNCE:
             # Device supports announcements
             self._attr_supported_features |= (
@@ -257,8 +270,8 @@ class EsphomeAssistSatellite(

         # Update wake word select when config is updated
         self.async_on_remove(
-            self._entry_data.async_register_assist_satellite_set_wake_word_callback(
-                self.async_set_wake_word
+            self._entry_data.async_register_assist_satellite_set_wake_words_callback(
+                self.async_set_wake_words
             )
         )
@@ -482,8 +495,31 @@ class EsphomeAssistSatellite(
             # ANNOUNCEMENT format from media player
             self._update_tts_format()

-        # Run the pipeline
-        _LOGGER.debug("Running pipeline from %s to %s", start_stage, end_stage)
+        # Run the appropriate pipeline.
+        self._active_pipeline_index = 0
+
+        maybe_pipeline_index = 0
+        while True:
+            if not (ww_entity_id := self.get_wake_word_entity(maybe_pipeline_index)):
+                break
+
+            if not (ww_state := self.hass.states.get(ww_entity_id)):
+                continue
+
+            if ww_state.state == wake_word_phrase:
+                # First match
+                self._active_pipeline_index = maybe_pipeline_index
+                break
+
+            # Try next wake word select
+            maybe_pipeline_index += 1
+
+        _LOGGER.debug(
+            "Running pipeline %s from %s to %s",
+            self._active_pipeline_index + 1,
+            start_stage,
+            end_stage,
+        )
         self._pipeline_task = self.config_entry.async_create_background_task(
             self.hass,
             self.async_accept_pipeline_from_satellite(
@@ -514,6 +550,7 @@ class EsphomeAssistSatellite(
     def handle_pipeline_finished(self) -> None:
         """Handle when pipeline has finished running."""
         self._stop_udp_server()
+        self._active_pipeline_index = 0
         _LOGGER.debug("Pipeline finished")

     def handle_timer_event(
@@ -542,15 +579,15 @@ class EsphomeAssistSatellite(
         self.tts_response_finished()

     @callback
-    def async_set_wake_word(self, wake_word_id: str) -> None:
-        """Set active wake word and update config on satellite."""
-        self._satellite_config.active_wake_words = [wake_word_id]
+    def async_set_wake_words(self, wake_word_ids: list[str]) -> None:
+        """Set active wake words and update config on satellite."""
+        self._satellite_config.active_wake_words = wake_word_ids
         self.config_entry.async_create_background_task(
             self.hass,
             self.async_set_configuration(self._satellite_config),
             "esphome_voice_assistant_set_config",
         )
-        _LOGGER.debug("Setting active wake word: %s", wake_word_id)
+        _LOGGER.debug("Setting active wake word(s): %s", wake_word_ids)

     def _update_tts_format(self) -> None:
         """Update the TTS format from the first media player."""
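Note: the suffix scheme behind _get_entity_id is that index 0 maps to the bare select (pipeline, wake_word) while index N ≥ 1 maps to pipeline_{N+1} / wake_word_{N+1}, matching how the additional select entities are numbered. Restated in isolation (the helper name is mine, not from the diff):

    def select_suffix(base: str, index: int) -> str:
        """Illustrative restatement of the diff's index-to-suffix rule."""
        return base if index < 1 else f"{base}_{index + 1}"

    assert select_suffix("pipeline", 0) == "pipeline"
    assert select_suffix("pipeline", 1) == "pipeline_2"
    assert select_suffix("wake_word", 2) == "wake_word_3"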
homeassistant/components/esphome/climate.py
@@ -55,7 +55,9 @@ from homeassistant.const import (
     UnitOfTemperature,
 )
 from homeassistant.core import callback
+from homeassistant.exceptions import ServiceValidationError

+from .const import DOMAIN
 from .entity import (
     EsphomeEntity,
     convert_api_error_ha_error,
@@ -161,11 +163,9 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
         self._attr_max_temp = static_info.visual_max_temperature
         self._attr_min_humidity = round(static_info.visual_min_humidity)
         self._attr_max_humidity = round(static_info.visual_max_humidity)
-        features = ClimateEntityFeature(0)
+        features = ClimateEntityFeature.TARGET_TEMPERATURE
         if static_info.supports_two_point_target_temperature:
             features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
-        else:
-            features |= ClimateEntityFeature.TARGET_TEMPERATURE
         if static_info.supports_target_humidity:
             features |= ClimateEntityFeature.TARGET_HUMIDITY
         if self.preset_modes:
@@ -253,18 +253,31 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
     @esphome_float_state_property
     def target_temperature(self) -> float | None:
         """Return the temperature we try to reach."""
-        return self._state.target_temperature
+        if (
+            not self._static_info.supports_two_point_target_temperature
+            and self.hvac_mode != HVACMode.AUTO
+        ):
+            return self._state.target_temperature
+        if self.hvac_mode == HVACMode.HEAT:
+            return self._state.target_temperature_low
+        if self.hvac_mode == HVACMode.COOL:
+            return self._state.target_temperature_high
+        return None

     @property
     @esphome_float_state_property
     def target_temperature_low(self) -> float | None:
         """Return the lowbound target temperature we try to reach."""
+        if self.hvac_mode == HVACMode.AUTO:
+            return None
         return self._state.target_temperature_low

     @property
     @esphome_float_state_property
     def target_temperature_high(self) -> float | None:
         """Return the highbound target temperature we try to reach."""
+        if self.hvac_mode == HVACMode.AUTO:
+            return None
         return self._state.target_temperature_high

     @property
@@ -282,7 +295,27 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
                 cast(HVACMode, kwargs[ATTR_HVAC_MODE])
             )
         if ATTR_TEMPERATURE in kwargs:
-            data["target_temperature"] = kwargs[ATTR_TEMPERATURE]
+            if not self._static_info.supports_two_point_target_temperature:
+                data["target_temperature"] = kwargs[ATTR_TEMPERATURE]
+            else:
+                hvac_mode = kwargs.get(ATTR_HVAC_MODE) or self.hvac_mode
+                if hvac_mode == HVACMode.HEAT:
+                    data["target_temperature_low"] = kwargs[ATTR_TEMPERATURE]
+                elif hvac_mode == HVACMode.COOL:
+                    data["target_temperature_high"] = kwargs[ATTR_TEMPERATURE]
+                else:
+                    raise ServiceValidationError(
+                        translation_domain=DOMAIN,
+                        translation_key="action_call_failed",
+                        translation_placeholders={
+                            "call_name": "climate.set_temperature",
+                            "device_name": self._static_info.name,
+                            "error": (
+                                f"Setting target_temperature is only supported in "
+                                f"{HVACMode.HEAT} or {HVACMode.COOL} modes"
+                            ),
+                        },
+                    )
         if ATTR_TARGET_TEMP_LOW in kwargs:
             data["target_temperature_low"] = kwargs[ATTR_TARGET_TEMP_LOW]
         if ATTR_TARGET_TEMP_HIGH in kwargs:
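Note: the reworked target_temperature property resolves to one of three sources depending on device capability and HVAC mode — the single setpoint for one-point devices outside AUTO, otherwise the bound matching the active mode. Restated as a pure function (names are mine; this mirrors, not replaces, the diff's logic):

    def resolve_target_temperature(
        supports_two_point: bool,
        hvac_mode: str,
        single: float | None,
        low: float | None,
        high: float | None,
    ) -> float | None:
        """Single setpoint outside AUTO; otherwise pick the active mode's bound."""
        if not supports_two_point and hvac_mode != "auto":
            return single
        if hvac_mode == "heat":
            return low
        if hvac_mode == "cool":
            return high
        return None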
homeassistant/components/esphome/const.py
@@ -25,3 +25,5 @@ PROJECT_URLS = {
 # ESPHome always uses .0 for the changelog URL
 STABLE_BLE_URL_VERSION = f"{STABLE_BLE_VERSION.major}.{STABLE_BLE_VERSION.minor}.0"
 DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_URL_VERSION}.html"
+
+NO_WAKE_WORD: Final[str] = "no_wake_word"
Some files were not shown because too many files have changed in this diff.