Mirror of https://github.com/home-assistant/core.git, synced 2025-09-23 11:59:37 +00:00
Compare commits
89 Commits
hassfest-e...master
Author | SHA1 | Date
---|---|---
| f3b9bda876 |
| 3f3aaa2815 |
| 6dc7870779 |
| be83416c72 |
| c745ee18eb |
| cf907ae196 |
| 8eee53036a |
| b37237d24b |
| 950e758b62 |
| 9cd940b7df |
| 10b186a20d |
| 757aec1c6b |
| 0b159bdb9c |
| 8728312e87 |
| bbb67db354 |
| 265f5da21a |
| 54859e8a83 |
| c87dba878d |
| 8d8e008123 |
| b30667a469 |
| 8920c548d5 |
| eac719f9af |
| 3499ed7a98 |
| 2c809d5903 |
| 40988198f3 |
| ab5d1d27f1 |
| 1c10b85fed |
| 91a7db08ff |
| a764d54123 |
| dc09e33556 |
| 14173bd9ec |
| d2e7537629 |
| 9a165a64fe |
| 9c749a6abc |
| 2e33222c71 |
| ab1c2c4f70 |
| 529219ae69 |
| d6ce71fa61 |
| e5b67d513a |
| a547179f66 |
| 8c61788a7d |
| 6b934d94db |
| d30ad82774 |
| 4618b33e93 |
| d6299094db |
| 087d9d30c0 |
| f07890cf5c |
| e5b78cc481 |
| 12b409d8e1 |
| def5408db8 |
| f105b45ee2 |
| 9d904c30a7 |
| 99b047939f |
| 3a615908ee |
| baff541f46 |
| 6d8c35cfe9 |
| b8d9883e74 |
| c3c65af450 |
| 3af8616764 |
| 64ec4609c5 |
| c78bc26b83 |
| 0c093646c9 |
| 1b27acdde0 |
| 9dafc0e02f |
| 0091dafcb0 |
| b387acffb7 |
| 36b3133fa2 |
| fe01e96012 |
| 0b56ec16ed |
| ca79f4c963 |
| 9a43f2776d |
| 0cda883b56 |
| ae58e633f0 |
| 06480bfd9d |
| 625f586945 |
| 7dbeaa475d |
| dff3d5f8af |
| 89c335919a |
| 2bb4573357 |
| 7037ce989c |
| bfdd2053ba |
| fcc3f92f8c |
| 8710267d53 |
| 85b6adcc9a |
| beec6e86e0 |
| 3dacffaaf9 |
| d90f2a1de1 |
| b6c9217429 |
| 7fc8da6769 |
@@ -8,8 +8,6 @@
"PYTHONASYNCIODEBUG": "1"
},
"features": {
// Node feature required for Claude Code until fixed https://github.com/anthropics/devcontainer-features/issues/28
"ghcr.io/devcontainers/features/node:1": {},
"ghcr.io/anthropics/devcontainer-features/claude-code:1.0": {},
"ghcr.io/devcontainers/features/github-cli:1": {}
},
10
.github/workflows/builder.yml
vendored
@@ -32,7 +32,7 @@ jobs:
fetch-depth: 0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -116,7 +116,7 @@ jobs:

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -457,7 +457,7 @@ jobs:
uses: actions/checkout@v5.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -480,7 +480,7 @@ jobs:
python -m build

- name: Upload package to PyPI
uses: pypa/gh-action-pypi-publish@v1.13.0
uses: pypa/gh-action-pypi-publish@v1.12.4
with:
skip-existing: true

@@ -531,7 +531,7 @@ jobs:

- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
171
.github/workflows/ci.yaml
vendored
@@ -37,10 +37,10 @@ on:
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
CACHE_VERSION: 8
|
||||
CACHE_VERSION: 7
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 1
|
||||
HA_SHORT_VERSION: "2025.10"
|
||||
HA_SHORT_VERSION: "2025.9"
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
ALL_PYTHON_VERSIONS: "['3.13']"
|
||||
# 10.3 is the oldest supported version
|
||||
@@ -61,9 +61,6 @@ env:
|
||||
POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
|
||||
PRE_COMMIT_CACHE: ~/.cache/pre-commit
|
||||
UV_CACHE_DIR: /tmp/uv-cache
|
||||
APT_CACHE_BASE: /home/runner/work/apt
|
||||
APT_CACHE_DIR: /home/runner/work/apt/cache
|
||||
APT_LIST_CACHE_DIR: /home/runner/work/apt/lists
|
||||
SQLALCHEMY_WARN_20: 1
|
||||
PYTHONASYNCIODEBUG: 1
|
||||
HASS_CI: 1
|
||||
@@ -81,7 +78,6 @@ jobs:
|
||||
core: ${{ steps.core.outputs.changes }}
|
||||
integrations_glob: ${{ steps.info.outputs.integrations_glob }}
|
||||
integrations: ${{ steps.integrations.outputs.changes }}
|
||||
apt_cache_key: ${{ steps.generate_apt_cache_key.outputs.key }}
|
||||
pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
|
||||
python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
|
||||
requirements: ${{ steps.core.outputs.requirements }}
|
||||
@@ -115,10 +111,6 @@ jobs:
|
||||
run: >-
|
||||
echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{
|
||||
hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
|
||||
- name: Generate partial apt restore key
|
||||
id: generate_apt_cache_key
|
||||
run: |
|
||||
echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT
|
||||
- name: Filter for core changes
|
||||
uses: dorny/paths-filter@v3.0.2
|
||||
id: core
|
||||
@@ -257,7 +249,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -302,7 +294,7 @@ jobs:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
@@ -342,7 +334,7 @@ jobs:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
@@ -382,7 +374,7 @@ jobs:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
@@ -492,7 +484,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -523,36 +515,15 @@ jobs:
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
|
||||
env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
|
||||
env.HA_SHORT_VERSION }}-
|
||||
- name: Restore apt cache
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
id: cache-apt
|
||||
uses: actions/cache@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
${{ env.APT_LIST_CACHE_DIR }}
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Install additional OS dependencies
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
|
||||
mkdir -p ${{ env.APT_CACHE_DIR }}
|
||||
mkdir -p ${{ env.APT_LIST_CACHE_DIR }}
|
||||
fi
|
||||
|
||||
sudo apt-get update \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libturbojpeg \
|
||||
libxml2-utils \
|
||||
libavcodec-dev \
|
||||
libavdevice-dev \
|
||||
libavfilter-dev \
|
||||
@@ -562,10 +533,6 @@ jobs:
|
||||
libswresample-dev \
|
||||
libswscale-dev \
|
||||
libudev-dev
|
||||
|
||||
if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
|
||||
sudo chmod -R 755 ${{ env.APT_CACHE_BASE }}
|
||||
fi
|
||||
- name: Create Python virtual environment
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
@@ -610,31 +577,17 @@ jobs:
|
||||
- info
|
||||
- base
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
${{ env.APT_LIST_CACHE_DIR }}
|
||||
fail-on-cache-miss: true
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||
libturbojpeg
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -667,7 +620,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -724,7 +677,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -767,7 +720,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -814,7 +767,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -859,7 +812,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -923,25 +876,11 @@ jobs:
|
||||
- mypy
|
||||
name: Split tests for full run
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
${{ env.APT_LIST_CACHE_DIR }}
|
||||
fail-on-cache-miss: true
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libturbojpeg \
|
||||
@@ -950,7 +889,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -997,25 +936,11 @@ jobs:
|
||||
name: >-
|
||||
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
${{ env.APT_LIST_CACHE_DIR }}
|
||||
fail-on-cache-miss: true
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libturbojpeg \
|
||||
@@ -1025,7 +950,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -1144,25 +1069,11 @@ jobs:
|
||||
name: >-
|
||||
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
${{ env.APT_LIST_CACHE_DIR }}
|
||||
fail-on-cache-miss: true
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libturbojpeg \
|
||||
@@ -1172,7 +1083,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -1298,25 +1209,11 @@ jobs:
|
||||
name: >-
|
||||
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
${{ env.APT_LIST_CACHE_DIR }}
|
||||
fail-on-cache-miss: true
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libturbojpeg \
|
||||
@@ -1328,7 +1225,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -1444,7 +1341,7 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
uses: codecov/codecov-action@v5.5.1
|
||||
uses: codecov/codecov-action@v5.5.0
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
@@ -1473,25 +1370,11 @@ jobs:
|
||||
name: >-
|
||||
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
${{ env.APT_LIST_CACHE_DIR }}
|
||||
fail-on-cache-miss: true
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libturbojpeg \
|
||||
@@ -1501,7 +1384,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -1608,7 +1491,7 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
uses: codecov/codecov-action@v5.5.1
|
||||
uses: codecov/codecov-action@v5.5.0
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v5.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.30.3
uses: github/codeql-action/init@v3.29.11
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.30.3
uses: github/codeql-action/analyze@v3.29.11
with:
category: "/language:python"
@@ -16,7 +16,7 @@ jobs:
steps:
- name: Check if integration label was added and extract details
id: extract
uses: actions/github-script@v8
uses: actions/github-script@v7.0.1
with:
script: |
// Debug: Log the event payload
@@ -113,7 +113,7 @@ jobs:
- name: Fetch similar issues
id: fetch_similar
if: steps.extract.outputs.should_continue == 'true'
uses: actions/github-script@v8
uses: actions/github-script@v7.0.1
env:
INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
@@ -280,7 +280,7 @@ jobs:
- name: Post duplicate detection results
id: post_results
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/github-script@v8
uses: actions/github-script@v7.0.1
env:
AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
@@ -16,7 +16,7 @@ jobs:
steps:
- name: Check issue language
id: detect_language
uses: actions/github-script@v8
uses: actions/github-script@v7.0.1
env:
ISSUE_NUMBER: ${{ github.event.issue.number }}
ISSUE_TITLE: ${{ github.event.issue.title }}
@@ -90,7 +90,7 @@ jobs:

- name: Process non-English issues
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/github-script@v8
uses: actions/github-script@v7.0.1
env:
AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
2
.github/workflows/restrict-task-creation.yml
vendored
@@ -12,7 +12,7 @@ jobs:
if: github.event.issue.type.name == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@v8
uses: actions/github-script@v7
with:
script: |
const issueAuthor = context.payload.issue.user.login;
6
.github/workflows/stale.yml
vendored
@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale
# - No issues (-1)
- name: 60 days stale PRs policy
uses: actions/stale@v10.0.0
uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@v10.0.0
uses: actions/stale@v9.1.0
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@v10.0.0
uses: actions/stale@v9.1.0
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"
2
.github/workflows/translations.yml
vendored
@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v5.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
2
.github/workflows/wheels.yml
vendored
@@ -36,7 +36,7 @@ jobs:

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.13.0
rev: v0.12.1
hooks:
- id: ruff-check
args:
@@ -169,7 +169,6 @@ homeassistant.components.dnsip.*
homeassistant.components.doorbird.*
homeassistant.components.dormakaba_dkey.*
homeassistant.components.downloader.*
homeassistant.components.droplet.*
homeassistant.components.dsmr.*
homeassistant.components.duckdns.*
homeassistant.components.dunehd.*
@@ -308,7 +307,6 @@ homeassistant.components.ld2410_ble.*
homeassistant.components.led_ble.*
homeassistant.components.lektrico.*
homeassistant.components.letpot.*
homeassistant.components.libre_hardware_monitor.*
homeassistant.components.lidarr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
@@ -384,7 +382,6 @@ homeassistant.components.openai_conversation.*
homeassistant.components.openexchangerates.*
homeassistant.components.opensky.*
homeassistant.components.openuv.*
homeassistant.components.opnsense.*
homeassistant.components.opower.*
homeassistant.components.oralb.*
homeassistant.components.otbr.*
@@ -402,7 +399,6 @@ homeassistant.components.person.*
homeassistant.components.pi_hole.*
homeassistant.components.ping.*
homeassistant.components.plugwise.*
homeassistant.components.portainer.*
homeassistant.components.powerfox.*
homeassistant.components.powerwall.*
homeassistant.components.private_ble_device.*
@@ -462,7 +458,6 @@ homeassistant.components.sensorpush_cloud.*
homeassistant.components.sensoterra.*
homeassistant.components.senz.*
homeassistant.components.sfr_box.*
homeassistant.components.sftp_storage.*
homeassistant.components.shell_command.*
homeassistant.components.shelly.*
homeassistant.components.shopping_list.*
64
CODEOWNERS
generated
@@ -154,10 +154,10 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/arve/ @ikalnyi
|
||||
/homeassistant/components/aseko_pool_live/ @milanmeu
|
||||
/tests/components/aseko_pool_live/ @milanmeu
|
||||
/homeassistant/components/assist_pipeline/ @synesthesiam @arturpragacz
|
||||
/tests/components/assist_pipeline/ @synesthesiam @arturpragacz
|
||||
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
|
||||
/tests/components/assist_pipeline/ @balloob @synesthesiam
|
||||
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
|
||||
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
|
||||
/homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
|
||||
/tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
|
||||
/homeassistant/components/atag/ @MatsNL
|
||||
@@ -298,8 +298,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/configurator/ @home-assistant/core
|
||||
/homeassistant/components/control4/ @lawtancool
|
||||
/tests/components/control4/ @lawtancool
|
||||
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/tests/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam
|
||||
/tests/components/conversation/ @home-assistant/core @synesthesiam
|
||||
/homeassistant/components/cookidoo/ @miaucl
|
||||
/tests/components/cookidoo/ @miaucl
|
||||
/homeassistant/components/coolmaster/ @OnFreund
|
||||
@@ -377,8 +377,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/dremel_3d_printer/ @tkdrob
|
||||
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
|
||||
/tests/components/drop_connect/ @ChandlerSystems @pfrazer
|
||||
/homeassistant/components/droplet/ @sarahseidman
|
||||
/tests/components/droplet/ @sarahseidman
|
||||
/homeassistant/components/dsmr/ @Robbie1221
|
||||
/tests/components/dsmr/ @Robbie1221
|
||||
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
|
||||
@@ -466,6 +464,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/eufylife_ble/ @bdr99
|
||||
/homeassistant/components/event/ @home-assistant/core
|
||||
/tests/components/event/ @home-assistant/core
|
||||
/homeassistant/components/evil_genius_labs/ @balloob
|
||||
/tests/components/evil_genius_labs/ @balloob
|
||||
/homeassistant/components/evohome/ @zxdavb
|
||||
/tests/components/evohome/ @zxdavb
|
||||
/homeassistant/components/ezviz/ @RenierM26
|
||||
@@ -515,8 +515,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/forked_daapd/ @uvjustin
|
||||
/tests/components/forked_daapd/ @uvjustin
|
||||
/homeassistant/components/fortios/ @kimfrellsen
|
||||
/homeassistant/components/foscam/ @Foscam-wangzhengyu
|
||||
/tests/components/foscam/ @Foscam-wangzhengyu
|
||||
/homeassistant/components/foscam/ @krmarien
|
||||
/tests/components/foscam/ @krmarien
|
||||
/homeassistant/components/freebox/ @hacf-fr @Quentame
|
||||
/tests/components/freebox/ @hacf-fr @Quentame
|
||||
/homeassistant/components/freedompro/ @stefano055415
|
||||
@@ -650,8 +650,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/homeassistant/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_alerts/ @home-assistant/core
|
||||
/tests/components/homeassistant_alerts/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_connect_zbt2/ @home-assistant/core
|
||||
/tests/components/homeassistant_connect_zbt2/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_green/ @home-assistant/core
|
||||
/tests/components/homeassistant_green/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_hardware/ @home-assistant/core
|
||||
@@ -680,8 +678,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/http/ @home-assistant/core
|
||||
/homeassistant/components/huawei_lte/ @scop @fphammerle
|
||||
/tests/components/huawei_lte/ @scop @fphammerle
|
||||
/homeassistant/components/hue/ @marcelveldt
|
||||
/tests/components/hue/ @marcelveldt
|
||||
/homeassistant/components/hue/ @balloob @marcelveldt
|
||||
/tests/components/hue/ @balloob @marcelveldt
|
||||
/homeassistant/components/huisbaasje/ @dennisschroer
|
||||
/tests/components/huisbaasje/ @dennisschroer
|
||||
/homeassistant/components/humidifier/ @home-assistant/core @Shulyaka
|
||||
@@ -753,8 +751,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/integration/ @dgomes
|
||||
/homeassistant/components/intellifire/ @jeeftor
|
||||
/tests/components/intellifire/ @jeeftor
|
||||
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/homeassistant/components/intent/ @home-assistant/core @synesthesiam
|
||||
/tests/components/intent/ @home-assistant/core @synesthesiam
|
||||
/homeassistant/components/intesishome/ @jnimmo
|
||||
/homeassistant/components/iometer/ @MaestroOnICe
|
||||
/tests/components/iometer/ @MaestroOnICe
|
||||
@@ -862,8 +860,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/lg_netcast/ @Drafteed @splinter98
|
||||
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
|
||||
/tests/components/lg_thinq/ @LG-ThinQ-Integration
|
||||
/homeassistant/components/libre_hardware_monitor/ @Sab44
|
||||
/tests/components/libre_hardware_monitor/ @Sab44
|
||||
/homeassistant/components/lidarr/ @tkdrob
|
||||
/tests/components/lidarr/ @tkdrob
|
||||
/homeassistant/components/lifx/ @Djelibeybi
|
||||
@@ -1112,6 +1108,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/open_meteo/ @frenck
|
||||
/homeassistant/components/open_router/ @joostlek
|
||||
/tests/components/open_router/ @joostlek
|
||||
/homeassistant/components/openai_conversation/ @balloob
|
||||
/tests/components/openai_conversation/ @balloob
|
||||
/homeassistant/components/openerz/ @misialq
|
||||
/tests/components/openerz/ @misialq
|
||||
/homeassistant/components/openexchangerates/ @MartinHjelmare
|
||||
@@ -1191,8 +1189,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/pooldose/ @lmaertin
|
||||
/homeassistant/components/poolsense/ @haemishkyd
|
||||
/tests/components/poolsense/ @haemishkyd
|
||||
/homeassistant/components/portainer/ @erwindouna
|
||||
/tests/components/portainer/ @erwindouna
|
||||
/homeassistant/components/powerfox/ @klaasnicolaas
|
||||
/tests/components/powerfox/ @klaasnicolaas
|
||||
/homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
|
||||
@@ -1212,6 +1208,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/proximity/ @mib1185
|
||||
/tests/components/proximity/ @mib1185
|
||||
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno
|
||||
/homeassistant/components/prusalink/ @balloob
|
||||
/tests/components/prusalink/ @balloob
|
||||
/homeassistant/components/ps4/ @ktnrg45
|
||||
/tests/components/ps4/ @ktnrg45
|
||||
/homeassistant/components/pterodactyl/ @elmurato
|
||||
@@ -1305,8 +1303,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/rflink/ @javicalle
|
||||
/homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
|
||||
/tests/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
|
||||
/homeassistant/components/rhasspy/ @synesthesiam
|
||||
/tests/components/rhasspy/ @synesthesiam
|
||||
/homeassistant/components/rhasspy/ @balloob @synesthesiam
|
||||
/tests/components/rhasspy/ @balloob @synesthesiam
|
||||
/homeassistant/components/ridwell/ @bachya
|
||||
/tests/components/ridwell/ @bachya
|
||||
/homeassistant/components/ring/ @sdb9696
|
||||
@@ -1394,14 +1392,12 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/seventeentrack/ @shaiu
|
||||
/homeassistant/components/sfr_box/ @epenet
|
||||
/tests/components/sfr_box/ @epenet
|
||||
/homeassistant/components/sftp_storage/ @maretodoric
|
||||
/tests/components/sftp_storage/ @maretodoric
|
||||
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
|
||||
/tests/components/sharkiq/ @JeffResc @funkybunch
|
||||
/homeassistant/components/shell_command/ @home-assistant/core
|
||||
/tests/components/shell_command/ @home-assistant/core
|
||||
/homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
|
||||
/tests/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
|
||||
/homeassistant/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
|
||||
/tests/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
|
||||
/homeassistant/components/shodan/ @fabaff
|
||||
/homeassistant/components/sia/ @eavanvalkenburg
|
||||
/tests/components/sia/ @eavanvalkenburg
|
||||
@@ -1548,8 +1544,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/systemmonitor/ @gjohansson-ST
|
||||
/homeassistant/components/tado/ @erwindouna
|
||||
/tests/components/tado/ @erwindouna
|
||||
/homeassistant/components/tag/ @home-assistant/core
|
||||
/tests/components/tag/ @home-assistant/core
|
||||
/homeassistant/components/tag/ @balloob @dmulcahey
|
||||
/tests/components/tag/ @balloob @dmulcahey
|
||||
/homeassistant/components/tailscale/ @frenck
|
||||
/tests/components/tailscale/ @frenck
|
||||
/homeassistant/components/tailwind/ @frenck
|
||||
@@ -1701,8 +1697,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/versasense/ @imstevenxyz
|
||||
/homeassistant/components/version/ @ludeeus
|
||||
/tests/components/version/ @ludeeus
|
||||
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
|
||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
|
||||
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
|
||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
|
||||
/homeassistant/components/vicare/ @CFenner
|
||||
/tests/components/vicare/ @CFenner
|
||||
/homeassistant/components/vilfo/ @ManneW
|
||||
@@ -1714,8 +1710,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
|
||||
/homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
|
||||
/tests/components/vodafone_station/ @paoloantinori @chemelli74
|
||||
/homeassistant/components/voip/ @synesthesiam @jaminh
|
||||
/tests/components/voip/ @synesthesiam @jaminh
|
||||
/homeassistant/components/voip/ @balloob @synesthesiam @jaminh
|
||||
/tests/components/voip/ @balloob @synesthesiam @jaminh
|
||||
/homeassistant/components/volumio/ @OnFreund
|
||||
/tests/components/volumio/ @OnFreund
|
||||
/homeassistant/components/volvo/ @thomasddn
|
||||
@@ -1786,8 +1782,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/worldclock/ @fabaff
|
||||
/homeassistant/components/ws66i/ @ssaenger
|
||||
/tests/components/ws66i/ @ssaenger
|
||||
/homeassistant/components/wyoming/ @synesthesiam
|
||||
/tests/components/wyoming/ @synesthesiam
|
||||
/homeassistant/components/wyoming/ @balloob @synesthesiam
|
||||
/tests/components/wyoming/ @balloob @synesthesiam
|
||||
/homeassistant/components/xbox/ @hunterjm
|
||||
/tests/components/xbox/ @hunterjm
|
||||
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
|
@@ -3,7 +3,8 @@ FROM mcr.microsoft.com/vscode/devcontainers/base:debian
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

RUN \
apt-get update \
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
# Additional library needed by some tests and accordingly by VScode Tests Discovery
bluez \
10
build.yaml
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -187,42 +187,36 @@ def main() -> int:

from . import config, runner # noqa: PLC0415

# Ensure only one instance runs per config directory
with runner.ensure_single_execution(config_dir) as single_execution_lock:
# Check if another instance is already running
if single_execution_lock.exit_code is not None:
return single_execution_lock.exit_code
safe_mode = config.safe_mode_enabled(config_dir)

safe_mode = config.safe_mode_enabled(config_dir)
runtime_conf = runner.RuntimeConfig(
config_dir=config_dir,
verbose=args.verbose,
log_rotate_days=args.log_rotate_days,
log_file=args.log_file,
log_no_color=args.log_no_color,
skip_pip=args.skip_pip,
skip_pip_packages=args.skip_pip_packages,
recovery_mode=args.recovery_mode,
debug=args.debug,
open_ui=args.open_ui,
safe_mode=safe_mode,
)

runtime_conf = runner.RuntimeConfig(
config_dir=config_dir,
verbose=args.verbose,
log_rotate_days=args.log_rotate_days,
log_file=args.log_file,
log_no_color=args.log_no_color,
skip_pip=args.skip_pip,
skip_pip_packages=args.skip_pip_packages,
recovery_mode=args.recovery_mode,
debug=args.debug,
open_ui=args.open_ui,
safe_mode=safe_mode,
)
fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
faulthandler.enable(fault_file)
exit_code = runner.run(runtime_conf)
faulthandler.disable()

fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
faulthandler.enable(fault_file)
exit_code = runner.run(runtime_conf)
faulthandler.disable()
# It's possible for the fault file to disappear, so suppress obvious errors
with suppress(FileNotFoundError):
if os.path.getsize(fault_file_name) == 0:
os.remove(fault_file_name)

# It's possible for the fault file to disappear, so suppress obvious errors
with suppress(FileNotFoundError):
if os.path.getsize(fault_file_name) == 0:
os.remove(fault_file_name)
check_threads()

check_threads()

return exit_code
return exit_code


if __name__ == "__main__":
@@ -27,7 +27,7 @@ from . import (
SetupFlow,
)

REQUIREMENTS = ["pyotp==2.9.0"]
REQUIREMENTS = ["pyotp==2.8.0"]

CONF_MESSAGE = "message"

@@ -20,7 +20,7 @@ from . import (
SetupFlow,
)

REQUIREMENTS = ["pyotp==2.9.0", "PyQRCode==1.2.1"]
REQUIREMENTS = ["pyotp==2.8.0", "PyQRCode==1.2.1"]

CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA)
@@ -1,5 +1,5 @@
{
"domain": "fritzbox",
"name": "FRITZ!",
"name": "FRITZ!Box",
"integrations": ["fritz", "fritzbox", "fritzbox_callmonitor"]
}

@@ -6,6 +6,7 @@
"google_assistant_sdk",
"google_cloud",
"google_drive",
"google_gemini",
"google_generative_ai_conversation",
"google_mail",
"google_maps",
@@ -50,7 +50,6 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(
accuweather.location_key, raise_on_progress=False
)
self._abort_if_unique_id_configured()

return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input

@@ -69,5 +69,5 @@ POLLEN_CATEGORY_MAP = {
4: "very_high",
5: "extreme",
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
@@ -7,5 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.2.1"]
"requirements": ["accuweather==4.2.1"],
"single_config_entry": true
}

@@ -17,9 +17,6 @@
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
"requests_exceeded": "The allowed number of requests to the AccuWeather API has been exceeded. You have to wait or change the API key."
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]"
}
},
"entity": {
@@ -126,7 +126,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
schema=vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_TASK_NAME): cv.string,
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
|
||||
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
|
||||
vol.Required(ATTR_INSTRUCTIONS): cv.string,
|
||||
vol.Optional(ATTR_ATTACHMENTS): vol.All(
|
||||
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
|
||||
@@ -163,10 +163,9 @@ async def async_service_generate_image(call: ServiceCall) -> ServiceResponse:
|
||||
class AITaskPreferences:
|
||||
"""AI Task preferences."""
|
||||
|
||||
KEYS = ("gen_data_entity_id", "gen_image_entity_id")
|
||||
KEYS = ("gen_data_entity_id",)
|
||||
|
||||
gen_data_entity_id: str | None = None
|
||||
gen_image_entity_id: str | None = None
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the preferences."""
|
||||
@@ -180,21 +179,17 @@ class AITaskPreferences:
|
||||
if data is None:
|
||||
return
|
||||
for key in self.KEYS:
|
||||
setattr(self, key, data.get(key))
|
||||
setattr(self, key, data[key])
|
||||
|
||||
@callback
|
||||
def async_set_preferences(
|
||||
self,
|
||||
*,
|
||||
gen_data_entity_id: str | None | UndefinedType = UNDEFINED,
|
||||
gen_image_entity_id: str | None | UndefinedType = UNDEFINED,
|
||||
) -> None:
|
||||
"""Set the preferences."""
|
||||
changed = False
|
||||
for key, value in (
|
||||
("gen_data_entity_id", gen_data_entity_id),
|
||||
("gen_image_entity_id", gen_image_entity_id),
|
||||
):
|
||||
for key, value in (("gen_data_entity_id", gen_data_entity_id),):
|
||||
if value is not UNDEFINED:
|
||||
if getattr(self, key) != value:
|
||||
setattr(self, key, value)
|
||||
@@ -216,6 +211,7 @@ class ImageView(HomeAssistantView):
|
||||
|
||||
url = f"/api/{DOMAIN}/images/{{filename}}"
|
||||
name = f"api:{DOMAIN}/images"
|
||||
requires_auth = False
|
||||
|
||||
async def get(
|
||||
self,
|
||||
|
@@ -60,10 +60,6 @@ class AITaskEntity(RestoreEntity):
|
||||
task: GenDataTask | GenImageTask,
|
||||
) -> AsyncGenerator[ChatLog]:
|
||||
"""Context manager used to manage the ChatLog used during an AI Task."""
|
||||
user_llm_hass_api: llm.API | None = None
|
||||
if isinstance(task, GenDataTask):
|
||||
user_llm_hass_api = task.llm_api
|
||||
|
||||
# pylint: disable-next=contextmanager-generator-missing-cleanup
|
||||
with (
|
||||
async_get_chat_log(
|
||||
@@ -81,7 +77,6 @@ class AITaskEntity(RestoreEntity):
|
||||
device_id=None,
|
||||
),
|
||||
user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
|
||||
user_llm_hass_api=user_llm_hass_api,
|
||||
)
|
||||
|
||||
chat_log.async_add_user_content(
|
||||
|
@@ -37,7 +37,6 @@ def websocket_get_preferences(
|
||||
{
|
||||
vol.Required("type"): "ai_task/preferences/set",
|
||||
vol.Optional("gen_data_entity_id"): vol.Any(str, None),
|
||||
vol.Optional("gen_image_entity_id"): vol.Any(str, None),
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
|
@@ -2,10 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from homeassistant.components.http.auth import async_sign_path
|
||||
from homeassistant.components.media_player import BrowseError, MediaClass
|
||||
from homeassistant.components.media_source import (
|
||||
BrowseMediaSource,
|
||||
@@ -16,7 +14,7 @@ from homeassistant.components.media_source import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME
|
||||
from .const import DATA_IMAGES, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -45,14 +43,7 @@ class ImageMediaSource(MediaSource):
|
||||
if image is None:
|
||||
raise Unresolvable(f"Could not resolve media item: {item.identifier}")
|
||||
|
||||
return PlayMedia(
|
||||
async_sign_path(
|
||||
self.hass,
|
||||
f"/api/{DOMAIN}/images/{item.identifier}",
|
||||
timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
|
||||
),
|
||||
image.mime_type,
|
||||
)
|
||||
return PlayMedia(f"/api/{DOMAIN}/images/{item.identifier}", image.mime_type)
|
||||
|
||||
async def async_browse_media(
|
||||
self,
|
||||
|
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
import mimetypes
|
||||
from pathlib import Path
|
||||
@@ -13,12 +13,11 @@ from typing import Any
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import camera, conversation, media_source
|
||||
from homeassistant.components.http.auth import async_sign_path
|
||||
from homeassistant.core import HomeAssistant, ServiceResponse, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.network import get_url
|
||||
from homeassistant.util import RE_SANITIZE_FILENAME, slugify
|
||||
|
||||
from .const import (
|
||||
@@ -116,7 +115,6 @@ async def async_generate_data(
|
||||
instructions: str,
|
||||
structure: vol.Schema | None = None,
|
||||
attachments: list[dict] | None = None,
|
||||
llm_api: llm.API | None = None,
|
||||
) -> GenDataTaskResult:
|
||||
"""Run a data generation task in the AI Task integration."""
|
||||
if entity_id is None:
|
||||
@@ -152,7 +150,6 @@ async def async_generate_data(
|
||||
instructions=instructions,
|
||||
structure=structure,
|
||||
attachments=resolved_attachments or None,
|
||||
llm_api=llm_api,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -179,17 +176,11 @@ async def async_generate_image(
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
task_name: str,
|
||||
entity_id: str | None = None,
|
||||
entity_id: str,
|
||||
instructions: str,
|
||||
attachments: list[dict] | None = None,
|
||||
) -> ServiceResponse:
|
||||
"""Run an image generation task in the AI Task integration."""
|
||||
if entity_id is None:
|
||||
entity_id = hass.data[DATA_PREFERENCES].gen_image_entity_id
|
||||
|
||||
if entity_id is None:
|
||||
raise HomeAssistantError("No entity_id provided and no preferred entity set")
|
||||
|
||||
entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
|
||||
if entity is None:
|
||||
raise HomeAssistantError(f"AI Task entity {entity_id} not found")
|
||||
@@ -248,11 +239,7 @@ async def async_generate_image(
|
||||
if IMAGE_EXPIRY_TIME > 0:
|
||||
async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))
|
||||
|
||||
service_result["url"] = async_sign_path(
|
||||
hass,
|
||||
f"/api/{DOMAIN}/images/{filename}",
|
||||
timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
|
||||
)
|
||||
service_result["url"] = get_url(hass) + f"/api/{DOMAIN}/images/{filename}"
|
||||
service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"
|
||||
|
||||
return service_result
|
||||
@@ -274,9 +261,6 @@ class GenDataTask:
|
||||
attachments: list[conversation.Attachment] | None = None
|
||||
"""List of attachments to go along the instructions."""
|
||||
|
||||
llm_api: llm.API | None = None
|
||||
"""API to provide to the LLM."""
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Return task as a string."""
|
||||
return f"<GenDataTask {self.name}: {id(self)}>"
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from airos.airos8 import AirOS8
|
||||
from airos.airos8 import AirOS
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -23,7 +23,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(hass, verify_ssl=False)
|
||||
|
||||
airos_device = AirOS8(
|
||||
airos_device = AirOS(
|
||||
host=entry.data[CONF_HOST],
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
|
@@ -15,7 +15,7 @@ from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
|
||||
from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
|
||||
from .entity import AirOSEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -27,7 +27,7 @@ PARALLEL_UPDATES = 0
|
||||
class AirOSBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Describe an AirOS binary sensor."""
|
||||
|
||||
value_fn: Callable[[AirOS8Data], bool]
|
||||
value_fn: Callable[[AirOSData], bool]
|
||||
|
||||
|
||||
BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
|
||||
|
@@ -19,7 +19,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirOS8
|
||||
from .coordinator import AirOS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -48,7 +48,7 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(self.hass, verify_ssl=False)
|
||||
|
||||
airos_device = AirOS8(
|
||||
airos_device = AirOS(
|
||||
host=user_input[CONF_HOST],
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
|
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from airos.airos8 import AirOS8, AirOS8Data
|
||||
from airos.airos8 import AirOS, AirOSData
|
||||
from airos.exceptions import (
|
||||
AirOSConnectionAuthenticationError,
|
||||
AirOSConnectionSetupError,
|
||||
@@ -24,13 +24,13 @@ _LOGGER = logging.getLogger(__name__)
|
||||
type AirOSConfigEntry = ConfigEntry[AirOSDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
|
||||
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSData]):
|
||||
"""Class to manage fetching AirOS data from single endpoint."""
|
||||
|
||||
config_entry: AirOSConfigEntry
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS8
|
||||
self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
self.airos_device = airos_device
|
||||
@@ -42,7 +42,7 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> AirOS8Data:
|
||||
async def _async_update_data(self) -> AirOSData:
|
||||
"""Fetch data from AirOS."""
|
||||
try:
|
||||
await self.airos_device.login()
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airos",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["airos==0.5.1"]
|
||||
"requirements": ["airos==0.4.4"]
|
||||
}
|
||||
|
@@ -26,7 +26,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
|
||||
from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
|
||||
from .entity import AirOSEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -42,7 +42,7 @@ PARALLEL_UPDATES = 0
|
||||
class AirOSSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describe an AirOS sensor."""
|
||||
|
||||
value_fn: Callable[[AirOS8Data], StateType]
|
||||
value_fn: Callable[[AirOSData], StateType]
|
||||
|
||||
|
||||
SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
|
||||
|
@@ -11,5 +11,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairzone"],
|
||||
"requirements": ["aioairzone==1.0.1"]
|
||||
"requirements": ["aioairzone==1.0.0"]
|
||||
}
|
||||
|
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from genie_partner_sdk.client import AladdinConnectClient
|
||||
from genie_partner_sdk.model import GarageDoor
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -36,22 +35,7 @@ async def async_setup_entry(
|
||||
api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
|
||||
)
|
||||
|
||||
sdk_doors = await client.get_doors()
|
||||
|
||||
# Convert SDK GarageDoor objects to integration GarageDoor objects
|
||||
doors = [
|
||||
GarageDoor(
|
||||
{
|
||||
"device_id": door.device_id,
|
||||
"door_number": door.door_number,
|
||||
"name": door.name,
|
||||
"status": door.status,
|
||||
"link_status": door.link_status,
|
||||
"battery_level": door.battery_level,
|
||||
}
|
||||
)
|
||||
for door in sdk_doors
|
||||
]
|
||||
doors = await client.get_doors()
|
||||
|
||||
entry.runtime_data = {
|
||||
door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)
|
||||
|
@@ -41,4 +41,10 @@ class AladdinConnectCoordinator(DataUpdateCoordinator[GarageDoor]):
|
||||
async def _async_update_data(self) -> GarageDoor:
|
||||
"""Fetch data from the Aladdin Connect API."""
|
||||
await self.client.update_door(self.data.device_id, self.data.door_number)
|
||||
self.data.status = self.client.get_door_status(
|
||||
self.data.device_id, self.data.door_number
|
||||
)
|
||||
self.data.battery_level = self.client.get_battery_status(
|
||||
self.data.device_id, self.data.door_number
|
||||
)
|
||||
return self.data
|
||||
|
@@ -49,7 +49,9 @@ class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
|
||||
@property
|
||||
def is_closed(self) -> bool | None:
|
||||
"""Update is closed attribute."""
|
||||
return self.coordinator.data.status == "closed"
|
||||
if (status := self.coordinator.data.status) is None:
|
||||
return None
|
||||
return status == "closed"
|
||||
|
||||
@property
|
||||
def is_closing(self) -> bool | None:
|
||||
|
@@ -4,13 +4,8 @@
|
||||
"codeowners": ["@swcloudgenie"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["application_credentials"],
|
||||
"dhcp": [
|
||||
{
|
||||
"hostname": "gdocntl-*"
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["genie-partner-sdk==1.0.10"]
|
||||
"requirements": ["genie-partner-sdk==1.0.11"]
|
||||
}
|
||||
|
@@ -7,9 +7,6 @@
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
"description": "Aladdin Connect needs to re-authenticate your account"
|
||||
},
|
||||
"oauth_discovery": {
|
||||
"description": "Home Assistant has found an Aladdin Connect device on your network. Press **Submit** to continue setting up Aladdin Connect."
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
|
@@ -61,7 +61,7 @@ ALARM_SERVICE_SCHEMA: Final = make_entity_service_schema(
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the alarm control panel component."""
|
||||
"""Track states and offer events for sensors."""
|
||||
component = hass.data[DATA_COMPONENT] = EntityComponent[AlarmControlPanelEntity](
|
||||
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
|
||||
)
|
||||
|
@@ -1,7 +1,4 @@
|
||||
"""Support for repeating alerts when conditions are met.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Support for repeating alerts when conditions are met."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -66,10 +63,7 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Alert component.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Set up the Alert component."""
|
||||
component = EntityComponent[AlertEntity](LOGGER, DOMAIN, hass)
|
||||
|
||||
entities: list[AlertEntity] = []
|
||||
|
@@ -1,7 +1,4 @@
|
||||
"""Support for repeating alerts when conditions are met.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Support for repeating alerts when conditions are met."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -30,10 +27,7 @@ from .const import DOMAIN, LOGGER
|
||||
|
||||
|
||||
class AlertEntity(Entity):
|
||||
"""Representation of an alert.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Representation of an alert."""
|
||||
|
||||
_attr_should_poll = False
|
||||
|
||||
|
@@ -1,7 +1,4 @@
|
||||
"""Reproduce an Alert state.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Reproduce an Alert state."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@@ -14,7 +14,6 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
|
||||
@@ -49,13 +48,12 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
entry.data[CONF_PASSWORD],
|
||||
entry.data[CONF_LOGIN_DATA],
|
||||
)
|
||||
self.previous_devices: set[str] = set()
|
||||
|
||||
async def _async_update_data(self) -> dict[str, AmazonDevice]:
|
||||
"""Update device data."""
|
||||
try:
|
||||
await self.api.login_mode_stored_data()
|
||||
data = await self.api.get_devices_data()
|
||||
return await self.api.get_devices_data()
|
||||
except CannotConnect as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
@@ -74,31 +72,3 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
translation_key="invalid_auth",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
else:
|
||||
current_devices = set(data.keys())
|
||||
if stale_devices := self.previous_devices - current_devices:
|
||||
await self._async_remove_device_stale(stale_devices)
|
||||
|
||||
self.previous_devices = current_devices
|
||||
return data
|
||||
|
||||
async def _async_remove_device_stale(
|
||||
self,
|
||||
stale_devices: set[str],
|
||||
) -> None:
|
||||
"""Remove stale device."""
|
||||
device_registry = dr.async_get(self.hass)
|
||||
|
||||
for serial_num in stale_devices:
|
||||
_LOGGER.debug(
|
||||
"Detected change in devices: serial %s removed",
|
||||
serial_num,
|
||||
)
|
||||
device = device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, serial_num)}
|
||||
)
|
||||
if device:
|
||||
device_registry.async_update_device(
|
||||
device_id=device.id,
|
||||
remove_config_entry_id=self.config_entry.entry_id,
|
||||
)
|
||||
|
@@ -64,7 +64,9 @@ rules:
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: no known use cases for repair issues or flows, yet
|
||||
stale-devices: done
|
||||
stale-devices:
|
||||
status: todo
|
||||
comment: automate the cleanup process
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
|
@@ -104,6 +104,10 @@
|
||||
"sound": {
|
||||
"name": "Alexa Skill sound file",
|
||||
"description": "The sound file to play."
|
||||
},
|
||||
"sound_variant": {
|
||||
"name": "Sound variant",
|
||||
"description": "The variant of the sound to play."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -24,12 +24,7 @@ from homeassistant.components.recorder import (
|
||||
get_instance as get_recorder_instance,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_IGNORE
|
||||
from homeassistant.const import (
|
||||
ATTR_ASSUMED_STATE,
|
||||
ATTR_DOMAIN,
|
||||
BASE_PLATFORMS,
|
||||
__version__ as HA_VERSION,
|
||||
)
|
||||
from homeassistant.const import ATTR_DOMAIN, BASE_PLATFORMS, __version__ as HA_VERSION
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
@@ -394,117 +389,66 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
|
||||
|
||||
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
"""Return detailed information about entities and devices."""
|
||||
integrations_info: dict[str, dict[str, Any]] = {}
|
||||
|
||||
"""Return the devices payload."""
|
||||
devices: list[dict[str, Any]] = []
|
||||
dev_reg = dr.async_get(hass)
|
||||
# Devices that need via device info set
|
||||
new_indexes: dict[str, int] = {}
|
||||
via_devices: dict[str, str] = {}
|
||||
|
||||
# We need to refer to other devices, for example in `via_device` field.
|
||||
# We don't however send the original device ids outside of Home Assistant,
|
||||
# instead we refer to devices by (integration_domain, index_in_integration_device_list).
|
||||
device_id_mapping: dict[str, tuple[str, int]] = {}
|
||||
seen_integrations = set()
|
||||
|
||||
for device_entry in dev_reg.devices.values():
|
||||
if not device_entry.primary_config_entry:
|
||||
for device in dev_reg.devices.values():
|
||||
if not device.primary_config_entry:
|
||||
continue
|
||||
|
||||
config_entry = hass.config_entries.async_get_entry(
|
||||
device_entry.primary_config_entry
|
||||
)
|
||||
config_entry = hass.config_entries.async_get_entry(device.primary_config_entry)
|
||||
|
||||
if config_entry is None:
|
||||
continue
|
||||
|
||||
integration_domain = config_entry.domain
|
||||
integration_info = integrations_info.setdefault(
|
||||
integration_domain, {"devices": [], "entities": []}
|
||||
)
|
||||
seen_integrations.add(config_entry.domain)
|
||||
|
||||
devices_info = integration_info["devices"]
|
||||
|
||||
device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
|
||||
|
||||
devices_info.append(
|
||||
new_indexes[device.id] = len(devices)
|
||||
devices.append(
|
||||
{
|
||||
"entities": [],
|
||||
"entry_type": device_entry.entry_type,
|
||||
"has_configuration_url": device_entry.configuration_url is not None,
|
||||
"hw_version": device_entry.hw_version,
|
||||
"manufacturer": device_entry.manufacturer,
|
||||
"model": device_entry.model,
|
||||
"model_id": device_entry.model_id,
|
||||
"sw_version": device_entry.sw_version,
|
||||
"via_device": device_entry.via_device_id,
|
||||
"integration": config_entry.domain,
|
||||
"manufacturer": device.manufacturer,
|
||||
"model_id": device.model_id,
|
||||
"model": device.model,
|
||||
"sw_version": device.sw_version,
|
||||
"hw_version": device.hw_version,
|
||||
"has_configuration_url": device.configuration_url is not None,
|
||||
"via_device": None,
|
||||
"entry_type": device.entry_type.value if device.entry_type else None,
|
||||
}
|
||||
)
|
||||
|
||||
# Fill out via_device with new device ids
|
||||
for integration_info in integrations_info.values():
|
||||
for device_info in integration_info["devices"]:
|
||||
if device_info["via_device"] is None:
|
||||
continue
|
||||
device_info["via_device"] = device_id_mapping.get(device_info["via_device"])
|
||||
if device.via_device_id:
|
||||
via_devices[device.id] = device.via_device_id
|
||||
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
for entity_entry in ent_reg.entities.values():
|
||||
integration_domain = entity_entry.platform
|
||||
integration_info = integrations_info.setdefault(
|
||||
integration_domain, {"devices": [], "entities": []}
|
||||
)
|
||||
|
||||
devices_info = integration_info["devices"]
|
||||
entities_info = integration_info["entities"]
|
||||
|
||||
entity_state = hass.states.get(entity_entry.entity_id)
|
||||
|
||||
entity_info = {
|
||||
# LIMITATION: `assumed_state` can be overridden by users;
|
||||
# we should replace it with the original value in the future.
|
||||
# It is also not present, if entity is not in the state machine,
|
||||
# which can happen for disabled entities.
|
||||
"assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
|
||||
if entity_state is not None
|
||||
else None,
|
||||
"capabilities": entity_entry.capabilities,
|
||||
"domain": entity_entry.domain,
|
||||
"entity_category": entity_entry.entity_category,
|
||||
"has_entity_name": entity_entry.has_entity_name,
|
||||
"original_device_class": entity_entry.original_device_class,
|
||||
# LIMITATION: `unit_of_measurement` can be overridden by users;
|
||||
# we should replace it with the original value in the future.
|
||||
"unit_of_measurement": entity_entry.unit_of_measurement,
|
||||
}
|
||||
|
||||
if (
|
||||
((device_id := entity_entry.device_id) is not None)
|
||||
and ((new_device_id := device_id_mapping.get(device_id)) is not None)
|
||||
and (new_device_id[0] == integration_domain)
|
||||
):
|
||||
device_info = devices_info[new_device_id[1]]
|
||||
device_info["entities"].append(entity_info)
|
||||
else:
|
||||
entities_info.append(entity_info)
|
||||
for from_device, via_device in via_devices.items():
|
||||
if via_device not in new_indexes:
|
||||
continue
|
||||
devices[new_indexes[from_device]]["via_device"] = new_indexes[via_device]
|
||||
|
||||
integrations = {
|
||||
domain: integration
|
||||
for domain, integration in (
|
||||
await async_get_integrations(hass, integrations_info.keys())
|
||||
await async_get_integrations(hass, seen_integrations)
|
||||
).items()
|
||||
if isinstance(integration, Integration)
|
||||
}
|
||||
|
||||
for domain, integration_info in integrations_info.items():
|
||||
if integration := integrations.get(domain):
|
||||
integration_info["is_custom_integration"] = not integration.is_built_in
|
||||
for device_info in devices:
|
||||
if integration := integrations.get(device_info["integration"]):
|
||||
device_info["is_custom_integration"] = not integration.is_built_in
|
||||
# Include version for custom integrations
|
||||
if not integration.is_built_in and integration.version:
|
||||
integration_info["custom_integration_version"] = str(
|
||||
integration.version
|
||||
)
|
||||
device_info["custom_integration_version"] = str(integration.version)
|
||||
|
||||
return {
|
||||
"version": "home-assistant:1",
|
||||
"home_assistant": HA_VERSION,
|
||||
"integrations": integrations_info,
|
||||
"devices": devices,
|
||||
}
|
||||
|
@@ -37,7 +37,7 @@ from .helpers import AndroidTVRemoteConfigEntry, create_api, get_enable_ime
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
APPS_NEW_ID = "add_new"
|
||||
APPS_NEW_ID = "NewApp"
|
||||
CONF_APP_DELETE = "app_delete"
|
||||
CONF_APP_ID = "app_id"
|
||||
|
||||
@@ -66,14 +66,9 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
self.host = user_input[CONF_HOST]
|
||||
api = create_api(self.hass, self.host, enable_ime=False)
|
||||
await api.async_generate_cert_if_missing()
|
||||
try:
|
||||
await api.async_generate_cert_if_missing()
|
||||
self.name, self.mac = await api.async_get_name_and_mac()
|
||||
except CannotConnect:
|
||||
# Likely invalid IP address or device is network unreachable. Stay
|
||||
# in the user step allowing the user to enter a different host.
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
self._abort_if_unique_id_mismatch()
|
||||
@@ -86,10 +81,11 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
)
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: self.host})
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
errors["base"] = "cannot_connect"
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Likely invalid IP address or device is network unreachable. Stay
|
||||
# in the user step allowing the user to enter a different host.
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
user_input = {}
|
||||
default_host = user_input.get(CONF_HOST, vol.UNDEFINED)
|
||||
@@ -116,9 +112,22 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the pair step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
pin = user_input["pin"]
|
||||
try:
|
||||
pin = user_input["pin"]
|
||||
await self.api.async_finish_pairing(pin)
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.name,
|
||||
data={
|
||||
CONF_HOST: self.host,
|
||||
CONF_NAME: self.name,
|
||||
CONF_MAC: self.mac,
|
||||
},
|
||||
)
|
||||
except InvalidAuth:
|
||||
# Invalid PIN. Stay in the pair step allowing the user to enter
|
||||
# a different PIN.
|
||||
@@ -136,20 +145,6 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
# them to enter a new IP address but we cannot do that for the zeroconf
|
||||
# flow. Simpler to abort for both flows.
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
else:
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.name,
|
||||
data={
|
||||
CONF_HOST: self.host,
|
||||
CONF_NAME: self.name,
|
||||
CONF_MAC: self.mac,
|
||||
},
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="pair",
|
||||
data_schema=STEP_PAIR_DATA_SCHEMA,
|
||||
@@ -287,9 +282,7 @@ class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload):
|
||||
{
|
||||
vol.Optional(CONF_APPS): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=apps,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
translation_key="apps",
|
||||
options=apps, mode=SelectSelectorMode.DROPDOWN
|
||||
)
|
||||
),
|
||||
vol.Required(
|
||||
|
@@ -6,7 +6,7 @@ from typing import Any
|
||||
|
||||
from androidtvremote2 import AndroidTVRemote, ConnectionClosed
|
||||
|
||||
from homeassistant.const import CONF_MAC, CONF_NAME
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
@@ -28,6 +28,8 @@ class AndroidTVRemoteBaseEntity(Entity):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
self._api = api
|
||||
self._host = config_entry.data[CONF_HOST]
|
||||
self._name = config_entry.data[CONF_NAME]
|
||||
self._apps: dict[str, Any] = config_entry.options.get(CONF_APPS, {})
|
||||
self._attr_unique_id = config_entry.unique_id
|
||||
self._attr_is_on = api.is_on
|
||||
@@ -37,7 +39,7 @@ class AndroidTVRemoteBaseEntity(Entity):
|
||||
self._attr_device_info = DeviceInfo(
|
||||
connections={(CONNECTION_NETWORK_MAC, config_entry.data[CONF_MAC])},
|
||||
identifiers={(DOMAIN, config_entry.unique_id)},
|
||||
name=config_entry.data[CONF_NAME],
|
||||
name=self._name,
|
||||
manufacturer=device_info["manufacturer"],
|
||||
model=device_info["model"],
|
||||
)
|
||||
|
@@ -7,7 +7,6 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["androidtvremote2"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["androidtvremote2==0.2.3"],
|
||||
"zeroconf": ["_androidtvremote2._tcp.local."]
|
||||
}
|
||||
|
@@ -175,11 +175,7 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
|
||||
"""Play a piece of media."""
|
||||
if media_type == MediaType.CHANNEL:
|
||||
if not media_id.isnumeric():
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_channel",
|
||||
translation_placeholders={"media_id": media_id},
|
||||
)
|
||||
raise ValueError(f"Channel must be numeric: {media_id}")
|
||||
if self._channel_set_task:
|
||||
self._channel_set_task.cancel()
|
||||
self._channel_set_task = asyncio.create_task(
|
||||
@@ -192,11 +188,7 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
|
||||
self._send_launch_app_command(media_id)
|
||||
return
|
||||
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_media_type",
|
||||
translation_placeholders={"media_type": media_type},
|
||||
)
|
||||
raise ValueError(f"Invalid media type: {media_type}")
|
||||
|
||||
async def async_browse_media(
|
||||
self,
|
||||
|
@@ -1,78 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: No integration-specific service actions are defined.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: This is a push-based integration.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: The integration is configured on a per-device basis, so there are no dynamic devices to add.
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: All entities are primary and do not require a specific category.
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: The integration provides only primary entities that should be enabled.
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: Icons are provided by the entity's device class, and no state-based icons are needed.
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: The integration uses the reauth flow for authentication issues, and no other repairable issues have been identified.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: The integration manages a single device per config entry. Stale device removal is handled by removing the config entry.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: The underlying library does not use HTTP for communication.
|
||||
strict-typing: done
|
@@ -22,7 +22,7 @@
|
||||
},
|
||||
"zeroconf_confirm": {
|
||||
"title": "Discovered Android TV",
|
||||
"description": "Do you want to add the Android TV ({name}) to Home Assistant? It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen."
|
||||
"description": "Do you want to add the Android TV ({name}) to Home Assistant? It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen."
|
||||
},
|
||||
"pair": {
|
||||
"description": "Enter the pairing code displayed on the Android TV ({name}).",
|
||||
@@ -85,19 +85,6 @@
|
||||
"exceptions": {
|
||||
"connection_closed": {
|
||||
"message": "Connection to the Android TV device is closed"
|
||||
},
|
||||
"invalid_channel": {
|
||||
"message": "Channel must be numeric: {media_id}"
|
||||
},
|
||||
"invalid_media_type": {
|
||||
"message": "Invalid media type: {media_type}"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"apps": {
|
||||
"options": {
|
||||
"add_new": "Add new"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/aosmith",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["py-aosmith==1.0.14"]
|
||||
"requirements": ["py-aosmith==1.0.12"]
|
||||
}
|
||||
|
@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
|
||||
|
||||
PLATFORMS: Final = [Platform.BINARY_SENSOR, Platform.SENSOR]
|
||||
PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
|
@@ -100,7 +100,6 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
|
||||
name=self.data.name or "APC UPS",
|
||||
hw_version=self.data.get("FIRMWARE"),
|
||||
sw_version=self.data.get("VERSION"),
|
||||
serial_number=self.data.serial_no,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> APCUPSdData:
|
||||
|
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/apcupsd",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["apcaccess"],
|
||||
"quality_scale": "platinum",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["aioapcaccess==0.4.2"]
|
||||
}
|
||||
|
@@ -43,7 +43,10 @@ rules:
|
||||
status: exempt
|
||||
comment: |
|
||||
The integration does not require authentication.
|
||||
test-coverage: done
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Patch `aioapcaccess.request_status` where we use it.
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
|
@@ -103,7 +103,6 @@ async def async_pipeline_from_audio_stream(
|
||||
wake_word_settings: WakeWordSettings | None = None,
|
||||
audio_settings: AudioSettings | None = None,
|
||||
device_id: str | None = None,
|
||||
satellite_id: str | None = None,
|
||||
start_stage: PipelineStage = PipelineStage.STT,
|
||||
end_stage: PipelineStage = PipelineStage.TTS,
|
||||
conversation_extra_system_prompt: str | None = None,
|
||||
@@ -116,7 +115,6 @@ async def async_pipeline_from_audio_stream(
|
||||
pipeline_input = PipelineInput(
|
||||
session=session,
|
||||
device_id=device_id,
|
||||
satellite_id=satellite_id,
|
||||
stt_metadata=stt_metadata,
|
||||
stt_stream=stt_stream,
|
||||
wake_word_phrase=wake_word_phrase,
|
||||
|
@@ -2,7 +2,7 @@
|
||||
"domain": "assist_pipeline",
|
||||
"name": "Assist pipeline",
|
||||
"after_dependencies": ["repairs"],
|
||||
"codeowners": ["@synesthesiam", "@arturpragacz"],
|
||||
"codeowners": ["@balloob", "@synesthesiam"],
|
||||
"dependencies": ["conversation", "stt", "tts", "wake_word"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/assist_pipeline",
|
||||
"integration_type": "system",
|
||||
|
@@ -583,9 +583,6 @@ class PipelineRun:
|
||||
_device_id: str | None = None
|
||||
"""Optional device id set during run start."""
|
||||
|
||||
_satellite_id: str | None = None
|
||||
"""Optional satellite id set during run start."""
|
||||
|
||||
_conversation_data: PipelineConversationData | None = None
|
||||
"""Data tied to the conversation ID."""
|
||||
|
||||
@@ -639,12 +636,9 @@ class PipelineRun:
|
||||
return
|
||||
pipeline_data.pipeline_debug[self.pipeline.id][self.id].events.append(event)
|
||||
|
||||
def start(
|
||||
self, conversation_id: str, device_id: str | None, satellite_id: str | None
|
||||
) -> None:
|
||||
def start(self, conversation_id: str, device_id: str | None) -> None:
|
||||
"""Emit run start event."""
|
||||
self._device_id = device_id
|
||||
self._satellite_id = satellite_id
|
||||
self._start_debug_recording_thread()
|
||||
|
||||
data: dict[str, Any] = {
|
||||
@@ -652,8 +646,6 @@ class PipelineRun:
|
||||
"language": self.language,
|
||||
"conversation_id": conversation_id,
|
||||
}
|
||||
if satellite_id is not None:
|
||||
data["satellite_id"] = satellite_id
|
||||
if self.runner_data is not None:
|
||||
data["runner_data"] = self.runner_data
|
||||
if self.tts_stream:
|
||||
@@ -1065,6 +1057,7 @@ class PipelineRun:
|
||||
self,
|
||||
intent_input: str,
|
||||
conversation_id: str,
|
||||
device_id: str | None,
|
||||
conversation_extra_system_prompt: str | None,
|
||||
) -> str:
|
||||
"""Run intent recognition portion of pipeline. Returns text to speak."""
|
||||
@@ -1095,8 +1088,7 @@ class PipelineRun:
|
||||
"language": input_language,
|
||||
"intent_input": intent_input,
|
||||
"conversation_id": conversation_id,
|
||||
"device_id": self._device_id,
|
||||
"satellite_id": self._satellite_id,
|
||||
"device_id": device_id,
|
||||
"prefer_local_intents": self.pipeline.prefer_local_intents,
|
||||
},
|
||||
)
|
||||
@@ -1107,8 +1099,7 @@ class PipelineRun:
|
||||
text=intent_input,
|
||||
context=self.context,
|
||||
conversation_id=conversation_id,
|
||||
device_id=self._device_id,
|
||||
satellite_id=self._satellite_id,
|
||||
device_id=device_id,
|
||||
language=input_language,
|
||||
agent_id=self.intent_agent.id,
|
||||
extra_system_prompt=conversation_extra_system_prompt,
|
||||
@@ -1278,7 +1269,6 @@ class PipelineRun:
|
||||
text=user_input.text,
|
||||
conversation_id=user_input.conversation_id,
|
||||
device_id=user_input.device_id,
|
||||
satellite_id=user_input.satellite_id,
|
||||
context=user_input.context,
|
||||
language=user_input.language,
|
||||
agent_id=user_input.agent_id,
|
||||
@@ -1577,15 +1567,10 @@ class PipelineInput:
|
||||
device_id: str | None = None
|
||||
"""Identifier of the device that is processing the input/output of the pipeline."""
|
||||
|
||||
satellite_id: str | None = None
|
||||
"""Identifier of the satellite that is processing the input/output of the pipeline."""
|
||||
|
||||
async def execute(self) -> None:
|
||||
"""Run pipeline."""
|
||||
self.run.start(
|
||||
conversation_id=self.session.conversation_id,
|
||||
device_id=self.device_id,
|
||||
satellite_id=self.satellite_id,
|
||||
conversation_id=self.session.conversation_id, device_id=self.device_id
|
||||
)
|
||||
current_stage: PipelineStage | None = self.run.start_stage
|
||||
stt_audio_buffer: list[EnhancedAudioChunk] = []
|
||||
@@ -1671,6 +1656,7 @@ class PipelineInput:
|
||||
tts_input = await self.run.recognize_intent(
|
||||
intent_input,
|
||||
self.session.conversation_id,
|
||||
self.device_id,
|
||||
self.conversation_extra_system_prompt,
|
||||
)
|
||||
if tts_input.strip():
|
||||
|
@@ -522,7 +522,6 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
pipeline_id=self._resolve_pipeline(),
|
||||
conversation_id=session.conversation_id,
|
||||
device_id=device_id,
|
||||
satellite_id=self.entity_id,
|
||||
tts_audio_output=self.tts_options,
|
||||
wake_word_phrase=wake_word_phrase,
|
||||
audio_settings=AudioSettings(
|
||||
|
@@ -75,6 +75,7 @@ class BroadcastIntentHandler(intent.IntentHandler):
|
||||
)
|
||||
|
||||
response = intent_obj.create_response()
|
||||
response.response_type = intent.IntentResponseType.ACTION_DONE
|
||||
response.async_set_results(
|
||||
success_results=[
|
||||
intent.IntentResponseTarget(
|
||||
|
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"domain": "assist_satellite",
|
||||
"name": "Assist Satellite",
|
||||
"codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"],
|
||||
"codeowners": ["@home-assistant/core", "@synesthesiam"],
|
||||
"dependencies": ["assist_pipeline", "http", "stt", "tts"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
|
||||
"integration_type": "entity",
|
||||
|
@@ -120,12 +120,16 @@ class AsusWrtBridge(ABC):
|
||||
|
||||
def __init__(self, host: str) -> None:
|
||||
"""Initialize Bridge."""
|
||||
self._configuration_url = f"http://{host}"
|
||||
self._host = host
|
||||
self._firmware: str | None = None
|
||||
self._label_mac: str | None = None
|
||||
self._model: str | None = None
|
||||
self._model_id: str | None = None
|
||||
self._serial_number: str | None = None
|
||||
|
||||
@property
|
||||
def configuration_url(self) -> str:
|
||||
"""Return configuration URL."""
|
||||
return self._configuration_url
|
||||
|
||||
@property
|
||||
def host(self) -> str:
|
||||
@@ -147,16 +151,6 @@ class AsusWrtBridge(ABC):
|
||||
"""Return model information."""
|
||||
return self._model
|
||||
|
||||
@property
|
||||
def model_id(self) -> str | None:
|
||||
"""Return model_id information."""
|
||||
return self._model_id
|
||||
|
||||
@property
|
||||
def serial_number(self) -> str | None:
|
||||
"""Return serial number information."""
|
||||
return self._serial_number
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def is_connected(self) -> bool:
|
||||
@@ -371,10 +365,9 @@ class AsusWrtHttpBridge(AsusWrtBridge):
|
||||
# get main router properties
|
||||
if mac := _identity.mac:
|
||||
self._label_mac = format_mac(mac)
|
||||
self._configuration_url = self._api.webpanel
|
||||
self._firmware = str(_identity.firmware)
|
||||
self._model = _identity.model
|
||||
self._model_id = _identity.product_id
|
||||
self._serial_number = _identity.serial
|
||||
|
||||
async def async_disconnect(self) -> None:
|
||||
"""Disconnect to the device."""
|
||||
|
@@ -388,13 +388,11 @@ class AsusWrtRouter:
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return the device information."""
|
||||
info = DeviceInfo(
|
||||
configuration_url=self._api.configuration_url,
|
||||
identifiers={(DOMAIN, self._entry.unique_id or "AsusWRT")},
|
||||
name=self.host,
|
||||
model=self._api.model or "Asus Router",
|
||||
model_id=self._api.model_id,
|
||||
serial_number=self._api.serial_number,
|
||||
manufacturer="Asus",
|
||||
configuration_url=f"http://{self.host}",
|
||||
)
|
||||
if self._api.firmware:
|
||||
info["sw_version"] = self._api.firmware
|
||||
|
@@ -92,7 +92,11 @@ from homeassistant.components.http.ban import (
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.components.http.view import HomeAssistantView
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.network import is_cloud_connection
|
||||
from homeassistant.helpers.network import (
|
||||
NoURLAvailableError,
|
||||
get_url,
|
||||
is_cloud_connection,
|
||||
)
|
||||
from homeassistant.util.network import is_local
|
||||
|
||||
from . import indieauth
|
||||
@@ -125,11 +129,18 @@ class WellKnownOAuthInfoView(HomeAssistantView):
|
||||
|
||||
async def get(self, request: web.Request) -> web.Response:
|
||||
"""Return the well known OAuth2 authorization info."""
|
||||
hass = request.app[KEY_HASS]
|
||||
# Some applications require absolute urls, so we prefer using the
|
||||
# current requests url if possible, with fallback to a relative url.
|
||||
try:
|
||||
url_prefix = get_url(hass, require_current_request=True)
|
||||
except NoURLAvailableError:
|
||||
url_prefix = ""
|
||||
return self.json(
|
||||
{
|
||||
"authorization_endpoint": "/auth/authorize",
|
||||
"token_endpoint": "/auth/token",
|
||||
"revocation_endpoint": "/auth/revoke",
|
||||
"authorization_endpoint": f"{url_prefix}/auth/authorize",
|
||||
"token_endpoint": f"{url_prefix}/auth/token",
|
||||
"revocation_endpoint": f"{url_prefix}/auth/revoke",
|
||||
"response_types_supported": ["code"],
|
||||
"service_documentation": (
|
||||
"https://developers.home-assistant.io/docs/auth_api"
|
||||
|
@@ -497,18 +497,16 @@ class BayesianBinarySensor(BinarySensorEntity):
|
||||
_LOGGER.debug(
|
||||
(
|
||||
"Observation for entity '%s' returned None, it will not be used"
|
||||
" for updating Bayesian sensor '%s'"
|
||||
" for Bayesian updating"
|
||||
),
|
||||
observation.entity_id,
|
||||
self.entity_id,
|
||||
)
|
||||
continue
|
||||
_LOGGER.debug(
|
||||
(
|
||||
"Observation for template entity returned None rather than a valid"
|
||||
" boolean, it will not be used for updating Bayesian sensor '%s'"
|
||||
" boolean, it will not be used for Bayesian updating"
|
||||
),
|
||||
self.entity_id,
|
||||
)
|
||||
# the prior has been updated and is now the posterior
|
||||
return prior
|
||||
@@ -557,6 +555,10 @@ class BayesianBinarySensor(BinarySensorEntity):
|
||||
for observation in self._observations:
|
||||
if observation.value_template is None:
|
||||
continue
|
||||
if isinstance(observation.value_template, str):
|
||||
observation.value_template = Template(
|
||||
observation.value_template, hass=self.hass
|
||||
)
|
||||
template = observation.value_template
|
||||
observations_by_template.setdefault(template, []).append(observation)
|
||||
|
||||
|
@@ -6,7 +6,7 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/bluesound",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["pyblu==2.0.5"],
|
||||
"requirements": ["pyblu==2.0.4"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_musc._tcp.local."
|
||||
|
@@ -321,14 +321,8 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
sources = [x.name for x in self._presets]
|
||||
|
||||
# ignore if both id and text are None
|
||||
for input_ in self._inputs:
|
||||
if input_.text is not None:
|
||||
sources.append(input_.text)
|
||||
elif input_.id is not None:
|
||||
sources.append(input_.id)
|
||||
sources = [x.text for x in self._inputs]
|
||||
sources += [x.name for x in self._presets]
|
||||
|
||||
return sources
|
||||
|
||||
@@ -346,7 +340,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
input_.id == self._status.input_id
|
||||
or input_.url == self._status.stream_url
|
||||
):
|
||||
return input_.text if input_.text is not None else input_.id
|
||||
return input_.text
|
||||
|
||||
for preset in self._presets:
|
||||
if preset.url == self._status.stream_url:
|
||||
@@ -543,7 +537,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
|
||||
# presets and inputs might have the same name; presets have priority
|
||||
for input_ in self._inputs:
|
||||
if source in (input_.text, input_.id):
|
||||
if input_.text == source:
|
||||
await self._player.play_url(input_.url)
|
||||
return
|
||||
for preset in self._presets:
|
||||
|
@@ -57,7 +57,6 @@ from .api import (
|
||||
_get_manager,
|
||||
async_address_present,
|
||||
async_ble_device_from_address,
|
||||
async_current_scanners,
|
||||
async_discovered_service_info,
|
||||
async_get_advertisement_callback,
|
||||
async_get_fallback_availability_interval,
|
||||
@@ -115,7 +114,6 @@ __all__ = [
|
||||
"HomeAssistantRemoteScanner",
|
||||
"async_address_present",
|
||||
"async_ble_device_from_address",
|
||||
"async_current_scanners",
|
||||
"async_discovered_service_info",
|
||||
"async_get_advertisement_callback",
|
||||
"async_get_fallback_availability_interval",
|
||||
|
@@ -66,22 +66,6 @@ def async_scanner_count(hass: HomeAssistant, connectable: bool = True) -> int:
|
||||
return _get_manager(hass).async_scanner_count(connectable)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_current_scanners(hass: HomeAssistant) -> list[BaseHaScanner]:
|
||||
"""Return the list of currently active scanners.
|
||||
|
||||
This method returns a list of all active Bluetooth scanners registered
|
||||
with Home Assistant, including both connectable and non-connectable scanners.
|
||||
|
||||
Args:
|
||||
hass: Home Assistant instance
|
||||
|
||||
Returns:
|
||||
List of all active scanner instances
|
||||
"""
|
||||
return _get_manager(hass).async_current_scanners()
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_discovered_service_info(
|
||||
hass: HomeAssistant, connectable: bool = True
|
||||
|
@@ -8,19 +8,8 @@ import itertools
|
||||
import logging
|
||||
|
||||
from bleak_retry_connector import BleakSlotManager
|
||||
from bluetooth_adapters import (
|
||||
ADAPTER_TYPE,
|
||||
BluetoothAdapters,
|
||||
adapter_human_name,
|
||||
adapter_model,
|
||||
)
|
||||
from habluetooth import (
|
||||
BaseHaRemoteScanner,
|
||||
BaseHaScanner,
|
||||
BluetoothManager,
|
||||
BluetoothScanningMode,
|
||||
HaScanner,
|
||||
)
|
||||
from bluetooth_adapters import BluetoothAdapters
|
||||
from habluetooth import BaseHaRemoteScanner, BaseHaScanner, BluetoothManager
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, EVENT_LOGGING_CHANGED
|
||||
@@ -30,9 +19,8 @@ from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
callback as hass_callback,
|
||||
)
|
||||
from homeassistant.helpers import discovery_flow, issue_registry as ir
|
||||
from homeassistant.helpers import discovery_flow
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.util.package import is_docker_env
|
||||
|
||||
from .const import (
|
||||
CONF_SOURCE,
|
||||
@@ -326,97 +314,3 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
||||
address = discovery_key.key
|
||||
_LOGGER.debug("Rediscover address %s", address)
|
||||
self.async_rediscover_address(address)
|
||||
|
||||
def on_scanner_start(self, scanner: BaseHaScanner) -> None:
|
||||
"""Handle when a scanner starts.
|
||||
|
||||
Create or delete repair issues for local adapters based on degraded mode.
|
||||
"""
|
||||
super().on_scanner_start(scanner)
|
||||
|
||||
# Only handle repair issues for local adapters (HaScanner instances)
|
||||
if not isinstance(scanner, HaScanner):
|
||||
return
|
||||
self.async_check_degraded_mode(scanner)
|
||||
self.async_check_scanning_mode(scanner)
|
||||
|
||||
@hass_callback
|
||||
def async_check_scanning_mode(self, scanner: HaScanner) -> None:
|
||||
"""Check if the scanner is running in passive mode when active mode is requested."""
|
||||
passive_mode_issue_id = f"bluetooth_adapter_passive_mode_{scanner.source}"
|
||||
|
||||
# Check if scanner is NOT in passive mode when active mode was requested
|
||||
if not (
|
||||
scanner.requested_mode is BluetoothScanningMode.ACTIVE
|
||||
and scanner.current_mode is BluetoothScanningMode.PASSIVE
|
||||
):
|
||||
# Delete passive mode issue if it exists and we're not in passive fallback
|
||||
ir.async_delete_issue(self.hass, DOMAIN, passive_mode_issue_id)
|
||||
return
|
||||
|
||||
# Create repair issue for passive mode fallback
|
||||
adapter_name = adapter_human_name(
|
||||
scanner.adapter, scanner.mac_address or "00:00:00:00:00:00"
|
||||
)
|
||||
adapter_details = self._bluetooth_adapters.adapters.get(scanner.adapter)
|
||||
model = adapter_model(adapter_details) if adapter_details else None
|
||||
|
||||
# Determine adapter type for specific instructions
|
||||
# Default to USB for any other type or unknown
|
||||
if adapter_details and adapter_details.get(ADAPTER_TYPE) == "uart":
|
||||
translation_key = "bluetooth_adapter_passive_mode_uart"
|
||||
else:
|
||||
translation_key = "bluetooth_adapter_passive_mode_usb"
|
||||
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
passive_mode_issue_id,
|
||||
is_fixable=False, # Requires a reboot or unplug
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=translation_key,
|
||||
translation_placeholders={
|
||||
"adapter": adapter_name,
|
||||
"model": model or "Unknown",
|
||||
},
|
||||
)
|
||||
|
||||
@hass_callback
|
||||
def async_check_degraded_mode(self, scanner: HaScanner) -> None:
|
||||
"""Check if we are in degraded mode and create/delete repair issues."""
|
||||
issue_id = f"bluetooth_adapter_missing_permissions_{scanner.source}"
|
||||
|
||||
# Delete any existing issue if not in degraded mode
|
||||
if not self.is_operating_degraded():
|
||||
ir.async_delete_issue(self.hass, DOMAIN, issue_id)
|
||||
return
|
||||
|
||||
# Only create repair issues for Docker-based installations where users
|
||||
# can fix permissions. This includes: Home Assistant Supervised,
|
||||
# Home Assistant Container, and third-party containers
|
||||
if not is_docker_env():
|
||||
return
|
||||
|
||||
# Create repair issue for degraded mode in Docker (including Supervised)
|
||||
adapter_name = adapter_human_name(
|
||||
scanner.adapter, scanner.mac_address or "00:00:00:00:00:00"
|
||||
)
|
||||
|
||||
# Try to get adapter details from the bluetooth adapters
|
||||
adapter_details = self._bluetooth_adapters.adapters.get(scanner.adapter)
|
||||
model = adapter_model(adapter_details) if adapter_details else None
|
||||
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
issue_id,
|
||||
is_fixable=False, # Not fixable from within HA - requires
|
||||
# container restart with new permissions
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="bluetooth_adapter_missing_permissions",
|
||||
translation_placeholders={
|
||||
"adapter": adapter_name,
|
||||
"model": model or "Unknown",
|
||||
"docs_url": "https://www.home-assistant.io/integrations/bluetooth/#additional-details-for-container",
|
||||
},
|
||||
)
|
||||
|
@@ -18,9 +18,9 @@
|
||||
"bleak==1.0.1",
|
||||
"bleak-retry-connector==4.4.3",
|
||||
"bluetooth-adapters==2.1.0",
|
||||
"bluetooth-auto-recovery==1.5.2",
|
||||
"bluetooth-auto-recovery==1.5.3",
|
||||
"bluetooth-data-tools==1.28.2",
|
||||
"dbus-fast==2.44.3",
|
||||
"habluetooth==5.6.2"
|
||||
"habluetooth==5.6.4"
|
||||
]
|
||||
}
|
||||
|
@@ -38,19 +38,5 @@
|
||||
"remote_adapters_not_supported": "Bluetooth configuration for remote adapters is not supported.",
|
||||
"local_adapters_no_passive_support": "Local Bluetooth adapters that do not support passive scanning cannot be configured."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"bluetooth_adapter_missing_permissions": {
|
||||
"title": "Bluetooth adapter requires additional permissions",
|
||||
"description": "The Bluetooth adapter **{adapter}** ({model}) is operating in degraded mode because your container needs additional permissions to fully access Bluetooth hardware.\n\nPlease follow the instructions in our documentation to add the required permissions:\n[Bluetooth permissions for Docker]({docs_url})\n\nAfter adding these permissions, restart your Home Assistant container for the changes to take effect."
|
||||
},
|
||||
"bluetooth_adapter_passive_mode_usb": {
|
||||
"title": "Bluetooth USB adapter requires manual power cycle",
|
||||
"description": "The Bluetooth adapter **{adapter}** ({model}) is stuck in passive scanning mode despite requesting active scanning mode. **Automatic recovery was attempted but failed.** This is likely a kernel, firmware, or operating system issue, and the adapter requires a manual power cycle to recover.\n\nIn passive mode, the adapter can only receive advertisements but cannot request additional data from devices, which will affect device discovery and functionality.\n\n**Manual intervention required:**\n1. **Unplug the USB adapter**\n2. Wait 5 seconds\n3. **Plug it back in**\n4. Wait for Home Assistant to detect the adapter\n\nIf the issue persists after power cycling:\n- Try a different USB port\n- Check for kernel/firmware updates\n- Consider using a different Bluetooth adapter"
|
||||
},
|
||||
"bluetooth_adapter_passive_mode_uart": {
|
||||
"title": "Bluetooth adapter requires system power cycle",
|
||||
"description": "The Bluetooth adapter **{adapter}** ({model}) is stuck in passive scanning mode despite requesting active scanning mode. **Automatic recovery was attempted but failed.** This is likely a kernel, firmware, or operating system issue, and the system requires a complete power cycle to recover the adapter.\n\nIn passive mode, the adapter can only receive advertisements but cannot request additional data from devices, which will affect device discovery and functionality.\n\n**Manual intervention required:**\n1. **Shut down the system completely** (not just a reboot)\n2. **Remove power** (unplug or turn off at the switch)\n3. Wait 10 seconds\n4. Restore power and boot the system\n\nIf the issue persists after power cycling:\n- Check for kernel/firmware updates\n- The onboard Bluetooth adapter may have hardware issues"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -8,10 +8,8 @@ import time
|
||||
from typing import Any
|
||||
|
||||
from habluetooth import (
|
||||
BaseHaScanner,
|
||||
BluetoothScanningMode,
|
||||
HaBluetoothSlotAllocations,
|
||||
HaScannerModeChange,
|
||||
HaScannerRegistration,
|
||||
HaScannerRegistrationEvent,
|
||||
)
|
||||
@@ -29,54 +27,12 @@ from .models import BluetoothChange
|
||||
from .util import InvalidConfigEntryID, InvalidSource, config_entry_id_to_source
|
||||
|
||||
|
||||
@callback
|
||||
def _async_get_source_from_config_entry(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg_id: int,
|
||||
config_entry_id: str | None,
|
||||
validate_source: bool = True,
|
||||
) -> str | None:
|
||||
"""Get source from config entry id.
|
||||
|
||||
Returns None if no config_entry_id provided or on error (after sending error response).
|
||||
If validate_source is True, also validates that the scanner exists.
|
||||
"""
|
||||
if not config_entry_id:
|
||||
return None
|
||||
|
||||
if validate_source:
|
||||
# Use the full validation that checks if scanner exists
|
||||
try:
|
||||
return config_entry_id_to_source(hass, config_entry_id)
|
||||
except InvalidConfigEntryID as err:
|
||||
connection.send_error(msg_id, "invalid_config_entry_id", str(err))
|
||||
return None
|
||||
except InvalidSource as err:
|
||||
connection.send_error(msg_id, "invalid_source", str(err))
|
||||
return None
|
||||
|
||||
# Just check if config entry exists and belongs to bluetooth
|
||||
if (
|
||||
not (entry := hass.config_entries.async_get_entry(config_entry_id))
|
||||
or entry.domain != DOMAIN
|
||||
):
|
||||
connection.send_error(
|
||||
msg_id,
|
||||
"invalid_config_entry_id",
|
||||
f"Config entry {config_entry_id} not found",
|
||||
)
|
||||
return None
|
||||
return entry.unique_id
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the bluetooth websocket API."""
|
||||
websocket_api.async_register_command(hass, ws_subscribe_advertisements)
|
||||
websocket_api.async_register_command(hass, ws_subscribe_connection_allocations)
|
||||
websocket_api.async_register_command(hass, ws_subscribe_scanner_details)
|
||||
websocket_api.async_register_command(hass, ws_subscribe_scanner_state)
|
||||
|
||||
|
||||
@lru_cache(maxsize=1024)
|
||||
@@ -224,12 +180,16 @@ async def ws_subscribe_connection_allocations(
|
||||
) -> None:
|
||||
"""Handle subscribe advertisements websocket command."""
|
||||
ws_msg_id = msg["id"]
|
||||
config_entry_id = msg.get("config_entry_id")
|
||||
source = _async_get_source_from_config_entry(
|
||||
hass, connection, ws_msg_id, config_entry_id
|
||||
)
|
||||
if config_entry_id and source is None:
|
||||
return # Error already sent by helper
|
||||
source: str | None = None
|
||||
if config_entry_id := msg.get("config_entry_id"):
|
||||
try:
|
||||
source = config_entry_id_to_source(hass, config_entry_id)
|
||||
except InvalidConfigEntryID as err:
|
||||
connection.send_error(ws_msg_id, "invalid_config_entry_id", str(err))
|
||||
return
|
||||
except InvalidSource as err:
|
||||
connection.send_error(ws_msg_id, "invalid_source", str(err))
|
||||
return
|
||||
|
||||
def _async_allocations_changed(allocations: HaBluetoothSlotAllocations) -> None:
|
||||
connection.send_message(
|
||||
@@ -260,12 +220,20 @@ async def ws_subscribe_scanner_details(
|
||||
) -> None:
|
||||
"""Handle subscribe scanner details websocket command."""
|
||||
ws_msg_id = msg["id"]
|
||||
config_entry_id = msg.get("config_entry_id")
|
||||
source = _async_get_source_from_config_entry(
|
||||
hass, connection, ws_msg_id, config_entry_id, validate_source=False
|
||||
)
|
||||
if config_entry_id and source is None:
|
||||
return # Error already sent by helper
|
||||
source: str | None = None
|
||||
if config_entry_id := msg.get("config_entry_id"):
|
||||
if (
|
||||
not (entry := hass.config_entries.async_get_entry(config_entry_id))
|
||||
or entry.domain != DOMAIN
|
||||
):
|
||||
connection.send_error(
|
||||
ws_msg_id,
|
||||
"invalid_config_entry_id",
|
||||
f"Invalid config entry id: {config_entry_id}",
|
||||
)
|
||||
return
|
||||
source = entry.unique_id
|
||||
assert source is not None
|
||||
|
||||
def _async_event_message(message: dict[str, Any]) -> None:
|
||||
connection.send_message(
|
||||
@@ -292,70 +260,3 @@ async def ws_subscribe_scanner_details(
|
||||
]
|
||||
):
|
||||
_async_event_message({"add": matching_scanners})
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "bluetooth/subscribe_scanner_state",
|
||||
vol.Optional("config_entry_id"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def ws_subscribe_scanner_state(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle subscribe scanner state websocket command."""
|
||||
ws_msg_id = msg["id"]
|
||||
config_entry_id = msg.get("config_entry_id")
|
||||
source = _async_get_source_from_config_entry(
|
||||
hass, connection, ws_msg_id, config_entry_id, validate_source=False
|
||||
)
|
||||
if config_entry_id and source is None:
|
||||
return # Error already sent by helper
|
||||
|
||||
@callback
|
||||
def _async_send_scanner_state(
|
||||
scanner: BaseHaScanner,
|
||||
current_mode: BluetoothScanningMode | None,
|
||||
requested_mode: BluetoothScanningMode | None,
|
||||
) -> None:
|
||||
payload = {
|
||||
"source": scanner.source,
|
||||
"adapter": scanner.adapter,
|
||||
"current_mode": current_mode.value if current_mode else None,
|
||||
"requested_mode": requested_mode.value if requested_mode else None,
|
||||
}
|
||||
connection.send_message(
|
||||
json_bytes(
|
||||
websocket_api.event_message(
|
||||
ws_msg_id,
|
||||
payload,
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_scanner_state_changed(mode_change: HaScannerModeChange) -> None:
|
||||
_async_send_scanner_state(
|
||||
mode_change.scanner,
|
||||
mode_change.current_mode,
|
||||
mode_change.requested_mode,
|
||||
)
|
||||
|
||||
manager = _get_manager(hass)
|
||||
connection.subscriptions[ws_msg_id] = (
|
||||
manager.async_register_scanner_mode_change_callback(
|
||||
_async_scanner_state_changed, source
|
||||
)
|
||||
)
|
||||
connection.send_message(json_bytes(websocket_api.result_message(ws_msg_id)))
|
||||
|
||||
# Send initial state for all matching scanners
|
||||
for scanner in manager.async_current_scanners():
|
||||
if source is None or scanner.source == source:
|
||||
_async_send_scanner_state(
|
||||
scanner,
|
||||
scanner.current_mode,
|
||||
scanner.requested_mode,
|
||||
)
|
||||
|
@@ -18,10 +18,8 @@ async def async_get_config_entry_diagnostics(
    coordinator = config_entry.runtime_data

    device_info = await coordinator.client.get_system_info()
    command_list = await coordinator.client.get_command_list()

    return {
        "remote_command_list": command_list,
        "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT),
        "device_info": async_redact_data(device_info, TO_REDACT),
    }

@@ -164,6 +164,10 @@
"name": "[%key:component::notify::services::notify::name%]",
"description": "Sends a mobile push notification to members of a shared Bring! list.",
"fields": {
  "entity_id": {
    "name": "List",
    "description": "Bring! list whose members (except sender) will be notified."
  },
  "message": {
    "name": "Notification type",
    "description": "Type of push notification to send to list members."

@@ -2,40 +2,28 @@

from __future__ import annotations

import logging

from brother import Brother, SnmpError

from homeassistant.components.snmp import async_get_snmp_engine
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
from homeassistant.const import CONF_HOST, CONF_TYPE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

from .const import (
    CONF_COMMUNITY,
    DEFAULT_COMMUNITY,
    DEFAULT_PORT,
    DOMAIN,
    SECTION_ADVANCED_SETTINGS,
)
from .const import DOMAIN
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

PLATFORMS = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
    """Set up Brother from a config entry."""
    host = entry.data[CONF_HOST]
    port = entry.data[SECTION_ADVANCED_SETTINGS][CONF_PORT]
    community = entry.data[SECTION_ADVANCED_SETTINGS][CONF_COMMUNITY]
    printer_type = entry.data[CONF_TYPE]

    snmp_engine = await async_get_snmp_engine(hass)
    try:
        brother = await Brother.create(
            host, port, community, printer_type=printer_type, snmp_engine=snmp_engine
            host, printer_type=printer_type, snmp_engine=snmp_engine
        )
    except (ConnectionError, SnmpError, TimeoutError) as error:
        raise ConfigEntryNotReady(

@@ -60,22 +48,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
    """Migrate an old entry."""
    if entry.version == 1 and entry.minor_version < 2:
        new_data = entry.data.copy()
        new_data[SECTION_ADVANCED_SETTINGS] = {
            CONF_PORT: DEFAULT_PORT,
            CONF_COMMUNITY: DEFAULT_COMMUNITY,
        }
        hass.config_entries.async_update_entry(entry, data=new_data, minor_version=2)

        _LOGGER.info(
            "Migration to configuration version %s.%s successful",
            entry.version,
            entry.minor_version,
        )

    return True
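For orientation, the sectioned variant of this configuration (read by async_setup_entry and written by async_migrate_entry above) nests the SNMP options under the advanced_settings key of the config entry data. A rough sketch of that layout, assuming the defaults from const.py (port 161, community "public"); the host value is purely illustrative:

# Hypothetical entry.data for the sectioned (minor version 2) layout shown above.
entry_data = {
    "host": "192.168.1.50",     # CONF_HOST, illustrative address
    "type": "laser",            # CONF_TYPE, one of PRINTER_TYPES
    "advanced_settings": {      # SECTION_ADVANCED_SETTINGS
        "port": 161,            # CONF_PORT, DEFAULT_PORT
        "community": "public",  # CONF_COMMUNITY, DEFAULT_COMMUNITY
    },
}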
@@ -9,65 +9,21 @@ import voluptuous as vol

from homeassistant.components.snmp import async_get_snmp_engine
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
from homeassistant.const import CONF_HOST, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import section
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.util.network import is_host_valid

from .const import (
    CONF_COMMUNITY,
    DEFAULT_COMMUNITY,
    DEFAULT_PORT,
    DOMAIN,
    PRINTER_TYPES,
    SECTION_ADVANCED_SETTINGS,
)
from .const import DOMAIN, PRINTER_TYPES

DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
        vol.Required(SECTION_ADVANCED_SETTINGS): section(
            vol.Schema(
                {
                    vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
                    vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
                },
            ),
            {"collapsed": True},
        ),
    }
)
ZEROCONF_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
        vol.Required(SECTION_ADVANCED_SETTINGS): section(
            vol.Schema(
                {
                    vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
                    vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
                },
            ),
            {"collapsed": True},
        ),
    }
)
RECONFIGURE_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Required(SECTION_ADVANCED_SETTINGS): section(
            vol.Schema(
                {
                    vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
                    vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
                },
            ),
            {"collapsed": True},
        ),
    }
)
RECONFIGURE_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str})


async def validate_input(

@@ -79,12 +35,7 @@ async def validate_input(

    snmp_engine = await async_get_snmp_engine(hass)

    brother = await Brother.create(
        user_input[CONF_HOST],
        user_input[SECTION_ADVANCED_SETTINGS][CONF_PORT],
        user_input[SECTION_ADVANCED_SETTINGS][CONF_COMMUNITY],
        snmp_engine=snmp_engine,
    )
    brother = await Brother.create(user_input[CONF_HOST], snmp_engine=snmp_engine)
    await brother.async_update()

    if expected_mac is not None and brother.serial.lower() != expected_mac:

@@ -97,7 +48,6 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Brother Printer."""

    VERSION = 1
    MINOR_VERSION = 2

    def __init__(self) -> None:
        """Initialize."""

@@ -176,11 +126,13 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
            title = f"{self.brother.model} {self.brother.serial}"
            return self.async_create_entry(
                title=title,
                data={CONF_HOST: self.host, **user_input},
                data={CONF_HOST: self.host, CONF_TYPE: user_input[CONF_TYPE]},
            )
        return self.async_show_form(
            step_id="zeroconf_confirm",
            data_schema=ZEROCONF_SCHEMA,
            data_schema=vol.Schema(
                {vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES)}
            ),
            description_placeholders={
                "serial_number": self.brother.serial,
                "model": self.brother.model,

@@ -208,7 +160,7 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
            else:
                return self.async_update_reload_and_abort(
                    entry,
                    data_updates=user_input,
                    data_updates={CONF_HOST: user_input[CONF_HOST]},
                )

        return self.async_show_form(
@@ -10,10 +10,3 @@ DOMAIN: Final = "brother"
PRINTER_TYPES: Final = ["laser", "ink"]

UPDATE_INTERVAL = timedelta(seconds=30)

SECTION_ADVANCED_SETTINGS = "advanced_settings"

CONF_COMMUNITY = "community"

DEFAULT_COMMUNITY = "public"
DEFAULT_PORT = 161

@@ -8,7 +8,7 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
  "requirements": ["brother==5.1.0"],
  "requirements": ["brother==5.0.1"],
  "zeroconf": [
    {
      "type": "_printer._tcp.local.",
@@ -8,21 +8,7 @@
        "type": "Type of the printer"
      },
      "data_description": {
        "host": "The hostname or IP address of the Brother printer to control.",
        "type": "Brother printer type: ink or laser."
      },
      "sections": {
        "advanced_settings": {
          "name": "Advanced settings",
          "data": {
            "port": "[%key:common::config_flow::data::port%]",
            "community": "SNMP Community"
          },
          "data_description": {
            "port": "The SNMP port of the Brother printer.",
            "community": "A simple password for devices to communicate to each other."
          }
        }
        "host": "The hostname or IP address of the Brother printer to control."
      }
    },
    "zeroconf_confirm": {

@@ -30,22 +16,6 @@
      "title": "Discovered Brother Printer",
      "data": {
        "type": "[%key:component::brother::config::step::user::data::type%]"
      },
      "data_description": {
        "type": "[%key:component::brother::config::step::user::data_description::type%]"
      },
      "sections": {
        "advanced_settings": {
          "name": "Advanced settings",
          "data": {
            "port": "[%key:common::config_flow::data::port%]",
            "community": "SNMP Community"
          },
          "data_description": {
            "port": "The SNMP port of the Brother printer.",
            "community": "A simple password for devices to communicate to each other."
          }
        }
      }
    },
    "reconfigure": {

@@ -55,19 +25,6 @@
      },
      "data_description": {
        "host": "[%key:component::brother::config::step::user::data_description::host%]"
      },
      "sections": {
        "advanced_settings": {
          "name": "Advanced settings",
          "data": {
            "port": "[%key:common::config_flow::data::port%]",
            "community": "SNMP Community"
          },
          "data_description": {
            "port": "The SNMP port of the Brother printer.",
            "community": "A simple password for devices to communicate to each other."
          }
        }
      }
    }
  },
@@ -85,10 +85,10 @@
  "entity": {
    "sensor": {
      "current_temperature": {
        "name": "Current temperature"
        "name": "Current Temperature"
      },
      "outside_temperature": {
        "name": "Outside temperature"
        "name": "Outside Temperature"
      }
    }
  }
@@ -81,11 +81,7 @@ from .const import (
)
from .helper import get_camera_from_entity_id
from .img_util import scale_jpeg_camera_image
from .prefs import (
    CameraPreferences,
    DynamicStreamSettings,  # noqa: F401
    get_dynamic_camera_stream_settings,
)
from .prefs import CameraPreferences, DynamicStreamSettings  # noqa: F401
from .webrtc import (
    DATA_ICE_SERVERS,
    CameraWebRTCProvider,

@@ -554,9 +550,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
                self.hass,
                source,
                options=self.stream_options,
                dynamic_stream_settings=await get_dynamic_camera_stream_settings(
                    self.hass, self.entity_id
                ),
                dynamic_stream_settings=await self.hass.data[
                    DATA_CAMERA_PREFS
                ].get_dynamic_stream_settings(self.entity_id),
                stream_label=self.entity_id,
            )
            self.stream.set_update_callback(self.async_write_ha_state)

@@ -946,7 +942,9 @@ async def websocket_get_prefs(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle request for account info."""
    stream_prefs = await get_dynamic_camera_stream_settings(hass, msg["entity_id"])
    stream_prefs = await hass.data[DATA_CAMERA_PREFS].get_dynamic_stream_settings(
        msg["entity_id"]
    )
    connection.send_result(msg["id"], asdict(stream_prefs))


@@ -13,7 +13,7 @@ from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import UNDEFINED, UndefinedType

from .const import DATA_CAMERA_PREFS, DOMAIN, PREF_ORIENTATION, PREF_PRELOAD_STREAM
from .const import DOMAIN, PREF_ORIENTATION, PREF_PRELOAD_STREAM

STORAGE_KEY: Final = DOMAIN
STORAGE_VERSION: Final = 1

@@ -106,12 +106,3 @@ class CameraPreferences:
        )
        self._dynamic_stream_settings_by_entity_id[entity_id] = settings
        return settings


async def get_dynamic_camera_stream_settings(
    hass: HomeAssistant, entity_id: str
) -> DynamicStreamSettings:
    """Get dynamic stream settings for a camera entity."""
    if DATA_CAMERA_PREFS not in hass.data:
        raise HomeAssistantError("Camera integration not set up")
    return await hass.data[DATA_CAMERA_PREFS].get_dynamic_stream_settings(entity_id)
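Both call styles in the camera hunks above resolve the same per-entity preferences. A rough usage sketch (the entity id is illustrative; the preference field names are assumptions based on the PREF_PRELOAD_STREAM and PREF_ORIENTATION constants imported here):

# Via the module-level helper from .prefs:
settings = await get_dynamic_camera_stream_settings(hass, "camera.front_door")

# Or directly through the stored CameraPreferences instance:
settings = await hass.data[DATA_CAMERA_PREFS].get_dynamic_stream_settings(
    "camera.front_door"
)
# Either way, settings is a DynamicStreamSettings object carrying the
# preload_stream and orientation preferences (assumed field names).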
@@ -3,8 +3,7 @@
import logging
import threading

import pychromecast.discovery
import pychromecast.models
import pychromecast

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP

@@ -11,13 +11,10 @@ from uuid import UUID

import aiohttp
import attr
import pychromecast
from pychromecast import dial
from pychromecast.const import CAST_TYPE_GROUP
import pychromecast.controllers.media
import pychromecast.controllers.multizone
import pychromecast.controllers.receiver
from pychromecast.models import CastInfo
import pychromecast.socket_client

from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client

@@ -10,10 +10,8 @@ import json
import logging
from typing import TYPE_CHECKING, Any, Concatenate

import pychromecast.config
import pychromecast.const
import pychromecast
from pychromecast.controllers.homeassistant import HomeAssistantController
import pychromecast.controllers.media
from pychromecast.controllers.media import (
    MEDIA_PLAYER_ERROR_CODES,
    MEDIA_PLAYER_STATE_BUFFERING,
@@ -89,6 +89,7 @@ class SetTemperatureIntent(intent.IntentHandler):
        )

        response = intent_obj.create_response()
        response.response_type = intent.IntentResponseType.ACTION_DONE
        response.async_set_results(
            success_results=[
                intent.IntentResponseTarget(

@@ -274,16 +274,16 @@
      "message": "Provided temperature {check_temp} is not valid. Accepted range is {min_temp} to {max_temp}."
    },
    "low_temp_higher_than_high_temp": {
      "message": "'Lower target temperature' can not be higher than 'Upper target temperature'."
      "message": "Target temperature low can not be higher than Target temperature high."
    },
    "humidity_out_of_range": {
      "message": "Provided humidity {humidity} is not valid. Accepted range is {min_humidity} to {max_humidity}."
    },
    "missing_target_temperature_entity_feature": {
      "message": "Set temperature action was used with the 'Target temperature' parameter but the entity does not support it."
      "message": "Set temperature action was used with the target temperature parameter but the entity does not support it."
    },
    "missing_target_temperature_range_entity_feature": {
      "message": "Set temperature action was used with the 'Lower/Upper target temperature' parameter but the entity does not support it."
      "message": "Set temperature action was used with the target temperature low/high parameter but the entity does not support it."
    }
  }
}
@@ -71,7 +71,6 @@ async def async_converse(
    language: str | None = None,
    agent_id: str | None = None,
    device_id: str | None = None,
    satellite_id: str | None = None,
    extra_system_prompt: str | None = None,
) -> ConversationResult:
    """Process text and get intent."""

@@ -98,7 +97,6 @@
        context=context,
        conversation_id=conversation_id,
        device_id=device_id,
        satellite_id=satellite_id,
        language=language,
        agent_id=agent_id,
        extra_system_prompt=extra_system_prompt,

@@ -507,18 +507,14 @@ class ChatLog:
    async def async_provide_llm_data(
        self,
        llm_context: llm.LLMContext,
        user_llm_hass_api: str | list[str] | llm.API | None = None,
        user_llm_hass_api: str | list[str] | None = None,
        user_llm_prompt: str | None = None,
        user_extra_system_prompt: str | None = None,
    ) -> None:
        """Set the LLM system prompt."""
        llm_api: llm.APIInstance | None = None

        if user_llm_hass_api is None:
            pass
        elif isinstance(user_llm_hass_api, llm.API):
            llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)
        else:
        if user_llm_hass_api:
            try:
                llm_api = await llm.async_get_api(
                    self.hass,

@@ -371,6 +371,7 @@ class DefaultAgent(ConversationEntity):
            response = intent.IntentResponse(
                language=user_input.language or self.hass.config.language
            )
            response.response_type = intent.IntentResponseType.ACTION_DONE
            response.async_set_speech(response_text)

        if response is None:

@@ -470,7 +471,6 @@ class DefaultAgent(ConversationEntity):
                language,
                assistant=DOMAIN,
                device_id=user_input.device_id,
                satellite_id=user_input.satellite_id,
                conversation_agent_id=user_input.agent_id,
            )
        except intent.MatchFailedError as match_error:

@@ -201,7 +201,6 @@ async def websocket_hass_agent_debug(
        context=connection.context(msg),
        conversation_id=None,
        device_id=msg.get("device_id"),
        satellite_id=None,
        language=msg.get("language", hass.config.language),
        agent_id=agent.entity_id,
    )

@@ -1,7 +1,7 @@
{
  "domain": "conversation",
  "name": "Conversation",
  "codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"],
  "codeowners": ["@home-assistant/core", "@synesthesiam"],
  "dependencies": ["http", "intent"],
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "system",

@@ -37,9 +37,6 @@ class ConversationInput:
    device_id: str | None
    """Unique identifier for the device."""

    satellite_id: str | None
    """Unique identifier for the satellite."""

    language: str
    """Language of the request."""

@@ -56,7 +53,6 @@ class ConversationInput:
            "context": self.context.as_dict(),
            "conversation_id": self.conversation_id,
            "device_id": self.device_id,
            "satellite_id": self.satellite_id,
            "language": self.language,
            "agent_id": self.agent_id,
            "extra_system_prompt": self.extra_system_prompt,

@@ -100,7 +100,6 @@ async def async_attach_trigger(
                entity_name: entity["value"] for entity_name, entity in details.items()
            },
            "device_id": user_input.device_id,
            "satellite_id": user_input.satellite_id,
            "user_input": user_input.as_dict(),
        }