mirror of
https://github.com/home-assistant/core.git
synced 2026-05-16 05:51:45 +00:00
Compare commits
57 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| f09143c830 | |||
| 7de684d47b | |||
| 5a9bb972d0 | |||
| e1a73fbeed | |||
| 20a88eb21e | |||
| 0bb678cacf | |||
| 0e817c5c90 | |||
| e5cd1e2830 | |||
| b4c8452a5a | |||
| 86ffb9eccb | |||
| 7bf3e75bc8 | |||
| 5394c764b4 | |||
| 1cd34e8477 | |||
| 0122b2811a | |||
| 3f2bc45686 | |||
| 4612a72cd2 | |||
| 8448ace289 | |||
| 19fd6e2036 | |||
| 94ca503f71 | |||
| fbf30e64a0 | |||
| 49022b69b0 | |||
| 13105bd0b7 | |||
| 438c1e9c3d | |||
| b0ecc2f36a | |||
| 19f19e00f6 | |||
| 95ec39ac1a | |||
| c6b4594e7a | |||
| cf0b5c6e51 | |||
| 187fcd10b3 | |||
| ed1cba02ae | |||
| b213eb23c8 | |||
| 30d362dc8e | |||
| 67c818c7a8 | |||
| 5927f50bd2 | |||
| 66d7afa442 | |||
| 51fcdaff7a | |||
| 67baec27cf | |||
| d45941d648 | |||
| a338d04441 | |||
| 69eca62446 | |||
| 507b5f1bbf | |||
| ee8a15b368 | |||
| 7f92d88606 | |||
| cc1c5e788f | |||
| 1159946391 | |||
| 46208c034e | |||
| abdd132bdc | |||
| 1b71ef2a60 | |||
| f0445a792d | |||
| 24e3842319 | |||
| 54aae2c7de | |||
| ea3e8cf9b0 | |||
| a16f6f965e | |||
| d772320f06 | |||
| 8a74b41db5 | |||
| fddc6aaf38 | |||
| fab59d7a13 |
@@ -27,12 +27,13 @@ description: Reviews GitHub pull requests and provides feedback comments. This i
|
||||
- No need to highlight things that are already good.
|
||||
|
||||
## Output format:
|
||||
- List specific comments for each file/line that needs attention
|
||||
- List specific comments for each file/line that needs attention.
|
||||
- In the end, summarize with an overall assessment (approve, request changes, or comment) and bullet point list of changes suggested, if any.
|
||||
- Example output:
|
||||
```
|
||||
Overall assessment: request changes.
|
||||
- [CRITICAL] Memory leak in homeassistant/components/sensor/my_sensor.py:143
|
||||
- [PROBLEM] Inefficient algorithm in homeassistant/helpers/data_processing.py:87
|
||||
- [SUGGESTION] Improve variable naming in homeassistant/helpers/config_validation.py:45
|
||||
- [CRITICAL] sensor.py:143 - Memory leak
|
||||
- [PROBLEM] data_processing.py:87 - Inefficient algorithm
|
||||
- [SUGGESTION] test_init.py:45 - Improve x variable name
|
||||
```
|
||||
- Make sure to include the file and line number when possible in the bullet points.
|
||||
|
||||
+3
-1
@@ -1,5 +1,5 @@
|
||||
---
|
||||
name: Home Assistant Integration knowledge
|
||||
name: ha-integration-knowledge
|
||||
description: Everything you need to know to build, test and review Home Assistant Integrations. If you're looking at an integration, you must use this as your primary reference.
|
||||
---
|
||||
|
||||
@@ -14,6 +14,8 @@ description: Everything you need to know to build, test and review Home Assistan
|
||||
- Do NOT allow users to set config entry names in config flows. Names are automatically generated or can be customized later in UI. Exception: helper integrations may allow custom names.
|
||||
- For entity actions and entity services, avoid requesting redundant defensive checks for fields already enforced by Home Assistant validation schemas and entity filters; only request extra guards when values bypass validation or are transformed unsafely.
|
||||
- When validation guarantees a key is present, prefer direct dictionary indexing (`data["key"]`) over `.get("key")` so invalid assumptions fail fast.
|
||||
- Integrations should be thin wrappers. Protocol parsing, device state machines, or other domain logic belong in a separate PyPI library, not in the integration itself. If unsure, ask before inlining.
|
||||
- "potato" is a forbidden word for an integration and should never be used.
|
||||
|
||||
The following platforms have extra guidelines:
|
||||
- **Diagnostics**: [`platform-diagnostics.md`](platform-diagnostics.md) for diagnostic data collection
|
||||
@@ -23,3 +23,4 @@ requirements_all.txt linguist-generated=true
|
||||
requirements_test_all.txt linguist-generated=true
|
||||
requirements_test_pre_commit.txt linguist-generated=true
|
||||
script/hassfest/docker/Dockerfile linguist-generated=true
|
||||
.github/workflows/*.lock.yml linguist-generated=true merge=ours
|
||||
|
||||
@@ -38,4 +38,4 @@ When validation guarantees a dict key exists, prefer direct key access (`data["k
|
||||
|
||||
# Skills
|
||||
|
||||
- Home Assistant Integration knowledge: .claude/skills/integrations/SKILL.md
|
||||
- ha-integration-knowledge: .claude/skills/ha-integration-knowledge/SKILL.md
|
||||
|
||||
+1221
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,361 @@
|
||||
---
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
paths:
|
||||
- "requirements*.txt"
|
||||
- "homeassistant/package_constraints.txt"
|
||||
- "pyproject.toml"
|
||||
forks: ["*"]
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
network:
|
||||
allowed:
|
||||
- python
|
||||
tools:
|
||||
web-fetch: {}
|
||||
github:
|
||||
toolsets: [default]
|
||||
safe-outputs:
|
||||
add-comment:
|
||||
max: 1
|
||||
description: >
|
||||
Checks changed Python package requirements on PRs targeting the core repo
|
||||
(including fork PRs): verifies licenses match PyPI metadata, source
|
||||
repositories are publicly accessible, PyPI releases were uploaded via
|
||||
automated CI (Trusted Publisher attestation), the package's release pipeline
|
||||
uses OIDC or equivalent automated credentials (not static tokens), and the PR
|
||||
description contains the required links.
|
||||
---
|
||||
|
||||
# Requirements License and Availability Check
|
||||
|
||||
You are a code review assistant for the Home Assistant project. Your job is to
|
||||
review changes to Python package requirements and verify they meet the project's
|
||||
standards.
|
||||
|
||||
## Context
|
||||
|
||||
- Home Assistant uses `requirements_all.txt` (all integration packages),
|
||||
`requirements.txt` (core packages), `requirements_test.txt` (test
|
||||
dependencies), and `requirements_test_all.txt` (all test dependencies) to
|
||||
declare Python dependencies.
|
||||
- Each integration lists its packages in `homeassistant/components/<name>/manifest.json`
|
||||
under the `requirements` field.
|
||||
- Allowed licenses are maintained in `script/licenses.py` under
|
||||
`OSI_APPROVED_LICENSES_SPDX` (SPDX identifiers) and `OSI_APPROVED_LICENSES`
|
||||
(classifier strings).
|
||||
|
||||
## Step 1 — Identify Changed Packages
|
||||
|
||||
Use the GitHub tool to fetch the PR diff. Look for lines that were added (`+`)
|
||||
or removed (`-`) in **all** of these files:
|
||||
- `requirements.txt`
|
||||
- `requirements_all.txt`
|
||||
- `requirements_test.txt`
|
||||
- `requirements_test_all.txt`
|
||||
- `homeassistant/package_constraints.txt`
|
||||
- `pyproject.toml`
|
||||
|
||||
For each changed line that contains a package pin (e.g. `SomePackage==1.2.3`),
|
||||
classify it as:
|
||||
- **New package**: the package name appears only in `+` lines, with no
|
||||
corresponding `-` line for the same package name.
|
||||
- **Version bump**: the same package name appears in both `+` lines (new
|
||||
version) and `-` lines (old version), with different version numbers.
|
||||
|
||||
Record the **old version** and **new version** for every version bump — you
|
||||
will need these values in Step 4.
|
||||
|
||||
Ignore comment lines (starting with `#`), lines that start with `-r ` (file
|
||||
includes), and lines that don't contain `==`.
|
||||
|
||||
## Step 2 — Check License via PyPI
|
||||
|
||||
For each new or bumped package:
|
||||
|
||||
1. Fetch `https://pypi.org/pypi/{package_name}/json` (use the exact
|
||||
package name as it appears on PyPI).
|
||||
2. From the JSON response, extract:
|
||||
- `info.license` — free-text license field
|
||||
- `info.license_expression` — SPDX expression (if present)
|
||||
- `info.classifiers` — filter for entries starting with `"License ::"`.
|
||||
3. Determine if the license is in the approved list from `script/licenses.py`:
|
||||
- SPDX identifiers: compare against `OSI_APPROVED_LICENSES_SPDX`
|
||||
- Classifier strings: compare against `OSI_APPROVED_LICENSES`
|
||||
4. Flag a package as ❌ if the license is unknown, missing, or not in the
|
||||
approved list. Flag as ⚠️ if the license information is ambiguous or cannot
|
||||
be definitively determined.
|
||||
|
||||
## Step 2b — Verify PyPI Release Was Uploaded by CI
|
||||
|
||||
For each new or bumped package, verify that the release on PyPI was published
|
||||
automatically by a CI pipeline (via OIDC Trusted Publisher), not uploaded
|
||||
manually.
|
||||
|
||||
1. Fetch the PyPI JSON for the specific version being introduced or bumped:
|
||||
`https://pypi.org/pypi/{package_name}/{version}/json`
|
||||
2. Inspect the `urls` array in the response. For each distribution file (wheel
|
||||
or sdist), note the filename.
|
||||
3. For each filename, attempt to fetch the PyPI provenance attestation:
|
||||
   `https://pypi.org/integrity/{package_name}/{version}/{filename}/provenance`
|
||||
- If the response is HTTP 200 and contains a valid attestation object,
|
||||
inspect `attestation_bundles[*].publisher`. A Trusted Publisher attestation
|
||||
will have a `kind` identifying the CI system (e.g. `"GitHub Actions"`,
|
||||
`"GitLab"`) and a `repository` or `project` field matching the source
|
||||
repository.
|
||||
- If at least one distribution file has a valid Trusted Publisher attestation,
|
||||
mark ✅ CI-uploaded.
|
||||
- If no attestation is found for any file (404 for all), mark ❌ — "Release
|
||||
has no provenance attestation; it may have been uploaded manually".
|
||||
- If an attestation exists but the `publisher` does not identify a recognized
|
||||
CI system or Trusted Publisher, mark ⚠️ — "Attestation present but
|
||||
publisher cannot be verified as automated CI".
|
||||
|
||||
Note: if PyPI returns an error fetching the per-version JSON, fall back to the
|
||||
latest JSON (`https://pypi.org/pypi/{package_name}/json`) and look up the
|
||||
specific version in the `releases` dict.
|
||||
|
||||
## Step 3 — Check Repository Availability
|
||||
|
||||
For each new or bumped package:
|
||||
|
||||
1. From the PyPI JSON at `info.project_urls`, find the source repository URL
|
||||
(keys such as `"Source"`, `"Homepage"`, `"Repository"`, or `"Source Code"`).
|
||||
2. Use web-fetch to perform a GET request to the repository URL.
|
||||
3. If the response returns HTTP 200 and the page is publicly accessible, mark ✅.
|
||||
4. If the URL is missing, returns a non-200 status, or redirects to a login
|
||||
page, mark ❌ with a note that the repository could not be verified as public.
|
||||
|
||||
## Step 4 — Check PR Description
|
||||
|
||||
Read the PR body from the GitHub API using the PR number `${{ github.event.pull_request.number }}`.
|
||||
Extract all URLs present in the PR body.
|
||||
|
||||
### 4a — New packages: repository link required
|
||||
|
||||
For **new packages** (brand-new dependency not previously in any requirements
|
||||
file): the PR description must contain a link that points to the package's
|
||||
**source repository** as identified in Step 3 (the URL recorded from
|
||||
`info.project_urls`). A PyPI page link alone is **not** acceptable — the link
|
||||
must point directly to the source repository (e.g. a GitHub or GitLab URL).
|
||||
|
||||
- If a URL in the PR body matches (or is a sub-path of) the source repository
|
||||
URL identified via PyPI, mark ✅.
|
||||
- If the PR body contains a source repository URL that does **not** match the
|
||||
repository URL found in the package's PyPI metadata (`info.project_urls`),
|
||||
mark ❌ — "PR description links to `<pr_url>` but PyPI reports the source
|
||||
repository as `<pypi_repo_url>`; please use the correct repository URL."
|
||||
- If no source repository URL is present in the PR body at all, mark ❌ —
|
||||
"PR description must link to the source repository at `<repo_url>` (found
|
||||
via PyPI). A PyPI page link is not sufficient."
|
||||
|
||||
### 4b — Version bumps: changelog or diff link required
|
||||
|
||||
For **version bumps**: the PR description must contain a link to a changelog,
|
||||
release notes page, or a diff/comparison URL that references the **correct
|
||||
versions** being bumped (old → new).
|
||||
|
||||
Checks to perform for each bumped package (old version = X, new version = Y):
|
||||
1. Extract all URLs from the PR body that contain the repository's domain or
|
||||
path (as identified in Step 3).
|
||||
2. Verify that at least one such URL includes both the old version string and
|
||||
new version string in some form — e.g. a GitHub compare URL like
|
||||
`compare/vX...vY`, a releases URL mentioning version Y, or a
|
||||
`CHANGELOG.md` anchor referencing Y.
|
||||
3. If no URL matches, check if the PR body contains any changelog/diff link at
|
||||
all for this package.
|
||||
|
||||
Outcome:
|
||||
- ✅ — a URL pointing to the correct repo with version references covering the
|
||||
exact bump (X → Y).
|
||||
- ⚠️ — a changelog/diff link exists but does not clearly reference the correct
|
||||
versions or the correct repository; explain what was found and what is
|
||||
expected.
|
||||
- ❌ — no changelog or diff link found at all in the PR description for this
|
||||
package.
|
||||
|
||||
### 4c — Diff consistency check
|
||||
|
||||
For each **version bump**, verify that the version change recorded in the diff
|
||||
(Step 1) is internally consistent:
|
||||
- The `-` line must contain the old version and the `+` line must contain the
|
||||
new version for the same package name.
|
||||
- Flag ❌ if the diff shows a downgrade (new version < old version) without an
|
||||
explanation, or if the version strings cannot be parsed.
|
||||
|
||||
## Step 5 — Verify Source Repository is Publicly Accessible
|
||||
|
||||
Before inspecting the release pipeline, confirm that the source repository
|
||||
identified in Step 3 is publicly reachable.
|
||||
|
||||
For each new or bumped package:
|
||||
|
||||
1. Use the source repository URL recorded in Step 3.
|
||||
2. If no repository URL was found in `info.project_urls`, mark ❌ — "No source
|
||||
repository URL found in PyPI metadata; a public source repository is
|
||||
required."
|
||||
3. If a repository URL was found, perform a GET request to that URL (using
|
||||
web-fetch). If the response is HTTP 200 and returns a publicly accessible
|
||||
page (not a login redirect or error page), mark ✅.
|
||||
4. If the response is non-200, the URL redirects to a login/authentication page,
|
||||
or the repository appears private or unavailable, mark ❌ — "Source
|
||||
repository at `<repo_url>` is not publicly accessible. Home Assistant
|
||||
requires all dependencies to have publicly available source code." **Do not
|
||||
proceed with the release pipeline check (Step 6) for this package.**
|
||||
|
||||
## Step 6 — Check Release Pipeline Sanity
|
||||
|
||||
For each new or bumped package, determine the source repository host from the
|
||||
URL identified in Step 3, then inspect whether the project's release/publish CI
|
||||
workflow is sane. The checks differ by hosting provider.
|
||||
|
||||
### GitHub repositories (`github.com`)
|
||||
|
||||
1. Using the GitHub API, list the workflows in the source repository:
|
||||
`GET /repos/{owner}/{repo}/actions/workflows`
|
||||
2. Identify any workflow whose name or filename suggests publishing to PyPI
|
||||
(e.g., contains "release", "publish", "pypi", or "deploy").
|
||||
3. Fetch the workflow file content and check the following:
|
||||
a. **Trigger sanity**: The publish job should be triggered by `push` to tags,
|
||||
`release: published`, or `workflow_run` on a release job — **not** solely
|
||||
by `workflow_dispatch` with no additional guards. A `workflow_dispatch`
|
||||
trigger alongside other triggers is acceptable. Mark ❌ if the only trigger
|
||||
is manual `workflow_dispatch` with no environment protection rules.
|
||||
b. **OIDC / Trusted Publisher**: The workflow should use OIDC-based publishing.
|
||||
Look for `id-token: write` permission and one of:
|
||||
- `pypa/gh-action-pypi-publish` action
|
||||
- `actions/attest-build-provenance` action
|
||||
   - As a negative indicator: any step that sets `TWINE_PASSWORD` from `secrets.PYPI_TOKEN` directly
|
||||
(flag ❌ if a long-lived API token is used instead of OIDC).
|
||||
Mark ✅ if OIDC is used, ⚠️ if the publish method cannot be determined,
|
||||
❌ if a static secret token is the only credential.
|
||||
c. **No manual upload bypass**: Verify there is no step that calls
|
||||
`twine upload` or `pip upload` outside of a properly gated job (e.g., one
|
||||
that requires an environment approval). Flag ⚠️ if such steps exist.
|
||||
4. If no publish workflow is found in the repository, mark ⚠️ — "No publish
|
||||
workflow found; it is unclear how this package is released to PyPI."
|
||||
|
||||
### GitLab repositories (`gitlab.com` or self-hosted GitLab)
|
||||
|
||||
1. Use the GitLab REST API to list CI/CD pipeline configuration files. First
|
||||
resolve the project ID via
|
||||
`GET https://gitlab.com/api/v4/projects/{url-encoded-namespace-and-name}`
|
||||
and note the `id` field.
|
||||
2. Fetch the repository's `.gitlab-ci.yml` (and any included files) using
|
||||
`GET https://gitlab.com/api/v4/projects/{id}/repository/files/.gitlab-ci.yml/raw?ref=HEAD`
|
||||
(use web-fetch for public repos).
|
||||
3. Identify any job whose name or `stage` suggests publishing to PyPI
|
||||
(e.g., "publish", "deploy", "release", "pypi").
|
||||
4. For each such job, check:
|
||||
a. **Trigger sanity**: The job should run only on tag pipelines (`only: tags`
|
||||
or `rules: - if: $CI_COMMIT_TAG`) or on protected branches — **not**
|
||||
solely on manual triggers (`when: manual`) with no additional protection.
|
||||
Mark ❌ if the only trigger is manual with no environment or protected-branch
|
||||
guard.
|
||||
b. **Automated credentials**: The job should use GitLab's OIDC ID token
|
||||
      (`id_tokens:` block) with an OIDC-based publish step (the GitLab equivalent of `pypa/gh-action-pypi-publish`), or
|
||||
reference `secrets.PYPI_TOKEN` / `$PYPI_TOKEN` injected from GitLab CI/CD
|
||||
protected variables (flag ❌ if the token is hard-coded or unprotected).
|
||||
Mark ✅ if OIDC or protected CI variables are used, ⚠️ if the method
|
||||
cannot be determined, ❌ if credentials appear to be insecure.
|
||||
c. **No manual upload bypass**: Flag ⚠️ if any job calls `twine upload`
|
||||
without being behind a protected-variable or environment guard.
|
||||
5. If no publish job is found, mark ⚠️ — "No publish job found in .gitlab-ci.yml;
|
||||
it is unclear how this package is released to PyPI."
|
||||
|
||||
### Other code hosting providers
|
||||
|
||||
For repositories hosted on platforms other than GitHub or GitLab (e.g.,
|
||||
Bitbucket, Codeberg, Gitea, Sourcehut):
|
||||
1. Use web-fetch to retrieve the repository's root page and look for any
|
||||
publicly visible CI configuration files (e.g., `.circleci/config.yml`,
|
||||
`Jenkinsfile`, `azure-pipelines.yml`, `bitbucket-pipelines.yml`,
|
||||
`.builds/*.yml` for Sourcehut).
|
||||
2. Apply the same conceptual checks as above:
|
||||
- Does publishing run on automated triggers (tags/releases), not solely
|
||||
manual ones?
|
||||
- Are credentials injected by the CI system (not hard-coded)?
|
||||
- Is there a `twine upload` or equivalent step that could be run manually?
|
||||
3. If no CI configuration can be retrieved, mark ⚠️ — "Release pipeline could
|
||||
not be inspected; hosting provider is not GitHub or GitLab."
|
||||
|
||||
## Step 7 — Post a Review Comment
|
||||
|
||||
**Always** post a review comment using `add-comment`, regardless of whether
|
||||
packages pass or fail.
|
||||
|
||||
In the table, use **only the status icon** (✅, ❌, ⚠️, or — for not
|
||||
applicable) in each check column — do not include any additional text inside
|
||||
table cells. Place all detailed findings in a collapsible `<details>` section
|
||||
immediately after the table.
|
||||
|
||||
Use the following structure:
|
||||
|
||||
```
|
||||
## Requirements Check
|
||||
|
||||
| Package | Type | Old→New | License | Repository Public | CI Upload | Release Pipeline | PR Link | Diff Consistent |
|
||||
|---------|------|---------|---------|-------------------|-----------|------------------|---------|-----------------|
|
||||
| PackageA | bump | 1.2.3→1.3.0 | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||
| PackageB | new | —→4.5.6 | ❌ | ✅ | ❌ | ⚠️ | ❌ | ✅ |
|
||||
| PackageC | bump | 2.0.0→2.1.0 | ✅ | ❌ | — | — | ⚠️ | ✅ |
|
||||
|
||||
<details [open if any check has ❌ or ⚠️]>
|
||||
<summary>Check details</summary>
|
||||
|
||||
**PackageA (bump 1.2.3→1.3.0):**
|
||||
- License: ✅ MIT
|
||||
- Repository Public: ✅
|
||||
- CI Upload: ✅
|
||||
- Release Pipeline: ✅ OIDC (`pypa/gh-action-pypi-publish`, `release: published` trigger)
|
||||
- PR Link: ✅ compare/v1.2.3...v1.3.0
|
||||
- Diff Consistent: ✅
|
||||
|
||||
**PackageB (new —→4.5.6):**
|
||||
- License: ❌ UNKNOWN — license could not be determined
|
||||
- Repository Public: ✅
|
||||
- CI Upload: ❌ — release has no provenance attestation; it may have been uploaded manually
|
||||
- Release Pipeline: ⚠️ — no publish workflow found
|
||||
- PR Link: ❌ — missing repo link; PR description must link to the source repository
|
||||
- Diff Consistent: ✅
|
||||
|
||||
**PackageC (bump 2.0.0→2.1.0):**
|
||||
- License: ✅ Apache-2.0
|
||||
- Repository Public: ❌ — source repository is not publicly accessible
|
||||
- CI Upload: — (skipped; repository not accessible)
|
||||
- Release Pipeline: — (skipped; repository not accessible)
|
||||
- PR Link: ⚠️ — link found but points to wrong repository
|
||||
- Diff Consistent: ✅
|
||||
|
||||
</details>
|
||||
```
|
||||
|
||||
The `<details>` element behavior:
|
||||
- If **all packages pass every check** (all ✅ or —): use `<details>` (collapsed
|
||||
by default) and add a brief confirmation before the details section:
|
||||
`All requirements checks passed. ✅`
|
||||
- If **any package has a failure or warning** (any ❌ or ⚠️): use `<details open>`
|
||||
(expanded by default). The details section must explain each failure and what
|
||||
the contributor needs to fix, including:
|
||||
- The expected source repository URL (from PyPI) when a link is missing or wrong.
|
||||
- The expected version range (old → new) when a changelog URL doesn't match the diff.
|
||||
- Whether the PyPI release lacks provenance attestation or uses an insecure publish method.
|
||||
- Whether the source repository is not publicly accessible.
|
||||
|
||||
## Notes
|
||||
|
||||
- Be constructive and helpful. Provide direct links where possible so the
|
||||
contributor can quickly fix the issue.
|
||||
- If PyPI returns an error for a package, mention that it could not be found and
|
||||
suggest the contributor verify the package name.
|
||||
- For packages that only appear in `homeassistant/package_constraints.txt` or
|
||||
`pyproject.toml` without being tied to a specific integration, the PR
|
||||
description link requirement still applies.
|
||||
- When checking test-only packages (from `requirements_test.txt` or
|
||||
`requirements_test_all.txt`), apply the same license, repository, and PR
|
||||
description checks as for production dependencies.
|
||||
- A package that appears in both a production file and a test file should only
|
||||
be reported once; use the production file entry as the canonical one.
|
||||
@@ -23,6 +23,7 @@ repos:
|
||||
- id: zizmor
|
||||
args:
|
||||
- --pedantic
|
||||
exclude: ^\.github/workflows/check-requirements\.lock\.yml$
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v6.0.0
|
||||
hooks:
|
||||
@@ -46,6 +47,7 @@ repos:
|
||||
additional_dependencies:
|
||||
- prettier@3.6.2
|
||||
- prettier-plugin-sort-json@4.2.0
|
||||
exclude: ^\.github/workflows/check-requirements\.lock\.yml$
|
||||
- repo: https://github.com/cdce8p/python-typing-update
|
||||
rev: v0.6.0
|
||||
hooks:
|
||||
|
||||
@@ -599,6 +599,7 @@ homeassistant.components.vallox.*
|
||||
homeassistant.components.valve.*
|
||||
homeassistant.components.velbus.*
|
||||
homeassistant.components.velux.*
|
||||
homeassistant.components.victron_gx.*
|
||||
homeassistant.components.vivotek.*
|
||||
homeassistant.components.vlc_telnet.*
|
||||
homeassistant.components.vodafone_station.*
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
ignore: |
|
||||
tests/fixtures/core/config/yaml_errors/
|
||||
.github/workflows/check-requirements.lock.yml
|
||||
rules:
|
||||
braces:
|
||||
level: error
|
||||
|
||||
@@ -25,7 +25,7 @@ async def async_get_media_source(hass: HomeAssistant) -> MediaSource:
|
||||
hass.data[DATA_MEDIA_SOURCE] = source = local_source.LocalSource(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"AI Generated Images",
|
||||
"AI generated images",
|
||||
{IMAGE_DIR: str(media_dir)},
|
||||
f"/{DOMAIN}",
|
||||
)
|
||||
|
||||
@@ -39,7 +39,6 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from .binary_sensor import BINARY_SENSOR_KEYS, BINARY_SENSORS, check_binary_sensors
|
||||
from .camera import STREAM_SOURCE_LIST
|
||||
from .const import (
|
||||
CAMERAS,
|
||||
COMM_RETRIES,
|
||||
COMM_TIMEOUT,
|
||||
DATA_AMCREST,
|
||||
@@ -359,7 +358,7 @@ def _start_event_monitor(
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Amcrest IP Camera component."""
|
||||
hass.data.setdefault(DATA_AMCREST, {DEVICES: {}, CAMERAS: []})
|
||||
hass.data.setdefault(DATA_AMCREST, {DEVICES: {}})
|
||||
|
||||
for device in config[DOMAIN]:
|
||||
name: str = device[CONF_NAME]
|
||||
|
||||
@@ -12,13 +12,11 @@ import aiohttp
|
||||
from aiohttp import web
|
||||
from amcrest import AmcrestError
|
||||
from haffmpeg.camera import CameraMjpeg
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.camera import Camera, CameraEntityFeature
|
||||
from homeassistant.components.ffmpeg import FFmpegManager, get_ffmpeg_manager
|
||||
from homeassistant.const import ATTR_ENTITY_ID, CONF_NAME, STATE_OFF, STATE_ON
|
||||
from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import (
|
||||
async_aiohttp_proxy_stream,
|
||||
async_aiohttp_proxy_web,
|
||||
@@ -29,11 +27,13 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import (
|
||||
ATTR_COLOR_BW,
|
||||
CAMERA_WEB_SESSION_TIMEOUT,
|
||||
CAMERAS,
|
||||
CBW,
|
||||
COMM_TIMEOUT,
|
||||
DATA_AMCREST,
|
||||
DEVICES,
|
||||
MOV,
|
||||
RESOLUTION_TO_STREAM,
|
||||
SERVICE_UPDATE,
|
||||
SNAPSHOT_TIMEOUT,
|
||||
@@ -49,65 +49,11 @@ SCAN_INTERVAL = timedelta(seconds=15)
|
||||
|
||||
STREAM_SOURCE_LIST = ["snapshot", "mjpeg", "rtsp"]
|
||||
|
||||
_ATTR_PTZ_TT = "travel_time"
|
||||
_ATTR_PTZ_MOV = "movement"
|
||||
_MOV = [
|
||||
"zoom_out",
|
||||
"zoom_in",
|
||||
"right",
|
||||
"left",
|
||||
"up",
|
||||
"down",
|
||||
"right_down",
|
||||
"right_up",
|
||||
"left_down",
|
||||
"left_up",
|
||||
]
|
||||
_ZOOM_ACTIONS = ["ZoomWide", "ZoomTele"]
|
||||
_MOVE_1_ACTIONS = ["Right", "Left", "Up", "Down"]
|
||||
_MOVE_2_ACTIONS = ["RightDown", "RightUp", "LeftDown", "LeftUp"]
|
||||
_ACTION = _ZOOM_ACTIONS + _MOVE_1_ACTIONS + _MOVE_2_ACTIONS
|
||||
|
||||
_DEFAULT_TT = 0.2
|
||||
|
||||
_ATTR_PRESET = "preset"
|
||||
_ATTR_COLOR_BW = "color_bw"
|
||||
|
||||
_CBW_COLOR = "color"
|
||||
_CBW_AUTO = "auto"
|
||||
_CBW_BW = "bw"
|
||||
_CBW = [_CBW_COLOR, _CBW_AUTO, _CBW_BW]
|
||||
|
||||
_SRV_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids})
|
||||
_SRV_GOTO_SCHEMA = _SRV_SCHEMA.extend(
|
||||
{vol.Required(_ATTR_PRESET): vol.All(vol.Coerce(int), vol.Range(min=1))}
|
||||
)
|
||||
_SRV_CBW_SCHEMA = _SRV_SCHEMA.extend({vol.Required(_ATTR_COLOR_BW): vol.In(_CBW)})
|
||||
_SRV_PTZ_SCHEMA = _SRV_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(_ATTR_PTZ_MOV): vol.In(_MOV),
|
||||
vol.Optional(_ATTR_PTZ_TT, default=_DEFAULT_TT): cv.small_float,
|
||||
}
|
||||
)
|
||||
|
||||
CAMERA_SERVICES = {
|
||||
"enable_recording": (_SRV_SCHEMA, "async_enable_recording", ()),
|
||||
"disable_recording": (_SRV_SCHEMA, "async_disable_recording", ()),
|
||||
"enable_audio": (_SRV_SCHEMA, "async_enable_audio", ()),
|
||||
"disable_audio": (_SRV_SCHEMA, "async_disable_audio", ()),
|
||||
"enable_motion_recording": (_SRV_SCHEMA, "async_enable_motion_recording", ()),
|
||||
"disable_motion_recording": (_SRV_SCHEMA, "async_disable_motion_recording", ()),
|
||||
"goto_preset": (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET,)),
|
||||
"set_color_bw": (_SRV_CBW_SCHEMA, "async_set_color_bw", (_ATTR_COLOR_BW,)),
|
||||
"start_tour": (_SRV_SCHEMA, "async_start_tour", ()),
|
||||
"stop_tour": (_SRV_SCHEMA, "async_stop_tour", ()),
|
||||
"ptz_control": (
|
||||
_SRV_PTZ_SCHEMA,
|
||||
"async_ptz_control",
|
||||
(_ATTR_PTZ_MOV, _ATTR_PTZ_TT),
|
||||
),
|
||||
}
|
||||
|
||||
_BOOL_TO_STATE = {True: STATE_ON, False: STATE_OFF}
|
||||
|
||||
|
||||
@@ -275,7 +221,7 @@ class AmcrestCam(Camera):
|
||||
self._motion_recording_enabled
|
||||
)
|
||||
if self._color_bw is not None:
|
||||
attr[_ATTR_COLOR_BW] = self._color_bw
|
||||
attr[ATTR_COLOR_BW] = self._color_bw
|
||||
return attr
|
||||
|
||||
@property
|
||||
@@ -322,15 +268,7 @@ class AmcrestCam(Camera):
|
||||
self.async_schedule_update_ha_state(True)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to signals and add camera to list."""
|
||||
self._unsub_dispatcher.extend(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
service_signal(service, self.entity_id),
|
||||
getattr(self, callback_name),
|
||||
)
|
||||
for service, (_, callback_name, _) in CAMERA_SERVICES.items()
|
||||
)
|
||||
"""Subscribe to signals."""
|
||||
self._unsub_dispatcher.append(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
@@ -338,11 +276,9 @@ class AmcrestCam(Camera):
|
||||
self.async_on_demand_update,
|
||||
)
|
||||
)
|
||||
self.hass.data[DATA_AMCREST][CAMERAS].append(self.entity_id)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Remove camera from list and disconnect from signals."""
|
||||
self.hass.data[DATA_AMCREST][CAMERAS].remove(self.entity_id)
|
||||
"""Disconnect from signals."""
|
||||
for unsub_dispatcher in self._unsub_dispatcher:
|
||||
unsub_dispatcher()
|
||||
|
||||
@@ -456,7 +392,7 @@ class AmcrestCam(Camera):
|
||||
|
||||
async def async_ptz_control(self, movement: str, travel_time: float) -> None:
|
||||
"""Move or zoom camera in specified direction."""
|
||||
code = _ACTION[_MOV.index(movement)]
|
||||
code = _ACTION[MOV.index(movement)]
|
||||
|
||||
kwargs = {"code": code, "arg1": 0, "arg2": 0, "arg3": 0}
|
||||
if code in _MOVE_1_ACTIONS:
|
||||
@@ -613,10 +549,10 @@ class AmcrestCam(Camera):
|
||||
)
|
||||
|
||||
async def _async_get_color_mode(self) -> str:
|
||||
return _CBW[await self._api.async_day_night_color]
|
||||
return CBW[await self._api.async_day_night_color]
|
||||
|
||||
async def _async_set_color_mode(self, cbw: str) -> None:
|
||||
await self._api.async_set_day_night_color(_CBW.index(cbw), channel=0)
|
||||
await self._api.async_set_day_night_color(CBW.index(cbw), channel=0)
|
||||
|
||||
async def _async_set_color_bw(self, cbw: str) -> None:
|
||||
"""Set camera color mode."""
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
DOMAIN = "amcrest"
|
||||
DATA_AMCREST = DOMAIN
|
||||
CAMERAS = "cameras"
|
||||
DEVICES = "devices"
|
||||
|
||||
BINARY_SENSOR_SCAN_INTERVAL_SECS = 5
|
||||
@@ -17,3 +16,18 @@ SERVICE_UPDATE = "update"
|
||||
|
||||
RESOLUTION_LIST = {"high": 0, "low": 1}
|
||||
RESOLUTION_TO_STREAM = {0: "Main", 1: "Extra"}
|
||||
|
||||
ATTR_COLOR_BW = "color_bw"
|
||||
CBW = ["color", "auto", "bw"]
|
||||
MOV = [
|
||||
"zoom_out",
|
||||
"zoom_in",
|
||||
"right",
|
||||
"left",
|
||||
"up",
|
||||
"down",
|
||||
"right_down",
|
||||
"right_up",
|
||||
"left_down",
|
||||
"left_up",
|
||||
]
|
||||
|
||||
@@ -1,62 +1,67 @@
|
||||
"""Support for Amcrest IP cameras."""
|
||||
"""Services for Amcrest IP cameras."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.auth.permissions.const import POLICY_CONTROL
|
||||
from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import Unauthorized, UnknownUser
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.service import async_extract_entity_ids
|
||||
import voluptuous as vol
|
||||
|
||||
from .camera import CAMERA_SERVICES
|
||||
from .const import CAMERAS, DATA_AMCREST, DOMAIN
|
||||
from .helpers import service_signal
|
||||
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import ATTR_COLOR_BW, CBW, DOMAIN, MOV
|
||||
|
||||
_ATTR_PRESET = "preset"
|
||||
_ATTR_PTZ_MOV = "movement"
|
||||
_ATTR_PTZ_TT = "travel_time"
|
||||
_DEFAULT_TT = 0.2
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the Amcrest IP Camera services."""
|
||||
for service_name, func in (
|
||||
("enable_recording", "async_enable_recording"),
|
||||
("disable_recording", "async_disable_recording"),
|
||||
("enable_audio", "async_enable_audio"),
|
||||
("disable_audio", "async_disable_audio"),
|
||||
("enable_motion_recording", "async_enable_motion_recording"),
|
||||
("disable_motion_recording", "async_disable_motion_recording"),
|
||||
("start_tour", "async_start_tour"),
|
||||
("stop_tour", "async_stop_tour"),
|
||||
):
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
service_name,
|
||||
entity_domain=CAMERA_DOMAIN,
|
||||
schema=None,
|
||||
func=func,
|
||||
)
|
||||
|
||||
def have_permission(user: User | None, entity_id: str) -> bool:
|
||||
return not user or user.permissions.check_entity(entity_id, POLICY_CONTROL)
|
||||
|
||||
async def async_extract_from_service(call: ServiceCall) -> list[str]:
|
||||
if call.context.user_id:
|
||||
user = await hass.auth.async_get_user(call.context.user_id)
|
||||
if user is None:
|
||||
raise UnknownUser(context=call.context)
|
||||
else:
|
||||
user = None
|
||||
|
||||
if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_ALL:
|
||||
# Return all entity_ids user has permission to control.
|
||||
return [
|
||||
entity_id
|
||||
for entity_id in hass.data[DATA_AMCREST][CAMERAS]
|
||||
if have_permission(user, entity_id)
|
||||
]
|
||||
|
||||
if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_NONE:
|
||||
return []
|
||||
|
||||
call_ids = await async_extract_entity_ids(call)
|
||||
entity_ids = []
|
||||
for entity_id in hass.data[DATA_AMCREST][CAMERAS]:
|
||||
if entity_id not in call_ids:
|
||||
continue
|
||||
if not have_permission(user, entity_id):
|
||||
raise Unauthorized(
|
||||
context=call.context, entity_id=entity_id, permission=POLICY_CONTROL
|
||||
)
|
||||
entity_ids.append(entity_id)
|
||||
return entity_ids
|
||||
|
||||
async def async_service_handler(call: ServiceCall) -> None:
|
||||
args = [call.data[arg] for arg in CAMERA_SERVICES[call.service][2]]
|
||||
for entity_id in await async_extract_from_service(call):
|
||||
async_dispatcher_send(hass, service_signal(call.service, entity_id), *args)
|
||||
|
||||
for service, params in CAMERA_SERVICES.items():
|
||||
hass.services.async_register(DOMAIN, service, async_service_handler, params[0])
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"goto_preset",
|
||||
entity_domain=CAMERA_DOMAIN,
|
||||
schema={vol.Required(_ATTR_PRESET): vol.All(vol.Coerce(int), vol.Range(min=1))},
|
||||
func="async_goto_preset",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"set_color_bw",
|
||||
entity_domain=CAMERA_DOMAIN,
|
||||
schema={vol.Required(ATTR_COLOR_BW): vol.In(CBW)},
|
||||
func="async_set_color_bw",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"ptz_control",
|
||||
entity_domain=CAMERA_DOMAIN,
|
||||
schema={
|
||||
vol.Required(_ATTR_PTZ_MOV): vol.In(MOV),
|
||||
vol.Optional(_ATTR_PTZ_TT, default=_DEFAULT_TT): cv.small_float,
|
||||
},
|
||||
func="async_ptz_control",
|
||||
)
|
||||
|
||||
@@ -945,7 +945,10 @@ class PipelineRun:
|
||||
try:
|
||||
# Transcribe audio stream
|
||||
stt_vad: VoiceCommandSegmenter | None = None
|
||||
if self.audio_settings.is_vad_enabled:
|
||||
if (
|
||||
self.audio_settings.is_vad_enabled
|
||||
and self.stt_provider.audio_processing.requires_external_vad
|
||||
):
|
||||
stt_vad = VoiceCommandSegmenter(
|
||||
silence_seconds=self.audio_settings.silence_seconds
|
||||
)
|
||||
|
||||
@@ -21,8 +21,9 @@ from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.ssl import get_default_context
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DOMAIN, MANUFACTURER, BeoModel
|
||||
from .services import async_setup_services
|
||||
from .util import get_remotes
|
||||
from .websocket import BeoWebsocket
|
||||
|
||||
|
||||
@@ -58,15 +59,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: BeoConfigEntry) -> bool:
|
||||
# Remove casts to str
|
||||
assert entry.unique_id
|
||||
|
||||
# Create device now as BeoWebsocket needs a device for debug logging, firing events etc.
|
||||
device_registry = dr.async_get(hass)
|
||||
device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
identifiers={(DOMAIN, entry.unique_id)},
|
||||
name=entry.title,
|
||||
model=entry.data[CONF_MODEL],
|
||||
)
|
||||
|
||||
client = MozartClient(host=entry.data[CONF_HOST], ssl_context=get_default_context())
|
||||
|
||||
# Check API and WebSocket connection
|
||||
@@ -83,6 +75,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: BeoConfigEntry) -> bool:
|
||||
await client.close_api_client()
|
||||
raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error
|
||||
|
||||
# Create device now as BeoWebsocket needs a device for debug logging, firing events etc.
|
||||
device_registry = dr.async_get(hass)
|
||||
device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
identifiers={(DOMAIN, entry.unique_id)},
|
||||
model=entry.data[CONF_MODEL],
|
||||
)
|
||||
|
||||
# Create devices for paired Beoremote One remotes
|
||||
for remote in await get_remotes(client):
|
||||
device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
identifiers={(DOMAIN, f"{remote.serial_number}_{entry.unique_id}")},
|
||||
name=f"{BeoModel.BEOREMOTE_ONE}-{remote.serial_number}-{entry.unique_id}",
|
||||
model=BeoModel.BEOREMOTE_ONE,
|
||||
serial_number=remote.serial_number,
|
||||
sw_version=remote.app_version,
|
||||
manufacturer=MANUFACTURER,
|
||||
via_device=(DOMAIN, entry.unique_id),
|
||||
)
|
||||
|
||||
websocket = BeoWebsocket(hass, entry, client)
|
||||
|
||||
# Add the websocket and API client
|
||||
|
||||
@@ -52,6 +52,7 @@ class BeoConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
_beolink_jid = ""
|
||||
_client: MozartClient
|
||||
_friendly_name = ""
|
||||
_host = ""
|
||||
_model = ""
|
||||
_name = ""
|
||||
@@ -111,6 +112,7 @@ class BeoConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
self._beolink_jid = beolink_self.jid
|
||||
self._friendly_name = beolink_self.friendly_name
|
||||
self._serial_number = get_serial_number_from_jid(beolink_self.jid)
|
||||
|
||||
await self.async_set_unique_id(self._serial_number)
|
||||
@@ -149,6 +151,7 @@ class BeoConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_abort(reason="invalid_address")
|
||||
|
||||
self._model = discovery_info.hostname[:-16].replace("-", " ")
|
||||
self._friendly_name = discovery_info.properties[ATTR_FRIENDLY_NAME]
|
||||
self._serial_number = discovery_info.properties[ATTR_SERIAL_NUMBER]
|
||||
self._beolink_jid = f"{discovery_info.properties[ATTR_TYPE_NUMBER]}.{discovery_info.properties[ATTR_ITEM_NUMBER]}.{self._serial_number}@products.bang-olufsen.com"
|
||||
|
||||
@@ -164,16 +167,13 @@ class BeoConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
async def _create_entry(self) -> ConfigFlowResult:
|
||||
"""Create the config entry for a discovered or manually configured Bang & Olufsen device."""
|
||||
# Ensure that created entities have a unique and easily identifiable id and not a "friendly name"
|
||||
self._name = f"{self._model}-{self._serial_number}"
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self._name,
|
||||
title=self._friendly_name,
|
||||
data=EntryData(
|
||||
host=self._host,
|
||||
jid=self._beolink_jid,
|
||||
model=self._model,
|
||||
name=self._name,
|
||||
name=self._friendly_name,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -20,7 +20,6 @@ from .const import (
|
||||
CONNECTION_STATUS,
|
||||
DEVICE_BUTTON_EVENTS,
|
||||
DOMAIN,
|
||||
MANUFACTURER,
|
||||
BeoModel,
|
||||
WebsocketNotification,
|
||||
)
|
||||
@@ -142,12 +141,6 @@ class BeoRemoteKeyEvent(BeoEvent):
|
||||
self._attr_unique_id = f"{remote.serial_number}_{self._unique_id}_{key_type}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{remote.serial_number}_{self._unique_id}")},
|
||||
name=f"{BeoModel.BEOREMOTE_ONE}-{remote.serial_number}-{self._unique_id}",
|
||||
model=BeoModel.BEOREMOTE_ONE,
|
||||
serial_number=remote.serial_number,
|
||||
sw_version=remote.app_version,
|
||||
manufacturer=MANUFACTURER,
|
||||
via_device=(DOMAIN, self._unique_id),
|
||||
)
|
||||
|
||||
# Make the native key name Home Assistant compatible
|
||||
|
||||
@@ -115,7 +115,7 @@ class BeoSensorRemoteBatteryLevel(BeoSensor):
|
||||
f"{remote.serial_number}_{self._unique_id}_remote_battery_level"
|
||||
)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{remote.serial_number}_{self._unique_id}")}
|
||||
identifiers={(DOMAIN, f"{remote.serial_number}_{self._unique_id}")},
|
||||
)
|
||||
self._attr_native_value = remote.battery_level
|
||||
self._remote = remote
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""The Broadlink integration."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -34,6 +34,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Broadlink climate entities."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
device = hass.data[DOMAIN].devices[config_entry.entry_id]
|
||||
|
||||
if device.api.type in DOMAINS_AND_TYPES[Platform.CLIMATE]:
|
||||
|
||||
@@ -133,6 +133,8 @@ class BroadlinkDevice[_ApiT: blk.Device = blk.Device]:
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
self.update_manager = update_manager
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
self.hass.data[DOMAIN].devices[config.entry_id] = self
|
||||
self.reset_jobs.append(config.add_update_listener(self.async_update))
|
||||
|
||||
|
||||
@@ -32,6 +32,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Broadlink light."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
device = hass.data[DOMAIN].devices[config_entry.entry_id]
|
||||
lights = []
|
||||
|
||||
|
||||
@@ -95,6 +95,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up a Broadlink remote."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
device = hass.data[DOMAIN].devices[config_entry.entry_id]
|
||||
remote = BroadlinkRemote(
|
||||
device,
|
||||
|
||||
@@ -31,6 +31,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Broadlink select."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
device = hass.data[DOMAIN].devices[config_entry.entry_id]
|
||||
async_add_entities([BroadlinkDayOfWeek(device)])
|
||||
|
||||
|
||||
@@ -108,6 +108,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Broadlink sensor."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
device = hass.data[DOMAIN].devices[config_entry.entry_id]
|
||||
sensor_data = device.update_manager.coordinator.data
|
||||
sensors = [
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Support for Broadlink switches."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -22,6 +22,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Broadlink time."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
device = hass.data[DOMAIN].devices[config_entry.entry_id]
|
||||
async_add_entities([BroadlinkTime(device)])
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bsblan"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["python-bsblan==5.1.4"],
|
||||
"requirements": ["python-bsblan==5.2.0"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "bsb-lan*",
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Component to embed Google Cast."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -65,6 +65,8 @@ class ChromecastInfo:
|
||||
"""
|
||||
cast_info = self.cast_info
|
||||
if self.cast_info.cast_type is None or self.cast_info.manufacturer is None:
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
unknown_models = hass.data[DOMAIN]["unknown_models"]
|
||||
if self.cast_info.model_name not in unknown_models:
|
||||
# Manufacturer and cast type is not available in mDNS data,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Provide functionality to interact with Cast devices on the network."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -9,34 +9,34 @@
|
||||
},
|
||||
"conditions": {
|
||||
"is_cooling": {
|
||||
"description": "Tests if one or more climate-control devices are cooling.",
|
||||
"description": "Tests if one or more thermostats are cooling.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device is cooling"
|
||||
"name": "Thermostat is cooling"
|
||||
},
|
||||
"is_drying": {
|
||||
"description": "Tests if one or more climate-control devices are drying.",
|
||||
"description": "Tests if one or more thermostats are drying.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device is drying"
|
||||
"name": "Thermostat is drying"
|
||||
},
|
||||
"is_heating": {
|
||||
"description": "Tests if one or more climate-control devices are heating.",
|
||||
"description": "Tests if one or more thermostats are heating.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device is heating"
|
||||
"name": "Thermostat is heating"
|
||||
},
|
||||
"is_hvac_mode": {
|
||||
"description": "Tests if one or more climate-control devices are set to a specific HVAC mode.",
|
||||
"description": "Tests if one or more thermostats are set to a specific HVAC mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
@@ -46,10 +46,10 @@
|
||||
"name": "Modes"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device HVAC mode"
|
||||
"name": "Thermostat HVAC mode"
|
||||
},
|
||||
"is_off": {
|
||||
"description": "Tests if one or more climate-control devices are off.",
|
||||
"description": "Tests if one or more thermostats are off.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
@@ -58,19 +58,19 @@
|
||||
"name": "[%key:component::climate::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device is off"
|
||||
"name": "Thermostat is off"
|
||||
},
|
||||
"is_on": {
|
||||
"description": "Tests if one or more climate-control devices are on.",
|
||||
"description": "Tests if one or more thermostats are on.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device is on"
|
||||
"name": "Thermostat is on"
|
||||
},
|
||||
"target_humidity": {
|
||||
"description": "Tests the humidity setpoint of one or more climate-control devices.",
|
||||
"description": "Tests the humidity setpoint of one or more thermostats.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
@@ -79,10 +79,10 @@
|
||||
"name": "[%key:component::climate::common::condition_threshold_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target humidity"
|
||||
"name": "Thermostat target humidity"
|
||||
},
|
||||
"target_temperature": {
|
||||
"description": "Tests the temperature setpoint of one or more climate-control devices.",
|
||||
"description": "Tests the temperature setpoint of one or more thermostats.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
@@ -91,7 +91,7 @@
|
||||
"name": "[%key:component::climate::common::condition_threshold_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target temperature"
|
||||
"name": "Thermostat target temperature"
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
@@ -288,67 +288,67 @@
|
||||
},
|
||||
"services": {
|
||||
"set_fan_mode": {
|
||||
"description": "Sets the fan mode of a climate-control device.",
|
||||
"description": "Sets the fan mode of a thermostat.",
|
||||
"fields": {
|
||||
"fan_mode": {
|
||||
"description": "Fan operation mode.",
|
||||
"name": "Fan mode"
|
||||
}
|
||||
},
|
||||
"name": "Set climate-control device fan mode"
|
||||
"name": "Set thermostat fan mode"
|
||||
},
|
||||
"set_humidity": {
|
||||
"description": "Sets the target humidity of a climate-control device.",
|
||||
"description": "Sets the target humidity of a thermostat.",
|
||||
"fields": {
|
||||
"humidity": {
|
||||
"description": "Target humidity.",
|
||||
"name": "Humidity"
|
||||
}
|
||||
},
|
||||
"name": "Set climate-control device target humidity"
|
||||
"name": "Set thermostat target humidity"
|
||||
},
|
||||
"set_hvac_mode": {
|
||||
"description": "Sets the HVAC mode of a climate-control device.",
|
||||
"description": "Sets the HVAC mode of a thermostat.",
|
||||
"fields": {
|
||||
"hvac_mode": {
|
||||
"description": "HVAC operation mode.",
|
||||
"name": "HVAC mode"
|
||||
}
|
||||
},
|
||||
"name": "Set climate-control device HVAC mode"
|
||||
"name": "Set thermostat HVAC mode"
|
||||
},
|
||||
"set_preset_mode": {
|
||||
"description": "Sets the preset mode of a climate-control device.",
|
||||
"description": "Sets the preset mode of a thermostat.",
|
||||
"fields": {
|
||||
"preset_mode": {
|
||||
"description": "Preset mode.",
|
||||
"name": "Preset mode"
|
||||
}
|
||||
},
|
||||
"name": "Set climate-control device preset mode"
|
||||
"name": "Set thermostat preset mode"
|
||||
},
|
||||
"set_swing_horizontal_mode": {
|
||||
"description": "Sets the horizontal swing mode of a climate-control device.",
|
||||
"description": "Sets the horizontal swing mode of a thermostat.",
|
||||
"fields": {
|
||||
"swing_horizontal_mode": {
|
||||
"description": "Horizontal swing operation mode.",
|
||||
"name": "Horizontal swing mode"
|
||||
}
|
||||
},
|
||||
"name": "Set climate-control device horizontal swing mode"
|
||||
"name": "Set thermostat horizontal swing mode"
|
||||
},
|
||||
"set_swing_mode": {
|
||||
"description": "Sets the swing mode of a climate-control device.",
|
||||
"description": "Sets the swing mode of a thermostat.",
|
||||
"fields": {
|
||||
"swing_mode": {
|
||||
"description": "Swing operation mode.",
|
||||
"name": "Swing mode"
|
||||
}
|
||||
},
|
||||
"name": "Set climate-control device swing mode"
|
||||
"name": "Set thermostat swing mode"
|
||||
},
|
||||
"set_temperature": {
|
||||
"description": "Sets the target temperature of a climate-control device.",
|
||||
"description": "Sets the target temperature of a thermostat.",
|
||||
"fields": {
|
||||
"hvac_mode": {
|
||||
"description": "HVAC operation mode.",
|
||||
@@ -367,25 +367,25 @@
|
||||
"name": "Target temperature"
|
||||
}
|
||||
},
|
||||
"name": "Set climate-control device target temperature"
|
||||
"name": "Set thermostat target temperature"
|
||||
},
|
||||
"toggle": {
|
||||
"description": "Toggles a climate-control device on/off.",
|
||||
"name": "Toggle climate-control device"
|
||||
"description": "Toggles a thermostat on/off.",
|
||||
"name": "Toggle thermostat"
|
||||
},
|
||||
"turn_off": {
|
||||
"description": "Turns off a climate-control device.",
|
||||
"name": "Turn off climate-control device"
|
||||
"description": "Turns off a thermostat.",
|
||||
"name": "Turn off thermostat"
|
||||
},
|
||||
"turn_on": {
|
||||
"description": "Turns on a climate-control device.",
|
||||
"name": "Turn on climate-control device"
|
||||
"description": "Turns on a thermostat.",
|
||||
"name": "Turn on thermostat"
|
||||
}
|
||||
},
|
||||
"title": "Climate",
|
||||
"triggers": {
|
||||
"hvac_mode_changed": {
|
||||
"description": "Triggers after the mode of one or more climate-control devices changes.",
|
||||
"description": "Triggers after the mode of one or more thermostats changes.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
@@ -398,10 +398,10 @@
|
||||
"name": "Modes"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device mode changed"
|
||||
"name": "Thermostat mode changed"
|
||||
},
|
||||
"started_cooling": {
|
||||
"description": "Triggers after one or more climate-control devices start cooling.",
|
||||
"description": "Triggers after one or more thermostats start cooling.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
@@ -410,10 +410,10 @@
|
||||
"name": "[%key:component::climate::common::trigger_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device started cooling"
|
||||
"name": "Thermostat started cooling"
|
||||
},
|
||||
"started_drying": {
|
||||
"description": "Triggers after one or more climate-control devices start drying.",
|
||||
"description": "Triggers after one or more thermostats start drying.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
@@ -422,10 +422,10 @@
|
||||
"name": "[%key:component::climate::common::trigger_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device started drying"
|
||||
"name": "Thermostat started drying"
|
||||
},
|
||||
"started_heating": {
|
||||
"description": "Triggers after one or more climate-control devices start heating.",
|
||||
"description": "Triggers after one or more thermostats start heating.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
@@ -434,19 +434,19 @@
|
||||
"name": "[%key:component::climate::common::trigger_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device started heating"
|
||||
"name": "Thermostat started heating"
|
||||
},
|
||||
"target_humidity_changed": {
|
||||
"description": "Triggers after the humidity setpoint of one or more climate-control devices changes.",
|
||||
"description": "Triggers after the humidity setpoint of one or more thermostats changes.",
|
||||
"fields": {
|
||||
"threshold": {
|
||||
"name": "[%key:component::climate::common::trigger_threshold_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target humidity changed"
|
||||
"name": "Thermostat target humidity changed"
|
||||
},
|
||||
"target_humidity_crossed_threshold": {
|
||||
"description": "Triggers after the humidity setpoint of one or more climate-control devices crosses a threshold.",
|
||||
"description": "Triggers after the humidity setpoint of one or more thermostats crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
@@ -458,19 +458,19 @@
|
||||
"name": "[%key:component::climate::common::trigger_threshold_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target humidity crossed threshold"
|
||||
"name": "Thermostat target humidity crossed threshold"
|
||||
},
|
||||
"target_temperature_changed": {
|
||||
"description": "Triggers after the temperature setpoint of one or more climate-control devices changes.",
|
||||
"description": "Triggers after the temperature setpoint of one or more thermostats changes.",
|
||||
"fields": {
|
||||
"threshold": {
|
||||
"name": "[%key:component::climate::common::trigger_threshold_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target temperature changed"
|
||||
"name": "Thermostat target temperature changed"
|
||||
},
|
||||
"target_temperature_crossed_threshold": {
|
||||
"description": "Triggers after the temperature setpoint of one or more climate-control devices crosses a threshold.",
|
||||
"description": "Triggers after the temperature setpoint of one or more thermostats crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
@@ -482,10 +482,10 @@
|
||||
"name": "[%key:component::climate::common::trigger_threshold_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target temperature crossed threshold"
|
||||
"name": "Thermostat target temperature crossed threshold"
|
||||
},
|
||||
"turned_off": {
|
||||
"description": "Triggers after one or more climate-control devices turn off.",
|
||||
"description": "Triggers after one or more thermostats turn off.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
@@ -494,10 +494,10 @@
|
||||
"name": "[%key:component::climate::common::trigger_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device turned off"
|
||||
"name": "Thermostat turned off"
|
||||
},
|
||||
"turned_on": {
|
||||
"description": "Triggers after one or more climate-control devices turn on, regardless of the mode.",
|
||||
"description": "Triggers after one or more thermostats turn on, regardless of the mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
@@ -506,7 +506,7 @@
|
||||
"name": "[%key:component::climate::common::trigger_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device turned on"
|
||||
"name": "Thermostat turned on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -169,6 +169,8 @@ class OptionsFlowHandler(OptionsFlowWithReload):
|
||||
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
# Polling interval is user-configurable, which is no longer allowed
|
||||
# pylint: disable-next=hass-config-flow-polling-field
|
||||
vol.Optional(
|
||||
CONF_SCAN_INTERVAL,
|
||||
default=self.config_entry.options.get(
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Data used by this integration."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Wrapper for media_source around async_upnp_client's DmsDevice ."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from duco.models import Node, NodeType, VentilationState
|
||||
|
||||
@@ -27,6 +28,8 @@ from .const import DOMAIN
|
||||
from .coordinator import DucoConfigEntry, DucoCoordinator
|
||||
from .entity import DucoEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@@ -79,7 +82,7 @@ SENSOR_DESCRIPTIONS: tuple[DucoSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda node: node.sensor.rh if node.sensor else None,
|
||||
node_types=(NodeType.BSRH,),
|
||||
node_types=(NodeType.BSRH, NodeType.UCRH),
|
||||
),
|
||||
DucoSensorEntityDescription(
|
||||
key="iaq_rh",
|
||||
@@ -88,7 +91,7 @@ SENSOR_DESCRIPTIONS: tuple[DucoSensorEntityDescription, ...] = (
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda node: node.sensor.iaq_rh if node.sensor else None,
|
||||
node_types=(NodeType.BSRH,),
|
||||
node_types=(NodeType.BSRH, NodeType.UCRH),
|
||||
),
|
||||
)
|
||||
|
||||
@@ -144,6 +147,13 @@ async def async_setup_entry(
|
||||
if node.node_id in known_nodes:
|
||||
continue
|
||||
known_nodes.add(node.node_id)
|
||||
if node.general.node_type == NodeType.UNKNOWN:
|
||||
_LOGGER.warning(
|
||||
"Duco node %s (%s) has an unsupported device type and will be ignored",
|
||||
node.node_id,
|
||||
node.general.name,
|
||||
)
|
||||
continue
|
||||
new_entities.extend(
|
||||
DucoSensorEntity(coordinator, node, description)
|
||||
for description in SENSOR_DESCRIPTIONS
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""The EARN-E P1 Meter integration."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -8,18 +8,24 @@ from aioesphomeapi import APIClient, APIConnectionError
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.components.bluetooth import async_remove_scanner
|
||||
from homeassistant.components.usb import (
|
||||
SerialDevice,
|
||||
USBDevice,
|
||||
async_register_serial_port_scanner,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
__version__ as ha_version,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.issue_registry import async_delete_issue
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from . import assist_satellite, dashboard, ffmpeg_proxy
|
||||
from . import assist_satellite, dashboard, ffmpeg_proxy, serial_proxy
|
||||
from .const import CONF_BLUETOOTH_MAC_ADDRESS, CONF_NOISE_PSK, DOMAIN
|
||||
from .domain_data import DomainData
|
||||
from .encryption_key_storage import async_get_encryption_key_storage
|
||||
@@ -34,12 +40,48 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
CLIENT_INFO = f"Home Assistant {ha_version}"
|
||||
|
||||
|
||||
@callback
|
||||
def _async_scan_serial_ports(
|
||||
hass: HomeAssistant,
|
||||
) -> list[USBDevice | SerialDevice]:
|
||||
"""Return serial-proxy ports exposed by connected ESPHome devices."""
|
||||
ports: list[USBDevice | SerialDevice] = []
|
||||
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
entry_data = entry.runtime_data
|
||||
if not entry_data.available:
|
||||
continue
|
||||
|
||||
device_info = entry_data.device_info
|
||||
if device_info is None:
|
||||
continue
|
||||
|
||||
ports.extend(
|
||||
SerialDevice(
|
||||
device=str(serial_proxy.build_url(entry.entry_id, proxy.name)),
|
||||
serial_number=(
|
||||
device_info.mac_address.replace(":", "") + "-" + slugify(proxy.name)
|
||||
),
|
||||
manufacturer=device_info.manufacturer,
|
||||
description=f"{device_info.model} ({proxy.name})",
|
||||
)
|
||||
for proxy in device_info.serial_proxies
|
||||
)
|
||||
|
||||
return ports
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the esphome component."""
|
||||
ffmpeg_proxy.async_setup(hass)
|
||||
await assist_satellite.async_setup(hass)
|
||||
await dashboard.async_setup(hass)
|
||||
async_setup_websocket_api(hass)
|
||||
|
||||
if "usb" in hass.config.components:
|
||||
async_register_serial_port_scanner(hass, _async_scan_serial_ports)
|
||||
serial_proxy.set_hass_loop(hass.loop)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -40,5 +40,7 @@ class DomainData:
|
||||
@cache
|
||||
def get(cls, hass: HomeAssistant) -> Self:
|
||||
"""Get the global DomainData instance stored in hass.data."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
ret = hass.data[DOMAIN] = cls()
|
||||
return ret
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"domain": "esphome",
|
||||
"name": "ESPHome",
|
||||
"after_dependencies": ["hassio", "zeroconf", "tag"],
|
||||
"after_dependencies": ["hassio", "tag", "usb", "zeroconf"],
|
||||
"codeowners": ["@jesserockz", "@kbx81", "@bdraco"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["assist_pipeline", "bluetooth", "intent", "ffmpeg", "http"],
|
||||
|
||||
@@ -0,0 +1,113 @@
|
||||
"""Home Assistant-aware ESPHome serial proxy URI handler for serialx."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import cast
|
||||
|
||||
from aioesphomeapi import APIClient
|
||||
from serialx import register_uri_handler
|
||||
from serialx.platforms.serial_esphome import (
|
||||
ESPHomeSerial,
|
||||
ESPHomeSerialTransport,
|
||||
InvalidSettingsError,
|
||||
)
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant, async_get_hass
|
||||
|
||||
from .const import DOMAIN
|
||||
from .entry_data import ESPHomeConfigEntry
|
||||
|
||||
SCHEME = "esphome-hass://"
|
||||
|
||||
# This is required so that serialx can safely query Core for an instance of an
|
||||
# aioesphomeapi client. We cannot make any assumptions here, some packages run separate
|
||||
# asyncio event loops in dedicated threads.
|
||||
_HASS_LOOP: asyncio.AbstractEventLoop | None = None
|
||||
|
||||
|
||||
def set_hass_loop(loop: asyncio.AbstractEventLoop) -> None:
|
||||
"""Store a reference to the Core event loop."""
|
||||
global _HASS_LOOP # noqa: PLW0603 # pylint: disable=global-statement
|
||||
_HASS_LOOP = loop
|
||||
|
||||
|
||||
def build_url(entry_id: str, port_name: str) -> URL:
|
||||
"""Build a canonical `esphome-hass://` URL."""
|
||||
return URL.build(
|
||||
scheme="esphome-hass",
|
||||
host="esphome",
|
||||
path=f"/{entry_id}",
|
||||
query={"port_name": port_name},
|
||||
)
|
||||
|
||||
|
||||
async def _resolve_client(entry_id: str) -> APIClient:
|
||||
"""Look up the `APIClient` for a specific config entry."""
|
||||
|
||||
# This function is async specifically so that we can get a reference to the Home
|
||||
# Assistant Core instance from its own thread
|
||||
hass: HomeAssistant = async_get_hass()
|
||||
entry = cast(ESPHomeConfigEntry, hass.config_entries.async_get_entry(entry_id))
|
||||
|
||||
if entry is None or entry.domain != DOMAIN:
|
||||
raise InvalidSettingsError(f"No ESPHome config entry with id {entry_id!r}")
|
||||
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise InvalidSettingsError(f"ESPHome config entry {entry_id!r} is not loaded")
|
||||
|
||||
return entry.runtime_data.client
|
||||
|
||||
|
||||
class HassESPHomeSerial(ESPHomeSerial):
|
||||
"""ESPHomeSerial that resolves an HA config entry's APIClient from the URL."""
|
||||
|
||||
_api: APIClient | None
|
||||
_path: str | None
|
||||
|
||||
async def _async_open(self) -> None:
|
||||
"""Resolve the HA config entry's APIClient, then open the proxy."""
|
||||
if self._api is None and self._path is not None:
|
||||
parsed = URL(str(self._path))
|
||||
|
||||
entry_id = parsed.path.lstrip("/")
|
||||
if not entry_id:
|
||||
raise InvalidSettingsError(
|
||||
f"No ESPHome config entry id in URL {self._path!r}"
|
||||
)
|
||||
|
||||
if "port_name" not in parsed.query:
|
||||
raise InvalidSettingsError("Port name is required")
|
||||
|
||||
self._port_name = parsed.query["port_name"]
|
||||
|
||||
hass_loop = _HASS_LOOP
|
||||
if hass_loop is None:
|
||||
raise InvalidSettingsError(
|
||||
"ESPHome integration has not registered its event loop"
|
||||
)
|
||||
|
||||
# Fetch the `APIClient` from the Core via the appropriate event loop
|
||||
self._api = await asyncio.wrap_future(
|
||||
asyncio.run_coroutine_threadsafe(_resolve_client(entry_id), hass_loop)
|
||||
)
|
||||
self._client_loop = self._api._loop # noqa: SLF001
|
||||
|
||||
await super()._async_open()
|
||||
|
||||
|
||||
class HassESPHomeSerialTransport(ESPHomeSerialTransport):
|
||||
"""Transport variant that constructs :class:`HassESPHomeSerial`."""
|
||||
|
||||
transport_name = "esphome-hass"
|
||||
_serial_cls = HassESPHomeSerial
|
||||
|
||||
|
||||
register_uri_handler(
|
||||
scheme=SCHEME,
|
||||
unique_scheme=SCHEME,
|
||||
sync_cls=HassESPHomeSerial,
|
||||
async_transport_cls=HassESPHomeSerialTransport,
|
||||
)
|
||||
@@ -87,8 +87,7 @@ def async_wifi_bulb_for_host(
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the flux_led component."""
|
||||
domain_data = hass.data.setdefault(DOMAIN, {})
|
||||
domain_data[FLUX_LED_DISCOVERY] = []
|
||||
hass.data[FLUX_LED_DISCOVERY] = []
|
||||
|
||||
@callback
|
||||
def _async_start_background_discovery(*_: Any) -> None:
|
||||
|
||||
@@ -9,8 +9,10 @@ from flux_led.const import (
|
||||
COLOR_MODE_RGBW as FLUX_COLOR_MODE_RGBW,
|
||||
COLOR_MODE_RGBWW as FLUX_COLOR_MODE_RGBWW,
|
||||
)
|
||||
from flux_led.scanner import FluxLEDDiscovery
|
||||
|
||||
from homeassistant.components.light import ColorMode
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
DOMAIN: Final = "flux_led"
|
||||
|
||||
@@ -34,7 +36,7 @@ DEFAULT_NETWORK_SCAN_INTERVAL: Final = 120
|
||||
DEFAULT_SCAN_INTERVAL: Final = 5
|
||||
DEFAULT_EFFECT_SPEED: Final = 50
|
||||
|
||||
FLUX_LED_DISCOVERY: Final = "flux_led_discovery"
|
||||
FLUX_LED_DISCOVERY: HassKey[list[FluxLEDDiscovery]] = HassKey(DOMAIN)
|
||||
|
||||
FLUX_LED_EXCEPTIONS: Final = (
|
||||
TimeoutError,
|
||||
|
||||
@@ -153,8 +153,7 @@ def async_update_entry_from_discovery(
|
||||
@callback
|
||||
def async_get_discovery(hass: HomeAssistant, host: str) -> FluxLEDDiscovery | None:
|
||||
"""Check if a device was already discovered via a broadcast discovery."""
|
||||
discoveries: list[FluxLEDDiscovery] = hass.data[DOMAIN][FLUX_LED_DISCOVERY]
|
||||
for discovery in discoveries:
|
||||
for discovery in hass.data[FLUX_LED_DISCOVERY]:
|
||||
if discovery[ATTR_IPADDR] == host:
|
||||
return discovery
|
||||
return None
|
||||
@@ -163,10 +162,10 @@ def async_get_discovery(hass: HomeAssistant, host: str) -> FluxLEDDiscovery | No
|
||||
@callback
|
||||
def async_clear_discovery_cache(hass: HomeAssistant, host: str) -> None:
|
||||
"""Clear the host from the discovery cache."""
|
||||
domain_data = hass.data[DOMAIN]
|
||||
discoveries: list[FluxLEDDiscovery] = domain_data[FLUX_LED_DISCOVERY]
|
||||
domain_data[FLUX_LED_DISCOVERY] = [
|
||||
discovery for discovery in discoveries if discovery[ATTR_IPADDR] != host
|
||||
hass.data[FLUX_LED_DISCOVERY] = [
|
||||
discovery
|
||||
for discovery in hass.data[FLUX_LED_DISCOVERY]
|
||||
if discovery[ATTR_IPADDR] != host
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -407,6 +407,12 @@ def async_remove_panel(
|
||||
hass.bus.async_fire(EVENT_PANELS_UPDATED)
|
||||
|
||||
|
||||
@callback
|
||||
def async_panel_exists(hass: HomeAssistant, frontend_url_path: str) -> bool:
|
||||
"""Return if a panel is registered for the given frontend URL path."""
|
||||
return frontend_url_path in hass.data.get(DATA_PANELS, {})
|
||||
|
||||
|
||||
def add_extra_js_url(hass: HomeAssistant, url: str, es5: bool = False) -> None:
|
||||
"""Register extra js or module url to load.
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ from fumis import (
|
||||
Fumis,
|
||||
FumisAuthenticationError,
|
||||
FumisConnectionError,
|
||||
FumisInfo,
|
||||
FumisStoveOfflineError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
@@ -51,23 +52,10 @@ class FumisFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
fumis = Fumis(
|
||||
mac=self._discovered_mac,
|
||||
password=user_input[CONF_PIN],
|
||||
session=async_get_clientsession(self.hass),
|
||||
errors, info = await self._validate_input(
|
||||
self._discovered_mac, user_input[CONF_PIN]
|
||||
)
|
||||
try:
|
||||
info = await fumis.update_info()
|
||||
except FumisAuthenticationError:
|
||||
errors[CONF_PIN] = "invalid_auth"
|
||||
except FumisStoveOfflineError:
|
||||
errors["base"] = "device_offline"
|
||||
except FumisConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except Exception: # noqa: BLE001
|
||||
LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if info:
|
||||
return self.async_create_entry(
|
||||
title=info.controller.model_name or "Fumis",
|
||||
data={
|
||||
@@ -96,23 +84,8 @@ class FumisFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if user_input is not None:
|
||||
mac = user_input[CONF_MAC].replace(":", "").replace("-", "").upper()
|
||||
fumis = Fumis(
|
||||
mac=mac,
|
||||
password=user_input[CONF_PIN],
|
||||
session=async_get_clientsession(self.hass),
|
||||
)
|
||||
try:
|
||||
info = await fumis.update_info()
|
||||
except FumisAuthenticationError:
|
||||
errors[CONF_PIN] = "invalid_auth"
|
||||
except FumisStoveOfflineError:
|
||||
errors["base"] = "device_offline"
|
||||
except FumisConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except Exception: # noqa: BLE001
|
||||
LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
errors, info = await self._validate_input(mac, user_input[CONF_PIN])
|
||||
if info:
|
||||
await self.async_set_unique_id(format_mac(mac), raise_on_progress=False)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
@@ -141,6 +114,35 @@ class FumisFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of a Fumis stove."""
|
||||
errors: dict[str, str] = {}
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input is not None:
|
||||
errors, _ = await self._validate_input(
|
||||
reconfigure_entry.data[CONF_MAC], user_input[CONF_PIN]
|
||||
)
|
||||
if not errors:
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry,
|
||||
data_updates={CONF_PIN: user_input[CONF_PIN]},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PIN): TextSelector(
|
||||
TextSelectorConfig(type=TextSelectorType.PASSWORD)
|
||||
),
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
@@ -155,23 +157,10 @@ class FumisFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if user_input is not None:
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
fumis = Fumis(
|
||||
mac=reauth_entry.data[CONF_MAC],
|
||||
password=user_input[CONF_PIN],
|
||||
session=async_get_clientsession(self.hass),
|
||||
errors, _ = await self._validate_input(
|
||||
reauth_entry.data[CONF_MAC], user_input[CONF_PIN]
|
||||
)
|
||||
try:
|
||||
await fumis.update_info()
|
||||
except FumisAuthenticationError:
|
||||
errors[CONF_PIN] = "invalid_auth"
|
||||
except FumisStoveOfflineError:
|
||||
errors["base"] = "device_offline"
|
||||
except FumisConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except Exception: # noqa: BLE001
|
||||
LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
if not errors:
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data_updates={CONF_PIN: user_input[CONF_PIN]},
|
||||
@@ -188,3 +177,28 @@ class FumisFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def _validate_input(
|
||||
self, mac: str, pin: str
|
||||
) -> tuple[dict[str, str], FumisInfo | None]:
|
||||
"""Validate credentials, returning errors and info."""
|
||||
errors: dict[str, str] = {}
|
||||
fumis = Fumis(
|
||||
mac=mac,
|
||||
password=pin,
|
||||
session=async_get_clientsession(self.hass),
|
||||
)
|
||||
try:
|
||||
info = await fumis.update_info()
|
||||
except FumisAuthenticationError:
|
||||
errors[CONF_PIN] = "invalid_auth"
|
||||
except FumisStoveOfflineError:
|
||||
errors["base"] = "device_offline"
|
||||
except FumisConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except Exception: # noqa: BLE001
|
||||
LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return errors, info
|
||||
return errors, None
|
||||
|
||||
@@ -62,7 +62,7 @@ rules:
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: This integration does not raise any repairable issues.
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -29,6 +30,15 @@
|
||||
},
|
||||
"description": "The PIN code for your stove has changed. Please enter the new PIN code to re-authenticate."
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"pin": "[%key:component::fumis::config::step::user::data::pin%]"
|
||||
},
|
||||
"data_description": {
|
||||
"pin": "[%key:component::fumis::config::step::user::data_description::pin%]"
|
||||
},
|
||||
"description": "Reconfigure your Fumis pellet stove connection."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"mac": "MAC address",
|
||||
|
||||
@@ -2,17 +2,19 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from types import MappingProxyType
|
||||
|
||||
from aiogithubapi import GitHubAPI
|
||||
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import (
|
||||
SERVER_SOFTWARE,
|
||||
async_get_clientsession,
|
||||
)
|
||||
|
||||
from .const import CONF_REPOSITORIES, DOMAIN, LOGGER
|
||||
from .const import CONF_REPOSITORIES, CONF_REPOSITORY, SUBENTRY_TYPE_REPOSITORY
|
||||
from .coordinator import GithubConfigEntry, GitHubDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
@@ -26,10 +28,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: GithubConfigEntry) -> bo
|
||||
client_name=SERVER_SOFTWARE,
|
||||
)
|
||||
|
||||
repositories: list[str] = entry.options[CONF_REPOSITORIES]
|
||||
|
||||
entry.runtime_data = {}
|
||||
for repository in repositories:
|
||||
for repository_subentry in entry.get_subentries_of_type(SUBENTRY_TYPE_REPOSITORY):
|
||||
repository = repository_subentry.data[CONF_REPOSITORY]
|
||||
coordinator = GitHubDataUpdateCoordinator(
|
||||
hass=hass,
|
||||
config_entry=entry,
|
||||
@@ -42,41 +43,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: GithubConfigEntry) -> bo
|
||||
if not entry.pref_disable_polling:
|
||||
await coordinator.subscribe()
|
||||
|
||||
entry.runtime_data[repository] = coordinator
|
||||
entry.runtime_data[repository_subentry.subentry_id] = coordinator
|
||||
|
||||
async_cleanup_device_registry(hass=hass, entry=entry)
|
||||
entry.async_on_unload(entry.add_update_listener(async_update_entry))
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
@callback
|
||||
def async_cleanup_device_registry(
|
||||
hass: HomeAssistant,
|
||||
entry: GithubConfigEntry,
|
||||
) -> None:
|
||||
"""Remove entries form device registry if we no longer track the repository."""
|
||||
device_registry = dr.async_get(hass)
|
||||
devices = dr.async_entries_for_config_entry(
|
||||
registry=device_registry,
|
||||
config_entry_id=entry.entry_id,
|
||||
)
|
||||
for device in devices:
|
||||
for item in device.identifiers:
|
||||
if item[0] == DOMAIN and item[1] not in entry.options[CONF_REPOSITORIES]:
|
||||
LOGGER.debug(
|
||||
(
|
||||
"Unlinking device %s for untracked repository %s from config"
|
||||
" entry %s"
|
||||
),
|
||||
device.id,
|
||||
item[1],
|
||||
entry.entry_id,
|
||||
)
|
||||
device_registry.async_update_device(
|
||||
device.id, remove_config_entry_id=entry.entry_id
|
||||
)
|
||||
break
|
||||
async def async_update_entry(hass: HomeAssistant, entry: GithubConfigEntry) -> None:
|
||||
"""Update entry."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: GithubConfigEntry) -> bool:
|
||||
@@ -86,3 +63,23 @@ async def async_unload_entry(hass: HomeAssistant, entry: GithubConfigEntry) -> b
|
||||
coordinator.unsubscribe()
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: GithubConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
if entry.minor_version == 1:
|
||||
# In minor version 2 we migrated repositories from entry options to
|
||||
# subentries, so we need to convert the list from
|
||||
# entry.options[CONF_REPOSITORIES] into individual subentries.
|
||||
for repository in entry.options[CONF_REPOSITORIES]:
|
||||
subentry = ConfigSubentry(
|
||||
data=MappingProxyType({CONF_REPOSITORY: repository}),
|
||||
subentry_type=SUBENTRY_TYPE_REPOSITORY,
|
||||
title=repository,
|
||||
unique_id=repository,
|
||||
)
|
||||
|
||||
hass.config_entries.async_add_subentry(entry, subentry)
|
||||
|
||||
hass.config_entries.async_update_entry(entry, minor_version=2)
|
||||
return True
|
||||
|
||||
@@ -19,23 +19,31 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithReload,
|
||||
ConfigSubentryFlow,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import (
|
||||
SERVER_SOFTWARE,
|
||||
async_get_clientsession,
|
||||
)
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
|
||||
from .const import CLIENT_ID, CONF_REPOSITORIES, DEFAULT_REPOSITORIES, DOMAIN, LOGGER
|
||||
from .const import (
|
||||
CLIENT_ID,
|
||||
CONF_REPOSITORY,
|
||||
DEFAULT_REPOSITORIES,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
SUBENTRY_TYPE_REPOSITORY,
|
||||
)
|
||||
|
||||
|
||||
async def get_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
|
||||
"""Return a list of repositories that the user owns or has starred."""
|
||||
client = GitHubAPI(token=access_token, session=async_get_clientsession(hass))
|
||||
repositories = set()
|
||||
repositories: set[str] = set()
|
||||
|
||||
async def _get_starred_repositories() -> None:
|
||||
response = await client.user.starred(params={"per_page": 100})
|
||||
@@ -53,7 +61,7 @@ async def get_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
|
||||
for result in results:
|
||||
response.data.extend(result.data)
|
||||
|
||||
repositories.update(response.data)
|
||||
repositories.update(repo.full_name for repo in response.data)
|
||||
|
||||
async def _get_personal_repositories() -> None:
|
||||
response = await client.user.repos(params={"per_page": 100})
|
||||
@@ -71,7 +79,7 @@ async def get_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
|
||||
for result in results:
|
||||
response.data.extend(result.data)
|
||||
|
||||
repositories.update(response.data)
|
||||
repositories.update(repo.full_name for repo in response.data)
|
||||
|
||||
try:
|
||||
await asyncio.gather(
|
||||
@@ -82,21 +90,26 @@ async def get_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
|
||||
)
|
||||
|
||||
except GitHubException:
|
||||
return DEFAULT_REPOSITORIES
|
||||
repositories.update(DEFAULT_REPOSITORIES)
|
||||
|
||||
if len(repositories) == 0:
|
||||
return DEFAULT_REPOSITORIES
|
||||
repositories.update(DEFAULT_REPOSITORIES)
|
||||
|
||||
return sorted(
|
||||
(repo.full_name for repo in repositories),
|
||||
key=str.casefold,
|
||||
)
|
||||
current_repositories = {
|
||||
subentry.data[CONF_REPOSITORY]
|
||||
for entry in hass.config_entries.async_entries(DOMAIN)
|
||||
for subentry in entry.subentries.values()
|
||||
if subentry.subentry_type == SUBENTRY_TYPE_REPOSITORY
|
||||
}
|
||||
repositories = repositories - current_repositories
|
||||
|
||||
return sorted(repositories, key=str.casefold)
|
||||
|
||||
|
||||
class GitHubConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for GitHub."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
login_task: asyncio.Task | None = None
|
||||
|
||||
@@ -106,6 +119,14 @@ class GitHubConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self._login: GitHubLoginOauthModel | None = None
|
||||
self._login_device: GitHubLoginDeviceModel | None = None
|
||||
|
||||
@classmethod
|
||||
@callback
|
||||
def async_get_supported_subentry_types(
|
||||
cls, config_entry: ConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this handler."""
|
||||
return {SUBENTRY_TYPE_REPOSITORY: RepositoryFlowHandler}
|
||||
|
||||
async def async_step_user(
|
||||
self,
|
||||
user_input: dict[str, Any] | None = None,
|
||||
@@ -153,7 +174,7 @@ class GitHubConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if self.login_task.done():
|
||||
if self.login_task.exception():
|
||||
return self.async_show_progress_done(next_step_id="could_not_register")
|
||||
return self.async_show_progress_done(next_step_id="repositories")
|
||||
return self.async_show_progress_done(next_step_id="done")
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# mypy is not aware that we can't get here without having this set already
|
||||
@@ -169,33 +190,18 @@ class GitHubConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
progress_task=self.login_task,
|
||||
)
|
||||
|
||||
async def async_step_repositories(
|
||||
async def async_step_done(
|
||||
self,
|
||||
user_input: dict[str, Any] | None = None,
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle repositories step."""
|
||||
"""Create the config entry after successful device authentication."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# mypy is not aware that we can't get here without having this set already
|
||||
assert self._login is not None
|
||||
|
||||
if not user_input:
|
||||
repositories = await get_repositories(self.hass, self._login.access_token)
|
||||
return self.async_show_form(
|
||||
step_id="repositories",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_REPOSITORIES): cv.multi_select(
|
||||
{k: k for k in repositories}
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title="",
|
||||
data={CONF_ACCESS_TOKEN: self._login.access_token},
|
||||
options={CONF_REPOSITORIES: user_input[CONF_REPOSITORIES]},
|
||||
)
|
||||
|
||||
async def async_step_could_not_register(
|
||||
@@ -205,46 +211,31 @@ class GitHubConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle issues that need transition await from progress step."""
|
||||
return self.async_abort(reason="could_not_register")
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return OptionsFlowHandler()
|
||||
|
||||
class RepositoryFlowHandler(ConfigSubentryFlow):
|
||||
"""Handle repository subentry flow."""
|
||||
|
||||
class OptionsFlowHandler(OptionsFlowWithReload):
|
||||
"""Handle a option flow for GitHub."""
|
||||
|
||||
async def async_step_init(
|
||||
self,
|
||||
user_input: dict[str, Any] | None = None,
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle options flow."""
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""Handle repository subentry flow."""
|
||||
if not user_input:
|
||||
configured_repositories: list[str] = self.config_entry.options[
|
||||
CONF_REPOSITORIES
|
||||
]
|
||||
repositories = await get_repositories(
|
||||
self.hass, self.config_entry.data[CONF_ACCESS_TOKEN]
|
||||
self.hass, self._get_entry().data[CONF_ACCESS_TOKEN]
|
||||
)
|
||||
|
||||
# In case the user has removed a starred repository that is already tracked
|
||||
for repository in configured_repositories:
|
||||
if repository not in repositories:
|
||||
repositories.append(repository)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_REPOSITORIES,
|
||||
default=configured_repositories,
|
||||
): cv.multi_select({k: k for k in repositories}),
|
||||
vol.Required(CONF_REPOSITORY): SelectSelector(
|
||||
SelectSelectorConfig(sort=True, options=repositories)
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
repository = user_input[CONF_REPOSITORY]
|
||||
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
return self.async_create_entry(
|
||||
title=repository, data=user_input, unique_id=repository
|
||||
)
|
||||
|
||||
@@ -15,6 +15,9 @@ DEFAULT_REPOSITORIES = ["home-assistant/core", "esphome/esphome"]
|
||||
FALLBACK_UPDATE_INTERVAL = timedelta(hours=1, minutes=30)
|
||||
|
||||
CONF_REPOSITORIES = "repositories"
|
||||
CONF_REPOSITORY = "repository"
|
||||
|
||||
SUBENTRY_TYPE_REPOSITORY = "repository"
|
||||
|
||||
|
||||
REFRESH_EVENT_TYPES = (
|
||||
|
||||
@@ -21,7 +21,7 @@ async def async_get_config_entry_diagnostics(
|
||||
config_entry: GithubConfigEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
data = {"options": {**config_entry.options}}
|
||||
data: dict[str, Any] = {}
|
||||
client = GitHubAPI(
|
||||
token=config_entry.data[CONF_ACCESS_TOKEN],
|
||||
session=async_get_clientsession(hass),
|
||||
@@ -38,7 +38,7 @@ async def async_get_config_entry_diagnostics(
|
||||
repositories = config_entry.runtime_data
|
||||
data["repositories"] = {}
|
||||
|
||||
for repository, coordinator in repositories.items():
|
||||
data["repositories"][repository] = coordinator.data
|
||||
for coordinator in repositories.values():
|
||||
data["repositories"][coordinator.data["full_name"]] = coordinator.data
|
||||
|
||||
return data
|
||||
|
||||
@@ -150,13 +150,14 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up GitHub sensor based on a config entry."""
|
||||
repositories = entry.runtime_data
|
||||
async_add_entities(
|
||||
(
|
||||
GitHubSensorEntity(coordinator, description)
|
||||
for description in SENSOR_DESCRIPTIONS
|
||||
for coordinator in repositories.values()
|
||||
),
|
||||
)
|
||||
for subentry_id, coordinator in repositories.items():
|
||||
async_add_entities(
|
||||
(
|
||||
GitHubSensorEntity(coordinator, description)
|
||||
for description in SENSOR_DESCRIPTIONS
|
||||
),
|
||||
config_subentry_id=subentry_id,
|
||||
)
|
||||
|
||||
|
||||
class GitHubSensorEntity(CoordinatorEntity[GitHubDataUpdateCoordinator], SensorEntity):
|
||||
|
||||
@@ -7,12 +7,26 @@
|
||||
"progress": {
|
||||
"wait_for_device": "Open {url}, and paste the following code to authorize the integration: \n```\n{code}\n```"
|
||||
},
|
||||
"step": {
|
||||
"repositories": {
|
||||
"data": {
|
||||
"repositories": "Select repositories to track."
|
||||
},
|
||||
"title": "Configure repositories"
|
||||
"step": {}
|
||||
},
|
||||
"config_subentries": {
|
||||
"repository": {
|
||||
"abort": {
|
||||
"already_configured": "Repository is already configured"
|
||||
},
|
||||
"entry_type": "[%key:component::github::config_subentries::repository::step::user::data::repository%]",
|
||||
"initiate_flow": {
|
||||
"user": "Add repository"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"repository": "Repository"
|
||||
},
|
||||
"data_description": {
|
||||
"repository": "The repository to track"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Support for Actions on Google Assistant Smart Home Control."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -21,6 +21,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the platform."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
yaml_config: ConfigType = hass.data[DOMAIN][DATA_CONFIG]
|
||||
google_config = config_entry.runtime_data
|
||||
|
||||
|
||||
@@ -54,6 +54,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleMailConfigEntry) -
|
||||
Platform.NOTIFY,
|
||||
DOMAIN,
|
||||
{DATA_AUTH: auth, CONF_NAME: entry.title},
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
hass.data[DOMAIN][DATA_HASS_CONFIG],
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""The Hisense AEH-W4A1 integration."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
import ipaddress
|
||||
import logging
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Pyaehw4a1 platform to control of Hisense AEH-W4A1 Climate Devices."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -219,6 +219,8 @@ class HiveOptionsFlowHandler(OptionsFlow):
|
||||
|
||||
schema = vol.Schema(
|
||||
{
|
||||
# Polling interval is user-configurable, which is no longer allowed
|
||||
# pylint: disable-next=hass-config-flow-polling-field
|
||||
vol.Optional(CONF_SCAN_INTERVAL, default=self.interval): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=30)
|
||||
)
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
"""The Homee lock platform."""
|
||||
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pyHomee.const import AttributeChangedBy, AttributeType
|
||||
from pyHomee.model import HomeeNode
|
||||
from pyHomee.model import HomeeAttribute, HomeeNode
|
||||
|
||||
from homeassistant.components.lock import LockEntity
|
||||
from homeassistant.components.lock import LockEntity, LockEntityFeature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -15,6 +15,24 @@ from .helpers import get_name_for_enum, setup_homee_platform
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
LOCK_STATE_UNLOCKED = 0.0
|
||||
LOCK_STATE_LOCKED = 1.0
|
||||
|
||||
|
||||
def _determine_lock_state_open(attribute: HomeeAttribute) -> float | None:
|
||||
"""Return the attribute value that momentarily unlatches the lock.
|
||||
|
||||
Different homee-compatible locks encode the "open" (unlatch) command
|
||||
differently. The Hörmann SmartKey uses a signed range {-1, 0, 1}
|
||||
where -1 is unlatch; other devices extend above with {0, 1, 2}.
|
||||
Returns None when the device only supports two states.
|
||||
"""
|
||||
if attribute.maximum == 2.0:
|
||||
return 2.0
|
||||
if attribute.minimum == -1.0:
|
||||
return -1.0
|
||||
return None
|
||||
|
||||
|
||||
async def add_lock_entities(
|
||||
config_entry: HomeeConfigEntry,
|
||||
@@ -45,20 +63,53 @@ class HomeeLock(HomeeEntity, LockEntity):
|
||||
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, attribute: HomeeAttribute, entry: HomeeConfigEntry) -> None:
|
||||
"""Initialize the homee lock."""
|
||||
super().__init__(attribute, entry)
|
||||
self._lock_state_open = _determine_lock_state_open(attribute)
|
||||
if self._lock_state_open is not None:
|
||||
self._attr_supported_features = LockEntityFeature.OPEN
|
||||
|
||||
@property
|
||||
def is_locked(self) -> bool:
|
||||
"""Return if lock is locked."""
|
||||
return self._attribute.current_value == 1.0
|
||||
return self._attribute.current_value == LOCK_STATE_LOCKED
|
||||
|
||||
@property
|
||||
def is_open(self) -> bool:
|
||||
"""Return if lock is open (unlatched)."""
|
||||
# Require target_value too, so mid-transition away from "open" resolves
|
||||
# to is_locking/is_unlocking rather than OPEN (HA state precedence).
|
||||
return (
|
||||
self._lock_state_open is not None
|
||||
and self._attribute.current_value == self._lock_state_open
|
||||
and self._attribute.target_value == self._lock_state_open
|
||||
)
|
||||
|
||||
@property
|
||||
def is_locking(self) -> bool:
|
||||
"""Return if lock is locking."""
|
||||
return self._attribute.target_value > self._attribute.current_value
|
||||
return (
|
||||
self._attribute.target_value == LOCK_STATE_LOCKED
|
||||
and self._attribute.current_value != LOCK_STATE_LOCKED
|
||||
)
|
||||
|
||||
@property
|
||||
def is_unlocking(self) -> bool:
|
||||
"""Return if lock is unlocking."""
|
||||
return self._attribute.target_value < self._attribute.current_value
|
||||
return (
|
||||
self._attribute.target_value == LOCK_STATE_UNLOCKED
|
||||
and self._attribute.current_value != LOCK_STATE_UNLOCKED
|
||||
)
|
||||
|
||||
@property
|
||||
def is_opening(self) -> bool:
|
||||
"""Return if lock is opening (unlatching)."""
|
||||
return (
|
||||
self._lock_state_open is not None
|
||||
and self._attribute.target_value == self._lock_state_open
|
||||
and self._attribute.current_value != self._lock_state_open
|
||||
)
|
||||
|
||||
@property
|
||||
def changed_by(self) -> str:
|
||||
@@ -80,8 +131,14 @@ class HomeeLock(HomeeEntity, LockEntity):
|
||||
|
||||
async def async_lock(self, **kwargs: Any) -> None:
|
||||
"""Lock specified lock. A code to lock the lock with may be specified."""
|
||||
await self.async_set_homee_value(1)
|
||||
await self.async_set_homee_value(LOCK_STATE_LOCKED)
|
||||
|
||||
async def async_unlock(self, **kwargs: Any) -> None:
|
||||
"""Unlock specified lock. A code to unlock the lock with may be specified."""
|
||||
await self.async_set_homee_value(0)
|
||||
await self.async_set_homee_value(LOCK_STATE_UNLOCKED)
|
||||
|
||||
async def async_open(self, **kwargs: Any) -> None:
|
||||
"""Open (unlatch) the lock."""
|
||||
if TYPE_CHECKING:
|
||||
assert self._lock_state_open is not None
|
||||
await self.async_set_homee_value(self._lock_state_open)
|
||||
|
||||
@@ -8,7 +8,7 @@ from contextlib import suppress
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any, NamedTuple, cast
|
||||
from typing import Any, cast
|
||||
from xml.parsers.expat import ExpatError
|
||||
|
||||
from huawei_lte_api.Client import Client
|
||||
@@ -63,6 +63,7 @@ from .const import (
|
||||
DEFAULT_MANUFACTURER,
|
||||
DEFAULT_NOTIFY_SERVICE_NAME,
|
||||
DOMAIN,
|
||||
HUAWEI_LTE_CONFIG,
|
||||
KEY_DEVICE_BASIC_INFORMATION,
|
||||
KEY_DEVICE_INFORMATION,
|
||||
KEY_DEVICE_SIGNAL,
|
||||
@@ -107,7 +108,7 @@ class Router:
|
||||
"""Class for router state."""
|
||||
|
||||
hass: HomeAssistant
|
||||
config_entry: ConfigEntry
|
||||
config_entry: HuaweiLteConfigEntry
|
||||
connection: Connection
|
||||
url: str
|
||||
|
||||
@@ -277,14 +278,10 @@ class Router:
|
||||
self.connection.requests_session.close()
|
||||
|
||||
|
||||
class HuaweiLteData(NamedTuple):
|
||||
"""Shared state."""
|
||||
|
||||
hass_config: ConfigType
|
||||
routers: dict[str, Router]
|
||||
type HuaweiLteConfigEntry = ConfigEntry[Router]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: HuaweiLteConfigEntry) -> bool:
|
||||
"""Set up Huawei LTE component from config entry."""
|
||||
url = entry.data[CONF_URL]
|
||||
|
||||
@@ -351,7 +348,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return False
|
||||
|
||||
# Store reference to router
|
||||
hass.data[DOMAIN].routers[entry.entry_id] = router
|
||||
entry.runtime_data = router
|
||||
|
||||
# Clear all subscriptions, enabled entities will push back theirs
|
||||
router.subscriptions.clear()
|
||||
@@ -416,7 +413,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
CONF_NAME: entry.options.get(CONF_NAME, DEFAULT_NOTIFY_SERVICE_NAME),
|
||||
CONF_RECIPIENT: entry.options.get(CONF_RECIPIENT),
|
||||
},
|
||||
hass.data[DOMAIN].hass_config,
|
||||
hass.data[HUAWEI_LTE_CONFIG],
|
||||
)
|
||||
|
||||
def _update_router(*_: Any) -> None:
|
||||
@@ -439,15 +436,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, config_entry: HuaweiLteConfigEntry
|
||||
) -> bool:
|
||||
"""Unload config entry."""
|
||||
|
||||
# Forward config entry unload to platforms
|
||||
await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
|
||||
|
||||
# Forget about the router and invoke its cleanup
|
||||
router = hass.data[DOMAIN].routers.pop(config_entry.entry_id)
|
||||
await hass.async_add_executor_job(router.cleanup)
|
||||
# Invoke router cleanup
|
||||
await hass.async_add_executor_job(config_entry.runtime_data.cleanup)
|
||||
|
||||
return True
|
||||
|
||||
@@ -455,8 +453,7 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Huawei LTE component."""
|
||||
|
||||
if DOMAIN not in hass.data:
|
||||
hass.data[DOMAIN] = HuaweiLteData(hass_config=config, routers={})
|
||||
hass.data[HUAWEI_LTE_CONFIG] = config
|
||||
|
||||
def service_handler(service: ServiceCall) -> None:
|
||||
"""Apply a service.
|
||||
@@ -464,21 +461,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
We key this using the router URL instead of its unique id / serial number,
|
||||
because the latter is not available anywhere in the UI.
|
||||
"""
|
||||
routers = hass.data[DOMAIN].routers
|
||||
routers = [
|
||||
entry.runtime_data
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN)
|
||||
]
|
||||
if url := service.data.get(CONF_URL):
|
||||
router = next(
|
||||
(router for router in routers.values() if router.url == url), None
|
||||
)
|
||||
router = next((router for router in routers if router.url == url), None)
|
||||
elif not routers:
|
||||
_LOGGER.error("%s: no routers configured", service.service)
|
||||
return
|
||||
elif len(routers) == 1:
|
||||
router = next(iter(routers.values()))
|
||||
router = routers[0]
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"%s: more than one router configured, must specify one of URLs %s",
|
||||
service.service,
|
||||
sorted(router.url for router in routers.values()),
|
||||
sorted(router.url for router in routers),
|
||||
)
|
||||
return
|
||||
if not router:
|
||||
@@ -508,7 +506,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, config_entry: HuaweiLteConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate config entry to new version."""
|
||||
if config_entry.version == 1:
|
||||
options = dict(config_entry.options)
|
||||
|
||||
@@ -12,13 +12,12 @@ from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import HuaweiLteConfigEntry
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
KEY_MONITORING_CHECK_NOTIFICATIONS,
|
||||
KEY_MONITORING_STATUS,
|
||||
KEY_WLAN_WIFI_FEATURE_SWITCH,
|
||||
@@ -30,11 +29,11 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: HuaweiLteConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up from config entry."""
|
||||
router = hass.data[DOMAIN].routers[config_entry.entry_id]
|
||||
router = config_entry.runtime_data
|
||||
entities: list[Entity] = []
|
||||
|
||||
if router.data.get(KEY_MONITORING_STATUS):
|
||||
|
||||
@@ -11,12 +11,11 @@ from homeassistant.components.button import (
|
||||
ButtonEntity,
|
||||
ButtonEntityDescription,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_platform
|
||||
|
||||
from .const import DOMAIN
|
||||
from . import HuaweiLteConfigEntry
|
||||
from .entity import HuaweiLteBaseEntityWithDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -24,11 +23,11 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: HuaweiLteConfigEntry,
|
||||
async_add_entities: entity_platform.AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Huawei LTE buttons."""
|
||||
router = hass.data[DOMAIN].routers[config_entry.entry_id]
|
||||
router = config_entry.runtime_data
|
||||
buttons = [
|
||||
ClearTrafficStatisticsButton(router),
|
||||
RestartButton(router),
|
||||
|
||||
@@ -21,12 +21,7 @@ from requests.exceptions import SSLError, Timeout
|
||||
from url_normalize import url_normalize
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
|
||||
from homeassistant.const import (
|
||||
CONF_MAC,
|
||||
CONF_NAME,
|
||||
@@ -47,6 +42,7 @@ from homeassistant.helpers.service_info.ssdp import (
|
||||
SsdpServiceInfo,
|
||||
)
|
||||
|
||||
from . import HuaweiLteConfigEntry
|
||||
from .const import (
|
||||
CONF_MANUFACTURER,
|
||||
CONF_TRACK_WIRED_CLIENTS,
|
||||
@@ -76,7 +72,7 @@ class HuaweiLteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: HuaweiLteConfigEntry,
|
||||
) -> HuaweiLteOptionsFlow:
|
||||
"""Get options flow."""
|
||||
return HuaweiLteOptionsFlow()
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
"""Huawei LTE constants."""
|
||||
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
DOMAIN = "huawei_lte"
|
||||
|
||||
HUAWEI_LTE_CONFIG: HassKey[ConfigType] = HassKey(DOMAIN)
|
||||
|
||||
CONF_MANUFACTURER = "manufacturer"
|
||||
CONF_TRACK_WIRED_CLIENTS = "track_wired_clients"
|
||||
CONF_UNAUTHENTICATED_MODE = "unauthenticated_mode"
|
||||
|
||||
@@ -9,7 +9,6 @@ from homeassistant.components.device_tracker import (
|
||||
DOMAIN as DEVICE_TRACKER_DOMAIN,
|
||||
ScannerEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
@@ -17,11 +16,10 @@ from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util import snakecase
|
||||
|
||||
from . import Router
|
||||
from . import HuaweiLteConfigEntry, Router
|
||||
from .const import (
|
||||
CONF_TRACK_WIRED_CLIENTS,
|
||||
DEFAULT_TRACK_WIRED_CLIENTS,
|
||||
DOMAIN,
|
||||
KEY_LAN_HOST_INFO,
|
||||
KEY_WLAN_HOST_LIST,
|
||||
UPDATE_SIGNAL,
|
||||
@@ -50,7 +48,7 @@ def _get_hosts(
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: HuaweiLteConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up from config entry."""
|
||||
@@ -58,7 +56,7 @@ async def async_setup_entry(
|
||||
# Grab hosts list once to examine whether the initial fetch has got some data for
|
||||
# us, i.e. if wlan host list is supported. Only set up a subscription and proceed
|
||||
# with adding and tracking entities if it is.
|
||||
router = hass.data[DOMAIN].routers[config_entry.entry_id]
|
||||
router = config_entry.runtime_data
|
||||
if (hosts := _get_hosts(router, True)) is None:
|
||||
return
|
||||
|
||||
|
||||
@@ -5,10 +5,9 @@ from __future__ import annotations
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
from . import HuaweiLteConfigEntry
|
||||
|
||||
ENTRY_FIELDS_DATA_TO_REDACT = {
|
||||
"mac",
|
||||
@@ -74,13 +73,13 @@ TO_REDACT = {
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: ConfigEntry
|
||||
hass: HomeAssistant, entry: HuaweiLteConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
return async_redact_data(
|
||||
{
|
||||
"entry": entry.data,
|
||||
"router": hass.data[DOMAIN].routers[entry.entry_id].data,
|
||||
"router": entry.runtime_data.data,
|
||||
},
|
||||
TO_REDACT,
|
||||
)
|
||||
|
||||
@@ -12,8 +12,7 @@ from homeassistant.const import ATTR_CONFIG_ENTRY_ID, CONF_RECIPIENT
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import Router
|
||||
from .const import DOMAIN
|
||||
from . import HuaweiLteConfigEntry, Router
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -27,7 +26,11 @@ async def async_get_service(
|
||||
if discovery_info is None:
|
||||
return None
|
||||
|
||||
router = hass.data[DOMAIN].routers[discovery_info[ATTR_CONFIG_ENTRY_ID]]
|
||||
entry: HuaweiLteConfigEntry | None = hass.config_entries.async_get_entry(
|
||||
discovery_info[ATTR_CONFIG_ENTRY_ID]
|
||||
)
|
||||
assert entry is not None
|
||||
router = entry.runtime_data
|
||||
default_targets = discovery_info[CONF_RECIPIENT] or []
|
||||
|
||||
return HuaweiLteSmsNotificationService(router, default_targets)
|
||||
|
||||
@@ -22,7 +22,7 @@ rules:
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: todo
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
@@ -6,6 +6,7 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from huawei_lte_api.enums.net import LTEBandEnum, NetworkBandEnum, NetworkModeEnum
|
||||
|
||||
@@ -14,14 +15,13 @@ from homeassistant.components.select import (
|
||||
SelectEntity,
|
||||
SelectEntityDescription,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import Router
|
||||
from .const import DOMAIN, KEY_NET_NET_MODE
|
||||
from . import HuaweiLteConfigEntry, Router
|
||||
from .const import KEY_NET_NET_MODE
|
||||
from .entity import HuaweiLteBaseEntityWithDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -31,16 +31,16 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class HuaweiSelectEntityDescription(SelectEntityDescription):
|
||||
"""Class describing Huawei LTE select entities."""
|
||||
|
||||
setter_fn: Callable[[str], None]
|
||||
setter_fn: Callable[[str], Any]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: HuaweiLteConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up from config entry."""
|
||||
router = hass.data[DOMAIN].routers[config_entry.entry_id]
|
||||
router = config_entry.runtime_data
|
||||
selects: list[Entity] = []
|
||||
|
||||
desc = HuaweiSelectEntityDescription(
|
||||
|
||||
@@ -17,7 +17,6 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
PERCENTAGE,
|
||||
EntityCategory,
|
||||
@@ -31,9 +30,8 @@ from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from . import Router
|
||||
from . import HuaweiLteConfigEntry, Router
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
KEY_DEVICE_INFORMATION,
|
||||
KEY_DEVICE_SIGNAL,
|
||||
KEY_MONITORING_CHECK_NOTIFICATIONS,
|
||||
@@ -795,11 +793,11 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: HuaweiLteConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up from config entry."""
|
||||
router = hass.data[DOMAIN].routers[config_entry.entry_id]
|
||||
router = config_entry.runtime_data
|
||||
sensors: list[Entity] = []
|
||||
for key in SENSOR_KEYS:
|
||||
if not (items := router.data.get(key)):
|
||||
|
||||
@@ -10,16 +10,12 @@ from homeassistant.components.switch import (
|
||||
SwitchDeviceClass,
|
||||
SwitchEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
KEY_DIALUP_MOBILE_DATASWITCH,
|
||||
KEY_WLAN_WIFI_GUEST_NETWORK_SWITCH,
|
||||
)
|
||||
from . import HuaweiLteConfigEntry
|
||||
from .const import KEY_DIALUP_MOBILE_DATASWITCH, KEY_WLAN_WIFI_GUEST_NETWORK_SWITCH
|
||||
from .entity import HuaweiLteBaseEntityWithDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -27,11 +23,11 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: HuaweiLteConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up from config entry."""
|
||||
router = hass.data[DOMAIN].routers[config_entry.entry_id]
|
||||
router = config_entry.runtime_data
|
||||
switches: list[Entity] = []
|
||||
|
||||
if router.data.get(KEY_DIALUP_MOBILE_DATASWITCH):
|
||||
|
||||
@@ -16,5 +16,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["bleak", "HueBLE"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["HueBLE==2.1.0"]
|
||||
"requirements": ["HueBLE==2.2.2"]
|
||||
}
|
||||
|
||||
@@ -27,7 +27,7 @@ async def async_get_media_source(hass: HomeAssistant) -> ImageUploadMediaSource:
|
||||
class ImageUploadMediaSource(MediaSource):
|
||||
"""Provide images as media sources."""
|
||||
|
||||
name: str = "Image Upload"
|
||||
name: str = "Image upload"
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize ImageMediaSource."""
|
||||
@@ -79,7 +79,7 @@ class ImageUploadMediaSource(MediaSource):
|
||||
identifier=None,
|
||||
media_class=MediaClass.APP,
|
||||
media_content_type="",
|
||||
title="Image Upload",
|
||||
title="Image upload",
|
||||
can_play=False,
|
||||
can_expand=True,
|
||||
children_media_class=MediaClass.IMAGE,
|
||||
|
||||
@@ -43,7 +43,6 @@ NUMBERS: Final = (
|
||||
native_max_value=100,
|
||||
native_step=1,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=NumberDeviceClass.BATTERY,
|
||||
),
|
||||
IndevoltNumberEntityDescription(
|
||||
key="max_ac_output_power",
|
||||
|
||||
@@ -69,10 +69,8 @@ SENSORS: Final = (
|
||||
IndevoltSensorEntityDescription(
|
||||
key="6105",
|
||||
generation=[1],
|
||||
translation_key="rated_capacity",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
translation_key="discharge_limit",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
),
|
||||
IndevoltSensorEntityDescription(
|
||||
key="2101",
|
||||
|
||||
@@ -223,6 +223,9 @@
|
||||
"dc_output_power": {
|
||||
"name": "DC output power"
|
||||
},
|
||||
"discharge_limit": {
|
||||
"name": "[%key:component::indevolt::entity::number::discharge_limit::name%]"
|
||||
},
|
||||
"energy_mode": {
|
||||
"name": "Energy mode",
|
||||
"state": {
|
||||
|
||||
@@ -6,7 +6,7 @@ import logging
|
||||
from pyinsteon import async_close, async_connect, devices
|
||||
from pyinsteon.constants import ReadWriteMode
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_PLATFORM, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
@@ -33,7 +33,6 @@ from .utils import (
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
OPTIONS = "options"
|
||||
|
||||
|
||||
async def async_get_device_config(hass, config_entry):
|
||||
@@ -77,12 +76,10 @@ async def close_insteon_connection(*args):
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up an Insteon entry."""
|
||||
|
||||
if dev_path := entry.options.get(CONF_DEV_PATH):
|
||||
hass.data[DOMAIN] = {}
|
||||
hass.data[DOMAIN][CONF_DEV_PATH] = dev_path
|
||||
|
||||
api.async_load_api(hass)
|
||||
await api.async_register_insteon_frontend(hass)
|
||||
await api.async_register_insteon_frontend(
|
||||
hass, entry.options.get(CONF_DEV_PATH) or None
|
||||
)
|
||||
|
||||
if not devices.modem:
|
||||
try:
|
||||
@@ -99,19 +96,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
workdir=hass.config.config_dir, id_devices=0, load_modem_aldb=0
|
||||
)
|
||||
|
||||
# If options existed in YAML and have not already been saved to the config entry
|
||||
# add them now
|
||||
if (
|
||||
not entry.options
|
||||
and entry.source == SOURCE_IMPORT
|
||||
and hass.data.get(DOMAIN)
|
||||
and hass.data[DOMAIN].get(OPTIONS)
|
||||
):
|
||||
hass.config_entries.async_update_entry(
|
||||
entry=entry,
|
||||
options=hass.data[DOMAIN][OPTIONS],
|
||||
)
|
||||
|
||||
for device_override in entry.options.get(CONF_OVERRIDE, []):
|
||||
# Override the device default capabilities for a specific address
|
||||
address = device_override.get("address")
|
||||
|
||||
@@ -3,10 +3,11 @@
|
||||
from insteon_frontend import get_build_id, locate_dir
|
||||
|
||||
from homeassistant.components import panel_custom, websocket_api
|
||||
from homeassistant.components.frontend import async_panel_exists
|
||||
from homeassistant.components.http import StaticPathConfig
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from ..const import CONF_DEV_PATH, DOMAIN
|
||||
from ..const import DOMAIN
|
||||
from .aldb import (
|
||||
websocket_add_default_links,
|
||||
websocket_change_aldb_record,
|
||||
@@ -90,11 +91,12 @@ def async_load_api(hass):
|
||||
websocket_api.async_register_command(hass, websocket_get_unknown_devices)
|
||||
|
||||
|
||||
async def async_register_insteon_frontend(hass: HomeAssistant):
|
||||
async def async_register_insteon_frontend(
|
||||
hass: HomeAssistant, dev_path: str | None = None
|
||||
) -> None:
|
||||
"""Register the Insteon frontend configuration panel."""
|
||||
# Add to sidepanel if needed
|
||||
if DOMAIN not in hass.data.get("frontend_panels", {}):
|
||||
dev_path = hass.data.get(DOMAIN, {}).get(CONF_DEV_PATH)
|
||||
if not async_panel_exists(hass, DOMAIN):
|
||||
is_dev = dev_path is not None
|
||||
path = dev_path or locate_dir()
|
||||
build_id = get_build_id(is_dev)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Native Home Assistant iOS app component."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
import datetime
|
||||
from http import HTTPStatus
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_SCAN_INTERVAL, Platform
|
||||
from homeassistant.const import CONF_SCAN_INTERVAL, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
|
||||
@@ -17,7 +17,6 @@ from .const import (
|
||||
DEFAULT_CONSIDER_HOME,
|
||||
DEFAULT_INTERFACE,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
)
|
||||
from .router import KeeneticConfigEntry, KeeneticRouter
|
||||
|
||||
@@ -27,7 +26,6 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: KeeneticConfigEntry) -> bool:
|
||||
"""Set up the component."""
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
async_add_defaults(hass, entry)
|
||||
|
||||
router = KeeneticRouter(hass, entry)
|
||||
@@ -85,10 +83,8 @@ async def async_unload_entry(
|
||||
return unload_ok
|
||||
|
||||
|
||||
def async_add_defaults(hass: HomeAssistant, entry: KeeneticConfigEntry):
|
||||
def async_add_defaults(hass: HomeAssistant, entry: KeeneticConfigEntry) -> None:
|
||||
"""Populate default options."""
|
||||
host: str = entry.data[CONF_HOST]
|
||||
imported_options: dict = hass.data[DOMAIN].get(f"imported_options_{host}", {})
|
||||
options = {
|
||||
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
|
||||
CONF_CONSIDER_HOME: DEFAULT_CONSIDER_HOME,
|
||||
@@ -96,7 +92,6 @@ def async_add_defaults(hass: HomeAssistant, entry: KeeneticConfigEntry):
|
||||
CONF_TRY_HOTSPOT: True,
|
||||
CONF_INCLUDE_ARP: True,
|
||||
CONF_INCLUDE_ASSOCIATED: True,
|
||||
**imported_options,
|
||||
**entry.options,
|
||||
}
|
||||
|
||||
|
||||
@@ -198,6 +198,8 @@ class KeeneticOptionsFlowHandler(OptionsFlowWithReload):
|
||||
|
||||
options = vol.Schema(
|
||||
{
|
||||
# Polling interval is user-configurable, which is no longer allowed
|
||||
# pylint: disable-next=hass-config-flow-polling-field
|
||||
vol.Required(
|
||||
CONF_SCAN_INTERVAL,
|
||||
default=self.config_entry.options.get(
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from xknx.devices import Device as XknxDevice
|
||||
@@ -20,6 +21,15 @@ if TYPE_CHECKING:
|
||||
from .knx_module import KNXModule
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class KnxEntityIdentifier:
|
||||
"""Class to identify KNX entities in KNX frontend."""
|
||||
|
||||
platform: str
|
||||
unique_id: str
|
||||
ui: bool # ui or yaml entity
|
||||
|
||||
|
||||
class KnxUiEntityPlatformController(PlatformControllerBase):
|
||||
"""Class to manage dynamic adding and reloading of UI entities."""
|
||||
|
||||
@@ -57,6 +67,8 @@ class _KnxEntityBase(Entity):
|
||||
_knx_module: KNXModule
|
||||
_device: XknxDevice
|
||||
|
||||
_knx_entity_identifier: KnxEntityIdentifier | None = None
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
@@ -75,10 +87,16 @@ class _KnxEntityBase(Entity):
|
||||
self._device.register_device_updated_cb(self.after_update_callback)
|
||||
self._device.xknx.devices.async_add(self._device)
|
||||
if uid := self.unique_id:
|
||||
self._knx_entity_identifier = KnxEntityIdentifier(
|
||||
platform=self.platform_data.domain,
|
||||
unique_id=uid,
|
||||
ui=isinstance(self, KnxUiEntity),
|
||||
)
|
||||
self._knx_module.add_to_group_address_entities(
|
||||
group_addresses=self._device.group_addresses(),
|
||||
identifier=(self.platform_data.domain, uid),
|
||||
identifier=self._knx_entity_identifier,
|
||||
)
|
||||
|
||||
# super call needed to have methods of multi-inherited classes called
|
||||
# eg. for restoring state (like _KNXSwitch)
|
||||
await super().async_added_to_hass()
|
||||
@@ -87,10 +105,10 @@ class _KnxEntityBase(Entity):
|
||||
"""Disconnect device object when removed."""
|
||||
self._device.unregister_device_updated_cb(self.after_update_callback)
|
||||
self._device.xknx.devices.async_remove(self._device)
|
||||
if uid := self.unique_id:
|
||||
if self._knx_entity_identifier:
|
||||
self._knx_module.remove_from_group_address_entities(
|
||||
group_addresses=self._device.group_addresses(),
|
||||
identifier=(self.platform_data.domain, uid),
|
||||
identifier=self._knx_entity_identifier,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -54,6 +54,7 @@ from .const import (
|
||||
TELEGRAM_LOG_DEFAULT,
|
||||
)
|
||||
from .device import KNXInterfaceDevice
|
||||
from .entity import KnxEntityIdentifier
|
||||
from .expose import KnxExposeEntity, KnxExposeTime
|
||||
from .project import KNXProject
|
||||
from .repairs import data_secure_group_key_issue_dispatcher
|
||||
@@ -113,7 +114,7 @@ class KNXModule:
|
||||
self._address_filter_transcoder: dict[AddressFilter, type[DPTBase]] = {}
|
||||
self.group_address_transcoder: dict[DeviceGroupAddress, type[DPTBase]] = {}
|
||||
self.group_address_entities: dict[
|
||||
DeviceGroupAddress, set[tuple[str, str]] # {(platform, unique_id),}
|
||||
DeviceGroupAddress, set[KnxEntityIdentifier]
|
||||
] = {}
|
||||
self.knx_event_callback: TelegramQueue.Callback = self.register_event_callback()
|
||||
|
||||
@@ -237,7 +238,7 @@ class KNXModule:
|
||||
def add_to_group_address_entities(
|
||||
self,
|
||||
group_addresses: set[DeviceGroupAddress],
|
||||
identifier: tuple[str, str], # (platform, unique_id)
|
||||
identifier: KnxEntityIdentifier,
|
||||
) -> None:
|
||||
"""Register entity in group_address_entities map."""
|
||||
for ga in group_addresses:
|
||||
@@ -248,7 +249,7 @@ class KNXModule:
|
||||
def remove_from_group_address_entities(
|
||||
self,
|
||||
group_addresses: set[DeviceGroupAddress],
|
||||
identifier: tuple[str, str],
|
||||
identifier: KnxEntityIdentifier,
|
||||
) -> None:
|
||||
"""Unregister entity from group_address_entities map."""
|
||||
for ga in group_addresses:
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"requirements": [
|
||||
"xknx==3.15.0",
|
||||
"xknxproject==3.8.2",
|
||||
"knx-frontend==2026.4.19.175239"
|
||||
"knx-frontend==2026.4.22.141111"
|
||||
],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -155,8 +155,10 @@
|
||||
},
|
||||
"config_panel": {
|
||||
"common": {
|
||||
"exposes_count": "Exposes: {count}",
|
||||
"group_address": "Group address",
|
||||
"group_addresses": "Group addresses"
|
||||
"group_addresses": "Group addresses",
|
||||
"monitor_x_group_addresses": "Monitor {count} group addresses"
|
||||
},
|
||||
"dashboard": {
|
||||
"connection_flow": {
|
||||
|
||||
@@ -14,6 +14,7 @@ from xknx.telegram import Telegram
|
||||
from xknxproject.exceptions import XknxProjectException
|
||||
|
||||
from homeassistant.components import panel_custom, websocket_api
|
||||
from homeassistant.components.frontend import async_panel_exists
|
||||
from homeassistant.components.http import StaticPathConfig
|
||||
from homeassistant.const import CONF_ENTITY_ID, CONF_PLATFORM, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -64,7 +65,7 @@ async def register_panel(hass: HomeAssistant) -> None:
|
||||
websocket_api.async_register_command(hass, ws_update_entity)
|
||||
websocket_api.async_register_command(hass, ws_delete_entity)
|
||||
websocket_api.async_register_command(hass, ws_get_entity_config)
|
||||
websocket_api.async_register_command(hass, ws_get_entity_entries)
|
||||
websocket_api.async_register_command(hass, ws_get_entities_by_group)
|
||||
websocket_api.async_register_command(hass, ws_create_device)
|
||||
websocket_api.async_register_command(hass, ws_get_schema)
|
||||
websocket_api.async_register_command(hass, ws_get_time_server_config)
|
||||
@@ -75,7 +76,7 @@ async def register_panel(hass: HomeAssistant) -> None:
|
||||
websocket_api.async_register_command(hass, ws_delete_expose)
|
||||
websocket_api.async_register_command(hass, ws_validate_expose)
|
||||
|
||||
if DOMAIN not in hass.data.get("frontend_panels", {}):
|
||||
if not async_panel_exists(hass, DOMAIN):
|
||||
await hass.http.async_register_static_paths(
|
||||
[
|
||||
StaticPathConfig(
|
||||
@@ -517,22 +518,22 @@ async def ws_delete_entity(
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "knx/get_entity_entries",
|
||||
vol.Required("type"): "knx/get_entities_by_group",
|
||||
}
|
||||
)
|
||||
@provide_knx
|
||||
@callback
|
||||
def ws_get_entity_entries(
|
||||
def ws_get_entities_by_group(
|
||||
hass: HomeAssistant,
|
||||
knx: KNXModule,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
) -> None:
|
||||
"""Get entities configured from entity store."""
|
||||
entity_entries = [
|
||||
entry.extended_dict for entry in knx.config_store.get_entity_entries()
|
||||
]
|
||||
connection.send_result(msg["id"], entity_entries)
|
||||
"""Get entities by group address."""
|
||||
data = {
|
||||
str(ga): identifiers for ga, identifiers in knx.group_address_entities.items()
|
||||
}
|
||||
connection.send_result(msg["id"], data)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Support for Konnected devices."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
import copy
|
||||
import hmac
|
||||
|
||||
@@ -24,6 +24,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up binary sensors attached to a Konnected device from a config entry."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
data = hass.data[DOMAIN]
|
||||
device_id = config_entry.data["id"]
|
||||
sensors = [
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Support for Konnected devices."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
@@ -46,6 +46,8 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up sensors attached to a Konnected device from a config entry."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
data = hass.data[DOMAIN]
|
||||
device_id = config_entry.data["id"]
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Support for wired switches attached to a Konnected device."""
|
||||
# pylint: disable=hass-use-runtime-data # Uses legacy hass.data[DOMAIN] pattern
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -2,39 +2,38 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_SCAN_INTERVAL, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
|
||||
from .const import DISPATCH_CONFIG_UPDATED, DOMAIN
|
||||
from .coordinator import KrakenData
|
||||
from .const import DISPATCH_CONFIG_UPDATED
|
||||
from .coordinator import KrakenConfigEntry, KrakenData
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: KrakenConfigEntry) -> bool:
|
||||
"""Set up kraken from a config entry."""
|
||||
kraken_data = KrakenData(hass, entry)
|
||||
await kraken_data.async_setup()
|
||||
hass.data[DOMAIN] = kraken_data
|
||||
entry.runtime_data = kraken_data
|
||||
entry.async_on_unload(entry.add_update_listener(async_options_updated))
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, config_entry: KrakenConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(
|
||||
config_entry, PLATFORMS
|
||||
)
|
||||
if unload_ok:
|
||||
hass.data.pop(DOMAIN)
|
||||
|
||||
return unload_ok
|
||||
return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_options_updated(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
async def async_options_updated(
|
||||
hass: HomeAssistant, config_entry: KrakenConfigEntry
|
||||
) -> None:
|
||||
"""Triggered by config entry options updates."""
|
||||
hass.data[DOMAIN].set_update_interval(config_entry.options[CONF_SCAN_INTERVAL])
|
||||
config_entry.runtime_data.set_update_interval(
|
||||
config_entry.options[CONF_SCAN_INTERVAL]
|
||||
)
|
||||
async_dispatcher_send(hass, DISPATCH_CONFIG_UPDATED, hass, config_entry)
|
||||
|
||||
@@ -8,17 +8,13 @@ import krakenex
|
||||
from pykrakenapi.pykrakenapi import KrakenAPI
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
|
||||
from homeassistant.const import CONF_SCAN_INTERVAL
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import CONF_TRACKED_ASSET_PAIRS, DEFAULT_SCAN_INTERVAL, DOMAIN
|
||||
from .coordinator import KrakenConfigEntry
|
||||
from .utils import get_tradable_asset_pairs
|
||||
|
||||
|
||||
@@ -30,7 +26,7 @@ class KrakenConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: KrakenConfigEntry,
|
||||
) -> KrakenOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return KrakenOptionsFlowHandler()
|
||||
@@ -79,6 +75,8 @@ class KrakenOptionsFlowHandler(OptionsFlow):
|
||||
)
|
||||
|
||||
options = {
|
||||
# Polling interval is user-configurable, which is no longer allowed
|
||||
# pylint: disable-next=hass-config-flow-polling-field
|
||||
vol.Optional(
|
||||
CONF_SCAN_INTERVAL,
|
||||
default=self.config_entry.options.get(
|
||||
|
||||
@@ -28,10 +28,13 @@ CALL_RATE_LIMIT_SLEEP = 1
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
type KrakenConfigEntry = ConfigEntry[KrakenData]
|
||||
|
||||
|
||||
class KrakenData:
|
||||
"""Define an object to hold kraken data."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
def __init__(self, hass: HomeAssistant, config_entry: KrakenConfigEntry) -> None:
|
||||
"""Initialize."""
|
||||
self._hass = hass
|
||||
self._config_entry = config_entry
|
||||
|
||||
@@ -11,7 +11,6 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
@@ -28,7 +27,7 @@ from .const import (
|
||||
DOMAIN,
|
||||
KrakenResponse,
|
||||
)
|
||||
from .coordinator import KrakenData
|
||||
from .coordinator import KrakenConfigEntry, KrakenData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -138,7 +137,7 @@ SENSOR_TYPES: tuple[KrakenSensorEntityDescription, ...] = (
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: KrakenConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Add kraken entities from a config_entry."""
|
||||
@@ -149,7 +148,7 @@ async def async_setup_entry(
|
||||
entities.extend(
|
||||
[
|
||||
KrakenSensor(
|
||||
hass.data[DOMAIN],
|
||||
config_entry.runtime_data,
|
||||
tracked_asset_pair,
|
||||
description,
|
||||
)
|
||||
@@ -161,7 +160,9 @@ async def async_setup_entry(
|
||||
_async_add_kraken_sensors(config_entry.options[CONF_TRACKED_ASSET_PAIRS])
|
||||
|
||||
@callback
|
||||
def async_update_sensors(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
def async_update_sensors(
|
||||
hass: HomeAssistant, config_entry: KrakenConfigEntry
|
||||
) -> None:
|
||||
"""Add or remove sensors for configured tracked asset pairs."""
|
||||
dev_reg = dr.async_get(hass)
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user