Mirror of https://github.com/home-assistant/core.git
Synced 2025-09-20 10:29:26 +00:00

Compare commits: async_curr... llm-python (1 commit)

Commit 176f9c9f94
@@ -8,8 +8,6 @@
"PYTHONASYNCIODEBUG": "1"
},
"features": {
// Node feature required for Claude Code until fixed https://github.com/anthropics/devcontainer-features/issues/28
"ghcr.io/devcontainers/features/node:1": {},
"ghcr.io/anthropics/devcontainer-features/claude-code:1.0": {},
"ghcr.io/devcontainers/features/github-cli:1": {}
},
@@ -14,8 +14,7 @@ tests

# Other virtualization methods
venv
.venv
.vagrant

# Temporary files
**/__pycache__
**/__pycache__
19 .github/copilot-instructions.md (vendored)
@@ -1073,11 +1073,7 @@ async def test_flow_connection_error(hass, mock_api_error):

### Entity Testing Patterns
```python
@pytest.fixture
def platforms() -> list[Platform]:
"""Overridden fixture to specify platforms to test."""
return [Platform.SENSOR] # Or another specific platform as needed.

@pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True)
@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_entities(
hass: HomeAssistant,
@@ -1124,25 +1120,16 @@ def mock_device_api() -> Generator[MagicMock]:
)
yield api

@pytest.fixture
def platforms() -> list[Platform]:
"""Fixture to specify platforms to test."""
return PLATFORMS

@pytest.fixture
async def init_integration(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_device_api: MagicMock,
platforms: list[Platform],
) -> MockConfigEntry:
"""Set up the integration for testing."""
mock_config_entry.add_to_hass(hass)

with patch("homeassistant.components.my_integration.PLATFORMS", platforms):
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()

await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
return mock_config_entry
```
10 .github/workflows/builder.yml (vendored)
@@ -32,7 +32,7 @@ jobs:
fetch-depth: 0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -116,7 +116,7 @@ jobs:

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -457,7 +457,7 @@ jobs:
uses: actions/checkout@v5.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -480,7 +480,7 @@ jobs:
python -m build

- name: Upload package to PyPI
uses: pypa/gh-action-pypi-publish@v1.13.0
uses: pypa/gh-action-pypi-publish@v1.12.4
with:
skip-existing: true

@@ -531,7 +531,7 @@ jobs:

- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
49 .github/workflows/ci.yaml (vendored)
@@ -37,10 +37,10 @@ on:
type: boolean

env:
CACHE_VERSION: 7
CACHE_VERSION: 5
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.10"
HA_SHORT_VERSION: "2025.9"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -249,7 +249,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -294,7 +294,7 @@ jobs:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
@@ -334,7 +334,7 @@ jobs:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
@@ -374,7 +374,7 @@ jobs:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
@@ -484,7 +484,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -517,7 +517,6 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-
|
||||
- name: Install additional OS dependencies
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
@@ -579,7 +578,6 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
@@ -589,7 +587,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -622,7 +620,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -655,7 +653,7 @@ jobs:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Dependency review
|
||||
uses: actions/dependency-review-action@v4.7.3
|
||||
uses: actions/dependency-review-action@v4.7.1
|
||||
with:
|
||||
license-check: false # We use our own license audit checks
|
||||
|
||||
@@ -679,7 +677,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -722,7 +720,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -769,7 +767,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -814,7 +812,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -879,7 +877,6 @@ jobs:
|
||||
name: Split tests for full run
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
@@ -892,7 +889,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -940,7 +937,6 @@ jobs:
|
||||
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
@@ -954,7 +950,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -1074,7 +1070,6 @@ jobs:
|
||||
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
@@ -1088,7 +1083,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -1215,7 +1210,6 @@ jobs:
|
||||
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
@@ -1231,7 +1225,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -1347,7 +1341,7 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
uses: codecov/codecov-action@v5.5.1
|
||||
uses: codecov/codecov-action@v5.4.3
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
@@ -1377,7 +1371,6 @@ jobs:
|
||||
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
@@ -1391,7 +1384,7 @@ jobs:
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -1498,7 +1491,7 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
uses: codecov/codecov-action@v5.5.1
|
||||
uses: codecov/codecov-action@v5.4.3
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
4 .github/workflows/codeql.yml (vendored)
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v5.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.30.1
uses: github/codeql-action/init@v3.29.9
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.30.1
uses: github/codeql-action/analyze@v3.29.9
with:
category: "/language:python"
@@ -16,7 +16,7 @@ jobs:
|
||||
steps:
|
||||
- name: Check if integration label was added and extract details
|
||||
id: extract
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@v7.0.1
|
||||
with:
|
||||
script: |
|
||||
// Debug: Log the event payload
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
- name: Fetch similar issues
|
||||
id: fetch_similar
|
||||
if: steps.extract.outputs.should_continue == 'true'
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@v7.0.1
|
||||
env:
|
||||
INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
|
||||
CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
- name: Detect duplicates using AI
|
||||
id: ai_detection
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/ai-inference@v2.0.1
|
||||
uses: actions/ai-inference@v2.0.0
|
||||
with:
|
||||
model: openai/gpt-4o
|
||||
system-prompt: |
|
||||
@@ -280,7 +280,7 @@ jobs:
|
||||
- name: Post duplicate detection results
|
||||
id: post_results
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@v7.0.1
|
||||
env:
|
||||
AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
|
||||
SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
|
||||
|
@@ -16,7 +16,7 @@ jobs:
|
||||
steps:
|
||||
- name: Check issue language
|
||||
id: detect_language
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@v7.0.1
|
||||
env:
|
||||
ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: Detect language using AI
|
||||
id: ai_language_detection
|
||||
if: steps.detect_language.outputs.should_continue == 'true'
|
||||
uses: actions/ai-inference@v2.0.1
|
||||
uses: actions/ai-inference@v2.0.0
|
||||
with:
|
||||
model: openai/gpt-4o-mini
|
||||
system-prompt: |
|
||||
@@ -90,7 +90,7 @@ jobs:
|
||||
|
||||
- name: Process non-English issues
|
||||
if: steps.detect_language.outputs.should_continue == 'true'
|
||||
uses: actions/github-script@v8
|
||||
uses: actions/github-script@v7.0.1
|
||||
env:
|
||||
AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
|
||||
ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
|
||||
|
2 .github/workflows/restrict-task-creation.yml (vendored)
@@ -12,7 +12,7 @@ jobs:
if: github.event.issue.type.name == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@v8
uses: actions/github-script@v7
with:
script: |
const issueAuthor = context.payload.issue.user.login;
6 .github/workflows/stale.yml (vendored)
@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale
# - No issues (-1)
- name: 60 days stale PRs policy
uses: actions/stale@v10.0.0
uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@v10.0.0
uses: actions/stale@v9.1.0
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@v10.0.0
uses: actions/stale@v9.1.0
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"
2 .github/workflows/translations.yml (vendored)
@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v5.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
2 .github/workflows/wheels.yml (vendored)
@@ -36,7 +36,7 @@ jobs:

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -169,7 +169,6 @@ homeassistant.components.dnsip.*
|
||||
homeassistant.components.doorbird.*
|
||||
homeassistant.components.dormakaba_dkey.*
|
||||
homeassistant.components.downloader.*
|
||||
homeassistant.components.droplet.*
|
||||
homeassistant.components.dsmr.*
|
||||
homeassistant.components.duckdns.*
|
||||
homeassistant.components.dunehd.*
|
||||
@@ -308,7 +307,6 @@ homeassistant.components.ld2410_ble.*
|
||||
homeassistant.components.led_ble.*
|
||||
homeassistant.components.lektrico.*
|
||||
homeassistant.components.letpot.*
|
||||
homeassistant.components.libre_hardware_monitor.*
|
||||
homeassistant.components.lidarr.*
|
||||
homeassistant.components.lifx.*
|
||||
homeassistant.components.light.*
|
||||
@@ -384,7 +382,6 @@ homeassistant.components.openai_conversation.*
|
||||
homeassistant.components.openexchangerates.*
|
||||
homeassistant.components.opensky.*
|
||||
homeassistant.components.openuv.*
|
||||
homeassistant.components.opnsense.*
|
||||
homeassistant.components.opower.*
|
||||
homeassistant.components.oralb.*
|
||||
homeassistant.components.otbr.*
|
||||
@@ -461,7 +458,6 @@ homeassistant.components.sensorpush_cloud.*
|
||||
homeassistant.components.sensoterra.*
|
||||
homeassistant.components.senz.*
|
||||
homeassistant.components.sfr_box.*
|
||||
homeassistant.components.sftp_storage.*
|
||||
homeassistant.components.shell_command.*
|
||||
homeassistant.components.shelly.*
|
||||
homeassistant.components.shopping_list.*
|
||||
|
74 CODEOWNERS (generated)
@@ -87,8 +87,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/airzone/ @Noltari
|
||||
/homeassistant/components/airzone_cloud/ @Noltari
|
||||
/tests/components/airzone_cloud/ @Noltari
|
||||
/homeassistant/components/aladdin_connect/ @swcloudgenie
|
||||
/tests/components/aladdin_connect/ @swcloudgenie
|
||||
/homeassistant/components/alarm_control_panel/ @home-assistant/core
|
||||
/tests/components/alarm_control_panel/ @home-assistant/core
|
||||
/homeassistant/components/alert/ @home-assistant/core @frenck
|
||||
@@ -154,10 +152,10 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/arve/ @ikalnyi
|
||||
/homeassistant/components/aseko_pool_live/ @milanmeu
|
||||
/tests/components/aseko_pool_live/ @milanmeu
|
||||
/homeassistant/components/assist_pipeline/ @synesthesiam @arturpragacz
|
||||
/tests/components/assist_pipeline/ @synesthesiam @arturpragacz
|
||||
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
|
||||
/tests/components/assist_pipeline/ @balloob @synesthesiam
|
||||
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
|
||||
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
|
||||
/homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
|
||||
/tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
|
||||
/homeassistant/components/atag/ @MatsNL
|
||||
@@ -298,8 +296,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/configurator/ @home-assistant/core
|
||||
/homeassistant/components/control4/ @lawtancool
|
||||
/tests/components/control4/ @lawtancool
|
||||
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/tests/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam
|
||||
/tests/components/conversation/ @home-assistant/core @synesthesiam
|
||||
/homeassistant/components/cookidoo/ @miaucl
|
||||
/tests/components/cookidoo/ @miaucl
|
||||
/homeassistant/components/coolmaster/ @OnFreund
|
||||
@@ -377,8 +375,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/dremel_3d_printer/ @tkdrob
|
||||
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
|
||||
/tests/components/drop_connect/ @ChandlerSystems @pfrazer
|
||||
/homeassistant/components/droplet/ @sarahseidman
|
||||
/tests/components/droplet/ @sarahseidman
|
||||
/homeassistant/components/dsmr/ @Robbie1221
|
||||
/tests/components/dsmr/ @Robbie1221
|
||||
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
|
||||
@@ -426,8 +422,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/emby/ @mezz64
|
||||
/homeassistant/components/emoncms/ @borpin @alexandrecuer
|
||||
/tests/components/emoncms/ @borpin @alexandrecuer
|
||||
/homeassistant/components/emoncms_history/ @alexandrecuer
|
||||
/tests/components/emoncms_history/ @alexandrecuer
|
||||
/homeassistant/components/emonitor/ @bdraco
|
||||
/tests/components/emonitor/ @bdraco
|
||||
/homeassistant/components/emulated_hue/ @bdraco @Tho85
|
||||
@@ -466,6 +460,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/eufylife_ble/ @bdr99
|
||||
/homeassistant/components/event/ @home-assistant/core
|
||||
/tests/components/event/ @home-assistant/core
|
||||
/homeassistant/components/evil_genius_labs/ @balloob
|
||||
/tests/components/evil_genius_labs/ @balloob
|
||||
/homeassistant/components/evohome/ @zxdavb
|
||||
/tests/components/evohome/ @zxdavb
|
||||
/homeassistant/components/ezviz/ @RenierM26
|
||||
@@ -515,8 +511,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/forked_daapd/ @uvjustin
|
||||
/tests/components/forked_daapd/ @uvjustin
|
||||
/homeassistant/components/fortios/ @kimfrellsen
|
||||
/homeassistant/components/foscam/ @Foscam-wangzhengyu
|
||||
/tests/components/foscam/ @Foscam-wangzhengyu
|
||||
/homeassistant/components/foscam/ @krmarien
|
||||
/tests/components/foscam/ @krmarien
|
||||
/homeassistant/components/freebox/ @hacf-fr @Quentame
|
||||
/tests/components/freebox/ @hacf-fr @Quentame
|
||||
/homeassistant/components/freedompro/ @stefano055415
|
||||
@@ -650,8 +646,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/homeassistant/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_alerts/ @home-assistant/core
|
||||
/tests/components/homeassistant_alerts/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_connect_zbt2/ @home-assistant/core
|
||||
/tests/components/homeassistant_connect_zbt2/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_green/ @home-assistant/core
|
||||
/tests/components/homeassistant_green/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_hardware/ @home-assistant/core
|
||||
@@ -680,8 +674,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/http/ @home-assistant/core
|
||||
/homeassistant/components/huawei_lte/ @scop @fphammerle
|
||||
/tests/components/huawei_lte/ @scop @fphammerle
|
||||
/homeassistant/components/hue/ @marcelveldt
|
||||
/tests/components/hue/ @marcelveldt
|
||||
/homeassistant/components/hue/ @balloob @marcelveldt
|
||||
/tests/components/hue/ @balloob @marcelveldt
|
||||
/homeassistant/components/huisbaasje/ @dennisschroer
|
||||
/tests/components/huisbaasje/ @dennisschroer
|
||||
/homeassistant/components/humidifier/ @home-assistant/core @Shulyaka
|
||||
@@ -753,8 +747,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/integration/ @dgomes
|
||||
/homeassistant/components/intellifire/ @jeeftor
|
||||
/tests/components/intellifire/ @jeeftor
|
||||
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/homeassistant/components/intent/ @home-assistant/core @synesthesiam
|
||||
/tests/components/intent/ @home-assistant/core @synesthesiam
|
||||
/homeassistant/components/intesishome/ @jnimmo
|
||||
/homeassistant/components/iometer/ @MaestroOnICe
|
||||
/tests/components/iometer/ @MaestroOnICe
|
||||
@@ -862,8 +856,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/lg_netcast/ @Drafteed @splinter98
|
||||
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
|
||||
/tests/components/lg_thinq/ @LG-ThinQ-Integration
|
||||
/homeassistant/components/libre_hardware_monitor/ @Sab44
|
||||
/tests/components/libre_hardware_monitor/ @Sab44
|
||||
/homeassistant/components/lidarr/ @tkdrob
|
||||
/tests/components/lidarr/ @tkdrob
|
||||
/homeassistant/components/lifx/ @Djelibeybi
|
||||
@@ -1112,6 +1104,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/open_meteo/ @frenck
|
||||
/homeassistant/components/open_router/ @joostlek
|
||||
/tests/components/open_router/ @joostlek
|
||||
/homeassistant/components/openai_conversation/ @balloob
|
||||
/tests/components/openai_conversation/ @balloob
|
||||
/homeassistant/components/openerz/ @misialq
|
||||
/tests/components/openerz/ @misialq
|
||||
/homeassistant/components/openexchangerates/ @MartinHjelmare
|
||||
@@ -1187,8 +1181,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/plum_lightpad/ @ColinHarrington @prystupa
|
||||
/homeassistant/components/point/ @fredrike
|
||||
/tests/components/point/ @fredrike
|
||||
/homeassistant/components/pooldose/ @lmaertin
|
||||
/tests/components/pooldose/ @lmaertin
|
||||
/homeassistant/components/poolsense/ @haemishkyd
|
||||
/tests/components/poolsense/ @haemishkyd
|
||||
/homeassistant/components/powerfox/ @klaasnicolaas
|
||||
@@ -1210,6 +1202,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/proximity/ @mib1185
|
||||
/tests/components/proximity/ @mib1185
|
||||
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno
|
||||
/homeassistant/components/prusalink/ @balloob
|
||||
/tests/components/prusalink/ @balloob
|
||||
/homeassistant/components/ps4/ @ktnrg45
|
||||
/tests/components/ps4/ @ktnrg45
|
||||
/homeassistant/components/pterodactyl/ @elmurato
|
||||
@@ -1303,8 +1297,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/rflink/ @javicalle
|
||||
/homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
|
||||
/tests/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
|
||||
/homeassistant/components/rhasspy/ @synesthesiam
|
||||
/tests/components/rhasspy/ @synesthesiam
|
||||
/homeassistant/components/rhasspy/ @balloob @synesthesiam
|
||||
/tests/components/rhasspy/ @balloob @synesthesiam
|
||||
/homeassistant/components/ridwell/ @bachya
|
||||
/tests/components/ridwell/ @bachya
|
||||
/homeassistant/components/ring/ @sdb9696
|
||||
@@ -1392,14 +1386,12 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/seventeentrack/ @shaiu
|
||||
/homeassistant/components/sfr_box/ @epenet
|
||||
/tests/components/sfr_box/ @epenet
|
||||
/homeassistant/components/sftp_storage/ @maretodoric
|
||||
/tests/components/sftp_storage/ @maretodoric
|
||||
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
|
||||
/tests/components/sharkiq/ @JeffResc @funkybunch
|
||||
/homeassistant/components/shell_command/ @home-assistant/core
|
||||
/tests/components/shell_command/ @home-assistant/core
|
||||
/homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
|
||||
/tests/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
|
||||
/homeassistant/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
|
||||
/tests/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
|
||||
/homeassistant/components/shodan/ @fabaff
|
||||
/homeassistant/components/sia/ @eavanvalkenburg
|
||||
/tests/components/sia/ @eavanvalkenburg
|
||||
@@ -1546,8 +1538,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/systemmonitor/ @gjohansson-ST
|
||||
/homeassistant/components/tado/ @erwindouna
|
||||
/tests/components/tado/ @erwindouna
|
||||
/homeassistant/components/tag/ @home-assistant/core
|
||||
/tests/components/tag/ @home-assistant/core
|
||||
/homeassistant/components/tag/ @balloob @dmulcahey
|
||||
/tests/components/tag/ @balloob @dmulcahey
|
||||
/homeassistant/components/tailscale/ @frenck
|
||||
/tests/components/tailscale/ @frenck
|
||||
/homeassistant/components/tailwind/ @frenck
|
||||
@@ -1692,15 +1684,15 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/vegehub/ @ghowevege
|
||||
/homeassistant/components/velbus/ @Cereal2nd @brefra
|
||||
/tests/components/velbus/ @Cereal2nd @brefra
|
||||
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
|
||||
/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
|
||||
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio
|
||||
/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio
|
||||
/homeassistant/components/venstar/ @garbled1 @jhollowe
|
||||
/tests/components/venstar/ @garbled1 @jhollowe
|
||||
/homeassistant/components/versasense/ @imstevenxyz
|
||||
/homeassistant/components/version/ @ludeeus
|
||||
/tests/components/version/ @ludeeus
|
||||
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
|
||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
|
||||
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
|
||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
|
||||
/homeassistant/components/vicare/ @CFenner
|
||||
/tests/components/vicare/ @CFenner
|
||||
/homeassistant/components/vilfo/ @ManneW
|
||||
@@ -1712,14 +1704,16 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
|
||||
/homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
|
||||
/tests/components/vodafone_station/ @paoloantinori @chemelli74
|
||||
/homeassistant/components/voip/ @synesthesiam @jaminh
|
||||
/tests/components/voip/ @synesthesiam @jaminh
|
||||
/homeassistant/components/voip/ @balloob @synesthesiam @jaminh
|
||||
/tests/components/voip/ @balloob @synesthesiam @jaminh
|
||||
/homeassistant/components/volumio/ @OnFreund
|
||||
/tests/components/volumio/ @OnFreund
|
||||
/homeassistant/components/volvo/ @thomasddn
|
||||
/tests/components/volvo/ @thomasddn
|
||||
/homeassistant/components/volvooncall/ @molobrakos
|
||||
/tests/components/volvooncall/ @molobrakos
|
||||
/homeassistant/components/vulcan/ @Antoni-Czaplicki
|
||||
/tests/components/vulcan/ @Antoni-Czaplicki
|
||||
/homeassistant/components/wake_on_lan/ @ntilley905
|
||||
/tests/components/wake_on_lan/ @ntilley905
|
||||
/homeassistant/components/wake_word/ @home-assistant/core @synesthesiam
|
||||
@@ -1784,8 +1778,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/worldclock/ @fabaff
|
||||
/homeassistant/components/ws66i/ @ssaenger
|
||||
/tests/components/ws66i/ @ssaenger
|
||||
/homeassistant/components/wyoming/ @synesthesiam
|
||||
/tests/components/wyoming/ @synesthesiam
|
||||
/homeassistant/components/wyoming/ @balloob @synesthesiam
|
||||
/tests/components/wyoming/ @balloob @synesthesiam
|
||||
/homeassistant/components/xbox/ @hunterjm
|
||||
/tests/components/xbox/ @hunterjm
|
||||
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
|
@@ -14,8 +14,5 @@ Still interested? Then you should take a peek at the [developer documentation](h

## Feature suggestions

If you want to suggest a new feature for Home Assistant (e.g. new integrations), please [start a discussion](https://github.com/orgs/home-assistant/discussions) on GitHub.

## Issue Tracker

If you want to report an issue, please [create an issue](https://github.com/home-assistant/core/issues) on GitHub.
If you want to suggest a new feature for Home Assistant (e.g., new integrations), please open a thread in our [Community Forum: Feature Requests](https://community.home-assistant.io/c/feature-requests).
We use [GitHub for tracking issues](https://github.com/home-assistant/core/issues), not for tracking feature requests.
@@ -3,7 +3,8 @@ FROM mcr.microsoft.com/vscode/devcontainers/base:debian
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

RUN \
apt-get update \
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
# Additional library needed by some tests and accordingly by VScode Tests Discovery
bluez \
10 build.yaml
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.05.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.05.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.05.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.05.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.05.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -187,42 +187,36 @@ def main() -> int:
|
||||
|
||||
from . import config, runner # noqa: PLC0415
|
||||
|
||||
# Ensure only one instance runs per config directory
|
||||
with runner.ensure_single_execution(config_dir) as single_execution_lock:
|
||||
# Check if another instance is already running
|
||||
if single_execution_lock.exit_code is not None:
|
||||
return single_execution_lock.exit_code
|
||||
safe_mode = config.safe_mode_enabled(config_dir)
|
||||
|
||||
safe_mode = config.safe_mode_enabled(config_dir)
|
||||
runtime_conf = runner.RuntimeConfig(
|
||||
config_dir=config_dir,
|
||||
verbose=args.verbose,
|
||||
log_rotate_days=args.log_rotate_days,
|
||||
log_file=args.log_file,
|
||||
log_no_color=args.log_no_color,
|
||||
skip_pip=args.skip_pip,
|
||||
skip_pip_packages=args.skip_pip_packages,
|
||||
recovery_mode=args.recovery_mode,
|
||||
debug=args.debug,
|
||||
open_ui=args.open_ui,
|
||||
safe_mode=safe_mode,
|
||||
)
|
||||
|
||||
runtime_conf = runner.RuntimeConfig(
|
||||
config_dir=config_dir,
|
||||
verbose=args.verbose,
|
||||
log_rotate_days=args.log_rotate_days,
|
||||
log_file=args.log_file,
|
||||
log_no_color=args.log_no_color,
|
||||
skip_pip=args.skip_pip,
|
||||
skip_pip_packages=args.skip_pip_packages,
|
||||
recovery_mode=args.recovery_mode,
|
||||
debug=args.debug,
|
||||
open_ui=args.open_ui,
|
||||
safe_mode=safe_mode,
|
||||
)
|
||||
fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
|
||||
with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
|
||||
faulthandler.enable(fault_file)
|
||||
exit_code = runner.run(runtime_conf)
|
||||
faulthandler.disable()
|
||||
|
||||
fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
|
||||
with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
|
||||
faulthandler.enable(fault_file)
|
||||
exit_code = runner.run(runtime_conf)
|
||||
faulthandler.disable()
|
||||
# It's possible for the fault file to disappear, so suppress obvious errors
|
||||
with suppress(FileNotFoundError):
|
||||
if os.path.getsize(fault_file_name) == 0:
|
||||
os.remove(fault_file_name)
|
||||
|
||||
# It's possible for the fault file to disappear, so suppress obvious errors
|
||||
with suppress(FileNotFoundError):
|
||||
if os.path.getsize(fault_file_name) == 0:
|
||||
os.remove(fault_file_name)
|
||||
check_threads()
|
||||
|
||||
check_threads()
|
||||
|
||||
return exit_code
|
||||
return exit_code
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@@ -27,7 +27,7 @@ from . import (
SetupFlow,
)

REQUIREMENTS = ["pyotp==2.9.0"]
REQUIREMENTS = ["pyotp==2.8.0"]

CONF_MESSAGE = "message"
@@ -20,7 +20,7 @@ from . import (
SetupFlow,
)

REQUIREMENTS = ["pyotp==2.9.0", "PyQRCode==1.2.1"]
REQUIREMENTS = ["pyotp==2.8.0", "PyQRCode==1.2.1"]

CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA)
@@ -1,5 +1,5 @@
{
"domain": "fritzbox",
"name": "FRITZ!",
"name": "FRITZ!Box",
"integrations": ["fritz", "fritzbox", "fritzbox_callmonitor"]
}
@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.2.1"],
"requirements": ["accuweather==4.2.0"],
"single_config_entry": true
}
|
@@ -3,10 +3,8 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.http import KEY_HASS, HomeAssistantView
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
|
||||
from homeassistant.core import (
|
||||
@@ -28,24 +26,14 @@ from .const import (
|
||||
ATTR_STRUCTURE,
|
||||
ATTR_TASK_NAME,
|
||||
DATA_COMPONENT,
|
||||
DATA_IMAGES,
|
||||
DATA_PREFERENCES,
|
||||
DOMAIN,
|
||||
SERVICE_GENERATE_DATA,
|
||||
SERVICE_GENERATE_IMAGE,
|
||||
AITaskEntityFeature,
|
||||
)
|
||||
from .entity import AITaskEntity
|
||||
from .http import async_setup as async_setup_http
|
||||
from .task import (
|
||||
GenDataTask,
|
||||
GenDataTaskResult,
|
||||
GenImageTask,
|
||||
GenImageTaskResult,
|
||||
ImageData,
|
||||
async_generate_data,
|
||||
async_generate_image,
|
||||
)
|
||||
from .task import GenDataTask, GenDataTaskResult, async_generate_data
|
||||
|
||||
__all__ = [
|
||||
"DOMAIN",
|
||||
@@ -53,11 +41,7 @@ __all__ = [
|
||||
"AITaskEntityFeature",
|
||||
"GenDataTask",
|
||||
"GenDataTaskResult",
|
||||
"GenImageTask",
|
||||
"GenImageTaskResult",
|
||||
"ImageData",
|
||||
"async_generate_data",
|
||||
"async_generate_image",
|
||||
"async_setup",
|
||||
"async_setup_entry",
|
||||
"async_unload_entry",
|
||||
@@ -94,10 +78,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
|
||||
hass.data[DATA_COMPONENT] = entity_component
|
||||
hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
|
||||
hass.data[DATA_IMAGES] = {}
|
||||
await hass.data[DATA_PREFERENCES].async_load()
|
||||
async_setup_http(hass)
|
||||
hass.http.register_view(ImageView)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GENERATE_DATA,
|
||||
@@ -119,23 +101,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
job_type=HassJobType.Coroutinefunction,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GENERATE_IMAGE,
|
||||
async_service_generate_image,
|
||||
schema=vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_TASK_NAME): cv.string,
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
|
||||
vol.Required(ATTR_INSTRUCTIONS): cv.string,
|
||||
vol.Optional(ATTR_ATTACHMENTS): vol.All(
|
||||
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
|
||||
),
|
||||
}
|
||||
),
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
job_type=HassJobType.Coroutinefunction,
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
@@ -150,23 +115,17 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
|
||||
async def async_service_generate_data(call: ServiceCall) -> ServiceResponse:
|
||||
"""Run the data task service."""
|
||||
"""Run the run task service."""
|
||||
result = await async_generate_data(hass=call.hass, **call.data)
|
||||
return result.as_dict()
|
||||
|
||||
|
||||
async def async_service_generate_image(call: ServiceCall) -> ServiceResponse:
|
||||
"""Run the image task service."""
|
||||
return await async_generate_image(hass=call.hass, **call.data)
|
||||
|
||||
|
||||
class AITaskPreferences:
|
||||
"""AI Task preferences."""
|
||||
|
||||
KEYS = ("gen_data_entity_id", "gen_image_entity_id")
|
||||
KEYS = ("gen_data_entity_id",)
|
||||
|
||||
gen_data_entity_id: str | None = None
|
||||
gen_image_entity_id: str | None = None
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the preferences."""
|
||||
@@ -180,21 +139,17 @@ class AITaskPreferences:
|
||||
if data is None:
|
||||
return
|
||||
for key in self.KEYS:
|
||||
setattr(self, key, data.get(key))
|
||||
setattr(self, key, data[key])
|
||||
|
||||
@callback
|
||||
def async_set_preferences(
|
||||
self,
|
||||
*,
|
||||
gen_data_entity_id: str | None | UndefinedType = UNDEFINED,
|
||||
gen_image_entity_id: str | None | UndefinedType = UNDEFINED,
|
||||
) -> None:
|
||||
"""Set the preferences."""
|
||||
changed = False
|
||||
for key, value in (
|
||||
("gen_data_entity_id", gen_data_entity_id),
|
||||
("gen_image_entity_id", gen_image_entity_id),
|
||||
):
|
||||
for key, value in (("gen_data_entity_id", gen_data_entity_id),):
|
||||
if value is not UNDEFINED:
|
||||
if getattr(self, key) != value:
|
||||
setattr(self, key, value)
|
||||
@@ -209,28 +164,3 @@ class AITaskPreferences:
|
||||
def as_dict(self) -> dict[str, str | None]:
|
||||
"""Get the current preferences."""
|
||||
return {key: getattr(self, key) for key in self.KEYS}
|
||||
|
||||
|
||||
class ImageView(HomeAssistantView):
|
||||
"""View to generated images."""
|
||||
|
||||
url = f"/api/{DOMAIN}/images/{{filename}}"
|
||||
name = f"api:{DOMAIN}/images"
|
||||
|
||||
async def get(
|
||||
self,
|
||||
request: web.Request,
|
||||
filename: str,
|
||||
) -> web.Response:
|
||||
"""Serve image."""
|
||||
hass = request.app[KEY_HASS]
|
||||
image_storage = hass.data[DATA_IMAGES]
|
||||
image_data = image_storage.get(filename)
|
||||
|
||||
if image_data is None:
|
||||
raise web.HTTPNotFound
|
||||
|
||||
return web.Response(
|
||||
body=image_data.data,
|
||||
content_type=image_data.mime_type,
|
||||
)
|
||||
|
@@ -12,18 +12,12 @@ if TYPE_CHECKING:

from . import AITaskPreferences
from .entity import AITaskEntity
from .task import ImageData

DOMAIN = "ai_task"
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
DATA_IMAGES: HassKey[dict[str, ImageData]] = HassKey(f"{DOMAIN}_images")

IMAGE_EXPIRY_TIME = 60 * 60 # 1 hour
MAX_IMAGES = 20

SERVICE_GENERATE_DATA = "generate_data"
SERVICE_GENERATE_IMAGE = "generate_image"

ATTR_INSTRUCTIONS: Final = "instructions"
ATTR_TASK_NAME: Final = "task_name"
@@ -44,6 +38,3 @@ class AITaskEntityFeature(IntFlag):

SUPPORT_ATTACHMENTS = 2
"""Support attachments with generate data."""

GENERATE_IMAGE = 4
"""Generate images based on instructions."""
@@ -18,7 +18,7 @@ from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DEFAULT_SYSTEM_PROMPT, DOMAIN, AITaskEntityFeature
|
||||
from .task import GenDataTask, GenDataTaskResult, GenImageTask, GenImageTaskResult
|
||||
from .task import GenDataTask, GenDataTaskResult
|
||||
|
||||
|
||||
class AITaskEntity(RestoreEntity):
|
||||
@@ -57,13 +57,9 @@ class AITaskEntity(RestoreEntity):
|
||||
async def _async_get_ai_task_chat_log(
|
||||
self,
|
||||
session: ChatSession,
|
||||
task: GenDataTask | GenImageTask,
|
||||
task: GenDataTask,
|
||||
) -> AsyncGenerator[ChatLog]:
|
||||
"""Context manager used to manage the ChatLog used during an AI Task."""
|
||||
user_llm_hass_api: llm.API | None = None
|
||||
if isinstance(task, GenDataTask):
|
||||
user_llm_hass_api = task.llm_api
|
||||
|
||||
# pylint: disable-next=contextmanager-generator-missing-cleanup
|
||||
with (
|
||||
async_get_chat_log(
|
||||
@@ -81,7 +77,6 @@ class AITaskEntity(RestoreEntity):
|
||||
device_id=None,
|
||||
),
|
||||
user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
|
||||
user_llm_hass_api=user_llm_hass_api,
|
||||
)
|
||||
|
||||
chat_log.async_add_user_content(
|
||||
@@ -109,23 +104,3 @@ class AITaskEntity(RestoreEntity):
|
||||
) -> GenDataTaskResult:
|
||||
"""Handle a gen data task."""
|
||||
raise NotImplementedError
|
||||
|
||||
@final
|
||||
async def internal_async_generate_image(
|
||||
self,
|
||||
session: ChatSession,
|
||||
task: GenImageTask,
|
||||
) -> GenImageTaskResult:
|
||||
"""Run a gen image task."""
|
||||
self.__last_activity = dt_util.utcnow().isoformat()
|
||||
self.async_write_ha_state()
|
||||
async with self._async_get_ai_task_chat_log(session, task) as chat_log:
|
||||
return await self._async_generate_image(task, chat_log)
|
||||
|
||||
async def _async_generate_image(
|
||||
self,
|
||||
task: GenImageTask,
|
||||
chat_log: ChatLog,
|
||||
) -> GenImageTaskResult:
|
||||
"""Handle a gen image task."""
|
||||
raise NotImplementedError
|
||||
|
@@ -37,7 +37,6 @@ def websocket_get_preferences(
{
vol.Required("type"): "ai_task/preferences/set",
vol.Optional("gen_data_entity_id"): vol.Any(str, None),
vol.Optional("gen_image_entity_id"): vol.Any(str, None),
}
)
@websocket_api.require_admin
@@ -1,15 +1,7 @@
{
"entity_component": {
"_": {
"default": "mdi:star-four-points"
}
},
"services": {
"generate_data": {
"service": "mdi:file-star-four-points-outline"
},
"generate_image": {
"service": "mdi:star-four-points-box-outline"
}
}
}
@@ -1,10 +1,10 @@
{
"domain": "ai_task",
"name": "AI Task",
"after_dependencies": ["camera", "http"],
"after_dependencies": ["camera"],
"codeowners": ["@home-assistant/core"],
"dependencies": ["conversation", "media_source"],
"documentation": "https://www.home-assistant.io/integrations/ai_task",
"integration_type": "entity",
"integration_type": "system",
"quality_scale": "internal"
}
@@ -1,90 +0,0 @@
|
||||
"""Expose images as media sources."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from homeassistant.components.http.auth import async_sign_path
|
||||
from homeassistant.components.media_player import BrowseError, MediaClass
|
||||
from homeassistant.components.media_source import (
|
||||
BrowseMediaSource,
|
||||
MediaSource,
|
||||
MediaSourceItem,
|
||||
PlayMedia,
|
||||
Unresolvable,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_get_media_source(hass: HomeAssistant) -> ImageMediaSource:
|
||||
"""Set up image media source."""
|
||||
_LOGGER.debug("Setting up image media source")
|
||||
return ImageMediaSource(hass)
|
||||
|
||||
|
||||
class ImageMediaSource(MediaSource):
|
||||
"""Provide images as media sources."""
|
||||
|
||||
name: str = "AI Generated Images"
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize ImageMediaSource."""
|
||||
super().__init__(DOMAIN)
|
||||
self.hass = hass
|
||||
|
||||
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
|
||||
"""Resolve media to a url."""
|
||||
image_storage = self.hass.data[DATA_IMAGES]
|
||||
image = image_storage.get(item.identifier)
|
||||
|
||||
if image is None:
|
||||
raise Unresolvable(f"Could not resolve media item: {item.identifier}")
|
||||
|
||||
return PlayMedia(
|
||||
async_sign_path(
|
||||
self.hass,
|
||||
f"/api/{DOMAIN}/images/{item.identifier}",
|
||||
timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
|
||||
),
|
||||
image.mime_type,
|
||||
)
|
||||
|
||||
async def async_browse_media(
|
||||
self,
|
||||
item: MediaSourceItem,
|
||||
) -> BrowseMediaSource:
|
||||
"""Return media."""
|
||||
if item.identifier:
|
||||
raise BrowseError("Unknown item")
|
||||
|
||||
image_storage = self.hass.data[DATA_IMAGES]
|
||||
|
||||
children = [
|
||||
BrowseMediaSource(
|
||||
domain=DOMAIN,
|
||||
identifier=filename,
|
||||
media_class=MediaClass.IMAGE,
|
||||
media_content_type=image.mime_type,
|
||||
title=image.title or filename,
|
||||
can_play=True,
|
||||
can_expand=False,
|
||||
)
|
||||
for filename, image in image_storage.items()
|
||||
]
|
||||
|
||||
return BrowseMediaSource(
|
||||
domain=DOMAIN,
|
||||
identifier=None,
|
||||
media_class=MediaClass.APP,
|
||||
media_content_type="",
|
||||
title="AI Generated Images",
|
||||
can_play=False,
|
||||
can_expand=True,
|
||||
children_media_class=MediaClass.IMAGE,
|
||||
children=children,
|
||||
)
|
@@ -20,6 +20,7 @@ generate_data:
|
||||
supported_features:
|
||||
- ai_task.AITaskEntityFeature.GENERATE_DATA
|
||||
structure:
|
||||
advanced: true
|
||||
required: false
|
||||
example: '{ "name": { "selector": { "text": }, "description": "Name of the user", "required": "True" } } }, "age": { "selector": { "number": }, "description": "Age of the user" } }'
|
||||
selector:
|
||||
@@ -30,30 +31,3 @@ generate_data:
|
||||
media:
|
||||
accept:
|
||||
- "*"
|
||||
generate_image:
|
||||
fields:
|
||||
task_name:
|
||||
example: "picture of a dog"
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
instructions:
|
||||
example: "Generate a high quality square image of a dog on transparent background"
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
multiline: true
|
||||
entity_id:
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
filter:
|
||||
domain: ai_task
|
||||
supported_features:
|
||||
- ai_task.AITaskEntityFeature.GENERATE_IMAGE
|
||||
attachments:
|
||||
required: false
|
||||
selector:
|
||||
media:
|
||||
accept:
|
||||
- "*"
|
||||
|
@@ -25,28 +25,6 @@
|
||||
"description": "List of files to attach for multi-modal AI analysis."
|
||||
}
|
||||
}
|
||||
},
|
||||
"generate_image": {
|
||||
"name": "Generate image",
|
||||
"description": "Uses AI to generate image.",
|
||||
"fields": {
|
||||
"task_name": {
|
||||
"name": "Task name",
|
||||
"description": "Name of the task."
|
||||
},
|
||||
"instructions": {
|
||||
"name": "Instructions",
|
||||
"description": "Instructions that explains the image to be generated."
|
||||
},
|
||||
"entity_id": {
|
||||
"name": "Entity ID",
|
||||
"description": "Entity ID to run the task on."
|
||||
},
|
||||
"attachments": {
|
||||
"name": "Attachments",
|
||||
"description": "List of files to attach for using as references."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -3,8 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial
|
||||
import mimetypes
|
||||
from pathlib import Path
|
||||
import tempfile
|
||||
@@ -13,24 +11,11 @@ from typing import Any
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import camera, conversation, media_source
|
||||
from homeassistant.components.http.auth import async_sign_path
|
||||
from homeassistant.core import HomeAssistant, ServiceResponse, callback
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.network import get_url
|
||||
from homeassistant.util import RE_SANITIZE_FILENAME, slugify
|
||||
from homeassistant.helpers.chat_session import async_get_chat_session
|
||||
|
||||
from .const import (
|
||||
DATA_COMPONENT,
|
||||
DATA_IMAGES,
|
||||
DATA_PREFERENCES,
|
||||
DOMAIN,
|
||||
IMAGE_EXPIRY_TIME,
|
||||
MAX_IMAGES,
|
||||
AITaskEntityFeature,
|
||||
)
|
||||
from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature
|
||||
|
||||
|
||||
def _save_camera_snapshot(image: camera.Image) -> Path:
|
||||
@@ -44,15 +29,43 @@ def _save_camera_snapshot(image: camera.Image) -> Path:
|
||||
return Path(temp_file.name)
|
||||
|
||||
|
||||
async def _resolve_attachments(
|
||||
async def async_generate_data(
|
||||
hass: HomeAssistant,
|
||||
session: ChatSession,
|
||||
*,
|
||||
task_name: str,
|
||||
entity_id: str | None = None,
|
||||
instructions: str,
|
||||
structure: vol.Schema | None = None,
|
||||
attachments: list[dict] | None = None,
|
||||
) -> list[conversation.Attachment]:
|
||||
"""Resolve attachments for a task."""
|
||||
) -> GenDataTaskResult:
|
||||
"""Run a task in the AI Task integration."""
|
||||
if entity_id is None:
|
||||
entity_id = hass.data[DATA_PREFERENCES].gen_data_entity_id
|
||||
|
||||
if entity_id is None:
|
||||
raise HomeAssistantError("No entity_id provided and no preferred entity set")
|
||||
|
||||
entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
|
||||
if entity is None:
|
||||
raise HomeAssistantError(f"AI Task entity {entity_id} not found")
|
||||
|
||||
if AITaskEntityFeature.GENERATE_DATA not in entity.supported_features:
|
||||
raise HomeAssistantError(
|
||||
f"AI Task entity {entity_id} does not support generating data"
|
||||
)
|
||||
|
||||
# Resolve attachments
|
||||
resolved_attachments: list[conversation.Attachment] = []
|
||||
created_files: list[Path] = []
|
||||
|
||||
if (
|
||||
attachments
|
||||
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
|
||||
):
|
||||
raise HomeAssistantError(
|
||||
f"AI Task entity {entity_id} does not support attachments"
|
||||
)
|
||||
|
||||
for attachment in attachments or []:
|
||||
media_content_id = attachment["media_content_id"]
|
||||
|
||||
@@ -91,60 +104,20 @@ async def _resolve_attachments(
|
||||
)
|
||||
)
|
||||
|
||||
if not created_files:
|
||||
return resolved_attachments
|
||||
|
||||
def cleanup_files() -> None:
|
||||
"""Cleanup temporary files."""
|
||||
for file in created_files:
|
||||
file.unlink(missing_ok=True)
|
||||
|
||||
@callback
|
||||
def cleanup_files_callback() -> None:
|
||||
"""Cleanup temporary files."""
|
||||
hass.async_add_executor_job(cleanup_files)
|
||||
|
||||
session.async_on_cleanup(cleanup_files_callback)
|
||||
|
||||
return resolved_attachments
|
||||
|
||||
|
||||
async def async_generate_data(
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
task_name: str,
|
||||
entity_id: str | None = None,
|
||||
instructions: str,
|
||||
structure: vol.Schema | None = None,
|
||||
attachments: list[dict] | None = None,
|
||||
llm_api: llm.API | None = None,
|
||||
) -> GenDataTaskResult:
|
||||
"""Run a data generation task in the AI Task integration."""
|
||||
if entity_id is None:
|
||||
entity_id = hass.data[DATA_PREFERENCES].gen_data_entity_id
|
||||
|
||||
if entity_id is None:
|
||||
raise HomeAssistantError("No entity_id provided and no preferred entity set")
|
||||
|
||||
entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
|
||||
if entity is None:
|
||||
raise HomeAssistantError(f"AI Task entity {entity_id} not found")
|
||||
|
||||
if AITaskEntityFeature.GENERATE_DATA not in entity.supported_features:
|
||||
raise HomeAssistantError(
|
||||
f"AI Task entity {entity_id} does not support generating data"
|
||||
)
|
||||
|
||||
if (
|
||||
attachments
|
||||
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
|
||||
):
|
||||
raise HomeAssistantError(
|
||||
f"AI Task entity {entity_id} does not support attachments"
|
||||
)
|
||||
|
||||
with async_get_chat_session(hass) as session:
|
||||
resolved_attachments = await _resolve_attachments(hass, session, attachments)
|
||||
if created_files:
|
||||
|
||||
def cleanup_files() -> None:
|
||||
"""Cleanup temporary files."""
|
||||
for file in created_files:
|
||||
file.unlink(missing_ok=True)
|
||||
|
||||
@callback
|
||||
def cleanup_files_callback() -> None:
|
||||
"""Cleanup temporary files."""
|
||||
hass.async_add_executor_job(cleanup_files)
|
||||
|
||||
session.async_on_cleanup(cleanup_files_callback)
|
||||
|
||||
return await entity.internal_async_generate_data(
|
||||
session,
|
||||
@@ -153,112 +126,10 @@ async def async_generate_data(
|
||||
instructions=instructions,
|
||||
structure=structure,
|
||||
attachments=resolved_attachments or None,
|
||||
llm_api=llm_api,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _cleanup_images(image_storage: dict[str, ImageData], num_to_remove: int) -> None:
|
||||
"""Remove old images to keep the storage size under the limit."""
|
||||
if num_to_remove <= 0:
|
||||
return
|
||||
|
||||
if num_to_remove >= len(image_storage):
|
||||
image_storage.clear()
|
||||
return
|
||||
|
||||
sorted_images = sorted(
|
||||
image_storage.items(),
|
||||
key=lambda item: item[1].timestamp,
|
||||
)
|
||||
|
||||
for filename, _ in sorted_images[:num_to_remove]:
|
||||
image_storage.pop(filename, None)
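For illustration only (not part of this change), a minimal sketch of the eviction order implemented above: entries are dropped oldest-first by timestamp until the store is back under the limit.
store = {
    "a.png": ImageData(data=b"", timestamp=100, mime_type="image/png", title="a"),
    "b.png": ImageData(data=b"", timestamp=200, mime_type="image/png", title="b"),
    "c.png": ImageData(data=b"", timestamp=300, mime_type="image/png", title="c"),
}
_cleanup_images(store, num_to_remove=2)
# Only the newest entry survives.
assert list(store) == ["c.png"]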
|
||||
|
||||
|
||||
async def async_generate_image(
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
task_name: str,
|
||||
entity_id: str | None = None,
|
||||
instructions: str,
|
||||
attachments: list[dict] | None = None,
|
||||
) -> ServiceResponse:
|
||||
"""Run an image generation task in the AI Task integration."""
|
||||
if entity_id is None:
|
||||
entity_id = hass.data[DATA_PREFERENCES].gen_image_entity_id
|
||||
|
||||
if entity_id is None:
|
||||
raise HomeAssistantError("No entity_id provided and no preferred entity set")
|
||||
|
||||
entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
|
||||
if entity is None:
|
||||
raise HomeAssistantError(f"AI Task entity {entity_id} not found")
|
||||
|
||||
if AITaskEntityFeature.GENERATE_IMAGE not in entity.supported_features:
|
||||
raise HomeAssistantError(
|
||||
f"AI Task entity {entity_id} does not support generating images"
|
||||
)
|
||||
|
||||
if (
|
||||
attachments
|
||||
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
|
||||
):
|
||||
raise HomeAssistantError(
|
||||
f"AI Task entity {entity_id} does not support attachments"
|
||||
)
|
||||
|
||||
with async_get_chat_session(hass) as session:
|
||||
resolved_attachments = await _resolve_attachments(hass, session, attachments)
|
||||
|
||||
task_result = await entity.internal_async_generate_image(
|
||||
session,
|
||||
GenImageTask(
|
||||
name=task_name,
|
||||
instructions=instructions,
|
||||
attachments=resolved_attachments or None,
|
||||
),
|
||||
)
|
||||
|
||||
service_result = task_result.as_dict()
|
||||
image_data = service_result.pop("image_data")
|
||||
if service_result.get("revised_prompt") is None:
|
||||
service_result["revised_prompt"] = instructions
|
||||
|
||||
image_storage = hass.data[DATA_IMAGES]
|
||||
|
||||
if len(image_storage) + 1 > MAX_IMAGES:
|
||||
_cleanup_images(image_storage, len(image_storage) + 1 - MAX_IMAGES)
|
||||
|
||||
current_time = datetime.now()
|
||||
ext = mimetypes.guess_extension(task_result.mime_type, False) or ".png"
|
||||
sanitized_task_name = RE_SANITIZE_FILENAME.sub("", slugify(task_name))
|
||||
filename = f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}"
|
||||
|
||||
image_storage[filename] = ImageData(
|
||||
data=image_data,
|
||||
timestamp=int(current_time.timestamp()),
|
||||
mime_type=task_result.mime_type,
|
||||
title=service_result["revised_prompt"],
|
||||
)
|
||||
|
||||
def _purge_image(filename: str, now: datetime) -> None:
|
||||
"""Remove image from storage."""
|
||||
image_storage.pop(filename, None)
|
||||
|
||||
if IMAGE_EXPIRY_TIME > 0:
|
||||
async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))
|
||||
|
||||
service_result["url"] = get_url(hass) + async_sign_path(
|
||||
hass,
|
||||
f"/api/{DOMAIN}/images/{filename}",
|
||||
timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
|
||||
)
|
||||
service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"
|
||||
|
||||
return service_result
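For reference, an illustrative (not verbatim) shape of the ServiceResponse returned above, assuming the integration domain is ai_task and a PNG result; the exact values depend on the entity that handled the task.
{
    "conversation_id": "01JC4ZD5EXAMPLE",
    "mime_type": "image/png",
    "width": 1024,
    "height": 1024,
    "model": "example-image-model",
    "revised_prompt": "A watercolor painting of a lighthouse at dusk",  # falls back to the instructions
    "url": "https://example.duckdns.org/api/ai_task/images/2025-01-01_120000_lighthouse.png?authSig=...",
    "media_source_id": "media-source://ai_task/images/2025-01-01_120000_lighthouse.png",
}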
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class GenDataTask:
|
||||
"""Gen data task to be processed."""
|
||||
@@ -275,9 +146,6 @@ class GenDataTask:
|
||||
attachments: list[conversation.Attachment] | None = None
|
||||
"""List of attachments to go along the instructions."""
|
||||
|
||||
llm_api: llm.API | None = None
|
||||
"""API to provide to the LLM."""
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Return task as a string."""
|
||||
return f"<GenDataTask {self.name}: {id(self)}>"
|
||||
@@ -299,80 +167,3 @@ class GenDataTaskResult:
|
||||
"conversation_id": self.conversation_id,
|
||||
"data": self.data,
|
||||
}
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class GenImageTask:
|
||||
"""Gen image task to be processed."""
|
||||
|
||||
name: str
|
||||
"""Name of the task."""
|
||||
|
||||
instructions: str
|
||||
"""Instructions on what needs to be done."""
|
||||
|
||||
attachments: list[conversation.Attachment] | None = None
|
||||
"""List of attachments to go along the instructions."""
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Return task as a string."""
|
||||
return f"<GenImageTask {self.name}: {id(self)}>"
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class GenImageTaskResult:
|
||||
"""Result of gen image task."""
|
||||
|
||||
image_data: bytes
|
||||
"""Raw image data generated by the model."""
|
||||
|
||||
conversation_id: str
|
||||
"""Unique identifier for the conversation."""
|
||||
|
||||
mime_type: str
|
||||
"""MIME type of the generated image."""
|
||||
|
||||
width: int | None = None
|
||||
"""Width of the generated image, if available."""
|
||||
|
||||
height: int | None = None
|
||||
"""Height of the generated image, if available."""
|
||||
|
||||
model: str | None = None
|
||||
"""Model used to generate the image, if available."""
|
||||
|
||||
revised_prompt: str | None = None
|
||||
"""Revised prompt used to generate the image, if applicable."""
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return result as a dict."""
|
||||
return {
|
||||
"image_data": self.image_data,
|
||||
"conversation_id": self.conversation_id,
|
||||
"mime_type": self.mime_type,
|
||||
"width": self.width,
|
||||
"height": self.height,
|
||||
"model": self.model,
|
||||
"revised_prompt": self.revised_prompt,
|
||||
}
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ImageData:
|
||||
"""Image data for stored generated images."""
|
||||
|
||||
data: bytes
|
||||
"""Raw image data."""
|
||||
|
||||
timestamp: int
|
||||
"""Timestamp when the image was generated, as a Unix timestamp."""
|
||||
|
||||
mime_type: str
|
||||
"""MIME type of the image."""
|
||||
|
||||
title: str
|
||||
"""Title of the image, usually the prompt used to generate it."""
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Return image data as a string."""
|
||||
return f"<ImageData {self.title}: {id(self)}>"
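As a usage illustration (a minimal sketch, not part of this diff): assuming these helpers are exported from homeassistant.components.ai_task and a configured entity supports GENERATE_DATA, a caller could request structured data like this.
import voluptuous as vol

from homeassistant.components import ai_task
from homeassistant.core import HomeAssistant


async def async_summarize(hass: HomeAssistant) -> dict:
    """Ask the preferred AI Task entity for structured data."""
    result = await ai_task.async_generate_data(
        hass,
        task_name="grocery_summary",
        instructions="Summarize the items still missing from the shopping list.",
        structure=vol.Schema({vol.Required("summary"): str}),
    )
    # GenDataTaskResult.as_dict() exposes conversation_id and data.
    return result.as_dict()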
|
||||
|
@@ -61,7 +61,7 @@
|
||||
"display_pm_standard": {
|
||||
"name": "Display PM standard",
|
||||
"state": {
|
||||
"ugm3": "μg/m³",
|
||||
"ugm3": "µg/m³",
|
||||
"us_aqi": "US AQI"
|
||||
}
|
||||
},
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from airos.airos8 import AirOS8
|
||||
from airos.airos8 import AirOS
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -23,7 +23,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(hass, verify_ssl=False)
|
||||
|
||||
airos_device = AirOS8(
|
||||
airos_device = AirOS(
|
||||
host=entry.data[CONF_HOST],
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
|
@@ -15,7 +15,7 @@ from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
|
||||
from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
|
||||
from .entity import AirOSEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -27,7 +27,7 @@ PARALLEL_UPDATES = 0
|
||||
class AirOSBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Describe an AirOS binary sensor."""
|
||||
|
||||
value_fn: Callable[[AirOS8Data], bool]
|
||||
value_fn: Callable[[AirOSData], bool]
|
||||
|
||||
|
||||
BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
|
||||
|
@@ -19,7 +19,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirOS8
|
||||
from .coordinator import AirOS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -48,7 +48,7 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(self.hass, verify_ssl=False)
|
||||
|
||||
airos_device = AirOS8(
|
||||
airos_device = AirOS(
|
||||
host=user_input[CONF_HOST],
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
|
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from airos.airos8 import AirOS8, AirOS8Data
|
||||
from airos.airos8 import AirOS, AirOSData
|
||||
from airos.exceptions import (
|
||||
AirOSConnectionAuthenticationError,
|
||||
AirOSConnectionSetupError,
|
||||
@@ -24,13 +24,13 @@ _LOGGER = logging.getLogger(__name__)
|
||||
type AirOSConfigEntry = ConfigEntry[AirOSDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
|
||||
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSData]):
|
||||
"""Class to manage fetching AirOS data from single endpoint."""
|
||||
|
||||
config_entry: AirOSConfigEntry
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS8
|
||||
self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
self.airos_device = airos_device
|
||||
@@ -42,7 +42,7 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> AirOS8Data:
|
||||
async def _async_update_data(self) -> AirOSData:
|
||||
"""Fetch data from AirOS."""
|
||||
try:
|
||||
await self.airos_device.login()
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airos",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["airos==0.5.1"]
|
||||
"requirements": ["airos==0.3.0"]
|
||||
}
|
||||
|
@@ -26,7 +26,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
|
||||
from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
|
||||
from .entity import AirOSEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -42,7 +42,7 @@ PARALLEL_UPDATES = 0
|
||||
class AirOSSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describe an AirOS sensor."""
|
||||
|
||||
value_fn: Callable[[AirOS8Data], StateType]
|
||||
value_fn: Callable[[AirOSData], StateType]
|
||||
|
||||
|
||||
SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
|
||||
|
@@ -11,5 +11,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairzone"],
|
||||
"requirements": ["aioairzone==1.0.1"]
|
||||
"requirements": ["aioairzone==1.0.0"]
|
||||
}
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioairzone_cloud"],
|
||||
"requirements": ["aioairzone-cloud==0.7.2"]
|
||||
"requirements": ["aioairzone-cloud==0.7.1"]
|
||||
}
|
||||
|
@@ -2,112 +2,39 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from genie_partner_sdk.client import AladdinConnectClient
|
||||
from genie_partner_sdk.model import GarageDoor
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import (
|
||||
aiohttp_client,
|
||||
config_entry_oauth2_flow,
|
||||
device_registry as dr,
|
||||
)
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
|
||||
from . import api
|
||||
from .const import CONFIG_FLOW_MINOR_VERSION, CONFIG_FLOW_VERSION, DOMAIN
|
||||
from .coordinator import AladdinConnectConfigEntry, AladdinConnectCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.COVER, Platform.SENSOR]
|
||||
DOMAIN = "aladdin_connect"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: AladdinConnectConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Aladdin Connect Genie from a config entry."""
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
)
|
||||
async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool:
|
||||
"""Set up Aladdin Connect from a config entry."""
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
DOMAIN,
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.ERROR,
|
||||
translation_key="integration_removed",
|
||||
translation_placeholders={
|
||||
"entries": "/config/integrations/integration/aladdin_connect",
|
||||
},
|
||||
)
|
||||
|
||||
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
|
||||
|
||||
client = AladdinConnectClient(
|
||||
api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
|
||||
)
|
||||
|
||||
sdk_doors = await client.get_doors()
|
||||
|
||||
# Convert SDK GarageDoor objects to integration GarageDoor objects
|
||||
doors = [
|
||||
GarageDoor(
|
||||
{
|
||||
"device_id": door.device_id,
|
||||
"door_number": door.door_number,
|
||||
"name": door.name,
|
||||
"status": door.status,
|
||||
"link_status": door.link_status,
|
||||
"battery_level": door.battery_level,
|
||||
}
|
||||
)
|
||||
for door in sdk_doors
|
||||
]
|
||||
|
||||
entry.runtime_data = {
|
||||
door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)
|
||||
for door in doors
|
||||
}
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
remove_stale_devices(hass, entry)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: AladdinConnectConfigEntry
|
||||
) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, config_entry: AladdinConnectConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate old config."""
|
||||
if config_entry.version < CONFIG_FLOW_VERSION:
|
||||
config_entry.async_start_reauth(hass)
|
||||
new_data = {**config_entry.data}
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry,
|
||||
data=new_data,
|
||||
version=CONFIG_FLOW_VERSION,
|
||||
minor_version=CONFIG_FLOW_MINOR_VERSION,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def remove_stale_devices(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AladdinConnectConfigEntry,
|
||||
) -> None:
|
||||
"""Remove stale devices from device registry."""
|
||||
device_registry = dr.async_get(hass)
|
||||
device_entries = dr.async_entries_for_config_entry(
|
||||
device_registry, config_entry.entry_id
|
||||
)
|
||||
all_device_ids = set(config_entry.runtime_data)
|
||||
|
||||
for device_entry in device_entries:
|
||||
device_id: str | None = None
|
||||
for identifier in device_entry.identifiers:
|
||||
if identifier[0] == DOMAIN:
|
||||
device_id = identifier[1]
|
||||
break
|
||||
|
||||
if device_id and device_id not in all_device_ids:
|
||||
device_registry.async_update_device(
|
||||
device_entry.id, remove_config_entry_id=config_entry.entry_id
|
||||
)
|
||||
async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Remove a config entry."""
|
||||
if not hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
ir.async_delete_issue(hass, DOMAIN, DOMAIN)
|
||||
# Remove any remaining disabled or ignored entries
|
||||
for _entry in hass.config_entries.async_entries(DOMAIN):
|
||||
hass.async_create_task(hass.config_entries.async_remove(_entry.entry_id))
|
||||
|
@@ -1,33 +0,0 @@
|
||||
"""API for Aladdin Connect Genie bound to Home Assistant OAuth."""
|
||||
|
||||
from typing import cast
|
||||
|
||||
from aiohttp import ClientSession
|
||||
from genie_partner_sdk.auth import Auth
|
||||
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
API_URL = "https://twdvzuefzh.execute-api.us-east-2.amazonaws.com/v1"
|
||||
API_KEY = "k6QaiQmcTm2zfaNns5L1Z8duBtJmhDOW8JawlCC3"
|
||||
|
||||
|
||||
class AsyncConfigEntryAuth(Auth):
|
||||
"""Provide Aladdin Connect Genie authentication tied to an OAuth2 based config entry."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
websession: ClientSession,
|
||||
oauth_session: config_entry_oauth2_flow.OAuth2Session,
|
||||
) -> None:
|
||||
"""Initialize Aladdin Connect Genie auth."""
|
||||
super().__init__(
|
||||
websession, API_URL, oauth_session.token["access_token"], API_KEY
|
||||
)
|
||||
self._oauth_session = oauth_session
|
||||
|
||||
async def async_get_access_token(self) -> str:
|
||||
"""Return a valid access token."""
|
||||
if not self._oauth_session.valid_token:
|
||||
await self._oauth_session.async_ensure_token_valid()
|
||||
|
||||
return cast(str, self._oauth_session.token["access_token"])
|
@@ -1,14 +0,0 @@
|
||||
"""application_credentials platform the Aladdin Connect Genie integration."""
|
||||
|
||||
from homeassistant.components.application_credentials import AuthorizationServer
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN
|
||||
|
||||
|
||||
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
|
||||
"""Return authorization server."""
|
||||
return AuthorizationServer(
|
||||
authorize_url=OAUTH2_AUTHORIZE,
|
||||
token_url=OAUTH2_TOKEN,
|
||||
)
|
@@ -1,63 +1,11 @@
|
||||
"""Config flow for Aladdin Connect Genie."""
|
||||
"""Config flow for Aladdin Connect integration."""
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
from homeassistant.config_entries import ConfigFlow
|
||||
|
||||
import jwt
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from .const import CONFIG_FLOW_MINOR_VERSION, CONFIG_FLOW_VERSION, DOMAIN
|
||||
from . import DOMAIN
|
||||
|
||||
|
||||
class OAuth2FlowHandler(
|
||||
config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN
|
||||
):
|
||||
"""Config flow to handle Aladdin Connect Genie OAuth2 authentication."""
|
||||
class AladdinConnectConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Aladdin Connect."""
|
||||
|
||||
DOMAIN = DOMAIN
|
||||
VERSION = CONFIG_FLOW_VERSION
|
||||
MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION
|
||||
|
||||
async def async_step_reauth(
|
||||
self, user_input: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Perform reauth upon API auth error or upgrade from v1 to v2."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: Mapping[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema({}),
|
||||
)
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
|
||||
"""Create an oauth config entry or update existing entry for reauth."""
|
||||
# Extract the user ID from the JWT token's 'sub' field
|
||||
token = jwt.decode(
|
||||
data["token"]["access_token"], options={"verify_signature": False}
|
||||
)
|
||||
user_id = token["sub"]
|
||||
await self.async_set_unique_id(user_id)
|
||||
|
||||
if self.source == SOURCE_REAUTH:
|
||||
self._abort_if_unique_id_mismatch(reason="wrong_account")
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), data=data
|
||||
)
|
||||
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(title="Aladdin Connect", data=data)
|
||||
|
||||
@property
|
||||
def logger(self) -> logging.Logger:
|
||||
"""Return logger."""
|
||||
return logging.getLogger(__name__)
|
||||
VERSION = 1
|
||||
|
@@ -1,14 +0,0 @@
|
||||
"""Constants for the Aladdin Connect Genie integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
from homeassistant.components.cover import CoverEntityFeature
|
||||
|
||||
DOMAIN = "aladdin_connect"
|
||||
CONFIG_FLOW_VERSION = 2
|
||||
CONFIG_FLOW_MINOR_VERSION = 1
|
||||
|
||||
OAUTH2_AUTHORIZE = "https://app.aladdinconnect.com/login.html"
|
||||
OAUTH2_TOKEN = "https://twdvzuefzh.execute-api.us-east-2.amazonaws.com/v1/oauth2/token"
|
||||
|
||||
SUPPORTED_FEATURES: Final = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
|
@@ -1,44 +0,0 @@
|
||||
"""Coordinator for Aladdin Connect integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from genie_partner_sdk.client import AladdinConnectClient
|
||||
from genie_partner_sdk.model import GarageDoor
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
type AladdinConnectConfigEntry = ConfigEntry[dict[str, AladdinConnectCoordinator]]
|
||||
SCAN_INTERVAL = timedelta(seconds=15)
|
||||
|
||||
|
||||
class AladdinConnectCoordinator(DataUpdateCoordinator[GarageDoor]):
|
||||
"""Coordinator for Aladdin Connect integration."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: AladdinConnectConfigEntry,
|
||||
client: AladdinConnectClient,
|
||||
garage_door: GarageDoor,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
logger=_LOGGER,
|
||||
config_entry=entry,
|
||||
name="Aladdin Connect Coordinator",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
self.client = client
|
||||
self.data = garage_door
|
||||
|
||||
async def _async_update_data(self) -> GarageDoor:
|
||||
"""Fetch data from the Aladdin Connect API."""
|
||||
await self.client.update_door(self.data.device_id, self.data.door_number)
|
||||
return self.data
|
@@ -1,62 +0,0 @@
|
||||
"""Cover Entity for Genie Garage Door."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import SUPPORTED_FEATURES
|
||||
from .coordinator import AladdinConnectConfigEntry, AladdinConnectCoordinator
|
||||
from .entity import AladdinConnectEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AladdinConnectConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the cover platform."""
|
||||
coordinators = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
AladdinCoverEntity(coordinator) for coordinator in coordinators.values()
|
||||
)
|
||||
|
||||
|
||||
class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
|
||||
"""Representation of Aladdin Connect cover."""
|
||||
|
||||
_attr_device_class = CoverDeviceClass.GARAGE
|
||||
_attr_supported_features = SUPPORTED_FEATURES
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, coordinator: AladdinConnectCoordinator) -> None:
|
||||
"""Initialize the Aladdin Connect cover."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = coordinator.data.unique_id
|
||||
|
||||
async def async_open_cover(self, **kwargs: Any) -> None:
|
||||
"""Issue open command to cover."""
|
||||
await self.client.open_door(self._device_id, self._number)
|
||||
|
||||
async def async_close_cover(self, **kwargs: Any) -> None:
|
||||
"""Issue close command to cover."""
|
||||
await self.client.close_door(self._device_id, self._number)
|
||||
|
||||
@property
|
||||
def is_closed(self) -> bool | None:
|
||||
"""Update is closed attribute."""
|
||||
return self.coordinator.data.status == "closed"
|
||||
|
||||
@property
|
||||
def is_closing(self) -> bool | None:
|
||||
"""Update is closing attribute."""
|
||||
return self.coordinator.data.status == "closing"
|
||||
|
||||
@property
|
||||
def is_opening(self) -> bool | None:
|
||||
"""Update is opening attribute."""
|
||||
return self.coordinator.data.status == "opening"
|
@@ -1,32 +0,0 @@
|
||||
"""Base class for Aladdin Connect entities."""
|
||||
|
||||
from genie_partner_sdk.client import AladdinConnectClient
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AladdinConnectCoordinator
|
||||
|
||||
|
||||
class AladdinConnectEntity(CoordinatorEntity[AladdinConnectCoordinator]):
|
||||
"""Defines a base Aladdin Connect entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, coordinator: AladdinConnectCoordinator) -> None:
|
||||
"""Initialize Aladdin Connect entity."""
|
||||
super().__init__(coordinator)
|
||||
device = coordinator.data
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, device.unique_id)},
|
||||
manufacturer="Aladdin Connect",
|
||||
name=device.name,
|
||||
)
|
||||
self._device_id = device.device_id
|
||||
self._number = device.door_number
|
||||
|
||||
@property
|
||||
def client(self) -> AladdinConnectClient:
|
||||
"""Return the client for this entity."""
|
||||
return self.coordinator.client
|
@@ -1,11 +1,9 @@
|
||||
{
|
||||
"domain": "aladdin_connect",
|
||||
"name": "Aladdin Connect",
|
||||
"codeowners": ["@swcloudgenie"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["application_credentials"],
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["genie-partner-sdk==1.0.10"]
|
||||
"requirements": []
|
||||
}
|
||||
|
@@ -1,94 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register any service actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow: done
|
||||
config-flow-test-coverage: todo
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not register any service actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
docs-removal-instructions:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Integration does not subscribe to external events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure:
|
||||
status: todo
|
||||
comment: Config flow does not currently test connection during setup.
|
||||
test-before-setup: todo
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
docs-installation-parameters:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
entity-unavailable: todo
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: done
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: Platform tests for cover and sensor need to be implemented to reach 95% coverage.
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery: todo
|
||||
discovery-update-info: todo
|
||||
docs-data-update:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
docs-examples:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
docs-known-limitations:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
docs-supported-devices:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
docs-supported-functions:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
docs-troubleshooting:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
docs-use-cases:
|
||||
status: todo
|
||||
comment: Documentation needs to be created.
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices:
|
||||
status: todo
|
||||
comment: Stale devices can be done dynamically
|
||||
|
||||
# Platinum
|
||||
async-dependency: todo
|
||||
inject-websession: done
|
||||
strict-typing: done
|
@@ -1,77 +0,0 @@
|
||||
"""Support for Aladdin Connect Genie sensors."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from genie_partner_sdk.model import GarageDoor
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import AladdinConnectConfigEntry, AladdinConnectCoordinator
|
||||
from .entity import AladdinConnectEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AladdinConnectSensorEntityDescription(SensorEntityDescription):
|
||||
"""Sensor entity description for Aladdin Connect."""
|
||||
|
||||
value_fn: Callable[[GarageDoor], float | None]
|
||||
|
||||
|
||||
SENSOR_TYPES: tuple[AladdinConnectSensorEntityDescription, ...] = (
|
||||
AladdinConnectSensorEntityDescription(
|
||||
key="battery_level",
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda garage_door: garage_door.battery_level,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AladdinConnectConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Aladdin Connect sensor devices."""
|
||||
coordinators = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
AladdinConnectSensor(coordinator, description)
|
||||
for coordinator in coordinators.values()
|
||||
for description in SENSOR_TYPES
|
||||
)
|
||||
|
||||
|
||||
class AladdinConnectSensor(AladdinConnectEntity, SensorEntity):
|
||||
"""A sensor implementation for Aladdin Connect device."""
|
||||
|
||||
entity_description: AladdinConnectSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AladdinConnectCoordinator,
|
||||
entity_description: AladdinConnectSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the Aladdin Connect sensor."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id = f"{coordinator.data.unique_id}-{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(self.coordinator.data)
|
@@ -1,30 +1,8 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
"description": "Aladdin Connect needs to re-authenticate your account"
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
|
||||
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
|
||||
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
|
||||
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
|
||||
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
|
||||
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
|
||||
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
|
||||
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
"issues": {
|
||||
"integration_removed": {
|
||||
"title": "The Aladdin Connect integration has been removed",
|
||||
"description": "The Aladdin Connect integration has been removed from Home Assistant.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing Aladdin Connect integration entries]({entries})."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,7 +1,4 @@
|
||||
"""Support for repeating alerts when conditions are met.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Support for repeating alerts when conditions are met."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -66,10 +63,7 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Alert component.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Set up the Alert component."""
|
||||
component = EntityComponent[AlertEntity](LOGGER, DOMAIN, hass)
|
||||
|
||||
entities: list[AlertEntity] = []
|
||||
|
@@ -1,7 +1,4 @@
|
||||
"""Support for repeating alerts when conditions are met.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Support for repeating alerts when conditions are met."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -30,10 +27,7 @@ from .const import DOMAIN, LOGGER
|
||||
|
||||
|
||||
class AlertEntity(Entity):
|
||||
"""Representation of an alert.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Representation of an alert."""
|
||||
|
||||
_attr_should_poll = False
|
||||
|
||||
|
@@ -1,7 +1,4 @@
|
||||
"""Reproduce an Alert state.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
"""Reproduce an Alert state."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@@ -1,11 +1,11 @@
|
||||
"""Alexa Devices integration."""
|
||||
|
||||
from homeassistant.const import CONF_COUNTRY, Platform
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client, config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, CONF_SITE, COUNTRY_DOMAINS, DOMAIN
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator
|
||||
from .services import async_setup_services
|
||||
|
||||
@@ -40,48 +40,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bo
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
|
||||
if entry.version == 1 and entry.minor_version < 3:
|
||||
if CONF_SITE in entry.data:
|
||||
# Site in data (wrong place), just move to login data
|
||||
new_data = entry.data.copy()
|
||||
new_data[CONF_LOGIN_DATA][CONF_SITE] = new_data[CONF_SITE]
|
||||
new_data.pop(CONF_SITE)
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, data=new_data, version=1, minor_version=3
|
||||
)
|
||||
return True
|
||||
|
||||
if CONF_SITE in entry.data[CONF_LOGIN_DATA]:
|
||||
# Site is there, just update version to avoid future migrations
|
||||
hass.config_entries.async_update_entry(entry, version=1, minor_version=3)
|
||||
return True
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migrating from version %s.%s", entry.version, entry.minor_version
|
||||
)
|
||||
|
||||
# Convert country in domain
|
||||
country = entry.data[CONF_COUNTRY].lower()
|
||||
domain = COUNTRY_DOMAINS.get(country, country)
|
||||
|
||||
# Add site to login data
|
||||
new_data = entry.data.copy()
|
||||
new_data[CONF_LOGIN_DATA][CONF_SITE] = f"https://www.amazon.{domain}"
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, data=new_data, version=1, minor_version=3
|
||||
)
|
||||
|
||||
_LOGGER.info(
|
||||
"Migration to version %s.%s successful", entry.version, entry.minor_version
|
||||
)
|
||||
|
||||
return True
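For illustration (values taken from the COUNTRY_DOMAINS table shown further down in this diff), the conversion performed by this migration looks like the following sketch.
domain = COUNTRY_DOMAINS.get("gb", "gb")  # -> "co.uk"
site = f"https://www.amazon.{domain}"  # -> "https://www.amazon.co.uk"
# A country without a special mapping falls back to itself, e.g. "de" -> "https://www.amazon.de".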
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
@@ -10,14 +10,16 @@ from aioamazondevices.exceptions import (
|
||||
CannotAuthenticate,
|
||||
CannotConnect,
|
||||
CannotRetrieveData,
|
||||
WrongCountry,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_CODE, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.const import CONF_CODE, CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.selector import CountrySelector
|
||||
|
||||
from .const import CONF_LOGIN_DATA, DOMAIN
|
||||
|
||||
@@ -27,12 +29,6 @@ STEP_REAUTH_DATA_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_CODE): cv.string,
|
||||
}
|
||||
)
|
||||
STEP_RECONFIGURE = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_CODE): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
|
||||
@@ -41,6 +37,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
|
||||
session = aiohttp_client.async_create_clientsession(hass)
|
||||
api = AmazonEchoApi(
|
||||
session,
|
||||
data[CONF_COUNTRY],
|
||||
data[CONF_USERNAME],
|
||||
data[CONF_PASSWORD],
|
||||
)
|
||||
@@ -51,9 +48,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
|
||||
class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Alexa Devices."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 3
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -64,10 +58,12 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data = await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except (CannotAuthenticate, TypeError):
|
||||
except CannotAuthenticate:
|
||||
errors["base"] = "invalid_auth"
|
||||
except CannotRetrieveData:
|
||||
errors["base"] = "cannot_retrieve_data"
|
||||
except WrongCountry:
|
||||
errors["base"] = "wrong_country"
|
||||
else:
|
||||
await self.async_set_unique_id(data["customer_info"]["user_id"])
|
||||
self._abort_if_unique_id_configured()
|
||||
@@ -82,6 +78,9 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_COUNTRY, default=self.hass.config.country
|
||||
): CountrySelector(),
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_CODE): cv.string,
|
||||
@@ -110,7 +109,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
await validate_input(self.hass, {**reauth_entry.data, **user_input})
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except (CannotAuthenticate, TypeError):
|
||||
except CannotAuthenticate:
|
||||
errors["base"] = "invalid_auth"
|
||||
except CannotRetrieveData:
|
||||
errors["base"] = "cannot_retrieve_data"
|
||||
@@ -130,47 +129,3 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data_schema=STEP_REAUTH_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the device."""
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
if not user_input:
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=STEP_RECONFIGURE,
|
||||
)
|
||||
|
||||
updated_password = user_input[CONF_PASSWORD]
|
||||
|
||||
self._async_abort_entries_match(
|
||||
{CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME]}
|
||||
)
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
try:
|
||||
data = await validate_input(
|
||||
self.hass, {**reconfigure_entry.data, **user_input}
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except CannotAuthenticate:
|
||||
errors["base"] = "invalid_auth"
|
||||
except CannotRetrieveData:
|
||||
errors["base"] = "cannot_retrieve_data"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry,
|
||||
data_updates={
|
||||
CONF_PASSWORD: updated_password,
|
||||
CONF_LOGIN_DATA: data,
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=STEP_RECONFIGURE,
|
||||
errors=errors,
|
||||
)
|
||||
|
@@ -6,23 +6,3 @@ _LOGGER = logging.getLogger(__package__)
|
||||
|
||||
DOMAIN = "alexa_devices"
|
||||
CONF_LOGIN_DATA = "login_data"
|
||||
CONF_SITE = "site"
|
||||
|
||||
DEFAULT_DOMAIN = "com"
|
||||
COUNTRY_DOMAINS = {
|
||||
"ar": DEFAULT_DOMAIN,
|
||||
"at": DEFAULT_DOMAIN,
|
||||
"au": "com.au",
|
||||
"be": "com.be",
|
||||
"br": DEFAULT_DOMAIN,
|
||||
"gb": "co.uk",
|
||||
"il": DEFAULT_DOMAIN,
|
||||
"jp": "co.jp",
|
||||
"mx": "com.mx",
|
||||
"no": DEFAULT_DOMAIN,
|
||||
"nz": "com.au",
|
||||
"pl": DEFAULT_DOMAIN,
|
||||
"tr": "com.tr",
|
||||
"us": DEFAULT_DOMAIN,
|
||||
"za": "co.za",
|
||||
}
|
||||
|
@@ -11,10 +11,9 @@ from aioamazondevices.exceptions import (
|
||||
from aiohttp import ClientSession
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
|
||||
@@ -45,17 +44,17 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
)
|
||||
self.api = AmazonEchoApi(
|
||||
session,
|
||||
entry.data[CONF_COUNTRY],
|
||||
entry.data[CONF_USERNAME],
|
||||
entry.data[CONF_PASSWORD],
|
||||
entry.data[CONF_LOGIN_DATA],
|
||||
)
|
||||
self.previous_devices: set[str] = set()
|
||||
|
||||
async def _async_update_data(self) -> dict[str, AmazonDevice]:
|
||||
"""Update device data."""
|
||||
try:
|
||||
await self.api.login_mode_stored_data()
|
||||
data = await self.api.get_devices_data()
|
||||
return await self.api.get_devices_data()
|
||||
except CannotConnect as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
@@ -68,37 +67,9 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
translation_key="cannot_retrieve_data_with_error",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
except (CannotAuthenticate, TypeError) as err:
|
||||
except CannotAuthenticate as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_auth",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
else:
|
||||
current_devices = set(data.keys())
|
||||
if stale_devices := self.previous_devices - current_devices:
|
||||
await self._async_remove_device_stale(stale_devices)
|
||||
|
||||
self.previous_devices = current_devices
|
||||
return data
|
||||
|
||||
async def _async_remove_device_stale(
|
||||
self,
|
||||
stale_devices: set[str],
|
||||
) -> None:
|
||||
"""Remove stale device."""
|
||||
device_registry = dr.async_get(self.hass)
|
||||
|
||||
for serial_num in stale_devices:
|
||||
_LOGGER.debug(
|
||||
"Detected change in devices: serial %s removed",
|
||||
serial_num,
|
||||
)
|
||||
device = device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, serial_num)}
|
||||
)
|
||||
if device:
|
||||
device_registry.async_update_device(
|
||||
device_id=device.id,
|
||||
remove_config_entry_id=self.config_entry.entry_id,
|
||||
)
|
||||
|
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioamazondevices==6.0.0"]
|
||||
"requirements": ["aioamazondevices==4.0.0"]
|
||||
}
|
||||
|
@@ -60,11 +60,13 @@ rules:
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: no known use cases for repair issues or flows, yet
|
||||
stale-devices: done
|
||||
stale-devices:
|
||||
status: todo
|
||||
comment: automate the cleanup process
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
|
@@ -12,7 +12,6 @@ from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import LIGHT_LUX, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -42,13 +41,11 @@ SENSORS: Final = (
|
||||
if device.sensors[_key].scale == "CELSIUS"
|
||||
else UnitOfTemperature.FAHRENHEIT
|
||||
),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AmazonSensorEntityDescription(
|
||||
key="illuminance",
|
||||
device_class=SensorDeviceClass.ILLUMINANCE,
|
||||
native_unit_of_measurement=LIGHT_LUX,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
)
|
||||
|
||||
|
@@ -14,12 +14,14 @@ from .coordinator import AmazonConfigEntry
|
||||
|
||||
ATTR_TEXT_COMMAND = "text_command"
|
||||
ATTR_SOUND = "sound"
|
||||
ATTR_SOUND_VARIANT = "sound_variant"
|
||||
SERVICE_TEXT_COMMAND = "send_text_command"
|
||||
SERVICE_SOUND_NOTIFICATION = "send_sound"
|
||||
|
||||
SCHEMA_SOUND_SERVICE = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_SOUND): cv.string,
|
||||
vol.Required(ATTR_SOUND_VARIANT): cv.positive_int,
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
},
|
||||
)
|
||||
@@ -73,14 +75,17 @@ async def _async_execute_action(call: ServiceCall, attribute: str) -> None:
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
if attribute == ATTR_SOUND:
|
||||
if value not in SOUNDS_LIST:
|
||||
variant: int = call.data[ATTR_SOUND_VARIANT]
|
||||
pad = "_" if variant > 10 else "_0"
|
||||
file = f"{value}{pad}{variant!s}"
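# Illustration (not in the original source): sound "amzn_sfx_doorbell_chime" with
# variant 3 becomes "amzn_sfx_doorbell_chime_03", while variant 11 becomes
# "amzn_sfx_doorbell_chime_11".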
|
||||
if value not in SOUNDS_LIST or variant > SOUNDS_LIST[value]:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_sound_value",
|
||||
translation_placeholders={"sound": value},
|
||||
translation_placeholders={"sound": value, "variant": str(variant)},
|
||||
)
|
||||
await coordinator.api.call_alexa_sound(
|
||||
coordinator.data[device.serial_number], value
|
||||
coordinator.data[device.serial_number], file
|
||||
)
|
||||
elif attribute == ATTR_TEXT_COMMAND:
|
||||
await coordinator.api.call_alexa_text_command(
|
||||
|
@@ -18,6 +18,14 @@ send_sound:
|
||||
selector:
|
||||
device:
|
||||
integration: alexa_devices
|
||||
sound_variant:
|
||||
required: true
|
||||
example: 1
|
||||
default: 1
|
||||
selector:
|
||||
number:
|
||||
min: 1
|
||||
max: 50
|
||||
sound:
|
||||
required: true
|
||||
example: amzn_sfx_doorbell_chime
|
||||
@@ -25,45 +33,472 @@ send_sound:
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- air_horn_03
|
||||
- amzn_sfx_cat_meow_1x_01
|
||||
- amzn_sfx_church_bell_1x_02
|
||||
- amzn_sfx_crowd_applause_01
|
||||
- amzn_sfx_dog_med_bark_1x_02
|
||||
- amzn_sfx_doorbell_01
|
||||
- amzn_sfx_doorbell_chime_01
|
||||
- amzn_sfx_doorbell_chime_02
|
||||
- amzn_sfx_large_crowd_cheer_01
|
||||
- amzn_sfx_lion_roar_02
|
||||
- amzn_sfx_rooster_crow_01
|
||||
- amzn_sfx_scifi_alarm_01
|
||||
- amzn_sfx_scifi_alarm_04
|
||||
- amzn_sfx_scifi_engines_on_02
|
||||
- amzn_sfx_scifi_sheilds_up_01
|
||||
- amzn_sfx_trumpet_bugle_04
|
||||
- amzn_sfx_wolf_howl_02
|
||||
- bell_02
|
||||
- boing_01
|
||||
- boing_03
|
||||
- buzzers_pistols_01
|
||||
- camera_01
|
||||
- christmas_05
|
||||
- clock_01
|
||||
- futuristic_10
|
||||
- halloween_bats
|
||||
- halloween_crows
|
- halloween_footsteps
- halloween_wind
- halloween_wolf
- holiday_halloween_ghost
- horror_10
- med_system_alerts_minimal_dragon_short
- med_system_alerts_minimal_owl_short
- med_system_alerts_minimals_blue_wave_small
- med_system_alerts_minimals_galaxy_short
- med_system_alerts_minimals_panda_short
- med_system_alerts_minimals_tiger_short
- med_ui_success_generic_1-1
- squeaky_12
- zap_01
- air_horn
- air_horns
- airboat
- airport
- aliens
- amzn_sfx_airplane_takeoff_whoosh
- amzn_sfx_army_march_clank_7x
- amzn_sfx_army_march_large_8x
- amzn_sfx_army_march_small_8x
- amzn_sfx_baby_big_cry
- amzn_sfx_baby_cry
- amzn_sfx_baby_fuss
- amzn_sfx_battle_group_clanks
- amzn_sfx_battle_man_grunts
- amzn_sfx_battle_men_grunts
- amzn_sfx_battle_men_horses
- amzn_sfx_battle_noisy_clanks
- amzn_sfx_battle_yells_men
- amzn_sfx_battle_yells_men_run
- amzn_sfx_bear_groan_roar
- amzn_sfx_bear_roar_grumble
- amzn_sfx_bear_roar_small
- amzn_sfx_beep_1x
- amzn_sfx_bell_med_chime
- amzn_sfx_bell_short_chime
- amzn_sfx_bell_timer
- amzn_sfx_bicycle_bell_ring
- amzn_sfx_bird_chickadee_chirp_1x
- amzn_sfx_bird_chickadee_chirps
- amzn_sfx_bird_forest
- amzn_sfx_bird_forest_short
- amzn_sfx_bird_robin_chirp_1x
- amzn_sfx_boing_long_1x
- amzn_sfx_boing_med_1x
- amzn_sfx_boing_short_1x
- amzn_sfx_bus_drive_past
- amzn_sfx_buzz_electronic
- amzn_sfx_buzzer_loud_alarm
- amzn_sfx_buzzer_small
- amzn_sfx_car_accelerate
- amzn_sfx_car_accelerate_noisy
- amzn_sfx_car_click_seatbelt
- amzn_sfx_car_close_door_1x
- amzn_sfx_car_drive_past
- amzn_sfx_car_honk_1x
- amzn_sfx_car_honk_2x
- amzn_sfx_car_honk_3x
- amzn_sfx_car_honk_long_1x
- amzn_sfx_car_into_driveway
- amzn_sfx_car_into_driveway_fast
- amzn_sfx_car_slam_door_1x
- amzn_sfx_car_undo_seatbelt
- amzn_sfx_cat_angry_meow_1x
- amzn_sfx_cat_angry_screech_1x
- amzn_sfx_cat_long_meow_1x
- amzn_sfx_cat_meow_1x
- amzn_sfx_cat_purr
- amzn_sfx_cat_purr_meow
- amzn_sfx_chicken_cluck
- amzn_sfx_church_bell_1x
- amzn_sfx_church_bells_ringing
- amzn_sfx_clear_throat_ahem
- amzn_sfx_clock_ticking
- amzn_sfx_clock_ticking_long
- amzn_sfx_copy_machine
- amzn_sfx_cough
- amzn_sfx_crow_caw_1x
- amzn_sfx_crowd_applause
- amzn_sfx_crowd_bar
- amzn_sfx_crowd_bar_rowdy
- amzn_sfx_crowd_boo
- amzn_sfx_crowd_cheer_med
- amzn_sfx_crowd_excited_cheer
- amzn_sfx_dog_med_bark_1x
- amzn_sfx_dog_med_bark_2x
- amzn_sfx_dog_med_bark_growl
- amzn_sfx_dog_med_growl_1x
- amzn_sfx_dog_med_woof_1x
- amzn_sfx_dog_small_bark_2x
- amzn_sfx_door_open
- amzn_sfx_door_shut
- amzn_sfx_doorbell
- amzn_sfx_doorbell_buzz
- amzn_sfx_doorbell_chime
- amzn_sfx_drinking_slurp
- amzn_sfx_drum_and_cymbal
- amzn_sfx_drum_comedy
- amzn_sfx_earthquake_rumble
- amzn_sfx_electric_guitar
- amzn_sfx_electronic_beep
- amzn_sfx_electronic_major_chord
- amzn_sfx_elephant
- amzn_sfx_elevator_bell_1x
- amzn_sfx_elevator_open_bell
- amzn_sfx_fairy_melodic_chimes
- amzn_sfx_fairy_sparkle_chimes
- amzn_sfx_faucet_drip
- amzn_sfx_faucet_running
- amzn_sfx_fireplace_crackle
- amzn_sfx_fireworks
- amzn_sfx_fireworks_firecrackers
- amzn_sfx_fireworks_launch
- amzn_sfx_fireworks_whistles
- amzn_sfx_food_frying
- amzn_sfx_footsteps
- amzn_sfx_footsteps_muffled
- amzn_sfx_ghost_spooky
- amzn_sfx_glass_on_table
- amzn_sfx_glasses_clink
- amzn_sfx_horse_gallop_4x
- amzn_sfx_horse_huff_whinny
- amzn_sfx_horse_neigh
- amzn_sfx_horse_neigh_low
- amzn_sfx_horse_whinny
- amzn_sfx_human_walking
- amzn_sfx_jar_on_table_1x
- amzn_sfx_kitchen_ambience
- amzn_sfx_large_crowd_cheer
- amzn_sfx_large_fire_crackling
- amzn_sfx_laughter
- amzn_sfx_laughter_giggle
- amzn_sfx_lightning_strike
- amzn_sfx_lion_roar
- amzn_sfx_magic_blast_1x
- amzn_sfx_monkey_calls_3x
- amzn_sfx_monkey_chimp
- amzn_sfx_monkeys_chatter
- amzn_sfx_motorcycle_accelerate
- amzn_sfx_motorcycle_engine_idle
- amzn_sfx_motorcycle_engine_rev
- amzn_sfx_musical_drone_intro
- amzn_sfx_oars_splashing_rowboat
- amzn_sfx_object_on_table_2x
- amzn_sfx_ocean_wave_1x
- amzn_sfx_ocean_wave_on_rocks_1x
- amzn_sfx_ocean_wave_surf
- amzn_sfx_people_walking
- amzn_sfx_person_running
- amzn_sfx_piano_note_1x
- amzn_sfx_punch
- amzn_sfx_rain
- amzn_sfx_rain_on_roof
- amzn_sfx_rain_thunder
- amzn_sfx_rat_squeak_2x
- amzn_sfx_rat_squeaks
- amzn_sfx_raven_caw_1x
- amzn_sfx_raven_caw_2x
- amzn_sfx_restaurant_ambience
- amzn_sfx_rooster_crow
- amzn_sfx_scifi_air_escaping
- amzn_sfx_scifi_alarm
- amzn_sfx_scifi_alien_voice
- amzn_sfx_scifi_boots_walking
- amzn_sfx_scifi_close_large_explosion
- amzn_sfx_scifi_door_open
- amzn_sfx_scifi_engines_on
- amzn_sfx_scifi_engines_on_large
- amzn_sfx_scifi_engines_on_short_burst
- amzn_sfx_scifi_explosion
- amzn_sfx_scifi_explosion_2x
- amzn_sfx_scifi_incoming_explosion
- amzn_sfx_scifi_laser_gun_battle
- amzn_sfx_scifi_laser_gun_fires
- amzn_sfx_scifi_laser_gun_fires_large
- amzn_sfx_scifi_long_explosion_1x
- amzn_sfx_scifi_missile
- amzn_sfx_scifi_motor_short_1x
- amzn_sfx_scifi_open_airlock
- amzn_sfx_scifi_radar_high_ping
- amzn_sfx_scifi_radar_low
- amzn_sfx_scifi_radar_medium
- amzn_sfx_scifi_run_away
- amzn_sfx_scifi_sheilds_up
- amzn_sfx_scifi_short_low_explosion
- amzn_sfx_scifi_small_whoosh_flyby
- amzn_sfx_scifi_small_zoom_flyby
- amzn_sfx_scifi_sonar_ping_3x
- amzn_sfx_scifi_sonar_ping_4x
- amzn_sfx_scifi_spaceship_flyby
- amzn_sfx_scifi_timer_beep
- amzn_sfx_scifi_zap_backwards
- amzn_sfx_scifi_zap_electric
- amzn_sfx_sheep_baa
- amzn_sfx_sheep_bleat
- amzn_sfx_silverware_clank
- amzn_sfx_sirens
- amzn_sfx_sleigh_bells
- amzn_sfx_small_stream
- amzn_sfx_sneeze
- amzn_sfx_stream
- amzn_sfx_strong_wind_desert
- amzn_sfx_strong_wind_whistling
- amzn_sfx_subway_leaving
- amzn_sfx_subway_passing
- amzn_sfx_subway_stopping
- amzn_sfx_swoosh_cartoon_fast
- amzn_sfx_swoosh_fast_1x
- amzn_sfx_swoosh_fast_6x
- amzn_sfx_test_tone
- amzn_sfx_thunder_rumble
- amzn_sfx_toilet_flush
- amzn_sfx_trumpet_bugle
- amzn_sfx_turkey_gobbling
- amzn_sfx_typing_medium
- amzn_sfx_typing_short
- amzn_sfx_typing_typewriter
- amzn_sfx_vacuum_off
- amzn_sfx_vacuum_on
- amzn_sfx_walking_in_mud
- amzn_sfx_walking_in_snow
- amzn_sfx_walking_on_grass
- amzn_sfx_water_dripping
- amzn_sfx_water_droplets
- amzn_sfx_wind_strong_gusting
- amzn_sfx_wind_whistling_desert
- amzn_sfx_wings_flap_4x
- amzn_sfx_wings_flap_fast
- amzn_sfx_wolf_howl
- amzn_sfx_wolf_young_howl
- amzn_sfx_wooden_door
- amzn_sfx_wooden_door_creaks_long
- amzn_sfx_wooden_door_creaks_multiple
- amzn_sfx_wooden_door_creaks_open
- amzn_ui_sfx_gameshow_bridge
- amzn_ui_sfx_gameshow_countdown_loop_32s_full
- amzn_ui_sfx_gameshow_countdown_loop_64s_full
- amzn_ui_sfx_gameshow_countdown_loop_64s_minimal
- amzn_ui_sfx_gameshow_intro
- amzn_ui_sfx_gameshow_negative_response
- amzn_ui_sfx_gameshow_neutral_response
- amzn_ui_sfx_gameshow_outro
- amzn_ui_sfx_gameshow_player1
- amzn_ui_sfx_gameshow_player2
- amzn_ui_sfx_gameshow_player3
- amzn_ui_sfx_gameshow_player4
- amzn_ui_sfx_gameshow_positive_response
- amzn_ui_sfx_gameshow_tally_negative
- amzn_ui_sfx_gameshow_tally_positive
- amzn_ui_sfx_gameshow_waiting_loop_30s
- anchor
- answering_machines
- arcs_sparks
- arrows_bows
- baby
- back_up_beeps
- bars_restaurants
- baseball
- basketball
- battles
- beeps_tones
- bell
- bikes
- billiards
- board_games
- body
- boing
- books
- bow_wash
- box
- break_shatter_smash
- breaks
- brooms_mops
- bullets
- buses
- buzz
- buzz_hums
- buzzers
- buzzers_pistols
- cables_metal
- camera
- cannons
- car_alarm
- car_alarms
- car_cell_phones
- carnivals_fairs
- cars
- casino
- casinos
- cellar
- chimes
- chimes_bells
- chorus
- christmas
- church_bells
- clock
- cloth
- concrete
- construction
- construction_factory
- crashes
- crowds
- debris
- dining_kitchens
- dinosaurs
- dripping
- drops
- electric
- electrical
- elevator
- evolution_monsters
- explosions
- factory
- falls
- fax_scanner_copier
- feedback_mics
- fight
- fire
- fire_extinguisher
- fireballs
- fireworks
- fishing_pole
- flags
- football
- footsteps
- futuristic
- futuristic_ship
- gameshow
- gear
- ghosts_demons
- giant_monster
- glass
- glasses_clink
- golf
- gorilla
- grenade_lanucher
- griffen
- gyms_locker_rooms
- handgun_loading
- handgun_shot
- handle
- hands
- heartbeats_ekg
- helicopter
- high_tech
- hit_punch_slap
- hits
- horns
- horror
- hot_tub_filling_up
- human
- human_vocals
- hygene # codespell:ignore
- ice_skating
- ignitions
- infantry
- intro
- jet
- juggling
- key_lock
- kids
- knocks
- lab_equip
- lacrosse
- lamps_lanterns
- leather
- liquid_suction
- locker_doors
- machine_gun
- magic_spells
- medium_large_explosions
- metal
- modern_rings
- money_coins
- motorcycles
- movement
- moves
- nature
- oar_boat
- pagers
- paintball
- paper
- parachute
- pay_phones
- phone_beeps
- pigmy_bats
- pills
- pour_water
- power_up_down
- printers
- prison
- public_space
- racquetball
- radios_static
- rain
- rc_airplane
- rc_car
- refrigerators_freezers
- regular
- respirator
- rifle
- roller_coaster
- rollerskates_rollerblades
- room_tones
- ropes_climbing
- rotary_rings
- rowboat_canoe
- rubber
- running
- sails
- sand_gravel
- screen_doors
- screens
- seats_stools
- servos
- shoes_boots
- shotgun
- shower
- sink_faucet
- sink_filling_water
- sink_run_and_off
- sink_water_splatter
- sirens
- skateboards
- ski
- skids_tires
- sled
- slides
- small_explosions
- snow
- snowmobile
- soldiers
- splash_water
- splashes_sprays
- sports_whistles
- squeaks
- squeaky
- stairs
- steam
- submarine_diesel
- swing_doors
- switches_levers
- swords
- tape
- tape_machine
- televisions_shows
- tennis_pingpong
- textile
- throw
- thunder
- ticks
- timer
- toilet_flush
- tone
- tones_noises
- toys
- tractors
- traffic
- train
- trucks_vans
- turnstiles
- typing
- umbrella
- underwater
- vampires
- various
- video_tunes
- volcano_earthquake
- watches
- water
- water_running
- werewolves
- winches_gears
- wind
- wood
- wood_boat
- woosh
- zap
- zippers
translation_key: sound
@@ -1,6 +1,7 @@
{
"common": {
"data_code": "One-time password (OTP code)",
"data_description_country": "The country where your Amazon account is registered.",
"data_description_username": "The email address of your Amazon account.",
"data_description_password": "The password of your Amazon account.",
"data_description_code": "The one-time password to log in to your account. Currently, only tokens from OTP applications are supported.",
@@ -11,11 +12,13 @@
"step": {
"user": {
"data": {
"country": "[%key:common::config_flow::data::country%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"code": "[%key:component::alexa_devices::common::data_code%]"
},
"data_description": {
"country": "[%key:component::alexa_devices::common::data_description_country%]",
"username": "[%key:component::alexa_devices::common::data_description_username%]",
"password": "[%key:component::alexa_devices::common::data_description_password%]",
"code": "[%key:component::alexa_devices::common::data_description_code%]"
@@ -30,16 +33,6 @@
"password": "[%key:component::alexa_devices::common::data_description_password%]",
"code": "[%key:component::alexa_devices::common::data_description_code%]"
}
},
"reconfigure": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"code": "[%key:component::alexa_devices::common::data_code%]"
},
"data_description": {
"password": "[%key:component::alexa_devices::common::data_description_password%]",
"code": "[%key:component::alexa_devices::common::data_description_code%]"
}
}
},
"abort": {
@@ -47,13 +40,13 @@
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"cannot_retrieve_data": "Unable to retrieve data from Amazon. Please try again later.",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"wrong_country": "Wrong country selected. Please select the country where your Amazon account is registered.",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},
@@ -104,6 +97,10 @@
"sound": {
"name": "Alexa Skill sound file",
"description": "The sound file to play."
},
"sound_variant": {
"name": "Sound variant",
"description": "The variant of the sound to play."
}
}
},
@@ -125,47 +122,474 @@
"selector": {
"sound": {
"options": {
"air_horn_03": "Air horn",
"amzn_sfx_cat_meow_1x_01": "Cat meow",
"amzn_sfx_church_bell_1x_02": "Church bell",
"amzn_sfx_crowd_applause_01": "Crowd applause",
"amzn_sfx_dog_med_bark_1x_02": "Dog bark",
"amzn_sfx_doorbell_01": "Doorbell 1",
"amzn_sfx_doorbell_chime_01": "Doorbell 2",
"amzn_sfx_doorbell_chime_02": "Doorbell 3",
"amzn_sfx_large_crowd_cheer_01": "Crowd cheers",
"amzn_sfx_lion_roar_02": "Lion roar",
"amzn_sfx_rooster_crow_01": "Rooster",
"amzn_sfx_scifi_alarm_01": "Sirens",
"amzn_sfx_scifi_alarm_04": "Red alert",
"amzn_sfx_scifi_engines_on_02": "Engines on",
"amzn_sfx_scifi_sheilds_up_01": "Shields up",
"amzn_sfx_trumpet_bugle_04": "Trumpet",
"amzn_sfx_wolf_howl_02": "Wolf howl",
"bell_02": "Bells",
"boing_01": "Boing 1",
"boing_03": "Boing 2",
"buzzers_pistols_01": "Buzzer",
"camera_01": "Camera",
"christmas_05": "Christmas bells",
"clock_01": "Ticking clock",
"futuristic_10": "Aircraft",
"halloween_bats": "Halloween bats",
"halloween_crows": "Halloween crows",
"halloween_footsteps": "Halloween spooky footsteps",
"halloween_wind": "Halloween wind",
"halloween_wolf": "Halloween wolf",
"holiday_halloween_ghost": "Halloween ghost",
"horror_10": "Halloween creepy door",
"med_system_alerts_minimal_dragon_short": "Friendly dragon",
"med_system_alerts_minimal_owl_short": "Happy owl",
"med_system_alerts_minimals_blue_wave_small": "Underwater World Sonata",
"med_system_alerts_minimals_galaxy_short": "Infinite Galaxy",
"med_system_alerts_minimals_panda_short": "Baby panda",
"med_system_alerts_minimals_tiger_short": "Playful tiger",
"med_ui_success_generic_1-1": "Success 1",
"squeaky_12": "Squeaky door",
"zap_01": "Zap"
"air_horn": "Air Horn",
"air_horns": "Air Horns",
"airboat": "Airboat",
"airport": "Airport",
"aliens": "Aliens",
"amzn_sfx_airplane_takeoff_whoosh": "Airplane Takeoff Whoosh",
"amzn_sfx_army_march_clank_7x": "Army March Clank 7x",
"amzn_sfx_army_march_large_8x": "Army March Large 8x",
"amzn_sfx_army_march_small_8x": "Army March Small 8x",
"amzn_sfx_baby_big_cry": "Baby Big Cry",
"amzn_sfx_baby_cry": "Baby Cry",
"amzn_sfx_baby_fuss": "Baby Fuss",
"amzn_sfx_battle_group_clanks": "Battle Group Clanks",
"amzn_sfx_battle_man_grunts": "Battle Man Grunts",
"amzn_sfx_battle_men_grunts": "Battle Men Grunts",
"amzn_sfx_battle_men_horses": "Battle Men Horses",
"amzn_sfx_battle_noisy_clanks": "Battle Noisy Clanks",
"amzn_sfx_battle_yells_men": "Battle Yells Men",
"amzn_sfx_battle_yells_men_run": "Battle Yells Men Run",
"amzn_sfx_bear_groan_roar": "Bear Groan Roar",
"amzn_sfx_bear_roar_grumble": "Bear Roar Grumble",
"amzn_sfx_bear_roar_small": "Bear Roar Small",
"amzn_sfx_beep_1x": "Beep 1x",
"amzn_sfx_bell_med_chime": "Bell Med Chime",
"amzn_sfx_bell_short_chime": "Bell Short Chime",
"amzn_sfx_bell_timer": "Bell Timer",
"amzn_sfx_bicycle_bell_ring": "Bicycle Bell Ring",
"amzn_sfx_bird_chickadee_chirp_1x": "Bird Chickadee Chirp 1x",
"amzn_sfx_bird_chickadee_chirps": "Bird Chickadee Chirps",
"amzn_sfx_bird_forest": "Bird Forest",
"amzn_sfx_bird_forest_short": "Bird Forest Short",
"amzn_sfx_bird_robin_chirp_1x": "Bird Robin Chirp 1x",
"amzn_sfx_boing_long_1x": "Boing Long 1x",
"amzn_sfx_boing_med_1x": "Boing Med 1x",
"amzn_sfx_boing_short_1x": "Boing Short 1x",
"amzn_sfx_bus_drive_past": "Bus Drive Past",
"amzn_sfx_buzz_electronic": "Buzz Electronic",
"amzn_sfx_buzzer_loud_alarm": "Buzzer Loud Alarm",
"amzn_sfx_buzzer_small": "Buzzer Small",
"amzn_sfx_car_accelerate": "Car Accelerate",
"amzn_sfx_car_accelerate_noisy": "Car Accelerate Noisy",
"amzn_sfx_car_click_seatbelt": "Car Click Seatbelt",
"amzn_sfx_car_close_door_1x": "Car Close Door 1x",
"amzn_sfx_car_drive_past": "Car Drive Past",
"amzn_sfx_car_honk_1x": "Car Honk 1x",
"amzn_sfx_car_honk_2x": "Car Honk 2x",
"amzn_sfx_car_honk_3x": "Car Honk 3x",
"amzn_sfx_car_honk_long_1x": "Car Honk Long 1x",
"amzn_sfx_car_into_driveway": "Car Into Driveway",
"amzn_sfx_car_into_driveway_fast": "Car Into Driveway Fast",
"amzn_sfx_car_slam_door_1x": "Car Slam Door 1x",
"amzn_sfx_car_undo_seatbelt": "Car Undo Seatbelt",
"amzn_sfx_cat_angry_meow_1x": "Cat Angry Meow 1x",
"amzn_sfx_cat_angry_screech_1x": "Cat Angry Screech 1x",
"amzn_sfx_cat_long_meow_1x": "Cat Long Meow 1x",
"amzn_sfx_cat_meow_1x": "Cat Meow 1x",
"amzn_sfx_cat_purr": "Cat Purr",
"amzn_sfx_cat_purr_meow": "Cat Purr Meow",
"amzn_sfx_chicken_cluck": "Chicken Cluck",
"amzn_sfx_church_bell_1x": "Church Bell 1x",
"amzn_sfx_church_bells_ringing": "Church Bells Ringing",
"amzn_sfx_clear_throat_ahem": "Clear Throat Ahem",
"amzn_sfx_clock_ticking": "Clock Ticking",
"amzn_sfx_clock_ticking_long": "Clock Ticking Long",
"amzn_sfx_copy_machine": "Copy Machine",
"amzn_sfx_cough": "Cough",
"amzn_sfx_crow_caw_1x": "Crow Caw 1x",
"amzn_sfx_crowd_applause": "Crowd Applause",
"amzn_sfx_crowd_bar": "Crowd Bar",
"amzn_sfx_crowd_bar_rowdy": "Crowd Bar Rowdy",
"amzn_sfx_crowd_boo": "Crowd Boo",
"amzn_sfx_crowd_cheer_med": "Crowd Cheer Med",
"amzn_sfx_crowd_excited_cheer": "Crowd Excited Cheer",
"amzn_sfx_dog_med_bark_1x": "Dog Med Bark 1x",
"amzn_sfx_dog_med_bark_2x": "Dog Med Bark 2x",
"amzn_sfx_dog_med_bark_growl": "Dog Med Bark Growl",
"amzn_sfx_dog_med_growl_1x": "Dog Med Growl 1x",
"amzn_sfx_dog_med_woof_1x": "Dog Med Woof 1x",
"amzn_sfx_dog_small_bark_2x": "Dog Small Bark 2x",
"amzn_sfx_door_open": "Door Open",
"amzn_sfx_door_shut": "Door Shut",
"amzn_sfx_doorbell": "Doorbell",
"amzn_sfx_doorbell_buzz": "Doorbell Buzz",
"amzn_sfx_doorbell_chime": "Doorbell Chime",
"amzn_sfx_drinking_slurp": "Drinking Slurp",
"amzn_sfx_drum_and_cymbal": "Drum And Cymbal",
"amzn_sfx_drum_comedy": "Drum Comedy",
"amzn_sfx_earthquake_rumble": "Earthquake Rumble",
"amzn_sfx_electric_guitar": "Electric Guitar",
"amzn_sfx_electronic_beep": "Electronic Beep",
"amzn_sfx_electronic_major_chord": "Electronic Major Chord",
"amzn_sfx_elephant": "Elephant",
"amzn_sfx_elevator_bell_1x": "Elevator Bell 1x",
"amzn_sfx_elevator_open_bell": "Elevator Open Bell",
"amzn_sfx_fairy_melodic_chimes": "Fairy Melodic Chimes",
"amzn_sfx_fairy_sparkle_chimes": "Fairy Sparkle Chimes",
"amzn_sfx_faucet_drip": "Faucet Drip",
"amzn_sfx_faucet_running": "Faucet Running",
"amzn_sfx_fireplace_crackle": "Fireplace Crackle",
"amzn_sfx_fireworks": "Fireworks",
"amzn_sfx_fireworks_firecrackers": "Fireworks Firecrackers",
"amzn_sfx_fireworks_launch": "Fireworks Launch",
"amzn_sfx_fireworks_whistles": "Fireworks Whistles",
"amzn_sfx_food_frying": "Food Frying",
"amzn_sfx_footsteps": "Footsteps",
"amzn_sfx_footsteps_muffled": "Footsteps Muffled",
"amzn_sfx_ghost_spooky": "Ghost Spooky",
"amzn_sfx_glass_on_table": "Glass On Table",
"amzn_sfx_glasses_clink": "Glasses Clink",
"amzn_sfx_horse_gallop_4x": "Horse Gallop 4x",
"amzn_sfx_horse_huff_whinny": "Horse Huff Whinny",
"amzn_sfx_horse_neigh": "Horse Neigh",
"amzn_sfx_horse_neigh_low": "Horse Neigh Low",
"amzn_sfx_horse_whinny": "Horse Whinny",
"amzn_sfx_human_walking": "Human Walking",
"amzn_sfx_jar_on_table_1x": "Jar On Table 1x",
"amzn_sfx_kitchen_ambience": "Kitchen Ambience",
"amzn_sfx_large_crowd_cheer": "Large Crowd Cheer",
"amzn_sfx_large_fire_crackling": "Large Fire Crackling",
"amzn_sfx_laughter": "Laughter",
"amzn_sfx_laughter_giggle": "Laughter Giggle",
"amzn_sfx_lightning_strike": "Lightning Strike",
"amzn_sfx_lion_roar": "Lion Roar",
"amzn_sfx_magic_blast_1x": "Magic Blast 1x",
"amzn_sfx_monkey_calls_3x": "Monkey Calls 3x",
"amzn_sfx_monkey_chimp": "Monkey Chimp",
"amzn_sfx_monkeys_chatter": "Monkeys Chatter",
"amzn_sfx_motorcycle_accelerate": "Motorcycle Accelerate",
"amzn_sfx_motorcycle_engine_idle": "Motorcycle Engine Idle",
"amzn_sfx_motorcycle_engine_rev": "Motorcycle Engine Rev",
"amzn_sfx_musical_drone_intro": "Musical Drone Intro",
"amzn_sfx_oars_splashing_rowboat": "Oars Splashing Rowboat",
"amzn_sfx_object_on_table_2x": "Object On Table 2x",
"amzn_sfx_ocean_wave_1x": "Ocean Wave 1x",
"amzn_sfx_ocean_wave_on_rocks_1x": "Ocean Wave On Rocks 1x",
"amzn_sfx_ocean_wave_surf": "Ocean Wave Surf",
"amzn_sfx_people_walking": "People Walking",
"amzn_sfx_person_running": "Person Running",
"amzn_sfx_piano_note_1x": "Piano Note 1x",
"amzn_sfx_punch": "Punch",
"amzn_sfx_rain": "Rain",
"amzn_sfx_rain_on_roof": "Rain On Roof",
"amzn_sfx_rain_thunder": "Rain Thunder",
"amzn_sfx_rat_squeak_2x": "Rat Squeak 2x",
"amzn_sfx_rat_squeaks": "Rat Squeaks",
"amzn_sfx_raven_caw_1x": "Raven Caw 1x",
"amzn_sfx_raven_caw_2x": "Raven Caw 2x",
"amzn_sfx_restaurant_ambience": "Restaurant Ambience",
"amzn_sfx_rooster_crow": "Rooster Crow",
"amzn_sfx_scifi_air_escaping": "Scifi Air Escaping",
"amzn_sfx_scifi_alarm": "Scifi Alarm",
"amzn_sfx_scifi_alien_voice": "Scifi Alien Voice",
"amzn_sfx_scifi_boots_walking": "Scifi Boots Walking",
"amzn_sfx_scifi_close_large_explosion": "Scifi Close Large Explosion",
"amzn_sfx_scifi_door_open": "Scifi Door Open",
"amzn_sfx_scifi_engines_on": "Scifi Engines On",
"amzn_sfx_scifi_engines_on_large": "Scifi Engines On Large",
"amzn_sfx_scifi_engines_on_short_burst": "Scifi Engines On Short Burst",
"amzn_sfx_scifi_explosion": "Scifi Explosion",
"amzn_sfx_scifi_explosion_2x": "Scifi Explosion 2x",
"amzn_sfx_scifi_incoming_explosion": "Scifi Incoming Explosion",
"amzn_sfx_scifi_laser_gun_battle": "Scifi Laser Gun Battle",
"amzn_sfx_scifi_laser_gun_fires": "Scifi Laser Gun Fires",
"amzn_sfx_scifi_laser_gun_fires_large": "Scifi Laser Gun Fires Large",
"amzn_sfx_scifi_long_explosion_1x": "Scifi Long Explosion 1x",
"amzn_sfx_scifi_missile": "Scifi Missile",
"amzn_sfx_scifi_motor_short_1x": "Scifi Motor Short 1x",
"amzn_sfx_scifi_open_airlock": "Scifi Open Airlock",
"amzn_sfx_scifi_radar_high_ping": "Scifi Radar High Ping",
"amzn_sfx_scifi_radar_low": "Scifi Radar Low",
"amzn_sfx_scifi_radar_medium": "Scifi Radar Medium",
"amzn_sfx_scifi_run_away": "Scifi Run Away",
"amzn_sfx_scifi_sheilds_up": "Scifi Sheilds Up",
"amzn_sfx_scifi_short_low_explosion": "Scifi Short Low Explosion",
"amzn_sfx_scifi_small_whoosh_flyby": "Scifi Small Whoosh Flyby",
"amzn_sfx_scifi_small_zoom_flyby": "Scifi Small Zoom Flyby",
"amzn_sfx_scifi_sonar_ping_3x": "Scifi Sonar Ping 3x",
"amzn_sfx_scifi_sonar_ping_4x": "Scifi Sonar Ping 4x",
"amzn_sfx_scifi_spaceship_flyby": "Scifi Spaceship Flyby",
"amzn_sfx_scifi_timer_beep": "Scifi Timer Beep",
"amzn_sfx_scifi_zap_backwards": "Scifi Zap Backwards",
"amzn_sfx_scifi_zap_electric": "Scifi Zap Electric",
"amzn_sfx_sheep_baa": "Sheep Baa",
"amzn_sfx_sheep_bleat": "Sheep Bleat",
"amzn_sfx_silverware_clank": "Silverware Clank",
"amzn_sfx_sirens": "Sirens",
"amzn_sfx_sleigh_bells": "Sleigh Bells",
"amzn_sfx_small_stream": "Small Stream",
"amzn_sfx_sneeze": "Sneeze",
"amzn_sfx_stream": "Stream",
"amzn_sfx_strong_wind_desert": "Strong Wind Desert",
"amzn_sfx_strong_wind_whistling": "Strong Wind Whistling",
"amzn_sfx_subway_leaving": "Subway Leaving",
"amzn_sfx_subway_passing": "Subway Passing",
"amzn_sfx_subway_stopping": "Subway Stopping",
"amzn_sfx_swoosh_cartoon_fast": "Swoosh Cartoon Fast",
"amzn_sfx_swoosh_fast_1x": "Swoosh Fast 1x",
"amzn_sfx_swoosh_fast_6x": "Swoosh Fast 6x",
"amzn_sfx_test_tone": "Test Tone",
"amzn_sfx_thunder_rumble": "Thunder Rumble",
"amzn_sfx_toilet_flush": "Toilet Flush",
"amzn_sfx_trumpet_bugle": "Trumpet Bugle",
"amzn_sfx_turkey_gobbling": "Turkey Gobbling",
"amzn_sfx_typing_medium": "Typing Medium",
"amzn_sfx_typing_short": "Typing Short",
"amzn_sfx_typing_typewriter": "Typing Typewriter",
"amzn_sfx_vacuum_off": "Vacuum Off",
"amzn_sfx_vacuum_on": "Vacuum On",
"amzn_sfx_walking_in_mud": "Walking In Mud",
"amzn_sfx_walking_in_snow": "Walking In Snow",
"amzn_sfx_walking_on_grass": "Walking On Grass",
"amzn_sfx_water_dripping": "Water Dripping",
"amzn_sfx_water_droplets": "Water Droplets",
"amzn_sfx_wind_strong_gusting": "Wind Strong Gusting",
"amzn_sfx_wind_whistling_desert": "Wind Whistling Desert",
"amzn_sfx_wings_flap_4x": "Wings Flap 4x",
"amzn_sfx_wings_flap_fast": "Wings Flap Fast",
"amzn_sfx_wolf_howl": "Wolf Howl",
"amzn_sfx_wolf_young_howl": "Wolf Young Howl",
"amzn_sfx_wooden_door": "Wooden Door",
"amzn_sfx_wooden_door_creaks_long": "Wooden Door Creaks Long",
"amzn_sfx_wooden_door_creaks_multiple": "Wooden Door Creaks Multiple",
"amzn_sfx_wooden_door_creaks_open": "Wooden Door Creaks Open",
"amzn_ui_sfx_gameshow_bridge": "Gameshow Bridge",
"amzn_ui_sfx_gameshow_countdown_loop_32s_full": "Gameshow Countdown Loop 32s Full",
"amzn_ui_sfx_gameshow_countdown_loop_64s_full": "Gameshow Countdown Loop 64s Full",
"amzn_ui_sfx_gameshow_countdown_loop_64s_minimal": "Gameshow Countdown Loop 64s Minimal",
"amzn_ui_sfx_gameshow_intro": "Gameshow Intro",
"amzn_ui_sfx_gameshow_negative_response": "Gameshow Negative Response",
"amzn_ui_sfx_gameshow_neutral_response": "Gameshow Neutral Response",
"amzn_ui_sfx_gameshow_outro": "Gameshow Outro",
"amzn_ui_sfx_gameshow_player1": "Gameshow Player1",
"amzn_ui_sfx_gameshow_player2": "Gameshow Player2",
"amzn_ui_sfx_gameshow_player3": "Gameshow Player3",
"amzn_ui_sfx_gameshow_player4": "Gameshow Player4",
"amzn_ui_sfx_gameshow_positive_response": "Gameshow Positive Response",
"amzn_ui_sfx_gameshow_tally_negative": "Gameshow Tally Negative",
"amzn_ui_sfx_gameshow_tally_positive": "Gameshow Tally Positive",
"amzn_ui_sfx_gameshow_waiting_loop_30s": "Gameshow Waiting Loop 30s",
"anchor": "Anchor",
"answering_machines": "Answering Machines",
"arcs_sparks": "Arcs Sparks",
"arrows_bows": "Arrows Bows",
"baby": "Baby",
"back_up_beeps": "Back Up Beeps",
"bars_restaurants": "Bars Restaurants",
"baseball": "Baseball",
"basketball": "Basketball",
"battles": "Battles",
"beeps_tones": "Beeps Tones",
"bell": "Bell",
"bikes": "Bikes",
"billiards": "Billiards",
"board_games": "Board Games",
"body": "Body",
"boing": "Boing",
"books": "Books",
"bow_wash": "Bow Wash",
"box": "Box",
"break_shatter_smash": "Break Shatter Smash",
"breaks": "Breaks",
"brooms_mops": "Brooms Mops",
"bullets": "Bullets",
"buses": "Buses",
"buzz": "Buzz",
"buzz_hums": "Buzz Hums",
"buzzers": "Buzzers",
"buzzers_pistols": "Buzzers Pistols",
"cables_metal": "Cables Metal",
"camera": "Camera",
"cannons": "Cannons",
"car_alarm": "Car Alarm",
"car_alarms": "Car Alarms",
"car_cell_phones": "Car Cell Phones",
"carnivals_fairs": "Carnivals Fairs",
"cars": "Cars",
"casino": "Casino",
"casinos": "Casinos",
"cellar": "Cellar",
"chimes": "Chimes",
"chimes_bells": "Chimes Bells",
"chorus": "Chorus",
"christmas": "Christmas",
"church_bells": "Church Bells",
"clock": "Clock",
"cloth": "Cloth",
"concrete": "Concrete",
"construction": "Construction",
"construction_factory": "Construction Factory",
"crashes": "Crashes",
"crowds": "Crowds",
"debris": "Debris",
"dining_kitchens": "Dining Kitchens",
"dinosaurs": "Dinosaurs",
"dripping": "Dripping",
"drops": "Drops",
"electric": "Electric",
"electrical": "Electrical",
"elevator": "Elevator",
"evolution_monsters": "Evolution Monsters",
"explosions": "Explosions",
"factory": "Factory",
"falls": "Falls",
"fax_scanner_copier": "Fax Scanner Copier",
"feedback_mics": "Feedback Mics",
"fight": "Fight",
"fire": "Fire",
"fire_extinguisher": "Fire Extinguisher",
"fireballs": "Fireballs",
"fireworks": "Fireworks",
"fishing_pole": "Fishing Pole",
"flags": "Flags",
"football": "Football",
"footsteps": "Footsteps",
"futuristic": "Futuristic",
"futuristic_ship": "Futuristic Ship",
"gameshow": "Gameshow",
"gear": "Gear",
"ghosts_demons": "Ghosts Demons",
"giant_monster": "Giant Monster",
"glass": "Glass",
"glasses_clink": "Glasses Clink",
"golf": "Golf",
"gorilla": "Gorilla",
"grenade_lanucher": "Grenade Lanucher",
"griffen": "Griffen",
"gyms_locker_rooms": "Gyms Locker Rooms",
"handgun_loading": "Handgun Loading",
"handgun_shot": "Handgun Shot",
"handle": "Handle",
"hands": "Hands",
"heartbeats_ekg": "Heartbeats EKG",
"helicopter": "Helicopter",
"high_tech": "High Tech",
"hit_punch_slap": "Hit Punch Slap",
"hits": "Hits",
"horns": "Horns",
"horror": "Horror",
"hot_tub_filling_up": "Hot Tub Filling Up",
"human": "Human",
"human_vocals": "Human Vocals",
"hygene": "Hygene",
"ice_skating": "Ice Skating",
"ignitions": "Ignitions",
"infantry": "Infantry",
"intro": "Intro",
"jet": "Jet",
"juggling": "Juggling",
"key_lock": "Key Lock",
"kids": "Kids",
"knocks": "Knocks",
"lab_equip": "Lab Equip",
"lacrosse": "Lacrosse",
"lamps_lanterns": "Lamps Lanterns",
"leather": "Leather",
"liquid_suction": "Liquid Suction",
"locker_doors": "Locker Doors",
"machine_gun": "Machine Gun",
"magic_spells": "Magic Spells",
"medium_large_explosions": "Medium Large Explosions",
"metal": "Metal",
"modern_rings": "Modern Rings",
"money_coins": "Money Coins",
"motorcycles": "Motorcycles",
"movement": "Movement",
"moves": "Moves",
"nature": "Nature",
"oar_boat": "Oar Boat",
"pagers": "Pagers",
"paintball": "Paintball",
"paper": "Paper",
"parachute": "Parachute",
"pay_phones": "Pay Phones",
"phone_beeps": "Phone Beeps",
"pigmy_bats": "Pigmy Bats",
"pills": "Pills",
"pour_water": "Pour Water",
"power_up_down": "Power Up Down",
"printers": "Printers",
"prison": "Prison",
"public_space": "Public Space",
"racquetball": "Racquetball",
"radios_static": "Radios Static",
"rain": "Rain",
"rc_airplane": "RC Airplane",
"rc_car": "RC Car",
"refrigerators_freezers": "Refrigerators Freezers",
"regular": "Regular",
"respirator": "Respirator",
"rifle": "Rifle",
"roller_coaster": "Roller Coaster",
"rollerskates_rollerblades": "RollerSkates RollerBlades",
"room_tones": "Room Tones",
"ropes_climbing": "Ropes Climbing",
"rotary_rings": "Rotary Rings",
"rowboat_canoe": "Rowboat Canoe",
"rubber": "Rubber",
"running": "Running",
"sails": "Sails",
"sand_gravel": "Sand Gravel",
"screen_doors": "Screen Doors",
"screens": "Screens",
"seats_stools": "Seats Stools",
"servos": "Servos",
"shoes_boots": "Shoes Boots",
"shotgun": "Shotgun",
"shower": "Shower",
"sink_faucet": "Sink Faucet",
"sink_filling_water": "Sink Filling Water",
"sink_run_and_off": "Sink Run And Off",
"sink_water_splatter": "Sink Water Splatter",
"sirens": "Sirens",
"skateboards": "Skateboards",
"ski": "Ski",
"skids_tires": "Skids Tires",
"sled": "Sled",
"slides": "Slides",
"small_explosions": "Small Explosions",
"snow": "Snow",
"snowmobile": "Snowmobile",
"soldiers": "Soldiers",
"splash_water": "Splash Water",
"splashes_sprays": "Splashes Sprays",
"sports_whistles": "Sports Whistles",
"squeaks": "Squeaks",
"squeaky": "Squeaky",
"stairs": "Stairs",
"steam": "Steam",
"submarine_diesel": "Submarine Diesel",
"swing_doors": "Swing Doors",
"switches_levers": "Switches Levers",
"swords": "Swords",
"tape": "Tape",
"tape_machine": "Tape Machine",
"televisions_shows": "Televisions Shows",
"tennis_pingpong": "Tennis PingPong",
"textile": "Textile",
"throw": "Throw",
"thunder": "Thunder",
"ticks": "Ticks",
"timer": "Timer",
"toilet_flush": "Toilet Flush",
"tone": "Tone",
"tones_noises": "Tones Noises",
"toys": "Toys",
"tractors": "Tractors",
"traffic": "Traffic",
"train": "Train",
"trucks_vans": "Trucks Vans",
"turnstiles": "Turnstiles",
"typing": "Typing",
"umbrella": "Umbrella",
"underwater": "Underwater",
"vampires": "Vampires",
"various": "Various",
"video_tunes": "Video Tunes",
"volcano_earthquake": "Volcano Earthquake",
"watches": "Watches",
"water": "Water",
"water_running": "Water Running",
"werewolves": "Werewolves",
"winches_gears": "Winches Gears",
"wind": "Wind",
"wood": "Wood",
"wood_boat": "Wood Boat",
"woosh": "Woosh",
"zap": "Zap",
"zippers": "Zippers"
}
}
},
@@ -183,7 +607,7 @@
"message": "Invalid device ID specified: {device_id}"
},
"invalid_sound_value": {
"message": "Invalid sound {sound} specified"
"message": "Invalid sound {sound} with variant {variant} specified"
},
"entry_not_loaded": {
"message": "Entry not loaded: {entry}"
@@ -16,7 +16,7 @@ from homeassistant.helpers.selector import (
SelectSelectorMode,
)

from .const import CONF_SITE_ID, CONF_SITE_NAME, DOMAIN, REQUEST_TIMEOUT
from .const import CONF_SITE_ID, CONF_SITE_NAME, DOMAIN

API_URL = "https://app.amber.com.au/developers"

@@ -64,9 +64,7 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN):
api = amberelectric.AmberApi(api_client)

try:
sites: list[Site] = filter_sites(
api.get_sites(_request_timeout=REQUEST_TIMEOUT)
)
sites: list[Site] = filter_sites(api.get_sites())
except amberelectric.ApiException as api_exception:
if api_exception.status == 403:
self._errors[CONF_API_TOKEN] = "invalid_api_token"

@@ -21,5 +21,3 @@ SERVICE_GET_FORECASTS = "get_forecasts"
GENERAL_CHANNEL = "general"
CONTROLLED_LOAD_CHANNEL = "controlled_load"
FEED_IN_CHANNEL = "feed_in"

REQUEST_TIMEOUT = 15

@@ -16,7 +16,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import LOGGER, REQUEST_TIMEOUT
from .const import LOGGER
from .helpers import normalize_descriptor

type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]
@@ -82,11 +82,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
"grid": {},
}
try:
data = self._api.get_current_prices(
self.site_id,
next=288,
_request_timeout=REQUEST_TIMEOUT,
)
data = self._api.get_current_prices(self.site_id, next=288)
intervals = [interval.actual_instance for interval in data]
except ApiException as api_exception:
raise UpdateFailed("Missing price data, skipping update") from api_exception

@@ -24,12 +24,7 @@ from homeassistant.components.recorder import (
get_instance as get_recorder_instance,
)
from homeassistant.config_entries import SOURCE_IGNORE
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_DOMAIN,
BASE_PLATFORMS,
__version__ as HA_VERSION,
)
from homeassistant.const import ATTR_DOMAIN, BASE_PLATFORMS, __version__ as HA_VERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
@@ -394,117 +389,65 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:


async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return detailed information about entities and devices."""
integrations_info: dict[str, dict[str, Any]] = {}

"""Return the devices payload."""
devices: list[dict[str, Any]] = []
dev_reg = dr.async_get(hass)
# Devices that need via device info set
new_indexes: dict[str, int] = {}
via_devices: dict[str, str] = {}

# We need to refer to other devices, for example in `via_device` field.
# We don't however send the original device ids outside of Home Assistant,
# instead we refer to devices by (integration_domain, index_in_integration_device_list).
device_id_mapping: dict[str, tuple[str, int]] = {}
seen_integrations = set()

for device_entry in dev_reg.devices.values():
if not device_entry.primary_config_entry:
for device in dev_reg.devices.values():
if not device.primary_config_entry:
continue

config_entry = hass.config_entries.async_get_entry(
device_entry.primary_config_entry
)
config_entry = hass.config_entries.async_get_entry(device.primary_config_entry)

if config_entry is None:
continue

integration_domain = config_entry.domain
integration_info = integrations_info.setdefault(
integration_domain, {"devices": [], "entities": []}
)
seen_integrations.add(config_entry.domain)

devices_info = integration_info["devices"]

device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))

devices_info.append(
new_indexes[device.id] = len(devices)
devices.append(
{
"entities": [],
"entry_type": device_entry.entry_type,
"has_configuration_url": device_entry.configuration_url is not None,
"hw_version": device_entry.hw_version,
"manufacturer": device_entry.manufacturer,
"model": device_entry.model,
"model_id": device_entry.model_id,
"sw_version": device_entry.sw_version,
"via_device": device_entry.via_device_id,
"integration": config_entry.domain,
"manufacturer": device.manufacturer,
"model_id": device.model_id,
"model": device.model,
"sw_version": device.sw_version,
"hw_version": device.hw_version,
"has_configuration_url": device.configuration_url is not None,
"via_device": None,
"entry_type": device.entry_type.value if device.entry_type else None,
}
)

# Fill out via_device with new device ids
for integration_info in integrations_info.values():
for device_info in integration_info["devices"]:
if device_info["via_device"] is None:
continue
device_info["via_device"] = device_id_mapping.get(device_info["via_device"])
if device.via_device_id:
via_devices[device.id] = device.via_device_id

ent_reg = er.async_get(hass)

for entity_entry in ent_reg.entities.values():
integration_domain = entity_entry.platform
integration_info = integrations_info.setdefault(
integration_domain, {"devices": [], "entities": []}
)

devices_info = integration_info["devices"]
entities_info = integration_info["entities"]

entity_state = hass.states.get(entity_entry.entity_id)

entity_info = {
# LIMITATION: `assumed_state` can be overridden by users;
# we should replace it with the original value in the future.
# It is also not present, if entity is not in the state machine,
# which can happen for disabled entities.
"assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
if entity_state is not None
else None,
"capabilities": entity_entry.capabilities,
"domain": entity_entry.domain,
"entity_category": entity_entry.entity_category,
"has_entity_name": entity_entry.has_entity_name,
"original_device_class": entity_entry.original_device_class,
# LIMITATION: `unit_of_measurement` can be overridden by users;
# we should replace it with the original value in the future.
"unit_of_measurement": entity_entry.unit_of_measurement,
}

if (
((device_id := entity_entry.device_id) is not None)
and ((new_device_id := device_id_mapping.get(device_id)) is not None)
and (new_device_id[0] == integration_domain)
):
device_info = devices_info[new_device_id[1]]
device_info["entities"].append(entity_info)
else:
entities_info.append(entity_info)
for from_device, via_device in via_devices.items():
if via_device not in new_indexes:
continue
devices[new_indexes[from_device]]["via_device"] = new_indexes[via_device]

integrations = {
domain: integration
for domain, integration in (
await async_get_integrations(hass, integrations_info.keys())
await async_get_integrations(hass, seen_integrations)
).items()
if isinstance(integration, Integration)
}

for domain, integration_info in integrations_info.items():
if integration := integrations.get(domain):
integration_info["is_custom_integration"] = not integration.is_built_in
for device_info in devices:
if integration := integrations.get(device_info["integration"]):
device_info["is_custom_integration"] = not integration.is_built_in
# Include version for custom integrations
if not integration.is_built_in and integration.version:
integration_info["custom_integration_version"] = str(
integration.version
)
device_info["custom_integration_version"] = str(integration.version)

return {
"version": "home-assistant:1",
"home_assistant": HA_VERSION,
"integrations": integrations_info,
"devices": devices,
}

@@ -37,7 +37,7 @@ from .helpers import AndroidTVRemoteConfigEntry, create_api, get_enable_ime

_LOGGER = logging.getLogger(__name__)

APPS_NEW_ID = "add_new"
APPS_NEW_ID = "NewApp"
CONF_APP_DELETE = "app_delete"
CONF_APP_ID = "app_id"

@@ -66,14 +66,9 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input is not None:
self.host = user_input[CONF_HOST]
api = create_api(self.hass, self.host, enable_ime=False)
await api.async_generate_cert_if_missing()
try:
await api.async_generate_cert_if_missing()
self.name, self.mac = await api.async_get_name_and_mac()
except CannotConnect:
# Likely invalid IP address or device is network unreachable. Stay
# in the user step allowing the user to enter a different host.
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(format_mac(self.mac))
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch()
@@ -86,10 +81,11 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
},
)
self._abort_if_unique_id_configured(updates={CONF_HOST: self.host})
try:
return await self._async_start_pair()
except (CannotConnect, ConnectionClosed):
errors["base"] = "cannot_connect"
return await self._async_start_pair()
except (CannotConnect, ConnectionClosed):
# Likely invalid IP address or device is network unreachable. Stay
# in the user step allowing the user to enter a different host.
errors["base"] = "cannot_connect"
else:
user_input = {}
default_host = user_input.get(CONF_HOST, vol.UNDEFINED)
@@ -116,9 +112,22 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle the pair step."""
errors: dict[str, str] = {}
if user_input is not None:
pin = user_input["pin"]
try:
pin = user_input["pin"]
await self.api.async_finish_pairing(pin)
if self.source == SOURCE_REAUTH:
return self.async_update_reload_and_abort(
self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True
)

return self.async_create_entry(
title=self.name,
data={
CONF_HOST: self.host,
CONF_NAME: self.name,
CONF_MAC: self.mac,
},
)
except InvalidAuth:
# Invalid PIN. Stay in the pair step allowing the user to enter
# a different PIN.
@@ -136,20 +145,6 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
# them to enter a new IP address but we cannot do that for the zeroconf
# flow. Simpler to abort for both flows.
return self.async_abort(reason="cannot_connect")
else:
if self.source == SOURCE_REAUTH:
return self.async_update_reload_and_abort(
self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True
)

return self.async_create_entry(
title=self.name,
data={
CONF_HOST: self.host,
CONF_NAME: self.name,
CONF_MAC: self.mac,
},
)
return self.async_show_form(
step_id="pair",
data_schema=STEP_PAIR_DATA_SCHEMA,
@@ -287,9 +282,7 @@ class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload):
{
vol.Optional(CONF_APPS): SelectSelector(
SelectSelectorConfig(
options=apps,
mode=SelectSelectorMode.DROPDOWN,
translation_key="apps",
options=apps, mode=SelectSelectorMode.DROPDOWN
)
),
vol.Required(

@@ -6,7 +6,7 @@ from typing import Any

from androidtvremote2 import AndroidTVRemote, ConnectionClosed

from homeassistant.const import CONF_MAC, CONF_NAME
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
@@ -28,6 +28,8 @@ class AndroidTVRemoteBaseEntity(Entity):
) -> None:
"""Initialize the entity."""
self._api = api
self._host = config_entry.data[CONF_HOST]
self._name = config_entry.data[CONF_NAME]
self._apps: dict[str, Any] = config_entry.options.get(CONF_APPS, {})
self._attr_unique_id = config_entry.unique_id
self._attr_is_on = api.is_on
@@ -37,7 +39,7 @@ class AndroidTVRemoteBaseEntity(Entity):
self._attr_device_info = DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, config_entry.data[CONF_MAC])},
identifiers={(DOMAIN, config_entry.unique_id)},
name=config_entry.data[CONF_NAME],
name=self._name,
manufacturer=device_info["manufacturer"],
model=device_info["model"],
)

@@ -7,7 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"quality_scale": "platinum",
"requirements": ["androidtvremote2==0.2.3"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}

@@ -175,11 +175,7 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
"""Play a piece of media."""
if media_type == MediaType.CHANNEL:
if not media_id.isnumeric():
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="invalid_channel",
translation_placeholders={"media_id": media_id},
)
raise ValueError(f"Channel must be numeric: {media_id}")
if self._channel_set_task:
self._channel_set_task.cancel()
self._channel_set_task = asyncio.create_task(
@@ -192,11 +188,7 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
self._send_launch_app_command(media_id)
return

raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="invalid_media_type",
translation_placeholders={"media_type": media_type},
)
raise ValueError(f"Invalid media type: {media_type}")

async def async_browse_media(
self,

@@ -1,78 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: No integration-specific service actions are defined.
appropriate-polling:
status: exempt
comment: This is a push-based integration.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done

# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
test-coverage: done

# Gold
devices: done
diagnostics: done
discovery-update-info: done
discovery: done
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: The integration is configured on a per-device basis, so there are no dynamic devices to add.
entity-category:
status: exempt
comment: All entities are primary and do not require a specific category.
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: The integration provides only primary entities that should be enabled.
entity-translations: done
exception-translations: done
icon-translations:
status: exempt
comment: Icons are provided by the entity's device class, and no state-based icons are needed.
reconfiguration-flow: done
repair-issues:
status: exempt
comment: The integration uses the reauth flow for authentication issues, and no other repairable issues have been identified.
stale-devices:
status: exempt
comment: The integration manages a single device per config entry. Stale device removal is handled by removing the config entry.

# Platinum
async-dependency: done
inject-websession:
status: exempt
comment: The underlying library does not use HTTP for communication.
strict-typing: done
@@ -22,7 +22,7 @@
},
"zeroconf_confirm": {
"title": "Discovered Android TV",
"description": "Do you want to add the Android TV ({name}) to Home Assistant? It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen."
"description": "Do you want to add the Android TV ({name}) to Home Assistant? It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen."
},
"pair": {
"description": "Enter the pairing code displayed on the Android TV ({name}).",
@@ -85,19 +85,6 @@
"exceptions": {
"connection_closed": {
"message": "Connection to the Android TV device is closed"
},
"invalid_channel": {
"message": "Channel must be numeric: {media_id}"
},
"invalid_media_type": {
"message": "Invalid media type: {media_type}"
}
},
"selector": {
"apps": {
"options": {
"add_new": "Add new"
}
}
}
}

@@ -129,9 +129,9 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
entity_disabled_by is er.RegistryEntryDisabler.CONFIG_ENTRY
and not all_disabled
):
# Device and entity registries will set the disabled_by flag to None
# when moving a device or entity disabled by CONFIG_ENTRY to an enabled
# config entry, but we want to set it to DEVICE or USER instead,
# Device and entity registries don't update the disabled_by flag
# when moving a device or entity from one config entry to another,
# so we need to do it manually.
entity_disabled_by = (
er.RegistryEntryDisabler.DEVICE
if device
@@ -146,9 +146,9 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
)

if device is not None:
# Device and entity registries will set the disabled_by flag to None
# when moving a device or entity disabled by CONFIG_ENTRY to an enabled
# config entry, but we want to set it to USER instead,
# Device and entity registries don't update the disabled_by flag when
# moving a device or entity from one config entry to another, so we
# need to do it manually.
device_disabled_by = device.disabled_by
if (
device.disabled_by is dr.DeviceEntryDisabler.CONFIG_ENTRY

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aosmith",
"iot_class": "cloud_polling",
"requirements": ["py-aosmith==1.0.14"]
"requirements": ["py-aosmith==1.0.12"]
}

@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant

from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator

PLATFORMS: Final = [Platform.BINARY_SENSOR, Platform.SENSOR]
PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR)


async def async_setup_entry(

@@ -10,9 +10,9 @@ from homeassistant.components.binary_sensor import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
from .entity import APCUPSdEntity

PARALLEL_UPDATES = 0

@@ -40,16 +40,22 @@ async def async_setup_entry(
async_add_entities([OnlineStatus(coordinator, _DESCRIPTION)])


class OnlineStatus(APCUPSdEntity, BinarySensorEntity):
class OnlineStatus(CoordinatorEntity[APCUPSdCoordinator], BinarySensorEntity):
"""Representation of a UPS online status."""

_attr_has_entity_name = True

def __init__(
self,
coordinator: APCUPSdCoordinator,
description: BinarySensorEntityDescription,
) -> None:
"""Initialize the APCUPSd binary device."""
super().__init__(coordinator, description)
super().__init__(coordinator, context=description.key.upper())

self.entity_description = description
self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
self._attr_device_info = coordinator.device_info

@property
def is_on(self) -> bool | None:

@@ -100,7 +100,6 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
name=self.data.name or "APC UPS",
hw_version=self.data.get("FIRMWARE"),
sw_version=self.data.get("VERSION"),
serial_number=self.data.serial_no,
)

async def _async_update_data(self) -> APCUPSdData:

@@ -1,26 +0,0 @@
|
||||
"""Base entity for APCUPSd integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .coordinator import APCUPSdCoordinator
|
||||
|
||||
|
||||
class APCUPSdEntity(CoordinatorEntity[APCUPSdCoordinator]):
|
||||
"""Base entity for APCUPSd integration."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: APCUPSdCoordinator,
|
||||
description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the APCUPSd entity."""
|
||||
super().__init__(coordinator, context=description.key.upper())
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
|
||||
self._attr_device_info = coordinator.device_info
|
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/apcupsd",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["apcaccess"],
|
||||
"quality_scale": "platinum",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["aioapcaccess==0.4.2"]
|
||||
}
|
||||
|
@@ -3,7 +3,10 @@ rules:
|
||||
action-setup: done
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
common-modules:
|
||||
status: done
|
||||
comment: |
|
||||
Consider deriving a base entity.
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
@@ -43,7 +46,10 @@ rules:
|
||||
status: exempt
|
||||
comment: |
|
||||
The integration does not require authentication.
|
||||
test-coverage: done
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Patch `aioapcaccess.request_status` where we use it.
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
|
@@ -23,10 +23,10 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import LAST_S_TEST
|
||||
from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
|
||||
from .entity import APCUPSdEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -490,16 +490,22 @@ def infer_unit(value: str) -> tuple[str, str | None]:
|
||||
return value, None
|
||||
|
||||
|
||||
class APCUPSdSensor(APCUPSdEntity, SensorEntity):
|
||||
class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
|
||||
"""Representation of a sensor entity for APCUPSd status values."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: APCUPSdCoordinator,
|
||||
description: SensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, description)
|
||||
super().__init__(coordinator=coordinator, context=description.key.upper())
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
|
||||
self._attr_device_info = coordinator.device_info
|
||||
|
||||
# Initial update of attributes.
|
||||
self._update_attrs()
|
||||
|
1
homeassistant/components/aps/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Virtual integration: Arizona Public Service (APS)."""
|
6
homeassistant/components/aps/manifest.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"domain": "aps",
|
||||
"name": "Arizona Public Service (APS)",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "opower"
|
||||
}
|
@@ -2,7 +2,7 @@
|
||||
"domain": "assist_pipeline",
|
||||
"name": "Assist pipeline",
|
||||
"after_dependencies": ["repairs"],
|
||||
"codeowners": ["@synesthesiam", "@arturpragacz"],
|
||||
"codeowners": ["@balloob", "@synesthesiam"],
|
||||
"dependencies": ["conversation", "stt", "tts", "wake_word"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/assist_pipeline",
|
||||
"integration_type": "system",
|
||||
|
@@ -75,6 +75,7 @@ class BroadcastIntentHandler(intent.IntentHandler):
|
||||
)
|
||||
|
||||
response = intent_obj.create_response()
|
||||
response.response_type = intent.IntentResponseType.ACTION_DONE
|
||||
response.async_set_results(
|
||||
success_results=[
|
||||
intent.IntentResponseTarget(
|
||||
|
@@ -1,10 +1,10 @@
|
||||
{
|
||||
"domain": "assist_satellite",
|
||||
"name": "Assist Satellite",
|
||||
"codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"],
|
||||
"codeowners": ["@home-assistant/core", "@synesthesiam"],
|
||||
"dependencies": ["assist_pipeline", "http", "stt", "tts"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==3.2.0"]
|
||||
"requirements": ["hassil==3.1.0"]
|
||||
}
|
||||
|
@@ -12,11 +12,9 @@ from typing import Any, cast
|
||||
from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
|
||||
from aiohttp import ClientSession
|
||||
from asusrouter import AsusRouter, AsusRouterError
|
||||
from asusrouter.config import ARConfigKey
|
||||
from asusrouter.modules.client import AsusClient
|
||||
from asusrouter.modules.data import AsusData
|
||||
from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
|
||||
from asusrouter.tools.connection import get_cookie_jar
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
@@ -27,7 +25,7 @@ from homeassistant.const import (
|
||||
CONF_USERNAME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.update_coordinator import UpdateFailed
|
||||
|
||||
@@ -111,10 +109,7 @@ class AsusWrtBridge(ABC):
|
||||
) -> AsusWrtBridge:
|
||||
"""Get Bridge instance."""
|
||||
if conf[CONF_PROTOCOL] in (PROTOCOL_HTTPS, PROTOCOL_HTTP):
|
||||
session = async_create_clientsession(
|
||||
hass,
|
||||
cookie_jar=get_cookie_jar(),
|
||||
)
|
||||
session = async_get_clientsession(hass)
|
||||
return AsusWrtHttpBridge(conf, session)
|
||||
return AsusWrtLegacyBridge(conf, options)
|
||||
|
||||
@@ -124,8 +119,6 @@ class AsusWrtBridge(ABC):
|
||||
self._firmware: str | None = None
|
||||
self._label_mac: str | None = None
|
||||
self._model: str | None = None
|
||||
self._model_id: str | None = None
|
||||
self._serial_number: str | None = None
|
||||
|
||||
@property
|
||||
def host(self) -> str:
|
||||
@@ -147,16 +140,6 @@ class AsusWrtBridge(ABC):
|
||||
"""Return model information."""
|
||||
return self._model
|
||||
|
||||
@property
|
||||
def model_id(self) -> str | None:
|
||||
"""Return model_id information."""
|
||||
return self._model_id
|
||||
|
||||
@property
|
||||
def serial_number(self) -> str | None:
|
||||
"""Return serial number information."""
|
||||
return self._serial_number
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def is_connected(self) -> bool:
|
||||
@@ -327,14 +310,10 @@ class AsusWrtHttpBridge(AsusWrtBridge):
|
||||
def __init__(self, conf: dict[str, Any], session: ClientSession) -> None:
|
||||
"""Initialize Bridge that use HTTP library."""
|
||||
super().__init__(conf[CONF_HOST])
|
||||
# Get API configuration
|
||||
config = self._get_api_config()
|
||||
self._api = self._get_api(conf, session, config)
|
||||
self._api = self._get_api(conf, session)
|
||||
|
||||
@staticmethod
|
||||
def _get_api(
|
||||
conf: dict[str, Any], session: ClientSession, config: dict[ARConfigKey, Any]
|
||||
) -> AsusRouter:
|
||||
def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusRouter:
|
||||
"""Get the AsusRouter API."""
|
||||
return AsusRouter(
|
||||
hostname=conf[CONF_HOST],
|
||||
@@ -343,19 +322,8 @@ class AsusWrtHttpBridge(AsusWrtBridge):
|
||||
use_ssl=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
|
||||
port=conf.get(CONF_PORT),
|
||||
session=session,
|
||||
config=config,
|
||||
)
|
||||
|
||||
def _get_api_config(self) -> dict[ARConfigKey, Any]:
|
||||
"""Get configuration for the API."""
|
||||
return {
|
||||
# Enable automatic temperature data correction in the library
|
||||
ARConfigKey.OPTIMISTIC_TEMPERATURE: True,
|
||||
# Disable `warning`-level log message when temperature
|
||||
# is corrected by setting it to already notified.
|
||||
ARConfigKey.NOTIFIED_OPTIMISTIC_TEMPERATURE: True,
|
||||
}
|
||||
|
||||
@property
|
||||
def is_connected(self) -> bool:
|
||||
"""Get connected status."""
|
||||
@@ -373,8 +341,6 @@ class AsusWrtHttpBridge(AsusWrtBridge):
|
||||
self._label_mac = format_mac(mac)
|
||||
self._firmware = str(_identity.firmware)
|
||||
self._model = _identity.model
|
||||
self._model_id = _identity.product_id
|
||||
self._serial_number = _identity.serial
|
||||
|
||||
async def async_disconnect(self) -> None:
|
||||
"""Disconnect to the device."""
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
|
||||
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.21.0"]
|
||||
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.19.0"]
|
||||
}
|
||||
|
@@ -391,8 +391,6 @@ class AsusWrtRouter:
|
||||
identifiers={(DOMAIN, self._entry.unique_id or "AsusWRT")},
|
||||
name=self.host,
|
||||
model=self._api.model or "Asus Router",
|
||||
model_id=self._api.model_id,
|
||||
serial_number=self._api.serial_number,
|
||||
manufacturer="Asus",
|
||||
configuration_url=f"http://{self.host}",
|
||||
)
|
||||
|
@@ -6,21 +6,18 @@ from pathlib import Path
|
||||
from typing import cast
|
||||
|
||||
from aiohttp import ClientResponseError
|
||||
from yalexs.const import Brand
|
||||
from yalexs.exceptions import AugustApiAIOHTTPError
|
||||
from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation
|
||||
from yalexs.manager.gateway import Config as YaleXSConfig
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
config_entry_oauth2_flow,
|
||||
device_registry as dr,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers import device_registry as dr, issue_registry as ir
|
||||
|
||||
from .const import DEFAULT_AUGUST_BRAND, DOMAIN, PLATFORMS
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
from .data import AugustData
|
||||
from .gateway import AugustGateway
|
||||
from .util import async_create_august_clientsession
|
||||
@@ -28,21 +25,30 @@ from .util import async_create_august_clientsession
|
||||
type AugustConfigEntry = ConfigEntry[AugustData]
|
||||
|
||||
|
||||
@callback
|
||||
def _async_create_yale_brand_migration_issue(
|
||||
hass: HomeAssistant, entry: AugustConfigEntry
|
||||
) -> None:
|
||||
"""Create an issue for a brand migration."""
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"yale_brand_migration",
|
||||
breaks_in_ha_version="2024.9",
|
||||
learn_more_url="https://www.home-assistant.io/integrations/yale",
|
||||
translation_key="yale_brand_migration",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.CRITICAL,
|
||||
translation_placeholders={
|
||||
"migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale"
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool:
|
||||
"""Set up August from a config entry."""
|
||||
# Check if this is a legacy config entry that needs migration to OAuth
|
||||
if "auth_implementation" not in entry.data:
|
||||
# This is a legacy entry using username/password, trigger reauth
|
||||
raise ConfigEntryAuthFailed("Migration to OAuth required")
|
||||
|
||||
session = async_create_august_clientsession(hass)
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
)
|
||||
)
|
||||
oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
|
||||
august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
|
||||
august_gateway = AugustGateway(Path(hass.config.config_dir), session)
|
||||
try:
|
||||
await async_setup_august(hass, entry, august_gateway)
|
||||
except (RequireValidation, InvalidAuth) as err:
|
||||
@@ -70,7 +76,9 @@ async def async_setup_august(
|
||||
) -> None:
|
||||
"""Set up the August component."""
|
||||
config = cast(YaleXSConfig, entry.data)
|
||||
await august_gateway.async_setup({**config, "brand": DEFAULT_AUGUST_BRAND})
|
||||
await august_gateway.async_setup(config)
|
||||
if august_gateway.api.brand == Brand.YALE_HOME:
|
||||
_async_create_yale_brand_migration_issue(hass, entry)
|
||||
await august_gateway.async_authenticate()
|
||||
await august_gateway.async_refresh_access_token_if_needed()
|
||||
data = entry.runtime_data = AugustData(hass, august_gateway)
|
||||
|
@@ -1,15 +0,0 @@
|
||||
"""application_credentials platform for the august integration."""
|
||||
|
||||
from homeassistant.components.application_credentials import AuthorizationServer
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
OAUTH2_AUTHORIZE = "https://auth.august.com/authorization"
|
||||
OAUTH2_TOKEN = "https://auth.august.com/access_token"
|
||||
|
||||
|
||||
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
|
||||
"""Return authorization server."""
|
||||
return AuthorizationServer(
|
||||
authorize_url=OAUTH2_AUTHORIZE,
|
||||
token_url=OAUTH2_TOKEN,
|
||||
)
|
@@ -1,86 +1,284 @@
|
||||
"""Config flow for August integration."""
|
||||
|
||||
from collections.abc import Mapping
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import jwt
|
||||
import aiohttp
|
||||
import voluptuous as vol
|
||||
from yalexs.authenticator_common import ValidationResult
|
||||
from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND, Brand
|
||||
from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import callback
|
||||
|
||||
from .const import (
|
||||
CONF_ACCESS_TOKEN_CACHE_FILE,
|
||||
CONF_BRAND,
|
||||
CONF_LOGIN_METHOD,
|
||||
DEFAULT_LOGIN_METHOD,
|
||||
DOMAIN,
|
||||
LOGIN_METHODS,
|
||||
VERIFICATION_CODE_KEY,
|
||||
)
|
||||
from .gateway import AugustGateway
|
||||
from .util import async_create_august_clientsession
|
||||
|
||||
# The Yale Home Brand is not supported by the August integration
|
||||
# anymore and should migrate to the Yale integration
|
||||
AVAILABLE_BRANDS = BRANDS_WITHOUT_OAUTH.copy()
|
||||
del AVAILABLE_BRANDS[Brand.YALE_HOME]
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AugustConfigFlow(
|
||||
config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN
|
||||
):
|
||||
async def async_validate_input(
|
||||
data: dict[str, Any], august_gateway: AugustGateway
|
||||
) -> dict[str, Any]:
|
||||
"""Validate the user input allows us to connect.
|
||||
|
||||
Data has the keys from DATA_SCHEMA with values provided by the user.
|
||||
|
||||
Request configuration steps from the user.
|
||||
"""
|
||||
assert august_gateway.authenticator is not None
|
||||
authenticator = august_gateway.authenticator
|
||||
if (code := data.get(VERIFICATION_CODE_KEY)) is not None:
|
||||
result = await authenticator.async_validate_verification_code(code)
|
||||
_LOGGER.debug("Verification code validation: %s", result)
|
||||
if result != ValidationResult.VALIDATED:
|
||||
raise RequireValidation
|
||||
|
||||
try:
|
||||
await august_gateway.async_authenticate()
|
||||
except RequireValidation:
|
||||
_LOGGER.debug(
|
||||
"Requesting new verification code for %s via %s",
|
||||
data.get(CONF_USERNAME),
|
||||
data.get(CONF_LOGIN_METHOD),
|
||||
)
|
||||
if code is None:
|
||||
await august_gateway.authenticator.async_send_verification_code()
|
||||
raise
|
||||
|
||||
return {
|
||||
"title": data.get(CONF_USERNAME),
|
||||
"data": august_gateway.config_entry(),
|
||||
}
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ValidateResult:
|
||||
"""Result from validation."""
|
||||
|
||||
validation_required: bool
|
||||
info: dict[str, Any]
|
||||
errors: dict[str, str]
|
||||
description_placeholders: dict[str, str]
|
||||
|
||||
|
||||
class AugustConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for August."""
|
||||
|
||||
VERSION = 1
|
||||
DOMAIN = DOMAIN
|
||||
|
||||
@property
|
||||
def logger(self) -> logging.Logger:
|
||||
"""Return logger."""
|
||||
return _LOGGER
|
||||
def __init__(self) -> None:
|
||||
"""Store an AugustGateway()."""
|
||||
self._august_gateway: AugustGateway | None = None
|
||||
self._aiohttp_session: aiohttp.ClientSession | None = None
|
||||
self._user_auth_details: dict[str, Any] = {}
|
||||
self._needs_reset = True
|
||||
super().__init__()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
return await self.async_step_user_validate()
|
||||
|
||||
async def async_step_user_validate(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle authentication."""
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._user_auth_details.update(user_input)
|
||||
validate_result = await self._async_auth_or_validate()
|
||||
description_placeholders = validate_result.description_placeholders
|
||||
if validate_result.validation_required:
|
||||
return await self.async_step_validation()
|
||||
if not (errors := validate_result.errors):
|
||||
return await self._async_update_or_create_entry(validate_result.info)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user_validate",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_BRAND,
|
||||
default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
|
||||
): vol.In(AVAILABLE_BRANDS),
|
||||
vol.Required(
|
||||
CONF_LOGIN_METHOD,
|
||||
default=self._user_auth_details.get(
|
||||
CONF_LOGIN_METHOD, DEFAULT_LOGIN_METHOD
|
||||
),
|
||||
): vol.In(LOGIN_METHODS),
|
||||
vol.Required(
|
||||
CONF_USERNAME,
|
||||
default=self._user_auth_details.get(CONF_USERNAME),
|
||||
): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
|
||||
async def async_step_validation(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle validation (2fa) step."""
|
||||
if user_input:
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return await self.async_step_reauth_validate(user_input)
|
||||
return await self.async_step_user_validate(user_input)
|
||||
|
||||
previously_failed = VERIFICATION_CODE_KEY in self._user_auth_details
|
||||
return self.async_show_form(
|
||||
step_id="validation",
|
||||
data_schema=vol.Schema(
|
||||
{vol.Required(VERIFICATION_CODE_KEY): vol.All(str, vol.Strip)}
|
||||
),
|
||||
errors={"base": "invalid_verification_code"} if previously_failed else None,
|
||||
description_placeholders={
|
||||
CONF_BRAND: self._user_auth_details[CONF_BRAND],
|
||||
CONF_USERNAME: self._user_auth_details[CONF_USERNAME],
|
||||
CONF_LOGIN_METHOD: self._user_auth_details[CONF_LOGIN_METHOD],
|
||||
},
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_get_gateway(self) -> AugustGateway:
|
||||
"""Set up the gateway."""
|
||||
if self._august_gateway is not None:
|
||||
return self._august_gateway
|
||||
self._aiohttp_session = async_create_august_clientsession(self.hass)
|
||||
self._august_gateway = AugustGateway(
|
||||
Path(self.hass.config.config_dir), self._aiohttp_session
|
||||
)
|
||||
return self._august_gateway
|
||||
|
||||
@callback
|
||||
def _async_shutdown_gateway(self) -> None:
|
||||
"""Shutdown the gateway."""
|
||||
if self._aiohttp_session is not None:
|
||||
self._aiohttp_session.detach()
|
||||
self._august_gateway = None
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle configuration by re-auth."""
|
||||
return await self.async_step_user()
|
||||
self._user_auth_details = dict(entry_data)
|
||||
return await self.async_step_reauth_validate()
|
||||
|
||||
def _async_decode_jwt(self, encoded: str) -> dict[str, Any]:
|
||||
"""Decode JWT token."""
|
||||
return jwt.decode(
|
||||
encoded,
|
||||
"",
|
||||
verify=False,
|
||||
options={"verify_signature": False},
|
||||
algorithms=["HS256"],
|
||||
)
|
||||
|
||||
async def _async_handle_reauth(
|
||||
self, data: dict, decoded: dict[str, Any], user_id: str
|
||||
async def async_step_reauth_validate(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth flow."""
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
assert reauth_entry.unique_id is not None
|
||||
# Check if this is a migration from username (contains @) to user ID
|
||||
if "@" not in reauth_entry.unique_id:
|
||||
# This is a normal oauth reauth, enforce ID matching for security
|
||||
await self.async_set_unique_id(user_id)
|
||||
self._abort_if_unique_id_mismatch(reason="reauth_invalid_user")
|
||||
return self.async_update_reload_and_abort(reauth_entry, data=data)
|
||||
"""Handle reauth and validation."""
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._user_auth_details.update(user_input)
|
||||
validate_result = await self._async_auth_or_validate()
|
||||
description_placeholders = validate_result.description_placeholders
|
||||
if validate_result.validation_required:
|
||||
return await self.async_step_validation()
|
||||
if not (errors := validate_result.errors):
|
||||
return await self._async_update_or_create_entry(validate_result.info)
|
||||
|
||||
# This is a one-time migration from username to user ID
|
||||
# Only validate if the account has emails
|
||||
emails: list[str]
|
||||
if emails := decoded.get("email", []):
|
||||
# Validate that the email matches before allowing migration
|
||||
email_to_check_lower = reauth_entry.unique_id.casefold()
|
||||
if not any(email.casefold() == email_to_check_lower for email in emails):
|
||||
# Email doesn't match - this is a different account
|
||||
return self.async_abort(reason="reauth_invalid_user")
|
||||
|
||||
# Email matches or no emails on account, update with new unique ID
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry, data=data, unique_id=user_id
|
||||
return self.async_show_form(
|
||||
step_id="reauth_validate",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_BRAND,
|
||||
default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
|
||||
): vol.In(BRANDS_WITHOUT_OAUTH),
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders
|
||||
| {
|
||||
CONF_USERNAME: self._user_auth_details[CONF_USERNAME],
|
||||
},
|
||||
)
|
||||
|
||||
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
|
||||
"""Create an entry for the flow."""
|
||||
# Decode JWT once
|
||||
access_token = data["token"]["access_token"]
|
||||
decoded = self._async_decode_jwt(access_token)
|
||||
user_id = decoded["userId"]
|
||||
async def _async_reset_access_token_cache_if_needed(
|
||||
self, gateway: AugustGateway, username: str, access_token_cache_file: str | None
|
||||
) -> None:
|
||||
"""Reset the access token cache if needed."""
|
||||
# We need to configure the access token cache file before we setup the gateway
|
||||
# since we need to reset it if the brand changes BEFORE we setup the gateway
|
||||
gateway.async_configure_access_token_cache_file(
|
||||
username, access_token_cache_file
|
||||
)
|
||||
if self._needs_reset:
|
||||
self._needs_reset = False
|
||||
await gateway.async_reset_authentication()
|
||||
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return await self._async_handle_reauth(data, decoded, user_id)
|
||||
async def _async_auth_or_validate(self) -> ValidateResult:
|
||||
"""Authenticate or validate."""
|
||||
user_auth_details = self._user_auth_details
|
||||
gateway = self._async_get_gateway()
|
||||
assert gateway is not None
|
||||
await self._async_reset_access_token_cache_if_needed(
|
||||
gateway,
|
||||
user_auth_details[CONF_USERNAME],
|
||||
user_auth_details.get(CONF_ACCESS_TOKEN_CACHE_FILE),
|
||||
)
|
||||
await gateway.async_setup(user_auth_details)
|
||||
|
||||
await self.async_set_unique_id(user_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
return await super().async_oauth_create_entry(data)
|
||||
errors: dict[str, str] = {}
|
||||
info: dict[str, Any] = {}
|
||||
description_placeholders: dict[str, str] = {}
|
||||
validation_required = False
|
||||
|
||||
try:
|
||||
info = await async_validate_input(user_auth_details, gateway)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except RequireValidation:
|
||||
validation_required = True
|
||||
except Exception as ex:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unhandled"
|
||||
description_placeholders = {"error": str(ex)}
|
||||
|
||||
return ValidateResult(
|
||||
validation_required, info, errors, description_placeholders
|
||||
)
|
||||
|
||||
async def _async_update_or_create_entry(
|
||||
self, info: dict[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Update existing entry or create a new one."""
|
||||
self._async_shutdown_gateway()
|
||||
|
||||
existing_entry = await self.async_set_unique_id(
|
||||
self._user_auth_details[CONF_USERNAME]
|
||||
)
|
||||
if not existing_entry:
|
||||
return self.async_create_entry(title=info["title"], data=info["data"])
|
||||
|
||||
return self.async_update_reload_and_abort(existing_entry, data=info["data"])
|
||||
|
@@ -1,7 +1,5 @@
|
||||
"""Constants for August devices."""
|
||||
|
||||
from yalexs.const import Brand
|
||||
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DEFAULT_TIMEOUT = 25
|
||||
@@ -11,8 +9,6 @@ CONF_BRAND = "brand"
|
||||
CONF_LOGIN_METHOD = "login_method"
|
||||
CONF_INSTALL_ID = "install_id"
|
||||
|
||||
DEFAULT_AUGUST_BRAND = Brand.YALE_AUGUST
|
||||
|
||||
VERIFICATION_CODE_KEY = "verification_code"
|
||||
|
||||
NOTIFICATION_ID = "august_notification"
|
||||
|
@@ -1,43 +1,30 @@
|
||||
"""Handle August connection setup and authentication."""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientSession
|
||||
from yalexs.authenticator_common import Authentication, AuthenticationState
|
||||
from yalexs.const import DEFAULT_BRAND
|
||||
from yalexs.manager.gateway import Gateway
|
||||
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
from homeassistant.const import CONF_USERNAME
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
from .const import (
|
||||
CONF_ACCESS_TOKEN_CACHE_FILE,
|
||||
CONF_BRAND,
|
||||
CONF_INSTALL_ID,
|
||||
CONF_LOGIN_METHOD,
|
||||
)
|
||||
|
||||
|
||||
class AugustGateway(Gateway):
|
||||
"""Handle the connection to August."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config_path: Path,
|
||||
aiohttp_session: ClientSession,
|
||||
oauth_session: config_entry_oauth2_flow.OAuth2Session,
|
||||
) -> None:
|
||||
"""Init the connection."""
|
||||
super().__init__(config_path, aiohttp_session)
|
||||
self._oauth_session = oauth_session
|
||||
|
||||
async def async_get_access_token(self) -> str:
|
||||
"""Get access token."""
|
||||
await self._oauth_session.async_ensure_token_valid()
|
||||
return self._oauth_session.token["access_token"]
|
||||
|
||||
async def async_refresh_access_token_if_needed(self) -> None:
|
||||
"""Refresh the access token if needed."""
|
||||
await self._oauth_session.async_ensure_token_valid()
|
||||
|
||||
async def async_authenticate(self) -> Authentication:
|
||||
"""Authenticate with the details provided to setup."""
|
||||
await self._oauth_session.async_ensure_token_valid()
|
||||
self.authentication = Authentication(
|
||||
AuthenticationState.AUTHENTICATED, None, None, None
|
||||
)
|
||||
return self.authentication
|
||||
def config_entry(self) -> dict[str, Any]:
|
||||
"""Config entry."""
|
||||
assert self._config is not None
|
||||
return {
|
||||
CONF_BRAND: self._config.get(CONF_BRAND, DEFAULT_BRAND),
|
||||
CONF_LOGIN_METHOD: self._config[CONF_LOGIN_METHOD],
|
||||
CONF_USERNAME: self._config[CONF_USERNAME],
|
||||
CONF_INSTALL_ID: self._config.get(CONF_INSTALL_ID),
|
||||
CONF_ACCESS_TOKEN_CACHE_FILE: self._access_token_cache_file,
|
||||
}
|
||||
|
@@ -3,7 +3,6 @@
|
||||
"name": "August",
|
||||
"codeowners": ["@bdraco"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["application_credentials", "cloud"],
|
||||
"dhcp": [
|
||||
{
|
||||
"hostname": "connect",
|
||||
@@ -29,5 +28,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/august",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pubnub", "yalexs"],
|
||||
"requirements": ["yalexs==9.0.1", "yalexs-ble==3.1.2"]
|
||||
"requirements": ["yalexs==8.11.1", "yalexs-ble==3.1.2"]
|
||||
}
|
||||
|
@@ -6,34 +6,42 @@
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
}
|
||||
"error": {
|
||||
"unhandled": "Unhandled error: {error}",
|
||||
"invalid_verification_code": "Invalid verification code",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
|
||||
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
|
||||
"missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
|
||||
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
|
||||
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
|
||||
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
|
||||
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
|
||||
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
|
||||
"reauth_invalid_user": "Reauthenticate must use the same account."
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
"step": {
|
||||
"validation": {
|
||||
"title": "Two-factor authentication",
|
||||
"data": {
|
||||
"verification_code": "Verification code"
|
||||
},
|
||||
"description": "Please check your {login_method} ({username}) and enter the verification code below. Codes may take a few minutes to arrive."
|
||||
},
|
||||
"user_validate": {
|
||||
"description": "It is recommended to use the 'email' login method as some brands may not work with the 'phone' method. If the Login Method is 'email', Username is the email address. If the Login Method is 'phone', Username is the phone number in the format '+NNNNNNNNN'. If you choose the wrong brand, you may be able to authenticate initially; however, you will not be able to operate devices. If you are unsure of the brand, create the integration again and try another brand.",
|
||||
"data": {
|
||||
"brand": "Brand",
|
||||
"login_method": "Login Method",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"title": "Set up an August account"
|
||||
},
|
||||
"reauth_validate": {
|
||||
"description": "Choose the correct brand for your device, and enter the password for {username}. If you choose the wrong brand, you may be able to authenticate initially; however, you will not be able to operate devices. If you are unsure of the brand, create the integration again and try another brand.",
|
||||
"data": {
|
||||
"brand": "[%key:component::august::config::step::user_validate::data::brand%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"title": "Reauthenticate an August account"
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user