mirror of
https://github.com/home-assistant/core.git
synced 2025-07-29 08:07:45 +00:00
Merge pull request #38065 from home-assistant/rc
This commit is contained in:
commit
c9380d4972
21
.coveragerc
21
.coveragerc
@ -214,7 +214,14 @@ omit =
|
|||||||
homeassistant/components/emoncms_history/*
|
homeassistant/components/emoncms_history/*
|
||||||
homeassistant/components/emulated_hue/upnp.py
|
homeassistant/components/emulated_hue/upnp.py
|
||||||
homeassistant/components/enigma2/media_player.py
|
homeassistant/components/enigma2/media_player.py
|
||||||
homeassistant/components/enocean/*
|
homeassistant/components/enocean/__init__.py
|
||||||
|
homeassistant/components/enocean/binary_sensor.py
|
||||||
|
homeassistant/components/enocean/const.py
|
||||||
|
homeassistant/components/enocean/device.py
|
||||||
|
homeassistant/components/enocean/dongle.py
|
||||||
|
homeassistant/components/enocean/light.py
|
||||||
|
homeassistant/components/enocean/sensor.py
|
||||||
|
homeassistant/components/enocean/switch.py
|
||||||
homeassistant/components/enphase_envoy/sensor.py
|
homeassistant/components/enphase_envoy/sensor.py
|
||||||
homeassistant/components/entur_public_transport/*
|
homeassistant/components/entur_public_transport/*
|
||||||
homeassistant/components/environment_canada/*
|
homeassistant/components/environment_canada/*
|
||||||
@ -313,6 +320,7 @@ omit =
|
|||||||
homeassistant/components/guardian/binary_sensor.py
|
homeassistant/components/guardian/binary_sensor.py
|
||||||
homeassistant/components/guardian/sensor.py
|
homeassistant/components/guardian/sensor.py
|
||||||
homeassistant/components/guardian/switch.py
|
homeassistant/components/guardian/switch.py
|
||||||
|
homeassistant/components/guardian/util.py
|
||||||
homeassistant/components/habitica/*
|
homeassistant/components/habitica/*
|
||||||
homeassistant/components/hangouts/*
|
homeassistant/components/hangouts/*
|
||||||
homeassistant/components/hangouts/__init__.py
|
homeassistant/components/hangouts/__init__.py
|
||||||
@ -372,7 +380,6 @@ omit =
|
|||||||
homeassistant/components/ihc/*
|
homeassistant/components/ihc/*
|
||||||
homeassistant/components/imap/sensor.py
|
homeassistant/components/imap/sensor.py
|
||||||
homeassistant/components/imap_email_content/sensor.py
|
homeassistant/components/imap_email_content/sensor.py
|
||||||
homeassistant/components/influxdb/sensor.py
|
|
||||||
homeassistant/components/insteon/*
|
homeassistant/components/insteon/*
|
||||||
homeassistant/components/incomfort/*
|
homeassistant/components/incomfort/*
|
||||||
homeassistant/components/intesishome/*
|
homeassistant/components/intesishome/*
|
||||||
@ -531,6 +538,7 @@ omit =
|
|||||||
homeassistant/components/netatmo/climate.py
|
homeassistant/components/netatmo/climate.py
|
||||||
homeassistant/components/netatmo/const.py
|
homeassistant/components/netatmo/const.py
|
||||||
homeassistant/components/netatmo/sensor.py
|
homeassistant/components/netatmo/sensor.py
|
||||||
|
homeassistant/components/netatmo/webhook.py
|
||||||
homeassistant/components/netdata/sensor.py
|
homeassistant/components/netdata/sensor.py
|
||||||
homeassistant/components/netgear/device_tracker.py
|
homeassistant/components/netgear/device_tracker.py
|
||||||
homeassistant/components/netgear_lte/*
|
homeassistant/components/netgear_lte/*
|
||||||
@ -621,9 +629,12 @@ omit =
|
|||||||
homeassistant/components/plugwise/climate.py
|
homeassistant/components/plugwise/climate.py
|
||||||
homeassistant/components/plugwise/sensor.py
|
homeassistant/components/plugwise/sensor.py
|
||||||
homeassistant/components/plugwise/switch.py
|
homeassistant/components/plugwise/switch.py
|
||||||
homeassistant/components/plum_lightpad/*
|
homeassistant/components/plum_lightpad/light.py
|
||||||
homeassistant/components/pocketcasts/sensor.py
|
homeassistant/components/pocketcasts/sensor.py
|
||||||
homeassistant/components/point/*
|
homeassistant/components/point/*
|
||||||
|
homeassistant/components/poolsense/__init__.py
|
||||||
|
homeassistant/components/poolsense/sensor.py
|
||||||
|
homeassistant/components/poolsense/binary_sensor.py
|
||||||
homeassistant/components/prezzibenzina/sensor.py
|
homeassistant/components/prezzibenzina/sensor.py
|
||||||
homeassistant/components/proliphix/climate.py
|
homeassistant/components/proliphix/climate.py
|
||||||
homeassistant/components/prometheus/*
|
homeassistant/components/prometheus/*
|
||||||
@ -731,7 +742,9 @@ omit =
|
|||||||
homeassistant/components/smappee/sensor.py
|
homeassistant/components/smappee/sensor.py
|
||||||
homeassistant/components/smappee/switch.py
|
homeassistant/components/smappee/switch.py
|
||||||
homeassistant/components/smarty/*
|
homeassistant/components/smarty/*
|
||||||
homeassistant/components/smarthab/*
|
homeassistant/components/smarthab/__init__.py
|
||||||
|
homeassistant/components/smarthab/cover.py
|
||||||
|
homeassistant/components/smarthab/light.py
|
||||||
homeassistant/components/sms/*
|
homeassistant/components/sms/*
|
||||||
homeassistant/components/smtp/notify.py
|
homeassistant/components/smtp/notify.py
|
||||||
homeassistant/components/snapcast/*
|
homeassistant/components/snapcast/*
|
||||||
|
8
.github/dependabot.yml
vendored
Normal file
8
.github/dependabot.yml
vendored
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: daily
|
||||||
|
time: "06:00"
|
||||||
|
open-pull-requests-limit: 10
|
784
.github/workflows/ci.yaml
vendored
Normal file
784
.github/workflows/ci.yaml
vendored
Normal file
@ -0,0 +1,784 @@
|
|||||||
|
name: CI
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- dev
|
||||||
|
- rc
|
||||||
|
- master
|
||||||
|
pull_request: ~
|
||||||
|
|
||||||
|
env:
|
||||||
|
DEFAULT_PYTHON: 3.7
|
||||||
|
PRE_COMMIT_HOME: ~/.cache/pre-commit
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Separate job to pre-populate the base dependency cache
|
||||||
|
# This prevent upcoming jobs to do the same individually
|
||||||
|
prepare-base:
|
||||||
|
name: Prepare base dependencies
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
id: python
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_test.txt') }}-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-
|
||||||
|
- name: Create Python virtual environment
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
python -m venv venv
|
||||||
|
. venv/bin/activate
|
||||||
|
pip install -U pip setuptools
|
||||||
|
pip install -r requirements.txt -r requirements_test.txt
|
||||||
|
# Uninstalling typing as a workaround. Eventually we should make sure
|
||||||
|
# all our dependencies drop typing.
|
||||||
|
# Find offending deps with `pipdeptree -r -p typing`
|
||||||
|
pip uninstall -y typing
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-pre-commit-
|
||||||
|
- name: Install pre-commit dependencies
|
||||||
|
if: steps.cache-precommit.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit install-hooks
|
||||||
|
|
||||||
|
lint-bandit:
|
||||||
|
name: Check bandit
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run bandit
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual bandit --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
lint-black:
|
||||||
|
name: Check black
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run black
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual black --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
lint-codespell:
|
||||||
|
name: Check codespell
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register codespell problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/codespell.json"
|
||||||
|
- name: Run codespell
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files
|
||||||
|
|
||||||
|
lint-dockerfile:
|
||||||
|
name: Check Dockerfile
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Register hadolint problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
|
||||||
|
- name: Check Dockerfile
|
||||||
|
uses: docker://hadolint/hadolint:v1.18.0
|
||||||
|
with:
|
||||||
|
args: hadolint Dockerfile
|
||||||
|
- name: Check Dockerfile.dev
|
||||||
|
uses: docker://hadolint/hadolint:v1.18.0
|
||||||
|
with:
|
||||||
|
args: hadolint Dockerfile.dev
|
||||||
|
|
||||||
|
lint-executable-shebangs:
|
||||||
|
name: Check executables
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register check executables problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||||
|
- name: Run executables check
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
|
||||||
|
|
||||||
|
lint-flake8:
|
||||||
|
name: Check flake8
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register flake8 problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/flake8.json"
|
||||||
|
- name: Run flake8
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual flake8 --all-files
|
||||||
|
|
||||||
|
lint-isort:
|
||||||
|
name: Check isort
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run isort
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
lint-json:
|
||||||
|
name: Check JSON
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register check-json problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/check-json.json"
|
||||||
|
- name: Run check-json
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual check-json --all-files
|
||||||
|
|
||||||
|
lint-pyupgrade:
|
||||||
|
name: Check pyupgrade
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run pyupgrade
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
# Disabled until we have the existing issues fixed
|
||||||
|
# lint-shellcheck:
|
||||||
|
# name: Check ShellCheck
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
# needs: prepare-base
|
||||||
|
# steps:
|
||||||
|
# - name: Check out code from GitHub
|
||||||
|
# uses: actions/checkout@v2
|
||||||
|
# - name: Run ShellCheck
|
||||||
|
# uses: ludeeus/action-shellcheck@0.3.0
|
||||||
|
|
||||||
|
lint-yaml:
|
||||||
|
name: Check YAML
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register yamllint problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/yamllint.json"
|
||||||
|
- name: Run yamllint
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual yamllint --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
hassfest:
|
||||||
|
name: Check hassfest
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run hassfest
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
python -m script.hassfest --action validate
|
||||||
|
|
||||||
|
gen-requirements-all:
|
||||||
|
name: Check all requirements
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-base
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore base Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
|
||||||
|
}}-${{ hashFiles('requirements.txt') }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run gen_requirements_all.py
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
python -m script.gen_requirements_all validate
|
||||||
|
|
||||||
|
prepare-tests:
|
||||||
|
name: Prepare tests for Python ${{ matrix.python-version }}
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.7, 3.8]
|
||||||
|
container: homeassistant/ci-azure:${{ matrix.python-version }}
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name:
|
||||||
|
Restore full Python ${{ matrix.python-version }} virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('requirements_all.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-venv-${{ matrix.python-version }}-${{ hashFiles('requirements_test.txt') }}-${{ hashFiles('requirements_all.txt') }}
|
||||||
|
${{ runner.os }}-venv-${{ matrix.python-version }}-${{ hashFiles('requirements_test.txt') }}
|
||||||
|
${{ runner.os }}-venv-${{ matrix.python-version }}-
|
||||||
|
- name:
|
||||||
|
Create full Python ${{ matrix.python-version }} virtual environment
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
python -m venv venv
|
||||||
|
. venv/bin/activate
|
||||||
|
pip install -U pip setuptools wheel
|
||||||
|
pip install -r requirements_all.txt
|
||||||
|
pip install -r requirements_test.txt
|
||||||
|
# Uninstalling typing as a workaround. Eventually we should make sure
|
||||||
|
# all our dependencies drop typing.
|
||||||
|
# Find offending deps with `pipdeptree -r -p typing`
|
||||||
|
pip uninstall -y typing
|
||||||
|
pip install -e .
|
||||||
|
|
||||||
|
pylint:
|
||||||
|
name: Check pylint
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-tests
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.7]
|
||||||
|
container: homeassistant/ci-azure:${{ matrix.python-version }}
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name:
|
||||||
|
Restore full Python ${{ matrix.python-version }} virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('requirements_all.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register pylint problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/pylint.json"
|
||||||
|
- name: Run pylint
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pylint homeassistant
|
||||||
|
|
||||||
|
mypy:
|
||||||
|
name: Check mypy
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-tests
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.7]
|
||||||
|
container: homeassistant/ci-azure:${{ matrix.python-version }}
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name:
|
||||||
|
Restore full Python ${{ matrix.python-version }} virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('requirements_all.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register mypy problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/mypy.json"
|
||||||
|
- name: Run mypy
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
mypy homeassistant
|
||||||
|
|
||||||
|
pytest:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare-tests
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
group: [1, 2, 3, 4]
|
||||||
|
python-version: [3.7, 3.8]
|
||||||
|
name: >-
|
||||||
|
Run tests Python ${{ matrix.python-version }} (group ${{ matrix.group }})
|
||||||
|
container: homeassistant/ci-azure:${{ matrix.python-version }}
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name:
|
||||||
|
Restore full Python ${{ matrix.python-version }} virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('requirements_all.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register Python problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||||
|
- name: Install Pytest Annotation plugin
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
# Ideally this should be part of our dependencies
|
||||||
|
# However this plugin is fairly new and doesn't run correctly
|
||||||
|
# on a non-GitHub environment.
|
||||||
|
pip install pytest-github-actions-annotate-failures
|
||||||
|
- name: Run pytest
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pytest \
|
||||||
|
-qq \
|
||||||
|
--timeout=9 \
|
||||||
|
--durations=10 \
|
||||||
|
-n auto \
|
||||||
|
--dist=loadfile \
|
||||||
|
--test-group-count 4 \
|
||||||
|
--test-group=${{ matrix.group }} \
|
||||||
|
--cov homeassistant \
|
||||||
|
-o console_output_style=count \
|
||||||
|
-p no:sugar \
|
||||||
|
tests
|
||||||
|
- name: Upload coverage artifact
|
||||||
|
uses: actions/upload-artifact@2.1.0
|
||||||
|
with:
|
||||||
|
name: coverage-${{ matrix.python-version }}-group${{ matrix.group }}
|
||||||
|
path: .coverage
|
||||||
|
- name: Check dirty
|
||||||
|
run: |
|
||||||
|
./script/check_dirty
|
||||||
|
|
||||||
|
coverage:
|
||||||
|
name: Process test coverage
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: pytest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.7]
|
||||||
|
container: homeassistant/ci-azure:${{ matrix.python-version }}
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name:
|
||||||
|
Restore full Python ${{ matrix.python-version }} virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
|
||||||
|
hashFiles('requirements_test.txt') }}-${{
|
||||||
|
hashFiles('requirements_all.txt') }}-${{
|
||||||
|
hashFiles('homeassistant/package_constraints.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Download all coverage artifacts
|
||||||
|
uses: actions/download-artifact@v2
|
||||||
|
- name: Combine coverage results
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
coverage combine coverage*/.coverage*
|
||||||
|
coverage report --fail-under=94
|
||||||
|
coverage xml
|
||||||
|
- name: Upload coverage to Codecov
|
||||||
|
uses: codecov/codecov-action@v1.0.10
|
14
.github/workflows/matchers/check-executables-have-shebangs.json
vendored
Normal file
14
.github/workflows/matchers/check-executables-have-shebangs.json
vendored
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "check-executables-have-shebangs",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):\\s(.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"message": 2
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
16
.github/workflows/matchers/check-json.json
vendored
Normal file
16
.github/workflows/matchers/check-json.json
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "check-json",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
|
||||||
|
"file": 1,
|
||||||
|
"message": 2,
|
||||||
|
"line": 3,
|
||||||
|
"column": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
16
.github/workflows/matchers/codespell.json
vendored
Normal file
16
.github/workflows/matchers/codespell.json
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "codespell",
|
||||||
|
"severity": "warning",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+):\\s(.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"message": 3
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
30
.github/workflows/matchers/flake8.json
vendored
Normal file
30
.github/workflows/matchers/flake8.json
vendored
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "flake8-error",
|
||||||
|
"severity": "error",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.*):(\\d+):(\\d+):\\s([EF]\\d{3}\\s.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"owner": "flake8-warning",
|
||||||
|
"severity": "warning",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDNW]\\d{3}\\s.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
16
.github/workflows/matchers/hadolint.json
vendored
Normal file
16
.github/workflows/matchers/hadolint.json
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "hadolint",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"message": 3,
|
||||||
|
"code": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
16
.github/workflows/matchers/mypy.json
vendored
Normal file
16
.github/workflows/matchers/mypy.json
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "mypy",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+):\\s(error|warning):\\s(.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"severity": 3,
|
||||||
|
"message": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
32
.github/workflows/matchers/pylint.json
vendored
Normal file
32
.github/workflows/matchers/pylint.json
vendored
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "pylint-error",
|
||||||
|
"severity": "error",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4,
|
||||||
|
"code": 5
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"owner": "pylint-warning",
|
||||||
|
"severity": "warning",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4,
|
||||||
|
"code": 5
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
18
.github/workflows/matchers/python.json
vendored
Normal file
18
.github/workflows/matchers/python.json
vendored
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "python",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
|
||||||
|
"message": 2
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
22
.github/workflows/matchers/yamllint.json
vendored
Normal file
22
.github/workflows/matchers/yamllint.json
vendored
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "yamllint",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.*\\.ya?ml)$",
|
||||||
|
"file": 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"regexp": "^\\s{2}(\\d+):(\\d+)\\s+(error|warning)\\s+(.*?)\\s+\\((.*)\\)$",
|
||||||
|
"line": 1,
|
||||||
|
"column": 2,
|
||||||
|
"severity": 3,
|
||||||
|
"message": 4,
|
||||||
|
"code": 5,
|
||||||
|
"loop": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
@ -22,7 +22,7 @@ repos:
|
|||||||
- --quiet-level=2
|
- --quiet-level=2
|
||||||
exclude_types: [csv, json]
|
exclude_types: [csv, json]
|
||||||
- repo: https://gitlab.com/pycqa/flake8
|
- repo: https://gitlab.com/pycqa/flake8
|
||||||
rev: 3.8.1
|
rev: 3.8.3
|
||||||
hooks:
|
hooks:
|
||||||
- id: flake8
|
- id: flake8
|
||||||
additional_dependencies:
|
additional_dependencies:
|
||||||
|
25
.travis.yml
25
.travis.yml
@ -1,4 +1,3 @@
|
|||||||
sudo: false
|
|
||||||
dist: bionic
|
dist: bionic
|
||||||
addons:
|
addons:
|
||||||
apt:
|
apt:
|
||||||
@ -14,22 +13,30 @@ addons:
|
|||||||
sources:
|
sources:
|
||||||
- sourceline: ppa:savoury1/ffmpeg4
|
- sourceline: ppa:savoury1/ffmpeg4
|
||||||
|
|
||||||
matrix:
|
python:
|
||||||
|
- "3.7.1"
|
||||||
|
- "3.8"
|
||||||
|
|
||||||
|
env:
|
||||||
|
- TOX_ARGS="-- --test-group-count 4 --test-group 1"
|
||||||
|
- TOX_ARGS="-- --test-group-count 4 --test-group 2"
|
||||||
|
- TOX_ARGS="-- --test-group-count 4 --test-group 3"
|
||||||
|
- TOX_ARGS="-- --test-group-count 4 --test-group 4"
|
||||||
|
|
||||||
|
jobs:
|
||||||
fast_finish: true
|
fast_finish: true
|
||||||
include:
|
include:
|
||||||
- python: "3.7.0"
|
- python: "3.7.1"
|
||||||
env: TOXENV=lint
|
env: TOXENV=lint
|
||||||
- python: "3.7.0"
|
- python: "3.7.1"
|
||||||
env: TOXENV=pylint PYLINT_ARGS=--jobs=0 TRAVIS_WAIT=30
|
env: TOXENV=pylint PYLINT_ARGS=--jobs=0 TRAVIS_WAIT=30
|
||||||
- python: "3.7.0"
|
- python: "3.7.1"
|
||||||
env: TOXENV=typing
|
env: TOXENV=typing
|
||||||
- python: "3.7.0"
|
|
||||||
env: TOXENV=py37
|
|
||||||
|
|
||||||
cache:
|
cache:
|
||||||
pip: true
|
pip: true
|
||||||
directories:
|
directories:
|
||||||
- $HOME/.cache/pre-commit
|
- $HOME/.cache/pre-commit
|
||||||
install: pip install -U tox
|
install: pip install -U tox tox-travis
|
||||||
language: python
|
language: python
|
||||||
script: ${TRAVIS_WAIT:+travis_wait $TRAVIS_WAIT} tox --develop
|
script: ${TRAVIS_WAIT:+travis_wait $TRAVIS_WAIT} tox --develop ${TOX_ARGS-}
|
||||||
|
4
.vscode/tasks.json
vendored
4
.vscode/tasks.json
vendored
@ -76,7 +76,7 @@
|
|||||||
{
|
{
|
||||||
"label": "Install all Requirements",
|
"label": "Install all Requirements",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "pip3 install -r requirements_all.txt -c homeassistant/package_constraints.txt",
|
"command": "pip3 install -r requirements_all.txt",
|
||||||
"group": {
|
"group": {
|
||||||
"kind": "build",
|
"kind": "build",
|
||||||
"isDefault": true
|
"isDefault": true
|
||||||
@ -90,7 +90,7 @@
|
|||||||
{
|
{
|
||||||
"label": "Install all Test Requirements",
|
"label": "Install all Test Requirements",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "pip3 install -r requirements_test_all.txt -c homeassistant/package_constraints.txt",
|
"command": "pip3 install -r requirements_test_all.txt",
|
||||||
"group": {
|
"group": {
|
||||||
"kind": "build",
|
"kind": "build",
|
||||||
"isDefault": true
|
"isDefault": true
|
||||||
|
22
CODEOWNERS
22
CODEOWNERS
@ -23,7 +23,6 @@ homeassistant/components/alarmdecoder/* @ajschmidt8
|
|||||||
homeassistant/components/alexa/* @home-assistant/cloud @ochlocracy
|
homeassistant/components/alexa/* @home-assistant/cloud @ochlocracy
|
||||||
homeassistant/components/almond/* @gcampax @balloob
|
homeassistant/components/almond/* @gcampax @balloob
|
||||||
homeassistant/components/alpha_vantage/* @fabaff
|
homeassistant/components/alpha_vantage/* @fabaff
|
||||||
homeassistant/components/amazon_polly/* @robbiet480
|
|
||||||
homeassistant/components/ambiclimate/* @danielhiversen
|
homeassistant/components/ambiclimate/* @danielhiversen
|
||||||
homeassistant/components/ambient_station/* @bachya
|
homeassistant/components/ambient_station/* @bachya
|
||||||
homeassistant/components/amcrest/* @pnbruckner
|
homeassistant/components/amcrest/* @pnbruckner
|
||||||
@ -47,7 +46,7 @@ homeassistant/components/automation/* @home-assistant/core
|
|||||||
homeassistant/components/avea/* @pattyland
|
homeassistant/components/avea/* @pattyland
|
||||||
homeassistant/components/avri/* @timvancann
|
homeassistant/components/avri/* @timvancann
|
||||||
homeassistant/components/awair/* @ahayworth @danielsjf
|
homeassistant/components/awair/* @ahayworth @danielsjf
|
||||||
homeassistant/components/aws/* @awarecan @robbiet480
|
homeassistant/components/aws/* @awarecan
|
||||||
homeassistant/components/axis/* @Kane610
|
homeassistant/components/axis/* @Kane610
|
||||||
homeassistant/components/azure_event_hub/* @eavanvalkenburg
|
homeassistant/components/azure_event_hub/* @eavanvalkenburg
|
||||||
homeassistant/components/azure_service_bus/* @hfurubotten
|
homeassistant/components/azure_service_bus/* @hfurubotten
|
||||||
@ -59,6 +58,7 @@ homeassistant/components/blink/* @fronzbot
|
|||||||
homeassistant/components/bmp280/* @belidzs
|
homeassistant/components/bmp280/* @belidzs
|
||||||
homeassistant/components/bmw_connected_drive/* @gerard33 @rikroe
|
homeassistant/components/bmw_connected_drive/* @gerard33 @rikroe
|
||||||
homeassistant/components/bom/* @maddenp
|
homeassistant/components/bom/* @maddenp
|
||||||
|
homeassistant/components/bond/* @prystupa
|
||||||
homeassistant/components/braviatv/* @bieniu
|
homeassistant/components/braviatv/* @bieniu
|
||||||
homeassistant/components/broadlink/* @danielhiversen @felipediel
|
homeassistant/components/broadlink/* @danielhiversen @felipediel
|
||||||
homeassistant/components/brother/* @bieniu
|
homeassistant/components/brother/* @bieniu
|
||||||
@ -94,6 +94,7 @@ homeassistant/components/denonavr/* @scarface-4711 @starkillerOG
|
|||||||
homeassistant/components/derivative/* @afaucogney
|
homeassistant/components/derivative/* @afaucogney
|
||||||
homeassistant/components/device_automation/* @home-assistant/core
|
homeassistant/components/device_automation/* @home-assistant/core
|
||||||
homeassistant/components/devolo_home_control/* @2Fake @Shutgun
|
homeassistant/components/devolo_home_control/* @2Fake @Shutgun
|
||||||
|
homeassistant/components/dexcom/* @gagebenne
|
||||||
homeassistant/components/digital_ocean/* @fabaff
|
homeassistant/components/digital_ocean/* @fabaff
|
||||||
homeassistant/components/directv/* @ctalkington
|
homeassistant/components/directv/* @ctalkington
|
||||||
homeassistant/components/discogs/* @thibmaek
|
homeassistant/components/discogs/* @thibmaek
|
||||||
@ -127,7 +128,6 @@ homeassistant/components/ezviz/* @baqs
|
|||||||
homeassistant/components/fastdotcom/* @rohankapoorcom
|
homeassistant/components/fastdotcom/* @rohankapoorcom
|
||||||
homeassistant/components/file/* @fabaff
|
homeassistant/components/file/* @fabaff
|
||||||
homeassistant/components/filter/* @dgomes
|
homeassistant/components/filter/* @dgomes
|
||||||
homeassistant/components/fitbit/* @robbiet480
|
|
||||||
homeassistant/components/fixer/* @fabaff
|
homeassistant/components/fixer/* @fabaff
|
||||||
homeassistant/components/flick_electric/* @ZephireNZ
|
homeassistant/components/flick_electric/* @ZephireNZ
|
||||||
homeassistant/components/flock/* @fabaff
|
homeassistant/components/flock/* @fabaff
|
||||||
@ -136,7 +136,6 @@ homeassistant/components/flunearyou/* @bachya
|
|||||||
homeassistant/components/forked_daapd/* @uvjustin
|
homeassistant/components/forked_daapd/* @uvjustin
|
||||||
homeassistant/components/fortios/* @kimfrellsen
|
homeassistant/components/fortios/* @kimfrellsen
|
||||||
homeassistant/components/foscam/* @skgsergio
|
homeassistant/components/foscam/* @skgsergio
|
||||||
homeassistant/components/foursquare/* @robbiet480
|
|
||||||
homeassistant/components/freebox/* @snoof85 @Quentame
|
homeassistant/components/freebox/* @snoof85 @Quentame
|
||||||
homeassistant/components/fronius/* @nielstron
|
homeassistant/components/fronius/* @nielstron
|
||||||
homeassistant/components/frontend/* @home-assistant/frontend
|
homeassistant/components/frontend/* @home-assistant/frontend
|
||||||
@ -149,18 +148,15 @@ homeassistant/components/geonetnz_volcano/* @exxamalte
|
|||||||
homeassistant/components/gios/* @bieniu
|
homeassistant/components/gios/* @bieniu
|
||||||
homeassistant/components/gitter/* @fabaff
|
homeassistant/components/gitter/* @fabaff
|
||||||
homeassistant/components/glances/* @fabaff @engrbm87
|
homeassistant/components/glances/* @fabaff @engrbm87
|
||||||
homeassistant/components/gntp/* @robbiet480
|
|
||||||
homeassistant/components/gogogate2/* @vangorra
|
homeassistant/components/gogogate2/* @vangorra
|
||||||
homeassistant/components/google_assistant/* @home-assistant/cloud
|
homeassistant/components/google_assistant/* @home-assistant/cloud
|
||||||
homeassistant/components/google_cloud/* @lufton
|
homeassistant/components/google_cloud/* @lufton
|
||||||
homeassistant/components/google_translate/* @awarecan
|
homeassistant/components/google_translate/* @awarecan
|
||||||
homeassistant/components/google_travel_time/* @robbiet480
|
|
||||||
homeassistant/components/gpsd/* @fabaff
|
homeassistant/components/gpsd/* @fabaff
|
||||||
homeassistant/components/greeneye_monitor/* @jkeljo
|
homeassistant/components/greeneye_monitor/* @jkeljo
|
||||||
homeassistant/components/griddy/* @bdraco
|
homeassistant/components/griddy/* @bdraco
|
||||||
homeassistant/components/group/* @home-assistant/core
|
homeassistant/components/group/* @home-assistant/core
|
||||||
homeassistant/components/growatt_server/* @indykoning
|
homeassistant/components/growatt_server/* @indykoning
|
||||||
homeassistant/components/gtfs/* @robbiet480
|
|
||||||
homeassistant/components/guardian/* @bachya
|
homeassistant/components/guardian/* @bachya
|
||||||
homeassistant/components/harmony/* @ehendrix23 @bramkragten @bdraco
|
homeassistant/components/harmony/* @ehendrix23 @bramkragten @bdraco
|
||||||
homeassistant/components/hassio/* @home-assistant/hass-io
|
homeassistant/components/hassio/* @home-assistant/hass-io
|
||||||
@ -179,11 +175,10 @@ homeassistant/components/homekit_controller/* @Jc2k
|
|||||||
homeassistant/components/homematic/* @pvizeli @danielperna84
|
homeassistant/components/homematic/* @pvizeli @danielperna84
|
||||||
homeassistant/components/homematicip_cloud/* @SukramJ
|
homeassistant/components/homematicip_cloud/* @SukramJ
|
||||||
homeassistant/components/honeywell/* @zxdavb
|
homeassistant/components/honeywell/* @zxdavb
|
||||||
homeassistant/components/html5/* @robbiet480
|
|
||||||
homeassistant/components/http/* @home-assistant/core
|
homeassistant/components/http/* @home-assistant/core
|
||||||
homeassistant/components/huawei_lte/* @scop @fphammerle
|
homeassistant/components/huawei_lte/* @scop @fphammerle
|
||||||
homeassistant/components/huawei_router/* @abmantis
|
homeassistant/components/huawei_router/* @abmantis
|
||||||
homeassistant/components/hue/* @balloob
|
homeassistant/components/hue/* @balloob @frenck
|
||||||
homeassistant/components/humidifier/* @home-assistant/core @Shulyaka
|
homeassistant/components/humidifier/* @home-assistant/core @Shulyaka
|
||||||
homeassistant/components/hunterdouglas_powerview/* @bdraco
|
 homeassistant/components/hunterdouglas_powerview/* @bdraco
 homeassistant/components/hvv_departures/* @vigonotion
@@ -193,7 +188,7 @@ homeassistant/components/iaqualink/* @flz
 homeassistant/components/icloud/* @Quentame
 homeassistant/components/ign_sismologia/* @exxamalte
 homeassistant/components/incomfort/* @zxdavb
-homeassistant/components/influxdb/* @fabaff
+homeassistant/components/influxdb/* @fabaff @mdegat01
 homeassistant/components/input_boolean/* @home-assistant/core
 homeassistant/components/input_datetime/* @home-assistant/core
 homeassistant/components/input_number/* @home-assistant/core
@@ -318,6 +313,7 @@ homeassistant/components/plex/* @jjlawren
 homeassistant/components/plugwise/* @CoMPaTech @bouwew
 homeassistant/components/plum_lightpad/* @ColinHarrington @prystupa
 homeassistant/components/point/* @fredrike
+homeassistant/components/poolsense/* @haemishkyd
 homeassistant/components/powerwall/* @bdraco @jrester
 homeassistant/components/prometheus/* @knyar
 homeassistant/components/proxmoxve/* @k4ds3 @jhollowe
@@ -338,7 +334,7 @@ homeassistant/components/rainforest_eagle/* @gtdiehl @jcalbert
 homeassistant/components/rainmachine/* @bachya
 homeassistant/components/random/* @fabaff
 homeassistant/components/repetier/* @MTrab
-homeassistant/components/rfxtrx/* @danielhiversen
+homeassistant/components/rfxtrx/* @danielhiversen @elupus
 homeassistant/components/ring/* @balloob
 homeassistant/components/rmvtransport/* @cgtobi
 homeassistant/components/roku/* @ctalkington
@@ -431,8 +427,6 @@ homeassistant/components/transmission/* @engrbm87 @JPHutchins
 homeassistant/components/tts/* @pvizeli
 homeassistant/components/tuya/* @ollo69
 homeassistant/components/twentemilieu/* @frenck
-homeassistant/components/twilio_call/* @robbiet480
-homeassistant/components/twilio_sms/* @robbiet480
 homeassistant/components/ubee/* @mzdrale
 homeassistant/components/unifi/* @Kane610
 homeassistant/components/unifiled/* @florisvdk
@@ -478,7 +472,7 @@ homeassistant/components/yeelightsunflower/* @lindsaymarkward
 homeassistant/components/yessssms/* @flowolf
 homeassistant/components/yi/* @bachya
 homeassistant/components/yr/* @danielhiversen
-homeassistant/components/zeroconf/* @robbiet480 @Kane610
+homeassistant/components/zeroconf/* @Kane610
 homeassistant/components/zerproc/* @emlove
 homeassistant/components/zha/* @dmulcahey @adminiuga
 homeassistant/components/zone/* @home-assistant/core
@@ -10,9 +10,10 @@ WORKDIR /usr/src
 COPY . homeassistant/
 RUN \
     pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-        -r homeassistant/requirements_all.txt -c homeassistant/homeassistant/package_constraints.txt \
+        -r homeassistant/requirements_all.txt \
+    && pip3 uninstall -y typing \
     && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
         -e ./homeassistant \
     && python3 -m compileall homeassistant/homeassistant

 # Home Assistant S6-Overlay
@@ -23,9 +23,10 @@ RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
 WORKDIR /workspaces

 # Install Python dependencies from requirements
-COPY requirements_test.txt requirements_test_pre_commit.txt homeassistant/package_constraints.txt ./
-RUN pip3 install -r requirements_test.txt -c package_constraints.txt \
-    && rm -f requirements_test.txt package_constraints.txt requirements_test_pre_commit.txt
+COPY requirements_test.txt requirements_test_pre_commit.txt ./
+COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
+RUN pip3 install -r requirements_test.txt \
+    && rm -rf requirements_test.txt requirements_test_pre_commit.txt homeassistant/

 # Set the default shell to bash instead of sh
 ENV SHELL /bin/bash
@@ -44,7 +44,7 @@ stages:
       python -m venv venv

       . venv/bin/activate
-      pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
+      pip install -r requirements_test.txt
       pre-commit install-hooks
   - script: |
       . venv/bin/activate
@@ -117,7 +117,7 @@ stages:
       python -m venv venv

       . venv/bin/activate
-      pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
+      pip install -r requirements_test.txt
       pre-commit install-hooks
   - script: |
       . venv/bin/activate
@@ -165,7 +165,7 @@ stages:

       . venv/bin/activate
       pip install -U pip setuptools pytest-azurepipelines pytest-xdist -c homeassistant/package_constraints.txt
-      pip install -r requirements_test_all.txt -c homeassistant/package_constraints.txt
+      pip install -r requirements_test_all.txt
       # This is a TEMP. Eventually we should make sure our 4 dependencies drop typing.
       # Find offending deps with `pipdeptree -r -p typing`
       pip uninstall -y typing
@@ -209,8 +209,8 @@ stages:

       . venv/bin/activate
       pip install -U pip setuptools wheel
-      pip install -r requirements_all.txt -c homeassistant/package_constraints.txt
-      pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
+      pip install -r requirements_all.txt
+      pip install -r requirements_test.txt
       # This is a TEMP. Eventually we should make sure our 4 dependencies drop typing.
       # Find offending deps with `pipdeptree -r -p typing`
       pip uninstall -y typing
@@ -234,7 +234,7 @@ stages:
      python -m venv venv

      . venv/bin/activate
-     pip install -e . -r requirements_test.txt -c homeassistant/package_constraints.txt
+     pip install -e . -r requirements_test.txt
      pre-commit install-hooks
   - script: |
      . venv/bin/activate

@@ -17,7 +17,7 @@ schedules:
       - dev
 variables:
   - name: versionWheels
-    value: '1.10.1-3.7-alpine3.11'
+    value: '1.13.0-3.8-alpine3.12'
 resources:
   repositories:
     - repository: azure
build.json (10 lines)
@@ -1,11 +1,11 @@
 {
     "image": "homeassistant/{arch}-homeassistant",
     "build_from": {
-        "aarch64": "homeassistant/aarch64-homeassistant-base:7.2.0",
-        "armhf": "homeassistant/armhf-homeassistant-base:7.2.0",
-        "armv7": "homeassistant/armv7-homeassistant-base:7.2.0",
-        "amd64": "homeassistant/amd64-homeassistant-base:7.2.0",
-        "i386": "homeassistant/i386-homeassistant-base:7.2.0"
+        "aarch64": "homeassistant/aarch64-homeassistant-base:8.0.0",
+        "armhf": "homeassistant/armhf-homeassistant-base:8.0.0",
+        "armv7": "homeassistant/armv7-homeassistant-base:8.0.0",
+        "amd64": "homeassistant/amd64-homeassistant-base:8.0.0",
+        "i386": "homeassistant/i386-homeassistant-base:8.0.0"
     },
     "labels": {
         "io.hass.type": "core"
@@ -1,6 +1,5 @@
 """Start Home Assistant."""
 import argparse
-import asyncio
 import os
 import platform
 import subprocess
@@ -8,32 +7,9 @@ import sys
 import threading
 from typing import List

-import yarl
-
 from homeassistant.const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__


-def set_loop() -> None:
-    """Attempt to use different loop."""
-    # pylint: disable=import-outside-toplevel
-    from asyncio.events import BaseDefaultEventLoopPolicy
-
-    if sys.platform == "win32":
-        if hasattr(asyncio, "WindowsProactorEventLoopPolicy"):
-            # pylint: disable=no-member
-            policy = asyncio.WindowsProactorEventLoopPolicy()
-        else:
-
-            class ProactorPolicy(BaseDefaultEventLoopPolicy):
-                """Event loop policy to create proactor loops."""
-
-                _loop_factory = asyncio.ProactorEventLoop
-
-            policy = ProactorPolicy()
-
-        asyncio.set_event_loop_policy(policy)
-
-
 def validate_python() -> None:
     """Validate that the right Python version is running."""
     if sys.version_info[:3] < REQUIRED_PYTHON_VER:
@@ -240,39 +216,6 @@ def cmdline() -> List[str]:
     return [arg for arg in sys.argv if arg != "--daemon"]


-async def setup_and_run_hass(config_dir: str, args: argparse.Namespace) -> int:
-    """Set up Home Assistant and run."""
-    # pylint: disable=import-outside-toplevel
-    from homeassistant import bootstrap
-
-    hass = await bootstrap.async_setup_hass(
-        config_dir=config_dir,
-        verbose=args.verbose,
-        log_rotate_days=args.log_rotate_days,
-        log_file=args.log_file,
-        log_no_color=args.log_no_color,
-        skip_pip=args.skip_pip,
-        safe_mode=args.safe_mode,
-    )
-
-    if hass is None:
-        return 1
-
-    if args.open_ui:
-        import webbrowser  # pylint: disable=import-outside-toplevel
-
-        if hass.config.api is not None:
-            scheme = "https" if hass.config.api.use_ssl else "http"
-            url = str(
-                yarl.URL.build(
-                    scheme=scheme, host="127.0.0.1", port=hass.config.api.port
-                )
-            )
-            hass.add_job(webbrowser.open, url)
-
-    return await hass.async_run()
-
-
 def try_to_restart() -> None:
     """Attempt to clean up state and start a new Home Assistant instance."""
     # Things should be mostly shut down already at this point, now just try
@@ -319,8 +262,6 @@ def main() -> int:
     """Start Home Assistant."""
     validate_python()

-    set_loop()
-
     # Run a simple daemon runner process on Windows to handle restarts
     if os.name == "nt" and "--runner" not in sys.argv:
         nt_args = cmdline() + ["--runner"]
@@ -353,7 +294,22 @@ def main() -> int:
     if args.pid_file:
         write_pid(args.pid_file)

-    exit_code = asyncio.run(setup_and_run_hass(config_dir, args), debug=args.debug)
+    # pylint: disable=import-outside-toplevel
+    from homeassistant import runner
+
+    runtime_conf = runner.RuntimeConfig(
+        config_dir=config_dir,
+        verbose=args.verbose,
+        log_rotate_days=args.log_rotate_days,
+        log_file=args.log_file,
+        log_no_color=args.log_no_color,
+        skip_pip=args.skip_pip,
+        safe_mode=args.safe_mode,
+        debug=args.debug,
+        open_ui=args.open_ui,
+    )
+
+    exit_code = runner.run(runtime_conf)
     if exit_code == RESTART_EXIT_CODE and not args.runner:
         try_to_restart()
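Note: the hunk above replaces a long positional argument list with a single runtime-configuration object passed to the runner. The following is only a minimal sketch of that pattern, assuming a plain dataclass and made-up argument names; Home Assistant's real RuntimeConfig lives in homeassistant/runner.py and has more fields.

# Sketch: bundling parsed CLI options into one object (illustrative names only).
import argparse
from dataclasses import dataclass


@dataclass
class RuntimeConfig:
    """Hypothetical container for everything the runner needs."""

    config_dir: str
    verbose: bool = False
    safe_mode: bool = False
    debug: bool = False
    open_ui: bool = False


def build_runtime_config(args: argparse.Namespace) -> RuntimeConfig:
    """Translate parsed CLI arguments into a single RuntimeConfig value."""
    return RuntimeConfig(
        config_dir=args.config,
        verbose=args.verbose,
        safe_mode=args.safe_mode,
        debug=args.debug,
        open_ui=args.open_ui,
    )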
@@ -77,10 +77,10 @@ def _verify_otp(secret: str, otp: str, count: int) -> bool:
 class NotifySetting:
     """Store notify setting for one user."""

-    secret = attr.ib(type=str, factory=_generate_secret)  # not persistent
-    counter = attr.ib(type=int, factory=_generate_random)  # not persistent
-    notify_service = attr.ib(type=Optional[str], default=None)
-    target = attr.ib(type=Optional[str], default=None)
+    secret: str = attr.ib(factory=_generate_secret)  # not persistent
+    counter: int = attr.ib(factory=_generate_random)  # not persistent
+    notify_service: Optional[str] = attr.ib(default=None)
+    target: Optional[str] = attr.ib(default=None)


 _UsersDict = Dict[str, NotifySetting]
@@ -20,39 +20,35 @@ TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"
 class Group:
     """A group."""

-    name = attr.ib(type=Optional[str])
-    policy = attr.ib(type=perm_mdl.PolicyType)
-    id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
-    system_generated = attr.ib(type=bool, default=False)
+    name: Optional[str] = attr.ib()
+    policy: perm_mdl.PolicyType = attr.ib()
+    id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
+    system_generated: bool = attr.ib(default=False)


 @attr.s(slots=True)
 class User:
     """A user."""

-    name = attr.ib(type=Optional[str])
-    perm_lookup = attr.ib(type=perm_mdl.PermissionLookup, eq=False, order=False)
-    id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
-    is_owner = attr.ib(type=bool, default=False)
-    is_active = attr.ib(type=bool, default=False)
-    system_generated = attr.ib(type=bool, default=False)
+    name: Optional[str] = attr.ib()
+    perm_lookup: perm_mdl.PermissionLookup = attr.ib(eq=False, order=False)
+    id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
+    is_owner: bool = attr.ib(default=False)
+    is_active: bool = attr.ib(default=False)
+    system_generated: bool = attr.ib(default=False)

-    groups = attr.ib(type=List[Group], factory=list, eq=False, order=False)
+    groups: List[Group] = attr.ib(factory=list, eq=False, order=False)

     # List of credentials of a user.
-    credentials = attr.ib(type=List["Credentials"], factory=list, eq=False, order=False)
+    credentials: List["Credentials"] = attr.ib(factory=list, eq=False, order=False)

     # Tokens associated with a user.
-    refresh_tokens = attr.ib(
-        type=Dict[str, "RefreshToken"], factory=dict, eq=False, order=False
+    refresh_tokens: Dict[str, "RefreshToken"] = attr.ib(
+        factory=dict, eq=False, order=False
     )

-    _permissions = attr.ib(
-        type=Optional[perm_mdl.PolicyPermissions],
-        init=False,
-        eq=False,
-        order=False,
-        default=None,
+    _permissions: Optional[perm_mdl.PolicyPermissions] = attr.ib(
+        init=False, eq=False, order=False, default=None,
     )

     @property
@@ -88,39 +84,38 @@ class User:
 class RefreshToken:
     """RefreshToken for a user to grant new access tokens."""

-    user = attr.ib(type=User)
-    client_id = attr.ib(type=Optional[str])
-    access_token_expiration = attr.ib(type=timedelta)
-    client_name = attr.ib(type=Optional[str], default=None)
-    client_icon = attr.ib(type=Optional[str], default=None)
-    token_type = attr.ib(
-        type=str,
+    user: User = attr.ib()
+    client_id: Optional[str] = attr.ib()
+    access_token_expiration: timedelta = attr.ib()
+    client_name: Optional[str] = attr.ib(default=None)
+    client_icon: Optional[str] = attr.ib(default=None)
+    token_type: str = attr.ib(
         default=TOKEN_TYPE_NORMAL,
         validator=attr.validators.in_(
             (TOKEN_TYPE_NORMAL, TOKEN_TYPE_SYSTEM, TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN)
         ),
     )
-    id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
-    created_at = attr.ib(type=datetime, factory=dt_util.utcnow)
-    token = attr.ib(type=str, factory=lambda: secrets.token_hex(64))
-    jwt_key = attr.ib(type=str, factory=lambda: secrets.token_hex(64))
+    id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
+    created_at: datetime = attr.ib(factory=dt_util.utcnow)
+    token: str = attr.ib(factory=lambda: secrets.token_hex(64))
+    jwt_key: str = attr.ib(factory=lambda: secrets.token_hex(64))

-    last_used_at = attr.ib(type=Optional[datetime], default=None)
-    last_used_ip = attr.ib(type=Optional[str], default=None)
+    last_used_at: Optional[datetime] = attr.ib(default=None)
+    last_used_ip: Optional[str] = attr.ib(default=None)


 @attr.s(slots=True)
 class Credentials:
     """Credentials for a user on an auth provider."""

-    auth_provider_type = attr.ib(type=str)
-    auth_provider_id = attr.ib(type=Optional[str])
+    auth_provider_type: str = attr.ib()
+    auth_provider_id: Optional[str] = attr.ib()

     # Allow the auth provider to store data to represent their auth.
-    data = attr.ib(type=dict)
+    data: dict = attr.ib()

-    id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
-    is_new = attr.ib(type=bool, default=True)
+    id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
+    is_new: bool = attr.ib(default=True)


 class UserMeta(NamedTuple):

@@ -13,5 +13,5 @@ if TYPE_CHECKING:
 class PermissionLookup:
     """Class to hold data for permission lookups."""

-    entity_registry = attr.ib(type="ent_reg.EntityRegistry")
-    device_registry = attr.ib(type="dev_reg.DeviceRegistry")
+    entity_registry: "ent_reg.EntityRegistry" = attr.ib()
+    device_registry: "dev_reg.DeviceRegistry" = attr.ib()
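Note: the auth model changes above all follow one mechanical pattern: `attr.ib(type=X)` becomes a PEP 526 annotation with `attr.ib()` keeping only the non-type options. A hedged before/after sketch of that shape, using a made-up class rather than the real models:

# Sketch of the attrs migration shown above; ExampleOld/ExampleNew are illustrative only.
from typing import Optional

import attr


@attr.s(slots=True)
class ExampleOld:
    """Old style: the type is passed to attr.ib()."""

    name = attr.ib(type=Optional[str], default=None)
    count = attr.ib(type=int, default=0)


@attr.s(slots=True)
class ExampleNew:
    """New style: the type lives in the annotation, attr.ib() keeps the options."""

    name: Optional[str] = attr.ib(default=None)
    count: int = attr.ib(default=0)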
@@ -75,7 +75,7 @@ class CommandLineAuthProvider(AuthProvider):

         if process.returncode != 0:
             _LOGGER.error(
-                "User %r failed to authenticate, command exited with code %d.",
+                "User %r failed to authenticate, command exited with code %d",
                 username,
                 process.returncode,
             )

@@ -190,7 +190,7 @@ class TrustedNetworksLoginFlow(LoginFlow):
             ).async_validate_access(self._ip_address)

         except InvalidAuthError:
-            return self.async_abort(reason="not_whitelisted")
+            return self.async_abort(reason="not_allowed")

         if user_input is not None:
             return await self.async_finish(user_input)
@@ -7,10 +7,11 @@ import logging.handlers
 import os
 import sys
 from time import monotonic
-from typing import Any, Dict, Optional, Set
+from typing import TYPE_CHECKING, Any, Dict, Optional, Set

 from async_timeout import timeout
 import voluptuous as vol
+import yarl

 from homeassistant import config as conf_util, config_entries, core, loader
 from homeassistant.components import http
@@ -31,6 +32,9 @@ from homeassistant.util.logging import async_activate_log_queue_handler
 from homeassistant.util.package import async_get_user_site, is_virtual_env
 from homeassistant.util.yaml import clear_secret_cache

+if TYPE_CHECKING:
+    from .runner import RuntimeConfig
+
 _LOGGER = logging.getLogger(__name__)

 ERROR_LOG_FILENAME = "home-assistant.log"
@@ -66,23 +70,22 @@ STAGE_1_INTEGRATIONS = {


 async def async_setup_hass(
-    *,
-    config_dir: str,
-    verbose: bool,
-    log_rotate_days: int,
-    log_file: str,
-    log_no_color: bool,
-    skip_pip: bool,
-    safe_mode: bool,
+    runtime_config: "RuntimeConfig",
 ) -> Optional[core.HomeAssistant]:
     """Set up Home Assistant."""
     hass = core.HomeAssistant()
-    hass.config.config_dir = config_dir
+    hass.config.config_dir = runtime_config.config_dir

-    async_enable_logging(hass, verbose, log_rotate_days, log_file, log_no_color)
+    async_enable_logging(
+        hass,
+        runtime_config.verbose,
+        runtime_config.log_rotate_days,
+        runtime_config.log_file,
+        runtime_config.log_no_color,
+    )

-    hass.config.skip_pip = skip_pip
-    if skip_pip:
+    hass.config.skip_pip = runtime_config.skip_pip
+    if runtime_config.skip_pip:
         _LOGGER.warning(
             "Skipping pip installation of required modules. This may cause issues"
         )
@@ -91,10 +94,11 @@ async def async_setup_hass(
         _LOGGER.error("Error getting configuration path")
         return None

-    _LOGGER.info("Config directory: %s", config_dir)
+    _LOGGER.info("Config directory: %s", runtime_config.config_dir)

     config_dict = None
     basic_setup_success = False
+    safe_mode = runtime_config.safe_mode

     if not safe_mode:
         await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)
@@ -107,7 +111,7 @@ async def async_setup_hass(
             )
         else:
             if not is_virtual_env():
-                await async_mount_local_lib_path(config_dir)
+                await async_mount_local_lib_path(runtime_config.config_dir)

             basic_setup_success = (
                 await async_from_config_dict(config_dict, hass) is not None
@@ -137,6 +141,7 @@ async def async_setup_hass(

         safe_mode = True
         old_config = hass.config
+
         hass = core.HomeAssistant()
         hass.config.skip_pip = old_config.skip_pip
         hass.config.internal_url = old_config.internal_url
@@ -153,9 +158,32 @@ async def async_setup_hass(
             {"safe_mode": {}, "http": http_conf}, hass,
         )

+    if runtime_config.open_ui:
+        hass.add_job(open_hass_ui, hass)
+
     return hass


+def open_hass_ui(hass: core.HomeAssistant) -> None:
+    """Open the UI."""
+    import webbrowser  # pylint: disable=import-outside-toplevel
+
+    if hass.config.api is None or "frontend" not in hass.config.components:
+        _LOGGER.warning("Cannot launch the UI because frontend not loaded")
+        return
+
+    scheme = "https" if hass.config.api.use_ssl else "http"
+    url = str(
+        yarl.URL.build(scheme=scheme, host="127.0.0.1", port=hass.config.api.port)
+    )
+
+    if not webbrowser.open(url):
+        _LOGGER.warning(
+            "Unable to open the Home Assistant UI in a browser. Open it yourself at %s",
+            url,
+        )
+
+
 async def async_from_config_dict(
     config: ConfigType, hass: core.HomeAssistant
 ) -> Optional[core.HomeAssistant]:
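Note: bootstrap.py now imports `RuntimeConfig` only under `TYPE_CHECKING` and refers to it through a string annotation, which keeps the runner module out of the runtime import graph. A small sketch of that idiom, with hypothetical module names:

# Sketch of a TYPE_CHECKING-only import; "myapp.runner" is a placeholder module name.
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    # Evaluated by type checkers only; never executed at runtime,
    # so it cannot create an import cycle.
    from myapp.runner import RuntimeConfig


def setup(runtime_config: "RuntimeConfig") -> Optional[str]:
    """Use the forward-referenced type through a string annotation."""
    return runtime_config.config_dir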
@@ -37,7 +37,7 @@ def is_on(hass, entity_id=None):
             continue

         if not hasattr(component, "is_on"):
-            _LOGGER.warning("Integration %s has no is_on method.", domain)
+            _LOGGER.warning("Integration %s has no is_on method", domain)
             continue

         if component.is_on(ent_id):
@@ -61,7 +61,7 @@ class AcmedaCover(AcmedaBase, CoverEntity):
         None is unknown, 0 is closed, 100 is fully open.
         """
         position = None
-        if self.roller.type == 7 or self.roller.type == 10:
+        if self.roller.type in [7, 10]:
             position = 100 - self.roller.closed_percent
         return position

@@ -86,37 +86,36 @@ class AcmedaCover(AcmedaBase, CoverEntity):
     @property
     def is_closed(self):
         """Return if the cover is closed."""
-        is_closed = self.roller.closed_percent == 100
-        return is_closed
+        return self.roller.closed_percent == 100

-    async def close_cover(self, **kwargs):
+    async def async_close_cover(self, **kwargs):
         """Close the roller."""
         await self.roller.move_down()

-    async def open_cover(self, **kwargs):
+    async def async_open_cover(self, **kwargs):
         """Open the roller."""
         await self.roller.move_up()

-    async def stop_cover(self, **kwargs):
+    async def async_stop_cover(self, **kwargs):
         """Stop the roller."""
         await self.roller.move_stop()

-    async def set_cover_position(self, **kwargs):
+    async def async_set_cover_position(self, **kwargs):
         """Move the roller shutter to a specific position."""
         await self.roller.move_to(100 - kwargs[ATTR_POSITION])

-    async def close_cover_tilt(self, **kwargs):
+    async def async_close_cover_tilt(self, **kwargs):
         """Close the roller."""
         await self.roller.move_down()

-    async def open_cover_tilt(self, **kwargs):
+    async def async_open_cover_tilt(self, **kwargs):
         """Open the roller."""
         await self.roller.move_up()

-    async def stop_cover_tilt(self, **kwargs):
+    async def async_stop_cover_tilt(self, **kwargs):
         """Stop the roller."""
         await self.roller.move_stop()

-    async def set_cover_tilt(self, **kwargs):
+    async def async_set_cover_tilt(self, **kwargs):
         """Tilt the roller shutter to a specific position."""
         await self.roller.move_to(100 - kwargs[ATTR_POSITION])
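Note: the Acmeda cover now implements the `async_`-prefixed service methods that Home Assistant awaits directly on the event loop instead of the sync variants. A hedged sketch of that naming convention, with a fabricated roller client standing in for the real library:

# Sketch only: FakeRoller is a stand-in; the real integration talks to the vendor library.
from homeassistant.components.cover import ATTR_POSITION, CoverEntity


class FakeRoller:
    """Hypothetical async client used for illustration."""

    async def move_up(self):
        ...

    async def move_down(self):
        ...

    async def move_to(self, percent):
        ...


class SketchCover(CoverEntity):
    """Cover entity using the async_* method names Home Assistant awaits."""

    def __init__(self, roller: FakeRoller):
        self.roller = roller

    async def async_open_cover(self, **kwargs):
        """Open the cover; awaited by the core, no executor thread needed."""
        await self.roller.move_up()

    async def async_close_cover(self, **kwargs):
        """Close the cover."""
        await self.roller.move_down()

    async def async_set_cover_position(self, **kwargs):
        """Move to a position; Acmeda inverts it because 100 means fully closed there."""
        await self.roller.move_to(100 - kwargs[ATTR_POSITION])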
@@ -183,7 +183,7 @@ class AdGuardHomeEntity(Entity):
         except AdGuardHomeError:
             if self._available:
                 _LOGGER.debug(
-                    "An error occurred while updating AdGuard Home sensor.",
+                    "An error occurred while updating AdGuard Home sensor",
                     exc_info=True,
                 )
             self._available = False
@@ -73,7 +73,7 @@ class AdGuardHomeSwitch(AdGuardHomeDeviceEntity, SwitchEntity):
         try:
             await self._adguard_turn_off()
         except AdGuardHomeError:
-            _LOGGER.error("An error occurred while turning off AdGuard Home switch.")
+            _LOGGER.error("An error occurred while turning off AdGuard Home switch")
             self._available = False

     async def _adguard_turn_off(self) -> None:
@@ -85,7 +85,7 @@ class AdGuardHomeSwitch(AdGuardHomeDeviceEntity, SwitchEntity):
         try:
             await self._adguard_turn_on()
         except AdGuardHomeError:
-            _LOGGER.error("An error occurred while turning on AdGuard Home switch.")
+            _LOGGER.error("An error occurred while turning on AdGuard Home switch")
             self._available = False

     async def _adguard_turn_on(self) -> None:
@@ -4,6 +4,14 @@
         "hassio_confirm": {
             "description": "Chcete nakonfigurovat slu\u017ebu Home Assistant pro p\u0159ipojen\u00ed k AddGuard pomoc\u00ed hass.io {addon}?",
             "title": "AdGuard prost\u0159ednictv\u00edm dopl\u0148ku Hass.io"
+        },
+        "user": {
+            "data": {
+                "host": "Hostitel",
+                "password": "Heslo",
+                "port": "Port",
+                "username": "U\u017eivatelsk\u00e9 jm\u00e9no"
+            }
         }
     }
 }

@@ -1,10 +1,17 @@
 {
     "config": {
+        "error": {
+            "connection_error": "Falha na liga\u00e7\u00e3o"
+        },
         "step": {
+            "hassio_confirm": {
+                "title": "AdGuard Home via Hass.io add-on"
+            },
             "user": {
                 "data": {
                     "host": "Servidor",
                     "password": "Palavra-passe",
+                    "port": "Porta",
                     "username": "Nome de Utilizador"
                 }
             }
@@ -2,6 +2,6 @@
     "domain": "ads",
     "name": "ADS",
     "documentation": "https://www.home-assistant.io/integrations/ads",
-    "requirements": ["pyads==3.0.7"],
+    "requirements": ["pyads==3.1.3"],
     "codeowners": []
 }
@@ -74,8 +74,8 @@ class AgentCamera(MjpegCamera):

         device_info = {
             CONF_NAME: device.name,
-            CONF_MJPEG_URL: f"{self.server_url}{device.mjpeg_image_url}&size=640x480",
-            CONF_STILL_IMAGE_URL: f"{self.server_url}{device.still_image_url}&size=640x480",
+            CONF_MJPEG_URL: f"{self.server_url}{device.mjpeg_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}",
+            CONF_STILL_IMAGE_URL: f"{self.server_url}{device.still_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}",
         }
         self.device = device
         self._removed = False

@@ -2,7 +2,7 @@
     "domain": "agent_dvr",
     "name": "Agent DVR",
     "documentation": "https://www.home-assistant.io/integrations/agent_dvr/",
-    "requirements": ["agent-py==0.0.20"],
+    "requirements": ["agent-py==0.0.23"],
     "config_flow": true,
     "codeowners": ["@ispysoftware"]
 }
homeassistant/components/agent_dvr/translations/cs.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "config": {
+        "step": {
+            "user": {
+                "data": {
+                    "host": "Hostitel",
+                    "port": "Port"
+                }
+            }
+        }
+    }
+}

@@ -10,7 +10,7 @@
         "step": {
             "user": {
                 "data": {
-                    "host": "H\u00f4te",
+                    "host": "Nom d'h\u00f4te ou adresse IP",
                     "port": "Port"
                 },
                 "title": "Configurer l'agent DVR"
homeassistant/components/airly/translations/cs.json (new file, 11 lines)
@@ -0,0 +1,11 @@
+{
+    "config": {
+        "step": {
+            "user": {
+                "data": {
+                    "api_key": "Kl\u00ed\u010d API"
+                }
+            }
+        }
+    }
+}

homeassistant/components/airvisual/translations/cs.json (new file, 16 lines)
@@ -0,0 +1,16 @@
+{
+    "config": {
+        "step": {
+            "geography": {
+                "data": {
+                    "api_key": "Kl\u00ed\u010d API"
+                }
+            },
+            "node_pro": {
+                "data": {
+                    "password": "Heslo"
+                }
+            }
+        }
+    }
+}

homeassistant/components/airvisual/translations/pt.json (new file, 11 lines)
@@ -0,0 +1,11 @@
+{
+    "config": {
+        "step": {
+            "node_pro": {
+                "data": {
+                    "password": "Palavra-passe"
+                }
+            }
+        }
+    }
+}
@@ -8,7 +8,7 @@ alarm_disarm:
       example: "alarm_control_panel.downstairs"
     code:
       description: An optional code to disarm the alarm control panel with.
-      example: 1234
+      example: "1234"

 alarm_arm_custom_bypass:
   description: Send arm custom bypass command.
@@ -18,7 +18,7 @@ alarm_arm_custom_bypass:
       example: "alarm_control_panel.downstairs"
     code:
       description: An optional code to arm custom bypass the alarm control panel with.
-      example: 1234
+      example: "1234"

 alarm_arm_home:
   description: Send the alarm the command for arm home.
@@ -28,7 +28,7 @@ alarm_arm_home:
       example: "alarm_control_panel.downstairs"
     code:
       description: An optional code to arm home the alarm control panel with.
-      example: 1234
+      example: "1234"

 alarm_arm_away:
   description: Send the alarm the command for arm away.
@@ -38,7 +38,7 @@ alarm_arm_away:
       example: "alarm_control_panel.downstairs"
     code:
       description: An optional code to arm away the alarm control panel with.
-      example: 1234
+      example: "1234"

 alarm_arm_night:
   description: Send the alarm the command for arm night.
@@ -48,7 +48,7 @@ alarm_arm_night:
       example: "alarm_control_panel.downstairs"
     code:
       description: An optional code to arm night the alarm control panel with.
-      example: 1234
+      example: "1234"

 alarm_trigger:
   description: Send the alarm the command for trigger.
@@ -58,4 +58,4 @@ alarm_trigger:
       example: "alarm_control_panel.downstairs"
     code:
       description: An optional code to trigger the alarm control panel with.
-      example: 1234
+      example: "1234"
@@ -18,7 +18,7 @@
                 "armed_away": "{entity_name} armada ausente",
                 "armed_home": "{entity_name} armada en casa",
                 "armed_night": "{entity_name} armada noche",
-                "disarmed": "{entity_name} desarmado",
+                "disarmed": "{entity_name} desarmada",
                 "triggered": "{entity_name} activado"
             }
         },
@@ -162,7 +162,7 @@ def setup(hass, config):
         if not restart:
             return
         restart = False
-        _LOGGER.warning("AlarmDecoder unexpectedly lost connection.")
+        _LOGGER.warning("AlarmDecoder unexpectedly lost connection")
         hass.add_job(open_connection)

     def handle_message(sender, message):
@@ -199,8 +199,8 @@ class Alert(ToggleEntity):
         self._send_done_message = False
         self.entity_id = f"{DOMAIN}.{entity_id}"

-        event.async_track_state_change(
-            hass, watched_entity_id, self.watched_entity_change
+        event.async_track_state_change_event(
+            hass, [watched_entity_id], self.watched_entity_change
         )

     @property
@@ -222,9 +222,12 @@ class Alert(ToggleEntity):
             return STATE_ON
         return STATE_IDLE

-    async def watched_entity_change(self, entity, from_state, to_state):
+    async def watched_entity_change(self, ev):
         """Determine if the alert should start or stop."""
-        _LOGGER.debug("Watched entity (%s) has changed", entity)
+        to_state = ev.data.get("new_state")
+        if to_state is None:
+            return
+        _LOGGER.debug("Watched entity (%s) has changed", ev.data.get("entity_id"))
         if to_state.state == self._alert_state and not self._firing:
             await self.begin_alerting()
         if to_state.state != self._alert_state and self._firing:
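Note: the alert integration switches from `async_track_state_change` to `async_track_state_change_event`, so the callback now receives a single event object instead of `(entity_id, old_state, new_state)` and must guard against a missing new state. A hedged sketch of the new callback shape; the entity ID and callback name below are illustrative:

# Sketch of the event-based state listener used above.
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers.event import async_track_state_change_event


@callback
def _state_changed(event: Event) -> None:
    """Handle a state_changed event; new_state can be None when the entity is removed."""
    new_state = event.data.get("new_state")
    if new_state is None:
        return
    print(event.data.get("entity_id"), new_state.state)


def attach(hass: HomeAssistant) -> None:
    """Subscribe to one entity; note the entity IDs are passed as a list now."""
    async_track_state_change_event(hass, ["light.kitchen"], _state_changed)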
@@ -70,11 +70,11 @@ class Auth:
         await self.async_load_preferences()

         if self.is_token_valid():
-            _LOGGER.debug("Token still valid, using it.")
+            _LOGGER.debug("Token still valid, using it")
             return self._prefs[STORAGE_ACCESS_TOKEN]

         if self._prefs[STORAGE_REFRESH_TOKEN] is None:
-            _LOGGER.debug("Token invalid and no refresh token available.")
+            _LOGGER.debug("Token invalid and no refresh token available")
             return None

         lwa_params = {
@@ -84,7 +84,7 @@ class Auth:
             CONF_CLIENT_SECRET: self.client_secret,
         }

-        _LOGGER.debug("Calling LWA to refresh the access token.")
+        _LOGGER.debug("Calling LWA to refresh the access token")
         return await self._async_request_new_token(lwa_params)

     @callback
@@ -113,14 +113,14 @@ class Auth:
                 )

         except (asyncio.TimeoutError, aiohttp.ClientError):
-            _LOGGER.error("Timeout calling LWA to get auth token.")
+            _LOGGER.error("Timeout calling LWA to get auth token")
             return None

         _LOGGER.debug("LWA response header: %s", response.headers)
         _LOGGER.debug("LWA response status: %s", response.status)

         if response.status != HTTP_OK:
-            _LOGGER.error("Error calling LWA to get auth token.")
+            _LOGGER.error("Error calling LWA to get auth token")
             return None

         response_json = await response.json()
@@ -590,9 +590,8 @@ class ScriptCapabilities(AlexaEntity):

     def interfaces(self):
         """Yield the supported interfaces."""
-        can_cancel = bool(self.entity.attributes.get("can_cancel"))
         return [
-            AlexaSceneController(self.entity, supports_deactivation=can_cancel),
+            AlexaSceneController(self.entity, supports_deactivation=True),
             Alexa(self.hass),
         ]

@@ -101,7 +101,7 @@ async def async_send_changereport_message(
         )

     except (asyncio.TimeoutError, aiohttp.ClientError):
-        _LOGGER.error("Timeout sending report to Alexa.")
+        _LOGGER.error("Timeout sending report to Alexa")
         return

     response_text = await response.text()
@@ -233,7 +233,7 @@ async def async_send_doorbell_event_message(hass, config, alexa_entity):
         )

     except (asyncio.TimeoutError, aiohttp.ClientError):
-        _LOGGER.error("Timeout sending report to Alexa.")
+        _LOGGER.error("Timeout sending report to Alexa")
         return

     response_text = await response.text()
@@ -3,5 +3,5 @@
     "name": "Amazon Polly",
     "documentation": "https://www.home-assistant.io/integrations/amazon_polly",
     "requirements": ["boto3==1.9.252"],
-    "codeowners": ["@robbiet480"]
+    "codeowners": []
 }
@@ -0,0 +1,11 @@
+{
+    "config": {
+        "step": {
+            "user": {
+                "data": {
+                    "api_key": "Kl\u00ed\u010d API"
+                }
+            }
+        }
+    }
+}
@@ -33,7 +33,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_s
 from homeassistant.helpers.event import track_time_interval
 from homeassistant.helpers.service import async_extract_entity_ids

-from .binary_sensor import BINARY_SENSORS
+from .binary_sensor import BINARY_POLLED_SENSORS, BINARY_SENSORS, check_binary_sensors
 from .camera import CAMERA_SERVICES, STREAM_SOURCE_LIST
 from .const import (
     CAMERAS,
@@ -98,7 +98,7 @@ AMCREST_SCHEMA = vol.Schema(
         vol.Optional(CONF_FFMPEG_ARGUMENTS, default=DEFAULT_ARGUMENTS): cv.string,
         vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period,
         vol.Optional(CONF_BINARY_SENSORS): vol.All(
-            cv.ensure_list, [vol.In(BINARY_SENSORS)], vol.Unique()
+            cv.ensure_list, [vol.In(BINARY_SENSORS)], vol.Unique(), check_binary_sensors
         ),
         vol.Optional(CONF_SENSORS): vol.All(
             cv.ensure_list, [vol.In(SENSORS)], vol.Unique()
@@ -271,7 +271,7 @@ def setup(hass, config):
         event_codes = [
             BINARY_SENSORS[sensor_type][SENSOR_EVENT_CODE]
             for sensor_type in binary_sensors
-            if BINARY_SENSORS[sensor_type][SENSOR_EVENT_CODE] is not None
+            if sensor_type not in BINARY_POLLED_SENSORS
         ]
         if event_codes:
             _start_event_monitor(hass, name, api, event_codes)
@@ -3,15 +3,18 @@ from datetime import timedelta
 import logging

 from amcrest import AmcrestError
+import voluptuous as vol

 from homeassistant.components.binary_sensor import (
     DEVICE_CLASS_CONNECTIVITY,
     DEVICE_CLASS_MOTION,
+    DEVICE_CLASS_SOUND,
     BinarySensorEntity,
 )
 from homeassistant.const import CONF_BINARY_SENSORS, CONF_NAME
 from homeassistant.core import callback
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
+from homeassistant.util import Throttle

 from .const import (
     BINARY_SENSOR_SCAN_INTERVAL_SECS,
@@ -28,25 +31,48 @@ from .helpers import log_update_error, service_signal
 _LOGGER = logging.getLogger(__name__)

 SCAN_INTERVAL = timedelta(seconds=BINARY_SENSOR_SCAN_INTERVAL_SECS)
+_ONLINE_SCAN_INTERVAL = timedelta(seconds=60 - BINARY_SENSOR_SCAN_INTERVAL_SECS)
+
+BINARY_SENSOR_AUDIO_DETECTED = "audio_detected"
+BINARY_SENSOR_AUDIO_DETECTED_POLLED = "audio_detected_polled"
 BINARY_SENSOR_MOTION_DETECTED = "motion_detected"
+BINARY_SENSOR_MOTION_DETECTED_POLLED = "motion_detected_polled"
 BINARY_SENSOR_ONLINE = "online"
+BINARY_POLLED_SENSORS = [
+    BINARY_SENSOR_AUDIO_DETECTED_POLLED,
+    BINARY_SENSOR_MOTION_DETECTED_POLLED,
+    BINARY_SENSOR_ONLINE,
+]
+_AUDIO_DETECTED_PARAMS = ("Audio Detected", DEVICE_CLASS_SOUND, "AudioMutation")
+_MOTION_DETECTED_PARAMS = ("Motion Detected", DEVICE_CLASS_MOTION, "VideoMotion")
 BINARY_SENSORS = {
-    BINARY_SENSOR_MOTION_DETECTED: (
-        "Motion Detected",
-        DEVICE_CLASS_MOTION,
-        "VideoMotion",
-    ),
+    BINARY_SENSOR_AUDIO_DETECTED: _AUDIO_DETECTED_PARAMS,
+    BINARY_SENSOR_AUDIO_DETECTED_POLLED: _AUDIO_DETECTED_PARAMS,
+    BINARY_SENSOR_MOTION_DETECTED: _MOTION_DETECTED_PARAMS,
+    BINARY_SENSOR_MOTION_DETECTED_POLLED: _MOTION_DETECTED_PARAMS,
     BINARY_SENSOR_ONLINE: ("Online", DEVICE_CLASS_CONNECTIVITY, None),
 }
 BINARY_SENSORS = {
     k: dict(zip((SENSOR_NAME, SENSOR_DEVICE_CLASS, SENSOR_EVENT_CODE), v))
     for k, v in BINARY_SENSORS.items()
 }
+_EXCLUSIVE_OPTIONS = [
+    {BINARY_SENSOR_MOTION_DETECTED, BINARY_SENSOR_MOTION_DETECTED_POLLED},
+]

 _UPDATE_MSG = "Updating %s binary sensor"


+def check_binary_sensors(value):
+    """Validate binary sensor configurations."""
+    for exclusive_options in _EXCLUSIVE_OPTIONS:
+        if len(set(value) & exclusive_options) > 1:
+            raise vol.Invalid(
+                f"must contain at most one of {', '.join(exclusive_options)}."
+            )
+    return value
+
+
 async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
     """Set up a binary sensor for an Amcrest IP Camera."""
     if discovery_info is None:
@@ -80,7 +106,7 @@ class AmcrestBinarySensor(BinarySensorEntity):
     @property
     def should_poll(self):
         """Return True if entity has to be polled for state."""
-        return self._sensor_type == BINARY_SENSOR_ONLINE
+        return self._sensor_type in BINARY_POLLED_SENSORS

     @property
     def name(self):
@@ -109,6 +135,7 @@ class AmcrestBinarySensor(BinarySensorEntity):
         else:
             self._update_others()

+    @Throttle(_ONLINE_SCAN_INTERVAL)
     def _update_online(self):
         if not (self._api.available or self.is_on):
             return
@@ -137,6 +164,11 @@ class AmcrestBinarySensor(BinarySensorEntity):

     async def async_on_demand_update(self):
         """Update state."""
+        if self._sensor_type == BINARY_SENSOR_ONLINE:
+            _LOGGER.debug(_UPDATE_MSG, self._name)
+            self._state = self._api.available
+            self.async_write_ha_state()
+            return
         self.async_schedule_update_ha_state(True)

     @callback
@@ -155,7 +187,7 @@ class AmcrestBinarySensor(BinarySensorEntity):
                 self.async_on_demand_update,
             )
         )
-        if self._event_code:
+        if self._event_code and self._sensor_type not in BINARY_POLLED_SENSORS:
             self._unsub_dispatcher.append(
                 async_dispatcher_connect(
                     self.hass,

@@ -4,7 +4,7 @@ DATA_AMCREST = DOMAIN
 CAMERAS = "cameras"
 DEVICES = "devices"

-BINARY_SENSOR_SCAN_INTERVAL_SECS = 60
+BINARY_SENSOR_SCAN_INTERVAL_SECS = 5
 CAMERA_WEB_SESSION_TIMEOUT = 10
 COMM_RETRIES = 1
 COMM_TIMEOUT = 6.05
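Note: `check_binary_sensors` above rejects configurations that select both the event-driven and the polled flavour of the same sensor. A small standalone sketch of that voluptuous idea, with generic option names that are not part of the real integration:

# Sketch of a mutually-exclusive-options validator; option names are made up.
import voluptuous as vol

_EXCLUSIVE = [{"motion", "motion_polled"}, {"audio", "audio_polled"}]


def at_most_one_of(value):
    """Fail validation when more than one option from an exclusive set is chosen."""
    for exclusive in _EXCLUSIVE:
        if len(set(value) & exclusive) > 1:
            raise vol.Invalid(f"must contain at most one of {', '.join(sorted(exclusive))}")
    return value


SCHEMA = vol.Schema(vol.All([str], vol.Unique(), at_most_one_of))

# SCHEMA(["motion", "audio"]) passes; SCHEMA(["motion", "motion_polled"]) raises vol.Invalid.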
@@ -3,8 +3,8 @@
     "name": "Android TV",
     "documentation": "https://www.home-assistant.io/integrations/androidtv",
     "requirements": [
-        "adb-shell==0.1.3",
-        "androidtv==0.0.43",
+        "adb-shell[async]==0.2.0",
+        "androidtv[async]==0.0.45",
         "pure-python-adb==0.2.2.dev0"
     ],
     "codeowners": ["@JeffLIrion"]
@@ -5,15 +5,18 @@ import logging
 import os

 from adb_shell.auth.keygen import keygen
+from adb_shell.auth.sign_pythonrsa import PythonRSASigner
 from adb_shell.exceptions import (
+    AdbTimeoutError,
     InvalidChecksumError,
     InvalidCommandError,
     InvalidResponseError,
     TcpTimeoutException,
 )
-from androidtv import ha_state_detection_rules_validator, setup
+from androidtv import ha_state_detection_rules_validator
 from androidtv.constants import APPS, KEYS
 from androidtv.exceptions import LockNotAcquiredException
+from androidtv.setup_async import setup
 import voluptuous as vol

 from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
@@ -44,7 +47,7 @@ from homeassistant.const import (
     STATE_STANDBY,
 )
 from homeassistant.exceptions import PlatformNotReady
-import homeassistant.helpers.config_validation as cv
+from homeassistant.helpers import config_validation as cv, entity_platform
 from homeassistant.helpers.storage import STORAGE_DIR

 ANDROIDTV_DOMAIN = "androidtv"
@@ -103,6 +106,7 @@ DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]

 SERVICE_ADB_COMMAND = "adb_command"
 SERVICE_DOWNLOAD = "download"
+SERVICE_LEARN_SENDEVENT = "learn_sendevent"
 SERVICE_UPLOAD = "upload"

 SERVICE_ADB_COMMAND_SCHEMA = vol.Schema(
@@ -161,7 +165,30 @@ ANDROIDTV_STATES = {
 }


-def setup_platform(hass, config, add_entities, discovery_info=None):
+def setup_androidtv(hass, config):
+    """Generate an ADB key (if needed) and load it."""
+    adbkey = config.get(CONF_ADBKEY, hass.config.path(STORAGE_DIR, "androidtv_adbkey"))
+    if CONF_ADB_SERVER_IP not in config:
+        # Use "adb_shell" (Python ADB implementation)
+        if not os.path.isfile(adbkey):
+            # Generate ADB key files
+            keygen(adbkey)
+
+        # Load the ADB key
+        with open(adbkey) as priv_key:
+            priv = priv_key.read()
+        signer = PythonRSASigner("", priv)
+        adb_log = f"using Python ADB implementation with adbkey='{adbkey}'"
+
+    else:
+        # Use "pure-python-adb" (communicate with ADB server)
+        signer = None
+        adb_log = f"using ADB server at {config[CONF_ADB_SERVER_IP]}:{config[CONF_ADB_SERVER_PORT]}"
+
+    return adbkey, signer, adb_log
+
+
+async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
     """Set up the Android TV / Fire TV platform."""
     hass.data.setdefault(ANDROIDTV_DOMAIN, {})

@@ -171,51 +198,21 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
         _LOGGER.warning("Platform already setup on %s, skipping", address)
         return

-    if CONF_ADB_SERVER_IP not in config:
-        # Use "adb_shell" (Python ADB implementation)
-        if CONF_ADBKEY not in config:
-            # Generate ADB key files (if they don't exist)
-            adbkey = hass.config.path(STORAGE_DIR, "androidtv_adbkey")
-            if not os.path.isfile(adbkey):
-                keygen(adbkey)
-
-            adb_log = f"using Python ADB implementation with adbkey='{adbkey}'"
-
-            aftv = setup(
-                config[CONF_HOST],
-                config[CONF_PORT],
-                adbkey,
-                device_class=config[CONF_DEVICE_CLASS],
-                state_detection_rules=config[CONF_STATE_DETECTION_RULES],
-                auth_timeout_s=10.0,
-            )
-
-        else:
-            adb_log = (
-                f"using Python ADB implementation with adbkey='{config[CONF_ADBKEY]}'"
-            )
-
-            aftv = setup(
-                config[CONF_HOST],
-                config[CONF_PORT],
-                config[CONF_ADBKEY],
-                device_class=config[CONF_DEVICE_CLASS],
-                state_detection_rules=config[CONF_STATE_DETECTION_RULES],
-                auth_timeout_s=10.0,
-            )
-
-    else:
-        # Use "pure-python-adb" (communicate with ADB server)
-        adb_log = f"using ADB server at {config[CONF_ADB_SERVER_IP]}:{config[CONF_ADB_SERVER_PORT]}"
-
-        aftv = setup(
-            config[CONF_HOST],
-            config[CONF_PORT],
-            adb_server_ip=config[CONF_ADB_SERVER_IP],
-            adb_server_port=config[CONF_ADB_SERVER_PORT],
-            device_class=config[CONF_DEVICE_CLASS],
-            state_detection_rules=config[CONF_STATE_DETECTION_RULES],
-        )
+    adbkey, signer, adb_log = await hass.async_add_executor_job(
+        setup_androidtv, hass, config
+    )
+
+    aftv = await setup(
+        config[CONF_HOST],
+        config[CONF_PORT],
+        adbkey,
+        config.get(CONF_ADB_SERVER_IP, ""),
+        config[CONF_ADB_SERVER_PORT],
+        config[CONF_STATE_DETECTION_RULES],
+        config[CONF_DEVICE_CLASS],
+        10.0,
+        signer,
+    )

     if not aftv.available:
         # Determine the name that will be used for the device in the log
@@ -251,14 +248,16 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
|||||||
device = FireTVDevice(*device_args)
|
device = FireTVDevice(*device_args)
|
||||||
device_name = config.get(CONF_NAME, "Fire TV")
|
device_name = config.get(CONF_NAME, "Fire TV")
|
||||||
|
|
||||||
add_entities([device])
|
async_add_entities([device])
|
||||||
_LOGGER.debug("Setup %s at %s %s", device_name, address, adb_log)
|
_LOGGER.debug("Setup %s at %s %s", device_name, address, adb_log)
|
||||||
hass.data[ANDROIDTV_DOMAIN][address] = device
|
hass.data[ANDROIDTV_DOMAIN][address] = device
|
||||||
|
|
||||||
if hass.services.has_service(ANDROIDTV_DOMAIN, SERVICE_ADB_COMMAND):
|
if hass.services.has_service(ANDROIDTV_DOMAIN, SERVICE_ADB_COMMAND):
|
||||||
return
|
return
|
||||||
|
|
||||||
def service_adb_command(service):
|
platform = entity_platform.current_platform.get()
|
||||||
|
|
||||||
|
async def service_adb_command(service):
|
||||||
"""Dispatch service calls to target entities."""
|
"""Dispatch service calls to target entities."""
|
||||||
cmd = service.data[ATTR_COMMAND]
|
cmd = service.data[ATTR_COMMAND]
|
||||||
entity_id = service.data[ATTR_ENTITY_ID]
|
entity_id = service.data[ATTR_ENTITY_ID]
|
||||||
@ -269,7 +268,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
|||||||
]
|
]
|
||||||
|
|
||||||
for target_device in target_devices:
|
for target_device in target_devices:
|
||||||
output = target_device.adb_command(cmd)
|
output = await target_device.adb_command(cmd)
|
||||||
|
|
||||||
# log the output, if there is any
|
# log the output, if there is any
|
||||||
if output:
|
if output:
|
||||||
@ -280,14 +279,18 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
|||||||
output,
|
output,
|
||||||
)
|
)
|
||||||
|
|
||||||
hass.services.register(
|
hass.services.async_register(
|
||||||
ANDROIDTV_DOMAIN,
|
ANDROIDTV_DOMAIN,
|
||||||
SERVICE_ADB_COMMAND,
|
SERVICE_ADB_COMMAND,
|
||||||
service_adb_command,
|
service_adb_command,
|
||||||
schema=SERVICE_ADB_COMMAND_SCHEMA,
|
schema=SERVICE_ADB_COMMAND_SCHEMA,
|
||||||
)
|
)
|
||||||
|
|
||||||
def service_download(service):
|
platform.async_register_entity_service(
|
||||||
|
SERVICE_LEARN_SENDEVENT, {}, "learn_sendevent"
|
||||||
|
)
|
||||||
|
|
||||||
|
async def service_download(service):
|
||||||
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
|
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
|
||||||
local_path = service.data[ATTR_LOCAL_PATH]
|
local_path = service.data[ATTR_LOCAL_PATH]
|
||||||
if not hass.config.is_allowed_path(local_path):
|
if not hass.config.is_allowed_path(local_path):
|
||||||
@ -302,16 +305,16 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
|||||||
if dev.entity_id in entity_id
|
if dev.entity_id in entity_id
|
||||||
][0]
|
][0]
|
||||||
|
|
||||||
target_device.adb_pull(local_path, device_path)
|
await target_device.adb_pull(local_path, device_path)
|
||||||
|
|
||||||
hass.services.register(
|
hass.services.async_register(
|
||||||
ANDROIDTV_DOMAIN,
|
ANDROIDTV_DOMAIN,
|
||||||
SERVICE_DOWNLOAD,
|
SERVICE_DOWNLOAD,
|
||||||
service_download,
|
service_download,
|
||||||
schema=SERVICE_DOWNLOAD_SCHEMA,
|
schema=SERVICE_DOWNLOAD_SCHEMA,
|
||||||
)
|
)
|
||||||
|
|
||||||
def service_upload(service):
|
async def service_upload(service):
|
||||||
"""Upload a file from your Home Assistant instance to an Android TV / Fire TV device."""
|
"""Upload a file from your Home Assistant instance to an Android TV / Fire TV device."""
|
||||||
local_path = service.data[ATTR_LOCAL_PATH]
|
local_path = service.data[ATTR_LOCAL_PATH]
|
||||||
if not hass.config.is_allowed_path(local_path):
|
if not hass.config.is_allowed_path(local_path):
|
||||||
@ -327,9 +330,9 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
|||||||
]
|
]
|
||||||
|
|
||||||
for target_device in target_devices:
|
for target_device in target_devices:
|
||||||
target_device.adb_push(local_path, device_path)
|
await target_device.adb_push(local_path, device_path)
|
||||||
|
|
||||||
hass.services.register(
|
hass.services.async_register(
|
||||||
ANDROIDTV_DOMAIN, SERVICE_UPLOAD, service_upload, schema=SERVICE_UPLOAD_SCHEMA
|
ANDROIDTV_DOMAIN, SERVICE_UPLOAD, service_upload, schema=SERVICE_UPLOAD_SCHEMA
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -345,13 +348,13 @@ def adb_decorator(override_available=False):
|
|||||||
"""Wrap the provided ADB method and catch exceptions."""
|
"""Wrap the provided ADB method and catch exceptions."""
|
||||||
|
|
||||||
@functools.wraps(func)
|
@functools.wraps(func)
|
||||||
def _adb_exception_catcher(self, *args, **kwargs):
|
async def _adb_exception_catcher(self, *args, **kwargs):
|
||||||
"""Call an ADB-related method and catch exceptions."""
|
"""Call an ADB-related method and catch exceptions."""
|
||||||
if not self.available and not override_available:
|
if not self.available and not override_available:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return func(self, *args, **kwargs)
|
return await func(self, *args, **kwargs)
|
||||||
except LockNotAcquiredException:
|
except LockNotAcquiredException:
|
||||||
# If the ADB lock could not be acquired, skip this command
|
# If the ADB lock could not be acquired, skip this command
|
||||||
_LOGGER.info(
|
_LOGGER.info(
|
||||||
@ -364,7 +367,7 @@ def adb_decorator(override_available=False):
|
|||||||
"establishing attempt in the next update. Error: %s",
|
"establishing attempt in the next update. Error: %s",
|
||||||
err,
|
err,
|
||||||
)
|
)
|
||||||
self.aftv.adb_close()
|
await self.aftv.adb_close()
|
||||||
self._available = False # pylint: disable=protected-access
|
self._available = False # pylint: disable=protected-access
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@ -411,6 +414,7 @@ class ADBDevice(MediaPlayerEntity):
|
|||||||
if not self.aftv.adb_server_ip:
|
if not self.aftv.adb_server_ip:
|
||||||
# Using "adb_shell" (Python ADB implementation)
|
# Using "adb_shell" (Python ADB implementation)
|
||||||
self.exceptions = (
|
self.exceptions = (
|
||||||
|
AdbTimeoutError,
|
||||||
AttributeError,
|
AttributeError,
|
||||||
BrokenPipeError,
|
BrokenPipeError,
|
||||||
ConnectionResetError,
|
ConnectionResetError,
|
||||||
@ -487,64 +491,60 @@ class ADBDevice(MediaPlayerEntity):
|
|||||||
"""Return the device unique id."""
|
"""Return the device unique id."""
|
||||||
return self._unique_id
|
return self._unique_id
|
||||||
|
|
||||||
|
@adb_decorator()
|
||||||
async def async_get_media_image(self):
|
async def async_get_media_image(self):
|
||||||
"""Fetch current playing image."""
|
"""Fetch current playing image."""
|
||||||
if not self._screencap or self.state in [STATE_OFF, None] or not self.available:
|
if not self._screencap or self.state in [STATE_OFF, None] or not self.available:
|
||||||
return None, None
|
return None, None
|
||||||
|
|
||||||
media_data = await self.hass.async_add_executor_job(self.get_raw_media_data)
|
media_data = await self.aftv.adb_screencap()
|
||||||
if media_data:
|
if media_data:
|
||||||
return media_data, "image/png"
|
return media_data, "image/png"
|
||||||
return None, None
|
return None, None
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def get_raw_media_data(self):
|
async def async_media_play(self):
|
||||||
"""Raw image data."""
|
|
||||||
return self.aftv.adb_screencap()
|
|
||||||
|
|
||||||
@adb_decorator()
|
|
||||||
def media_play(self):
|
|
||||||
"""Send play command."""
|
"""Send play command."""
|
||||||
self.aftv.media_play()
|
await self.aftv.media_play()
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def media_pause(self):
|
async def async_media_pause(self):
|
||||||
"""Send pause command."""
|
"""Send pause command."""
|
||||||
self.aftv.media_pause()
|
await self.aftv.media_pause()
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def media_play_pause(self):
|
async def async_media_play_pause(self):
|
||||||
"""Send play/pause command."""
|
"""Send play/pause command."""
|
||||||
self.aftv.media_play_pause()
|
await self.aftv.media_play_pause()
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def turn_on(self):
|
async def async_turn_on(self):
|
||||||
"""Turn on the device."""
|
"""Turn on the device."""
|
||||||
if self.turn_on_command:
|
if self.turn_on_command:
|
||||||
self.aftv.adb_shell(self.turn_on_command)
|
await self.aftv.adb_shell(self.turn_on_command)
|
||||||
else:
|
else:
|
||||||
self.aftv.turn_on()
|
await self.aftv.turn_on()
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def turn_off(self):
|
async def async_turn_off(self):
|
||||||
"""Turn off the device."""
|
"""Turn off the device."""
|
||||||
if self.turn_off_command:
|
if self.turn_off_command:
|
||||||
self.aftv.adb_shell(self.turn_off_command)
|
await self.aftv.adb_shell(self.turn_off_command)
|
||||||
else:
|
else:
|
||||||
self.aftv.turn_off()
|
await self.aftv.turn_off()
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def media_previous_track(self):
|
async def async_media_previous_track(self):
|
||||||
"""Send previous track command (results in rewind)."""
|
"""Send previous track command (results in rewind)."""
|
||||||
self.aftv.media_previous_track()
|
await self.aftv.media_previous_track()
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def media_next_track(self):
|
async def async_media_next_track(self):
|
||||||
"""Send next track command (results in fast-forward)."""
|
"""Send next track command (results in fast-forward)."""
|
||||||
self.aftv.media_next_track()
|
await self.aftv.media_next_track()
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def select_source(self, source):
|
async def async_select_source(self, source):
|
||||||
"""Select input source.
|
"""Select input source.
|
||||||
|
|
||||||
If the source starts with a '!', then it will close the app instead of
|
If the source starts with a '!', then it will close the app instead of
|
||||||
@ -552,50 +552,58 @@ class ADBDevice(MediaPlayerEntity):
|
|||||||
"""
|
"""
|
||||||
if isinstance(source, str):
|
if isinstance(source, str):
|
||||||
if not source.startswith("!"):
|
if not source.startswith("!"):
|
||||||
self.aftv.launch_app(self._app_name_to_id.get(source, source))
|
await self.aftv.launch_app(self._app_name_to_id.get(source, source))
|
||||||
else:
|
else:
|
||||||
source_ = source[1:].lstrip()
|
source_ = source[1:].lstrip()
|
||||||
self.aftv.stop_app(self._app_name_to_id.get(source_, source_))
|
await self.aftv.stop_app(self._app_name_to_id.get(source_, source_))
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def adb_command(self, cmd):
|
async def adb_command(self, cmd):
|
||||||
"""Send an ADB command to an Android TV / Fire TV device."""
|
"""Send an ADB command to an Android TV / Fire TV device."""
|
||||||
key = self._keys.get(cmd)
|
key = self._keys.get(cmd)
|
||||||
if key:
|
if key:
|
||||||
self.aftv.adb_shell(f"input keyevent {key}")
|
await self.aftv.adb_shell(f"input keyevent {key}")
|
||||||
self._adb_response = None
|
|
||||||
self.schedule_update_ha_state()
|
|
||||||
return
|
return
|
||||||
|
|
||||||
if cmd == "GET_PROPERTIES":
|
if cmd == "GET_PROPERTIES":
|
||||||
self._adb_response = str(self.aftv.get_properties_dict())
|
self._adb_response = str(await self.aftv.get_properties_dict())
|
||||||
self.schedule_update_ha_state()
|
self.async_write_ha_state()
|
||||||
return self._adb_response
|
return self._adb_response
|
||||||
|
|
||||||
try:
|
try:
|
||||||
response = self.aftv.adb_shell(cmd)
|
response = await self.aftv.adb_shell(cmd)
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
self._adb_response = None
|
|
||||||
self.schedule_update_ha_state()
|
|
||||||
return
|
return
|
||||||
|
|
||||||
if isinstance(response, str) and response.strip():
|
if isinstance(response, str) and response.strip():
|
||||||
self._adb_response = response.strip()
|
self._adb_response = response.strip()
|
||||||
else:
|
self.async_write_ha_state()
|
||||||
self._adb_response = None
|
|
||||||
|
|
||||||
self.schedule_update_ha_state()
|
|
||||||
return self._adb_response
|
return self._adb_response
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def adb_pull(self, local_path, device_path):
|
async def learn_sendevent(self):
|
||||||
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
|
"""Translate a key press on a remote to ADB 'sendevent' commands."""
|
||||||
self.aftv.adb_pull(local_path, device_path)
|
output = await self.aftv.learn_sendevent()
|
||||||
|
if output:
|
||||||
|
self._adb_response = output
|
||||||
|
self.async_write_ha_state()
|
||||||
|
|
||||||
|
msg = f"Output from service '{SERVICE_LEARN_SENDEVENT}' from {self.entity_id}: '{output}'"
|
||||||
|
self.hass.components.persistent_notification.async_create(
|
||||||
|
msg, title="Android TV",
|
||||||
|
)
|
||||||
|
_LOGGER.info("%s", msg)
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def adb_push(self, local_path, device_path):
|
async def adb_pull(self, local_path, device_path):
|
||||||
|
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
|
||||||
|
await self.aftv.adb_pull(local_path, device_path)
|
||||||
|
|
||||||
|
@adb_decorator()
|
||||||
|
async def adb_push(self, local_path, device_path):
|
||||||
"""Upload a file from your Home Assistant instance to an Android TV / Fire TV device."""
|
"""Upload a file from your Home Assistant instance to an Android TV / Fire TV device."""
|
||||||
self.aftv.adb_push(local_path, device_path)
|
await self.aftv.adb_push(local_path, device_path)
|
||||||
|
|
||||||
|
|
||||||
class AndroidTVDevice(ADBDevice):
|
class AndroidTVDevice(ADBDevice):
|
||||||
@ -628,17 +636,12 @@ class AndroidTVDevice(ADBDevice):
|
|||||||
self._volume_level = None
|
self._volume_level = None
|
||||||
|
|
||||||
@adb_decorator(override_available=True)
|
@adb_decorator(override_available=True)
|
||||||
def update(self):
|
async def async_update(self):
|
||||||
"""Update the device state and, if necessary, re-connect."""
|
"""Update the device state and, if necessary, re-connect."""
|
||||||
# Check if device is disconnected.
|
# Check if device is disconnected.
|
||||||
if not self._available:
|
if not self._available:
|
||||||
# Try to connect
|
# Try to connect
|
||||||
self._available = self.aftv.adb_connect(always_log_errors=False)
|
self._available = await self.aftv.adb_connect(always_log_errors=False)
|
||||||
|
|
||||||
# To be safe, wait until the next update to run ADB commands if
|
|
||||||
# using the Python ADB implementation.
|
|
||||||
if not self.aftv.adb_server_ip:
|
|
||||||
return
|
|
||||||
|
|
||||||
# If the ADB connection is not intact, don't update.
|
# If the ADB connection is not intact, don't update.
|
||||||
if not self._available:
|
if not self._available:
|
||||||
@ -652,7 +655,7 @@ class AndroidTVDevice(ADBDevice):
|
|||||||
_,
|
_,
|
||||||
self._is_volume_muted,
|
self._is_volume_muted,
|
||||||
self._volume_level,
|
self._volume_level,
|
||||||
) = self.aftv.update(self._get_sources)
|
) = await self.aftv.update(self._get_sources)
|
||||||
|
|
||||||
self._state = ANDROIDTV_STATES.get(state)
|
self._state = ANDROIDTV_STATES.get(state)
|
||||||
if self._state is None:
|
if self._state is None:
|
||||||
@ -685,53 +688,50 @@ class AndroidTVDevice(ADBDevice):
|
|||||||
return self._volume_level
|
return self._volume_level
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def media_stop(self):
|
async def async_media_stop(self):
|
||||||
"""Send stop command."""
|
"""Send stop command."""
|
||||||
self.aftv.media_stop()
|
await self.aftv.media_stop()
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def mute_volume(self, mute):
|
async def async_mute_volume(self, mute):
|
||||||
"""Mute the volume."""
|
"""Mute the volume."""
|
||||||
self.aftv.mute_volume()
|
await self.aftv.mute_volume()
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def set_volume_level(self, volume):
|
async def async_set_volume_level(self, volume):
|
||||||
"""Set the volume level."""
|
"""Set the volume level."""
|
||||||
self.aftv.set_volume_level(volume)
|
await self.aftv.set_volume_level(volume)
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def volume_down(self):
|
async def async_volume_down(self):
|
||||||
"""Send volume down command."""
|
"""Send volume down command."""
|
||||||
self._volume_level = self.aftv.volume_down(self._volume_level)
|
self._volume_level = await self.aftv.volume_down(self._volume_level)
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def volume_up(self):
|
async def async_volume_up(self):
|
||||||
"""Send volume up command."""
|
"""Send volume up command."""
|
||||||
self._volume_level = self.aftv.volume_up(self._volume_level)
|
self._volume_level = await self.aftv.volume_up(self._volume_level)
|
||||||
|
|
||||||
|
|
||||||
class FireTVDevice(ADBDevice):
|
class FireTVDevice(ADBDevice):
|
||||||
"""Representation of a Fire TV device."""
|
"""Representation of a Fire TV device."""
|
||||||
|
|
||||||
@adb_decorator(override_available=True)
|
@adb_decorator(override_available=True)
|
||||||
def update(self):
|
async def async_update(self):
|
||||||
"""Update the device state and, if necessary, re-connect."""
|
"""Update the device state and, if necessary, re-connect."""
|
||||||
# Check if device is disconnected.
|
# Check if device is disconnected.
|
||||||
if not self._available:
|
if not self._available:
|
||||||
# Try to connect
|
# Try to connect
|
||||||
self._available = self.aftv.adb_connect(always_log_errors=False)
|
self._available = await self.aftv.adb_connect(always_log_errors=False)
|
||||||
|
|
||||||
# To be safe, wait until the next update to run ADB commands if
|
|
||||||
# using the Python ADB implementation.
|
|
||||||
if not self.aftv.adb_server_ip:
|
|
||||||
return
|
|
||||||
|
|
||||||
# If the ADB connection is not intact, don't update.
|
# If the ADB connection is not intact, don't update.
|
||||||
if not self._available:
|
if not self._available:
|
||||||
return
|
return
|
||||||
|
|
||||||
# Get the `state`, `current_app`, and `running_apps`.
|
# Get the `state`, `current_app`, and `running_apps`.
|
||||||
state, self._current_app, running_apps = self.aftv.update(self._get_sources)
|
state, self._current_app, running_apps = await self.aftv.update(
|
||||||
|
self._get_sources
|
||||||
|
)
|
||||||
|
|
||||||
self._state = ANDROIDTV_STATES.get(state)
|
self._state = ANDROIDTV_STATES.get(state)
|
||||||
if self._state is None:
|
if self._state is None:
|
||||||
@ -754,6 +754,6 @@ class FireTVDevice(ADBDevice):
|
|||||||
return SUPPORT_FIRETV
|
return SUPPORT_FIRETV
|
||||||
|
|
||||||
@adb_decorator()
|
@adb_decorator()
|
||||||
def media_stop(self):
|
async def async_media_stop(self):
|
||||||
"""Send stop (back) command."""
|
"""Send stop (back) command."""
|
||||||
self.aftv.back()
|
await self.aftv.back()
|
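The conversion above routes every ADB call through an async version of adb_decorator. What follows is a minimal, self-contained sketch of that decorator pattern, not the integration's actual classes; all names here (catch_errors, FakeDevice) are illustrative assumptions. It shows the shape of the change: the wrapper is now a coroutine, awaits the wrapped call, and marks the device unavailable when the connection drops.

# Sketch only: an async exception-catching method decorator.
import asyncio
import functools


def catch_errors(func):
    """Wrap an async method, skip it when unavailable, and trap connection errors."""

    @functools.wraps(func)
    async def wrapper(self, *args, **kwargs):
        if not self.available:
            return None
        try:
            return await func(self, *args, **kwargs)
        except ConnectionError as err:
            # In the real integration this would log and flag the entity.
            print(f"connection lost: {err}")
            self.available = False
            return None

    return wrapper


class FakeDevice:
    """Stand-in for an entity whose ADB calls are coroutines."""

    def __init__(self):
        self.available = True

    @catch_errors
    async def turn_on(self):
        await asyncio.sleep(0)  # pretend to await an ADB shell call
        return "ok"


async def main():
    device = FakeDevice()
    print(await device.turn_on())  # -> "ok"


asyncio.run(main())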
@@ -33,3 +33,9 @@ upload:
     local_path:
       description: The filepath on your Home Assistant instance.
       example: "/config/www/example.txt"
+learn_sendevent:
+  description: Translate a key press on a remote into ADB 'sendevent' commands. You must press one button on the remote within 8 seconds of calling this service.
+  fields:
+    entity_id:
+      description: Name(s) of Android TV / Fire TV entities.
+      example: "media_player.android_tv_living_room"
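For reference, a hedged sketch of how the new learn_sendevent service could be invoked from custom Python code that already holds a Home Assistant `hass` instance (for example inside a custom component); the entity id is taken from the example above and the helper function name is an assumption.

# Sketch only: calling the new service programmatically.
async def call_learn_sendevent(hass):
    """Ask an Android TV / Fire TV entity to capture a remote key press."""
    await hass.services.async_call(
        "androidtv",
        "learn_sendevent",
        {"entity_id": "media_player.android_tv_living_room"},
        blocking=True,
    )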
@@ -2,6 +2,6 @@
   "domain": "apache_kafka",
   "name": "Apache Kafka",
   "documentation": "https://www.home-assistant.io/integrations/apache_kafka",
-  "requirements": ["aiokafka==0.5.1"],
+  "requirements": ["aiokafka==0.6.0"],
   "codeowners": ["@bachya"]
 }
@@ -48,7 +48,7 @@ def setup(hass, config):
     try:
         apcups_data.update(no_throttle=True)
     except Exception:  # pylint: disable=broad-except
-        _LOGGER.exception("Failure while testing APCUPSd status retrieval.")
+        _LOGGER.exception("Failure while testing APCUPSd status retrieval")
         return False
     return True

@@ -36,13 +36,13 @@ SENSOR_TYPES = {
     "battv": ["Battery Voltage", VOLT, "mdi:flash"],
     "bcharge": ["Battery", UNIT_PERCENTAGE, "mdi:battery"],
     "cable": ["Cable Type", "", "mdi:ethernet-cable"],
-    "cumonbatt": ["Total Time on Battery", "", "mdi:timer"],
+    "cumonbatt": ["Total Time on Battery", "", "mdi:timer-outline"],
     "date": ["Status Date", "", "mdi:calendar-clock"],
     "dipsw": ["Dip Switch Settings", "", "mdi:information-outline"],
     "dlowbatt": ["Low Battery Signal", "", "mdi:clock-alert"],
     "driver": ["Driver", "", "mdi:information-outline"],
-    "dshutd": ["Shutdown Delay", "", "mdi:timer"],
-    "dwake": ["Wake Delay", "", "mdi:timer"],
+    "dshutd": ["Shutdown Delay", "", "mdi:timer-outline"],
+    "dwake": ["Wake Delay", "", "mdi:timer-outline"],
     "endapc": ["Date and Time", "", "mdi:calendar-clock"],
     "extbatts": ["External Batteries", "", "mdi:information-outline"],
     "firmware": ["Firmware Version", "", "mdi:information-outline"],
@@ -60,10 +60,10 @@ SENSOR_TYPES = {
     "mandate": ["Manufacture Date", "", "mdi:calendar"],
     "masterupd": ["Master Update", "", "mdi:information-outline"],
     "maxlinev": ["Input Voltage High", VOLT, "mdi:flash"],
-    "maxtime": ["Battery Timeout", "", "mdi:timer-off"],
+    "maxtime": ["Battery Timeout", "", "mdi:timer-off-outline"],
     "mbattchg": ["Battery Shutdown", UNIT_PERCENTAGE, "mdi:battery-alert"],
     "minlinev": ["Input Voltage Low", VOLT, "mdi:flash"],
-    "mintimel": ["Shutdown Time", "", "mdi:timer"],
+    "mintimel": ["Shutdown Time", "", "mdi:timer-outline"],
     "model": ["Model", "", "mdi:information-outline"],
     "nombattv": ["Battery Nominal Voltage", VOLT, "mdi:flash"],
     "nominv": ["Nominal Input Voltage", VOLT, "mdi:flash"],
@@ -85,7 +85,7 @@ SENSOR_TYPES = {
     "status": ["Status", "", "mdi:information-outline"],
     "stesti": ["Self Test Interval", "", "mdi:information-outline"],
     "timeleft": ["Time Left", "", "mdi:clock-alert"],
-    "tonbatt": ["Time on Battery", "", "mdi:timer"],
+    "tonbatt": ["Time on Battery", "", "mdi:timer-outline"],
     "upsmode": ["Mode", "", "mdi:information-outline"],
     "upsname": ["Name", "", "mdi:information-outline"],
     "version": ["Daemon Info", "", "mdi:information-outline"],
@@ -97,7 +97,7 @@ def setup_scanner(hass, config, see, discovery_info=None):
     hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, aprs_disconnect)

     if not aprs_listener.start_event.wait(timeout):
-        _LOGGER.error("Timeout waiting for APRS to connect.")
+        _LOGGER.error("Timeout waiting for APRS to connect")
         return

     if not aprs_listener.start_success:
@@ -141,7 +141,7 @@ class AprsListenerThread(threading.Thread):

         try:
             _LOGGER.info(
-                "Opening connection to %s with callsign %s.", self.host, self.callsign
+                "Opening connection to %s with callsign %s", self.host, self.callsign
             )
             self.ais.connect()
             self.start_complete(
@@ -152,7 +152,7 @@ class AprsListenerThread(threading.Thread):
             self.start_complete(False, str(err))
         except OSError:
             _LOGGER.info(
-                "Closing connection to %s with callsign %s.", self.host, self.callsign
+                "Closing connection to %s with callsign %s", self.host, self.callsign
             )

     def stop(self):
homeassistant/components/arcam_fmj/translations/cs.json (new file, 13 lines)
@@ -0,0 +1,13 @@
+{
+    "config": {
+        "step": {
+            "user": {
+                "data": {
+                    "host": "Hostitel",
+                    "port": "Port"
+                },
+                "description": "Zadejte n\u00e1zev hostitele nebo IP adresu za\u0159\u00edzen\u00ed."
+            }
+        }
+    }
+}
@@ -9,7 +9,11 @@
             "one": "Vide",
             "other": "Vide"
         },
+        "flow_title": "Arcam FMJ sur {host}",
         "step": {
+            "confirm": {
+                "description": "Voulez-vous ajouter Arcam FMJ sur ` {host} ` \u00e0 HomeAssistant ?"
+            },
             "user": {
                 "data": {
                     "host": "H\u00f4te",
@@ -1,11 +1,16 @@
 {
     "config": {
+        "error": {
+            "one": "uma",
+            "other": "mais"
+        },
         "step": {
             "user": {
                 "data": {
                     "host": "Servidor",
                     "port": "Porto"
-                }
+                },
+                "description": "Por favor, introduza o nome ou o endere\u00e7o IP do dispositivo."
             }
         }
     }
 }
@@ -15,7 +15,7 @@
                     "host": "\u0425\u043e\u0441\u0442",
                     "port": "\u041f\u043e\u0440\u0442"
                 },
-                "description": "\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0438\u043c\u044f \u0445\u043e\u0441\u0442\u0430 \u0438\u043b\u0438 IP-\u0430\u0434\u0440\u0435\u0441 \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430."
+                "description": "\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0434\u043e\u043c\u0435\u043d\u043d\u043e\u0435 \u0438\u043c\u044f \u0438\u043b\u0438 IP-\u0430\u0434\u0440\u0435\u0441 \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430."
             }
         }
     },
@@ -59,7 +59,7 @@ def setup(hass, config):
     if arlo_base_station is not None:
         arlo_base_station.refresh_rate = scan_interval.total_seconds()
     elif not arlo.cameras:
-        _LOGGER.error("No Arlo camera or base station available.")
+        _LOGGER.error("No Arlo camera or base station available")
         return False

     hass.data[DATA_ARLO] = arlo
@@ -103,7 +103,7 @@ async def async_setup(hass, config, retry_delay=FIRST_RETRY_TIME):
         return True

     if not api.is_connected:
-        _LOGGER.error("Error connecting %s to %s.", DOMAIN, conf[CONF_HOST])
+        _LOGGER.error("Error connecting %s to %s", DOMAIN, conf[CONF_HOST])
         return False

     hass.data[DATA_ASUSWRT] = api
@@ -54,7 +54,7 @@ class AsusWrtDeviceScanner(DeviceScanner):
             self.last_results = await self.connection.async_get_connected_devices()
             if self._connect_error:
                 self._connect_error = False
-                _LOGGER.error("Reconnected to ASUS router for device update")
+                _LOGGER.info("Reconnected to ASUS router for device update")

         except OSError as err:
             if not self._connect_error:
@@ -2,6 +2,6 @@
   "domain": "asuswrt",
   "name": "ASUSWRT",
   "documentation": "https://www.home-assistant.io/integrations/asuswrt",
-  "requirements": ["aioasuswrt==1.2.6"],
+  "requirements": ["aioasuswrt==1.2.7"],
   "codeowners": ["@kennedyshead"]
 }
@@ -69,9 +69,7 @@ class AsuswrtSensor(Entity):
             self._speed = await self._api.async_get_current_transfer_rates()
             if self._connect_error:
                 self._connect_error = False
-                _LOGGER.error(
-                    "Reconnected to ASUS router for %s update", self.entity_id
-                )
+                _LOGGER.info("Reconnected to ASUS router for %s update", self.entity_id)
         except OSError as err:
             if not self._connect_error:
                 self._connect_error = True
homeassistant/components/atag/translations/cs.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "config": {
+        "step": {
+            "user": {
+                "data": {
+                    "host": "Hostitel",
+                    "port": "Port"
+                }
+            }
+        }
+    }
+}
@@ -11,7 +11,7 @@
             "user": {
                 "data": {
                     "email": "Courriel (facultatif)",
-                    "host": "H\u00f4te",
+                    "host": "Nom d'h\u00f4te ou adresse IP",
                     "port": "Port (10000)"
                 },
                 "title": "Se connecter \u00e0 l'appareil"
@@ -3,7 +3,9 @@
         "step": {
             "user": {
                 "data": {
-                    "host": "Servidor"
+                    "email": "E-mail (opcional)",
+                    "host": "Servidor",
+                    "port": "Porta"
                 }
             }
         }
     }
@@ -143,7 +143,7 @@ class AtomeData:
             values = self.atome_client.get_consumption(DAILY_TYPE)
             self._day_usage = values["total"] / 1000
             self._day_price = values["price"]
-            _LOGGER.debug("Updating Atome daily data. Got: %d.", self._day_usage)
+            _LOGGER.debug("Updating Atome daily data. Got: %d", self._day_usage)

         except KeyError as error:
             _LOGGER.error("Missing last value in values: %s: %s", values, error)
@@ -165,7 +165,7 @@ class AtomeData:
             values = self.atome_client.get_consumption(WEEKLY_TYPE)
             self._week_usage = values["total"] / 1000
             self._week_price = values["price"]
-            _LOGGER.debug("Updating Atome weekly data. Got: %d.", self._week_usage)
+            _LOGGER.debug("Updating Atome weekly data. Got: %d", self._week_usage)

         except KeyError as error:
             _LOGGER.error("Missing last value in values: %s: %s", values, error)
@@ -187,7 +187,7 @@ class AtomeData:
             values = self.atome_client.get_consumption(MONTHLY_TYPE)
             self._month_usage = values["total"] / 1000
             self._month_price = values["price"]
-            _LOGGER.debug("Updating Atome monthly data. Got: %d.", self._month_usage)
+            _LOGGER.debug("Updating Atome monthly data. Got: %d", self._month_usage)

         except KeyError as error:
             _LOGGER.error("Missing last value in values: %s: %s", values, error)
@@ -209,7 +209,7 @@ class AtomeData:
             values = self.atome_client.get_consumption(YEARLY_TYPE)
             self._year_usage = values["total"] / 1000
             self._year_price = values["price"]
-            _LOGGER.debug("Updating Atome yearly data. Got: %d.", self._year_usage)
+            _LOGGER.debug("Updating Atome yearly data. Got: %d", self._year_usage)

         except KeyError as error:
             _LOGGER.error("Missing last value in values: %s: %s", values, error)
@@ -60,7 +60,7 @@ async def async_request_validation(hass, config_entry, august_gateway):
     # In the future this should start a new config flow
     # instead of using the legacy configurator
     #
-    _LOGGER.error("Access token is no longer valid.")
+    _LOGGER.error("Access token is no longer valid")
     configurator = hass.components.configurator
     entry_id = config_entry.entry_id

@@ -351,7 +351,7 @@ class AugustData(AugustSubscriberMixin):
             doorbell_detail = self._device_detail_by_id.get(device_id)
             if doorbell_detail is None:
                 _LOGGER.info(
-                    "The doorbell %s could not be setup because the system could not fetch details about the doorbell.",
+                    "The doorbell %s could not be setup because the system could not fetch details about the doorbell",
                     doorbell.device_name,
                 )
             else:
@@ -373,17 +373,17 @@ class AugustData(AugustSubscriberMixin):
             lock_detail = self._device_detail_by_id.get(device_id)
             if lock_detail is None:
                 _LOGGER.info(
-                    "The lock %s could not be setup because the system could not fetch details about the lock.",
+                    "The lock %s could not be setup because the system could not fetch details about the lock",
                     lock.device_name,
                 )
             elif lock_detail.bridge is None:
                 _LOGGER.info(
-                    "The lock %s could not be setup because it does not have a bridge (Connect).",
+                    "The lock %s could not be setup because it does not have a bridge (Connect)",
                     lock.device_name,
                 )
             elif not lock_detail.bridge.operative:
                 _LOGGER.info(
-                    "The lock %s could not be setup because the bridge (Connect) is not operative.",
+                    "The lock %s could not be setup because the bridge (Connect) is not operative",
                     lock.device_name,
                 )
             else:
@@ -88,7 +88,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
         detail = data.get_device_detail(door.device_id)
         if not detail.doorsense:
             _LOGGER.debug(
-                "Not adding sensor class door for lock %s because it does not have doorsense.",
+                "Not adding sensor class door for lock %s because it does not have doorsense",
                 door.device_name,
             )
             continue
@@ -13,6 +13,8 @@ VERIFICATION_CODE_KEY = "verification_code"
 NOTIFICATION_ID = "august_notification"
 NOTIFICATION_TITLE = "August"

+MANUFACTURER = "August Home Inc."
+
 DEFAULT_AUGUST_CONFIG_FILE = ".august.conf"

 DATA_AUGUST = "data_august"
@@ -5,7 +5,8 @@ import logging
 from homeassistant.core import callback
 from homeassistant.helpers.entity import Entity

-from . import DEFAULT_NAME, DOMAIN
+from . import DOMAIN
+from .const import MANUFACTURER

 _LOGGER = logging.getLogger(__name__)

@@ -38,7 +39,7 @@ class AugustEntityMixin(Entity):
         return {
             "identifiers": {(DOMAIN, self._device_id)},
             "name": self._device.device_name,
-            "manufacturer": DEFAULT_NAME,
+            "manufacturer": MANUFACTURER,
             "sw_version": self._detail.firmware_version,
             "model": self._detail.model,
         }
homeassistant/components/august/translations/cs.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "config": {
+        "step": {
+            "user": {
+                "data": {
+                    "password": "Heslo",
+                    "username": "U\u017eivatelsk\u00e9 jm\u00e9no"
+                }
+            }
+        }
+    }
+}
@@ -5,7 +5,8 @@
             "data": {
                 "password": "Palavra-passe",
                 "username": "Nome de Utilizador"
-            }
+            },
+            "description": "Se o m\u00e9todo de login for 'email', Nome do utilizador \u00e9 o endere\u00e7o de email. Se o m\u00e9todo de login for 'telefone', Nome do utilizador ser\u00e1 o n\u00famero de telefone no formato '+NNNNNNNNN'."
         }
     }
 }
@@ -9,9 +9,11 @@ import voluptuous as vol
 from homeassistant.const import (
     ATTR_ENTITY_ID,
     ATTR_NAME,
+    CONF_ALIAS,
     CONF_DEVICE_ID,
     CONF_ENTITY_ID,
     CONF_ID,
+    CONF_MODE,
     CONF_PLATFORM,
     CONF_ZONE,
     EVENT_HOMEASSISTANT_STARTED,
@@ -23,11 +25,20 @@ from homeassistant.const import (
 )
 from homeassistant.core import Context, CoreState, HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import condition, extract_domain_configs, script
+from homeassistant.helpers import condition, extract_domain_configs
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity import ToggleEntity
 from homeassistant.helpers.entity_component import EntityComponent
 from homeassistant.helpers.restore_state import RestoreEntity
+from homeassistant.helpers.script import (
+    ATTR_CUR,
+    ATTR_MAX,
+    ATTR_MODE,
+    CONF_MAX,
+    SCRIPT_MODE_SINGLE,
+    Script,
+    make_script_schema,
+)
 from homeassistant.helpers.service import async_register_admin_service
 from homeassistant.helpers.typing import TemplateVarsType
 from homeassistant.loader import bind_hass
@@ -41,7 +52,6 @@ ENTITY_ID_FORMAT = DOMAIN + ".{}"

 GROUP_NAME_ALL_AUTOMATIONS = "all automations"

-CONF_ALIAS = "alias"
 CONF_DESCRIPTION = "description"
 CONF_HIDE_ENTITY = "hide_entity"

@@ -96,7 +106,7 @@ _CONDITION_SCHEMA = vol.All(cv.ensure_list, [cv.CONDITION_SCHEMA])

 PLATFORM_SCHEMA = vol.All(
     cv.deprecated(CONF_HIDE_ENTITY, invalidation_version="0.110"),
-    vol.Schema(
+    make_script_schema(
         {
             # str on purpose
             CONF_ID: str,
@@ -107,7 +117,8 @@ PLATFORM_SCHEMA = vol.All(
             vol.Required(CONF_TRIGGER): _TRIGGER_SCHEMA,
             vol.Optional(CONF_CONDITION): _CONDITION_SCHEMA,
             vol.Required(CONF_ACTION): cv.SCRIPT_SCHEMA,
-        }
+        },
+        SCRIPT_MODE_SINGLE,
     ),
 )

@@ -268,7 +279,15 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
     @property
     def state_attributes(self):
         """Return the entity state attributes."""
-        return {ATTR_LAST_TRIGGERED: self._last_triggered}
+        attrs = {
+            ATTR_LAST_TRIGGERED: self._last_triggered,
+            ATTR_MODE: self.action_script.script_mode,
+        }
+        if self.action_script.supports_max:
+            attrs[ATTR_MAX] = self.action_script.max_runs
+        if self.is_on:
+            attrs[ATTR_CUR] = self.action_script.runs
+        return attrs

     @property
     def is_on(self) -> bool:
@@ -334,7 +353,7 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
         else:
             enable_automation = DEFAULT_INITIAL_STATE
             _LOGGER.debug(
-                "Automation %s not in state storage, state %s from default is used.",
+                "Automation %s not in state storage, state %s from default is used",
                 self.entity_id,
                 enable_automation,
             )
@@ -389,7 +408,7 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
         try:
             await self.action_script.async_run(variables, trigger_context)
         except Exception:  # pylint: disable=broad-except
-            pass
+            _LOGGER.exception("While executing automation %s", self.entity_id)

     async def async_will_remove_from_hass(self):
         """Remove listeners when removing automation from Home Assistant."""
@@ -498,8 +517,13 @@ async def _async_process_config(hass, config, component):

         initial_state = config_block.get(CONF_INITIAL_STATE)

-        action_script = script.Script(
-            hass, config_block.get(CONF_ACTION, {}), name, logger=_LOGGER
+        action_script = Script(
+            hass,
+            config_block[CONF_ACTION],
+            name,
+            script_mode=config_block[CONF_MODE],
+            max_runs=config_block[CONF_MAX],
+            logger=_LOGGER,
         )

         if CONF_CONDITION in config_block:
@@ -10,7 +10,8 @@ from homeassistant.components.device_automation.exceptions import (
 from homeassistant.config import async_log_exception, config_without_domain
 from homeassistant.const import CONF_PLATFORM
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import condition, config_per_platform, script
+from homeassistant.helpers import condition, config_per_platform
+from homeassistant.helpers.script import async_validate_action_config
 from homeassistant.loader import IntegrationNotFound

 from . import CONF_ACTION, CONF_CONDITION, CONF_TRIGGER, DOMAIN, PLATFORM_SCHEMA
@@ -44,10 +45,7 @@ async def async_validate_config_item(hass, config, full_config=None):
     )

     config[CONF_ACTION] = await asyncio.gather(
-        *[
-            script.async_validate_action_config(hass, action)
-            for action in config[CONF_ACTION]
-        ]
+        *[async_validate_action_config(hass, action) for action in config[CONF_ACTION]]
     )

     return config
@@ -71,19 +69,18 @@ async def _try_async_validate_config_item(hass, config, full_config=None):

 async def async_validate_config(hass, config):
     """Validate config."""
-    validated_automations = await asyncio.gather(
-        *(
-            _try_async_validate_config_item(hass, p_config, config)
-            for _, p_config in config_per_platform(config, DOMAIN)
+    automations = list(
+        filter(
+            lambda x: x is not None,
+            await asyncio.gather(
+                *(
+                    _try_async_validate_config_item(hass, p_config, config)
+                    for _, p_config in config_per_platform(config, DOMAIN)
+                )
+            ),
         )
     )

-    automations = [
-        validated_automation
-        for validated_automation in validated_automations
-        if validated_automation is not None
-    ]
-
     # Create a copy of the configuration with all config for current
     # component removed and add validated config back in.
     config = config_without_domain(config, DOMAIN)
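The validation rewrite above runs all per-platform validations concurrently and then drops the ones that returned None. A standalone sketch of that gather-then-filter pattern follows; the validator here is a stand-in, not Home Assistant's real config validator.

# Sketch only: gather concurrent results, then filter out None.
import asyncio


async def validate(item):
    """Pretend validator: odd numbers are 'invalid' and yield None."""
    await asyncio.sleep(0)
    return item if item % 2 == 0 else None


async def main():
    items = [1, 2, 3, 4]
    validated = list(
        filter(
            lambda x: x is not None,
            await asyncio.gather(*(validate(i) for i in items)),
        )
    )
    print(validated)  # -> [2, 4]


asyncio.run(main())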
@@ -14,7 +14,10 @@ from homeassistant.const import (
 )
 from homeassistant.core import CALLBACK_TYPE, callback
 from homeassistant.helpers import condition, config_validation as cv, template
-from homeassistant.helpers.event import async_track_same_state, async_track_state_change
+from homeassistant.helpers.event import (
+    async_track_same_state,
+    async_track_state_change_event,
+)

 # mypy: allow-incomplete-defs, allow-untyped-calls, allow-untyped-defs
 # mypy: no-check-untyped-defs
@@ -94,8 +97,11 @@ async def async_attach_trigger(
     )

     @callback
-    def state_automation_listener(entity, from_s, to_s):
+    def state_automation_listener(event):
         """Listen for state changes and calls action."""
+        entity = event.data.get("entity_id")
+        from_s = event.data.get("old_state")
+        to_s = event.data.get("new_state")

         @callback
         def call_action():
@@ -168,7 +174,7 @@ async def async_attach_trigger(
         else:
             call_action()

-    unsub = async_track_state_change(hass, entity_id, state_automation_listener)
+    unsub = async_track_state_change_event(hass, entity_id, state_automation_listener)

     @callback
     def async_remove():
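The trigger listeners now receive a single state-changed event instead of three positional arguments and unpack entity_id, old_state, and new_state from event.data. Below is a self-contained sketch of that listener shape, using a stub Event class rather than Home Assistant's own event object.

# Sketch only: the new-style event-based listener signature.
from dataclasses import dataclass, field


@dataclass
class Event:
    """Stub standing in for a state_changed event."""

    data: dict = field(default_factory=dict)


def state_automation_listener(event):
    """New-style listener: everything arrives via event.data."""
    entity = event.data.get("entity_id")
    from_s = event.data.get("old_state")
    to_s = event.data.get("new_state")
    print(f"{entity}: {from_s} -> {to_s}")


state_automation_listener(
    Event(data={"entity_id": "light.kitchen", "old_state": "off", "new_state": "on"})
)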
@@ -6,12 +6,13 @@ from typing import Dict
 import voluptuous as vol

 from homeassistant import exceptions
-from homeassistant.const import CONF_FOR, CONF_PLATFORM, EVENT_STATE_CHANGED, MATCH_ALL
+from homeassistant.const import CONF_FOR, CONF_PLATFORM, MATCH_ALL
 from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv, template
 from homeassistant.helpers.event import (
     Event,
     async_track_same_state,
+    async_track_state_change_event,
     process_state_match,
 )

@@ -153,7 +154,7 @@ async def async_attach_trigger(
             hass, period[entity], call_action, _check_same_state, entity_ids=entity,
         )

-    unsub = hass.bus.async_listen(EVENT_STATE_CHANGED, state_automation_listener)
+    unsub = async_track_state_change_event(hass, entity_id, state_automation_listener)

     @callback
     def async_remove():
@@ -1,16 +1,10 @@
 """Offer zone automation rules."""
 import voluptuous as vol

-from homeassistant.const import (
-    CONF_ENTITY_ID,
-    CONF_EVENT,
-    CONF_PLATFORM,
-    CONF_ZONE,
-    MATCH_ALL,
-)
+from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_PLATFORM, CONF_ZONE
 from homeassistant.core import callback
 from homeassistant.helpers import condition, config_validation as cv, location
-from homeassistant.helpers.event import async_track_state_change
+from homeassistant.helpers.event import async_track_state_change_event

 # mypy: allow-untyped-defs, no-check-untyped-defs

@@ -37,8 +31,12 @@ async def async_attach_trigger(hass, config, action, automation_info):
     event = config.get(CONF_EVENT)

     @callback
-    def zone_automation_listener(entity, from_s, to_s):
+    def zone_automation_listener(zone_event):
         """Listen for state changes and calls action."""
+        entity = zone_event.data.get("entity_id")
+        from_s = zone_event.data.get("old_state")
+        to_s = zone_event.data.get("new_state")
+
         if (
             from_s
             and not location.has_location(from_s)
@@ -74,6 +72,4 @@ async def async_attach_trigger(hass, config, action, automation_info):
             )
         )

-    return async_track_state_change(
-        hass, entity_id, zone_automation_listener, MATCH_ALL, MATCH_ALL
-    )
+    return async_track_state_change_event(hass, entity_id, zone_automation_listener)
||||||
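
Note: the event-based helper has no from_state/to_state filter arguments, so the MATCH_ALL, MATCH_ALL pair drops out; every state change of the tracked entities reaches the listener, which performs its own zone check. A one-function sketch of the subscription as it stands after this hunk (parameter names are illustrative):

from homeassistant.helpers.event import async_track_state_change_event


def attach_zone_listener(hass, entity_ids, zone_automation_listener):
    # The return value is the unsubscribe callable that async_attach_trigger
    # hands straight back to the automation for later detachment.
    return async_track_state_change_event(hass, entity_ids, zone_automation_listener)
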
@@ -22,7 +22,7 @@ async def async_setup_entry(
     integration_id = entry.data[CONF_ID]

     try:
-        each_upcoming = client.upcoming_of_each()
+        each_upcoming = await hass.async_add_executor_job(client.upcoming_of_each)
     except AvriException as ex:
         raise PlatformNotReady from ex
     else:
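
Note: the avri client call is synchronous, so it is moved off the event loop via hass.async_add_executor_job and awaited. A small sketch of the same pattern with a made-up blocking helper (requests, fetch_pickups, and the URL are illustrative, not part of the integration):

import requests  # stand-in for any blocking I/O library


def fetch_pickups(url):
    """Blocking call: must never run directly inside the event loop."""
    return requests.get(url, timeout=10).json()


async def async_update(hass):
    # Hand the blocking callable (plus its arguments) to Home Assistant's
    # thread pool and await the result, exactly as done above for
    # client.upcoming_of_each.
    return await hass.async_add_executor_job(
        fetch_pickups, "https://example.invalid/api"
    )
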
@@ -4,11 +4,13 @@
             "already_configured": "D\u00ebs Adress ass scho konfigur\u00e9iert."
         },
         "error": {
+            "invalid_country_code": "Onbekannte Zweestellege L\u00e4nner Code",
             "invalid_house_number": "Ong\u00eblteg Haus Nummer"
         },
         "step": {
             "user": {
                 "data": {
+                    "country_code": "Zweestellege L\u00e4nner Code",
                     "house_number": "Haus Nummer",
                     "house_number_extension": "Haus Nummer Extensioun",
                     "zip_code": "Postleitzuel"

homeassistant/components/avri/translations/pl.json (new file)
@@ -0,0 +1,16 @@
+{
+    "config": {
+        "error": {
+            "invalid_house_number": "Nieprawid\u0142owy numer domu"
+        },
+        "step": {
+            "user": {
+                "data": {
+                    "country_code": "Dwuliterowy kod kraju",
+                    "house_number": "Numer domu",
+                    "zip_code": "Kod pocztowy"
+                }
+            }
+        }
+    }
+}

homeassistant/components/avri/translations/pt.json (new file)
@@ -0,0 +1,11 @@
+{
+    "config": {
+        "step": {
+            "user": {
+                "data": {
+                    "zip_code": "C\u00f3digo postal"
+                }
+            }
+        }
+    }
+}
@@ -21,7 +21,8 @@
                 "data": {
                     "access_token": "Token d'acc\u00e9s",
                     "email": "Correu electr\u00f2nic"
-                }
+                },
+                "description": "T'has de registrar a Awair per a obtenir un token d'acc\u00e9s de desenvolupador a trav\u00e9s de l'enlla\u00e7 seg\u00fcent: https://developer.getawair.com/onboard/login"
             }
         }
     }

homeassistant/components/awair/translations/cs.json (new file)
@@ -0,0 +1,26 @@
+{
+    "config": {
+        "abort": {
+            "already_configured": "\u00da\u010det je ji\u017e nakonfigurov\u00e1n",
+            "no_devices": "V s\u00edti nebyla nalezena \u017e\u00e1dn\u00e1 za\u0159\u00edzen\u00ed.",
+            "reauth_successful": "P\u0159\u00edstupov\u00fd token \u00fasp\u011b\u0161n\u011b aktualizov\u00e1n"
+        },
+        "error": {
+            "auth": "Neplatn\u00fd p\u0159\u00edstupov\u00fd token"
+        },
+        "step": {
+            "reauth": {
+                "data": {
+                    "access_token": "P\u0159\u00edstupov\u00fd token",
+                    "email": "E-mail"
+                }
+            },
+            "user": {
+                "data": {
+                    "access_token": "P\u0159\u00edstupov\u00fd token",
+                    "email": "E-mail"
+                }
+            }
+        }
+    }
+}

homeassistant/components/awair/translations/de.json (new file)
@@ -0,0 +1,12 @@
+{
+    "config": {
+        "error": {
+            "unknown": "Unbekannter Awair-API-Fehler."
+        },
+        "step": {
+            "reauth": {
+                "description": "Bitte geben Sie Ihr Awair-Entwicklerzugriffstoken erneut ein."
+            }
+        }
+    }
+}

homeassistant/components/awair/translations/fr.json (new file)
@@ -0,0 +1,29 @@
+{
+    "config": {
+        "abort": {
+            "already_configured": "Le compte est d\u00e9j\u00e0 configur\u00e9",
+            "no_devices": "Pas d'appareil trouv\u00e9 sur le r\u00e9seau",
+            "reauth_successful": "Jeton d'acc\u00e8s mis \u00e0 jour avec succ\u00e8s"
+        },
+        "error": {
+            "auth": "Jeton d'acc\u00e8s invalide",
+            "unknown": "Erreur d'API Awair inconnue."
+        },
+        "step": {
+            "reauth": {
+                "data": {
+                    "access_token": "Jeton d'acc\u00e8s",
+                    "email": "Email"
+                },
+                "description": "Veuillez ressaisir votre jeton d'acc\u00e8s d\u00e9veloppeur Awair."
+            },
+            "user": {
+                "data": {
+                    "access_token": "Jeton d'acc\u00e8s",
+                    "email": "Email"
+                },
+                "description": "Vous devez vous inscrire pour un jeton d'acc\u00e8s d\u00e9veloppeur Awair sur: https://developer.getawair.com/onboard/login"
+            }
+        }
+    }
+}

homeassistant/components/awair/translations/it.json (new file)
@@ -0,0 +1,29 @@
+{
+    "config": {
+        "abort": {
+            "already_configured": "L'account \u00e8 gi\u00e0 configurato",
+            "no_devices": "Nessun dispositivo trovato sulla rete",
+            "reauth_successful": "Token di accesso aggiornato correttamente"
+        },
+        "error": {
+            "auth": "Token di accesso non valido",
+            "unknown": "Errore API Awair sconosciuto."
+        },
+        "step": {
+            "reauth": {
+                "data": {
+                    "access_token": "Token di accesso",
+                    "email": "E-mail"
+                },
+                "description": "Inserisci nuovamente il tuo token di accesso per sviluppatori Awair."
+            },
+            "user": {
+                "data": {
+                    "access_token": "Token di accesso",
+                    "email": "E-mail"
+                },
+                "description": "\u00c8 necessario registrarsi per un token di accesso per sviluppatori Awair all'indirizzo: https://developer.getawair.com/onboard/login"
+            }
+        }
+    }
+}

homeassistant/components/awair/translations/ko.json (new file)
@@ -0,0 +1,29 @@
+{
+    "config": {
+        "abort": {
+            "already_configured": "\uacc4\uc815\uc774 \uc774\ubbf8 \uad6c\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4.",
+            "no_devices": "\ub124\ud2b8\uc6cc\ud06c\uc5d0\uc11c \uae30\uae30\ub97c \ucc3e\uc744 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4",
+            "reauth_successful": "\uc561\uc138\uc2a4 \ud1a0\ud070\uc774 \uc131\uacf5\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub418\uc5c8\uc2b5\ub2c8\ub2e4"
+        },
+        "error": {
+            "auth": "\uc561\uc138\uc2a4 \ud1a0\ud070\uc774 \uc798\ubabb\ub418\uc5c8\uc2b5\ub2c8\ub2e4",
+            "unknown": "\uc54c \uc218 \uc5c6\ub294 Awair API \uc624\ub958\uac00 \ubc1c\uc0dd\ud588\uc2b5\ub2c8\ub2e4."
+        },
+        "step": {
+            "reauth": {
+                "data": {
+                    "access_token": "\uc561\uc138\uc2a4 \ud1a0\ud070",
+                    "email": "\uc774\uba54\uc77c"
+                },
+                "description": "Awair \uac1c\ubc1c\uc790 \uc561\uc138\uc2a4 \ud1a0\ud070\uc744 \ub2e4\uc2dc \uc785\ub825\ud574\uc8fc\uc138\uc694."
+            },
+            "user": {
+                "data": {
+                    "access_token": "\uc561\uc138\uc2a4 \ud1a0\ud070",
+                    "email": "\uc774\uba54\uc77c"
+                },
+                "description": "https://developer.getawair.com/onboard/login \uc5d0 Awair \uac1c\ubc1c\uc790 \uc561\uc138\uc2a4 \ud1a0\ud070\uc744 \ub4f1\ub85d\ud574\uc57c\ud569\ub2c8\ub2e4"
+            }
+        }
+    }
+}

homeassistant/components/awair/translations/lb.json (new file)
@@ -0,0 +1,29 @@
+{
+    "config": {
+        "abort": {
+            "already_configured": "Kont ass",
+            "no_devices": "Keng Apparater am Netzwierk fonnt",
+            "reauth_successful": "Acc\u00e8s Jeton erfollegr\u00e4ich aktualis\u00e9iert"
+        },
+        "error": {
+            "auth": "Ong\u00ebltege Acc\u00e8s Jeton",
+            "unknown": "Onbekannten Awair API Feeler"
+        },
+        "step": {
+            "reauth": {
+                "data": {
+                    "access_token": "Acc\u00e8s Jeton",
+                    "email": "E-Mail"
+                },
+                "description": "G\u00ebff d\u00e4in Awair Developpeur Acc\u00e8s jeton nach emol un."
+            },
+            "user": {
+                "data": {
+                    "access_token": "Acc\u00e8s Jeton",
+                    "email": "E-Mail"
+                },
+                "description": "Du muss dech fir een Awair Developpeur Acc\u00e8s Jeton registr\u00e9ien op:\nhttps://developer.getawair.com/onboard/login"
+            }
+        }
+    }
+}

homeassistant/components/awair/translations/pt.json (new file)
@@ -0,0 +1,25 @@
+{
+    "config": {
+        "abort": {
+            "already_configured": "Conta j\u00e1 configurada",
+            "no_devices": "Nenhum dispositivo encontrado na rede",
+            "reauth_successful": "Token de Acesso actualizado com sucesso"
+        },
+        "error": {
+            "auth": "Token de acesso inv\u00e1lido"
+        },
+        "step": {
+            "reauth": {
+                "data": {
+                    "email": "Email"
+                }
+            },
+            "user": {
+                "data": {
+                    "access_token": "Token de Acesso",
+                    "email": "Email"
+                }
+            }
+        }
+    }
+}
@@ -3,5 +3,5 @@
   "name": "Amazon Web Services (AWS)",
   "documentation": "https://www.home-assistant.io/integrations/aws",
   "requirements": ["aiobotocore==0.11.1"],
-  "codeowners": ["@awarecan", "@robbiet480"]
+  "codeowners": ["@awarecan"]
 }
@@ -1,5 +1,15 @@
 {
     "config": {
-        "flow_title": "Za\u0159\u00edzen\u00ed Axis: {name} ({host})"
+        "flow_title": "Za\u0159\u00edzen\u00ed Axis: {name} ({host})",
+        "step": {
+            "user": {
+                "data": {
+                    "host": "Hostitel",
+                    "password": "Heslo",
+                    "port": "Port",
+                    "username": "U\u017eivatelsk\u00e9 jm\u00e9no"
+                }
+            }
+        }
     }
 }
@@ -16,7 +16,7 @@
         "step": {
             "user": {
                 "data": {
-                    "host": "H\u00f4te",
+                    "host": "Nom d'h\u00f4te ou adresse IP",
                     "password": "Mot de passe",
                     "port": "Port",
                     "username": "Nom d'utilisateur"