Merge branch 'current' into esphome-migrate-bluetooth-n-mediaplayer-pages

commit d3294feb10
c0ffeeca7, 2023-06-01 22:12:35 +02:00, committed by GitHub
122 changed files with 780 additions and 420 deletions


@ -20,7 +20,7 @@ group :jekyll_plugins do
end
gem 'sinatra', '3.0.6'
gem 'nokogiri', '1.15.0'
gem 'nokogiri', '1.15.2'
# Windows and JRuby does not include zoneinfo files, so bundle the tzinfo-data gem
# and associated library


@ -27,7 +27,7 @@ GEM
ffi (1.15.5)
ffi (1.15.5-x64-mingw32)
forwardable-extended (2.6.0)
google-protobuf (3.23.0)
google-protobuf (3.23.2)
http_parser.rb (0.8.0)
i18n (1.13.0)
concurrent-ruby (~> 1.0)
@ -72,7 +72,7 @@ GEM
multi_json (1.15.0)
mustermann (3.0.0)
ruby2_keywords (~> 0.0.1)
nokogiri (1.15.0)
nokogiri (1.15.2)
mini_portile2 (~> 2.8.2)
racc (~> 1.4)
pathutil (0.16.2)
@ -129,7 +129,7 @@ DEPENDENCIES
jekyll-paginate (= 1.1.0)
jekyll-sitemap (= 1.4.0)
jekyll-toc (= 0.18.0)
nokogiri (= 1.15.0)
nokogiri (= 1.15.2)
rake (= 13.0.6)
sass-globbing (= 1.1.5)
sassc (= 2.1.0)


@ -110,8 +110,8 @@ social:
# Home Assistant release details
current_major_version: 2023
current_minor_version: 5
current_patch_version: 3
date_released: 2023-05-14
current_patch_version: 4
date_released: 2023-05-23
# Either # or the anchor link to latest release notes in the blog post.
# Must be prefixed with a # and have double quotes around it.

package-lock.json (generated, 32 changed lines)

@ -10,10 +10,10 @@
"devDependencies": {
"remark-cli": "^11.0.0",
"remark-frontmatter": "^4.0.1",
"remark-lint": "^9.1.1",
"remark-lint-fenced-code-flag": "^3.1.1",
"remark-lint-no-shell-dollars": "^3.1.1",
"remark-stringify": "^10.0.2",
"remark-lint": "^9.1.2",
"remark-lint-fenced-code-flag": "^3.1.2",
"remark-lint-no-shell-dollars": "^3.1.2",
"remark-stringify": "^10.0.3",
"textlint": "^13.3.2",
"textlint-filter-rule-comments": "^1.2.2",
"textlint-rule-common-misspellings": "^1.0.1",
@ -3481,9 +3481,9 @@
}
},
"node_modules/remark-lint": {
"version": "9.1.1",
"resolved": "https://registry.npmjs.org/remark-lint/-/remark-lint-9.1.1.tgz",
"integrity": "sha512-zhe6twuqgkx/9KgZyNyaO0cceA4jQuJcyzMOBC+JZiAzMN6mFUmcssWZyY30ko8ut9vQDMX/pyQnolGn+Fg/Tw==",
"version": "9.1.2",
"resolved": "https://registry.npmjs.org/remark-lint/-/remark-lint-9.1.2.tgz",
"integrity": "sha512-m9e/aPlh7tsvfJfj8tPxrQzD6oEdb9Foko+Ya/6OwUP9EoGMfehv1Qtv26W1DoH58Wn8rT8CD+KuprTWscMmIA==",
"dev": true,
"dependencies": {
"@types/mdast": "^3.0.0",
@ -3496,9 +3496,9 @@
}
},
"node_modules/remark-lint-fenced-code-flag": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/remark-lint-fenced-code-flag/-/remark-lint-fenced-code-flag-3.1.1.tgz",
"integrity": "sha512-FFVZmYsBccKIIEgOtgdZEpQdARtAat1LTLBydnIpyNIvcntzWwtrtlj9mtjL8ZoSRre8HtwmEnBFyOfmM/NWaA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/remark-lint-fenced-code-flag/-/remark-lint-fenced-code-flag-3.1.2.tgz",
"integrity": "sha512-yh4m3dlPmRsqM/BFhpqHYfrmBvFQ+D5dZZKDDYP2rf3YEoXlEVt8T8lWQueTTSxcq6yXAqL/XQL/iqqUHlLcHw==",
"dev": true,
"dependencies": {
"@types/mdast": "^3.0.0",
@ -3514,9 +3514,9 @@
}
},
"node_modules/remark-lint-no-shell-dollars": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/remark-lint-no-shell-dollars/-/remark-lint-no-shell-dollars-3.1.1.tgz",
"integrity": "sha512-Q3Ad1TaOPxbYog5+Of/quPG3Fy+dMKiHjT8KsU7NDiHG6YJOnAJ3f3w+y13CIlNIaKc/MrisgcthhrZ7NsgXfA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/remark-lint-no-shell-dollars/-/remark-lint-no-shell-dollars-3.1.2.tgz",
"integrity": "sha512-np2MDEhXHviXhbQFjnC1QYv5/fxCV1cIHfGMoJpqiW7Zcu/UGCOo5TE3XswZH4ukHZJ65c3X2A6qfLDW+ur3CQ==",
"dev": true,
"dependencies": {
"@types/mdast": "^3.0.0",
@ -3563,9 +3563,9 @@
}
},
"node_modules/remark-stringify": {
"version": "10.0.2",
"resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-10.0.2.tgz",
"integrity": "sha512-6wV3pvbPvHkbNnWB0wdDvVFHOe1hBRAx1Q/5g/EpH4RppAII6J8Gnwe7VbHuXaoKIF6LAg6ExTel/+kNqSQ7lw==",
"version": "10.0.3",
"resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-10.0.3.tgz",
"integrity": "sha512-koyOzCMYoUHudypbj4XpnAKFbkddRMYZHwghnxd7ue5210WzGw6kOBwauJTRUMq16jsovXx8dYNvSSWP89kZ3A==",
"dev": true,
"dependencies": {
"@types/mdast": "^3.0.0",


@ -5,10 +5,10 @@
"devDependencies": {
"remark-cli": "^11.0.0",
"remark-frontmatter": "^4.0.1",
"remark-lint": "^9.1.1",
"remark-lint-fenced-code-flag": "^3.1.1",
"remark-lint-no-shell-dollars": "^3.1.1",
"remark-stringify": "^10.0.2",
"remark-lint": "^9.1.2",
"remark-lint-fenced-code-flag": "^3.1.2",
"remark-lint-no-shell-dollars": "^3.1.2",
"remark-stringify": "^10.0.3",
"textlint": "^13.3.2",
"textlint-filter-rule-comments": "^1.2.2",
"textlint-rule-common-misspellings": "^1.0.1",


@ -65,7 +65,7 @@ entity:
type: string
type:
required: false
description: "Sets a custom card type: `custom:my-custom-card`"
description: "Sets a custom card type: `custom:my-custom-card`. It also can be used to force entities with a default special row format to render as a simple state. You can do this by setting the type: `simple-entity`. This can be used, for example, to replace a helper with an editable control with a read-only value."
type: string
name:
required: false
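To illustrate the `simple-entity` option described above, here is a hedged sketch of an entities card that renders a helper as a plain read-only state (the entity ID is a placeholder):

```yaml
type: entities
entities:
  - entity: input_number.target_temperature  # placeholder helper
    type: simple-entity  # render the plain state instead of the editable slider
    name: Target temperature
```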


@ -57,6 +57,19 @@
aliases:
- automations
- term: Backup
definition: >-
Home Assistant has built-in functionality to create files containing a copy of
your configuration. This can be used to restore your Home Assistant as well
as migrate to a new system. The backup feature is available on some installation
types.
link: /integrations/backup/
excerpt: >-
Home Assistant has built-in functionality to create files containing a copy of
your configuration. This is available on certain installation types.
aliases:
- backups
- term: Binary sensor
definition: >-
A binary sensor returns information about things that only have two states -
@ -286,16 +299,17 @@
- term: Reload
definition: >-
Updates Home Assistant configuration files. Changes are normally
automatically updated. However, when changes are made outside of the UI
and at a file level, Home Assistant is not aware of these changes and
requires the configuration file(s) to be reloaded to pick up any changes
made by going to **Settings** > **System** > **Restart Home Assistant**
(top right) > **Quick reload**. More granular reload options are available
in *YAML configuration reloading* section in **Developer tools** > **YAML**.
Applies the changes made to the Home Assistant configuration files. Changes
are normally automatically updated. However, changes made outside of the
frontend will not be reflected in Home Assistant and require a reload.
To perform a manual reload, go to **Settings** > **System** >
**Restart Home Assistant** (top right) > **Quick reload**. More granular
reload options are available in the *YAML configuration reloading* section
in **Developer tools** > **YAML**.
excerpt: >
Updates Home Assistant configuration files. Changes are normally
automatically updated.
Applies the changes made to Home Assistant configuration files. Changes are normally
automatically updated. However, changes made outside of the frontend will not be
reflected in Home Assistant and require a reload.
- term: Scene
definition: >-
@ -393,7 +407,7 @@
- term: TTS
definition: >-
TTS (text to speech) allows Home Assistant to talk to you.
TTS (text-to-speech) allows Home Assistant to talk to you.
link: /integrations/tts/
- term: Variables


@ -10,7 +10,7 @@ The automation's `mode` configuration option controls what happens when the auto
Mode | Description
-|-
`single` | (Default) Do not start a new run. Issue a warning.
`restart` | Start a new run after first stopping previous run.
`restart` | Start a new run after first stopping the previous run. The automation only restarts if the conditions are met.
`queued` | Start a new run after all previous runs complete. Runs are guaranteed to execute in the order they were queued. Note that a subsequent queued run will only join the queue if any conditions it may have are met at the time it is triggered.
`parallel` | Start a new, independent run in parallel with previous runs.
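As a hedged illustration of these modes, the sketch below queues runs instead of dropping them; the entity and notify service names are placeholders:

```yaml
automation:
  - alias: "Doorbell announcements"
    mode: queued
    max: 5  # optional cap on the number of queued runs
    trigger:
      - platform: state
        entity_id: binary_sensor.doorbell  # placeholder entity
        to: "on"
    action:
      - service: notify.mobile_app_phone  # placeholder notify service
        data:
          message: "Someone is at the door"
```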


@ -13,11 +13,11 @@ Quick links:
Automations based on a blueprint only need to be configured to be used. What needs to be configured differs for each blueprint.
To create your first automation based on a blueprint, go to **{% my config %}** -> **Automations & Scenes** -> **{% my blueprints %}**. Find the blueprint that you want to use and click on "Create Automation".
To create your first automation based on a blueprint, go to **{% my blueprints title="Settings > Automations & Scenes > Blueprints" %}**. Find the blueprint that you want to use and select **Create Automation**.
This will open the automation editor with the blueprint selected. Give it a name, configure the blueprint, and select the blue **Save Automation** button in the bottom right.
Done! If you want to revisit the configuration values, you can find it by going to **{% my config %}** and then **{% my automations %}**.
Done! If you want to revisit the configuration values, you can find it by going to **Settings** and then **{% my blueprints %}**.
## Importing blueprints


@ -561,8 +561,7 @@ include_entities:
filter:
description: >
When filter options are provided, the entities are limited by entities
that at least match the given conditions. Can be either a object or a list of object.
Can be either a object or a list of object.
that at least match the given conditions. Can be either an object or a list of objects.
type: list
required: false
keys:


@ -3,17 +3,17 @@ title: "Configuration.yaml"
description: "Configuring Home Assistant via text files."
---
While you can configure most of Home Assistant directly from the user interface under {% my config %}, some parts need you to edit `configuration.yaml`. This file contains integrations to be loaded along with their configurations. Throughout the documentation you will find snippets that you can add to your configuration file to enable specific functionality.
While you can configure most of Home Assistant directly from the user interface under {% my config %}, some parts need you to edit `configuration.yaml`. This file contains {% term integrations %} to be loaded along with their configurations. Throughout the documentation you will find snippets that you can add to your configuration file to enable specific functionality.
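For instance, a minimal sketch of such a snippet; the helper shown here is purely illustrative:

```yaml
# Adds a toggle helper; the name and icon are examples only
input_boolean:
  vacation_mode:
    name: Vacation mode
    icon: mdi:beach
```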
If you run into trouble while configuring Home Assistant, refer to the [configuration troubleshooting page](/docs/configuration/troubleshooting/) and the [`configuration.yaml` examples](/examples/#example-configurationyaml).
## Editing `configuration.yaml`
The easiest option to edit `configuration.yaml` is to use the {% my supervisor_addon title="Studio Code Server add-on" addon="a0d7b954_vscode" %}. This add-on runs VS Code, which offers live syntax checking and auto-fill of various Home Assistant entities (if unavailable on your system, use {% my supervisor_addon title="File Editor add-on" addon="core_configurator" %} instead).
The easiest option to edit `configuration.yaml` is to use the {% my supervisor_addon title="Studio Code Server add-on" addon="a0d7b954_vscode" %}. This add-on runs VS Code, which offers live syntax checking and auto-fill of various Home Assistant entities. See [here](/common-tasks/supervised/#installing-and-using-the-visual-studio-code-vsc-add-on) for details. If unavailable on your system, use {% my supervisor_addon title="File Editor add-on" addon="core_configurator" %} instead. Again, details can be found [here](/common-tasks/supervised/#installing-and-using-the-file-editor-add-on).
If you prefer to use a file editor on your computer, use the {% my supervisor_addon title="Samba add-on" addon="core_samba" %} to access the files as a network share.
If you prefer to use a file editor on your computer, use the {% my supervisor_addon title="Samba add-on" addon="core_samba" %} to access the files as a network share. More details can be found [here](/common-tasks/supervised/#installing-and-using-the-samba-add-on).
The path to your configuration directory can be found in the Home Assistant frontend by going to {% my system_health title="Settings > System > Repairs > System information from the top right menu" %}
The path to your configuration directory can be found in the Home Assistant {% term frontend %} by going to {% my system_health title="Settings > System > Repairs > System information from the top right menu" %}
![Show system menu option](/images/screenshots/System_information_menu.png)
@ -28,7 +28,7 @@ _If you use Home Assistant Core, you can find `configuration.yaml` in the config
## Reloading changes
Most integrations in Home Assistant that do not interact with devices or services can reload changes made to their configuration in `configuration.yaml`. To do this, go to {% my server_controls title="Developer Tools > YAML" %} and scroll down to the YAML configuration reloading section (alternatively, hit "c" anywhere in the UI and search for it).
Most integrations in Home Assistant that do not interact with {% term devices %} or {% term services %} can reload changes made to their configuration in `configuration.yaml`. To do this, go to {% my server_controls title="Developer Tools > YAML" %} and scroll down to the YAML configuration reloading section (alternatively, hit "c" anywhere in the UI and search for it).
If you can't see your integration listed there, you will need to restart Home Assistant for changes to take effect.
@ -40,6 +40,6 @@ If you can't see your integration listed there, you will need to restart Home As
## Migrating to a new system
The preferred way of migrating to a new system is by {% my supervisor_backups title="making a backup" %}. Once you have created the backup on the old system, you can download it to the system that is running the Home Assistant frontend. When setting up the new system, you may use the backup. Alternatively, you can upload it to your new system using the *Upload backup* menu option of the *Backups* menu. Then, a restore of the uploaded backup on the new system concludes the migration.
The preferred way of migrating to a new system is by {% my supervisor_backups title="making a backup" %}. Once you have created the backup on the old system, you can download it to the system that is running the Home Assistant frontend. When setting up the new system, you may use the backup. Alternatively, you can upload it to your new system using the _Upload backup_ menu option of the _Backups_ menu. Then, a restore of the uploaded backup on the new system concludes the migration.
If you run the container or core installation methods, you will need to manually make a backup of your configuration folder. Be aware that some of the files you need start with `.`, which is hidden by default from both `ls` (in SSH), in Windows Explorer, and macOS Finder. You'll need to ensure that you're viewing all files before you copy them.


@ -20,6 +20,9 @@ Users should upgrade the firmware on all 700 series controllers to version 7.17.
</div>
- 800 series controllers
- Zooz 800 Series Z-Wave Long Range S2 Stick (ZST39 LR)
- 700 series controllers
- Aeotec Z-Stick 7 USB stick (ZWA010) (the EU version is not recommended due to RF performance issues)
- Silicon Labs UZB-7 USB Stick (Silabs SLUSB7000A / SLUSB001A)
@ -39,7 +42,7 @@ Users should upgrade the firmware on all 700 series controllers to version 7.17.
- Z-Wave.Me RaZberry 7 Pro (ZMEERAZBERRY7_PRO or ZMEURAZBERRY7_PRO, 700 series)
- Z-Wave.Me Razberry 2 (500 series)
If you are just starting out, we recommend that you purchase a 700 series controller or a Raspberry Pi module.
If you are just starting out, we recommend that you purchase a 700 series controller or a Raspberry Pi module. The 700 series controllers are the more recent version (when compared to the 500 series). The 700 series controllers support SmartStart, which allows you to add a device by scanning a QR code.
<div class='note'>
If you're using Home Assistant OS, Supervised, or Container, it's recommended to use a USB stick, not a module. Passing a module through Docker is more complicated than passing a USB stick through.
@ -134,4 +137,4 @@ This procedure has been tested with the following modules:
1. Carefully [close the case](https://yellow.home-assistant.io/guides/add-ssd-existing-installation/#reassembling-top-part) and power up Home Assistant Yellow.
1. Follow the procedure on [setting up a Z-Wave JS server](/integrations/zwave_js/#setting-up-a-z-wave-js-server).
1. In step 2, follow the manual setup steps to install the Z-Wave integration.
1. in Step 4, you will be prompted to choose a **Device path**. Choose **ttyAMA0**.
1. In step 4, you will be prompted to choose a **Device path**. Choose **ttyAMA0**.


@ -0,0 +1,6 @@
---
title: "Configuration.yaml by dannytsang"
description: ""
ha_category: Example configuration.yaml
ha_external_link: https://github.com/dannytsang/homeassistant-config
---


@ -39,22 +39,6 @@
</li>
</ul>
</li>
<li>
<b>{% active_link /docs/assist/ Assist %}</b>
<ul>
<li>{% active_link /docs/assist/android/ Assist for Android %}</li>
<li>{% active_link /docs/assist/apple/ Assist for Apple devices %}</li>
<li>{% active_link /docs/assist/builtin_sentences/ Built-in sentences %}</li>
<li>{% active_link /docs/assist/custom_sentences/ Custom sentences %}</li>
<li>{% active_link /docs/assist/voice_remote_expose_devices/ Exposing devices to your voice assistant %}</li>
<li>{% active_link /docs/assist/voice_remote_local_assistant/ Configuring a local assistant %}</li>
<li>{% active_link /docs/assist/troubleshooting/ Troubleshooting Assist %}</li>
<li>{% active_link /docs/assist/voice_remote_local_assistant/ Configuring a local assistant %}</li>
<li>{% active_link /projects/worlds-most-private-voice-assistant/ Tutorial: World's most private voice assistant %}</li>
<li>{% active_link /projects/thirteen-usd-voice-remote/ Tutorial: $13 voice remote %}
</li>
</ul>
</li>
<li>
<b>{% active_link /docs/energy/ Home Energy Management %}</b>
<ul>


@ -0,0 +1,31 @@
<section class="aside-module grid__item one-whole lap-one-half">
{% assign elements = site.dashboards | sort_natural: 'title' %}
<div class="section">
<h1 class="title delta">Devices</h1>
<ul class="divided sidebar-menu">
<li>{% active_link /voice_control/android/ Assist for Android %}</li>
<li>{% active_link /voice_control/apple/ Assist for Apple %}</li>
</ul>
</div>
<div class="section">
<h1 class="title delta">Voice assistants</h1>
<ul class="divided sidebar-menu">
<li>{% active_link /voice_control/using_voice_assistants_overview/ Voice assistants: Overview %}</li>
<li>{% active_link /voice_control/voice_remote_local_assistant/ Configuring a local assistant %}</li>
<li>{% active_link /voice_control/voice_remote_expose_devices/ Exposing devices to voice assistant %}</li>
<li>{% active_link /voice_control/builtin_sentences/ Built-in sentences %}</li>
<li>{% active_link /voice_control/custom_sentences/ Custom sentences %}</li>
<li>{% active_link /voice_control/troubleshooting/ Troubleshooting Assist %}</li>
</ul>
</div>
<div class="section">
<h1 class="title delta">Projects</h1>
<ul class="divided sidebar-menu">
<li>{% active_link /voice_control/worlds-most-private-voice-assistant/ Tutorial: World's most private voice assistant %}</li>
<li>{% active_link /voice_control/thirteen-usd-voice-remote/ Tutorial: $13 voice remote %}</li>
</ul>
</div>
</section>


@ -33,7 +33,7 @@ sudo apt-get upgrade -y
Install the dependencies:
```bash
sudo apt-get install -y python3 python3-dev python3-venv python3-pip bluez libffi-dev libssl-dev libjpeg-dev zlib1g-dev autoconf build-essential libopenjp2-7 libtiff5 libturbojpeg0-dev tzdata
sudo apt-get install -y python3 python3-dev python3-venv python3-pip bluez libffi-dev libssl-dev libjpeg-dev zlib1g-dev autoconf build-essential libopenjp2-7 libtiff5 libturbojpeg0-dev tzdata ffmpeg liblapack3 liblapack-dev libatlas-base-dev
```
The above-listed dependencies might differ or be missing, depending on your system or your personal use of Home Assistant.


@ -12,15 +12,34 @@ Follow this guide if you want to get started with Home Assistant easily or if yo
We will need a few things to get started with installing Home Assistant. The links below lead to Ameridroid. If you're not in the US, you should be able to find these items in web stores in your country.
To get started we suggest the ODROID N2+, it's the most powerful ODROID. It's fast and with built-in eMMC one of the best boards to run Home Assistant. It's also the board that powers our [Home Assistant Blue](/blue/).
To get started, we suggest the ODROID N2+, the board that powers our [Home Assistant Blue](/blue/), or the ODROID M1.
- [ODROID N2+](https://ameridroid.com/products/odroid-n2-plus?ref=eeb6nfw07e)
- [Power Supply](https://ameridroid.com/products/12v-2a-power-supply-plug?ref=eeb6nfw07e)
- [CR2032 Coin Cell](https://ameridroid.com/products/rtc-bios-battery?ref=eeb6nfw07e)
- [eMMC Module](https://ameridroid.com/products/emmc-module-n2-linux-red-dot?ref=eeb6nfw07e)
- [Case](https://ameridroid.com/products/odroid-n2-case?ref=eeb6nfw07e)
If unavailable, we also recommend the [ODROID C4](https://ameridroid.com/products/odroid-c4?ref=eeb6nfw07e).
If unavailable, we also recommend the [ODROID C4](https://ameridroid.com/products/odroid-c4?ref=eeb6nfw07e) or [ODROID M1](https://ameridroid.com/products/odroid-M1?ref=eeb6nfw07e).
Home Assistant bundles (US market):
The bundles come with Home Assistant pre-installed.
* [ODROID N2+: 2 GB RAM / 16 GB eMMC](https://ameridroid.com/products/odroid-n2-home-assistant-blue-bundle-limited-edition?variant=44748729286935?ref=eeb6nfw07e)
* [ODROID N2+: 4 GB RAM / 64 GB eMMC](https://ameridroid.com/products/odroid-n2-home-assistant-blue-bundle-limited-edition?variant=44748729221399?ref=eeb6nfw07e)
* ODROID M1: 4 GB RAM / 256 GB NVMe / [16 GB &micro;SD](https://ameridroid.com/products/odroid-n2-home-assistant-blue-bundle-limited-edition?variant=44929573028119?ref=eeb6nfw07e) or [16 GB eMMC](https://ameridroid.com/products/odroid-n2-home-assistant-blue-bundle-limited-edition?variant=44994940567831?ref=eeb6nfw07e)
* ODROID M1: 8 GB RAM / 256 GB NVMe / [16 GB &micro;SD](https://ameridroid.com/products/odroid-n2-home-assistant-blue-bundle-limited-edition?variant=44929573093655?ref=eeb6nfw07e) or [16 GB eMMC](https://ameridroid.com/products/odroid-n2-home-assistant-blue-bundle-limited-edition?variant=44994940633367?ref=eeb6nfw07e)
* [ODROID M1: 8 GB RAM / 1 TB NVMe / 64 GB eMMC ](https://ameridroid.com/products/odroid-n2-home-assistant-blue-bundle-limited-edition?variant=44994940698903?ref=eeb6nfw07e)
* ODROID XU4: 2 GB RAM / [32 GB &micro;SD](https://ameridroid.com/products/odroid-n2-home-assistant-blue-bundle-limited-edition?variant=44748729352471?ref=eeb6nfw07e) or [16 GB eMMC](https://ameridroid.com/products/odroid-n2-home-assistant-blue-bundle-limited-edition?variant=44748782305559?ref=eeb6nfw07e)
Variants without pre-installed Home Assistant:
* ODROID N2+, [2 GB RAM](https://ameridroid.com/products/odroid-n2-plus?variant=40371828719650?ref=eeb6nfw07e) or [4 GB RAM](https://ameridroid.com/products/odroid-n2-plus?variant=40371828752418?ref=eeb6nfw07e)
* [ODROID C4](https://ameridroid.com/products/odroid-c4?ref=eeb6nfw07e)
* [ODROID M1](https://ameridroid.com/products/odroid-M1?ref=eeb6nfw07e)
* [Power Supply](https://ameridroid.com/products/12v-2a-power-supply-plug?ref=eeb6nfw07e)
* [CR2032 Coin Cell](https://ameridroid.com/products/rtc-bios-battery?ref=eeb6nfw07e)
* [eMMC Module](https://ameridroid.com/products/emmc-module-n2-linux-red-dot?ref=eeb6nfw07e)
* [Case](https://ameridroid.com/products/odroid-n2-case?ref=eeb6nfw07e)
*These are affiliate links. We get commissions for purchases made through links in this post.*
{% endif %}
@ -146,6 +165,7 @@ _Select and copy the URL or use the "copy" button that appear when you hover it.
![Screenshot of the Etcher software showing the Flash button highlighted.](/images/installation/etcher5.png)
1. When Balena Etcher has finished writing the image, you will see a confirmation.
![Screenshot of the Etcher software showing that the installation has completed.](/images/installation/etcher6.png)
* If you are having issues with Balena Etcher, try version [1.10](https://github.com/balena-io/etcher/releases/tag/v1.10.4).
### Start up your {{site.installation.types[page.installation_type].board}}
@ -215,6 +235,11 @@ After downloading, decompress the image. If the image comes in a ZIP file, for e
Follow this guide if you already are running a supported virtual machine hypervisor. If you are not familiar with virtual machines, we recommend installing Home Assistant OS directly on a [Home Assistant Yellow](/installation/yellow), a [Raspberry Pi](/installation/raspberrypi), or an [ODROID](/installation/odroid).
{% if page.installation_type == 'macos' %}
- If VirtualBox is not supported on your Mac, and you have experience using virtual machines, you can try running the Home Assistant Operating system on [UTM](https://mac.getutm.app/).
{% endif %}
### Create the virtual machine
Load the appliance image into your virtual machine hypervisor. (Note: you are free to assign as many resources as you wish to the VM; please assign enough based on your add-on needs.)


@ -41,6 +41,9 @@
<li>
<a href="/dashboards/">Dashboards</a>
</li>
<li>
<a href="/voice_control/">Voice control</a>
</li>
</ul>
</li>
<li><a href="/integrations/">Integrations</a></li>


@ -20,6 +20,8 @@
{% include asides/docs_navigation.html %}
{% elsif root == 'faq' %}
{% include asides/faq_navigation.html %}
{% elsif root == 'voice_control' %}
{% include asides/voice_navigation.html %}
{% elsif root == 'hassio' or root == 'addons' %}
{% include asides/hassio_navigation.html %}
{% elsif root == 'cloud' %}


@ -16,7 +16,7 @@ Polly is a paid service via Amazon Web Services. There is a [free tier](https:/
## Setup
For more information, please read the [AWS General Reference regarding Security Credentials](https://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html) to get the needed details. Also, check the [boto3 Documentation](https://boto3.readthedocs.io/en/latest/guide/configuration.html#shared-credentials-file) about the profiles and the [AWS Regions and Endpoints Reference](https://docs.aws.amazon.com/general/latest/gr/rande.html#pol_region) for available regions.
For more information, please read the [AWS General Reference regarding Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/security-creds.html) to get the needed details. Also, check the [boto3 Documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#shared-credentials-file) about the profiles and the [AWS Regions and Endpoints Reference](https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints) for available regions.
Available voices are listed in the [Amazon Documentation](https://docs.aws.amazon.com/polly/latest/dg/voicelist.html).


@ -15,7 +15,7 @@ ha_platforms:
- select
---
The Assist pipeline integration provides the foundation for the [Assist](/docs/assist/) voice assistant in Home Assistant.
The Assist pipeline integration provides the foundation for the [Assist](/voice_control/) voice assistant in Home Assistant.
For most users, there is no need to install this integration manually. The Assist pipeline integration is part of the default configuration and is set up automatically if needed by other integrations.
If you are not using the default integration, you need to add the following to your `configuration.yaml` file:
@ -25,4 +25,4 @@ If you are not using the default integration, you need to add the following to y
```yaml
assist_pipeline:
```
For more information, refer to the procedure on [configuring a pipeline](/docs/assist/voice_remote_local_assistant/).
For more information, refer to the procedure on [configuring a pipeline](/voice_control/voice_remote_local_assistant/).


@ -15,7 +15,7 @@ The `aws` integration provides a single place to interact with [Amazon Web Servi
## Setup
You have to have an AWS account to use Amazon Web Services, create one [here](https://aws.amazon.com/free/) with a 12 months free tier benefit. Please note, even in the first 12-months, you may still be billed if you use more resources than offered in the free tier. We advise you to monitor your costs in the [AWS Billing Console](https://console.aws.amazon.com/billing/) closely. You can read the [Control your AWS costs](https://aws.amazon.com/getting-started/tutorials/control-your-costs-free-tier-budgets/) guide for more information.
You have to have an AWS account to use Amazon Web Services; create one [here](https://aws.amazon.com/free/) with a 12-month free tier benefit. Please note that even in the first 12 months, you may still be billed if you use more resources than offered in the free tier. We advise you to monitor your costs in the [AWS Billing Console](https://console.aws.amazon.com/billing/) closely. You can read the [Control your AWS costs](https://aws.amazon.com/getting-started/hands-on/control-your-costs-free-tier-budgets/) guide for more information.
The `lambda`, `sns`, `sqs`, and `events` services, used in the `aws` component, all provide an **Always Free** tier for all users even after the 12-month period. The general usage in Home Automation will most likely not reach the free tier limit. Please read [Lambda Pricing](https://aws.amazon.com/lambda/pricing/), [SNS Pricing](https://aws.amazon.com/sns/pricing/), [SQS Pricing](https://aws.amazon.com/sqs/pricing/), and [EventBridge Pricing](https://aws.amazon.com/eventbridge/pricing/) for more details.
@ -104,7 +104,7 @@ context:
## Lambda Notify Usage
AWS Lambda is a notification platform and thus can be controlled by calling the `notify` service [as described here](/integrations/notify/). It will invoke a Lambda for all targets given in the notification payload. A target can be formatted as a function name, an entire ARN ([Amazon Resource Name](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html)) or a partial ARN. For more information, please see the [botocore documentation](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/services/lambda.html#Lambda.Client.invoke).
AWS Lambda is a notification platform and thus can be controlled by calling the `notify` service [as described here](/integrations/notify/). It will invoke a Lambda for all targets given in the notification payload. A target can be formatted as a function name, an entire ARN ([Amazon Resource Name](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference-arns.html)) or a partial ARN. For more information, please see the [botocore documentation](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/services/lambda/client/invoke.html).
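As a hedged sketch of such a call (the notify service name depends on your `aws` configuration, and the ARN is a placeholder):

```yaml
service: notify.aws_lambda  # placeholder: actual name comes from your aws notify config
data:
  message: "Motion detected in the garden"
  target: arn:aws:lambda:us-east-1:123456789012:function:ha_events  # placeholder ARN
```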
The Lambda event payload will contain everything passed in the service call payload. Here is an example payload that would be sent to Lambda:
@ -132,7 +132,7 @@ The context will look like this:
## SNS Notify Usage
AWS SNS is a notification platform and thus can be controlled by calling the `notify` service [as described here](/integrations/notify/). It will publish a message to all targets given in the notification payload. A target must be a SNS topic or endpoint ARN ([Amazon Resource Name](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html)). For more information, please see the [botocore documentation](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/services/sns.html#SNS.Client.publish).
AWS SNS is a notification platform and thus can be controlled by calling the `notify` service [as described here](/integrations/notify/). It will publish a message to all targets given in the notification payload. A target must be a SNS topic or endpoint ARN ([Amazon Resource Name](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference-arns.html)). For more information, please see the [botocore documentation](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/services/sns/client/publish.html).
If one exists, the SNS Subject will be set to the title. All attributes from the payload, except the message, will be sent as stringified message attributes.
@ -158,7 +158,7 @@ If you do not download them, you will lose them and will have to recreate a new
## SQS Notify Usage
AWS SQS is a notification platform and thus can be controlled by calling the `notify` service [as described here](/integrations/notify/). It will publish a message to the queue for all targets given in the notification payload. A target must be a SQS topic URL. For more information, please see the [SQS documentation](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ImportantIdentifiers.html) and [bototcore documentation](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/services/sqs.html#SQS.Client.send_message)
AWS SQS is a notification platform and thus can be controlled by calling the `notify` service [as described here](/integrations/notify/). It will publish a message to the queue for all targets given in the notification payload. A target must be a SQS topic URL. For more information, please see the [SQS documentation](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-queue-message-identifiers.html) and [botocore documentation](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/services/sqs/client/send_message.html)
The SQS event payload will contain everything passed in the service call payload. SQS payloads will be published as stringified JSON. All attributes from the payload, except message, will also be sent as stringified message attributes. Here is an example message that would be published to the SQS queue:
@ -174,7 +174,7 @@ The SQS event payload will contain everything passed in the service call payload
```
## EventBridge Notify Usage
AWS EventBridge is a notification platform and thus can be controlled by calling the `notify` service [as described here](/integrations/notify/). It will publish a message to the event bus for all targets given in the notification payload. A target must be a name of an event bus accessible by the given credentials. A target is not required, and the default event bus will be used if none are specified. For more information, please see the [EventBridge documentation](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-event-bus.html) and [bototcore documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/events.html#EventBridge.Client.put_events)
AWS EventBridge is a notification platform and thus can be controlled by calling the `notify` service [as described here](/integrations/notify/). It will publish a message to the event bus for all targets given in the notification payload. A target must be a name of an event bus accessible by the given credentials. A target is not required, and the default event bus will be used if none are specified. For more information, please see the [EventBridge documentation](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-event-bus.html) and [botocore documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/events/client/put_events.html)
There are two options for generating the event detail based on the service call payload. If the `detail` attribute is specified, then its value will be serialized as a JSON object and used for the event detail. If the attribute is not specified, then the value of the `message` attribute is serialized as a simple JSON object with a single key named `message` and the value of the message supplied to the service call.


@ -12,15 +12,15 @@ ha_config_flow: true
ha_integration_type: integration
---
The `Azure Event Hub` integration allows you to hook into the Home Assistant event bus and send events to [Azure Event Hub](https://azure.microsoft.com/en-us/services/event-hubs/) or to an [Azure IoT Hub](https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-devguide-messages-read-builtin).
The `Azure Event Hub` integration allows you to hook into the Home Assistant event bus and send events to [Azure Event Hub](https://azure.microsoft.com/products/event-hubs/) or to an [Azure IoT Hub](https://learn.microsoft.com/azure/iot-hub/iot-hub-devguide-messages-read-builtin).
## First time setup
This assumes you already have an Azure account. Otherwise create a Free account [here](https://azure.microsoft.com/en-us/free/).
This assumes you already have an Azure account. Otherwise create a Free account [here](https://azure.microsoft.com/free/).
You need to create an Event Hub namespace and an Event Hub in that namespace, you can follow [this guide](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-create). Alternatively you can directly deploy an ARM template with the namespace and the Event Hub [from here](https://github.com/Azure/azure-quickstart-templates/tree/master/quickstarts/microsoft.eventhub/event-hubs-create-event-hub-and-consumer-group).
You need to create an Event Hub namespace and an Event Hub in that namespace; you can follow [this guide](https://learn.microsoft.com/azure/event-hubs/event-hubs-create). Alternatively, you can directly deploy an ARM template with the namespace and the Event Hub [from here](https://github.com/Azure/azure-quickstart-templates/tree/master/quickstarts/microsoft.eventhub/event-hubs-create-event-hub-and-consumer-group).
You must then create a Shared Access Policy for the Event Hub with 'Send' claims or use the RootManageAccessKey from your namespace (this key has additional claims, including managing the event hub and listening, which are not needed for this purpose), for more details on the security of Event Hubs [go here](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-authentication-and-security-model-overview).
You must then create a Shared Access Policy for the Event Hub with 'Send' claims or use the RootManageAccessKey from your namespace (this key has additional claims, including managing the event hub and listening, which are not needed for this purpose), for more details on the security of Event Hubs [go here](https://learn.microsoft.com/azure/event-hubs/authenticate-shared-access-signature).
Once you have the name of your namespace, instance, Shared Access Policy and the key for that policy, you can setup the integration itself.
@ -94,10 +94,10 @@ filter:
## Using the data in Azure
There are a number of ways to stream the data that comes into the Event Hub into storages in Azure, the easiest way is to use the built-in Capture function and this allows you to capture the data in Azure Blob Storage or Azure Data Lake store, [details here](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-capture-overview).
There are a number of ways to stream the data that comes into the Event Hub into storage in Azure; the easiest is to use the built-in Capture function, which lets you capture the data in Azure Blob Storage or Azure Data Lake Store, [details here](https://learn.microsoft.com/azure/event-hubs/event-hubs-capture-overview).
Other storages in Azure (and outside) are possible with an [Azure Stream Analytics job](https://docs.microsoft.com/en-us/azure/stream-analytics/stream-analytics-define-inputs#stream-data-from-event-hubs), for instance for [Cosmos DB](https://docs.microsoft.com/en-us/azure/stream-analytics/stream-analytics-documentdb-output), [Azure SQL DB](https://docs.microsoft.com/en-us/azure/stream-analytics/stream-analytics-sql-output-perf), [Azure Table Storage](https://docs.microsoft.com/en-us/azure/stream-analytics/stream-analytics-define-outputs#table-storage), custom writing to [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/stream-analytics/stream-analytics-custom-path-patterns-blob-storage-output) and [Topic and Queues](https://docs.microsoft.com/en-us/azure/stream-analytics/stream-analytics-quick-create-portal#configure-job-output).
Other storage options in Azure (and outside) are possible with an [Azure Stream Analytics job](https://learn.microsoft.com/azure/stream-analytics/stream-analytics-define-inputs#stream-data-from-event-hubs), for instance for [Cosmos DB](https://learn.microsoft.com/azure/stream-analytics/stream-analytics-documentdb-output), [Azure SQL DB](https://learn.microsoft.com/azure/stream-analytics/stream-analytics-sql-output-perf), [Azure Table Storage](https://learn.microsoft.com/azure/stream-analytics/stream-analytics-define-outputs), custom writing to [Azure Blob Storage](https://learn.microsoft.com/azure/stream-analytics/stream-analytics-custom-path-patterns-blob-storage-output) and [Topics and Queues](https://learn.microsoft.com/azure/stream-analytics/stream-analytics-quick-create-portal#configure-job-output).
On the analytical side, Event Hub can be directly fed into [Azure Databricks Spark](https://docs.microsoft.com/en-us/azure/azure-databricks/databricks-stream-from-eventhubs?toc=https%3A%2F%2Fdocs.microsoft.com%2Fen-us%2Fazure%2Fevent-hubs%2FTOC.json&bc=https%3A%2F%2Fdocs.microsoft.com%2Fen-us%2Fazure%2Fbread%2Ftoc.json), [Azure Time Series Insights](https://docs.microsoft.com/en-us/azure/time-series-insights/time-series-insights-how-to-add-an-event-source-eventhub) and [Microsoft Power BI](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-tutorial-visualize-anomalies).
On the analytical side, Event Hub can be directly fed into [Azure Databricks Spark](https://learn.microsoft.com/azure/databricks/structured-streaming/streaming-event-hubs), [Azure Time Series Insights](https://learn.microsoft.com/azure/time-series-insights/how-to-ingest-data-event-hub) and [Microsoft Power BI](https://learn.microsoft.com/azure/stream-analytics/stream-analytics-real-time-fraud-detection).
The final way to use the data in Azure is to connect an Azure Function to the Event Hub using the [Event Hub trigger binding](https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-event-hubs).
The final way to use the data in Azure is to connect an Azure Function to the Event Hub using the [Event Hub trigger binding](https://learn.microsoft.com/azure/azure-functions/functions-bindings-event-hubs).


@ -13,15 +13,15 @@ ha_platforms:
ha_integration_type: integration
---
The `Azure Service Bus` integration allows you to send messages to [Azure Service Bus](https://azure.microsoft.com/en-us/services/service-bus/) from within Home Assistant.
The `Azure Service Bus` integration allows you to send messages to [Azure Service Bus](https://azure.microsoft.com/products/service-bus/) from within Home Assistant.
## First-time setup
This assumes you already have an Azure account. Otherwise, create a free account [here](https://azure.microsoft.com/en-us/free/).
This assumes you already have an Azure account. Otherwise, create a free account [here](https://azure.microsoft.com/free/).
You need to create a Service Bus namespace; you can follow [this guide](https://docs.microsoft.com/en-us/azure/service-bus-messaging/service-bus-create-namespace-portal).
You need to create a Service Bus namespace; you can follow [this guide](https://learn.microsoft.com/azure/service-bus-messaging/service-bus-quickstart-portal#create-a-namespace-in-the-azure-portal).
You must then create a Shared Access Policy for the Service Bus with `Send` claims or use the RootManageAccessKey from your namespace (this key has additional claims, including managing the event hub and listening, which are not needed for this purpose), for more details on the security of Service Bus [go here](https://docs.microsoft.com/en-us/azure/service-bus-messaging/service-bus-authentication-and-authorization#shared-access-signature). Alternatively you can create a dedicated key for only one queue or topic, to restrict access to only that queue or topic.
You must then create a Shared Access Policy for the Service Bus with `Send` claims or use the RootManageAccessKey from your namespace (this key has additional claims, including managing the event hub and listening, which are not needed for this purpose), for more details on the security of Service Bus [go here](https://learn.microsoft.com/azure/service-bus-messaging/service-bus-authentication-and-authorization#shared-access-signature). Alternatively you can create a dedicated key for only one queue or topic, to restrict access to only that queue or topic.
Once you have the connection string with `Send` policy, you can set up the integration itself.


@ -69,7 +69,7 @@ availability_topic:
required: false
type: string
device:
description: "Information about the device this binary sensor is a part of to tie it into the [device registry](https://developers.home-assistant.io/docs/en/device_registry_index.html). Only works through [MQTT discovery](/integrations/mqtt/#mqtt-discovery) and when [`unique_id`](#unique_id) is set. At least one of identifiers or connections must be present to identify the device."
description: "Information about the device this binary sensor is a part of to tie it into the [device registry](https://developers.home-assistant.io/docs/device_registry_index/). Only works through [MQTT discovery](/integrations/mqtt/#mqtt-discovery) and when [`unique_id`](#unique_id) is set. At least one of identifiers or connections must be present to identify the device."
required: false
type: map
keys:
@ -129,7 +129,7 @@ encoding:
type: string
default: "utf-8"
entity_category:
description: The [category](https://developers.home-assistant.io/docs/core/entity#generic-properties) of the entity.
description: The [category](https://developers.home-assistant.io/docs/core/entity/#generic-properties) of the entity.
required: false
type: string
default: None
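Putting the `device` and `entity_category` options above together, a hedged sketch of a manually configured MQTT binary sensor; the topics and identifiers are placeholders:

```yaml
mqtt:
  binary_sensor:
    - name: "Garden motion"
      state_topic: "garden/motion"   # placeholder topic
      unique_id: garden_motion_01    # required for the device mapping to apply
      entity_category: diagnostic
      device:
        identifiers:
          - garden-node-01           # placeholder identifier
        name: "Garden node"
```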


@ -629,25 +629,36 @@ Learning RF Frequency, press and hold the button to learn...
Press and hold a button on the remote.
You will know it succeeded when you see the following text:
```txt
Found RF Frequency - 1 of 2!
You can now let go of the button
Press enter to continue...
```
Press enter.
If the attempt fails, you will see the error:
```txt
To complete learning, single press the button you want to learn
RF Frequency not found
```
If a failure occurs, you may need to simply keep pressing the button during the `Learning RF Frequency` step, as some remotes appear to not continuously transmit when buttons are held.
Short press the button and you get the code:
After a success, do one of the following:
```txt
Found RF Frequency - 2 of 2!
b2002c0111211011211121112111212110112122101121112111202210211121112110221011211121112121102210112121111021112221101121211100017b10211111211121102111212210112121111121102111212210211121102210211111211121102122102111112121101121112122101121211000017c10211111211022102111212210112121111022102112202210211121102210221011211022102122102210112121101122102122101121211100017b10211111211121102210212210112122101121102210212210221021112110221011211121112121102210112121111121102122101121221000017b1121101121112111211121211110212210112111211121211121102210211121101121112111212111211011222110112111212111112121100005dc000000000000000000000000
Base64: b'sgAsAREhEBEhESERIREhIRARISIQESERIREgIhAhESERIRAiEBEhESERISEQIhARISERECERIiEQESEhEQABexAhEREhESEQIREhIhARISERESEQIREhIhAhESEQIhAhEREhESEQISIQIRERISEQESERISIQESEhEAABfBAhEREhECIQIREhIhARISERECIQIRIgIhAhESEQIhAiEBEhECIQISIQIhARISEQESIQISIQESEhEQABexAhEREhESEQIhAhIhARISIQESEQIhAhIhAiECERIRAiEBEhESERISEQIhARISERESEQISIQESEiEAABexEhEBEhESERIREhIREQISIQESERIREhIREhECIQIREhEBEhESERISERIRARIiEQESERISERESEhEAAF3AAAAAAAAAAAAAAAAA=='
```
1. To learn a single button press RF code, press enter and follow the prompt:
```txt
To complete learning, single press the button you want to learn
```
Short press the button and you get the code:
```txt
Found RF Frequency - 2 of 2!
b2002c0111211011211121112111212110112122101121112111202210211121112110221011211121112121102210112121111021112221101121211100017b10211111211121102111212210112121111121102111212210211121102210211111211121102122102111112121101121112122101121211000017c10211111211022102111212210112121111022102112202210211121102210221011211022102122102210112121101122102122101121211100017b10211111211121102210212210112122101121102210212210221021112110221011211121112121102210112121111121102122101121221000017b1121101121112111211121211110212210112111211121211121102210211121101121112111212111211011222110112111212111112121100005dc000000000000000000000000
Base64: b'sgAsAREhEBEhESERIREhIRARISIQESERIREgIhAhESERIRAiEBEhESERISEQIhARISERECERIiEQESEhEQABexAhEREhESEQIREhIhARISERESEQIREhIhAhESEQIhAhEREhESEQISIQIRERISEQESERISIQESEhEAABfBAhEREhECIQIREhIhARISERECIQIRIgIhAhESEQIhAiEBEhECIQISIQIhARISEQESIQISIQESEhEQABexAhEREhESEQIhAhIhARISIQESEQIhAhIhAiECERIRAiEBEhESERISEQIhARISERESEQISIQESEiEAABexEhEBEhESERIREhIREQISIQESERIREhIREhECIQIREhEBEhESERISERIRARIiEQESERISERESEhEAAF3AAAAAAAAAAAAAAAAA=='
```
2. To learn a button hold RF code, hold the button you wish to learn for 1-2 seconds then immediately press enter.
* You will see the same prompts for a short press as shown above. You should see it return a different base64 code.
* Test the base64 code to ensure it performs the button 'hold' command as expected, rather than the button 'press' command.
* This might take some trial and error to get the hold timing right before hitting enter to scan for the code.
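Once a code is learned, it can be replayed through the remote entity. A hedged sketch follows; the entity ID is a placeholder and the base64 code is truncated for display, so use your full learned code:

```yaml
service: remote.send_command
target:
  entity_id: remote.broadlink_remote  # placeholder entity
data:
  command: "b64:sgAsAREhEBEhESERIRE..."  # truncated placeholder code
```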
### Conversion of codes from other projects


@ -23,7 +23,7 @@ The setup requires an API Token created with `Zone:Zone:Read` and `Zone:DNS:Edit
An easy way to create this is to start with the "Edit zone DNS" template then add `Zone:Zone:Read` to the permissions.
[Cloudflare API Tokens Guide](https://developers.cloudflare.com/api/tokens/create)
[Cloudflare API Tokens Guide](https://developers.cloudflare.com/fundamentals/api/get-started/create-token/)
{% include integrations/config_flow.md %}


@ -10,11 +10,11 @@ ha_domain: dialogflow
ha_integration_type: integration
---
The `dialogflow` integration is designed to be used with the [webhook](https://dialogflow.com/docs/fulfillment#webhook) integration of [Dialogflow](https://dialogflow.com/). When a conversation ends with a user, Dialogflow sends an action and parameters to the webhook.
The `dialogflow` integration is designed to be used with the [webhook](https://cloud.google.com/dialogflow/es/docs/fulfillment-webhook) integration of [Dialogflow](https://cloud.google.com/dialogflow/docs/). When a conversation ends with a user, Dialogflow sends an action and parameters to the webhook.
To be able to receive messages from Dialogflow, your Home Assistant instance needs to be accessible from the web and you need to have the external URL [configured](/docs/configuration/basic). Dialogflow will return fallback answers if your server does not answer or takes too long (more than 5 seconds).
Dialogflow could be [integrated](https://dialogflow.com/docs/integrations/) with many popular messaging, virtual assistant and IoT platforms.
Dialogflow could be [integrated](https://cloud.google.com/dialogflow/es/docs/integrations) with many popular messaging, virtual assistant and IoT platforms.
With Dialogflow, it is easy to create conversations like:
@ -38,7 +38,7 @@ To get the webhook URL, go to the integrations page in the configuration screen
- [Login](https://console.dialogflow.com/) with your Google account.
- Click on "Create Agent".
- Select name, language (if you are planning to use Google Actions check their [supported languages](https://support.google.com/assistant/answer/7108196?hl=en)) and time zone.
- Select name, language (if you are planning to use Google Actions check their [supported languages](https://support.google.com/assistant/answer/7108196)) and time zone.
- Click "Save".
- Now go to "Fulfillment" (in the left menu).
- Enable Webhook and set your Dialogflow webhook URL as the endpoint, e.g., `https://myhome.duckdns.org/api/webhook/800b4cb4d27d078a8871656a90854a292651b20635685f8ea23ddb7a09e8b417`
@ -66,7 +66,7 @@ When activated, the [`alexa` integration](/integrations/alexa/) will have Home A
## Examples
Download [this zip](https://github.com/home-assistant/home-assistant.io/blob/next/source/assets/HomeAssistant_APIAI.zip) and load it in your Dialogflow agent (**Settings** -> **Export and Import**) for examples intents to use with this configuration:
Download [this zip](https://github.com/home-assistant/home-assistant.io/blob/current/source/assets/HomeAssistant_APIAI.zip) and load it in your Dialogflow agent (**Settings** -> **Export and Import**) for example intents to use with this configuration:
{% raw %}


@ -15,7 +15,7 @@ ha_integration_type: integration
---
The FAA Delays integration collects and displays information about delays at US Airports based on the
[FAA's National Airspace System Status](https://www.fly.faa.gov/ois/).
[FAA's National Airspace System Status](https://nasstatus.faa.gov/).
Data measured includes:


@ -25,7 +25,7 @@ notify:
{% configuration %}
page_access_token:
description: "Access token for your Facebook page. Checkout [Facebook Messenger Platform](https://developers.facebook.com/docs/messenger-platform/guides/setup) for more information."
description: "Access token for your Facebook page. Checkout [Facebook Messenger Platform](https://developers.facebook.com/docs/messenger-platform/webhooks) for more information."
required: true
type: string
name:
@ -37,8 +37,8 @@ name:
### Usage
With Facebook notify service, you can send your notifications to your Facebook messenger with help of your Facebook page. You have to create a [Facebook Page and App](https://developers.facebook.com/docs/messenger-platform/guides/quick-start) for this service. You can control it by calling the notify service [as described here](/integrations/notify/). It will send a message on messenger to user specified by **target** on behalf of your page. See the [quick start](https://developers.facebook.com/docs/messenger-platform/guides/quick-start) guide for more information.
The phone number used in **target** should be registered with Facebook messenger. Phone number of the recipient should be in +1(212)555-2368 format. If your app is not approved by Facebook then the recipient should by either admin, developer or tester for your Facebook app. [More information](https://developers.facebook.com/docs/messenger-platform/send-api-reference#phone_number) about the phone number.
With the Facebook notify service, you can send notifications to Facebook Messenger with the help of your Facebook page. You have to create a [Facebook Page and App](https://developers.facebook.com/docs/messenger-platform/getting-started/quick-start) for this service. You can control it by calling the notify service [as described here](/integrations/notify/). It will send a message on Messenger to the user specified by **target** on behalf of your page. See the [quick start](https://developers.facebook.com/docs/messenger-platform/getting-started/quick-start) guide for more information.
The phone number used in **target** should be registered with Facebook Messenger. The recipient's phone number should be in +1(212)555-2368 format. If your app is not approved by Facebook, the recipient must be either an admin, developer, or tester for your Facebook app. [More information](https://developers.facebook.com/docs/messenger-platform/reference/send-api#phone_number) about the phone number.
```yaml
# Example automation notification entry
@ -102,7 +102,7 @@ if (preg_match('/get my id/', strtolower($message))) {
```
### Rich messages
You could also send rich messing (cards, buttons, images, videos, etc). [Info](https://developers.facebook.com/docs/messenger-platform/send-api-reference) to which types of messages and how to build them.
You can also send rich messages (cards, buttons, images, videos, etc.). See the [Send API reference](https://developers.facebook.com/docs/messenger-platform/reference/send-api) for the supported message types and how to build them.
```yaml
# Example script with a notification entry with a rich message


@ -89,6 +89,11 @@ automation:
Any field under the `<entry>` tag in the feed can be used; for example, `trigger.event.data.content` will get the body of the feed entry.
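Building on that, a hedged sketch of an automation that surfaces each new entry as a persistent notification; which fields are available depends on the feed:

{% raw %}

```yaml
automation:
  - alias: "New feed entry"
    trigger:
      - platform: event
        event_type: feedreader
    action:
      - service: persistent_notification.create
        data:
          title: "{{ trigger.event.data.title }}"
          message: "{{ trigger.event.data.content }}"
```

{% endraw %}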
### Video Tutorial
This video tutorial explains how to set up the feedreader and show the latest news feed item on your dashboard in Home Assistant.
<lite-youtube videoid="Va4JOKbesi0" videotitle="How to view RSS feeds on your Dashboard in Home Assistant" posterquality="maxresdefault"></lite-youtube>
For more advanced use cases, a custom integration registering to the `feedreader` event type could be used instead:
```python
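# A minimal sketch of a custom integration reacting to feedreader events.
# The component name and the notify call are illustrative assumptions.
EVENT_FEEDREADER = "feedreader"
DOMAIN = "feedreader_handler"


def setup(hass, config):
    """Set up the feedreader event listener."""

    def event_listener(event):
        """Forward the title of each new feed entry as a notification."""
        title = event.data.get("title")
        if title:
            hass.services.call("notify", "notify", {"message": title})

    hass.bus.listen(EVENT_FEEDREADER, event_listener)
    return True
```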

View File

@ -104,7 +104,7 @@ The following attributes are available:
With Automation you can configure one or more of the following useful actions:
1. Sound an alarm and/or switch on lights when an emergency incident is received.
1. Use text to speech to play incident details via a media player while getting dressed.
1. Use text-to-speech to play incident details via a media player while getting dressed.
1. Send a response acknowledgment using a door sensor when leaving the house, or by pressing a button, to let your teammates know you are on your way.
1. Cast a FireServiceRota dashboard to a Chromecast device (this requires a Nabu Casa subscription).

View File

@ -271,21 +271,23 @@ Currently, the following domains are available to be used with Google Assistant,
- alarm_control_panel (arm/disarm)
- button (scene)
- camera (streaming, requires compatible camera)
- group (on/off)
- input_boolean (on/off)
- input_select (option/setting/mode/value)
- scene (on)
- script (on)
- switch (on/off)
- climate (temperature setting, hvac_mode)
- cover (on/off/set position)
- fan (on/off/speed percentage/preset mode)
- group (on/off)
- humidifier (humidity setting/on/off/mode)
- input_boolean (on/off)
- input_button
- input_select (option/setting/mode/value)
- light (on/off/brightness/rgb color/color temp)
- lock
- cover (on/off/set position)
- media_player (on/off/set volume (via set volume)/source (via set input source)/control playback)
- climate (temperature setting, hvac_mode)
- vacuum (dock/start/stop/pause)
- scene (on)
- script (on)
- select
- sensor (temperature setting for temperature sensors and humidity setting for humidity sensors)
- humidifier (humidity setting/on/off/mode)
- switch (on/off)
- vacuum (dock/start/stop/pause)
<div class='note'>

View File

@ -30,8 +30,8 @@ tts:
API key obtaining process described in corresponding documentation:
* [Text-to-Speech](https://cloud.google.com/text-to-speech/docs/quickstart-protocol)
* [Speech-to-Text](https://cloud.google.com/speech-to-text/docs/quickstart-protocol)
* [Text-to-speech](https://cloud.google.com/text-to-speech/docs/quickstart-protocol)
* [Speech-to-text](https://cloud.google.com/speech-to-text/docs/quickstart-protocol)
* [Geocoding](https://developers.google.com/maps/documentation/geocoding/start)
Basic instruction for all APIs:
@ -42,36 +42,36 @@ Basic instruction for all APIs:
4. [Make sure that billing is enabled for your Google Cloud Platform project](https://cloud.google.com/billing/docs/how-to/modify-project).
5. Enable needed Cloud API visiting one of the links below or [APIs library](https://console.cloud.google.com/apis/library), selecting your `Project` from the dropdown list and clicking the `Continue` button:
* [Text-to-Speech](https://console.cloud.google.com/flows/enableapi?apiid=texttospeech.googleapis.com)
* [Speech-to-Text](https://console.cloud.google.com/flows/enableapi?apiid=speech.googleapis.com)
* [Geocoding](https://console.cloud.google.com/flows/enableapi?apiid=geocoding-backend.googleapis.com)
* [Text-to-speech](https://console.cloud.google.com/flows/enableapi?apiid=texttospeech.googleapis.com)
* [Speech-to-text](https://console.cloud.google.com/flows/enableapi?apiid=speech.googleapis.com)
* [Geocoding](https://console.cloud.google.com/flows/enableapi?apiid=geocoding-backend.googleapis.com)
6. Set up authentication:
1. Visit [this link](https://console.cloud.google.com/apis/credentials/serviceaccountkey)
2. From the `Service account` list, select `New service account`.
3. In the `Service account name` field, enter any name.
If you are requesting Text-to-Speech API key:
If you are requesting a text-to-speech API key:
4. Don't select a value from the Role list. **No role is required to access this service**.
5. Click `Create`. A note appears, warning that this service account has no role.
6. Click `Create without role`. A JSON file that contains your `API key` downloads to your computer.
## Google Cloud Text-to-Speech
## Google Cloud text-to-speech
[Google Cloud Text-to-Speech](https://cloud.google.com/text-to-speech/) converts text into human-like speech in more than 100 voices across 20+ languages and variants. It applies groundbreaking research in speech synthesis (WaveNet) and Google's powerful neural networks to deliver high-fidelity audio. With this easy-to-use API, you can create lifelike interactions with your users that transform customer service, device interaction, and other applications.
[Google Cloud text-to-speech](https://cloud.google.com/text-to-speech/) converts text into human-like speech in more than 100 voices across 20+ languages and variants. It applies groundbreaking research in speech synthesis (WaveNet) and Google's powerful neural networks to deliver high-fidelity audio. With this easy-to-use API, you can create lifelike interactions with your users that transform customer service, device interaction, and other applications.
### Pricing
The Cloud Text-to-Speech API is priced monthly based on the amount of characters to synthesize into audio sent to the service.
The Cloud text-to-speech API is priced monthly based on the number of characters sent to the service to be synthesized into audio.
| Feature | Monthly free tier | Paid usage |
|-------------------------------|---------------------------|-----------------------------------|
| Standard (non-WaveNet) voices | 0 to 4 million characters | $4.00 USD / 1 million characters |
| WaveNet voices | 0 to 1 million characters | $16.00 USD / 1 million characters |
### Text-to-Speech configuration
### Text-to-speech configuration
{% configuration %}
key_file:
@ -113,7 +113,7 @@ gain:
type: float
default: 0.0
profiles:
description: "An identifier which selects 'audio effects' profiles that are applied on (post synthesized) text to speech. Effects are applied on top of each other in the order they are given. Supported profile ids listed [here](https://cloud.google.com/text-to-speech/docs/audio-profiles)."
description: "An identifier which selects 'audio effects' profiles that are applied on (post synthesized) text-to-speech. Effects are applied on top of each other in the order they are given. Supported profile ids listed [here](https://cloud.google.com/text-to-speech/docs/audio-profiles)."
required: false
type: list
default: "[]"
@ -126,7 +126,7 @@ text_type:
### Full configuration example
The Google Cloud Text-to-Speech configuration can look like:
The Google Cloud text-to-speech configuration can look like:
```yaml
# Example configuration.yaml entry
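# A minimal sketch: key_file is assumed to be the service account JSON
# placed in your Home Assistant configuration directory.
tts:
  - platform: google_cloud
    key_file: googlecloud.json
    language: en-US
```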

View File

@ -72,6 +72,11 @@ The integration setup will next give you instructions to enter the [Application
If you have an error with your credentials you can delete them in the [Application Credentials](/integrations/application_credentials/) user interface.
### Video Tutorial
This video tutorial explains how to set up the Google Sheets integration and how you can add data from Home Assistant to a Google Sheet.
<lite-youtube videoid="hgGMgoxLYwo" videotitle="How to use Google Sheets in Home Assistant - TUTORIAL" posterquality="maxresdefault"></lite-youtube>
### Service `google_sheets.append_sheet`
You can use the service `google_sheets.append_sheet` to add a row of data to the Sheets document created at setup.
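As a sketch (the config entry ID, worksheet name, and row fields are illustrative), a call could look like this:

```yaml
service: google_sheets.append_sheet
data:
  config_entry: YOUR_CONFIG_ENTRY_ID
  worksheet: Sheet1
  data:
    sensor: living_room_temperature
    value: 21.5
```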

View File

@ -1,6 +1,6 @@
---
title: Google Translate Text-to-Speech
description: Instructions on how to setup Google Translate Text-to-Speech with Home Assistant.
title: Google Translate text-to-speech
description: Instructions on how to set up Google Translate text-to-speech with Home Assistant.
ha_category:
- Text-to-speech
ha_release: 0.35
@ -11,7 +11,7 @@ ha_platforms:
ha_integration_type: integration
---
The `google_translate` text-to-speech platform uses the unofficial [Google Translate Text-to-Speech engine](https://translate.google.com/) to read a text with natural sounding voices. Contrary to what the name suggests, the integration only does text-to-speech and does not translate messages sent to it.
The `google_translate` text-to-speech platform uses the unofficial [Google Translate text-to-speech engine](https://translate.google.com/) to read a text with natural-sounding voices. Contrary to what the name suggests, the integration only does text-to-speech and does not translate messages sent to it.
## Configuration

View File

@ -318,7 +318,7 @@ The HomeKit Accessory Protocol Specification only allows a maximum of 150 unique
### Multiple HomeKit instances
If you create a HomeKit integration via the UI (i.e., **Settings** -> **Devices & Services**), it must be configured via the UI **only**. While the UI only offers limited configuration options at the moment, any attempt to configure a HomeKit instance created in the UI via the `configuration.yaml` file will result in another instance of HomeKit running on a different port.
If you create a HomeKit integration via the UI (i.e., **Settings** > **Devices & Services**), it must be configured via the UI **only**. While the UI only offers limited configuration options at the moment, any attempt to configure a HomeKit instance created in the UI via the `configuration.yaml` file will result in another instance of HomeKit running on a different port.
It is recommended to only edit a HomeKit instance in the UI that was created in the UI, and likewise, only edit a HomeKit instance in YAML that was created in YAML.
@ -328,7 +328,7 @@ When exposing a Camera, Activity based remote (a `remote` that supports activiti
To quickly add all accessory mode entities in the UI:
1. Create a new bridge via the UI (i.e., **{% my config_flow_start title="Settings >> Devices & Services" domain=page.ha_domain %}**).
1. Create a new bridge via the UI (i.e., **{% my config_flow_start title="Settings > Devices & Services" domain=page.ha_domain %}**).
2. Select `media_player`, `remote`, `lock`, and `camera` domains.
3. Complete the flow as normal.
4. Additional HomeKit entries will be created for each entity that must operate in accessory mode and does not already have one.
@ -337,7 +337,7 @@ To quickly add all accessory mode entities in the UI:
To add a single entity in accessory mode:
1. Create a new bridge via the UI (i.e., **{% my config_flow_start title="Settings >> Devices & Services" domain=page.ha_domain %}**)
1. Create a new bridge via the UI (i.e., **{% my config_flow_start title="Settings > Devices & Services" domain=page.ha_domain %}**)
2. Before pairing the bridge, access the options for the bridge.
3. Change the mode to `accessory`
4. Select the entity.
@ -424,7 +424,7 @@ The following integrations are currently supported:
# Device Triggers
Devices that support triggers can be added to the bridge by accessing options for the bridge in **{% my integrations title="Settings >> Devices & Services" %}**. To use this feature, Advanced Mode must be enabled in your user profile.
Devices that support triggers can be added to the bridge by accessing options for the bridge in **{% my integrations title="Settings > Devices & Services" %}**. To use this feature, Advanced Mode must be enabled in your user profile.
Bridged device triggers are represented as a single press button on stateless programmable switches. This allows a HomeKit automation to run when a device trigger fires. Because the Apple Home app currently only shows the number of the button and not the name, users may find it easier to identify the name of the button in the `Eve for HomeKit` app.
@ -549,7 +549,7 @@ Remember that the iOS device needs to be in the same local network as the Home A
#### `Home Assistant Bridge` doesn't appear in the Home App (for pairing) - Docker
Set `network_mode: host` in your `docker-compose.yaml`. If you have further problems this [issue](https://github.com/home-assistant/home-assistant/issues/15692) might help.
Set `network_mode: host` in your `docker-compose.yaml`. If you have further problems this [issue](https://github.com/home-assistant/core/issues/15692) might help.
You can also try to use `avahi-daemon` in reflector mode together with the option `advertise_ip`, see above.
@ -592,7 +592,7 @@ To use the HomeKit integration with multiple different Home Assistant instances
#### Specific entity doesn't work
Although we try our best, some entities don't work with the HomeKit integration yet. The result will be that either pairing fails completely or all Home Assistant accessories will stop working. Use the filter to identify which entity is causing the issue. It's best to try pairing and step by step including more entities. If it works, unpair and repeat until you find the one that is causing the issues. To help others and the developers, please open a new issue here: [home-assistant/issues/new](https://github.com/home-assistant/home-assistant/issues/new?labels=component:%20homekit)
Although we try our best, some entities don't work with the HomeKit integration yet. The result will be that either pairing fails completely or all Home Assistant accessories will stop working. Use the filter to identify which entity is causing the issue. It's best to start pairing with a few entities and include more step by step. If it works, unpair and repeat until you find the one that is causing the issues. To help others and the developers, please open a new issue here: [core/issues/new](https://github.com/home-assistant/core/issues/new)
If you have any iOS 12.x devices signed into your iCloud account, media player entities with `device_class: tv` may trigger this condition. Filtering the entity or signing the iOS 12.x device out of iCloud should resolve the issue after restarting other devices.
@ -626,7 +626,7 @@ Ensure that the [`ffmpeg`](/integrations/ffmpeg) integration is configured corre
#### Camera streaming is unstable or slow
If your camera supports native H.264 streams, Home Assistant can avoid converting the video stream, which is an expensive operation. To enable native H.264 streaming when configured via YAML, change the `video_codec` to `copy`. To allow native H.264 streaming when setting up HomeKit via the UI, go to **Settings** -> **Devices & Services** in the UI, click **Options** for your HomeKit Bridge, and check the box for your camera on the `Cameras that support native H.264 streams` screen.
If your camera supports native H.264 streams, Home Assistant can avoid converting the video stream, which is an expensive operation. To enable native H.264 streaming when configured via YAML, change the `video_codec` to `copy`. To allow native H.264 streaming when setting up HomeKit via the UI, go to **Settings** > **Devices & Services** in the UI, click **Options** for your HomeKit Bridge, and check the box for your camera on the `Cameras that support native H.264 streams` screen.
#### Multiple camera streams

View File

@ -43,7 +43,7 @@ ha_platforms:
ha_integration_type: integration
---
The [HomeKit](https://developer.apple.com/homekit/) controller integration allows you to connect accessories with the "Works with HomeKit" logo to Home Assistant. This integration should not be confused with the [HomeKit](/integrations/homekit/) integration, which allows you to control Home Assistant devices via HomeKit.
The [HomeKit](https://developer.apple.com/apple-home/) controller integration allows you to connect accessories with the "Works with HomeKit" logo to Home Assistant. This integration should not be confused with the [HomeKit](/integrations/homekit/) integration, which allows you to control Home Assistant devices via HomeKit.
The integration will automatically detect HomeKit compatible devices that are ready to pair if the [`zeroconf`](/integrations/zeroconf/) integration is enabled. This is enabled by default on new installations via the [`default_config`](/integrations/default_config/) component.
@ -165,7 +165,7 @@ homekit:
`netdisco` is not used by Home Assistant to discover HomeKit devices, so if it can't see your device the problem is more likely to be environmental than with Home Assistant itself.
Alternatively if you are less comfortable with the command line you could use Discovery for [Mac](https://apps.apple.com/us/app/discovery-dns-sd-browser/id1381004916?mt=12) or [iOS](https://apps.apple.com/us/app/discovery-dns-sd-browser/id305441017), Android [Service Browser](https://play.google.com/store/apps/details?id=com.druk.servicebrowser) or [All My Lan](https://www.microsoft.com/en-us/p/all-my-lan/9wzdncrdn19v). These are a less useful diagnostic as they aren't running from the same point on your network as Home Assistant. Even if it is visible in this tool it might still be a networking issue. They can give sometimes give clues.
Alternatively, if you are less comfortable with the command line, you could use Discovery for [Mac](https://apps.apple.com/app/discovery-dns-sd-browser/id1381004916) or [iOS](https://apps.apple.com/app/discovery-dns-sd-browser/id305441017), Android [Service Browser](https://play.google.com/store/apps/details?id=com.druk.servicebrowser) or [All My Lan](https://apps.microsoft.com/store/detail/all-my-lan/9WZDNCRDN19V). These are a less useful diagnostic as they aren't running from the same point on your network as Home Assistant. Even if a device is visible in these tools, it might still be a networking issue, but they can sometimes give clues.
Where a discovery tool does give an IP, check that it is what you expect (compare it to the DHCP leases in your router, for example). Can you ping it? If not, you have a network problem.

View File

@ -175,7 +175,7 @@ target:
#### Overrides
You can pass any of the parameters listed [here](https://developer.mozilla.org/en-US/docs/Web/API/ServiceWorkerRegistration/showNotification#Parameters) in the `data` dictionary. Please note, Chrome specifies that the maximum size for an icon is 320px by 320px, the maximum `badge` size is 96px by 96px and the maximum icon size for an action button is 128px by 128px.
You can pass any of the parameters listed [here](https://developer.mozilla.org/docs/Web/API/ServiceWorkerRegistration/showNotification#Parameters) in the `data` dictionary. Please note, Chrome specifies that the maximum size for an icon is 320px by 320px, the maximum `badge` size is 96px by 96px and the maximum icon size for an action button is 128px by 128px.
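As a sketch (assuming a notifier named `html5`; the icon and badge paths are illustrative):

```yaml
service: notify.html5
data:
  message: "The front door is open"
  data:
    icon: /local/door-icon.png
    badge: /local/badge.png
```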
#### URL

View File

@ -74,10 +74,12 @@ To get the Estimated distance sensor to work, in most cases, it has to be calibr
- [Feasycom FSC-BP103B](https://www.feasycom.com/bluetooth-ibeacon-da14531)
- [Feasycom FSC-BP104D](https://www.feasycom.com/dialog-da14531-bluetooth-low-energy-beacon)
- [Feasycom FSC-BP108](https://www.feasycom.com/bluetooth-5-1-waterproof-bluetooth-beacon)
- [MikroTik TG-BT5-IN](https://mikrotik.com/product/tg_bt5_in) (Additional sensors such as angle or impact are not compatible)
- [NRF51822 iBeacon](https://www.aliexpress.com/item/32826502025.html)
- [NRF52810 iBeacon](https://www.aliexpress.com/item/1005003211033416.html)
- [Pawscout Tag](https://pawscout.com/shop/pawscout-tag/)
- [SwiftFinder](https://www.amazon.com/dp/B089MD5NP7) (Requires being paired to a phone before it starts transmitting once a minute; otherwise it stays asleep)
- [Teltonika EYE Sensor](https://teltonika-gps.com/products/accessories/sensors-beacons/eye) (Additional sensors such as accelerometer, temperature, and humidity are not compatible)
## Example automation

View File

@ -43,7 +43,7 @@ Most lights do not support all attributes. You can check the integration documen
| ---------------------- | -------- | ----------- |
| `entity_id` | no | String or list of strings that point at `entity_id`s of lights. To target all lights, set `entity_id` to `all`.
| `transition` | yes | Number that represents the time (in seconds) the light should take to transition to the new state.
| `profile` | yes | String with the name of one of the [built-in profiles](https://github.com/home-assistant/home-assistant/blob/master/homeassistant/components/light/light_profiles.csv) (relax, energize, concentrate, reading) or one of the custom profiles defined in `light_profiles.csv` in the current working directory. Light profiles define an xy color, brightness and a transition value (if no transition is desired, set to 0 or leave out the column entirely). If a profile is given, and a brightness is set, then the profile brightness will be overwritten.
| `profile` | yes | String with the name of one of the [built-in profiles](https://github.com/home-assistant/core/blob/master/homeassistant/components/light/light_profiles.csv) (relax, energize, concentrate, reading) or one of the custom profiles defined in `light_profiles.csv` in the current working directory. Light profiles define an xy color, brightness and a transition value (if no transition is desired, set to 0 or leave out the column entirely). If a profile is given, and a brightness is set, then the profile brightness will be overwritten.
| `hs_color` | yes | A list containing two floats representing the hue and saturation of the color you want the light to be. Hue is scaled 0-360, and saturation is scaled 0-100.
| `xy_color` | yes | A list containing two floats representing the xy color you want the light to be. Two comma-separated floats that represent the color in XY.
| `rgb_color` | yes | A list containing three integers between 0 and 255 representing the RGB color you want the light to be. Three comma-separated integers that represent the color in RGB, within square brackets.
@ -116,6 +116,6 @@ Turns one or multiple lights off.
### Service `light.toggle`
Toggles the state of one or multiple lights. Takes the same arguments as [`turn_on`](#service-lightturn_on) service.
Toggles the state of one or multiple lights. Takes the same arguments as the [`light.turn_on`](#service-lightturn_on) service.
*Note*: If `light.toggle` is used for a group of lights, it will toggle the individual state of each light. If you want the lights to be treated as a single light, use [Light Groups](/integrations/light.group/) instead.
*Note*: If `light.toggle` is used for a group of lights, it will toggle the individual state of each light. If you want the lights to be treated as a single light, use [Light Groups](/integrations/group#binary-sensor-light-and-switch-groups) instead.
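As a sketch (the entity name is illustrative), a toggle with a transition could look like this:

```yaml
service: light.toggle
target:
  entity_id: light.living_room
data:
  transition: 2
```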

View File

@ -11,7 +11,7 @@ ha_platforms:
ha_integration_type: integration
---
The `marytts` text-to-speech platform uses [MaryTTS](http://mary.dfki.de/) Text-to-Speech engine to read a text with natural sounding voices.
The `marytts` text-to-speech platform uses the [MaryTTS](http://mary.dfki.de/) text-to-speech engine to read a text with natural-sounding voices.
## Configuration

View File

@ -223,6 +223,10 @@ The Philips Hue V2 bridge supports Matter since a recent update (the beta progra
- Device events for example for dimmer remotes are not supported.
- Only basic control of lights is supported, no scenes, events, effects etc.
### Tasmota
Tasmota supports Matter over IP on all ESP32-based devices (currently in an experimental phase). Follow the [instructions](https://tasmota.github.io/docs/Matter/).
### TP-Link Tapo P125M (power plug)
- Look for the M addition in the model name; a device without the M (the regular P125) is not Matter compliant.

View File

@ -16,7 +16,11 @@ ha_platforms:
ha_integration_type: integration
---
The `metoffice` weather platform uses the Met Office's [DataPoint API](https://www.metoffice.gov.uk/datapoint) for weather data. You can get an API key by registering for a Met Office [account](https://register.metoffice.gov.uk/WaveRegistrationClient/public/register.do?service=datapoint). As their website is not as straightforward, after registration and verifying your account you can login [here](https://register.metoffice.gov.uk/MyAccountClient/account/view) to retrieve your API key.
The `metoffice` weather platform uses the Met Office's [DataPoint API](https://www.metoffice.gov.uk/datapoint) for weather data.
## Getting started
Their website is not entirely straightforward, so check the [getting started guide](https://www.metoffice.gov.uk/services/data/datapoint/getting-started).
1. Register for a [Met Office account](https://register.metoffice.gov.uk/WaveRegistrationClient/public/register.do?service=datapoint).
2. After registration and verification of your account, [login](https://register.metoffice.gov.uk/MyAccountClient/account/view) to retrieve your API key.
{% include integrations/config_flow.md %}

View File

@ -1,6 +1,6 @@
---
title: Microsoft Text-to-Speech (TTS)
description: Instructions on how to set up Microsoft Text-to-Speech with Home Assistant.
title: Microsoft text-to-speech (TTS)
description: Instructions on how to set up Microsoft text-to-speech with Home Assistant.
ha_category:
- Text-to-speech
ha_iot_class: Cloud Push
@ -11,7 +11,7 @@ ha_platforms:
ha_integration_type: integration
---
The `microsoft` text-to-speech platform uses the [TTS engine of the Microsoft Speech Service](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/text-to-speech) to read a text with natural sounding voices. This integration uses an API that is part of the Cognitive Services offering and is known as the Microsoft Speech API. For this integration to work, you need a free API key. You can use your [Azure subscription](https://azure.microsoft.com) to create an [Azure Speech resource](https://portal.azure.com/#create/Microsoft.CognitiveServicesSpeechServices).
The `microsoft` text-to-speech platform uses the [TTS engine of the Microsoft Speech Service](https://learn.microsoft.com/azure/cognitive-services/speech-service/text-to-speech) to read a text with natural sounding voices. This integration uses an API that is part of the Cognitive Services offering and is known as the Microsoft Speech API. For this integration to work, you need a free API key. You can use your [Azure subscription](https://azure.microsoft.com) to create an [Azure Speech resource](https://portal.azure.com/#create/Microsoft.CognitiveServicesSpeechServices).
## Configuration
@ -40,7 +40,7 @@ gender:
type: string
default: "`Female`"
type:
description: "The voice type you want to use. Accepted values are listed as the service name mapping [in the documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/language-support#text-to-speech)."
description: "The voice type you want to use. Accepted values are listed as the service name mapping [in the documentation](https://learn.microsoft.com/azure/cognitive-services/speech-service/language-support?tabs=tts)."
required: false
type: string
default: "`JennyNeural`"
@ -64,7 +64,7 @@ contour:
required: false
type: string
region:
description: "The region of your API endpoint. See [documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/regions)."
description: "The region of your API endpoint. See [documentation](https://learn.microsoft.com/azure/cognitive-services/speech-service/regions)."
required: false
type: string
default: "`eastus`"
@ -72,9 +72,9 @@ region:
<div class='note'>
Not all Azure regions support high-quality neural voices. Use [this overview](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/regions#neural-and-standard-voices) to determine the availability of standard and neural voices by region/endpoint.
Not all Azure regions support high-quality neural voices. Use [this overview](https://learn.microsoft.com/azure/cognitive-services/speech-service/regions) to determine the availability of standard and neural voices by region/endpoint.
New users ([any newly created Azure Speech resource after August 31st, 2021](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/text-to-speech#migrate-to-neural-voice)) can only use neural voices. Existing resources can continue using standard voices through August 31st, 2024.
New users ([any newly created Azure Speech resource after August 31st, 2021](https://learn.microsoft.com/azure/cognitive-services/speech-service/text-to-speech#more-about-neural-text-to-speech-features)) can only use neural voices. Existing resources can continue using standard voices through August 31st, 2024.
</div>

View File

@ -11,17 +11,17 @@ ha_integration_type: integration
The `microsoft_face` integration platform is the main integration for Microsoft
Azure Cognitive Services
[Face](https://azure.microsoft.com/en-us/services/cognitive-services/face/).
[Face](https://azure.microsoft.com/products/cognitive-services/vision-services).
All data are stored in your own private instance in the Azure cloud.
## Setup
You need an API key, which is free, but requires an
[Azure registration](https://azure.microsoft.com/en-us/free/) using your
[Azure registration](https://azure.microsoft.com/free/) using your
Microsoft ID. The free resource (*F0*) is limited to 20 requests per minute and
30k requests in a month. If you don't want to use the Azure cloud, you can also
get an API key by registering with
[cognitive-services](https://azure.microsoft.com/en-us/try/cognitive-services/).
[cognitive-services](https://azure.microsoft.com/try/cognitive-services/).
Please note that all keys on cognitive services must be recreated every 90 days.
## Configuration

View File

@ -10,19 +10,19 @@ ha_integration_type: integration
---
The `microsoft_face_detect` image processing platform allows you to use the
[Microsoft Face Identify](https://www.microsoft.com/cognitive-services/en-us/)
[Microsoft Face Identify](https://azure.microsoft.com/products/cognitive-services/)
API through Home Assistant. This platform enables you to detect faces on camera
and fire an event with attributes.
Please refer to the [Microsoft Face component](/integrations/microsoft_face/) configuration on
Please refer to the [Microsoft Face integration](/integrations/microsoft_face/) configuration on
how to setup the API key.
For using the result inside an automation rule,
take a look at the [Image Processing component](/integrations/image_processing/) page.
take a look at the [Image Processing integration](/integrations/image_processing/) page.
<div class='note'>
The free version of the Microsoft Face identify API limits the number of requests possible per month. Therefore, it is strongly recommended that you limit the `scan_interval` when setting up an instance of this entity as detailed on the main [Image Processing component](/integrations/image_processing/) page.
The free version of the Microsoft Face identify API limits the number of requests possible per month. Therefore, it is strongly recommended that you limit the `scan_interval` when setting up an instance of this entity as detailed on the main [Image Processing integration](/integrations/image_processing/) page.
</div>

View File

@ -10,19 +10,19 @@ ha_integration_type: integration
---
The `microsoft_face_identify` image processing platform lets you use
[Microsoft Face identify](https://www.microsoft.com/cognitive-services/en-us/)
[Microsoft Face identify](https://azure.microsoft.com/products/cognitive-services/)
API through Home Assistant. This platform allows you to identify persons on
camera and fire an event with attributes.
Please refer to the [Microsoft Face component](/integrations/microsoft_face/) configuration on
Please refer to the [Microsoft Face integration](/integrations/microsoft_face/) configuration on
how to setup the API key.
For using the result inside an automation rule,
take a look at the [Image Processing component](/integrations/image_processing/) page.
take a look at the [Image Processing integration](/integrations/image_processing/) page.
<div class='note'>
The free version of the Microsoft Face identify API limits the number of requests possible per month. Therefore, it is strongly recommended that you limit the `scan_interval` when setting up an instance of this entity as detailed on the main [Image Processing component](/integrations/image_processing/) page.
The free version of the Microsoft Face identify API limits the number of requests possible per month. Therefore, it is strongly recommended that you limit the `scan_interval` when setting up an instance of this entity as detailed on the main [Image Processing integration](/integrations/image_processing/) page.
</div>

View File

@ -169,7 +169,7 @@ For Ubiquiti routers/access points the "Enable multicast enhancement (IGMPv3)" s
### Bypassing UDP multicast
If UDP Multicast does not work in your setup (due to network limitations), this integration can be used in local polling mode.
Go to Settings -> Integrations -> on the already set up Motion Blinds integration click "configure" --> disable the "Wait for push" option (disabled by default).
Go to Settings > Integrations, click "Configure" on the already set up Motion Blinds integration, and disable the "Wait for multicast push on update" option (disabled by default).
The default update interval of the Motion Blinds integration is every 10 minutes. When UDP multicast pushes do not work, this polling interval can be a bit long. To poll more frequently:
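A sketch of one possible approach is to trigger the update yourself from an automation (the interval and entity ID are illustrative):

```yaml
automation:
  - alias: "Poll Motion Blinds every minute"
    trigger:
      - platform: time_pattern
        minutes: "/1"
    action:
      - service: homeassistant.update_entity
        target:
          entity_id: cover.living_room_blind
```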

View File

@ -109,7 +109,7 @@ With a secure broker connection it is possible to use a client certificate for a
#### Using WebSockets as transport
You can select `websockets` as transport method if your MQTT broker supports it. When you select `websockets` and click `NEXT` you will be able to add a WebSockets path (default = `/` and WebSockets headers (optional). The target WebSockets URI: `ws://{broker}:{port}{WebSockets path}` is built with `broker`, `port` and `ws_path` (WebSocket path) settings.
You can select `websockets` as transport method if your MQTT broker supports it. When you select `websockets` and click `NEXT`, you will be able to add a WebSockets path (default = `/`) and WebSockets headers (optional). The target WebSockets URI: `ws://{broker}:{port}{WebSockets path}` is built with `broker`, `port` and `ws_path` (WebSocket path) settings.
To configure the WebSockets headers, supply a valid JSON dictionary string, e.g., `{ "Authorization": "token", "x-header": "some header" }`. The default transport method is `tcp`. The WebSockets transport can be secured using TLS and optionally user credentials or a client certificate.
<div class='note'>
@ -140,13 +140,13 @@ MQTT Birth and Last Will messages can be customized or disabled from the UI. To
The `mosquitto` broker package ships command-line tools (often as a `*-clients` package) to send and receive MQTT messages. For sending test messages to a broker running on `localhost`, check the example below:
```bash
mosquitto_pub -h 127.0.0.1 -t home-assistant/switch/1/on -m "Switch is ON"
mosquitto_pub -h 127.0.0.1 -t homeassistant/switch/1/on -m "Switch is ON"
```
Another way to send MQTT messages manually is to use the "MQTT" integration in the frontend. Choose "Settings" on the left menu, click "Devices & Services", and choose "Configure" in the "Mosquitto broker" tile. Enter something similar to the example below into the "topic" field under "Publish a packet" and press "PUBLISH".
```bash
home-assistant/switch/1/power
homeassistant/switch/1/power
```
and in the Payload field
@ -155,23 +155,23 @@ and in the Payload field
ON
```
In the "Listen to a topic" field, type `#` to see everything, or "home-assistant/switch/#" to just follow a published topic, then press "START LISTENING". The messages should appear similar to the text below:
In the "Listen to a topic" field, type `#` to see everything, or "homeassistant/switch/#" to just follow a published topic, then press "START LISTENING". The messages should appear similar to the text below:
```bash
Message 23 received on home-assistant/switch/1/power/stat/POWER at 12:16 PM:
Message 23 received on homeassistant/switch/1/power/stat/POWER at 12:16 PM:
ON
QoS: 0 - Retain: false
Message 22 received on home-assistant/switch/1/power/stat/RESULT at 12:16 PM:
Message 22 received on homeassistant/switch/1/power/stat/RESULT at 12:16 PM:
{
"POWER": "ON"
}
QoS: 0 - Retain: false
```
For reading all messages sent on the topic `home-assistant` to a broker running on localhost:
For reading all messages sent on the topic `homeassistant` to a broker running on localhost:
```bash
mosquitto_sub -h 127.0.0.1 -v -t "home-assistant/#"
mosquitto_sub -h 127.0.0.1 -v -t "homeassistant/#"
```
## MQTT Discovery
@ -218,6 +218,7 @@ The discovery topic needs to follow a specific format:
<discovery_prefix>/<component>/[<node_id>/]<object_id>/config
```
- `<discovery_prefix>`: The Discovery Prefix defaults to `homeassistant`. This prefix can be [changed](#discovery-options).
- `<component>`: One of the supported MQTT components, e.g., `binary_sensor`.
- `<node_id>` (*Optional*): ID of the node providing the topic; this is not used by Home Assistant but may be used to structure the MQTT topic. The ID of the node must only consist of characters from the character class `[a-zA-Z0-9_-]` (alphanumerics, underscore and hyphen).
- `<object_id>`: The ID of the device. This is only to allow for separate topics for each device and is not used for the `entity_id`. The ID of the device must only consist of characters from the character class `[a-zA-Z0-9_-]` (alphanumerics, underscore and hyphen).
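As an illustrative sketch using the `mosquitto_pub` tool shown earlier, a discovery message for a motion sensor could be published like this (the topic and payload values are examples):

```bash
mosquitto_pub -h 127.0.0.1 -r -t "homeassistant/binary_sensor/garden/config" \
  -m '{"name": "Garden motion", "device_class": "motion", "state_topic": "homeassistant/binary_sensor/garden/state", "unique_id": "garden_motion01"}'
```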
@ -536,7 +537,7 @@ The following software has built-in support for MQTT discovery:
- [IOTLink](https://iotlink.gitlab.io) (starting with 2.0.0)
- [MiFlora MQTT Daemon](https://github.com/ThomDietrich/miflora-mqtt-daemon)
- [Nuki Hub](https://github.com/technyon/nuki_hub)
- [Nuki Smart Lock 3.0 Pro](https://support.nuki.io/hc/en-us/articles/12947926779409-MQTT-support)
- [Nuki Smart Lock 3.0 Pro](https://support.nuki.io/hc/articles/12947926779409-MQTT-support), [more info](https://developer.nuki.io/t/mqtt-api-specification-v1-3/17626)
- [OpenMQTTGateway](https://github.com/1technophile/OpenMQTTGateway)
- [room-assistant](https://github.com/mKeRix/room-assistant) (starting with 1.1.0)
- [SmartHome](https://github.com/roncoa/SmartHome)
@ -784,14 +785,14 @@ You must include either `topic` or `topic_template`, but not both. If providing
</p>
```yaml
topic: home-assistant/light/1/command
topic: homeassistant/light/1/command
payload: on
```
{% raw %}
```yaml
topic: home-assistant/light/1/state
topic: homeassistant/light/1/state
payload_template: "{{ states('device_tracker.paulus') }}"
```
@ -800,7 +801,7 @@ payload_template: "{{ states('device_tracker.paulus') }}"
{% raw %}
```yaml
topic_template: "home-assistant/light/{{ states('sensor.light_active') }}/state"
topic_template: "homeassistant/light/{{ states('sensor.light_active') }}/state"
payload_template: "{{ states('device_tracker.paulus') }}"
```
@ -811,7 +812,7 @@ If you want to send JSON using the YAML editor then you need to format/escape
it properly. Like:
```yaml
topic: home-assistant/light/1/state
topic: homeassistant/light/1/state
payload: "{\"Status\":\"off\", \"Data\":\"something\"}"`
```
@ -844,7 +845,7 @@ data:
Example of how to use `qos` and `retain`:
```yaml
topic: home-assistant/light/1/command
topic: homeassistant/light/1/command
payload: on
qos: 2
retain: true

View File

@ -13,7 +13,7 @@ ha_platforms:
ha_integration_type: integration
---
The `Microsoft Teams` platform allows you to send notifications from Home Assistant to a team channel in [Microsoft Teams](https://products.office.com/en-us/microsoft-teams/group-chat-software).
The `Microsoft Teams` platform allows you to send notifications from Home Assistant to a team channel in [Microsoft Teams](https://www.microsoft.com/microsoft-teams/group-chat-software).
## Setup

View File

@ -109,3 +109,7 @@ Displays the current link rate of the device indicating the maximum possible dat
### Link type
Displays the current link type: wired, 2.4GHz or 5GHz.
## Troubleshooting
- If you get a "Connection or login error" when trying to set up the NETGEAR integration, please try using the IP address of the router (often "192.168.1.1") as host instead of the default "routerlogin.net".

View File

@ -12,7 +12,7 @@ ha_codeowners:
ha_integration_type: system
---
This integration provides network configuration for integrations such as [Zeroconf](/integrations/zeroconf/). It is managed by going to **{% my network title="Settings >> System >> Network" %}** and is only available to users that have "Advanced Mode" enabled on their {% my profile title="user profile" %}.
This integration provides network configuration for integrations such as [Zeroconf](/integrations/zeroconf/). It is managed by going to **{% my network title="Settings > System > Network" %}** and is only available to users that have "Advanced Mode" enabled on their {% my profile title="user profile" %}.
**{% my general badge %}**

View File

@ -68,3 +68,8 @@ Events generated by Nuki are sent as events of type `nuki_event` with the follow
| -------------------- | ------------------------------------------ |
| `type` | The type of the event. Values: `ring`
| `entity_id` | The ID of the entity generating the event.
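As a sketch, an automation could react to the ring event like this (the notifier is illustrative):

```yaml
automation:
  - alias: "Nuki ring notification"
    trigger:
      - platform: event
        event_type: nuki_event
        event_data:
          type: ring
    action:
      - service: notify.notify
        data:
          message: "Someone rang the Nuki Opener"
```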
## MQTT support
The Nuki Smart Lock 3.0 Pro also [supports MQTT](https://support.nuki.io/hc/en-us/articles/12947926779409-MQTT-support) and can directly integrate with Home Assistant through [MQTT discovery](/integrations/mqtt/#mqtt-discovery).
Specific information can be found [here](https://developer.nuki.io/t/mqtt-api-specification-v1-3/17626).

View File

@ -46,6 +46,15 @@ You can configure specific FFmpeg options through the integration options flow b
| Extra FFmpeg arguments | Extra options to pass to `ffmpeg`, e.g., image quality or video filter options. More details in [`ffmpeg` integration](/integrations/ffmpeg). |
| Use wallclock as timestamps | ([Advanced Mode](/blog/2019/07/17/release-96/#advanced-mode) only) Rewrite the camera timestamps. This may help with playback or crashing issues from Wi-Fi cameras or cameras of certain brands (e.g., EZVIZ). |
#### Snapshots
Some cameras will not produce usable snapshots with larger stream sizes.
By default, the integration will only enable the camera entity for the first H264 profile. If you are unable to get a working snapshot:
- If additional camera entities are available for other profiles, try enabling those entities.
- Set the `Extra FFmpeg arguments` to `-pred 1 -ss 00:00:05 -frames:v 1` to cause the snapshot to be taken 5 seconds into the stream.
### Supported Sensors
This integration uses the ONVIF pullpoint subscription API to process events into sensors that will be automatically added to Home Assistant. Below is a list of currently supported event topics along with the entities they create.

View File

@ -28,8 +28,7 @@ trained medical professional.
## Generating an API Key
To generate an API key,
[simply log in to the OpenUV website](https://www.openuv.io/auth/google).
To generate an API key, simply log in to [the OpenUV website](https://www.openuv.io/).
{% include integrations/config_flow.md %}

View File

@ -22,6 +22,10 @@ To learn more about PECO, visit [**their about page**](https://www.peco.com/Abou
This integration is only for PECO customers. To see if your county is supported, take a look at [**their outage map**](https://www.peco.com/Outages/CheckOutageStatus/Pages/OutageMap.aspx).
PECO is a registered trademark of Exelon Business Services Co., LLC.
</div>
{% include integrations/config_flow.md %}
@ -37,4 +41,4 @@ This integration will create 5 entities.
- A sensor that shows the total number of customers with no power.
- A sensor that shows the total number of customers served in the county.
- A sensor that shows the percentage of customers with no power.
- A sensor that shows the alert that appears when you open the online outage map.
- A sensor that shows the alert that appears when you open the online outage map.

View File

@ -78,7 +78,7 @@ The message attribute supports the [Markdown formatting syntax](https://daringfi
| Headline 2 | `## Headline` |
| Newline | `\n` |
| Bold | `**My bold text**` |
| Cursive | `*My cursive text*` |
| Italic | `*My italic text*` |
| Link | `[Link](https://home-assistant.io/)` |
| Image | `![image](/local/my_image.jpg)` |

View File

@ -1,6 +1,6 @@
---
title: Pico TTS
description: Instructions on how to setup Pico Text-to-Speech with Home Assistant.
description: Instructions on how to set up Pico text-to-speech with Home Assistant.
ha_category:
- Text-to-speech
ha_iot_class: Local Push

View File

@ -52,6 +52,11 @@ proximity:
unit_of_measurement: mi
```
### Video Tutorial
This video tutorial explains how to set up geofencing in Home Assistant using the proximity integration.
<lite-youtube videoid="pjAyRN5UiBg" videotitle="Geofencing in Home Assistant - Tutorial" posterquality="maxresdefault"></lite-youtube>
{% configuration %}
zone:
description: The zone to which this integration is measuring the distance to. Default is the home zone.

View File

@ -290,22 +290,17 @@ sensor:
{% raw %}
```yaml
sensor:
- platform: rest
name: JSON time
json_attributes:
- date
- milliseconds_since_epoch
resource: http://date.jsontest.com/
value_template: "{{ value_json.time }}"
- platform: template
sensors:
date:
friendly_name: "Date"
value_template: "{{ state_attr('sensor.json_time', 'date') }}"
milliseconds:
friendly_name: "milliseconds"
value_template: "{{ state_attr('sensor.json_time', 'milliseconds_since_epoch') }}"
rest:
- resource: "http://date.jsontest.com/"
sensor:
- name: "Time"
value_template: "{{ value_json.time }}"
- name: "Date"
value_template: "{{ value_json.date }}"
- name: "Milliseconds"
value_template: "{{ value_json.milliseconds_since_epoch }}"
```
{% endraw %}
@ -335,40 +330,29 @@ This sample fetches a weather report from [OpenWeatherMap](https://openweatherma
{% raw %}
```yaml
sensor:
- platform: rest
name: OWM_report
json_attributes:
- main
- weather
value_template: "{{ value_json['weather'][0]['description'].title() }}"
resource: https://api.openweathermap.org/data/2.5/weather?zip=80302,us&APPID=VERYSECRETAPIKEY
- platform: template
sensors:
owm_weather:
value_template: "{{ state_attr('sensor.owm_report', 'weather')[0]['description'].title() }}"
entity_picture_template: "{{ 'https://openweathermap.org/img/w/' + state_attr('sensor.owm_report', 'weather')[0]['icon'].lower() + '.png' }}"
entity_id: sensor.owm_report
owm_temp:
friendly_name: "Outside temp"
value_template: "{{ state_attr('sensor.owm_report', 'main')['temp'] - 273.15 }}"
rest:
- resource: "https://api.openweathermap.org/data/2.5/weather?zip=80302,us&APPID=VERYSECRETAPIKEY"
sensor:
- name: "Report"
value_template: "{{ value_json['weather'][0]['description'].title() }}"
picture: "{{ 'https://openweathermap.org/img/w/' + value_json['weather'][0]['icon'].lower() + '.png' }}"
- name: "Outside temp"
value_template: "{{ value_json['main']['temp'] - 273.15 }}"
unit_of_measurement: "°C"
entity_id: sensor.owm_report
owm_pressure:
friendly_name: "Outside pressure"
value_template: "{{ state_attr('sensor.owm_report', 'main')['pressure'] }}"
- name: "Outside pressure"
value_template: "{{ value_json['main']['pressure'] }}"
unit_of_measurement: "hP"
entity_id: sensor.owm_report
owm_humidity:
friendly_name: "Outside humidity"
value_template: "{{ state_attr('sensor.owm_report', 'main')['humidity'] }}"
- name: "Outside humidity"
value_template: "{{ value_json['main']['humidity'] }}"
unit_of_measurement: "%"
entity_id: sensor.owm_report
```
{% endraw %}
This configuration shows how to extract multiple values from a dictionary with `json_attributes` and `template`. This avoids flooding the REST service by only requesting the result once, then creating multiple attributes from that single result using templates. By default, the sensor state would be set to the full JSON — here, that would exceed the 255-character maximum allowed length for the state, so we override that default by using `value_template` to set a static value of `OK`.
This configuration shows how to extract multiple values from a dictionary. This method avoids flooding the REST service because the result is only requested once; from that single request, multiple sensors can be created.
{% raw %}
@ -400,73 +384,52 @@ This configuration shows how to extract multiple values from a dictionary with `
{% raw %}
```yaml
sensor:
- platform: rest
name: room_sensors
resource: http://<address_to_rest_service>
json_attributes:
- bedroom1
- bedroom2
- bedroom3
value_template: "OK"
- platform: template
sensors:
bedroom1_temperature:
value_template: "{{ state_attr('sensor.room_sensors', 'bedroom1')['temperature'] }}"
rest:
resource: "http://<address_to_rest_service>"
sensor:
- name: "Bedroom1 Temperature"
value_template: "{{ value_json['bedroom1']['temperature'] }}"
device_class: temperature
unit_of_measurement: "°C"
bedroom1_humidity:
value_template: "{{ state_attr('sensor.room_sensors', 'bedroom1')['humidity'] }}"
- name: "Bedroom1 Humidity"
value_template: "{{ value_json['bedroom1']['humidity'] }}"
device_class: humidity
unit_of_measurement: "%"
bedroom1_battery:
value_template: "{{ state_attr('sensor.room_sensors', 'bedroom1')['battery'] }}"
- name: "Bedroom1 Battery"
value_template: "{{ value_json['bedroom1']['battery'] }}"
device_class: battery
unit_of_measurement: "V"
bedroom2_temperature:
value_template: "{{ state_attr('sensor.room_sensors', 'bedroom2')['temperature'] }}"
- name: "Bedroom2 Temperature"
value_template: "{{ value_json['bedroom2']['temperature'] }}"
device_class: temperature
unit_of_measurement: "°C"
```
{% endraw %}
The below example allows shows how to extract multiple values from a dictionary with `json_attributes` and `json_attributes_path` from the XML of a Steamist Steambath Wi-Fi interface and use them to create a switch and multiple sensors without having to poll the endpoint numerous times.
In the below example `json_attributes_path` is set to `$.response` which is the location of the `usr0`, `pot0`, ... attributes used for `json_attributes`.
The example below shows how to extract multiple values from the XML status file of a Steamist Steambath Wi-Fi interface. The values are used to create a switch and multiple sensors without having to poll the endpoint numerous times.
{% raw %}
```yaml
sensor:
# Steam Controller
- platform: rest
name: Steam System Data
resource: http://192.168.1.105/status.xml
json_attributes_path: "$.response"
rest:
# Steam Controller
- resource: "http://192.168.1.105/status.xml"
scan_interval: 15
value_template: "OK"
json_attributes:
- "usr0"
- "pot0"
- "temp0"
- "time0"
- platform: template
sensors:
steam_temp:
friendly_name: Steam Temp
value_template: "{{ state_attr('sensor.steam_system_data', 'temp0') | regex_findall_index('([0-9]+)XF') }}"
sensor:
- name: "Steam Temp"
value_template: "{{ json_value['response']['temp0'] | regex_findall_index('([0-9]+)XF') }}"
unit_of_measurement: "°F"
steam_time_remaining:
friendly_name: "Steam Time Remaining"
value_template: "{{ state_attr('sensor.steam_system_data', 'time0') }}"
- name: "Steam Time Remaining"
value_template: "{{ json_value['response']['time0'] }}"
unit_of_measurement: "minutes"
switch:
- platform: template
switches:
steam:
value_template: "{{ state_attr('sensor.steam_system_data', 'usr0') | int >= 1 }}"
switch:
- name: "Steam"
value_template: "{{ json_value['response']['usr0'] | int >= 1 }}"
turn_on:
- service: rest_command.set_steam_led
data:
@ -489,7 +452,6 @@ switch:
- service: homeassistant.update_entity
target:
entity_id: sensor.steam_system_data
friendly_name: Steam
rest_command:
set_steam_led:

View File

@ -117,7 +117,7 @@ Sonos accepts a variety of `media_content_id` formats in the `media_player.play_
Music services which require an account (e.g., Spotify) must first be configured using the Sonos app.
Playing TTS (text to speech) or audio files as alerts (e.g., a doorbell or alarm) is possible by setting the `announce` argument to `true`. Using `announce` will play the provided media URL as an overlay, gently lowering the current music volume and automatically restoring to the original level when finished. An optional `volume` argument can also be provided in the `extra` dictionary to play the alert at a specific volume level. Note that older Sonos hardware or legacy firmware versions ("S1") may not fully support these features. Additionally, see [Network Requirements](#network-requirements) for use in restricted networking environments.
Playing TTS (text-to-speech) or audio files as alerts (e.g., a doorbell or alarm) is possible by setting the `announce` argument to `true`. Using `announce` will play the provided media URL as an overlay, gently lowering the current music volume and automatically restoring to the original level when finished. An optional `volume` argument can also be provided in the `extra` dictionary to play the alert at a specific volume level. Note that older Sonos hardware or legacy firmware versions ("S1") may not fully support these features. Additionally, see [Network Requirements](#network-requirements) for use in restricted networking environments.
An optional `enqueue` argument can be added to the service call. If `true`, the media will be appended to the end of the playback queue. If not provided or `false`, the queue will be replaced.
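As a sketch (the entity ID, media URL, and volume are illustrative), an announcement call could look like this:

```yaml
service: media_player.play_media
target:
  entity_id: media_player.living_room
data:
  media_content_type: "music"
  media_content_id: "http://192.168.1.10:8123/local/doorbell.mp3"
  announce: true
  extra:
    volume: 35
```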

View File

@ -45,9 +45,9 @@ You can also play HTTP (not HTTPS) URLs:
media_content_type: MUSIC
```
### Text-to-Speech services
### Text-to-speech services
You can use TTS services like [Google Text-to-Speech](/integrations/google_translate) or [Amazon Polly](/integrations/amazon_polly) only if your Home Assistant is configured in HTTP and not HTTPS (current device limitation, a firmware upgrade is planned).
You can use TTS services like [Google text-to-speech](/integrations/google_translate) or [Amazon Polly](/integrations/amazon_polly) only if your Home Assistant is configured in HTTP and not HTTPS (current device limitation, a firmware upgrade is planned).
A workaround, if you want to publish your Home Assistant installation on the Internet over SSL, is to configure an HTTPS web server as a reverse proxy ([NGINX](/docs/ecosystem/nginx/), for example) and keep your Home Assistant configuration on HTTP on your local network. The SoundTouch devices will then be able to access the TTS files over HTTP locally, while your configuration remains available over HTTPS on the Internet.

View File

@ -186,6 +186,31 @@ WHERE
```
Use `state` as column for value.
### State of an entity x time ago
If you want to extract the state of an entity from a day, hour, or minute ago, the query is:
```sql
SELECT
states.state
FROM
states
INNER JOIN states_meta ON
states.metadata_id = states_meta.metadata_id
WHERE
states_meta.entity_id = 'sensor.temperature_in'
AND last_updated_ts <= strftime('%s', 'now', '-1 day')
ORDER BY
last_updated_ts DESC
LIMIT
1;
```
Replace `-1 day` with the target offset, for example, `-1 hour`.
Use `state` as column for value.
Keep in mind that, depending on the update frequency of your sensor and other factors, this may not be a 100% accurate reflection of the actual situation you are measuring. Since your database won't necessarily have a value saved exactly 24 hours ago, use `>=` or `<=` to get one of the closest values.
### Database size
#### Postgres

View File

@ -1,6 +1,6 @@
---
title: Speech-to-Text (STT)
description: Instructions on how to set up Speech-to-Text (STT) with Home Assistant.
title: Speech-to-text (STT)
description: Instructions on how to set up speech-to-text (STT) with Home Assistant.
ha_release: '0.102'
ha_codeowners:
- '@home-assistant/core'
@ -11,11 +11,11 @@ ha_category: []
ha_integration_type: entity
---
A speech to text (STT) entity allows other integrations or applications to stream speech data to the STT API and get text back.
A speech-to-text (STT) entity allows other integrations or applications to stream speech data to the STT API and get text back.
The speech to text entities cannot be implemented manually, but can be provided by integrations.
The speech-to-text entities cannot be implemented manually, but can be provided by integrations.
## The state of a speech to text entity
## The state of a speech-to-text entity
Every speech to text entity keeps track of the timestamp of when the last time
the speech to text entity was used to process speech.
Every speech-to-text entity keeps track of the timestamp of the last time
the speech-to-text entity was used to process speech.

View File

@ -35,10 +35,10 @@ scale:
type: integer
default: 1
name:
description: The name to use when displaying this switch.
description: The name to use when displaying this sensor.
required: false
type: string
default: myStrom Switch
default: Unnamed Device
{% endconfiguration %}
Since some of these sensors consistently show higher temperatures, the scale and offset values can be used to fine-tune your sensor.

View File

@ -113,7 +113,7 @@ sensor:
type: map
keys:
state:
description: Defines a template to get the state of the sensor.
description: "Defines a template to get the state of the sensor. If the sensor is numeric, i.e. it has a `state_class` or a `unit_of_measurement`, the state template must render to a number or to `none`. The state template must not render to a string, including `unknown` or `unavailable`. An `availability` template may be defined to suppress rendering of the state template."
required: true
type: template
unit_of_measurement:
@ -460,6 +460,8 @@ template:
### State based sensor changing the unit of measurement of another sensor
With a Template Sensor, it's easy to convert given values into others if the unit of measurement doesn't fit your needs.
Because the sensors do math on the source sensor's state and need to render to a numeric value, an availability template is used
to suppress rendering of the state template if the source sensor does not have a valid numeric state.
{% raw %}
@ -469,10 +471,12 @@ template:
- name: "Transmission Down Speed"
unit_of_measurement: "kB/s"
state: "{{ states('sensor.transmission_down_speed')|float * 1024 }}"
availability: "{{ is_number(states('sensor.transmission_down_speed')) }}"
- name: "Transmission Up Speed"
unit_of_measurement: "kB/s"
state: "{{ states('sensor.transmission_up_speed')|float * 1024 }}"
availability: "{{ is_number(states('sensor.transmission_up_speed')) }}"
```
{% endraw %}

View File

@ -1,6 +1,6 @@
---
title: Text-to-Speech (TTS)
description: Instructions on how to set up Text-to-Speech (TTS) with Home Assistant.
title: Text-to-speech (TTS)
description: Instructions on how to set up text-to-speech (TTS) with Home Assistant.
ha_category:
- Media Source
- Text-to-speech
@ -15,7 +15,7 @@ ha_platforms:
ha_integration_type: entity
---
Text-to-Speech (TTS) enables Home Assistant to speak to you.
Text-to-speech (TTS) enables Home Assistant to speak to you.
## Services
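As an example of the service pattern, a hedged sketch of a TTS service call; the Google Translate TTS service name and the media player entity ID are assumptions:

```yaml
# Hedged sketch; service name and entity ID are assumptions.
service: tts.google_translate_say
data:
  entity_id: media_player.living_room
  message: "The temperature upstairs is 23 degrees."
```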

View File

@ -135,6 +135,8 @@ If no devices show up in Home Assistant:
- Incorrect username or password: Enter the correct account and password of the Tuya Smart or Smart Life app in the **Account** and **Password** fields (social login, which the Tuya Smart app allows, may not work, and thus should be avoided for use with the Home Assistant integration). Note that the app account depends on which app (Tuya Smart or Smart Life) you used to link devices on the [Tuya IoT Platform](https://iot.tuya.com/cloud/).
- Incorrect country: You must select the region of your Tuya Smart app or Smart Life app account.
- Some users still experience the **Permission denied** error after adding the correct app account credentials in a correctly configured project. A workaround involves adding a custom user under **Cloud** > **Development** > **Users**.
"1100: param is empty":
description: Empty parameter of username or app. Please fill the parameters refer to the **Configuration** part above.

View File

@ -13,4 +13,4 @@ ha_integration_type: integration
ha_quality_scale: internal
---
The Voice Assistant integration contains logic for running *pipelines*, which perform the common steps of a voice assistant like [Assist](/docs/assist/).
The Voice Assistant integration contains logic for running *pipelines*, which perform the common steps of a voice assistant like [Assist](/voice_control/).

View File

@ -11,7 +11,7 @@ ha_platforms:
ha_integration_type: integration
---
The `voicerss` text-to-speech platform uses [VoiceRSS](http://www.voicerss.org/) Text-to-Speech engine to read a text with natural sounding voices.
The `voicerss` text-to-speech platform uses the [VoiceRSS](http://www.voicerss.org/) text-to-speech engine to read a text with natural-sounding voices.
## Configuration

View File

@ -18,10 +18,12 @@ ha_platforms:
ha_config_flow: true
---
The VoIP integration enables users to talk to [Assist](/docs/assist) using an analog phone and a VoIP adapter. Currently, the system works with the [Grandstream HT801](https://amzn.to/40k7mRa). See [the tutorial](/projects/worlds-most-private-voice-assistant) for detailed instructions.
The VoIP integration enables users to talk to [Assist](/voice_control/) using an analog phone and a VoIP adapter. Currently, the system works with the [Grandstream HT801](https://amzn.to/40k7mRa). See [the tutorial](/projects/worlds-most-private-voice-assistant) for detailed instructions.
As an alternative, the [Grandstream HT802](https://www.amazon.com/Grandstream-GS-HT802-Analog-Telephone-Adapter/dp/B01JH7MYKA/) can be used, which is basically the same as the previously mentioned HT801 but has two phone ports, of which Home Assistant currently supports using only one.
Also, the Grandstream HT812 has been reported to work. Home Assistant supports using only one of the two phone ports.
<p class='img'>
<img src="/images/integrations/voip/voip_adapter.png" />
Connecting a phone to Home Assistant requires an adapter.

View File

@ -37,7 +37,7 @@ These devices have been sold under at least the following brands:
- [Atom Lighting](https://atomlighting.com.au/)
- [ATX LED](https://atxledinc.com/)
- [Brilliant](https://www.brilliantlightsource.com/)
- [Designers Fountain](https://designersftn.com/default.dmx)
- [Designers Fountain](https://designersfountain.com/)
- [Evoluziona](https://tecnolite.mx/)
- [Fischer & Honsel](https://fischer-honsel.com/)
- [Gauss](https://gauss.ru/smartlight/products/)
@ -47,12 +47,12 @@ These devices have been sold under at least the following brands:
- [Lednify](https://lednify.com/)
- [Leyton](https://www.leyton-lighting.co.uk/)
- [Liteline](https://www.liteline.com/page/oncloud)
- [Lutec](https://www.lutec.com/highlight/wiz)
- [Lutec](https://www.lutec.com/segments/connected)
- [Philips Smart LED lights with WiZ Connected](https://www.usa.lighting.philips.com/consumer/smart-wifi-led)
- [Spex](https://spexlighting.com/pages/smart-lights)
- [SLV](https://www.slv.com/)
- [Trenz](https://trenzlighting.com/pages/smart-lights)
- [Trio](https://wiz.trio-lighting.com/en/)
- [Trio](https://wiz.trio-lighting.com/)
- [Wofi](https://wofi-wiz.com/)
The integration can report the state of occupancy sensors that have been linked to a device.
@ -61,8 +61,8 @@ The integration can report the state of occupancy sensors that have been linked
## Connect WiZ devices to your network
To connect a WiZ device to your Wi-Fi network, please follow the instructions in the [WiZ app](https://www.wizconnected.com/en/consumer/app/) (available for iOS and Android).
If you have further questions, please have a look at the [WiZ Support Page](https://www.wizconnected.com/en/consumer/support/).
To connect a WiZ device to your Wi-Fi network, please follow the instructions in the [WiZ app](https://www.wizconnected.com/en-us/explore-wiz/app) (available for iOS and Android).
If you have further questions, please have a look at the [WiZ Support Page](https://www.wizconnected.com/en-us/support/faq).
### Enable local connectivity

View File

Add holidays will only take dates formatted as `YYYY-MM-DD`.
Remove holidays will take dates formatted as `YYYY-MM-DD` or a partial name; for example, `christmas` will exclude `Christmas Day`.
The offset can be used to see if future days are workdays. For example, put `1` to see if tomorrow is a workday.
## Automation example
Example usage for automation:
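A minimal sketch of such an automation, assuming the integration's usual `binary_sensor.workday_sensor` entity; the trigger time and light entity are assumptions:

```yaml
# Hedged sketch; trigger time and light entity are assumptions.
automation:
  - alias: "Workday morning wake-up"
    trigger:
      - platform: time
        at: "06:30:00"
    condition:
      - condition: state
        entity_id: binary_sensor.workday_sensor
        state: "on"
    action:
      - service: light.turn_on
        target:
          entity_id: light.bedroom
```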

View File

@ -16,7 +16,7 @@ ha_platforms:
ha_config_flow: true
---
The Wyoming integration connects external voice services to Home Assistant using a [small protocol](https://github.com/rhasspy/rhasspy3/blob/master/docs/wyoming.md). This enables [Assist](/docs/assist) to use a variety of local [speech-to-text](/integrations/stt/) and [text-to-speech](/integrations/tts/) systems, such as:
The Wyoming integration connects external voice services to Home Assistant using a [small protocol](https://github.com/rhasspy/rhasspy3/blob/master/docs/wyoming.md). This enables [Assist](/voice_control/) to use a variety of local [speech-to-text](/integrations/stt/) and [text-to-speech](/integrations/tts/) systems, such as:
* Whisper {% my supervisor_addon badge addon="core_whisper" %}
* Piper {% my supervisor_addon badge addon="core_piper" %}

View File

@ -50,6 +50,10 @@ These devices do not send updates, but can be locked and unlocked.
- MD-04I (Yale Conexis L1, Yale Conexis L2)
- YRCB-490 (Yale Smart Cabinet Lock)
## Troubleshooting
Lock operation requires setting up an encrypted connection that is highly sensitive to latency. It is recommended to use a [High-Performance Bluetooth adapter](/integrations/bluetooth/#known-working-high-performance-adapters) or [ESPHome Bluetooth proxy](/integrations/bluetooth/#remote-adapters-bluetooth-proxies) for stable operation.
## Push updates
Some locks only send push updates when they have an active HomeKit pairing. If your lock is not sending push updates, ensure it's paired with HomeKit using an iOS device or the HomeKit controller integration. The lock cannot be paired via HomeKit Controller and the Yale Access Bluetooth integration on the same Home Assistant instance, as they will both try to access the lock simultaneously and fail.
@ -58,18 +62,19 @@ One easy way to fix this is to create a new/second home in the Apple Home app an
Alternatively, call the `homeassistant.update_entity` service to force the integration to update the lock state.
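For example, a minimal sketch of that service call; the lock entity ID is an assumption:

```yaml
# Hedged sketch; the entity ID is an assumption.
service: homeassistant.update_entity
data:
  entity_id: lock.front_door
```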
If the lock does not support push updates via advertisements or you want lock operation to be more responsive, you can enable always connected mode. Always connected will cause the lock to stay connected to Home Assistant via Bluetooth, which will use more battery.
## Door Sensors
The lock must be calibrated in the Yale Access App for the door sensors to function correctly. If the door sensor has an unknown state or is not updating, try recalibrating the lock in the app.
## Obtaining the offline key
The offline key and slot number are required to operate the lock. These credentials can be found in multiple places depending on the lock brand and model.
### Yale Access or August Cloud
### Yale Access, Yale Home, or August Cloud
The [August](/integrations/august) integration can automatically provision the offline key if the configured account has the key loaded. You may need to create or use a non-primary existing account with owner-level access to the lock, as not all accounts will have the key loaded.
The [August](/integrations/august) integration can automatically provision the offline key if the configured account has the key loaded. You may need to create or use a non-primary existing account with owner-level access to the lock, as not all accounts will have the key loaded. If the lock was not discovered by Home Assistant when the cloud integration was loaded, reload the cloud integration once the lock has been discovered.
If the offline key can automatically be provisioned, you will not be asked to enter it and the integration should be automatically added, configured and running.

View File

@ -34,7 +34,7 @@ The Yamaha MusicCast integration implements the grouping services. There are som
## Play Media functionality
The MusicCast integration supports the Home Assistant media browser for all streaming services, your device supports. For services such as Deezer, you have to log in using the official MusicCast app. In addition, local HTTP URLs can be played back using this service. This includes the Home Assistant text to speech services.
The MusicCast integration supports the Home Assistant media browser for all streaming services your device supports. For services such as Deezer, you have to log in using the official MusicCast app. In addition, local HTTP URLs can be played back using this service. This includes the Home Assistant text-to-speech services.
It is also possible to recall NetUSB presets using the play media service. To do so, `presets:<preset_num>` has to be used as the `media_content_id` in the service call.
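For illustration, a hedged sketch of such a service call; the entity ID, preset number, and `media_content_type` value are assumptions:

```yaml
# Hedged sketch; entity ID, preset number, and content type are assumptions.
service: media_player.play_media
target:
  entity_id: media_player.musiccast_living_room
data:
  media_content_id: "presets:1"
  media_content_type: "music"
```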

View File

@ -11,7 +11,7 @@ ha_platforms:
ha_integration_type: integration
---
The `yandextts` text-to-speech platform uses [Yandex SpeechKit](https://tech.yandex.com/speechkit/) Text-to-Speech engine to read a text with natural sounding voices.
The `yandextts` text-to-speech platform uses [Yandex SpeechKit](https://tech.yandex.com/speechkit/) text-to-speech engine to read a text with natural sounding voices.
<div class='note warning'>
This integration works only with old API keys. It cannot be used with the new API keys.

View File

@ -272,6 +272,8 @@ Note: `cluster_id: 25` may also be `cluster_id: 0x0019`. The two are synonymous.
### Defining Zigbee channel to use
Tip! Before considering changing to another Zigbee channel on an existing Zigbee network, it is highly recommended that you read through the two segments under the [troubleshooting](#troubleshooting) section below about "*Best practices to avoid pairing/connection difficulties*" and "*Zigbee interference avoidance and network range/coverage optimization*". These sections provide prerequisite information and advice on how to achieve the best possible Zigbee network in your environment.
ZHA prefers to use Zigbee channel 15 by default. You can change this using YAML configuration, but this only works
if there is no existing network. To change the channel for an existing network, the radio has to be factory reset and a new network formed. This requires re-pairing all of the devices.
@ -283,13 +285,13 @@ zha:
channels: [15, 20, 25] # Channel mask
```
This is a good reference for channel selection for [Zigbee and WiFi coexistance](https://support.metageek.com/hc/en-us/articles/203845040-ZigBee-and-WiFi-Coexistence).
Note! The best practice is to not change the Zigbee channel from the ZHA default. Also, the related troubleshooting segments mentioned in the tip above will, among other things, inform that if you have issues with overlapping frequencies between Wi-Fi and Zigbee, then it is usually better to first only try changing and setting a static Wi-Fi channel on your Wi-Fi router or all your Wi-Fi access points (instead of just changing to another Zigbee channel).
The Zigbee specification standards divide the 2.4Ghz ISM radio band into 16 Zigbee channels (i.e. distinct radio frequencies for Zigbee). For all Zigbee devices to be able to communicate, they must support the same Zigbee channel (i.e. Zigbee radio frequency) that is set on the Zigbee Coordinator as the channel to use for its Zigbee network. Not all Zigbee devices support all Zigbee channels, it will usually depend on the hardware and firmware age as well as devices power ratings.
MetaGeek Support has a good reference article about channel selection for [Zigbee and WiFi coexistence](https://support.metageek.com/hc/en-us/articles/203845040-ZigBee-and-WiFi-Coexistence).
The general recommendation is to only use channels 15, 20, or 25 in order to avoid interoperability problems with Zigbee devices that are limited to only being compatible with the ZLL (Zigbee Light Link) channels as well as lessen the chance of Wi-Fi networks interfering too much with the Zigbee network. Note that especially using Zigbee channels 11, 24, 25, or 26 on your Zigbee Coordinator could mean it will probably not be accessible to older devices as those Zigbee channels are commonly only supported by relatively modern Zigbee hardware devices with newer Zigbee firmware.
The Zigbee specification standards divide the 2.4&nbsp;GHz ISM radio band into 16 Zigbee channels (i.e. distinct radio frequencies for Zigbee). For all Zigbee devices to be able to communicate, they must support the same Zigbee channel (i.e. Zigbee radio frequency) that is set on the Zigbee Coordinator as the channel to use for its Zigbee network. Not all Zigbee devices support all Zigbee channels. Channel support usually depends on the age of the hardware and firmware, as well as on the device's power ratings.
Note that the best practice recommendation is not to change the Zigbee channel from the default, as not all Zigbee devices support all channels. If you have issues with overlapping frequencies, it is generally better to first change the Wi-Fi channel on your Wi-Fi router or all your Wi-Fi access points instead.
The general recommendation is to only use channels 15, 20, or 25 in order to avoid interoperability problems with Zigbee devices. Not only because there is less chance of Wi-Fi networks interfering too much with the Zigbee network on other channels, but also because not all Zigbee devices support all channels. Some devices, for example, are limited to only being compatible with ZLL (Zigbee Light Link) channels. It is therefore especially not recommended to use Zigbee channels 11, 24, 25, or 26 on your Zigbee coordinator. These Zigbee channels are commonly only supported by relatively modern Zigbee hardware devices with newer Zigbee firmware. If using those channels, your coordinator may not be usable with older Zigbee devices.
### Modifying the device type
@ -380,27 +382,36 @@ This service disables a lock code on a Zigbee lock.
## Adding devices
To add a new device:
Tip! It is highly recommended that you read through the two segments under the troubleshooting section below about "*Best practices to avoid pairing/connection difficulties*" and "*Zigbee interference avoidance and network range/coverage optimization*" for general prerequisite knowledge and advice on how to achieve the best possible Zigbee network in your environment.
**To add a new Zigbee device:**
1. Go to the **Integrations** panel, find the **Zigbee Home Automation** integration that was added by the configuration steps above, and select **Configure**.
1. Click on the plus button at the bottom right corner to start a scan for new devices.
1. Reset your Zigbee devices according to the device instructions provided by the manufacturer (e.g., turn on/off lights up to 10 times, switches usually have a reset button/pin). It might take a few seconds for the devices to appear. You can click on **Show logs** for more verbose output.
1. Reset your Zigbee devices to factory default settings according to the device instructions provided by the manufacturer (e.g., turn on/off lights up to 10 times; switches usually have a reset button/pin). It might take a few seconds for the devices to appear. You can click on **Show logs** for more verbose output.
1. Once the device is found, it will appear on that page and will be automatically added to your devices. You can optionally change its name and add it to an area (you can change this later). You can search again to add another device, or you can go back to the list of added devices.
### Using router devices
### Using router devices to add more devices
You use routers to increase the number of Zigbee devices that can be used in a network. The total number of Zigbee devices that you have on a Zigbee network depends on a few things, but you should know that Zigbee coordinator hardware and firmware only plays a larger role in Zigbee networks with a lot of devices. More important is how many directly connected devices ("direct children") versus how many routers are connected to your Zigbee coordinator. Zigpy library which ZHA uses has an upper limit. This is 32 direct children, but if your Zigbee coordinator hardware is powerful enough then you can still have hundreds of Zigbee devices connected through routers.
Most mains-powered devices, e.g., many always-powered wall plugs or light bulbs in your Zigbee network, will automatically act as a Zigbee router device (sometimes also referred to as a Zigbee "signal repeater" or "range extender").
Even the least powerful Zigbee coordinator hardware supported by Zigpy is CC2530/2531 and its default firmware, only supports 20 devices connected directly to the coordinator. However, by having routers in your Zigbee network, the mesh network size can be extended. You can assume that most, if not all mains/AC-powered devices, e.g., wall-plugs and always powered-on lightbulbs in your Zigbee network can serve as a router. You can even use CC2530/CC2531 with router firmware, as additional routers (which in their turn have a limit of 21 devices).
Because Zigbee should use a [wireless mesh network](https://en.wikipedia.org/wiki/Wireless_mesh_network) to be effective, you will need to add Zigbee router devices to increase the number of Zigbee devices that can be used in your Zigbee network, both in the total number of devices that can be added as well as the total range and coverage of the network. Some Zigbee router devices do a much better job of routing and repeating Zigbee signals and messages than others. You should not have a setup where Zigbee router devices (e.g., light bulbs) are often powered off. Zigbee router devices are meant to be always available.
An example using the default CC2531 coordinator firmware + two CC2531 routers; Your device limit will be:
All Zigbee coordinator firmware will only allow you to directly connect a certain number of devices. That limit is set for two reasons: first, to not overload the Zigbee coordinator, and second, to encourage your Zigbee network to quickly begin to utilize a "[mesh networking](https://en.wikipedia.org/wiki/Mesh_networking)" topology instead of only a "[star network](https://en.wikipedia.org/wiki/Star_network)" topology.
- Coordinator: 15 devices - 2 routers = 13
- Router one: + 21 devices
- Router two: + 21 devices
- Total device limit = **55 devices**
The total number of Zigbee devices that you can have on a Zigbee network depends on a few things. The Zigbee coordinator hardware and its firmware only play a larger role in Zigbee networks with a lot of devices. More important is the number of directly connected devices ("direct children") versus the number of routers that are connected to your Zigbee coordinator. The Zigpy library, which the ZHA integration depends on, has an upper limit of 32 direct children, but you can still have hundreds of Zigbee devices in total connected indirectly through routes via Zigbee router devices.
### Binding and unbinding
In this theoretical example, a CC2652-based Zigbee coordinator has three CC2530 Zigbee router devices for a total limit of 77 devices:
- Coordinator: 32 Zigbee End devices - 3 routers = 29
- Router one: + 16 devices
- Router two: + 16 devices
- Router three: + 16 devices
- Total device limit = **77 devices**
In practice, you will likely need to add a lot more Zigbee router devices than in this example in order to extend the coverage of the network to reach that many devices.
## Binding and unbinding
ZHA supports binding and unbinding. Binding is an action in Zigbee that defines relations between two Zigbee devices, specific endpoints, and a cluster ID. It provides a mechanism for attaching an endpoint on one Zigbee node to one or more endpoints on another Zigbee node or a Zigbee group (a group of Zigbee devices).
@ -435,15 +446,34 @@ ZHA does not currently support devices that can only use the ZSE ("Zigbee Smart
### Knowing which devices are supported
There is no official compatibility list of supported devices for the simple reason that practically all devices Zigbee Home Automation that are fully compliant with the standards and specifications as set by the [Zigbee Alliance](https://zigbeealliance.org) should technically be compatible with this ZHA integration. The fact remains, however, that some hardware manufacturers do not always fully comply with each set specification, which can cause a few devices to only partially work or not work at all with ZHA, but developers can create workarounds for such issues via a solution for 'ZHA exception and deviation handling' that this implementation features. See that section for more information.
Home Assistant's ZHA integration supports all standard Zigbee device types. It should be compatible with most Zigbee devices as long as they fully conform to the official ZCL (Zigbee Cluster Library) specifications defined by the [CSA (Connectivity Standards Alliance, formerly the Zigbee Alliance)](https://csa-iot.org/all-solutions/zigbee/). There is therefore no official compatibility list of devices that will work out-of-the-box with the ZHA integration.
Tip to new users is that, while there is no official list of supported devices, some ZHA users take comfort that blakadder maintains an unofficial Zigbee Device Compatibility Repository which anyone can submit compatibility reports to, it can be found at [zigbee.blakadder.com](https://zigbee.blakadder.com) and currently contains independent compatibility lists and device pairing tips for several home automation gateway/bridge/hub software, including but not limited to open source Zigbee implementations such as; ZHA, Tasmota, Zigbee2MQTT, and ZiGate.
Not all hardware manufacturers always fully comply with the standard specifications. Sometimes, they may also implement unique features. For this reason, some Zigbee devices pair/join fine with ZHA but then show no entities, or only a few, in the integration. Developers can work around most such interoperability issues by adding conversion/translation code in custom device handlers. For more information, refer to the section below on _How to add support for new and unsupported devices_.
### ZHA exception and deviation handling
For clarification: normally, only devices that do not fully conform to the CSA's ZCL specifications will fail to present all standard attributes as entities for configuration in the ZHA integration. Zigbee devices that only use the standard clusters and attributes defined in the Zigbee specifications set by the Connectivity Standards Alliance should not need custom device handlers.
Home Assistant's ZHA integration already supports most standard device types out-of-the-box as long as they follow the official Zigbee specifications, but for devices that manufacturers have not made fully Zigbee compatible, the ZHA integration includes a library called "[ZHA Device Handlers (also known as "zha-quirk")](https://github.com/zigpy/zha-device-handlers)" that handles and resolves compliance issues via custom conversion/translation of Zigbee parameters (clusters and attributes) for specific devices that do not conform with the Zigbee standards.
Before continuing with this section: If a device does not join/pair at all, read the troubleshooting sections about how to avoid pairing/connection difficulties, interference avoidance, and network range/coverage optimization.
The few devices that will, for example, not join/pair properly or, while joined/paired, do not present all attributes in the ZHA integration likely deviate from the Zigbee specifications set by the [CSA (Connectivity Standards Alliance)](https://csa-iot.org/all-solutions/zigbee/). These devices may therefore require the creation of a bug report by a device owner, supplying debug logs. The device owner may need to actively assist in troubleshooting by providing the information developers need to create custom ZHA Device Handlers.
Tip for new Zigbee users: Check out [blakadder's unofficial Zigbee Device Compatibility Repository](https://zigbee.blakadder.com). Anyone can help maintain the site by submitting device compatibility information to it. The repository contains independent community members' reports and device-specific pairing tips for several home automation gateway/bridge/hub software, including open-source Zigbee implementations, such as ZHA, Zigbee2MQTT, and Tasmota (Zigbee2Tasmota).
#### How to add support for new and unsupported devices
If your Zigbee device pairs/joins successfully with the ZHA integration but does not show all of the expected entities:
1. Try to re-pair/re-join the device several times.
2. Check out the troubleshooting section.
3. Still not working? You may need a custom device handler. This handler will have exception handling code to work around device-specific issues.
For devices that do not follow the standard defined in the CSA's ZCL (Zigbee Cluster Library), the ZHA integration relies on a project called "[ZHA Device Handlers (also known as "zha-quirk")](https://github.com/zigpy/zha-device-handlers)". It contains device-specific Python scripts called "quirks". These scripts can resolve compliance and interoperability issues by implementing on-the-fly conversion of custom Zigbee configurations or by implementing manufacturer-specific features for specific devices.
People familiar with other Zigbee gateway solutions for home automation may know similar concepts of using custom Zigbee device handlers/converters for non-standard devices. For example, [Zigbee2MQTT (and IoBroker) uses zigbee-herdsman converters](https://www.zigbee2mqtt.io/advanced/support-new-devices/01_support_new_devices.html) and [SmartThings Classics (Legacy) platform has Hub Connected Device Handlers](https://developer.smartthings.com/docs/devices/hub-connected/legacy).
If you do not want to develop such a "quirk" Python script yourself, you can submit a "device support request" as a new issue to the [ZHA Device Handlers project repository on GitHub](https://github.com/zigpy/zha-device-handlers/issues):
1. Sign in to GitHub.
2. Select **New issue** and follow the instructions.
* New device support requests require the device signature + diagnostic information.
* You may also need to actively help in further testing or provide additional information to the volunteering developers.
Note that submitting a new "device support request" does not guarantee that someone else will develop a custom "quirk" for ZHA. The project relies on volunteering developers. However, without "device support requests", the developers may not be aware that your specific Zigbee device is not working correctly in ZHA.
### Best practices to avoid pairing/connection difficulties

View File

@ -73,12 +73,12 @@ Use this My button:
{% my config_flow_start badge domain="zwave_js" %}, or follow these steps:
* Browse to your Home Assistant instance.
* In the sidebar click on _**{% my config icon %}**_.
* From the configuration menu select: _**{% my integrations %}**_.
* In the bottom right, click on the
_**{% my config_flow_start icon domain="zwave_js" %}**_ button.
* From the list, search and select _**"Z-Wave"**_.
* Follow the instructions on screen to complete the set up.
* In the sidebar, select **{% my config icon %}**.
* From the configuration menu, select: **{% my integrations %}**.
* On the bottom right, select the
**{% my config_flow_start icon domain="zwave_js" %}** button.
* From the list, search and select **Z-Wave**.
* Follow the instructions on screen to complete the setup.
{% enddetails %}
@ -102,7 +102,7 @@ While your Z-Wave mesh is permanently stored on your dongle, the additional meta
1. In the Z-Wave integration, select **Configure**.
1. Select **Add device**.
* The Z-Wave controller is now in inclusion mode.
1. If your device supports SmartStart, select **Scan QR code** and scan the QR code on your device.
1. If your device supports SmartStart (700 series controller), select **Scan QR code** and scan the QR code on your device.
1. If your device does not support SmartStart, set the device in inclusion mode. Refer to the device manual to see how this is done.
* If your device is included using S2 security, you may be prompted to enter a PIN number provided with your device. Often, this PIN is provided with the documentation _and_ is also printed on the device itself. For more information on secure inclusion, refer to [this section](/integrations/zwave_js/#should-i-use-secure-inclusion).
1. The UI should confirm that the device was added. After a short while (seconds to minutes), the entities should also be created.

View File

@ -15,7 +15,7 @@ og_image: /images/blog/2016-12-0.35/social.png
This will be the last release of 2016 as our developers are taking a well deserved break. We will be back in 2017!
## Text to Speech
## Text-to-speech
With the addition of a [text-to-speech][tts] component by [@pvizeli] we have been able to bring Home Assistant to a whole new level. The text-to-speech component will take in any text and will play it on a media player that supports playing media. We have tested this on Sonos, Chromecast, and Google Home.
[https://www.youtube.com/watch?v=Ke0QuoJ4tRM](https://www.youtube.com/watch?v=Ke0QuoJ4tRM)
@ -72,7 +72,7 @@ http:
```
- Fix exit hanging on OS X with async logging ([@balloob])
- Fix Text to speech clearing cache ([@pvizeli])
- Fix text-to-speech clearing cache ([@pvizeli])
- Allow setting a base API url in HTTP component ([@balloob])
- Fix occasional errors in automation ([@pvizeli])

View File

@ -76,7 +76,7 @@ We have a lot of ideas! We are not going to make any promises but here are some
- Google Home / Google Assistant Smart Home skill
- Allow easy linking of other cloud services to Home Assistant. No more local juggling with OAuth flows. For example, link your Fitbit account and the Fitbit component will show up in Home Assistant.
- Encrypted backups of your Hass.io data
- Text to speech powered by AWS Polly
- Text-to-speech powered by AWS Polly
- Generic HTTP cloud endpoint for people to send messages to their local instance. This will allow people to build applications on top of the Home Assistant cloud.
- IFTTT integration
- Alexa shopping list integration

View File

@ -90,7 +90,7 @@ There have been several improvements to notifications as well.
- An event gets sent upon a notification being [cleared](https://companion.home-assistant.io/docs/notifications/notification-cleared) along with all notification data.
- Notifications can make use of the alarm stream to bypass a device's ringer mode setting. This can be useful if there is an important event such as an alarm being triggered. Make sure to check the updated Android examples on the [companion site](https://companion.home-assistant.io/docs/notifications/critical-notifications).
- [Text To Speech notifications](https://companion.home-assistant.io/docs/notifications/notifications-basic#text-to-speech-notifications), with the ability to use the alarm stream if desired. By default it will use the device's music stream. There is also an additional option to temporarily change the volume level to the maximum level while speaking, the level would then restored to what it was previously.
- [Text-to-speech notifications](https://companion.home-assistant.io/docs/notifications/notifications-basic#text-to-speech-notifications), with the ability to use the alarm stream if desired. By default, it will use the device's music stream. There is also an additional option to temporarily change the volume level to the maximum level while speaking; the level is then restored to its previous value (see the sketch after this list).
- New device [commands](https://companion.home-assistant.io/docs/notifications/notification-commands) to control your phone: broadcasting an intent to another app, controlling Do Not Disturb and ringer mode.
- Opening another app with an [actionable notification](https://companion.home-assistant.io/docs/notifications/actionable-notifications#building-automations-for-notification-actions), make sure to follow the Android examples.
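For the text-to-speech notifications mentioned above, a hedged sketch of a service call; the notify service name is device-specific, and the data keys are assumed from the companion documentation:

```yaml
# Hedged sketch; the notify service name and message text are assumptions.
service: notify.mobile_app_my_phone
data:
  message: "TTS"
  data:
    tts_text: "The washing machine is done."
    media_stream: "alarm_stream"
```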

View File

@ -125,7 +125,7 @@ inspiring others.
## New neural voices for Nabu Casa Cloud TTS
If you have a [Nabu Casa Home Assistant Cloud][cloud] subscription, this release
brings in some really nice goodness for you. The Text-to-Speech service offered
brings in some really nice goodness for you. The text-to-speech service offered
by Nabu Casa has been extended and now supports a lot of new voices in many
different languages.

View File

@ -256,13 +256,13 @@ Screenshot of the text selectors.
Screenshot of the object selector, giving a YAML input field.
</p>
## Cloud Text to Speech settings
## Cloud text-to-speech settings
Nabu Casa has been offering an amazing text to speech service for a while now,
Nabu Casa has been offering an amazing text-to-speech service for a while now,
yet it was hard to find, and even harder to set up and use.
To fix this, a new settings UI has been added where you can select the default
language and gender to use for the text to speech service, so you no longer have
language and gender to use for the text-to-speech service, so you no longer have
to attach that to every service call. You can find it in the Home Assistant Cloud
panel.

View File

@ -1,6 +1,6 @@
---
title: "Community Highlights: 19th edition"
description: "Schedule your vacuum cleaning robot with a blueprint, show the robot status with a card and get started with open source Text To Speech systems"
description: "Schedule your vacuum cleaning robot with a blueprint, show the robot status with a card and get started with open source text-to-speech systems"
date: 2021-04-30 00:00:00
date_formatted: "April 30, 2021"
author: Klaas Schoute
@ -91,7 +91,7 @@ well-known models that are now available on the market.
Maybe the name still sounds fairly unknown to you, but [OpenTTS](https://github.com/synesthesiam/hassio-addons)
is an add-on, which gives you the possibility to use multiple open source
Text to Speech systems. So that you can eventually have text spoken on: for
text-to-speech systems, so that you can eventually have text spoken on, for
example, a Google Home speaker. [synesthesiam](https://github.com/synesthesiam)
recently released a new version of OpenTTS and you can install it as an
add-on in Home Assistant.

View File

@ -24,7 +24,7 @@ Information on [how to share](#got-a-tip-for-the-next-edition).
Are you one of those who always leave the doors open?
Then this week we have a nice blueprint for you! [BasTijs](https://community.home-assistant.io/u/bastijs)
has made a blueprint that announces through text to speech in the house,
has made a blueprint that announces through text-to-speech in the house
that a door is open and only stops when the door is closed again.
{% my blueprint_import badge blueprint_url="https://community.home-assistant.io/t/door-open-tts-announcer/266252" %}

View File

@ -827,7 +827,7 @@ and thus can be safely removed from your YAML configuration after upgrading.
{% enddetails %}
{% details "Microsoft Text-to-Speech (TTS)" %}
{% details "Microsoft text-to-speech (TTS)" %}
The default voice is changed to `JennyNeural`; The previous default `ZiraRUS`

View File

@ -111,7 +111,7 @@ So, this release will bring in a bunch of new media sources.
Your Cameras! Your Lovelace Dashboards! You can just pick one of your cameras
or Lovelace dashboards and "Play" them on a supported device
(like a Google Nest Hub or television). But also text to speech!
(like a Google Nest Hub or television). But also text-to-speech!
<img class="no-shadow" src='/images/blog/2022-03/pick-tts.png' alt='Screenshot showing playing TTS as a media action'>

View File

@ -1562,7 +1562,7 @@ Home Assistant startup, instead of to "unknown".
{% enddetails %}
{% details "Text-to-Speech (TTS)" %}
{% details "text-to-speech (TTS)" %}
The TTS `base_url` option is deprecated. Please configure internal/external
URL instead.

View File

@ -44,7 +44,7 @@ With Home Assistant we want to make a privacy and locally focused smart home ava
With Home Assistant we prefer to get the things we're building in the user's hands as early as possible. Even basic functionality allows users to find things that work and don't work, allowing us to address the direction if needed.
A voice assistant has a lot of different parts: hot word detection, speech to text, intent recognition, intent execution, text to speech. Making each work in every language is a lot of work. The most important part is the intent recognition and intent execution. We need to be able to understand your commands and execute them.
A voice assistant has a lot of different parts: hot word detection, speech-to-text, intent recognition, intent execution, text-to-speech. Making each work in every language is a lot of work. The most important part is the intent recognition and intent execution. We need to be able to understand your commands and execute them.
We started gathering these command sentences in our new [intents repository](https://github.com/home-assistant/intents). It will soon power the existing [conversation integration](/integrations/conversation) in Home Assistant, allowing you to use our app to write and say commands.

View File

@ -32,7 +32,7 @@ We want Assist to be as accessible to as many people as possible. To do this, we
Assist is enabled by default in the Home Assistant 2023.2 release. Tap the new Assist icon <img src='/images/assist/assist-icon.svg' alt='Assist icon' style='height: 32px' class='no-shadow'> at the top right of the dashboard to use it.
[Assist documentation.](https://www.home-assistant.io/docs/assist/)
[Assist documentation.](https://www.home-assistant.io/voice_control/)
<img src="/images/blog/2023-01-26-year-of-the-voice-chapter-1/assist-dialog.png" alt="Screenshot of the Assist dialog" class='no-shadow' />
@ -40,7 +40,7 @@ Assist is enabled by default in the Home Assistant 2023.2 release. Tap the new A
We want to make it as easy as possible to use Assist. To enable this for Android users, we have added a new tile to the Android Wear app. A simple swipe from the clock face will show the assist button and allows you to send voice commands.
[Assist on Android Wear documentation.](https://www.home-assistant.io/docs/assist/android/)
[Assist on Android Wear documentation.](https://www.home-assistant.io/voice_control/android/)
_The tile is available in [Home Assistant Companion for Android 2023.1.1](https://play.google.com/store/apps/details?id=io.homeassistant.companion.android&pcampaignid=pcampaignidMKT-Other-global-all-co-prtnr-py-PartBadge-Mar2515-1&pcampaignid=pcampaignidMKT-Other-global-all-co-prtnr-py-PartBadge-Mar2515-1)._
@ -50,7 +50,7 @@ _The tile is available in [Home Assistant Companion for Android 2023.1.1](https:
For Apple devices we have been able to create a fully hands-free experience by integrating with Siri. This is powered by a new Apple Shortcut action called Assist, which is part of the Home Assistant app. This shortcut action can also be manually triggered from your Mac taskbar, iPhone home screen or Apple Watch complication. We have two ready-made shortcuts that users can import from the documentation with a single tap to unlock these features.
[Assist via Siri and Apple Shortcuts documentation.](https://www.home-assistant.io/docs/assist/apple/)
[Assist via Siri and Apple Shortcuts documentation.](https://www.home-assistant.io/voice_control/apple/)
_The Assist shortcut is available in [Home Assistant Companion for iOS 2023.2](https://apps.apple.com/us/app/home-assistant/id1099568401?itsct=apps_box_badge&itscg=30200). Mac version is awaiting approval._
@ -66,7 +66,7 @@ With Home Assistant we believe that every home is uniquely yours and that [techn
Assist includes support for custom sentences, responses and intents, allowing you to achieve all of the above, and more. We've designed the custom sentence format in a way that it can be easily shared with the community.
Read [the documentation](https://www.home-assistant.io/docs/assist/custom_sentences) on how to get started.
Read [the documentation](https://www.home-assistant.io/voice_control/custom_sentences) on how to get started.
_In a future release we're planning on adding a user interface to customize and import sentences._
@ -92,8 +92,7 @@ For Year of the Voice - Chapter 1 we focused on building intent recognition into
We will continue collecting home automation sentences for all languages ([anyone can help!](https://developers.home-assistant.io/docs/voice/intent-recognition/)). Updates will be included with every major release of Home Assistant.
Our next step is integrating Speech-to-Text and Text-to-Speech with Assist. We don't have a timeline yet when that will be ready. Stay tuned!
Our next step is integrating speech-to-text and text-to-speech with Assist. We don't have a timeline yet when that will be ready. Stay tuned!
## Credits
A lot of people have worked very hard to make all of the above possible.

View File

@ -89,7 +89,7 @@ Go ahead, it is enabled by default; just tap the new Assist icon
at the top right of your dashboard to start using it.
Oh, and we are also releasing some fun stuff we've cooked up along the way!
[Read more about Assist](/docs/assist/) and other released voice features in the
[Read more about Assist](/voice_control/) and other released voice features in the
[Chapter 1: Assist](/blog/2023/01/26/year-of-the-voice-chapter-1/) blogpost
and a [video presentation (including live demos) on YouTube](https://www.youtube.com/live/ixgNT3RETPg).

View File

@ -27,7 +27,7 @@ _To watch the video presentation of this blog post, including live demos, check
[Chapter 1]: https://www.home-assistant.io/blog/2023/01/26/year-of-the-voice-chapter-1/
[45 languages]: https://home-assistant.github.io/intents/
[live-stream]: https://youtube.com/live/Tk-pnm7FY7c?feature=share
[assist]: /docs/assist/
[assist]: /voice_control/
<!--more-->
@ -52,7 +52,7 @@ Screenshot of the new Assist debug tool.
</p>
[Assist Pipeline integration]: https://www.home-assistant.io/integrations/assist_pipeline/
[Assist dialog]: /docs/assist/
[Assist dialog]: /voice_control/
## Voice Assistant powered by Home Assistant Cloud
@ -131,7 +131,7 @@ Today were launching support for building voice assistants using ESPHome. Con
We've been focusing on the [M5STACK ATOM Echo][atom-echo] for testing and development. For $13 it comes with a microphone and a speaker in a nice little box. We've created a tutorial to turn this device into a voice remote directly from your browser!
[Tutorial: create a $13 voice remote for Home Assistant.](https://www.home-assistant.io/projects/thirteen-usd-voice-remote/)
[Tutorial: create a $13 voice remote for Home Assistant.](https://www.home-assistant.io/voice_control/thirteen-usd-voice-remote/)
[ESPHome Voice Assistant documentation.](https://esphome.io/components/voice_assistant.html)
@ -152,7 +152,7 @@ By configuring off-hook autodial, your phone will automatically call Home Assist
We've focused our initial efforts on supporting [the Grandstream HT801 Voice-over-IP box][ht801]. It works with any phone with an RJ11 connector, and connects directly to Home Assistant. There is no need for an extra server.
[Tutorial: create your own Worlds Most Private Voice Assistant](https://www.home-assistant.io/projects/worlds-most-private-voice-assistant/)
[Tutorial: create your own World's Most Private Voice Assistant](https://www.home-assistant.io/voice_control/worlds-most-private-voice-assistant/)
<p class='img'>

View File

@ -50,6 +50,7 @@ Enjoy the release!
- [Release 2023.5.1 - May 4](#release-202351---may-4)
- [Release 2023.5.2 - May 5](#release-202352---may-5)
- [Release 2023.5.3 - May 14](#release-202353---may-14)
- [Release 2023.5.4 - May 23](#release-202354---may-23)
- [Need help? Join the community!](#need-help-join-the-community)
- [Breaking Changes](#breaking-changes)
- [Farewell to the following](#farewell-to-the-following)
@ -86,10 +87,10 @@ To help you get started, we made sure the documentation is perfect, including
some cool project tutorials to jump-start your own private voice assistant
journey:
- [The world's most private voice assistant](/projects/worlds-most-private-voice-assistant/)
- [Giving your voice assistant a Super Mario personality using OpenAI](/projects/worlds-most-private-voice-assistant/#give-your-voice-assistant-personality-using-the-openai-integration)
- [Installing a local Assist pipeline](/docs/assist/voice_remote_local_assistant/)
- [The $13 tiny ESPHome-based voice assistant](/projects/thirteen-usd-voice-remote/)
- [The world's most private voice assistant](/voice_control/worlds-most-private-voice-assistant/)
- [Giving your voice assistant a Super Mario personality using OpenAI](/voice_control/worlds-most-private-voice-assistant/#give-your-voice-assistant-personality-using-the-openai-integration)
- [Installing a local Assist pipeline](/voice_control/voice_remote_local_assistant/)
- [The $13 tiny ESPHome-based voice assistant](/voice_control/thirteen-usd-voice-remote/)
If you missed [last week's live stream](https://www.youtube.com/watch?v=Tk-pnm7FY7c),
be sure to check it out. It is full of live demos and detailed explanations
@ -122,7 +123,7 @@ manage the entity's aliases.
<img class="no-shadow" src='/images/blog/2023-05/voice-assistants-expose-entities-settings.png' alt='Screenshot showing the new expose entities tab in the voice assistants menu.'>
This currently supports our [Assist](/docs/assist), and Amazon Alexa and
This currently supports our [Assist](/voice_control/), and Amazon Alexa and
Google Assistant via Home Assistant Cloud.
## Improved entity setting
@ -276,7 +277,7 @@ findability. This one is new:
[@tronikos]: https://github.com/tronikos
[android tv remote]: /integrations/androidtv_remote
[Anova]: /integrations/anova
[assist]: /docs/assist
[assist]: /voice_control/
[Intellifire]: /integrations/intellifire
[Monessen]: /integrations/monessen
[RAPT Bluetooth]: /integrations/rapt_ble
@ -539,6 +540,107 @@ The following integrations are now available via the Home Assistant UI:
[zha docs]: /integrations/zha/
[zwave_js docs]: /integrations/zwave_js/
## Release 2023.5.4 - May 23
- Fix weather handling in zamg ([@killer0071234] - [#85635]) ([zamg docs])
- Fix onvif cameras that cannot parse relative time ([@bdraco] - [#92711]) ([onvif docs]) (dependency)
- Update solax state class for sensors with no units ([@MichaelMraka] - [#92914]) ([solax docs])
- Fix NWS error with no observation ([@MatthewFlamm] - [#92997]) ([nws docs])
- Increase timeout to 30 seconds for homeassistant_alerts integration ([@N3rdix] - [#93089]) ([homeassistant_alerts docs])
- Fix ONVIF cameras that change the xaddr for the pull point service ([@bdraco] - [#93104]) ([onvif docs])
- Bump pyunifiprotect to 4.9.0 ([@bdraco] - [#93106]) ([unifiprotect docs]) (dependency)
- Fix last imap message is not reset on empty search ([@jbouwh] - [#93119]) ([imap docs])
- Bump `accuweather` to version 0.5.2 ([@bieniu] - [#93130]) ([accuweather docs]) (dependency)
- Better handling of source sensor unavailability in Riemann Integration ([@dgomes] - [#93137]) ([integration docs])
- Bump `regenmaschine` to 2023.05.1 ([@bachya] - [#93139]) ([rainmachine docs]) (dependency)
- Bump pyatv to 0.11.0 ([@bdraco] - [#93172]) ([apple_tv docs])
- Fix china login for bmw_connected_drive ([@rikroe] - [#93180]) ([bmw_connected_drive docs])
- Add Fan and Dry HVAC modes to Advantage Air MyTemp preset ([@Bre77] - [#93189]) ([advantage_air docs])
- Disconnect yale access locks at the stop event ([@bdraco] - [#93192]) ([august docs]) ([yalexs_ble docs]) (dependency)
- Add support for Yale Home brand to august ([@bdraco] - [#93214]) ([august docs])
- Bump async-upnp-client to 0.33.2 ([@StevenLooman] - [#93329]) (dependency)
- Bump `aionotion` to 2023.05.5 ([@bachya] - [#93334]) ([notion docs])
- Bump zwave-js-server-python to 0.48.1 ([@raman325] - [#93342]) ([zwave_js docs]) (dependency)
- Bump glances_api to 0.4.2 ([@freeDom-] - [#93352]) ([glances docs])
- Fix august configuration url with Yale Home brand ([@bdraco] - [#93361]) ([august docs]) (dependency)
- Bump httpx to 0.24.1 ([@epenet] - [#93396]) (dependency)
- Fix non threadsafe call xiaomi_aqara ([@bdraco] - [#93405]) ([xiaomi_aqara docs])
- Bump Matter server library to 3.4.1 and address changes ([@marcelveldt] - [#93411]) ([matter docs]) (dependency)
- Fix race in tracking pending writes in recorder ([@bdraco] - [#93414]) ([recorder docs])
[#85635]: https://github.com/home-assistant/core/pull/85635
[#92422]: https://github.com/home-assistant/core/pull/92422
[#92513]: https://github.com/home-assistant/core/pull/92513
[#92610]: https://github.com/home-assistant/core/pull/92610
[#92711]: https://github.com/home-assistant/core/pull/92711
[#92914]: https://github.com/home-assistant/core/pull/92914
[#92997]: https://github.com/home-assistant/core/pull/92997
[#93066]: https://github.com/home-assistant/core/pull/93066
[#93089]: https://github.com/home-assistant/core/pull/93089
[#93104]: https://github.com/home-assistant/core/pull/93104
[#93106]: https://github.com/home-assistant/core/pull/93106
[#93119]: https://github.com/home-assistant/core/pull/93119
[#93130]: https://github.com/home-assistant/core/pull/93130
[#93137]: https://github.com/home-assistant/core/pull/93137
[#93139]: https://github.com/home-assistant/core/pull/93139
[#93172]: https://github.com/home-assistant/core/pull/93172
[#93180]: https://github.com/home-assistant/core/pull/93180
[#93189]: https://github.com/home-assistant/core/pull/93189
[#93192]: https://github.com/home-assistant/core/pull/93192
[#93214]: https://github.com/home-assistant/core/pull/93214
[#93329]: https://github.com/home-assistant/core/pull/93329
[#93334]: https://github.com/home-assistant/core/pull/93334
[#93342]: https://github.com/home-assistant/core/pull/93342
[#93352]: https://github.com/home-assistant/core/pull/93352
[#93361]: https://github.com/home-assistant/core/pull/93361
[#93396]: https://github.com/home-assistant/core/pull/93396
[#93405]: https://github.com/home-assistant/core/pull/93405
[#93411]: https://github.com/home-assistant/core/pull/93411
[#93414]: https://github.com/home-assistant/core/pull/93414
[@Bre77]: https://github.com/Bre77
[@MatthewFlamm]: https://github.com/MatthewFlamm
[@MichaelMraka]: https://github.com/MichaelMraka
[@N3rdix]: https://github.com/N3rdix
[@StevenLooman]: https://github.com/StevenLooman
[@bachya]: https://github.com/bachya
[@balloob]: https://github.com/balloob
[@bdraco]: https://github.com/bdraco
[@bieniu]: https://github.com/bieniu
[@dgomes]: https://github.com/dgomes
[@epenet]: https://github.com/epenet
[@freeDom-]: https://github.com/freeDom-
[@frenck]: https://github.com/frenck
[@jbouwh]: https://github.com/jbouwh
[@killer0071234]: https://github.com/killer0071234
[@marcelveldt]: https://github.com/marcelveldt
[@raman325]: https://github.com/raman325
[@rikroe]: https://github.com/rikroe
[accuweather docs]: /integrations/accuweather/
[advantage_air docs]: /integrations/advantage_air/
[apple_tv docs]: /integrations/apple_tv/
[august docs]: /integrations/august/
[bluetooth docs]: /integrations/bluetooth/
[bmw_connected_drive docs]: /integrations/bmw_connected_drive/
[cloud docs]: /integrations/cloud/
[frontend docs]: /integrations/frontend/
[glances docs]: /integrations/glances/
[homeassistant_alerts docs]: /integrations/homeassistant_alerts/
[imap docs]: /integrations/imap/
[integration docs]: /integrations/integration/
[lifx docs]: /integrations/lifx/
[matter docs]: /integrations/matter/
[notion docs]: /integrations/notion/
[nws docs]: /integrations/nws/
[onvif docs]: /integrations/onvif/
[rainmachine docs]: /integrations/rainmachine/
[recorder docs]: /integrations/recorder/
[solax docs]: /integrations/solax/
[unifiprotect docs]: /integrations/unifiprotect/
[xiaomi_aqara docs]: /integrations/xiaomi_aqara/
[yalexs_ble docs]: /integrations/yalexs_ble/
[zamg docs]: /integrations/zamg/
[zwave_js docs]: /integrations/zwave_js/
## Need help? Join the community!
Home Assistant has a great community of users who are all more than willing

View File

@ -218,6 +218,17 @@ layout: null
# Moved documentation
/details/database /docs/backend/database
/details/updater /docs/backend/updater
/docs/assist/ /voice_control/
/docs/assist/android/ /voice_control/android/
/docs/assist/apple/ /voice_control/apple/
/docs/assist/builtin_sentences/ /voice_control/builtin_sentences/
/docs/assist/custom_sentences/ /voice_control/custom_sentences/
/docs/assist/using_voice_assistants_overview/ /voice_control/using_voice_assistants_overview/
/docs/assist/voice_remote_expose_devices/ /voice_control/voice_remote_expose_devices/
/docs/assist/voice_remote_local_assistant/ /voice_control/voice_remote_local_assistant/
/docs/assist/troubleshooting/ /voice_control/troubleshooting/
/docs/assist/worlds-most-private-voice-assistant/ /voice_control/worlds-most-private-voice-assistant/
/docs/assist/thirteen-usd-voice-remote/ /voice_control/thirteen-usd-voice-remote/
/docs/backend/updater /integrations/analytics
/docs/ecosystem/ios/ https://companion.home-assistant.io/
/docs/ecosystem/ios/devices_file https://companion.home-assistant.io/
@@ -376,24 +387,24 @@ layout: null
 /getting-started/autostart-systemd https://community.home-assistant.io/t/autostart-using-systemd/199497
 
 # Add-ons
-/addons/cec_scan https://github.com/home-assistant/hassio-addons/blob/master/cec_scan/README.md
-/addons/check_config https://github.com/home-assistant/hassio-addons/blob/master/check_config/README.md
-/addons/configurator https://github.com/home-assistant/hassio-addons/blob/master/configurator/README.md
-/addons/dhcp_server https://github.com/home-assistant/hassio-addons/blob/master/dhcp_server/README.md
-/addons/dnsmasq https://github.com/home-assistant/hassio-addons/blob/master/dnsmasq/README.md
-/addons/duckdns https://github.com/home-assistant/hassio-addons/blob/master/duckdns/README.md
-/addons/git_pull https://github.com/home-assistant/hassio-addons/blob/master/git_pull/README.md
-/addons/google_assistant https://github.com/home-assistant/hassio-addons/blob/master/google_assistant/README.md
-/addons/homematic https://github.com/home-assistant/hassio-addons/blob/master/homematic/README.md
-/addons/lets_encrypt https://github.com/home-assistant/hassio-addons/blob/master/letsencrypt/README.md
-/addons/mariadb https://github.com/home-assistant/hassio-addons/blob/master/mariadb/README.md
-/addons/mosquitto https://github.com/home-assistant/hassio-addons/blob/master/mosquitto/README.md
-/addons/nginx_proxy https://github.com/home-assistant/hassio-addons/blob/master/nginx_proxy/README.md
-/addons/rpc_shutdown https://github.com/home-assistant/hassio-addons/blob/master/rpc_shutdown/README.md
-/addons/samba https://github.com/home-assistant/hassio-addons/blob/master/samba/README.md
-/addons/snips https://github.com/home-assistant/hassio-addons/blob/master/snips/README.md
-/addons/ssh https://github.com/home-assistant/hassio-addons/blob/master/ssh/README.md
-/addons/tellstick https://github.com/home-assistant/hassio-addons/blob/master/tellstick/README.md
+/addons/cec_scan https://github.com/home-assistant/addons/blob/master/cec_scan/README.md
+/addons/check_config https://github.com/home-assistant/addons/blob/master/check_config/README.md
+/addons/configurator https://github.com/home-assistant/addons/blob/master/configurator/README.md
+/addons/dhcp_server https://github.com/home-assistant/addons/blob/master/dhcp_server/README.md
+/addons/dnsmasq https://github.com/home-assistant/addons/blob/master/dnsmasq/README.md
+/addons/duckdns https://github.com/home-assistant/addons/blob/master/duckdns/README.md
+/addons/git_pull https://github.com/home-assistant/addons/blob/master/git_pull/README.md
+/addons/google_assistant https://github.com/home-assistant/addons/blob/master/google_assistant/README.md
+/addons/homematic https://github.com/home-assistant/addons/blob/master/homematic/README.md
+/addons/lets_encrypt https://github.com/home-assistant/addons/blob/master/letsencrypt/README.md
+/addons/mariadb https://github.com/home-assistant/addons/blob/master/mariadb/README.md
+/addons/mosquitto https://github.com/home-assistant/addons/blob/master/mosquitto/README.md
+/addons/nginx_proxy https://github.com/home-assistant/addons/blob/master/nginx_proxy/README.md
+/addons/rpc_shutdown https://github.com/home-assistant/addons/blob/master/rpc_shutdown/README.md
+/addons/samba https://github.com/home-assistant/addons/blob/master/samba/README.md
+/addons/snips https://github.com/home-assistant/addons/blob/master/snips/README.md
+/addons/ssh https://github.com/home-assistant/addons/blob/master/ssh/README.md
+/addons/tellstick https://github.com/home-assistant/addons/blob/master/tellstick/README.md
 
 # Redirect old /demo to new demo page
 /demo https://demo.home-assistant.io
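The two hunks above edit what is evidently the site's redirects file: each line is one rule consisting of the old path, whitespace, and the new destination, which may be a site-relative path or an absolute URL. A minimal sketch of the format, using two rules taken from the diff (which server or build step consumes this file is not shown here and is an assumption):

```text
# <old path> <new destination>
/docs/assist/ /voice_control/
/addons/ssh https://github.com/home-assistant/addons/blob/master/ssh/README.md
```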
@@ -37,6 +37,7 @@ frontpage_image: /images/frontpage/blue-frontpage.jpg
 <p style="margin-top: 12px;"><b style="color:#B60017;">Discontinued!</b> The edition of the Blue was &mdash; limited. We are out of stock, with no plans to relaunch this edition.</p>
 <p>Sorry you missed it. The new kid on the block is <a href="/yellow" target="_blank">Home&nbsp;Assistant&nbsp;Yellow</a>.</p>
 <p>Lover of ODROID? <a href="/installation/odroid/" target="_blank">This way, please</a>.</p>
+<p>You know what else is blue, pretty, and has our logo on it? <a href="/skyconnect" target="_blank">Home&nbsp;Assistant&nbsp;SkyConnect</a>.</p>
 </div>
 </div>
@@ -37,6 +37,12 @@ The documentation covers beginner to advanced topics around the installation, se
 </div>
 <div class='title'>Android and iOS</div>
 </a>
+<a class='option-card' href='/voice_control/'>
+<div class='img-container'>
+<img src='/images/assist/assist-icon.svg' />
+</div>
+<div class='title'>Voice control</div>
+</a>
 </div>
 <br/>
@@ -21,7 +21,7 @@ A great place to find popular configurations is on this
 ## Popular Blueprints
 
-This is a list of the most popular [blueprints](/integrations/blueprint) in the [Blueprint Exchange category on the forums](https://www.home-assistant.io/get-blueprints).
+This is a list of the most popular [blueprints](/docs/automation/using_blueprints/) in the [Blueprint Exchange category on the forums](https://www.home-assistant.io/get-blueprints).
 
 {% for post in site.data.blueprint_exchange_data limit:25 %}
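The `{% for %}` line in the hunk above is Liquid: it iterates over entries in the site's `blueprint_exchange_data` data file, and `limit:25` caps the loop at 25 entries. A minimal sketch of a loop body that could render each entry (the `post.title` and `post.url` field names are assumptions, not shown in the diff):

```liquid
{% for post in site.data.blueprint_exchange_data limit:25 %}
  <a href="{{ post.url }}">{{ post.title }}</a>
{% endfor %}
```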
@@ -5,18 +5,41 @@ description: "Instructions to get Home Assistant configured."
 Alright, you made it here. The tough part is done.
 
-With Home Assistant installed, it's time to configure it. Here you will create the owner account of Home Assistant. This account will be an administrator and will always be able to change everything. Enter a name, username, password and select **create account**.
+After entering the Home Assistant device's address in your browser's address bar, the preparation screen is shown. Depending on your hardware, preparation may take a while.
 
-![Set your username and password.](/images/getting-started/username.png)
+![Home Assistant preparation](/images/getting-started/onboarding_preparing_01.png)
 
-Next, you can enter a name for your home and set your location and unit system. Select **Detect** to find your location and set your time zone and unit system based on that location. If you'd rather not send your location, you can set these values manually.
+To show the logs, select the blue pulsing circle.
+![Home Assistant preparation](/images/getting-started/onboarding_preparing_show_logs.png)
 
-![Set your location, time zone, and unit system.](/images/getting-started/location.png)
+With Home Assistant installed, it's time to set up the basics.
 
-Once you are done, click **Next**. In this screen, Home Assistant will show any {% term devices %} that it has discovered on your network. Don't be alarmed if you see fewer items than shown below; you can always manually add devices later.
+In this step, you will create the owner account of Home Assistant. This account is an administrator account. It will always be able to change everything.
+1. Enter a name, username, and password. Select **Create account**.
 
-![Discovery of devices on your network.](/images/getting-started/devices.png)
+![Set your username and password.](/images/getting-started/username.png)
 
-Finally, click **Finish**. Now you're brought to the Home Assistant web interface. This screen will show all of your devices.
+1. Enter a name for your home and define the location specific settings and the language of the user interface.
+* To automatically populate these settings, select **Detect**.
+* If you'd rather not send your location, you can set these values manually.
+![Define your location specific settings.](/images/getting-started/onboarding_location.png)
+1. Select which information you are willing to share.
+* Sharing is disabled by default. However, we would like to encourage you to share some of this data.
+* This information helps us to find out which platforms we need to support and where to focus our efforts.
+* The data is anonymized and aggregated. To see the charts we generate out of this data, take a look at our [analytics page](https://analytics.home-assistant.io/).
+![Share anonymized data](/images/getting-started/onboarding_share_anonymized_info.png)
+1. Once you are done, select **Next**.
+* Home Assistant will then show any {% term devices %} it has discovered on your network.
+* Don't be alarmed if you see fewer items than shown below; you can always manually add devices later.
+![Discovery of devices on your network.](/images/getting-started/onboarding_devices.png)
+1. Finally, select **Finish**.
+* Now you're brought to the Home Assistant web interface. This screen will show all of your devices.
 
 {% include getting-started/next_step.html step="Concepts & Terminologies" link="/getting-started/concepts-terminology/" %}
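The `{% include %}` line that closes the page passes `step` and `link` as parameters to a shared `next_step.html` partial; Jekyll exposes such parameters to the partial through the `include` object. A minimal sketch of what that partial could look like (the actual file is not part of this diff, so the markup and class name are assumptions):

```liquid
<!-- getting-started/next_step.html (hypothetical sketch) -->
<a class="next-step" href="{{ include.link }}">
  Next step: {{ include.step }}
</a>
```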
Some files were not shown because too many files have changed in this diff.