From 8b17b6ed1c6726805a5afde4e6ae4b6b8abcf55b Mon Sep 17 00:00:00 2001
From: Bram Kragten
Date: Sun, 10 Nov 2019 20:30:41 +0100
Subject: [PATCH] Add attribution and onboarding to voice (#4190)

* Add attribution and onboarding to voice

* Align with backend changes

* Layout + switch to ws for process

* Don't mutate window

* Move speechRecognition

* Add border

* Update ha-voice-command-dialog.ts
---
 src/common/dom/speech-recognition.ts |  14 ++
 src/data/conversation.ts             |  25 ++-
 .../ha-voice-command-dialog.ts       | 171 ++++++++++++------
 3 files changed, 150 insertions(+), 60 deletions(-)
 create mode 100644 src/common/dom/speech-recognition.ts

diff --git a/src/common/dom/speech-recognition.ts b/src/common/dom/speech-recognition.ts
new file mode 100644
index 0000000000..6b7850dfeb
--- /dev/null
+++ b/src/common/dom/speech-recognition.ts
@@ -0,0 +1,14 @@
+/* tslint:disable */
+// @ts-ignore
+export const SpeechRecognition =
+  // @ts-ignore
+  window.SpeechRecognition || window.webkitSpeechRecognition;
+// @ts-ignore
+export const SpeechGrammarList =
+  // @ts-ignore
+  window.SpeechGrammarList || window.webkitSpeechGrammarList;
+// @ts-ignore
+export const SpeechRecognitionEvent =
+  // @ts-ignore
+  window.SpeechRecognitionEvent || window.webkitSpeechRecognitionEvent;
+/* tslint:enable */
diff --git a/src/data/conversation.ts b/src/data/conversation.ts
index e4a096fb3c..4a4c629d92 100644
--- a/src/data/conversation.ts
+++ b/src/data/conversation.ts
@@ -7,10 +7,33 @@ interface ProcessResults {
   };
 }
 
+export interface AgentInfo {
+  attribution?: { name: string; url: string };
+  onboarding?: { text: string; url: string };
+}
+
 export const processText = (
   hass: HomeAssistant,
   text: string,
   // tslint:disable-next-line: variable-name
   conversation_id: string
 ): Promise<ProcessResults> =>
-  hass.callApi("POST", "conversation/process", { text, conversation_id });
+  hass.callWS({
+    type: "conversation/process",
+    text,
+    conversation_id,
+  });
+
+export const getAgentInfo = (hass: HomeAssistant): Promise<AgentInfo> =>
+  hass.callWS({
+    type: "conversation/agent/info",
+  });
+
+export const setConversationOnboarding = (
+  hass: HomeAssistant,
+  value: boolean
+): Promise<void> =>
+  hass.callWS({
+    type: "conversation/onboarding/set",
+    shown: value,
+  });
diff --git a/src/dialogs/voice-command-dialog/ha-voice-command-dialog.ts b/src/dialogs/voice-command-dialog/ha-voice-command-dialog.ts
index b42d1c9eb1..9bad9e621a 100644
--- a/src/dialogs/voice-command-dialog/ha-voice-command-dialog.ts
+++ b/src/dialogs/voice-command-dialog/ha-voice-command-dialog.ts
@@ -16,7 +16,13 @@ import {
 } from "lit-element";
 import { HomeAssistant } from "../../types";
 import { fireEvent } from "../../common/dom/fire_event";
-import { processText } from "../../data/conversation";
+import { SpeechRecognition } from "../../common/dom/speech-recognition";
+import {
+  processText,
+  getAgentInfo,
+  setConversationOnboarding,
+  AgentInfo,
+} from "../../data/conversation";
 import { classMap } from "lit-html/directives/class-map";
 import { PaperInputElement } from "@polymer/paper-input/paper-input";
 import { haStyleDialog } from "../../resources/styles";
@@ -35,21 +41,6 @@ interface Results {
   final: boolean;
 }
 
-/* tslint:disable */
-// @ts-ignore
-window.SpeechRecognition =
-  // @ts-ignore
-  window.SpeechRecognition || window.webkitSpeechRecognition;
-// @ts-ignore
-window.SpeechGrammarList =
-  // @ts-ignore
-  window.SpeechGrammarList || window.webkitSpeechGrammarList;
-// @ts-ignore
-window.SpeechRecognitionEvent =
-  // @ts-ignore
-  window.SpeechRecognitionEvent || window.webkitSpeechRecognitionEvent;
-/* tslint:enable */
-
 @customElement("ha-voice-command-dialog")
 export class HaVoiceCommandDialog extends LitElement {
   @property() public hass!: HomeAssistant;
@@ -61,8 +52,9 @@ export class HaVoiceCommandDialog extends LitElement {
     },
   ];
   @property() private _opened = false;
+  @property() private _agentInfo?: AgentInfo;
   @query("#messages") private messages!: PaperDialogScrollableElement;
-  private recognition?: SpeechRecognition;
+  private recognition!: SpeechRecognition;
   private _conversationId?: string;
 
   public async showDialog(): Promise<void> {
@@ -70,6 +62,7 @@ export class HaVoiceCommandDialog extends LitElement {
     if (SpeechRecognition) {
       this._startListening();
     }
+    this._agentInfo = await getAgentInfo(this.hass);
   }
 
   protected render(): TemplateResult {
@@ -111,7 +104,30 @@ export class HaVoiceCommandDialog extends LitElement {
         .opened=${this._opened}
         @opened-changed=${this._openedChanged}
       >
-        <paper-dialog-scrollable id="messages">
+        ${this._agentInfo && this._agentInfo.onboarding
+          ? html`
+              <div class="onboarding">
+                ${this._agentInfo.onboarding.text}
+                <div class="side-by-side" @click=${this._completeOnboarding}>
+                  <a
+                    class="button"
+                    href="${this._agentInfo.onboarding.url}"
+                    target="_blank"
+                    ><mwc-button unelevated>Yes!</mwc-button></a
+                  >
+                  <mwc-button outlined>No</mwc-button>
+                </div>
+              </div>
+            `
+          : ""}
+        <paper-dialog-scrollable
+          id="messages"
+          class=${classMap({
+            "top-border": Boolean(
+              this._agentInfo && this._agentInfo.onboarding
+            ),
+          })}
+        >
           ${this._conversation.map(
             (message) => html`
               <div
@@ -132,36 +148,48 @@ export class HaVoiceCommandDialog extends LitElement {
               `
             : ""}
         </paper-dialog-scrollable>
-        <paper-input
-          @keyup=${this._handleKeyUp}
-          label="${this.hass!.localize(
-            `ui.dialogs.voice_command.${
-              SpeechRecognition ? "label_voice" : "label"
-            }`
-          )}"
-          autofocus
-          no-label-float
-        >
-          ${SpeechRecognition
-            ? html`
-                ${this.results
-                  ? html`
-                      <div class="bouncer">
-                        <div class="double-bounce1"></div>
-                        <div class="double-bounce2"></div>
-                      </div>
-                    `
-                  : ""}
-                <paper-icon-button
-                  slot="suffix"
-                  .active=${Boolean(this.results)}
-                  icon="hass:microphone"
-                  @click=${this._toggleListening}
-                >
-                </paper-icon-button>
-              `
-            : ""}
-        </paper-input>
+        <div class="input">
+          <paper-input
+            @keyup=${this._handleKeyUp}
+            label="${this.hass!.localize(
+              `ui.dialogs.voice_command.${
+                SpeechRecognition ? "label_voice" : "label"
+              }`
+            )}"
+            autofocus
+          >
+            ${SpeechRecognition
+              ? html`
+                  <span suffix="" slot="suffix">
+                    ${this.results
+                      ? html`
+                          <div class="bouncer">
+                            <div class="double-bounce1"></div>
+                            <div class="double-bounce2"></div>
+                          </div>
+                        `
+                      : ""}
+                    <paper-icon-button
+                      .active=${Boolean(this.results)}
+                      icon="hass:microphone"
+                      @click=${this._toggleListening}
+                    >
+                    </paper-icon-button>
+                  </span>
+                `
+              : ""}
+          </paper-input>
+          ${this._agentInfo && this._agentInfo.attribution
+            ? html`
+                <a
+                  class="attribution"
+                  href=${this._agentInfo.attribution.url}
+                  target="_blank"
+                  >${this._agentInfo.attribution.name}</a
+                >
+              `
+            : ""}
+        </div>
       </ha-paper-dialog>
`; } @@ -196,18 +224,23 @@ export class HaVoiceCommandDialog extends LitElement { } } + private _completeOnboarding() { + setConversationOnboarding(this.hass, true); + this._agentInfo! = { ...this._agentInfo, onboarding: undefined }; + } + private _initRecognition() { this.recognition = new SpeechRecognition(); this.recognition.interimResults = true; this.recognition.lang = "en-US"; - this.recognition!.onstart = () => { + this.recognition.onstart = () => { this.results = { final: false, transcript: "", }; }; - this.recognition!.onerror = (event) => { + this.recognition.onerror = (event) => { this.recognition!.abort(); if (event.error !== "aborted") { const text = @@ -220,7 +253,7 @@ export class HaVoiceCommandDialog extends LitElement { } this.results = null; }; - this.recognition!.onend = () => { + this.recognition.onend = () => { // Already handled by onerror if (this.results == null) { return; @@ -240,7 +273,7 @@ export class HaVoiceCommandDialog extends LitElement { } }; - this.recognition!.onresult = (event) => { + this.recognition.onresult = (event) => { const result = event.results[0]; this.results = { transcript: result[0].transcript, @@ -270,14 +303,6 @@ export class HaVoiceCommandDialog extends LitElement { message.text = plain.speech; this.requestUpdate("_conversation"); - - if (speechSynthesis) { - const speech = new SpeechSynthesisUtterance( - response.speech.plain.speech - ); - speech.lang = "en-US"; - speechSynthesis.speak(speech); - } } catch { message.text = this.hass.localize("ui.dialogs.voice_command.error"); message.error = true; @@ -343,14 +368,42 @@ export class HaVoiceCommandDialog extends LitElement { color: var(--primary-color); } - paper-input { + .input { margin: 0 0 16px 0; } ha-paper-dialog { width: 450px; } - + a.button { + text-decoration: none; + } + a.button > mwc-button { + width: 100%; + } + .onboarding { + padding: 0 24px; + } + paper-dialog-scrollable.top-border::before { + content: ""; + position: absolute; + top: 0; + left: 0; + right: 0; + height: 1px; + background: var(--divider-color); + } + .side-by-side { + display: flex; + margin: 8px 0; + } + .side-by-side > * { + flex: 1 0; + padding: 4px; + } + .attribution { + color: var(--secondary-text-color); + } .message { font-size: 18px; clear: both;