[Assistant] Added assistant entity type.

Fabio Manganiello 2023-10-23 16:02:05 +02:00
parent fd26f7fef3
commit a005958317
Signed by: blacklight
GPG key ID: D90FBA7F76362774
8 changed files with 725 additions and 88 deletions


@@ -2,6 +2,9 @@
     "arduino": {
         "class": "fas fa-microchip"
     },
+    "assistant.google": {
+        "class": "fas fa-microphone-lines"
+    },
     "bluetooth": {
         "class": "fab fa-bluetooth"
     },


@@ -0,0 +1,107 @@
<template>
<Modal ref="modal" :title="title">
<form @submit.prevent="onConfirm">
<div class="dialog-content">
<slot />
<input type="text" ref="input" />
</div>
<div class="buttons">
<button type="submit" class="ok-btn" @click="onConfirm" @touch="onConfirm">
<i class="fas fa-check" /> &nbsp; {{ confirmText }}
</button>
<button type="button" class="cancel-btn" @click="close" @touch="close">
<i class="fas fa-xmark" /> &nbsp; {{ cancelText }}
</button>
</div>
</form>
</Modal>
</template>
<script>
import Modal from "@/components/Modal";
export default {
emits: ['input'],
components: {Modal},
props: {
title: {
type: String,
},
confirmText: {
type: String,
default: "OK",
},
cancelText: {
type: String,
default: "Cancel",
},
},
methods: {
onConfirm() {
this.$emit('input', this.$refs.input.value)
this.close()
},
show() {
this.$refs.modal.show()
},
close() {
this.$refs.modal.hide()
},
},
mounted() {
this.$nextTick(() => {
this.$refs.input.value = ""
this.$refs.input.focus()
})
},
}
</script>
<style lang="scss" scoped>
:deep(.modal) {
.dialog-content {
padding: 1em;
}
.body {
padding: 1.5em;
}
.buttons {
display: flex;
flex-direction: row;
justify-content: right;
margin-bottom: 1em;
border: 0;
border-radius: 0;
button {
margin-right: 1em;
padding: 0.5em 1em;
border: 1px solid $border-color-2;
border-radius: 1em;
&:hover {
background: $hover-bg;
}
}
}
}
form {
display: flex;
flex-direction: column !important;
.buttons {
flex-direction: row !important;
}
}
</style>


@@ -0,0 +1,250 @@
<template>
<div class="entity assistant-container">
<TextPrompt ref="prompt">
Enter a text query to send to the assistant.
</TextPrompt>
<div class="head" @click="onHeadClick">
<div class="col-1 icon entity-icon" ref="icon">
<EntityIcon
:entity="value"
:class="{active: value.conversation_running}"
:loading="loading"
:error="error" />
</div>
<div class="label">
<div class="name" ref="name" v-text="value.name" />
</div>
<div class="value-container">
<button @click.stop="collapsed = !collapsed">
<i class="fas"
:class="{'fa-angle-up': !collapsed, 'fa-angle-down': collapsed}" />
</button>
</div>
</div>
<div class="body" ref="body" v-if="!collapsed" @click.stop="prevent">
<div class="row" @click.stop="stopConversation" v-if="value.conversation_running">
<div class="icon">
<i class="fas fa-comment-slash" />
</div>
<div class="label">
<div class="name">Stop Conversation</div>
</div>
<div class="value">
<ToggleSwitch
@click.stop="stopConversation"
:value="false"
:disabled="loading" />
</div>
</div>
<div class="row" @click.stop="startConversation" v-else>
<div class="icon">
<i class="fas fa-comment" />
</div>
<div class="label">
<div class="name">Start Conversation</div>
</div>
<div class="value">
<ToggleSwitch
@click.stop="startConversation"
:value="false"
:disabled="loading" />
</div>
</div>
<div class="row" @click.stop="toggleMute">
<div class="icon">
<i class="fas fa-microphone-lines-slash" />
</div>
<div class="label">
<div class="name">Muted</div>
</div>
<div class="value">
<ToggleSwitch
@click.stop="toggleMute"
:value="value.is_muted"
:disabled="loading" />
</div>
</div>
<div class="row" @click.stop="showPrompt">
<div class="icon">
<i class="fas fa-keyboard" />
</div>
<div class="label">
<div class="name">Send query from text prompt</div>
</div>
<div class="value">
<ToggleSwitch
@click.stop="showPrompt"
:value="false"
:disabled="loading" />
</div>
</div>
</div>
</div>
</template>
<script>
import TextPrompt from "@/components/elements/TextPrompt"
import ToggleSwitch from "@/components/elements/ToggleSwitch"
import EntityIcon from "./EntityIcon"
import EntityMixin from "./EntityMixin"
export default {
name: 'Assistant',
mixins: [EntityMixin],
components: {
EntityIcon,
TextPrompt,
ToggleSwitch,
},
data() {
return {
collapsed: true,
showTextQueryPrompt: false,
modalId: 'assistant-text-prompt-modal',
}
},
methods: {
hidePrompt() {
document.body.querySelector(`#${this.modalId}`)?.remove()
},
showPrompt() {
const modalElement = this.$refs.prompt.$el
this.hidePrompt()
modalElement.id = this.modalId
modalElement.classList.remove('hidden')
const input = modalElement.querySelector('input[type="text"]')
const form = modalElement.querySelector('form')
if (form) {
form.addEventListener('submit', (event) => {
event.stopPropagation()
this.onTextPrompt(input?.value)
})
}
const cancelBtn = modalElement.querySelector('.cancel-btn')
if (cancelBtn) {
cancelBtn.onclick = (event) => {
this.hidePrompt()
event.stopPropagation()
}
}
modalElement.onclick = (event) => {
const modalContent = modalElement.querySelector('.modal')
if (modalContent?.contains(event.target)) {
event.stopPropagation()
return false
}
this.hidePrompt()
}
document.body.appendChild(modalElement)
this.$nextTick(() => {
modalElement.querySelector('input[type="text"]').focus()
})
},
onHeadClick(event) {
if (
this.$refs.name.contains(event.target) ||
this.$refs.icon.contains(event.target)
) {
// Propagate the event upwards and let it open the entity modal
return true
}
// Toggle the collapse state if the click is outside of the entity
// name/icon
this.collapsed = !this.collapsed
event.stopPropagation()
},
async toggleMute() {
await this.request('entities.execute', {
id: this.value.id,
action: 'toggle_mute',
})
},
async startConversation() {
await this.request('entities.execute', {
id: this.value.id,
action: 'start_conversation',
})
},
async stopConversation() {
await this.request('entities.execute', {
id: this.value.id,
action: 'stop_conversation',
})
},
async onTextPrompt(query) {
await this.request('entities.execute', {
id: this.value.id,
action: 'send_text_query',
query: query,
})
this.hidePrompt()
},
}
}
</script>
<style lang="scss" scoped>
@import "common";
$icon-size: 2em;
.assistant-container {
.body {
padding: 0;
}
.row {
margin: 0;
padding: 1em 0.5em;
display: flex;
&:hover {
background: $hover-bg;
}
&:not(:last-child) {
border-bottom: 1px solid $border-color-1;
}
.icon {
flex: 0 0 $icon-size;
display: flex;
align-items: center;
justify-content: center;
}
.label {
width: calc(100% - $icon-size);
}
}
:deep(.entity-icon) {
.active {
color: $selected-fg;
}
}
}
</style>


@ -1,4 +1,12 @@
{ {
"assistant": {
"name": "Assistant",
"name_plural": "Assistants",
"icon": {
"class": "fas fa-microphone-lines"
}
},
"battery": { "battery": {
"name": "Battery", "name": "Battery",
"name_plural": "Batteries", "name_plural": "Batteries",


@@ -0,0 +1,29 @@
from sqlalchemy import Column, Integer, ForeignKey, Boolean, String
from platypush.common.db import is_defined
from . import Entity
if not is_defined('assistant'):
    class Assistant(Entity):
        """
        Base class for voice assistant entities.
        """
        __tablename__ = 'assistant'
        id = Column(
            Integer, ForeignKey(Entity.id, ondelete='CASCADE'), primary_key=True
        )
        last_query = Column(String)
        last_response = Column(String)
        conversation_running = Column(Boolean)
        is_muted = Column(Boolean, default=False)
        is_detecting = Column(Boolean, default=True)
        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }
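For context, the is_defined('assistant') guard above keeps SQLAlchemy from re-declaring the assistant table when the module gets imported more than once. A minimal sketch of the same pattern applied to a hypothetical entity type (the FooSensor name and its columns are illustrative, not part of this commit):

from sqlalchemy import Column, ForeignKey, Integer, String
from platypush.common.db import is_defined
from platypush.entities import Entity
# Guarded declaration: skip the class body entirely if the table has
# already been registered with SQLAlchemy (e.g. on a module reload).
if not is_defined('foo_sensor'):
    class FooSensor(Entity):
        # Hypothetical entity type, shown only to illustrate the pattern
        # used by the Assistant entity above.
        __tablename__ = 'foo_sensor'
        id = Column(
            Integer, ForeignKey(Entity.id, ondelete='CASCADE'), primary_key=True
        )
        value = Column(String)
        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {'polymorphic_identity': __tablename__}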


@@ -0,0 +1,78 @@
from abc import ABC, abstractmethod
from . import EntityManager
class AssistantEntityManager(EntityManager, ABC):
    """
    Base class for voice assistant integrations that support entity management.
    """
    @abstractmethod
    def start_conversation(self, *args, **kwargs):
        """
        Programmatically starts a conversation.
        """
        raise NotImplementedError()
    @abstractmethod
    def stop_conversation(self, *args, **kwargs):
        """
        Programmatically stops a conversation.
        """
        raise NotImplementedError()
    @abstractmethod
    def is_muted(self, *args, **kwargs) -> bool:
        """
        :return: True if the microphone is muted, False otherwise.
        """
        raise NotImplementedError()
    @abstractmethod
    def mute(self, *args, **kwargs):
        """
        Mute the microphone.
        """
        raise NotImplementedError()
    @abstractmethod
    def unmute(self, *args, **kwargs):
        """
        Unmute the microphone.
        """
        raise NotImplementedError()
    def toggle_mute(self, *_, **__):
        """
        Toggle the mute state of the microphone.
        """
        return self.unmute() if self.is_muted() else self.mute()
    @abstractmethod
    def pause_detection(self, *args, **kwargs):
        """
        Put the assistant on pause. No new conversation events will be triggered.
        """
        raise NotImplementedError()
    @abstractmethod
    def resume_detection(self, *args, **kwargs):
        """
        Resume the assistant hotword detection from a paused state.
        """
        raise NotImplementedError()
    @abstractmethod
    def is_detecting(self, *args, **kwargs) -> bool:
        """
        :return: True if the assistant is detecting, False otherwise.
"""
raise NotImplementedError()
@abstractmethod
def send_text_query(self, *args, **kwargs):
"""
Send a text query to the assistant.
"""
raise NotImplementedError()
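A concrete integration only has to fill in these abstract methods; toggle_mute() then comes for free. A rough, hypothetical sketch of a minimal implementation (the DummyAssistant class is made up for illustration, and any further abstract members inherited from EntityManager are glossed over):

from platypush.entities.managers.assistants import AssistantEntityManager
class DummyAssistant(AssistantEntityManager):
    # In-memory stand-in that just tracks flags; a real integration would
    # talk to an actual voice assistant backend in these methods.
    def __init__(self):
        self._muted = False
        self._detecting = True
        self._conversation_running = False
    def start_conversation(self, *args, **kwargs):
        self._conversation_running = True
    def stop_conversation(self, *args, **kwargs):
        self._conversation_running = False
    def is_muted(self, *args, **kwargs) -> bool:
        return self._muted
    def mute(self, *args, **kwargs):
        self._muted = True
    def unmute(self, *args, **kwargs):
        self._muted = False
    def pause_detection(self, *args, **kwargs):
        self._detecting = False
    def resume_detection(self, *args, **kwargs):
        self._detecting = True
    def is_detecting(self, *args, **kwargs) -> bool:
        return self._detecting
    def send_text_query(self, *args, **kwargs):
        pass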


@@ -1,21 +1,72 @@
 from abc import ABC, abstractmethod
+from dataclasses import asdict, dataclass
+from enum import Enum
+import os
 from threading import Event
-from typing import Any, Dict, Optional
+from typing import Any, Collection, Dict, Optional
-from platypush.context import get_plugin
+from platypush.context import get_bus, get_plugin
+from platypush.entities.assistants import Assistant
+from platypush.entities.managers.assistants import AssistantEntityManager
+from platypush.message.event.assistant import (
+    AssistantEvent,
+    ConversationStartEvent,
+    ConversationEndEvent,
+    ConversationTimeoutEvent,
+    ResponseEvent,
+    NoResponseEvent,
+    SpeechRecognizedEvent,
+    AlarmStartedEvent,
+    AlarmEndEvent,
+    TimerStartedEvent,
+    TimerEndEvent,
+    AlertStartedEvent,
+    AlertEndEvent,
+    MicMutedEvent,
+    MicUnmutedEvent,
+)
 from platypush.plugins import Plugin, action
+from platypush.utils import get_plugin_name_by_class
-class AssistantPlugin(ABC, Plugin):
+@dataclass
+class AlertType(Enum):
+    """
+    Enum representing the type of an alert.
+    """
+    ALARM = 'alarm'
+    TIMER = 'timer'
+    ALERT = 'alert'
+@dataclass
+class AssistantState:
+    """
+    Dataclass representing the state of an assistant.
+    """
+    last_query: Optional[str] = None
+    last_response: Optional[str] = None
+    conversation_running: bool = False
+    is_muted: bool = False
+    is_detecting: bool = True
+    alert_state: Optional[str] = None
+class AssistantPlugin(Plugin, AssistantEntityManager, ABC):
     """
     Base class for assistant plugins.
     """
+    _entity_name = 'Assistant'
     def __init__(
         self,
         *args,
         tts_plugin: Optional[str] = None,
         tts_plugin_args: Optional[Dict[str, Any]] = None,
+        conversation_start_sound: Optional[str] = None,
         **kwargs
     ):
         """
@@ -25,11 +76,37 @@ class AssistantPlugin(ABC, Plugin):
         :param tts_plugin_args: Optional arguments to be passed to the TTS
             ``say`` action, if ``tts_plugin`` is set.
+        :param conversation_start_sound: If set, the assistant will play this
+            audio file when it detects a speech. The sound file will be played
+            on the default audio output device. If not set, the assistant won't
+            play any sound when it detects a speech.
         """
         super().__init__(*args, **kwargs)
         self.tts_plugin = tts_plugin
         self.tts_plugin_args = tts_plugin_args or {}
+        if conversation_start_sound:
+            self._conversation_start_sound = os.path.abspath(
+                os.path.expanduser(conversation_start_sound)
+            )
         self._detection_paused = Event()
+        self._conversation_running = Event()
+        self._is_muted = False
+        self._last_query: Optional[str] = None
+        self._last_response: Optional[str] = None
+        self._cur_alert_type: Optional[AlertType] = None
+    @property
+    def _state(self) -> AssistantState:
+        return AssistantState(
+            last_query=self._last_query,
+            last_response=self._last_response,
+            conversation_running=self._conversation_running.is_set(),
+            is_muted=self._is_muted,
+            is_detecting=not self._detection_paused.is_set(),
+            alert_state=self._cur_alert_type.value if self._cur_alert_type else None,
+        )
     @abstractmethod
     def start_conversation(self, *_, **__):
@@ -46,31 +123,164 @@ class AssistantPlugin(ABC, Plugin):
         raise NotImplementedError
     @action
-    def pause_detection(self):
+    def pause_detection(self, *_, **__):
         """
         Put the assistant on pause. No new conversation events will be triggered.
         """
         self._detection_paused.set()
     @action
-    def resume_detection(self):
+    def resume_detection(self, *_, **__):
         """
         Resume the assistant hotword detection from a paused state.
         """
         self._detection_paused.clear()
     @action
-    def is_detecting(self) -> bool:
+    def is_detecting(self, *_, **__) -> bool:
         """
         :return: True if the asistant is detecting, False otherwise.
         """
         return not self._detection_paused.is_set()
+    @action
+    def is_muted(self, *_, **__) -> bool:
+        """
+        :return: True if the microphone is muted, False otherwise.
+        """
+        return self._is_muted
+    @action
+    def status(self, *_, **__):
+        """
+        :return: The current assistant status:
+            .. code-block:: json
+                {
+                    "last_query": "What time is it?",
+                    "last_response": "It's 10:30 AM",
+                    "conversation_running": true,
+                    "is_muted": false,
+                    "is_detecting": true
+                }
+        """
+        self.publish_entities([self])
+        return asdict(self._state)
     def _get_tts_plugin(self):
         if not self.tts_plugin:
             return None
         return get_plugin(self.tts_plugin)
+    def _play_conversation_start_sound(self):
+        if not self._conversation_start_sound:
+            return
+        audio = get_plugin('sound')
+        if not audio:
+            self.logger.warning(
+                'Unable to play conversation start sound: sound plugin not found'
+            )
+            return
+        audio.play(self._conversation_start_sound)
+    def _send_event(self, event: AssistantEvent):
+        self.publish_entities([self])
+        get_bus().post(event)
+    def _on_conversation_start(self):
+        self._last_response = None
+        self._last_query = None
+        self._conversation_running.set()
+        self._send_event(ConversationStartEvent(assistant=self))
+        self._play_conversation_start_sound()
+    def _on_conversation_end(self):
+        self._conversation_running.clear()
+        self._send_event(ConversationEndEvent(assistant=self))
+    def _on_conversation_timeout(self):
+        self._last_response = None
+        self._last_query = None
+        self._conversation_running.clear()
+        self._send_event(ConversationTimeoutEvent(assistant=self))
+    def _on_no_response(self):
+        self._last_response = None
+        self._conversation_running.clear()
+        self._send_event(NoResponseEvent(assistant=self))
+    def _on_reponse_rendered(self, text: Optional[str]):
+        self._last_response = text
+        self._send_event(ResponseEvent(assistant=self, response_text=text))
+        tts = self._get_tts_plugin()
+        if tts and text:
+            self.stop_conversation()
+            tts.say(text=text, **self.tts_plugin_args)
+    def _on_speech_recognized(self, phrase: Optional[str]):
+        phrase = (phrase or '').lower().strip()
+        self._last_query = phrase
+        self._send_event(SpeechRecognizedEvent(assistant=self, phrase=phrase))
+    def _on_alarm_start(self):
+        self._cur_alert_type = AlertType.ALARM
+        self._send_event(AlarmStartedEvent(assistant=self))
+    def _on_alarm_end(self):
+        self._cur_alert_type = None
+        self._send_event(AlarmEndEvent(assistant=self))
+    def _on_timer_start(self):
+        self._cur_alert_type = AlertType.TIMER
+        self._send_event(TimerStartedEvent(assistant=self))
+    def _on_timer_end(self):
+        self._cur_alert_type = None
+        self._send_event(TimerEndEvent(assistant=self))
+    def _on_alert_start(self):
+        self._cur_alert_type = AlertType.ALERT
+        self._send_event(AlertStartedEvent(assistant=self))
+    def _on_alert_end(self):
+        self._cur_alert_type = None
+        self._send_event(AlertEndEvent(assistant=self))
+    def _on_mute(self):
+        self._is_muted = True
+        self._send_event(MicMutedEvent(assistant=self))
+    def _on_unmute(self):
+        self._is_muted = False
+        self._send_event(MicUnmutedEvent(assistant=self))
+    def _on_mute_changed(self, value: bool):
+        if value:
+            self._on_mute()
+        else:
+            self._on_unmute()
+    def transform_entities(self, entities: Collection['AssistantPlugin']):
+        return super().transform_entities(
+            [
+                Assistant(
+                    external_id=get_plugin_name_by_class(type(dev)),
+                    name=self._entity_name,
+                    last_query=dev._state.last_query,
+                    last_response=dev._state.last_response,
+                    conversation_running=dev._state.conversation_running,
+                    is_muted=dev._state.is_muted,
+                    is_detecting=dev._state.is_detecting,
+                )
+                for dev in (entities or [])
+            ]
+        )
 # vim:sw=4:ts=4:et:
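With this refactor the event plumbing lives in the base class: a concrete assistant plugin only calls the protected _on_* hooks from its backend loop, and _send_event() both refreshes the published entity and posts the event on the bus. A hedged sketch of how a backend loop might drive those hooks (the string event names and the dispatch function are illustrative; only the _on_* callbacks come from the code above):

from typing import Any, Dict
def dispatch_backend_event(plugin, evt_type: str, payload: Dict[str, Any]) -> None:
    # `plugin` is assumed to be any concrete AssistantPlugin instance;
    # the event names below are made up, the _on_* hooks are not.
    if evt_type == 'conversation_started':
        plugin._on_conversation_start()
    elif evt_type == 'conversation_ended':
        plugin._on_conversation_end()
    elif evt_type == 'speech_recognized':
        plugin._on_speech_recognized(payload.get('text'))
    elif evt_type == 'response_rendered':
        plugin._on_reponse_rendered(payload.get('text'))
    elif evt_type == 'mute_changed':
        plugin._on_mute_changed(payload.get('muted', False))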


@@ -3,23 +3,6 @@ import os
 from typing import Optional
 from platypush.config import Config
-from platypush.context import get_bus, get_plugin
-from platypush.message.event.assistant import (
-    ConversationStartEvent,
-    ConversationEndEvent,
-    ConversationTimeoutEvent,
-    ResponseEvent,
-    NoResponseEvent,
-    SpeechRecognizedEvent,
-    AlarmStartedEvent,
-    AlarmEndEvent,
-    TimerStartedEvent,
-    TimerEndEvent,
-    AlertStartedEvent,
-    AlertEndEvent,
-    MicMutedEvent,
-    MicUnmutedEvent,
-)
 from platypush.plugins import RunnablePlugin, action
 from platypush.plugins.assistant import AssistantPlugin
@@ -79,6 +62,8 @@ class AssistantGooglePlugin(AssistantPlugin, RunnablePlugin):
     the automated ways fail.
     """
+    _entity_name = 'Google Assistant'
     _default_credentials_files = (
         os.path.join(Config.get_workdir(), 'credentials', 'google', 'assistant.json'),
         os.path.join(
@@ -90,7 +75,6 @@ class AssistantGooglePlugin(AssistantPlugin, RunnablePlugin):
         self,
         credentials_file: Optional[str] = None,
         device_model_id: str = 'Platypush',
-        conversation_start_sound: Optional[str] = None,
         **kwargs,
     ):
         """
@@ -109,11 +93,6 @@ class AssistantGooglePlugin(AssistantPlugin, RunnablePlugin):
         :param device_model_id: The device model ID that identifies the device
             where the assistant is running (default: Platypush). It can be a
             custom string.
-        :param conversation_start_sound: If set, the assistant will play this
-            audio file when it detects a speech. The sound file will be played
-            on the default audio output device. If not set, the assistant won't
-            play any sound when it detects a speech.
         """
         super().__init__(**kwargs)
@@ -121,13 +100,6 @@ class AssistantGooglePlugin(AssistantPlugin, RunnablePlugin):
         self.device_model_id = device_model_id
         self.credentials = None
         self._assistant = None
-        self._is_muted = False
-        if conversation_start_sound:
-            self._conversation_start_sound = os.path.abspath(
-                os.path.expanduser(conversation_start_sound)
-            )
         self.logger.info('Initialized Google Assistant plugin')
     @property
@@ -152,79 +124,54 @@ class AssistantGooglePlugin(AssistantPlugin, RunnablePlugin):
         return self._assistant
-    def _play_conversation_start_sound(self):
-        if not self._conversation_start_sound:
-            return
-        audio = get_plugin('sound')
-        if not audio:
-            self.logger.warning(
-                'Unable to play conversation start sound: sound plugin not found'
-            )
-            return
-        audio.play(self._conversation_start_sound)
     def _process_event(self, event):
         from google.assistant.library.event import EventType, AlertType
         self.logger.info('Received assistant event: %s', event)
         if event.type == EventType.ON_CONVERSATION_TURN_STARTED:
-            get_bus().post(ConversationStartEvent(assistant=self))
-            self._play_conversation_start_sound()
+            self._on_conversation_start()
         elif event.type == EventType.ON_CONVERSATION_TURN_FINISHED:
             if not event.args.get('with_follow_on_turn'):
-                get_bus().post(ConversationEndEvent(assistant=self))
+                self._on_conversation_end()
         elif event.type == EventType.ON_CONVERSATION_TURN_TIMEOUT:
-            get_bus().post(ConversationTimeoutEvent(assistant=self))
+            self._on_conversation_timeout()
        elif event.type == EventType.ON_NO_RESPONSE:
-            get_bus().post(NoResponseEvent(assistant=self))
+            self._on_no_response()
         elif (
             hasattr(EventType, 'ON_RENDER_RESPONSE')
             and event.type == EventType.ON_RENDER_RESPONSE
         ):
-            get_bus().post(
-                ResponseEvent(assistant=self, response_text=event.args.get('text'))
-            )
-            tts = self._get_tts_plugin()
-            if tts and event.args.get('text'):
-                self.stop_conversation()
-                tts.say(text=event.args['text'], **self.tts_plugin_args)
+            self._on_reponse_rendered(event.args.get('text'))
         elif (
             hasattr(EventType, 'ON_RESPONDING_STARTED')
             and event.type == EventType.ON_RESPONDING_STARTED
-            and event.args.get('is_error_response', False) is True
+            and event.args.get('is_error_response') is True
         ):
-            self.logger.warning('Assistant response error')
+            self.logger.warning('Assistant response error: %s', json.dumps(event.args))
         elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED:
-            phrase = event.args['text'].lower().strip()
-            self.logger.info('Speech recognized: %s', phrase)
-            get_bus().post(SpeechRecognizedEvent(assistant=self, phrase=phrase))
+            self._on_speech_recognized(event.args.get('text'))
         elif event.type == EventType.ON_ALERT_STARTED:
             if event.args.get('alert_type') == AlertType.ALARM:
-                get_bus().post(AlarmStartedEvent(assistant=self))
+                self._on_alarm_start()
             elif event.args.get('alert_type') == AlertType.TIMER:
-                get_bus().post(TimerStartedEvent(assistant=self))
+                self._on_timer_start()
             else:
-                get_bus().post(AlertStartedEvent(assistant=self))
+                self._on_alert_start()
         elif event.type == EventType.ON_ALERT_FINISHED:
             if event.args.get('alert_type') == AlertType.ALARM:
-                get_bus().post(AlarmEndEvent(assistant=self))
+                self._on_alarm_end()
             elif event.args.get('alert_type') == AlertType.TIMER:
-                get_bus().post(TimerEndEvent(assistant=self))
+                self._on_timer_end()
             else:
-                get_bus().post(AlertEndEvent(assistant=self))
+                self._on_alert_end()
         elif event.type == EventType.ON_ASSISTANT_ERROR:
             if event.args.get('is_fatal'):
                 raise RuntimeError(f'Fatal assistant error: {json.dumps(event.args)}')
             self.logger.warning('Assistant error: %s', json.dumps(event.args))
         elif event.type == EventType.ON_MUTED_CHANGED:
-            self._is_muted = event.args.get('is_muted')
-            event = MicMutedEvent() if self._is_muted else MicUnmutedEvent()
-            get_bus().post(event)
+            self._on_mute_changed(event.args.get('is_muted', False))
     @action
     def start_conversation(self, *_, **__):
@@ -235,7 +182,7 @@ class AssistantGooglePlugin(AssistantPlugin, RunnablePlugin):
             self.assistant.start_conversation()
     @action
-    def stop_conversation(self):
+    def stop_conversation(self, *_, **__):
         """
         Programmatically stop a running conversation with the assistant
         """
@@ -243,20 +190,20 @@ class AssistantGooglePlugin(AssistantPlugin, RunnablePlugin):
             self.assistant.stop_conversation()
     @action
-    def mute(self):
+    def mute(self, *_, **__):
         """
         Mute the microphone. Alias for :meth:`.set_mic_mute` with
         ``muted=True``.
         """
-        return self.set_mic_mute(muted=True)
+        self.set_mic_mute(muted=True)
     @action
-    def unmute(self):
+    def unmute(self, *_, **__):
         """
         Unmute the microphone. Alias for :meth:`.set_mic_mute` with
         ``muted=False``.
         """
-        return self.set_mic_mute(muted=False)
+        self.set_mic_mute(muted=False)
     @action
     def set_mic_mute(self, muted: bool):
@@ -268,23 +215,27 @@ class AssistantGooglePlugin(AssistantPlugin, RunnablePlugin):
         if self.assistant:
             self.assistant.set_mic_mute(muted)
+        if muted:
+            self._on_mute()
+        else:
+            self._on_unmute()
     @action
-    def is_muted(self) -> bool:
+    def toggle_mute(self, *_, **__):
         """
-        :return: True if the microphone is muted, False otherwise.
+        Toggle the mic mute state.
         """
-        return self._is_muted
+        self.set_mic_mute(muted=not self._is_muted)
     @action
     def toggle_mic_mute(self):
         """
-        Toggle the mic mute state.
+        Deprecated alias for :meth:`.toggle_mute`.
         """
-        is_muted = self.is_muted()
-        self.set_mic_mute(muted=not is_muted)
+        return self.toggle_mute()
     @action
-    def send_text_query(self, query: str):
+    def send_text_query(self, *_, query: str, **__):
         """
         Send a text query to the assistant.
@@ -323,6 +274,7 @@ class AssistantGooglePlugin(AssistantPlugin, RunnablePlugin):
             with Assistant(
                 self.credentials, self.device_model_id
             ) as self._assistant:
+                self.publish_entities([self])
                 for event in self._assistant.start():
                     last_sleep = 0
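End-to-end, the web panel drives these actions through entities.execute (see the Assistant component above), but the same methods can also be called directly from a procedure or script. A hedged usage sketch, assuming a configured assistant.google plugin and a running Platypush context (the .output access assumes the usual action Response wrapper):

from platypush.context import get_plugin
# Assumes this runs inside a Platypush procedure/hook where the
# application context is available and assistant.google is configured.
assistant = get_plugin('assistant.google')
assistant.start_conversation()
assistant.send_text_query(query='what time is it?')
# status() also re-publishes the Assistant entity for the web panel.
print(assistant.status().output)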