From 7a849379f96d5eebcbebcdf19344c37f610d1d71 Mon Sep 17 00:00:00 2001
From: Fabio Manganiello
Date: Wed, 8 May 2024 21:58:58 +0200
Subject: [PATCH] [#399] Added `@when` decorator as an alias for `@hook`.

Closes: #399
---
 README.md                                    | 13 ++++-----
 examples/config/hook.py                      | 11 +++-----
 platypush/__init__.py                        |  5 ++++
 platypush/backend/http/__init__.py           |  5 ++--
 .../plugins/assistant/picovoice/__init__.py  | 28 +++++++++----------
 5 files changed, 31 insertions(+), 31 deletions(-)

diff --git a/README.md b/README.md
index 820c4e92ed..1d03416b04 100644
--- a/README.md
+++ b/README.md
@@ -505,11 +505,10 @@ event.hook.SearchSongVoiceCommand:
 [Example](https://git.platypush.tech/platypush/platypush/src/branch/master/examples/conf/hook.py):
 
 ```python
-from platypush.event.hook import hook
-from platypush.utils import run
+from platypush import run, when
 from platypush.message.event.assistant import SpeechRecognizedEvent
 
-@hook(SpeechRecognizedEvent, phrase='play ${title} by ${artist}')
+@when(SpeechRecognizedEvent, phrase='play ${title} by ${artist}')
 def on_music_play_command(event, title=None, artist=None, **context):
     results = run('music.mpd.search', filter={
         'artist': artist,
@@ -527,22 +526,22 @@ against partial event arguments are also possible, and relational operators are
 supported as well. For example:
 
 ```python
-from platypush.event.hook import hook
+from platypush import when
 from platypush.message.event.sensor import SensorDataChangeEvent
 
-@hook(SensorDataChangeEvent, data=1):
+@when(SensorDataChangeEvent, data=1):
 def hook_1(event):
     """
     Triggered when event.data == 1
     """
 
-@hook(SensorDataChangeEvent, data={'state': 1}):
+@when(SensorDataChangeEvent, data={'state': 1}):
 def hook_2(event):
     """
     Triggered when event.data['state'] == 1
     """
 
-@hook(SensorDataChangeEvent, data={
+@when(SensorDataChangeEvent, data={
     'temperature': {'$gt': 25},
     'humidity': {'$le': 15}
 }):
diff --git a/examples/config/hook.py b/examples/config/hook.py
index 2f4081b815..9f36b9a830 100644
--- a/examples/config/hook.py
+++ b/examples/config/hook.py
@@ -3,13 +3,10 @@
 # which event type they should be called, and optionally on which event attribute values.
 #
 # Event hooks should be stored in Python files under `~/.config/platypush/scripts`. All the functions that use the
-# @hook decorator will automatically be discovered and imported as event hooks into the platform at runtime.
+# @when decorator will automatically be discovered and imported as event hooks into the platform at runtime.
 # `run` is a utility function that runs a request by name (e.g. `light.hue.on`).
 
-from platypush.utils import run
-
-# @hook decorator
-from platypush.event.hook import hook
+from platypush import when, run
 
 # Event types that you want to react to
 from platypush.message.event.assistant import (
@@ -18,7 +15,7 @@ from platypush.message.event.assistant import (
 )
 
 
-@hook(SpeechRecognizedEvent, phrase='play ${title} by ${artist}')
+@when(SpeechRecognizedEvent, phrase='play ${title} by ${artist}')
 def on_music_play_command(event, title=None, artist=None, **context):
     """
     This function will be executed when a SpeechRecognizedEvent with `phrase="play the music"` is triggered.
@@ -40,7 +37,7 @@ def on_music_play_command(event, title=None, artist=None, **context):
         run('tts.say', "I can't find any music matching your query")
 
 
-@hook(ConversationStartEvent)
+@when(ConversationStartEvent)
 def on_conversation_start(event, **context):
     """
     A simple hook that gets invoked when a new conversation starts with a voice assistant and simply pauses the music
diff --git a/platypush/__init__.py b/platypush/__init__.py
index c962a5d123..0d569d8d1a 100644
--- a/platypush/__init__.py
+++ b/platypush/__init__.py
@@ -17,6 +17,10 @@ from .procedure import procedure
 from .runner import main
 from .utils import run
 
+# Alias for platypush.event.hook.hook,
+# see https://git.platypush.tech/platypush/platypush/issues/399
+when = hook
+
 __author__ = 'Fabio Manganiello '
 __version__ = '0.50.3'
 
@@ -35,6 +39,7 @@ __all__ = [
     'main',
     'procedure',
     'run',
+    'when',
 ]
diff --git a/platypush/backend/http/__init__.py b/platypush/backend/http/__init__.py
index a682262755..1701c74ddc 100644
--- a/platypush/backend/http/__init__.py
+++ b/platypush/backend/http/__init__.py
@@ -153,14 +153,13 @@ class HttpBackend(Backend):
 
         .. code-block:: python
 
-            from platypush.context import get_plugin
-            from platypush.event.hook import hook
+            from platypush import get_plugin, when
             from platypush.message.event.http.hook import WebhookEvent
 
             hook_token = 'abcdefabcdef'
 
             # Expose the hook under the /hook/lights_toggle endpoint
-            @hook(WebhookEvent, hook='lights_toggle')
+            @when(WebhookEvent, hook='lights_toggle')
             def lights_toggle(event, **context):
                 # Do any checks on the request
                 assert event.headers.get('X-Token') == hook_token, 'Unauthorized'
diff --git a/platypush/plugins/assistant/picovoice/__init__.py b/platypush/plugins/assistant/picovoice/__init__.py
index 3e36e35c8d..3661a2fa6e 100644
--- a/platypush/plugins/assistant/picovoice/__init__.py
+++ b/platypush/plugins/assistant/picovoice/__init__.py
@@ -86,11 +86,11 @@ class AssistantPicovoicePlugin(AssistantPlugin, RunnablePlugin):
 
         import time
 
-        from platypush import hook, run
+        from platypush import when, run
         from platypush.message.event.assistant import HotwordDetectedEvent
 
         # Turn on a light for 5 seconds when the hotword "Alexa" is detected
-        @hook(HotwordDetectedEvent, hotword='Alexa')
+        @when(HotwordDetectedEvent, hotword='Alexa')
         def on_hotword_detected(event: HotwordDetectedEvent, **context):
             run("light.hue.on", lights=["Living Room"])
             time.sleep(5)
@@ -109,12 +109,12 @@ class AssistantPicovoicePlugin(AssistantPlugin, RunnablePlugin):
 
     .. code-block:: python
 
-        from platypush import hook, run
+        from platypush import when, run
         from platypush.message.event.assistant import HotwordDetectedEvent
 
         # Start a conversation using the Italian language model when the
         # "Buongiorno" hotword is detected
-        @hook(HotwordDetectedEvent, hotword='Buongiorno')
+        @when(HotwordDetectedEvent, hotword='Buongiorno')
         def on_it_hotword_detected(event: HotwordDetectedEvent, **context):
             event.assistant.start_conversation(model_file='path/to/it.pv')
@@ -136,7 +136,7 @@ class AssistantPicovoicePlugin(AssistantPlugin, RunnablePlugin):
 
     .. code-block:: python
 
-        from platypush import hook, run
+        from platypush import when, run
         from platypush.message.event.assistant import SpeechRecognizedEvent
 
         # Turn on a light when the phrase "turn on the lights" is detected.
         # The phrase is case-insensitive and the hook will be a bit more
         # flexible when matching the phrases. For example, the following hook
         # will be matched when the user says "turn on the lights", "turn on
         # lights", "lights on", "lights on please", "turn on light" etc.
-        @hook(SpeechRecognizedEvent, phrase='turn on (the)? lights?')
+        @when(SpeechRecognizedEvent, phrase='turn on (the)? lights?')
         def on_turn_on_lights(event: SpeechRecognizedEvent, **context):
             run("light.hue.on")
@@ -154,10 +154,10 @@ class AssistantPicovoicePlugin(AssistantPlugin, RunnablePlugin):
 
     .. code-block:: python
 
-        from platypush import hook, run
+        from platypush import when, run
         from platypush.message.event.assistant import SpeechRecognizedEvent
 
-        @hook(SpeechRecognizedEvent, phrase='play ${title} by ${artist}')
+        @when(SpeechRecognizedEvent, phrase='play ${title} by ${artist}')
         def on_play_track_command(
             event: SpeechRecognizedEvent, title: str, artist: str, **context
         ):
@@ -227,10 +227,10 @@ class AssistantPicovoicePlugin(AssistantPlugin, RunnablePlugin):
 
     .. code-block:: python
 
-        from platypush import hook, run
+        from platypush import when, run
         from platypush.message.event.assistant import IntentRecognizedEvent
 
-        @hook(IntentRecognizedEvent, intent='lights_ctrl', slots={'state': 'on'})
+        @when(IntentRecognizedEvent, intent='lights_ctrl', slots={'state': 'on'})
         def on_turn_on_lights(event: IntentRecognizedEvent, **context):
             room = event.slots.get('room')
             if room:
@@ -255,10 +255,10 @@ class AssistantPicovoicePlugin(AssistantPlugin, RunnablePlugin):
 
     .. code-block:: python
 
-        from platypush import hook, run
+        from platypush import when, run
        from platypush.message.event.assistant import SpeechRecognizedEvent
 
-        @hook(SpeechRecognizedEvent, phrase='turn ${state} (the)? ${room} lights?')
+        @when(SpeechRecognizedEvent, phrase='turn ${state} (the)? ${room} lights?')
         def on_turn_on_lights(event: SpeechRecognizedEvent, phrase, room, **context):
             if room:
                 run("light.hue.on", groups=[room])
             else:
                 run("light.hue.on")
@@ -331,7 +331,7 @@ class AssistantPicovoicePlugin(AssistantPlugin, RunnablePlugin):
             (re.compile(r".*"), ai_assist),
         )
 
-        @hook(SpeechRecognizedEvent)
+        @when(SpeechRecognizedEvent)
         def on_speech_recognized(event, **kwargs):
             for pattern, command in hooks:
                 if pattern.search(event.phrase):
@@ -339,7 +339,7 @@ class AssistantPicovoicePlugin(AssistantPlugin, RunnablePlugin):
                     command(event, **kwargs)
                     break
 
-        @hook(ResponseEndEvent)
+        @when(ResponseEndEvent)
         def on_response_end(event: ResponseEndEvent, **__):
             # Check if the response is a question and start a follow-on turn if so.
             # Note that the ``openai`` plugin by default is configured to keep
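
Note on backward compatibility: since `when` is defined as a plain alias (`when = hook`) and exported via `__all__`, user scripts that still import the decorator from `platypush.event.hook` keep working unchanged. Below is a minimal sketch of a script under `~/.config/platypush/scripts` mixing both spellings; the phrases and the `light.hue.off` action are illustrative assumptions, not part of this patch.

```python
# Hypothetical user script, e.g. ~/.config/platypush/scripts/lights.py
from platypush import run, when              # new alias introduced by this patch
from platypush.event.hook import hook        # legacy import path, still available
from platypush.message.event.assistant import SpeechRecognizedEvent


@when(SpeechRecognizedEvent, phrase='good night')
def good_night(event, **context):
    # `run` dispatches an action by name, as in the README examples.
    run('light.hue.off')


@hook(SpeechRecognizedEvent, phrase='good morning')
def good_morning(event, **context):
    # Hooks written against the old decorator behave exactly the same.
    run('light.hue.on')
```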