forked from platypush/platypush

Merge branch 'master' into 191-support-for-general-entities-backend-and-plugin

Commit: deb25196d2
23 changed files with 1570 additions and 277 deletions
CHANGELOG.md (28 changed lines)

@@ -1,7 +1,33 @@
 # Changelog
 
 All notable changes to this project will be documented in this file.
 
-Given the high speed of development in the first phase, changes are being reported only starting from v0.20.2.
+Given the high speed of development in the first phase, changes are being
+reported only starting from v0.20.2.
+
+## [0.23.6] - 2022-09-19
+
+### Fixed
+
+- Fixed album_id and list of tracks on `music.tidal.get_album`.
+
+## [0.23.5] - 2022-09-18
+
+### Added
+
+- Added support for web hooks returning their hook method responses back to the
+  HTTP client.
+
+- Added [Tidal integration](https://git.platypush.tech/platypush/platypush/pulls/223)
+
+- Added support for [OPML
+  subscriptions](https://git.platypush.tech/platypush/platypush/pulls/220) to
+  the `rss` plugin.
+
+- Better support for bulk database operations on the `db` plugin.
+
+### Fixed
+
+- Now supporting YAML sections with empty configurations.
 
 ## [0.23.4] - 2022-08-28
@@ -48,6 +48,7 @@ Events
 platypush/events/mqtt.rst
 platypush/events/music.rst
 platypush/events/music.snapcast.rst
+platypush/events/music.tidal.rst
 platypush/events/nextcloud.rst
 platypush/events/nfc.rst
 platypush/events/ngrok.rst
docs/source/platypush/events/music.tidal.rst (new file, 5 lines)

@@ -0,0 +1,5 @@
+``music.tidal``
+===============
+
+.. automodule:: platypush.message.event.music.tidal
+    :members:
docs/source/platypush/plugins/music.tidal.rst (new file, 5 lines)

@@ -0,0 +1,5 @@
+``music.tidal``
+===============
+
+.. automodule:: platypush.plugins.music.tidal
+    :members:
@@ -95,6 +95,7 @@ Plugins
 platypush/plugins/music.mpd.rst
 platypush/plugins/music.snapcast.rst
 platypush/plugins/music.spotify.rst
+platypush/plugins/music.tidal.rst
 platypush/plugins/nextcloud.rst
 platypush/plugins/ngrok.rst
 platypush/plugins/nmap.rst
@@ -25,7 +25,7 @@ from .message.response import Response
 from .utils import set_thread_name, get_enabled_plugins
 
 __author__ = 'Fabio Manganiello <info@fabiomanganiello.com>'
-__version__ = '0.23.4'
+__version__ = '0.23.6'
 
 logger = logging.getLogger('platypush')
@@ -1,9 +1,11 @@
 import json
 
-from flask import Blueprint, abort, request, Response
+from flask import Blueprint, abort, request, make_response
 
 from platypush.backend.http.app import template_folder
 from platypush.backend.http.app.utils import logger, send_message
+from platypush.config import Config
+from platypush.event.hook import EventCondition
 from platypush.message.event.http.hook import WebhookEvent
 
 
@@ -15,9 +17,23 @@ __routes__ = [
 ]
 
 
-@hook.route('/hook/<hook_name>', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
-def _hook(hook_name):
-    """ Endpoint for custom webhooks """
+def matches_condition(event: WebhookEvent, hook):
+    if isinstance(hook, dict):
+        if_ = hook['if'].copy()
+        if_['type'] = '.'.join([event.__module__, event.__class__.__qualname__])
+
+        condition = EventCondition.build(if_)
+    else:
+        condition = hook.condition
+
+    return event.matches_condition(condition)
+
+
+@hook.route(
+    '/hook/<hook_name>', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS']
+)
+def hook_route(hook_name):
+    """Endpoint for custom webhooks"""
 
     event_args = {
         'hook': hook_name,
@@ -28,20 +44,54 @@ def _hook(hook_name):
     }
 
     if event_args['data']:
         # noinspection PyBroadException
         try:
             event_args['data'] = json.loads(event_args['data'])
         except Exception as e:
-            logger().warning('Not a valid JSON string: {}: {}'.format(event_args['data'], str(e)))
+            logger().warning(
+                'Not a valid JSON string: %s: %s', event_args['data'], str(e)
+            )
 
     event = WebhookEvent(**event_args)
+    matching_hooks = [
+        hook
+        for hook in Config.get_event_hooks().values()
+        if matches_condition(event, hook)
+    ]
 
     try:
         send_message(event)
-        return Response(json.dumps({'status': 'ok', **event_args}), mimetype='application/json')
+        rs = default_rs = make_response(json.dumps({'status': 'ok', **event_args}))
+        headers = {}
+        status_code = 200
+
+        # If there are matching hooks, wait for their completion before returning
+        if matching_hooks:
+            rs = event.wait_response(timeout=60)
+            try:
+                rs = json.loads(rs.decode())  # type: ignore
+            except Exception:
+                pass
+
+            if isinstance(rs, dict) and '___data___' in rs:
+                # data + http_code + custom_headers return format
+                headers = rs.get('___headers___', {})
+                status_code = rs.get('___code___', status_code)
+                rs = rs['___data___']
+
+            if rs is None:
+                rs = default_rs
+                headers = {'Content-Type': 'application/json'}
+
+            rs = make_response(rs)
+        else:
+            headers = {'Content-Type': 'application/json'}
+
+        rs.status_code = status_code
+        rs.headers.update(headers)
+        return rs
     except Exception as e:
         logger().exception(e)
-        logger().error('Error while dispatching webhook event {}: {}'.format(event, str(e)))
+        logger().error('Error while dispatching webhook event %s: %s', event, str(e))
        abort(500, str(e))
 
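To make the new return path concrete, here is a hedged client-side sketch (not part of the commit) of how an HTTP client could call such a webhook and read the response produced by the hook method. The hook name `test_hook`, the port and the payload are assumptions.

```python
# Hypothetical client-side call of a custom webhook. Assumes Platypush's HTTP
# backend listens on :8008 and a hook named `test_hook` is registered.
import requests

rs = requests.post(
    'http://localhost:8008/hook/test_hook',
    json={'foo': 'bar'},   # becomes event_args['data'] in hook_route()
    timeout=65,            # hook_route() itself waits up to 60s for the hook response
)

print(rs.status_code)                  # ___code___ if the hook returned a 3-tuple, else 200
print(rs.headers.get('Content-Type'))  # ___headers___, or application/json for the default envelope
print(rs.text)                         # ___data___, or the {'status': 'ok', ...} envelope
```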
@@ -215,7 +215,9 @@ class Config:
                 )
             else:
                 section_config = file_config.get(section, {}) or {}
-                if not section_config.get('disabled'):
+                if not (
+                    hasattr(section_config, 'get') and section_config.get('disabled')
+                ):
                     config[section] = section_config
 
         return config
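A minimal sketch (not in the commit) of the situation this guard covers: a YAML section whose value is not a dictionary, such as a scalar entry or an empty section that PyYAML loads as `None`, has no `.get()`, so the old `section_config.get('disabled')` check could fail. The section names below are illustrative.

```python
import yaml

file_config = yaml.safe_load("""
device_id: my-laptop     # scalar entry: its value has no .get()
logging:                 # empty section: parsed as None
backend.http:
  port: 8008
""") or {}

for section, value in file_config.items():
    section_config = value or {}   # None (empty section) becomes {}
    # The hasattr() guard keeps the 'disabled' lookup from blowing up on
    # scalar/list sections such as device_id above.
    disabled = hasattr(section_config, 'get') and section_config.get('disabled')
    print(section, 'skipped' if disabled else 'kept')
```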
@@ -15,10 +15,10 @@ logger = logging.getLogger('platypush')
 
 
 def parse(msg):
-    """ Builds a dict given another dictionary or
-        a JSON UTF-8 encoded string/bytearray """
+    """Builds a dict given another dictionary or
+    a JSON UTF-8 encoded string/bytearray"""
 
-    if isinstance(msg, bytes) or isinstance(msg, bytearray):
+    if isinstance(msg, (bytes, bytearray)):
         msg = msg.decode('utf-8')
     if isinstance(msg, str):
         try:
@@ -30,8 +30,8 @@ def parse(msg):
     return msg
 
 
-class EventCondition(object):
-    """ Event hook condition class """
+class EventCondition:
+    """Event hook condition class"""
 
     def __init__(self, type=Event.__class__, priority=None, **kwargs):
         """
@@ -55,8 +55,8 @@ class EventCondition(object):
 
     @classmethod
     def build(cls, rule):
-        """ Builds a rule given either another EventRule, a dictionary or
-            a JSON UTF-8 encoded string/bytearray """
+        """Builds a rule given either another EventRule, a dictionary or
+        a JSON UTF-8 encoded string/bytearray"""
 
         if isinstance(rule, cls):
             return rule
@@ -64,8 +63,7 @@ class EventCondition(object):
             rule = parse(rule)
 
         assert isinstance(rule, dict), f'Not a valid rule: {rule}'
-        type = get_event_class_by_type(
-            rule.pop('type') if 'type' in rule else 'Event')
+        type = get_event_class_by_type(rule.pop('type') if 'type' in rule else 'Event')
 
         args = {}
         for (key, value) in rule.items():
@@ -75,8 +74,8 @@ class EventCondition(object):
 
 
 class EventAction(Request):
-    """ Event hook action class. It is a special type of runnable request
-        whose fields can be configured later depending on the event context """
+    """Event hook action class. It is a special type of runnable request
+    whose fields can be configured later depending on the event context"""
 
     def __init__(self, target=None, action=None, **args):
         if target is None:
@@ -99,16 +98,16 @@ class EventAction(Request):
         return super().build(action)
 
 
-class EventHook(object):
-    """ Event hook class. It consists of one conditions and
-        one or multiple actions to be executed """
+class EventHook:
+    """Event hook class. It consists of one conditions and
+    one or multiple actions to be executed"""
 
     def __init__(self, name, priority=None, condition=None, actions=None):
-        """ Constructor. Takes a name, a EventCondition object and an event action
-            procedure as input. It may also have a priority attached
-            as a positive number. If multiple hooks match against an event,
-            only the ones that have either the maximum match score or the
-            maximum pre-configured priority will be run. """
+        """Constructor. Takes a name, a EventCondition object and an event action
+        procedure as input. It may also have a priority attached
+        as a positive number. If multiple hooks match against an event,
+        only the ones that have either the maximum match score or the
+        maximum pre-configured priority will be run."""
 
         self.name = name
         self.condition = EventCondition.build(condition or {})
@@ -118,8 +117,8 @@ class EventHook(object):
 
     @classmethod
     def build(cls, name, hook):
-        """ Builds a rule given either another EventRule, a dictionary or
-            a JSON UTF-8 encoded string/bytearray """
+        """Builds a rule given either another EventRule, a dictionary or
+        a JSON UTF-8 encoded string/bytearray"""
 
         if isinstance(hook, cls):
             return hook
@@ -146,14 +145,14 @@ class EventHook(object):
         return cls(name=name, condition=condition, actions=actions, priority=priority)
 
     def matches_event(self, event):
-        """ Returns an EventMatchResult object containing the information
-            about the match between the event and this hook """
+        """Returns an EventMatchResult object containing the information
+        about the match between the event and this hook"""
 
         return event.matches_condition(self.condition)
 
     def run(self, event):
-        """ Checks the condition of the hook against a particular event and
-            runs the hook actions if the condition is met """
+        """Checks the condition of the hook against a particular event and
+        runs the hook actions if the condition is met"""
 
         def _thread_func(result):
             set_thread_name('Event-' + self.name)
@@ -163,7 +162,9 @@ class EventHook(object):
 
         if result.is_match:
             logger.info('Running hook {} triggered by an event'.format(self.name))
-            threading.Thread(target=_thread_func, name='Event-' + self.name, args=(result,)).start()
+            threading.Thread(
+                target=_thread_func, name='Event-' + self.name, args=(result,)
+            ).start()
 
 
 def hook(event_type=Event, **condition):
@@ -172,8 +173,14 @@ def hook(event_type=Event, **condition):
         f.condition = EventCondition(type=event_type, **condition)
 
         @wraps(f)
-        def wrapped(*args, **kwargs):
-            return exec_wrapper(f, *args, **kwargs)
+        def wrapped(event, *args, **kwargs):
+            from platypush.message.event.http.hook import WebhookEvent
+
+            response = exec_wrapper(f, event, *args, **kwargs)
+            if isinstance(event, WebhookEvent):
+                event.send_response(response)
+
+            return response
 
         return wrapped
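As a usage illustration (not part of the diff), a user-level hook decorated with `@hook` on a `WebhookEvent` can now simply return a value, and the wrapper above forwards it to the waiting HTTP request through `event.send_response()`. The hook name and payload below are made up.

```python
# Hypothetical user hook, e.g. in a Platypush user script.
from platypush.event.hook import hook
from platypush.message.event.http.hook import WebhookEvent


@hook(WebhookEvent, hook='test_hook')
def on_test_hook(event, **_):
    # Whatever is returned here is serialized by event.send_response() and
    # becomes the HTTP response body of POST /hook/test_hook.
    if (event.args.get('data') or {}).get('ping'):
        return {'reply': 'pong'}

    # The (data, http_code, headers) tuple form is also supported, as handled
    # by send_response() and hook_route() in the hunks above.
    return 'unrecognized payload', 400, {'Content-Type': 'text/plain'}
```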
@@ -1,10 +1,9 @@
 import copy
-import hashlib
 import json
+import random
 import re
 import sys
 import time
-import uuid
 
 from datetime import date
 
@@ -79,9 +78,7 @@ class Event(Message):
     @staticmethod
     def _generate_id():
         """Generate a unique event ID"""
-        return hashlib.md5(
-            str(uuid.uuid1()).encode()
-        ).hexdigest()  # lgtm [py/weak-sensitive-data-hashing]
+        return ''.join(['{:02x}'.format(random.randint(0, 255)) for _ in range(16)])
 
     def matches_condition(self, condition):
         """
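For reference, a tiny sketch (not in the commit) showing that the new `_generate_id()` still yields a 32-character hex string, shaped like the old MD5-of-UUID1 digest but without the hashlib/uuid imports:

```python
import random

def generate_id() -> str:
    # 16 random bytes rendered as 32 lowercase hex characters
    return ''.join('{:02x}'.format(random.randint(0, 255)) for _ in range(16))

print(generate_id(), len(generate_id()))  # e.g. '3f...9a' 32
```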
@@ -1,11 +1,26 @@
 import json
+import uuid
 
 from platypush.message.event import Event
+from platypush.utils import get_redis
 
 
 class WebhookEvent(Event):
     """
     Event triggered when a custom webhook is called.
     """
 
-    def __init__(self, *argv, hook, method, data=None, args=None, headers=None, **kwargs):
+    def __init__(
+        self,
+        *argv,
+        hook,
+        method,
+        data=None,
+        args=None,
+        headers=None,
+        response=None,
+        **kwargs,
+    ):
         """
         :param hook: Name of the invoked web hook, from http://host:port/hook/<hook>
         :type hook: str
@@ -21,10 +36,56 @@ class WebhookEvent(Event):
 
         :param headers: Request headers
         :type args: dict
-        """
-
-        super().__init__(hook=hook, method=method, data=data, args=args or {},
-                         headers=headers or {}, *argv, **kwargs)
+        :param response: Response returned by the hook.
+        :type args: dict | list | str
+        """
+        # This queue is used to synchronize with the hook and wait for its completion
+        kwargs['response_queue'] = kwargs.get(
+            'response_queue', f'platypush/webhook/{str(uuid.uuid1())}'
+        )
+
+        super().__init__(
+            *argv,
+            hook=hook,
+            method=method,
+            data=data,
+            args=args or {},
+            headers=headers or {},
+            response=response,
+            **kwargs,
+        )
+
+    def send_response(self, response):
+        output = response.output
+        if isinstance(output, tuple):
+            # A 3-sized tuple where the second element is an int and the third
+            # is a dict represents an HTTP response in the format `(data,
+            # http_code headers)`.
+            if (
+                len(output) == 3
+                and isinstance(output[1], int)
+                and isinstance(output[2], dict)
+            ):
+                output = {
+                    '___data___': output[0],
+                    '___code___': output[1],
+                    '___headers___': output[2],
+                }
+            else:
+                # Normalize tuples to lists before serialization
+                output = list(output)
+        if isinstance(output, (dict, list)):
+            output = json.dumps(output)
+
+        if output is None:
+            output = ''
+        get_redis().rpush(self.args['response_queue'], output)
+
+    def wait_response(self, timeout=None):
+        rs = get_redis().blpop(self.args['response_queue'], timeout=timeout)
+        if rs and len(rs) > 1:
+            return rs[1]
 
 
 # vim:sw=4:ts=4:et:
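A minimal sketch of the synchronization pattern used above, where the web route blocks on `BLPOP` until the hook pushes its serialized response with `RPUSH`. Assumptions: a local Redis instance, the `redis` Python client, and an invented queue name (the real one embeds a UUID).

```python
import json
import redis

r = redis.Redis()  # assumes a local Redis, as used by Platypush for IPC
queue = 'platypush/webhook/example-queue'  # invented; real queues embed a uuid1()

# Hook side (producer): serialize and push the response envelope
r.rpush(queue, json.dumps(
    {'___data___': {'ok': True}, '___code___': 201, '___headers___': {}}
))

# Web route side (consumer): block until a response arrives, or time out
item = r.blpop(queue, timeout=60)
if item:
    _, payload = item            # blpop returns a (queue_name, value) pair
    print(json.loads(payload))   # -> {'___data___': {'ok': True}, ...}
```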
platypush/message/event/music/tidal.py (new file, 14 lines)

@@ -0,0 +1,14 @@
from platypush.message.event import Event


class TidalEvent(Event):
    """Base class for Tidal events"""


class TidalPlaylistUpdatedEvent(TidalEvent):
    """
    Event fired when a Tidal playlist is updated.
    """

    def __init__(self, playlist_id: str, *args, **kwargs):
        super().__init__(*args, playlist_id=playlist_id, **kwargs)
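For context (not in the diff), this event can be consumed with a regular Platypush event hook; the handler body below is purely illustrative.

```python
from platypush.event.hook import hook
from platypush.message.event.music.tidal import TidalPlaylistUpdatedEvent


@hook(TidalPlaylistUpdatedEvent)
def on_tidal_playlist_updated(event, **_):
    # event.args carries the keyword arguments passed to the event constructor
    print('Tidal playlist updated:', event.args.get('playlist_id'))
```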
@@ -1,11 +1,13 @@
 import time
 from contextlib import contextmanager
 from multiprocessing import RLock
-from typing import Generator
+from typing import Optional, Generator
 
 from sqlalchemy import create_engine, Table, MetaData
 from sqlalchemy.engine import Engine
+from sqlalchemy.exc import CompileError
 from sqlalchemy.orm import Session, sessionmaker, scoped_session
 from sqlalchemy.sql import and_, or_, text
 
 from platypush.plugins import Plugin, action
@@ -23,10 +25,17 @@ class DbPlugin(Plugin):
 
     def __init__(self, engine=None, *args, **kwargs):
         """
-        :param engine: Default SQLAlchemy connection engine string (e.g. ``sqlite:///:memory:`` or ``mysql://user:pass@localhost/test``) that will be used. You can override the default engine in the db actions.
+        :param engine: Default SQLAlchemy connection engine string (e.g.
+            ``sqlite:///:memory:`` or ``mysql://user:pass@localhost/test``)
+            that will be used. You can override the default engine in the db
+            actions.
         :type engine: str
-        :param args: Extra arguments that will be passed to ``sqlalchemy.create_engine`` (see https://docs.sqlalchemy.org/en/latest/core/engines.html)
-        :param kwargs: Extra kwargs that will be passed to ``sqlalchemy.create_engine`` (seehttps:///docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param args: Extra arguments that will be passed to
+            ``sqlalchemy.create_engine`` (see
+            https://docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param kwargs: Extra kwargs that will be passed to
+            ``sqlalchemy.create_engine``
+            (see https:///docs.sqlalchemy.org/en/latest/core/engines.html)
         """
 
         super().__init__()
@@ -46,7 +55,7 @@ class DbPlugin(Plugin):
         return self.engine
 
     @staticmethod
-    def _build_condition(_, column, value):
+    def _build_condition(table, column, value):  # type: ignore
         if isinstance(value, str):
             value = "'{}'".format(value)
         elif not isinstance(value, int) and not isinstance(value, float):
@@ -70,8 +79,12 @@ class DbPlugin(Plugin):
         :type statement: str
         :param engine: Engine to be used (default: default class engine)
         :type engine: str
-        :param args: Extra arguments that will be passed to ``sqlalchemy.create_engine`` (see https://docs.sqlalchemy.org/en/latest/core/engines.html)
-        :param kwargs: Extra kwargs that will be passed to ``sqlalchemy.create_engine`` (seehttps:///docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param args: Extra arguments that will be passed to
+            ``sqlalchemy.create_engine`` (see
+            https://docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param kwargs: Extra kwargs that will be passed to
+            ``sqlalchemy.create_engine``
+            (see https:///docs.sqlalchemy.org/en/latest/core/engines.html)
         """
 
         engine = self.get_engine(engine, *args, **kwargs)
@@ -107,24 +120,42 @@ class DbPlugin(Plugin):
         return table, engine
 
     @action
-    def select(self, query=None, table=None, filter=None, engine=None, *args, **kwargs):
+    def select(
+        self,
+        query=None,
+        table=None,
+        filter=None,
+        engine=None,
+        data: Optional[dict] = None,
+        *args,
+        **kwargs
+    ):
         """
         Returns rows (as a list of hashes) given a query.
 
         :param query: SQL to be executed
         :type query: str
-        :param filter: Query WHERE filter expressed as a dictionary. This approach is preferred over specifying raw SQL
-            in ``query`` as the latter approach may be prone to SQL injection, unless you need to build some complex
-            SQL logic.
+        :param filter: Query WHERE filter expressed as a dictionary. This
+            approach is preferred over specifying raw SQL
+            in ``query`` as the latter approach may be prone to SQL injection,
+            unless you need to build some complex SQL logic.
         :type filter: dict
-        :param table: If you specified a filter instead of a raw query, you'll have to specify the target table
+        :param table: If you specified a filter instead of a raw query, you'll
+            have to specify the target table
         :type table: str
         :param engine: Engine to be used (default: default class engine)
         :type engine: str
-        :param args: Extra arguments that will be passed to ``sqlalchemy.create_engine``
-            (see https://docs.sqlalchemy.org/en/latest/core/engines.html)
-        :param kwargs: Extra kwargs that will be passed to ``sqlalchemy.create_engine``
-            (seehttps:///docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param data: If ``query`` is an SQL string, then you can use
+            SQLAlchemy's *placeholders* mechanism. You can specify placeholders
+            in the query for values that you want to be safely serialized, and
+            their values can be specified on the ``data`` attribute in a
+            ``name`` ➡️ ``value`` mapping format.
+        :param args: Extra arguments that will be passed to
+            ``sqlalchemy.create_engine`` (see
+            https://docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param kwargs: Extra kwargs that will be passed to
+            ``sqlalchemy.create_engine`` (see
+            https:///docs.sqlalchemy.org/en/latest/core/engines.html)
         :returns: List of hashes representing the result rows.
 
         Examples:
@@ -137,7 +168,10 @@ class DbPlugin(Plugin):
                 "action": "db.select",
                 "args": {
                     "engine": "sqlite:///:memory:",
-                    "query": "SELECT id, name FROM table"
+                    "query": "SELECT id, name FROM table WHERE name = :name",
+                    "data": {
+                        "name": "foobar"
+                    }
                 }
             }
 
@@ -166,19 +200,24 @@ class DbPlugin(Plugin):
 
         engine = self.get_engine(engine, *args, **kwargs)
 
+        if isinstance(query, str):
+            query = text(query)
+
         if table:
             table, engine = self._get_table(table, engine=engine, *args, **kwargs)
             query = table.select()
 
             if filter:
-                for (k,v) in filter.items():
+                for (k, v) in filter.items():
                     query = query.where(self._build_condition(table, k, v))
 
         if query is None:
-            raise RuntimeError('You need to specify either "query", or "table" and "filter"')
+            raise RuntimeError(
+                'You need to specify either "query", or "table" and "filter"'
+            )
 
         with engine.connect() as connection:
-            result = connection.execute(query)
+            result = connection.execute(query, **(data or {}))
             columns = result.keys()
             rows = [
                 {col: row[i] for i, col in enumerate(list(columns))}
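To make the new ``data`` placeholder mechanism concrete, here is a hedged sketch of the raw SQLAlchemy flow that ``db.select`` now performs internally (``text()`` plus bound parameters); the table and values are invented, and the sketch uses the dict parameter form rather than the plugin's keyword form.

```python
from sqlalchemy import create_engine, text

# In-memory example; db.select would receive `engine`, `query` and `data`
# as action arguments instead.
engine = create_engine('sqlite:///:memory:')

with engine.connect() as connection:
    connection.execute(text('CREATE TABLE users (id INTEGER, name TEXT)'))
    connection.execute(text("INSERT INTO users VALUES (1, 'foobar')"))

    # Equivalent of: db.select(query='SELECT id, name FROM users WHERE name = :name',
    #                          data={'name': 'foobar'})
    result = connection.execute(
        text('SELECT id, name FROM users WHERE name = :name'), {'name': 'foobar'}
    )
    print([dict(zip(result.keys(), row)) for row in result])
```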
@@ -188,8 +227,16 @@ class DbPlugin(Plugin):
         return rows
 
     @action
-    def insert(self, table, records, engine=None, key_columns=None,
-               on_duplicate_update=False, *args, **kwargs):
+    def insert(
+        self,
+        table,
+        records,
+        engine=None,
+        key_columns=None,
+        on_duplicate_update=False,
+        *args,
+        **kwargs
+    ):
         """
         Inserts records (as a list of hashes) into a table.
 
@@ -199,12 +246,25 @@ class DbPlugin(Plugin):
         :type records: list
         :param engine: Engine to be used (default: default class engine)
         :type engine: str
-        :param key_columns: Set it to specify the names of the key columns for ``table``. Set it if you want your statement to be executed with the ``on_duplicate_update`` flag.
+        :param key_columns: Set it to specify the names of the key columns for
+            ``table``. Set it if you want your statement to be executed with
+            the ``on_duplicate_update`` flag.
         :type key_columns: list
-        :param on_duplicate_update: If set, update the records in case of duplicate rows (default: False). If set, you'll need to specify ``key_columns`` as well.
+        :param on_duplicate_update: If set, update the records in case of
+            duplicate rows (default: False). If set, you'll need to specify
+            ``key_columns`` as well. If ``key_columns`` is set, existing
+            records are found but ``on_duplicate_update`` is false, then
+            existing records will be ignored.
         :type on_duplicate_update: bool
-        :param args: Extra arguments that will be passed to ``sqlalchemy.create_engine`` (see https://docs.sqlalchemy.org/en/latest/core/engines.html)
-        :param kwargs: Extra kwargs that will be passed to ``sqlalchemy.create_engine`` (seehttps:///docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param args: Extra arguments that will be passed to
+            ``sqlalchemy.create_engine`` (see
+            https://docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param kwargs: Extra kwargs that will be passed to
+            ``sqlalchemy.create_engine``
+            (see https:///docs.sqlalchemy.org/en/latest/core/engines.html)
+
+        :return: The inserted records, if the underlying engine supports the
+            ``RETURNING`` statement, otherwise nothing.
 
         Example:
 
@@ -232,24 +292,107 @@ class DbPlugin(Plugin):
             }
         """
 
+        if on_duplicate_update:
+            assert (
+                key_columns
+            ), 'on_duplicate_update requires key_columns to be specified'
+
+        if key_columns is None:
+            key_columns = []
+
         engine = self.get_engine(engine, *args, **kwargs)
+        table, engine = self._get_table(table, engine=engine, *args, **kwargs)
+        insert_records = records
+        update_records = []
+        returned_records = []
+
+        with engine.connect() as connection:
+            # Upsert case
+            if key_columns:
+                insert_records, update_records = self._get_new_and_existing_records(
+                    connection, table, records, key_columns
+                )
+
+            with connection.begin():
+                if insert_records:
+                    insert = table.insert().values(insert_records)
+                    ret = self._execute_try_returning(connection, insert)
+                    if ret:
+                        returned_records += ret
+
+                if update_records and on_duplicate_update:
+                    ret = self._update(connection, table, update_records, key_columns)
+                    if ret:
+                        returned_records = ret + returned_records
+
+        if returned_records:
+            return returned_records
+
+    @staticmethod
+    def _execute_try_returning(connection, stmt):
+        ret = None
+        stmt_with_ret = stmt.returning('*')
+
+        try:
+            ret = connection.execute(stmt_with_ret)
+        except CompileError as e:
+            if str(e).startswith('RETURNING is not supported'):
+                connection.execute(stmt)
+            else:
+                raise e
+
+        if ret:
+            return [
+                {col.name: getattr(row, col.name, None) for col in stmt.table.c}
+                for row in ret
+            ]
+
+    def _get_new_and_existing_records(self, connection, table, records, key_columns):
+        records_by_key = {
+            tuple(record.get(k) for k in key_columns): record for record in records
+        }
 
+        query = table.select().where(
+            or_(
+                and_(
+                    self._build_condition(table, k, record.get(k)) for k in key_columns
+                )
+                for record in records
+            )
+        )
+
+        existing_records = {
+            tuple(getattr(record, k, None) for k in key_columns): record
+            for record in connection.execute(query).all()
+        }
+
+        update_records = [
+            record for k, record in records_by_key.items() if k in existing_records
+        ]
+
+        insert_records = [
+            record for k, record in records_by_key.items() if k not in existing_records
+        ]
+
+        return insert_records, update_records
+
+    def _update(self, connection, table, records, key_columns):
+        updated_records = []
         for record in records:
-            table, engine = self._get_table(table, engine=engine, *args, **kwargs)
-            insert = table.insert().values(**record)
+            key = {k: v for (k, v) in record.items() if k in key_columns}
+            values = {k: v for (k, v) in record.items() if k not in key_columns}
+            update = table.update()
 
-            try:
-                engine.execute(insert)
-            except Exception as e:
-                if on_duplicate_update and key_columns:
-                    self.update(table=table, records=records,
-                                key_columns=key_columns, engine=engine,
-                                *args, **kwargs)
-                else:
-                    raise e
+            for (k, v) in key.items():
+                update = update.where(self._build_condition(table, k, v))
+
+            update = update.values(**values)
+            ret = self._execute_try_returning(connection, update)
+            if ret:
+                updated_records += ret
+
+        if updated_records:
+            return updated_records
 
     @action
     def update(self, table, records, key_columns, engine=None, *args, **kwargs):
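A short usage sketch (hypothetical table, engine and records) of the bulk upsert path added above: with ``key_columns`` set and ``on_duplicate_update=True``, existing rows are routed to ``_update()`` and new ones go into a single bulk ``INSERT``, with the rows echoed back when the engine supports ``RETURNING``.

```python
# Hypothetical invocation of the `db.insert` action with the new upsert flags.
# The `temperature` table, its columns and the engine URL are made up, and the
# table is assumed to already exist.
from platypush.context import get_plugin

db = get_plugin('db')
rows = db.insert(
    table='temperature',
    engine='sqlite:////tmp/sensors.db',
    records=[
        {'sensor_id': 'kitchen', 'value': 21.5},
        {'sensor_id': 'living_room', 'value': 22.0},
    ],
    key_columns=['sensor_id'],   # rows matching on sensor_id get UPDATEd
    on_duplicate_update=True,    # without this, already-existing rows are skipped
).output                         # actions return a Response; .output holds the rows (or None)

print(rows)
```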
@@ -264,8 +407,15 @@ class DbPlugin(Plugin):
         :type key_columns: list
         :param engine: Engine to be used (default: default class engine)
         :type engine: str
-        :param args: Extra arguments that will be passed to ``sqlalchemy.create_engine`` (see https://docs.sqlalchemy.org/en/latest/core/engines.html)
-        :param kwargs: Extra kwargs that will be passed to ``sqlalchemy.create_engine`` (seehttps:///docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param args: Extra arguments that will be passed to
+            ``sqlalchemy.create_engine`` (see
+            https://docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param kwargs: Extra kwargs that will be passed to
+            ``sqlalchemy.create_engine``
+            (see https:///docs.sqlalchemy.org/en/latest/core/engines.html)
+
+        :return: The inserted records, if the underlying engine supports the
+            ``RETURNING`` statement, otherwise nothing.
 
         Example:
 
@@ -293,21 +443,10 @@ class DbPlugin(Plugin):
                 }
             }
         """
 
         engine = self.get_engine(engine, *args, **kwargs)
 
-        for record in records:
+        with engine.connect() as connection:
             table, engine = self._get_table(table, engine=engine, *args, **kwargs)
-            key = { k:v for (k,v) in record.items() if k in key_columns }
-            values = { k:v for (k,v) in record.items() if k not in key_columns }
-
-            update = table.update()
-
-            for (k,v) in key.items():
-                update = update.where(self._build_condition(table, k, v))
-
-            update = update.values(**values)
-            engine.execute(update)
+            return self._update(connection, table, records, key_columns)
 
     @action
     def delete(self, table, records, engine=None, *args, **kwargs):
@@ -320,8 +459,12 @@ class DbPlugin(Plugin):
         :type records: list
         :param engine: Engine to be used (default: default class engine)
        :type engine: str
-        :param args: Extra arguments that will be passed to ``sqlalchemy.create_engine`` (see https://docs.sqlalchemy.org/en/latest/core/engines.html)
-        :param kwargs: Extra kwargs that will be passed to ``sqlalchemy.create_engine`` (seehttps:///docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param args: Extra arguments that will be passed to
+            ``sqlalchemy.create_engine`` (see
+            https://docs.sqlalchemy.org/en/latest/core/engines.html)
+        :param kwargs: Extra kwargs that will be passed to
+            ``sqlalchemy.create_engine``
+            (see https:///docs.sqlalchemy.org/en/latest/core/engines.html)
 
         Example:
 
@@ -344,14 +487,15 @@ class DbPlugin(Plugin):
 
         engine = self.get_engine(engine, *args, **kwargs)
 
-        for record in records:
-            table, engine = self._get_table(table, engine=engine, *args, **kwargs)
-            delete = table.delete()
+        with engine.connect() as connection, connection.begin():
+            for record in records:
+                table, engine = self._get_table(table, engine=engine, *args, **kwargs)
+                delete = table.delete()
 
-            for (k,v) in record.items():
-                delete = delete.where(self._build_condition(table, k, v))
+                for (k, v) in record.items():
+                    delete = delete.where(self._build_condition(table, k, v))
 
-            engine.execute(delete)
+                connection.execute(delete)
 
     def create_all(self, engine, base):
         self._session_locks[engine.url] = self._session_locks.get(engine.url, RLock())
@@ -359,7 +503,9 @@ class DbPlugin(Plugin):
             base.metadata.create_all(engine)
 
     @contextmanager
-    def get_session(self, engine=None, *args, **kwargs) -> Generator[Session, None, None]:
+    def get_session(
+        self, engine=None, *args, **kwargs
+    ) -> Generator[Session, None, None]:
         engine = self.get_engine(engine, *args, **kwargs)
         self._session_locks[engine.url] = self._session_locks.get(engine.url, RLock())
         with self._session_locks[engine.url]:
@@ -6,9 +6,17 @@ from platypush.message.response import Response
 from platypush.plugins import action
 from platypush.plugins.media import PlayerState
 from platypush.plugins.music import MusicPlugin
-from platypush.schemas.spotify import SpotifyDeviceSchema, SpotifyStatusSchema, SpotifyTrackSchema, \
-    SpotifyHistoryItemSchema, SpotifyPlaylistSchema, SpotifyAlbumSchema, SpotifyEpisodeSchema, SpotifyShowSchema, \
-    SpotifyArtistSchema
+from platypush.schemas.spotify import (
+    SpotifyDeviceSchema,
+    SpotifyStatusSchema,
+    SpotifyTrackSchema,
+    SpotifyHistoryItemSchema,
+    SpotifyPlaylistSchema,
+    SpotifyAlbumSchema,
+    SpotifyEpisodeSchema,
+    SpotifyShowSchema,
+    SpotifyArtistSchema,
+)
 
 
 class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
@ -45,9 +53,16 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
be printed on the application logs/stdout.
|
||||
"""
|
||||
|
||||
def __init__(self, client_id: Optional[str] = None, client_secret: Optional[str] = None, **kwargs):
|
||||
def __init__(
|
||||
self,
|
||||
client_id: Optional[str] = None,
|
||||
client_secret: Optional[str] = None,
|
||||
**kwargs,
|
||||
):
|
||||
MusicPlugin.__init__(self, **kwargs)
|
||||
SpotifyMixin.__init__(self, client_id=client_id, client_secret=client_secret, **kwargs)
|
||||
SpotifyMixin.__init__(
|
||||
self, client_id=client_id, client_secret=client_secret, **kwargs
|
||||
)
|
||||
self._players_by_id = {}
|
||||
self._players_by_name = {}
|
||||
# Playlist ID -> snapshot ID and tracks cache
|
||||
|
@ -63,14 +78,16 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
return dev
|
||||
|
||||
@staticmethod
|
||||
def _parse_datetime(dt: Optional[Union[str, datetime, int, float]]) -> Optional[datetime]:
|
||||
def _parse_datetime(
|
||||
dt: Optional[Union[str, datetime, int, float]]
|
||||
) -> Optional[datetime]:
|
||||
if isinstance(dt, str):
|
||||
try:
|
||||
dt = float(dt)
|
||||
except (ValueError, TypeError):
|
||||
return datetime.fromisoformat(dt)
|
||||
|
||||
if isinstance(dt, int) or isinstance(dt, float):
|
||||
if isinstance(dt, (int, float)):
|
||||
return datetime.fromtimestamp(dt)
|
||||
|
||||
return dt
|
||||
|
@ -85,18 +102,12 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
devices = self.spotify_user_call('/v1/me/player/devices').get('devices', [])
|
||||
self._players_by_id = {
|
||||
**self._players_by_id,
|
||||
**{
|
||||
dev['id']: dev
|
||||
for dev in devices
|
||||
}
|
||||
**{dev['id']: dev for dev in devices},
|
||||
}
|
||||
|
||||
self._players_by_name = {
|
||||
**self._players_by_name,
|
||||
**{
|
||||
dev['name']: dev
|
||||
for dev in devices
|
||||
}
|
||||
**{dev['name']: dev for dev in devices},
|
||||
}
|
||||
|
||||
return SpotifyDeviceSchema().dump(devices, many=True)
|
||||
|
@ -118,7 +129,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
params={
|
||||
'volume_percent': volume,
|
||||
**({'device_id': device} if device else {}),
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
def _get_volume(self, device: Optional[str] = None) -> Optional[int]:
|
||||
|
@ -138,10 +149,13 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
if device:
|
||||
device = self._get_device(device)['id']
|
||||
|
||||
self.spotify_user_call('/v1/me/player/volume', params={
|
||||
'volume_percent': min(100, (self._get_volume() or 0) + delta),
|
||||
**({'device_id': device} if device else {}),
|
||||
})
|
||||
self.spotify_user_call(
|
||||
'/v1/me/player/volume',
|
||||
params={
|
||||
'volume_percent': min(100, (self._get_volume() or 0) + delta),
|
||||
**({'device_id': device} if device else {}),
|
||||
},
|
||||
)
|
||||
|
||||
@action
|
||||
def voldown(self, delta: int = 5, device: Optional[str] = None):
|
||||
|
@ -154,10 +168,13 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
if device:
|
||||
device = self._get_device(device)['id']
|
||||
|
||||
self.spotify_user_call('/v1/me/player/volume', params={
|
||||
'volume_percent': max(0, (self._get_volume() or 0) - delta),
|
||||
**({'device_id': device} if device else {}),
|
||||
})
|
||||
self.spotify_user_call(
|
||||
'/v1/me/player/volume',
|
||||
params={
|
||||
'volume_percent': max(0, (self._get_volume() or 0) - delta),
|
||||
**({'device_id': device} if device else {}),
|
||||
},
|
||||
)
|
||||
|
||||
@action
|
||||
def play(self, resource: Optional[str] = None, device: Optional[str] = None):
|
||||
|
@ -192,8 +209,12 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
|
||||
# noinspection PyUnresolvedReferences
|
||||
status = self.status().output
|
||||
state = 'play' \
|
||||
if status.get('device_id') != device or status.get('state') != PlayerState.PLAY.value else 'pause'
|
||||
state = (
|
||||
'play'
|
||||
if status.get('device_id') != device
|
||||
or status.get('state') != PlayerState.PLAY.value
|
||||
else 'pause'
|
||||
)
|
||||
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player/{state}',
|
||||
|
@ -212,7 +233,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
status = self.status().output
|
||||
if status.get('state') == PlayerState.PLAY.value:
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player/pause',
|
||||
'/v1/me/player/pause',
|
||||
method='put',
|
||||
)
|
||||
|
||||
|
@ -230,7 +251,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
status = self.status().output
|
||||
if status.get('state') != PlayerState.PLAY.value:
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player/play',
|
||||
'/v1/me/player/play',
|
||||
method='put',
|
||||
params={
|
||||
**({'device_id': device} if device else {}),
|
||||
|
@ -261,7 +282,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
"""
|
||||
device = self._get_device(device)['id']
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player',
|
||||
'/v1/me/player',
|
||||
method='put',
|
||||
json={
|
||||
'device_ids': [device],
|
||||
|
@ -279,7 +300,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
device = self._get_device(device)['id']
|
||||
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player/next',
|
||||
'/v1/me/player/next',
|
||||
method='post',
|
||||
params={
|
||||
**({'device_id': device} if device else {}),
|
||||
|
@ -297,7 +318,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
device = self._get_device(device)['id']
|
||||
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player/previous',
|
||||
'/v1/me/player/previous',
|
||||
method='post',
|
||||
params={
|
||||
**({'device_id': device} if device else {}),
|
||||
|
@ -316,7 +337,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
device = self._get_device(device)['id']
|
||||
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player/seek',
|
||||
'/v1/me/player/seek',
|
||||
method='put',
|
||||
params={
|
||||
'position_ms': int(position * 1000),
|
||||
|
@ -338,13 +359,16 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
if value is None:
|
||||
# noinspection PyUnresolvedReferences
|
||||
status = self.status().output
|
||||
state = 'context' \
|
||||
if status.get('device_id') != device or not status.get('repeat') else 'off'
|
||||
state = (
|
||||
'context'
|
||||
if status.get('device_id') != device or not status.get('repeat')
|
||||
else 'off'
|
||||
)
|
||||
else:
|
||||
state = value is True
|
||||
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player/repeat',
|
||||
'/v1/me/player/repeat',
|
||||
method='put',
|
||||
params={
|
||||
'state': 'context' if state else 'off',
|
||||
|
@ -366,12 +390,12 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
if value is None:
|
||||
# noinspection PyUnresolvedReferences
|
||||
status = self.status().output
|
||||
state = True if status.get('device_id') != device or not status.get('random') else False
|
||||
state = bool(status.get('device_id') != device or not status.get('random'))
|
||||
else:
|
||||
state = value is True
|
||||
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player/shuffle',
|
||||
'/v1/me/player/shuffle',
|
||||
method='put',
|
||||
params={
|
||||
'state': state,
|
||||
|
@ -380,8 +404,12 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
)
|
||||
|
||||
@action
|
||||
def history(self, limit: int = 20, before: Optional[Union[datetime, str, int]] = None,
|
||||
after: Optional[Union[datetime, str, int]] = None):
|
||||
def history(
|
||||
self,
|
||||
limit: int = 20,
|
||||
before: Optional[Union[datetime, str, int]] = None,
|
||||
after: Optional[Union[datetime, str, int]] = None,
|
||||
):
|
||||
"""
|
||||
Get a list of recently played track on the account.
|
||||
|
||||
|
@ -396,21 +424,26 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
after = self._parse_datetime(after)
|
||||
assert not (before and after), 'before and after cannot both be set'
|
||||
|
||||
results = self._spotify_paginate_results('/v1/me/player/recently-played',
|
||||
limit=limit,
|
||||
params={
|
||||
'limit': min(limit, 50),
|
||||
**({'before': before} if before else {}),
|
||||
**({'after': after} if after else {}),
|
||||
})
|
||||
results = self._spotify_paginate_results(
|
||||
'/v1/me/player/recently-played',
|
||||
limit=limit,
|
||||
params={
|
||||
'limit': min(limit, 50),
|
||||
**({'before': before} if before else {}),
|
||||
**({'after': after} if after else {}),
|
||||
},
|
||||
)
|
||||
|
||||
return SpotifyHistoryItemSchema().dump([
|
||||
{
|
||||
**item.pop('track'),
|
||||
**item,
|
||||
}
|
||||
for item in results
|
||||
], many=True)
|
||||
return SpotifyHistoryItemSchema().dump(
|
||||
[
|
||||
{
|
||||
**item.pop('track'),
|
||||
**item,
|
||||
}
|
||||
for item in results
|
||||
],
|
||||
many=True,
|
||||
)
|
||||
|
||||
@action
|
||||
def add(self, resource: str, device: Optional[str] = None, **kwargs):
|
||||
|
@ -424,7 +457,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
device = self._get_device(device)['id']
|
||||
|
||||
self.spotify_user_call(
|
||||
f'/v1/me/player/queue',
|
||||
'/v1/me/player/queue',
|
||||
method='post',
|
||||
params={
|
||||
'uri': resource,
|
||||
|
@ -472,7 +505,9 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
return SpotifyTrackSchema().dump(track)
|
||||
|
||||
@action
|
||||
def get_playlists(self, limit: int = 1000, offset: int = 0, user: Optional[str] = None):
|
||||
def get_playlists(
|
||||
self, limit: int = 1000, offset: int = 0, user: Optional[str] = None
|
||||
):
|
||||
"""
|
||||
Get the user's playlists.
|
||||
|
||||
|
@ -483,7 +518,8 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
"""
|
||||
playlists = self._spotify_paginate_results(
|
||||
f'/v1/{"users/" + user if user else "me"}/playlists',
|
||||
limit=limit, offset=offset
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
|
||||
return SpotifyPlaylistSchema().dump(playlists, many=True)
|
||||
|
@ -491,36 +527,45 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
def _get_playlist(self, playlist: str) -> dict:
|
||||
playlists = self.get_playlists().output
|
||||
playlists = [
|
||||
pl for pl in playlists if (
|
||||
pl['id'] == playlist or
|
||||
pl['uri'] == playlist or
|
||||
pl['name'] == playlist
|
||||
)
|
||||
pl
|
||||
for pl in playlists
|
||||
if (pl['id'] == playlist or pl['uri'] == playlist or pl['name'] == playlist)
|
||||
]
|
||||
|
||||
assert playlists, f'No such playlist ID, URI or name: {playlist}'
|
||||
return playlists[0]
|
||||
|
||||
def _get_playlist_tracks_from_cache(self, id: str, snapshot_id: str, limit: Optional[int] = None,
|
||||
offset: int = 0) -> Optional[Iterable]:
|
||||
def _get_playlist_tracks_from_cache(
|
||||
self, id: str, snapshot_id: str, limit: Optional[int] = None, offset: int = 0
|
||||
) -> Optional[Iterable]:
|
||||
snapshot = self._playlist_snapshots.get(id)
|
||||
if (
|
||||
not snapshot or
|
||||
snapshot['snapshot_id'] != snapshot_id or
|
||||
(limit is None and snapshot['limit'] is not None)
|
||||
not snapshot
|
||||
or snapshot['snapshot_id'] != snapshot_id
|
||||
or (limit is None and snapshot['limit'] is not None)
|
||||
):
|
||||
return
|
||||
|
||||
if limit is not None and snapshot['limit'] is not None:
|
||||
stored_range = (snapshot['limit'], snapshot['limit'] + snapshot['offset'])
|
||||
requested_range = (limit, limit + offset)
|
||||
if requested_range[0] < stored_range[0] or requested_range[1] > stored_range[1]:
|
||||
if (
|
||||
requested_range[0] < stored_range[0]
|
||||
or requested_range[1] > stored_range[1]
|
||||
):
|
||||
return
|
||||
|
||||
return snapshot['tracks']
|
||||
|
||||
def _cache_playlist_data(self, id: str, snapshot_id: str, tracks: Iterable[dict], limit: Optional[int] = None,
|
||||
offset: int = 0, **_):
|
||||
def _cache_playlist_data(
|
||||
self,
|
||||
id: str,
|
||||
snapshot_id: str,
|
||||
tracks: Iterable[dict],
|
||||
limit: Optional[int] = None,
|
||||
offset: int = 0,
|
||||
**_,
|
||||
):
|
||||
self._playlist_snapshots[id] = {
|
||||
'id': id,
|
||||
'tracks': tracks,
|
||||
|
@ -530,7 +575,13 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
}
|
||||
|
||||
@action
|
||||
def get_playlist(self, playlist: str, with_tracks: bool = True, limit: Optional[int] = None, offset: int = 0):
|
||||
def get_playlist(
|
||||
self,
|
||||
playlist: str,
|
||||
with_tracks: bool = True,
|
||||
limit: Optional[int] = None,
|
||||
offset: int = 0,
|
||||
):
|
||||
"""
|
||||
Get a playlist content.
|
||||
|
||||
|
@ -544,8 +595,10 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
playlist = self._get_playlist(playlist)
|
||||
if with_tracks:
|
||||
playlist['tracks'] = self._get_playlist_tracks_from_cache(
|
||||
playlist['id'], snapshot_id=playlist['snapshot_id'],
|
||||
limit=limit, offset=offset
|
||||
playlist['id'],
|
||||
snapshot_id=playlist['snapshot_id'],
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
|
||||
if playlist['tracks'] is None:
|
||||
|
@ -554,13 +607,16 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
**track,
|
||||
'track': {
|
||||
**track['track'],
|
||||
'position': offset+i+1,
|
||||
}
|
||||
'position': offset + i + 1,
|
||||
},
|
||||
}
|
||||
for i, track in enumerate(self._spotify_paginate_results(
|
||||
f'/v1/playlists/{playlist["id"]}/tracks',
|
||||
limit=limit, offset=offset
|
||||
))
|
||||
for i, track in enumerate(
|
||||
self._spotify_paginate_results(
|
||||
f'/v1/playlists/{playlist["id"]}/tracks',
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
)
|
||||
]
|
||||
|
||||
self._cache_playlist_data(**playlist, limit=limit, offset=offset)
|
||||
|
@ -568,7 +624,12 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
return SpotifyPlaylistSchema().dump(playlist)
|
||||
|
||||
@action
|
||||
def add_to_playlist(self, playlist: str, resources: Union[str, Iterable[str]], position: Optional[int] = None):
|
||||
def add_to_playlist(
|
||||
self,
|
||||
playlist: str,
|
||||
resources: Union[str, Iterable[str]],
|
||||
position: Optional[int] = None,
|
||||
):
|
||||
"""
|
||||
Add one or more items to a playlist.
|
||||
|
||||
|
@ -585,11 +646,14 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
},
|
||||
json={
|
||||
'uris': [
|
||||
uri.strip() for uri in (
|
||||
resources.split(',') if isinstance(resources, str) else resources
|
||||
uri.strip()
|
||||
for uri in (
|
||||
resources.split(',')
|
||||
if isinstance(resources, str)
|
||||
else resources
|
||||
)
|
||||
]
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
snapshot_id = response.get('snapshot_id')
|
||||
|
@ -611,18 +675,27 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
'tracks': [
|
||||
{'uri': uri.strip()}
|
||||
for uri in (
|
||||
resources.split(',') if isinstance(resources, str) else resources
|
||||
resources.split(',')
|
||||
if isinstance(resources, str)
|
||||
else resources
|
||||
)
|
||||
]
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
snapshot_id = response.get('snapshot_id')
|
||||
assert snapshot_id is not None, 'Could not save playlist'
|
||||
|
||||
@action
|
||||
def playlist_move(self, playlist: str, from_pos: int, to_pos: int, range_length: int = 1,
|
||||
resources: Optional[Union[str, Iterable[str]]] = None, **_):
|
||||
def playlist_move(
|
||||
self,
|
||||
playlist: str,
|
||||
from_pos: int,
|
||||
to_pos: int,
|
||||
range_length: int = 1,
|
||||
resources: Optional[Union[str, Iterable[str]]] = None,
|
||||
**_,
|
||||
):
|
||||
"""
|
||||
Move or replace elements in a playlist.
|
||||
|
||||
|
@ -641,12 +714,21 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
'range_start': int(from_pos) + 1,
|
||||
'range_length': int(range_length),
|
||||
'insert_before': int(to_pos) + 1,
|
||||
**({'uris': [
|
||||
uri.strip() for uri in (
|
||||
resources.split(',') if isinstance(resources, str) else resources
|
||||
)
|
||||
]} if resources else {})
|
||||
}
|
||||
**(
|
||||
{
|
||||
'uris': [
|
||||
uri.strip()
|
||||
for uri in (
|
||||
resources.split(',')
|
||||
if isinstance(resources, str)
|
||||
else resources
|
||||
)
|
||||
]
|
||||
}
|
||||
if resources
|
||||
else {}
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
snapshot_id = response.get('snapshot_id')
|
||||
|
@ -673,8 +755,14 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
|
||||
# noinspection PyShadowingBuiltins
|
||||
@action
|
||||
def search(self, query: Optional[Union[str, dict]] = None, limit: int = 50, offset: int = 0, type: str = 'track',
|
||||
**filter) -> Iterable[dict]:
|
||||
def search(
|
||||
self,
|
||||
query: Optional[Union[str, dict]] = None,
|
||||
limit: int = 50,
|
||||
offset: int = 0,
|
||||
type: str = 'track',
|
||||
**filter,
|
||||
) -> Iterable[dict]:
|
||||
"""
|
||||
Search for tracks matching a certain criteria.
|
||||
|
||||
|
@ -714,12 +802,16 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
}.get('uri', [])
|
||||
|
||||
uris = uri.split(',') if isinstance(uri, str) else uri
|
||||
params = {
|
||||
'ids': ','.join([uri.split(':')[-1].strip() for uri in uris]),
|
||||
} if uris else {
|
||||
'q': self._make_filter(query, **filter),
|
||||
'type': type,
|
||||
}
|
||||
params = (
|
||||
{
|
||||
'ids': ','.join([uri.split(':')[-1].strip() for uri in uris]),
|
||||
}
|
||||
if uris
|
||||
else {
|
||||
'q': self._make_filter(query, **filter),
|
||||
'type': type,
|
||||
}
|
||||
)
|
||||
|
||||
response = self._spotify_paginate_results(
|
||||
f'/v1/{type + "s" if uris else "search"}',
|
||||
|
@ -739,7 +831,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
track.get('track'),
|
||||
track.get('title'),
|
||||
track.get('popularity'),
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
schema_class = None
|
||||
|
@@ -759,6 +851,31 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
 
         return response
 
+    @action
+    def create_playlist(
+        self, name: str, description: Optional[str] = None, public: bool = False
+    ):
+        """
+        Create a playlist.
+
+        :param name: Playlist name.
+        :param description: Optional playlist description.
+        :param public: Whether the new playlist should be public
+            (default: False).
+        :return: .. schema:: spotify.SpotifyPlaylistSchema
+        """
+        ret = self.spotify_user_call(
+            '/v1/users/me/playlists',
+            method='post',
+            json={
+                'name': name,
+                'description': description,
+                'public': public,
+            },
+        )
+
+        return SpotifyPlaylistSchema().dump(ret)
+
     @action
     def follow_playlist(self, playlist: str, public: bool = True):
         """
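A small hypothetical call of the new `music.spotify.create_playlist` action, on the same pattern as the other plugin actions (it assumes the plugin is already configured with valid Spotify credentials; the playlist name is invented):

```python
from platypush.context import get_plugin

spotify = get_plugin('music.spotify')
pl = spotify.create_playlist(
    name='Test playlist', description='Created via Platypush'
).output
print(pl)  # serialized through SpotifyPlaylistSchema
```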
@ -774,7 +891,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
method='put',
|
||||
json={
|
||||
'public': public,
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
@action
|
||||
|
@ -792,10 +909,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
|
||||
@staticmethod
|
||||
def _uris_to_id(*uris: str) -> Iterable[str]:
|
||||
return [
|
||||
uri.split(':')[-1]
|
||||
for uri in uris
|
||||
]
|
||||
return [uri.split(':')[-1] for uri in uris]
|
||||
|
||||
@action
|
||||
def get_albums(self, limit: int = 50, offset: int = 0) -> List[dict]:
|
||||
|
@ -811,7 +925,8 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
'/v1/me/albums',
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
), many=True
|
||||
),
|
||||
many=True,
|
||||
)
|
||||
|
||||
@action
|
||||
|
@ -852,9 +967,7 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
return [
|
||||
SpotifyTrackSchema().dump(item['track'])
|
||||
for item in self._spotify_paginate_results(
|
||||
'/v1/me/tracks',
|
||||
limit=limit,
|
||||
offset=offset
|
||||
'/v1/me/tracks', limit=limit, offset=offset
|
||||
)
|
||||
]
|
||||
|
||||
|
@ -898,7 +1011,8 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
'/v1/me/episodes',
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
), many=True
|
||||
),
|
||||
many=True,
|
||||
)
|
||||
|
||||
@action
|
||||
|
@ -941,7 +1055,8 @@ class MusicSpotifyPlugin(MusicPlugin, SpotifyMixin):
|
|||
'/v1/me/shows',
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
), many=True
|
||||
),
|
||||
many=True,
|
||||
)
|
||||
|
||||
@action
|
||||
|
|
397
platypush/plugins/music/tidal/__init__.py
Normal file
397
platypush/plugins/music/tidal/__init__.py
Normal file
|
@ -0,0 +1,397 @@
|
|||
import json
|
||||
import os
|
||||
import pathlib
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Iterable, Optional, Union
|
||||
|
||||
from platypush.config import Config
|
||||
from platypush.context import Variable, get_bus
|
||||
from platypush.message.event.music.tidal import TidalPlaylistUpdatedEvent
|
||||
from platypush.plugins import RunnablePlugin, action
|
||||
from platypush.plugins.music.tidal.workers import get_items
|
||||
from platypush.schemas.tidal import (
|
||||
TidalAlbumSchema,
|
||||
TidalPlaylistSchema,
|
||||
TidalArtistSchema,
|
||||
TidalSearchResultsSchema,
|
||||
TidalTrackSchema,
|
||||
)
|
||||
|
||||
|
||||
class MusicTidalPlugin(RunnablePlugin):
|
||||
"""
|
||||
Plugin to interact with the user's Tidal account and library.
|
||||
|
||||
Upon the first login, the application will prompt you with a link to
|
||||
connect to your Tidal account. Once authorized, you should no longer be
|
||||
required to explicitly login.
|
||||
|
||||
Triggers:
|
||||
|
||||
* :class:`platypush.message.event.music.TidalPlaylistUpdatedEvent`: when a user playlist
|
||||
is updated.
|
||||
|
||||
Requires:
|
||||
|
||||
* **tidalapi** (``pip install 'tidalapi >= 0.7.0'``)
|
||||
|
||||
"""
|
||||
|
||||
_base_url = 'https://api.tidalhifi.com/v1/'
|
||||
_default_credentials_file = os.path.join(
|
||||
str(Config.get('workdir')), 'tidal', 'credentials.json'
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
quality: str = 'high',
|
||||
credentials_file: str = _default_credentials_file,
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
:param quality: Default audio quality. Default: ``high``.
|
||||
Supported: [``loseless``, ``master``, ``high``, ``low``].
|
||||
:param credentials_file: Path to the file where the OAuth session
|
||||
parameters will be stored (default:
|
||||
``<WORKDIR>/tidal/credentials.json``).
|
||||
"""
|
||||
from tidalapi import Quality
|
||||
|
||||
super().__init__(**kwargs)
|
||||
self._credentials_file = os.path.expanduser(credentials_file)
|
||||
self._user_playlists = {}
|
||||
|
||||
try:
|
||||
self._quality = getattr(Quality, quality.lower())
|
||||
except AttributeError:
|
||||
raise AssertionError(
|
||||
f'Invalid quality: {quality}. Supported values: '
|
||||
f'{[q.name for q in Quality]}'
|
||||
)
|
||||
|
||||
self._session = None
|
||||
|
||||
    def _oauth_open_saved_session(self):
        if not self._session:
            return

        try:
            with open(self._credentials_file, 'r') as f:
                data = json.load(f)
                self._session.load_oauth_session(
                    data['token_type'], data['access_token'], data['refresh_token']
                )
        except Exception as e:
            self.logger.warning('Could not load %s: %s', self._credentials_file, e)

    def _oauth_create_new_session(self):
        if not self._session:
            return

        self._session.login_oauth_simple(function=self.logger.warning)  # type: ignore
        if self._session.check_login():
            data = {
                'token_type': self._session.token_type,
                'session_id': self._session.session_id,
                'access_token': self._session.access_token,
                'refresh_token': self._session.refresh_token,
            }

            pathlib.Path(os.path.dirname(self._credentials_file)).mkdir(
                parents=True, exist_ok=True
            )

            with open(self._credentials_file, 'w') as outfile:
                json.dump(data, outfile)

    @property
    def session(self):
        from tidalapi import Config, Session

        if self._session and self._session.check_login():
            return self._session

        # Attempt to reload the existing session from file
        self._session = Session(config=Config(quality=self._quality))
        self._oauth_open_saved_session()
        if not self._session.check_login():
            # Create a new session if we couldn't load an existing one
            self._oauth_create_new_session()

        assert (
            self._session.user and self._session.check_login()
        ), 'Could not connect to TIDAL'

        return self._session

    @property
    def user(self):
        user = self.session.user
        assert user, 'Not logged in'
        return user

    @action
    def create_playlist(self, name: str, description: Optional[str] = None):
        """
        Create a new playlist.

        :param name: Playlist name.
        :param description: Optional playlist description.
        :return: .. schema:: tidal.TidalPlaylistSchema
        """
        ret = self.user.create_playlist(name, description)
        return TidalPlaylistSchema().dump(ret)

    @action
    def delete_playlist(self, playlist_id: str):
        """
        Delete a playlist by ID.

        :param playlist_id: ID of the playlist to delete.
        """
        pl = self.session.playlist(playlist_id)
        pl.delete()
    @action
    def edit_playlist(self, playlist_id: str, title=None, description=None):
        """
        Edit a playlist's metadata.

        :param playlist_id: Playlist ID.
        :param title: New title.
        :param description: New description.
        """
        pl = self.session.playlist(playlist_id)
        pl.edit(title=title, description=description)
    @action
    def get_playlists(self):
        """
        Get the user's playlists (track lists are excluded).

        :return: .. schema:: tidal.TidalPlaylistSchema(many=True)
        """
        ret = self.user.playlists() + self.user.favorites.playlists()
        return TidalPlaylistSchema().dump(ret, many=True)

    @action
    def get_playlist(self, playlist_id: str):
        """
        Get the details of a playlist (including tracks).

        :param playlist_id: Playlist ID.
        :return: .. schema:: tidal.TidalPlaylistSchema
        """
        pl = self.session.playlist(playlist_id)
        pl._tracks = get_items(pl.tracks)
        return TidalPlaylistSchema().dump(pl)

    @action
    def get_artist(self, artist_id: Union[str, int]):
        """
        Get the details of an artist.

        :param artist_id: Artist ID.
        :return: .. schema:: tidal.TidalArtistSchema
        """
        ret = self.session.artist(artist_id)
        ret.albums = get_items(ret.get_albums)
        return TidalArtistSchema().dump(ret)
    @action
    def get_album(self, album_id: Union[str, int]):
        """
        Get the details of an album.

        :param album_id: Album ID.
        :return: .. schema:: tidal.TidalAlbumSchema
        """
        ret = self.session.album(album_id)
        return TidalAlbumSchema(with_tracks=True).dump(ret)

    @action
    def get_track(self, track_id: Union[str, int]):
        """
        Get the details of a track.

        :param track_id: Track ID.
        :return: .. schema:: tidal.TidalTrackSchema
        """
        ret = self.session.track(track_id)
        return TidalTrackSchema().dump(ret)
    @action
    def search(
        self,
        query: str,
        limit: int = 50,
        offset: int = 0,
        type: Optional[str] = None,
    ):
        """
        Perform a search.

        :param query: Query string.
        :param limit: Maximum results that should be returned (default: 50).
        :param offset: Search offset (default: 0).
        :param type: Type of results that should be returned. Default: None
            (return all the results that match the query). Supported:
            ``artist``, ``album``, ``track`` and ``playlist``.
        :return: .. schema:: tidal.TidalSearchResultsSchema
        """
        from tidalapi.artist import Artist
        from tidalapi.album import Album
        from tidalapi.media import Track
        from tidalapi.playlist import Playlist

        models = None
        if type is not None:
            if type == 'artist':
                models = [Artist]
            elif type == 'album':
                models = [Album]
            elif type == 'track':
                models = [Track]
            elif type == 'playlist':
                models = [Playlist]
            else:
                raise AssertionError(f'Unsupported search type: {type}')

        ret = self.session.search(query, models=models, limit=limit, offset=offset)

        return TidalSearchResultsSchema().dump(ret)
    @action
    def get_download_url(self, track_id: str) -> str:
        """
        Get the direct download URL of a track.

        :param track_id: Track ID.
        """
        return self.session.track(track_id).get_url()
    @action
    def add_to_playlist(self, playlist_id: str, track_ids: Iterable[Union[str, int]]):
        """
        Append one or more tracks to a playlist.

        :param playlist_id: Target playlist ID.
        :param track_ids: List of track IDs to append.
        """
        pl = self.session.playlist(playlist_id)
        pl.add(track_ids)
    @action
    def remove_from_playlist(
        self,
        playlist_id: str,
        track_id: Optional[Union[str, int]] = None,
        index: Optional[int] = None,
    ):
        """
        Remove a track from a playlist.

        Specify either the ``track_id`` or the ``index``.

        :param playlist_id: Target playlist ID.
        :param track_id: ID of the track to remove.
        :param index: Index of the track to remove.
        """
        assert not (
            track_id is None and index is None
        ), 'Please specify either track_id or index'

        pl = self.session.playlist(playlist_id)
        if index is not None:
            pl.remove_by_index(index)
        if track_id is not None:
            pl.remove_by_id(track_id)
    @action
    def add_track(self, track_id: Union[str, int]):
        """
        Add a track to the user's collection.

        :param track_id: Track ID.
        """
        self.user.favorites.add_track(track_id)

    @action
    def add_album(self, album_id: Union[str, int]):
        """
        Add an album to the user's collection.

        :param album_id: Album ID.
        """
        self.user.favorites.add_album(album_id)

    @action
    def add_artist(self, artist_id: Union[str, int]):
        """
        Add an artist to the user's collection.

        :param artist_id: Artist ID.
        """
        self.user.favorites.add_artist(artist_id)

    @action
    def add_playlist(self, playlist_id: str):
        """
        Add a playlist to the user's collection.

        :param playlist_id: Playlist ID.
        """
        self.user.favorites.add_playlist(playlist_id)

    @action
    def remove_track(self, track_id: Union[str, int]):
        """
        Remove a track from the user's collection.

        :param track_id: Track ID.
        """
        self.user.favorites.remove_track(track_id)

    @action
    def remove_album(self, album_id: Union[str, int]):
        """
        Remove an album from the user's collection.

        :param album_id: Album ID.
        """
        self.user.favorites.remove_album(album_id)

    @action
    def remove_artist(self, artist_id: Union[str, int]):
        """
        Remove an artist from the user's collection.

        :param artist_id: Artist ID.
        """
        self.user.favorites.remove_artist(artist_id)

    @action
    def remove_playlist(self, playlist_id: str):
        """
        Remove a playlist from the user's collection.

        :param playlist_id: Playlist ID.
        """
        self.user.favorites.remove_playlist(playlist_id)
    def main(self):
        while not self.should_stop():
            playlists = self.session.user.playlists()  # type: ignore

            for pl in playlists:
                last_updated_var = Variable(f'TIDAL_PLAYLIST_LAST_UPDATE[{pl.id}]')
                prev_last_updated = last_updated_var.get()
                if prev_last_updated:
                    prev_last_updated = datetime.fromisoformat(prev_last_updated)
                    if pl.last_updated > prev_last_updated:
                        get_bus().post(TidalPlaylistUpdatedEvent(playlist_id=pl.id))

                if not prev_last_updated or pl.last_updated > prev_last_updated:
                    last_updated_var.set(pl.last_updated.isoformat())

            self.wait_stop(self.poll_interval)
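For reference, the new actions can be invoked like any other platypush plugin action. The sketch below is not part of the changeset: the query and the playlist ID are placeholders, and it assumes the usual action response wrapper (`.output`), relying only on the `search` and `add_to_playlist` actions defined above.

    # Hypothetical usage sketch: search Tidal for a track and append the
    # first hit to an existing playlist. IDs and the query are placeholders.
    from platypush.context import get_plugin

    tidal = get_plugin('music.tidal')
    results = tidal.search(query='Black Sabbath Paranoid', type='track').output
    if results.get('tracks'):
        first_track = results['tracks'][0]
        tidal.add_to_playlist(
            playlist_id='<your-playlist-uuid>',
            track_ids=[first_track['id']],
        )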
9
platypush/plugins/music/tidal/manifest.yaml
Normal file

@@ -0,0 +1,9 @@
manifest:
  events:
    - platypush.message.event.music.tidal.TidalPlaylistUpdatedEvent: when a user playlist
      is updated.
  install:
    pip:
      - tidalapi >= 0.7.0
  package: platypush.plugins.music.tidal
  type: plugin
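The manifest above registers `TidalPlaylistUpdatedEvent`, so user hooks can subscribe to it. A minimal sketch is shown below; it assumes the decorator-based hook API (`platypush.event.hook.hook`) and that the event exposes its `playlist_id` argument as an attribute, as platypush events generally do, so treat it as illustrative rather than canonical.

    # Hypothetical user hook (not part of this changeset): log the new size
    # of a playlist whenever Tidal reports that it has been updated.
    from platypush.context import get_plugin
    from platypush.event.hook import hook
    from platypush.message.event.music.tidal import TidalPlaylistUpdatedEvent


    @hook(TidalPlaylistUpdatedEvent)
    def on_tidal_playlist_updated(event, **_):
        playlist = get_plugin('music.tidal').get_playlist(event.playlist_id).output
        print(f"{playlist['name']} now has {len(playlist['tracks'])} tracks")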
56
platypush/plugins/music/tidal/workers.py
Normal file

@@ -0,0 +1,56 @@
from concurrent.futures import ThreadPoolExecutor
from typing import Callable


def func_wrapper(args):
    (f, offset, *args) = args
    items = f(*args)
    return [(i + offset, item) for i, item in enumerate(items)]


def get_items(
    func: Callable,
    *args,
    parse: Callable = lambda _: _,
    chunk_size: int = 100,
    processes: int = 5,
):
    """
    This function performs pagination on a function that supports
    ``limit``/``offset`` parameters, running the API requests in parallel to
    speed things up.
    """
    items = []
    offsets = [-chunk_size]
    remaining = chunk_size * processes

    with ThreadPoolExecutor(
        processes, thread_name_prefix=f'mopidy-tidal-{func.__name__}-'
    ) as pool:
        while remaining == chunk_size * processes:
            offsets = [offsets[-1] + chunk_size * (i + 1) for i in range(processes)]

            pool_results = pool.map(
                func_wrapper,
                [
                    (
                        func,
                        offset,
                        *args,
                        chunk_size,  # limit
                        offset,  # offset
                    )
                    for offset in offsets
                ],
            )

            new_items = []
            for results in pool_results:
                new_items.extend(results)

            remaining = len(new_items)
            items.extend(new_items)

    items = sorted([_ for _ in items if _], key=lambda item: item[0])
    sorted_items = [item[1] for item in items]
    return list(map(parse, sorted_items))
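A minimal sketch of how this helper is meant to be used, mirroring the calls made by the plugin itself (e.g. `get_items(pl.tracks)` in `get_playlist`). It assumes an already-authorized Tidal session, and the playlist ID is a placeholder.

    # Usage sketch: fetch every track of a large playlist in parallel chunks
    # of 100 items each, using 5 worker threads.
    from platypush.context import get_plugin
    from platypush.plugins.music.tidal.workers import get_items

    session = get_plugin('music.tidal').session
    playlist = session.playlist('<playlist-uuid>')

    # playlist.tracks accepts limit/offset arguments, so get_items can
    # paginate it transparently.
    all_tracks = get_items(playlist.tracks, chunk_size=100, processes=5)
    print(f'Fetched {len(all_tracks)} tracks')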
@@ -1,8 +1,13 @@
import datetime
import os
import queue
import re
import threading
import time
from typing import Optional, Collection

from dateutil.tz import tzutc
from typing import Iterable, Optional, Collection, Set
from xml.etree import ElementTree

import dateutil.parser
import requests

@@ -24,56 +29,67 @@ class RssPlugin(RunnablePlugin):
    Requires:

        * **feedparser** (``pip install feedparser``)
        * **defusedxml** (``pip install defusedxml``)

    """

    user_agent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' + \
        'Chrome/62.0.3202.94 Safari/537.36'
    user_agent = (
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) '
        + 'Chrome/62.0.3202.94 Safari/537.36'
    )

    def __init__(
        self, subscriptions: Optional[Collection[str]] = None, poll_seconds: int = 300,
        user_agent: str = user_agent, **kwargs
        self,
        subscriptions: Optional[Collection[str]] = None,
        poll_seconds: int = 300,
        user_agent: str = user_agent,
        **kwargs,
    ):
        """
        :param subscriptions: List of feeds to monitor for updates, as URLs.
            OPML URLs/local files are also supported.
        :param poll_seconds: How often we should check for updates (default: 300 seconds).
        :param user_agent: Custom user agent to use for the requests.
        """
        super().__init__(**kwargs)
        self.subscriptions = subscriptions or []
        self.poll_seconds = poll_seconds
        self.user_agent = user_agent
        self._latest_timestamps = self._get_latest_timestamps()
        self._feeds_metadata = {}
        self._feed_worker_queues = [queue.Queue()] * 5
        self._feed_response_queue = queue.Queue()
        self._feed_workers = []
        self._latest_entries = []

        self.subscriptions = list(self._parse_subscriptions(subscriptions or []))

        self._latest_timestamps = self._get_latest_timestamps()

    @staticmethod
    def _get_feed_latest_timestamp_varname(url: str) -> str:
        return f'LATEST_FEED_TIMESTAMP[{url}]'

    @classmethod
    def _get_feed_latest_timestamp(cls, url: str) -> Optional[datetime.datetime]:
        t = get_plugin('variable').get(
            cls._get_feed_latest_timestamp_varname(url)
        ).output.get(cls._get_feed_latest_timestamp_varname(url))
        t = (
            get_plugin('variable')
            .get(cls._get_feed_latest_timestamp_varname(url))
            .output.get(cls._get_feed_latest_timestamp_varname(url))
        )

        if t:
            return dateutil.parser.isoparse(t)

    def _get_latest_timestamps(self) -> dict:
        return {
            url: self._get_feed_latest_timestamp(url)
            for url in self.subscriptions
        }
        return {url: self._get_feed_latest_timestamp(url) for url in self.subscriptions}

    def _update_latest_timestamps(self) -> None:
        variable = get_plugin('variable')
        variable.set(**{
            self._get_feed_latest_timestamp_varname(url): latest_timestamp
            for url, latest_timestamp in self._latest_timestamps.items()
        })
        variable.set(
            **{
                self._get_feed_latest_timestamp_varname(url): latest_timestamp
                for url, latest_timestamp in self._latest_timestamps.items()
            }
        )

    @staticmethod
    def _parse_content(entry) -> Optional[str]:
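As an aside, the per-feed bookmark handled above is just an entry in the `variable` plugin keyed on the feed URL. A small sketch of the same round-trip (the URL and the timestamp are placeholders):

    # Store and read back a feed's latest-seen timestamp the same way the
    # plugin does above. Variable name layout taken from the code in the diff.
    from platypush.context import get_plugin

    variable = get_plugin('variable')
    varname = 'LATEST_FEED_TIMESTAMP[https://example.org/feed.xml]'

    variable.set(**{varname: '2022-09-18T10:00:00+00:00'})
    stored = variable.get(varname).output.get(varname)
    print(stored)  # -> '2022-09-18T10:00:00+00:00'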
@@ -96,23 +112,30 @@ class RssPlugin(RunnablePlugin):
        """
        import feedparser

        feed = feedparser.parse(requests.get(url, headers={'User-Agent': self.user_agent}).text)
        feed = feedparser.parse(
            requests.get(url, headers={'User-Agent': self.user_agent}).text
        )
        return RssFeedEntrySchema().dump(
            sorted([
                {
                    'feed_url': url,
                    'feed_title': getattr(feed.feed, 'title', None),
                    'id': getattr(entry, 'id', None),
                    'url': entry.link,
                    'published': datetime.datetime.fromtimestamp(time.mktime(entry.published_parsed)),
                    'title': entry.title,
                    'summary': getattr(entry, 'summary', None),
                    'content': self._parse_content(entry),
                }
                for entry in feed.entries
                if getattr(entry, 'published_parsed', None)
            ], key=lambda e: e['published']),
            many=True
            sorted(
                [
                    {
                        'feed_url': url,
                        'feed_title': getattr(feed.feed, 'title', None),
                        'id': getattr(entry, 'id', None),
                        'url': entry.link,
                        'published': datetime.datetime.fromtimestamp(
                            time.mktime(entry.published_parsed)
                        ),
                        'title': entry.title,
                        'summary': getattr(entry, 'summary', None),
                        'content': self._parse_content(entry),
                    }
                    for entry in feed.entries
                    if getattr(entry, 'published_parsed', None)
                ],
                key=lambda e: e['published'],
            ),
            many=True,
        )

    @action

@@ -123,7 +146,9 @@ class RssPlugin(RunnablePlugin):
        :param limit: Maximum number of entries to return (default: 20).
        :return: .. schema:: rss.RssFeedEntrySchema(many=True)
        """
        return sorted(self._latest_entries, key=lambda e: e['published'], reverse=True)[:limit]
        return sorted(self._latest_entries, key=lambda e: e['published'], reverse=True)[
            :limit
        ]

    def _feed_worker(self, q: queue.Queue):
        while not self.should_stop():
@@ -133,18 +158,157 @@ class RssPlugin(RunnablePlugin):
                continue

            try:
                self._feed_response_queue.put({
                    'url': url,
                    'content': self.parse_feed(url).output,
                })
                self._feed_response_queue.put(
                    {
                        'url': url,
                        'content': self.parse_feed(url).output,
                    }
                )
            except Exception as e:
                self._feed_response_queue.put({
                    'url': url,
                    'error': e,
                })
                self._feed_response_queue.put(
                    {
                        'url': url,
                        'error': e,
                    }
                )

        self._feed_response_queue.put(None)

    def _parse_opml_lists(self, subs: Iterable[str]) -> Set[str]:
        from defusedxml import ElementTree

        feeds = set()
        subs = set(subs)
        content_by_sub = {}
        urls = {sub for sub in subs if re.search(r'^https?://', sub)}
        files = {os.path.expanduser(sub) for sub in subs if sub not in urls}

        for url in urls:
            try:
                content_by_sub[url] = requests.get(
                    url,
                    headers={
                        'User-Agent': self.user_agent,
                    },
                ).text
            except Exception as e:
                self.logger.warning('Could not retrieve subscription %s: %s', url, e)

        for file in files:
            try:
                with open(file, 'r') as f:
                    content_by_sub[file] = f.read()
            except Exception as e:
                self.logger.warning('Could not open file %s: %s', file, e)

        for sub, content in content_by_sub.items():
            root = ElementTree.fromstring(content.strip())
            if root.tag != 'opml':
                self.logger.warning('%s is not a valid OPML resource', sub)
                continue

            feeds.update(self._parse_feeds_from_outlines(root.findall('body/outline')))

        return feeds

    def _parse_feeds_from_outlines(
        self,
        outlines: Iterable[ElementTree.Element],
    ) -> Set[str]:
        feeds = set()
        outlines = list(outlines)

        while outlines:
            outline = outlines.pop(0)
            if 'xmlUrl' in outline.attrib:
                url = outline.attrib['xmlUrl']
                feeds.add(url)
                self._feeds_metadata[url] = {
                    **self._feeds_metadata.get(url, {}),
                    'title': outline.attrib.get('title'),
                    'description': outline.attrib.get('text'),
                    'url': outline.attrib.get('htmlUrl'),
                }

            for i, child in enumerate(outline.iter()):
                if i > 0:
                    outlines.append(child)

        return feeds

    def _parse_subscriptions(self, subs: Iterable[str]) -> Iterable[str]:
        import feedparser

        self.logger.info('Parsing feed subscriptions')
        feeds = set()
        lists = set()

        for sub in subs:
            try:
                # Check if it's an OPML list of feeds or an individual feed
                feed = feedparser.parse(sub)
                if feed.feed.get('opml'):
                    lists.add(sub)
                else:
                    channel = feed.get('channel', {})
                    self._feeds_metadata[sub] = {
                        **self._feeds_metadata.get(sub, {}),
                        'title': channel.get('title'),
                        'description': channel.get('description'),
                        'url': channel.get('link'),
                    }

                    feeds.add(sub)
            except Exception as e:
                self.logger.warning('Could not parse %s: %s', sub, e)

        feeds.update(self._parse_opml_lists(lists))
        return feeds

    @staticmethod
    def _datetime_to_string(dt: datetime.datetime) -> str:
        return dt.replace(tzinfo=tzutc()).strftime('%a, %d %b %Y %H:%M:%S %Z')

    @action
    def export_to_opml(self) -> str:
        """
        Export the list of subscriptions into OPML format.

        :return: The list of subscriptions as a string in OPML format.
        """
        root = ElementTree.Element('opml', {'version': '2.0'})

        head = ElementTree.Element('head')
        title = ElementTree.Element('title')
        title.text = 'Platypush feed subscriptions'
        created = ElementTree.Element('dateCreated')
        created.text = self._datetime_to_string(datetime.datetime.utcnow())
        head.append(title)
        head.append(created)

        body = ElementTree.Element('body')
        feeds = ElementTree.Element('outline', {'text': 'Feeds'})

        for sub in self.subscriptions:
            metadata = self._feeds_metadata.get(sub, {})
            feed = ElementTree.Element(
                'outline',
                {
                    'xmlUrl': sub,
                    'text': metadata.get('description', metadata.get('title', sub)),
                    **({'htmlUrl': metadata['url']} if metadata.get('url') else {}),
                    **({'title': metadata['title']} if metadata.get('title') else {}),
                },
            )

            feeds.append(feed)

        body.append(feeds)

        root.append(head)
        root.append(body)
        return ElementTree.tostring(root, encoding='utf-8', method='xml').decode()

    def main(self):
        self._feed_workers = [
            threading.Thread(target=self._feed_worker, args=(q,))
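The OPML support introduced here only inspects `body/outline` elements and their `xmlUrl`/`title`/`text`/`htmlUrl` attributes. A minimal sketch of an input it would accept, run through the same `ElementTree` entry point; the URL and titles are placeholders.

    # Hypothetical OPML document compatible with the parser above.
    from xml.etree import ElementTree

    opml = """
    <opml version="2.0">
      <head><title>My subscriptions</title></head>
      <body>
        <outline text="News">
          <outline title="Example feed" text="An example feed"
                   xmlUrl="https://example.org/feed.xml"
                   htmlUrl="https://example.org"/>
        </outline>
      </body>
    </opml>
    """

    root = ElementTree.fromstring(opml.strip())
    assert root.tag == 'opml'
    # The plugin walks body/outline recursively and collects every xmlUrl it finds.
    print([o.attrib['xmlUrl'] for o in root.iter('outline') if 'xmlUrl' in o.attrib])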
@@ -154,12 +318,16 @@ class RssPlugin(RunnablePlugin):
        for worker in self._feed_workers:
            worker.start()

        self.logger.info(f'Initialized RSS plugin with {len(self.subscriptions)} subscriptions')
        self.logger.info(
            f'Initialized RSS plugin with {len(self.subscriptions)} subscriptions'
        )

        while not self.should_stop():
            responses = {}
            for i, url in enumerate(self.subscriptions):
                worker_queue = self._feed_worker_queues[i % len(self._feed_worker_queues)]
                worker_queue = self._feed_worker_queues[
                    i % len(self._feed_worker_queues)
                ]
                worker_queue.put(url)

            time_start = time.time()

@@ -168,12 +336,14 @@ class RssPlugin(RunnablePlugin):
            new_entries = []

            while (
                not self.should_stop() and
                len(responses) < len(self.subscriptions) and
                time.time() - time_start <= timeout
                not self.should_stop()
                and len(responses) < len(self.subscriptions)
                and time.time() - time_start <= timeout
            ):
                try:
                    response = self._feed_response_queue.get(block=True, timeout=max_time-time_start)
                    response = self._feed_response_queue.get(
                        block=True, timeout=max_time - time_start
                    )
                except queue.Empty:
                    self.logger.warning('RSS parse timeout')
                    break

@@ -189,7 +359,9 @@ class RssPlugin(RunnablePlugin):
                else:
                    responses[url] = response['content']

            responses = {k: v for k, v in responses.items() if not isinstance(v, Exception)}
            responses = {
                k: v for k, v in responses.items() if not isinstance(v, Exception)
            }

            for url, response in responses.items():
                latest_timestamp = self._latest_timestamps.get(url)

@@ -205,7 +377,7 @@ class RssPlugin(RunnablePlugin):

            self._update_latest_timestamps()
            self._latest_entries = new_entries
            time.sleep(self.poll_seconds)
            self.wait_stop(self.poll_seconds)

    def stop(self):
        super().stop()
@@ -4,5 +4,6 @@ manifest:
  install:
    pip:
      - feedparser
      - defusedxml
  package: platypush.plugins.rss
  type: plugin
@@ -37,7 +37,7 @@ class TorrentPlugin(Plugin):
    torrent_state = {}
    transfers = {}
    # noinspection HttpUrlsUsage
    default_popcorn_base_url = 'http://popcorn-ru.tk'
    default_popcorn_base_url = 'http://popcorn-time.ga'

    def __init__(self, download_dir=None, torrent_ports=None, imdb_key=None, popcorn_base_url=default_popcorn_base_url,
                 **kwargs):
228
platypush/schemas/tidal.py
Normal file

@@ -0,0 +1,228 @@
from marshmallow import Schema, fields, pre_dump, post_dump

from platypush.schemas import DateTime


class TidalSchema(Schema):
    pass


class TidalArtistSchema(TidalSchema):
    id = fields.String(
        required=True,
        dump_only=True,
        metadata={
            'example': '3288612',
            'description': 'Artist ID',
        },
    )

    url = fields.String(
        required=True,
        dump_only=True,
        metadata={
            'description': 'Artist Tidal URL',
            'example': 'https://tidal.com/artist/3288612',
        },
    )

    name = fields.String(required=True)

    @pre_dump
    def _prefill_url(self, data, *_, **__):
        data.url = f'https://tidal.com/artist/{data.id}'
        return data


class TidalAlbumSchema(TidalSchema):
    def __init__(self, *args, with_tracks=False, **kwargs):
        super().__init__(*args, **kwargs)
        self._with_tracks = with_tracks

    id = fields.String(
        required=True,
        dump_only=True,
        metadata={
            'example': '45288612',
            'description': 'Album ID',
        },
    )

    url = fields.String(
        required=True,
        dump_only=True,
        metadata={
            'description': 'Album Tidal URL',
            'example': 'https://tidal.com/album/45288612',
        },
    )

    name = fields.String(required=True)
    artist = fields.Nested(TidalArtistSchema)
    duration = fields.Int(metadata={'description': 'Album duration, in seconds'})
    year = fields.Integer(metadata={'example': 2003})
    num_tracks = fields.Int(metadata={'example': 10})
    tracks = fields.List(fields.Dict(), attribute='_tracks')

    @pre_dump
    def _prefill_url(self, data, *_, **__):
        data.url = f'https://tidal.com/album/{data.id}'
        return data

    @pre_dump
    def _cache_tracks(self, data, *_, **__):
        if self._with_tracks:
            album_id = str(data.id)
            self.context[album_id] = {
                'tracks': data.tracks(),
            }

        return data

    @post_dump
    def _dump_tracks(self, data, *_, **__):
        if self._with_tracks:
            album_id = str(data['id'])
            ctx = self.context.pop(album_id, {})
            data['tracks'] = TidalTrackSchema().dump(ctx.pop('tracks', []), many=True)

        return data


class TidalTrackSchema(TidalSchema):
    id = fields.String(
        required=True,
        dump_only=True,
        metadata={
            'example': '25288614',
            'description': 'Track ID',
        },
    )

    url = fields.String(
        required=True,
        dump_only=True,
        metadata={
            'description': 'Track Tidal URL',
            'example': 'https://tidal.com/track/25288614',
        },
    )

    artist = fields.Nested(TidalArtistSchema)
    album = fields.Nested(TidalAlbumSchema)
    name = fields.String(metadata={'description': 'Track title'})
    duration = fields.Int(metadata={'description': 'Track duration, in seconds'})
    track_num = fields.Int(
        metadata={'description': 'Index of the track within the album'}
    )

    @pre_dump
    def _prefill_url(self, data, *_, **__):
        data.url = f'https://tidal.com/track/{data.id}'
        return data


class TidalPlaylistSchema(TidalSchema):
    id = fields.String(
        required=True,
        dump_only=True,
        attribute='uuid',
        metadata={
            'example': '2b288612-34f5-11ed-b42d-001500e8f607',
            'description': 'Playlist ID',
        },
    )

    url = fields.String(
        required=True,
        dump_only=True,
        metadata={
            'description': 'Playlist Tidal URL',
            'example': 'https://tidal.com/playlist/2b288612-34f5-11ed-b42d-001500e8f607',
        },
    )

    name = fields.String(required=True)
    description = fields.String()
    duration = fields.Int(metadata={'description': 'Playlist duration, in seconds'})
    public = fields.Boolean(attribute='publicPlaylist')
    owner = fields.String(
        attribute='creator',
        metadata={
            'description': 'Playlist creator/owner ID',
        },
    )

    num_tracks = fields.Int(
        attribute='numberOfTracks',
        default=0,
        metadata={
            'example': 42,
            'description': 'Number of tracks in the playlist',
        },
    )

    created_at = DateTime(
        attribute='created',
        metadata={
            'description': 'When the playlist was created',
        },
    )

    last_updated_at = DateTime(
        attribute='lastUpdated',
        metadata={
            'description': 'When the playlist was last updated',
        },
    )

    tracks = fields.Nested(TidalTrackSchema, many=True)

    def _flatten_object(self, data, *_, **__):
        if not isinstance(data, dict):
            data = {
                'created': data.created,
                'creator': data.creator.id,
                'description': data.description,
                'duration': data.duration,
                'lastUpdated': data.last_updated,
                'uuid': data.id,
                'name': data.name,
                'numberOfTracks': data.num_tracks,
                'publicPlaylist': data.public,
                'tracks': getattr(data, '_tracks', []),
            }

        return data

    def _normalize_owner(self, data, *_, **__):
        owner = data.pop('owner', data.pop('creator', None))
        if owner:
            if isinstance(owner, dict):
                owner = owner['id']
            data['creator'] = owner

        return data

    def _normalize_name(self, data, *_, **__):
        if data.get('title'):
            data['name'] = data.pop('title')
        return data

    @pre_dump
    def normalize(self, data, *_, **__):
        if not isinstance(data, dict):
            data = self._flatten_object(data)

        self._normalize_name(data)
        self._normalize_owner(data)
        if 'tracks' not in data:
            data['tracks'] = []
        return data


class TidalSearchResultsSchema(TidalSchema):
    artists = fields.Nested(TidalArtistSchema, many=True)
    albums = fields.Nested(TidalAlbumSchema, many=True)
    tracks = fields.Nested(TidalTrackSchema, many=True)
    playlists = fields.Nested(TidalPlaylistSchema, many=True)
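To illustrate the normalization performed by `TidalPlaylistSchema.normalize`, the sketch below dumps a raw, API-style dict; all the values are placeholders, and the printed keys follow the field definitions above ('title' becomes 'name', the owner dict collapses to its ID, 'numberOfTracks' surfaces as 'num_tracks').

    # Sketch of a schema dump over a plain dict instead of a tidalapi object.
    from platypush.schemas.tidal import TidalPlaylistSchema

    raw = {
        'uuid': '2b288612-34f5-11ed-b42d-001500e8f607',
        'title': 'Morning mix',       # normalized to 'name'
        'owner': {'id': '12345'},     # normalized to the 'creator' attribute
        'numberOfTracks': 42,
        'publicPlaylist': False,
    }

    dumped = TidalPlaylistSchema().dump(raw)
    print(dumped['name'], dumped['owner'], dumped['num_tracks'])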
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.23.4
current_version = 0.23.6
commit = True
tag = True
6
setup.py
@@ -28,7 +28,7 @@ backend = pkg_files('platypush/backend')

setup(
    name="platypush",
    version="0.23.4",
    version="0.23.6",
    author="Fabio Manganiello",
    author_email="info@fabiomanganiello.com",
    description="Platypush service",

@@ -64,7 +64,7 @@ setup(
        'zeroconf>=0.27.0',
        'tz',
        'python-dateutil',
        'cryptography',
        # 'cryptography',
        'pyjwt',
        'marshmallow',
        'frozendict',

@@ -86,7 +86,7 @@ setup(
        # Support for MQTT backends
        'mqtt': ['paho-mqtt'],
        # Support for RSS feeds parser
        'rss': ['feedparser'],
        'rss': ['feedparser', 'defusedxml'],
        # Support for PDF generation
        'pdf': ['weasyprint'],
        # Support for Philips Hue plugin