Merge branch 'master' into 191-support-for-general-entities-backend-and-plugin

Commit d22fbcd9db by Fabio Manganiello, 2022-04-28 01:58:24 +02:00
Signed by: blacklight (GPG Key ID: D90FBA7F76362774)
15 changed files with 647 additions and 248 deletions


@@ -1,3 +1,4 @@
 recursive-include platypush/backend/http/webapp/dist *
 include platypush/plugins/http/webpage/mercury-parser.js
+include platypush/config/*.yaml
 global-include manifest.yaml


@@ -91,14 +91,16 @@ class HttpBackend(Backend):
                 other music plugin enabled. -->
                 <Music class="col-3" />
-                <!-- Show current date, time and weather. It requires a `weather` plugin or backend enabled -->
+                <!-- Show current date, time and weather.
+                     It requires a `weather` plugin or backend enabled -->
                 <DateTimeWeather class="col-3" />
             </Row>

             <!-- Display the following widgets on a second row -->
             <Row>
                 <!-- Show a carousel of images from a local folder. For security reasons, the folder must be
-                     explicitly exposed as an HTTP resource through the backend `resource_dirs` attribute. -->
+                     explicitly exposed as an HTTP resource through the backend
+                     `resource_dirs` attribute. -->
                 <ImageCarousel class="col-6" img-dir="/mnt/hd/photos/carousel" />

                 <!-- Show the news headlines parsed from a list of RSS feed and stored locally through the
@@ -151,11 +153,7 @@ class HttpBackend(Backend):
     Requires:

-        * **flask** (``pip install flask``)
-        * **bcrypt** (``pip install bcrypt``)
-        * **magic** (``pip install python-magic``), optional, for MIME type
-          support if you want to enable media streaming
-        * **gunicorn** (``pip install gunicorn``) - optional but recommended.
+        * **gunicorn** (``pip install gunicorn``) - optional, to run the Platypush webapp over uWSGI.
           By default the Platypush web server will run in a
           process spawned on the fly by the HTTP backend. However, being a
@@ -174,12 +172,22 @@ class HttpBackend(Backend):
     _DEFAULT_HTTP_PORT = 8008
     _DEFAULT_WEBSOCKET_PORT = 8009

-    def __init__(self, port=_DEFAULT_HTTP_PORT,
-                 websocket_port=_DEFAULT_WEBSOCKET_PORT,
-                 bind_address='0.0.0.0',
-                 disable_websocket=False, resource_dirs=None,
-                 ssl_cert=None, ssl_key=None, ssl_cafile=None, ssl_capath=None,
-                 maps=None, run_externally=False, uwsgi_args=None, **kwargs):
+    def __init__(
+        self,
+        port=_DEFAULT_HTTP_PORT,
+        websocket_port=_DEFAULT_WEBSOCKET_PORT,
+        bind_address='0.0.0.0',
+        disable_websocket=False,
+        resource_dirs=None,
+        ssl_cert=None,
+        ssl_key=None,
+        ssl_cafile=None,
+        ssl_capath=None,
+        maps=None,
+        run_externally=False,
+        uwsgi_args=None,
+        **kwargs
+    ):
         """
         :param port: Listen port for the web server (default: 8008)
         :type port: int
@ -246,26 +254,37 @@ class HttpBackend(Backend):
self.bind_address = bind_address self.bind_address = bind_address
if resource_dirs: if resource_dirs:
self.resource_dirs = {name: os.path.abspath( self.resource_dirs = {
os.path.expanduser(d)) for name, d in resource_dirs.items()} name: os.path.abspath(os.path.expanduser(d))
for name, d in resource_dirs.items()
}
else: else:
self.resource_dirs = {} self.resource_dirs = {}
self.active_websockets = set() self.active_websockets = set()
self.run_externally = run_externally self.run_externally = run_externally
self.uwsgi_args = uwsgi_args or [] self.uwsgi_args = uwsgi_args or []
self.ssl_context = get_ssl_server_context(ssl_cert=ssl_cert, self.ssl_context = (
ssl_key=ssl_key, get_ssl_server_context(
ssl_cafile=ssl_cafile, ssl_cert=ssl_cert,
ssl_capath=ssl_capath) \ ssl_key=ssl_key,
if ssl_cert else None ssl_cafile=ssl_cafile,
ssl_capath=ssl_capath,
)
if ssl_cert
else None
)
if self.uwsgi_args: if self.uwsgi_args:
self.uwsgi_args = [str(_) for _ in self.uwsgi_args] + \ self.uwsgi_args = [str(_) for _ in self.uwsgi_args] + [
['--module', 'platypush.backend.http.uwsgi', '--enable-threads'] '--module',
'platypush.backend.http.uwsgi',
'--enable-threads',
]
self.local_base_url = '{proto}://localhost:{port}'.\ self.local_base_url = '{proto}://localhost:{port}'.format(
format(proto=('https' if ssl_cert else 'http'), port=self.port) proto=('https' if ssl_cert else 'http'), port=self.port
)
self._websocket_lock_timeout = 10 self._websocket_lock_timeout = 10
self._websocket_lock = threading.RLock() self._websocket_lock = threading.RLock()
@ -275,7 +294,7 @@ class HttpBackend(Backend):
self.logger.warning('Use cURL or any HTTP client to query the HTTP backend') self.logger.warning('Use cURL or any HTTP client to query the HTTP backend')
def on_stop(self): def on_stop(self):
""" On backend stop """ """On backend stop"""
super().on_stop() super().on_stop()
self.logger.info('Received STOP event on HttpBackend') self.logger.info('Received STOP event on HttpBackend')
@ -284,7 +303,9 @@ class HttpBackend(Backend):
self.server_proc.kill() self.server_proc.kill()
self.server_proc.wait(timeout=10) self.server_proc.wait(timeout=10)
if self.server_proc.poll() is not None: if self.server_proc.poll() is not None:
self.logger.info('HTTP server process may be still alive at termination') self.logger.info(
'HTTP server process may be still alive at termination'
)
else: else:
self.logger.info('HTTP server process terminated') self.logger.info('HTTP server process terminated')
else: else:
@ -293,17 +314,25 @@ class HttpBackend(Backend):
if self.server_proc.is_alive(): if self.server_proc.is_alive():
self.server_proc.kill() self.server_proc.kill()
if self.server_proc.is_alive(): if self.server_proc.is_alive():
self.logger.info('HTTP server process may be still alive at termination') self.logger.info(
'HTTP server process may be still alive at termination'
)
else: else:
self.logger.info('HTTP server process terminated') self.logger.info('HTTP server process terminated')
if self.websocket_thread and self.websocket_thread.is_alive() and self._websocket_loop: if (
self.websocket_thread
and self.websocket_thread.is_alive()
and self._websocket_loop
):
self._websocket_loop.stop() self._websocket_loop.stop()
self.logger.info('HTTP websocket service terminated') self.logger.info('HTTP websocket service terminated')
def _acquire_websocket_lock(self, ws): def _acquire_websocket_lock(self, ws):
try: try:
acquire_ok = self._websocket_lock.acquire(timeout=self._websocket_lock_timeout) acquire_ok = self._websocket_lock.acquire(
timeout=self._websocket_lock_timeout
)
if not acquire_ok: if not acquire_ok:
raise TimeoutError('Websocket lock acquire timeout') raise TimeoutError('Websocket lock acquire timeout')
@ -313,13 +342,19 @@ class HttpBackend(Backend):
finally: finally:
self._websocket_lock.release() self._websocket_lock.release()
acquire_ok = self._websocket_locks[addr].acquire(timeout=self._websocket_lock_timeout) acquire_ok = self._websocket_locks[addr].acquire(
timeout=self._websocket_lock_timeout
)
if not acquire_ok: if not acquire_ok:
raise TimeoutError('Websocket on address {} not ready to receive data'.format(addr)) raise TimeoutError(
'Websocket on address {} not ready to receive data'.format(addr)
)
def _release_websocket_lock(self, ws): def _release_websocket_lock(self, ws):
try: try:
acquire_ok = self._websocket_lock.acquire(timeout=self._websocket_lock_timeout) acquire_ok = self._websocket_lock.acquire(
timeout=self._websocket_lock_timeout
)
if not acquire_ok: if not acquire_ok:
raise TimeoutError('Websocket lock acquire timeout') raise TimeoutError('Websocket lock acquire timeout')
@ -327,12 +362,15 @@ class HttpBackend(Backend):
if addr in self._websocket_locks: if addr in self._websocket_locks:
self._websocket_locks[addr].release() self._websocket_locks[addr].release()
except Exception as e: except Exception as e:
self.logger.warning('Unhandled exception while releasing websocket lock: {}'.format(str(e))) self.logger.warning(
'Unhandled exception while releasing websocket lock: {}'.format(str(e))
)
finally: finally:
self._websocket_lock.release() self._websocket_lock.release()
def notify_web_clients(self, event): def notify_web_clients(self, event):
""" Notify all the connected web clients (over websocket) of a new event """ """Notify all the connected web clients (over websocket) of a new event"""
async def send_event(ws): async def send_event(ws):
try: try:
self._acquire_websocket_lock(ws) self._acquire_websocket_lock(ws)
@ -349,26 +387,35 @@ class HttpBackend(Backend):
try: try:
loop.run_until_complete(send_event(_ws)) loop.run_until_complete(send_event(_ws))
except ConnectionClosed: except ConnectionClosed:
self.logger.warning('Websocket client {} connection lost'.format(_ws.remote_address)) self.logger.warning(
'Websocket client {} connection lost'.format(_ws.remote_address)
)
self.active_websockets.remove(_ws) self.active_websockets.remove(_ws)
if _ws.remote_address in self._websocket_locks: if _ws.remote_address in self._websocket_locks:
del self._websocket_locks[_ws.remote_address] del self._websocket_locks[_ws.remote_address]
def websocket(self): def websocket(self):
""" Websocket main server """ """Websocket main server"""
set_thread_name('WebsocketServer') set_thread_name('WebsocketServer')
async def register_websocket(websocket, path): async def register_websocket(websocket, path):
address = websocket.remote_address if websocket.remote_address \ address = (
websocket.remote_address
if websocket.remote_address
else '<unknown client>' else '<unknown client>'
)
self.logger.info('New websocket connection from {} on path {}'.format(address, path)) self.logger.info(
'New websocket connection from {} on path {}'.format(address, path)
)
self.active_websockets.add(websocket) self.active_websockets.add(websocket)
try: try:
await websocket.recv() await websocket.recv()
except ConnectionClosed: except ConnectionClosed:
self.logger.info('Websocket client {} closed connection'.format(address)) self.logger.info(
'Websocket client {} closed connection'.format(address)
)
self.active_websockets.remove(websocket) self.active_websockets.remove(websocket)
if address in self._websocket_locks: if address in self._websocket_locks:
del self._websocket_locks[address] del self._websocket_locks[address]
@ -379,8 +426,13 @@ class HttpBackend(Backend):
self._websocket_loop = get_or_create_event_loop() self._websocket_loop = get_or_create_event_loop()
self._websocket_loop.run_until_complete( self._websocket_loop.run_until_complete(
websocket_serve(register_websocket, self.bind_address, self.websocket_port, websocket_serve(
**websocket_args)) register_websocket,
self.bind_address,
self.websocket_port,
**websocket_args
)
)
self._websocket_loop.run_forever() self._websocket_loop.run_forever()
def _start_web_server(self): def _start_web_server(self):
@ -415,8 +467,9 @@ class HttpBackend(Backend):
self.websocket_thread.start() self.websocket_thread.start()
if not self.run_externally: if not self.run_externally:
self.server_proc = Process(target=self._start_web_server(), self.server_proc = Process(
name='WebServer') target=self._start_web_server(), name='WebServer'
)
self.server_proc.start() self.server_proc.start()
self.server_proc.join() self.server_proc.join()
elif self.uwsgi_args: elif self.uwsgi_args:
@ -424,9 +477,11 @@ class HttpBackend(Backend):
self.logger.info('Starting uWSGI with arguments {}'.format(uwsgi_cmd)) self.logger.info('Starting uWSGI with arguments {}'.format(uwsgi_cmd))
self.server_proc = subprocess.Popen(uwsgi_cmd) self.server_proc = subprocess.Popen(uwsgi_cmd)
else: else:
self.logger.info('The web server is configured to be launched externally but ' + self.logger.info(
'no uwsgi_args were provided. Make sure that you run another external service' + 'The web server is configured to be launched externally but '
'for the webserver (e.g. nginx)') + 'no uwsgi_args were provided. Make sure that you run another external service'
+ 'for the webserver (e.g. nginx)'
)
# vim:sw=4:ts=4:et: # vim:sw=4:ts=4:et:


@@ -2,9 +2,6 @@ manifest:
   events: {}
   install:
     pip:
-      - flask
-      - bcrypt
-      - python-magic
       - gunicorn
   package: platypush.backend.http
   type: backend


@@ -26,7 +26,7 @@ class LinodeBackend(SensorBackend):
         self.instances = set(instances or [])

     def process_data(self, data: Dict[str, dict], new_data: Optional[Dict[str, dict]] = None, **kwargs):
-        instances = data['instances']
+        instances = data.get('instances', {})
         old_instances = (self.data or {}).get('instances', {})

         if self.instances:
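A one-line illustration of why the lookup above was made defensive (the empty payload below is hypothetical):

data = {}                              # e.g. a poll result with no 'instances' key
instances = data.get('instances', {})  # yields {} instead of raising KeyError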


@ -1,4 +1,5 @@
import datetime import datetime
import glob
import importlib import importlib
import inspect import inspect
import logging import logging
@ -6,19 +7,25 @@ import os
import pathlib import pathlib
import pkgutil import pkgutil
import re import re
import shutil
import socket import socket
import sys import sys
from typing import Optional from typing import Optional
import yaml import yaml
from platypush.utils import get_hash, is_functional_procedure, is_functional_hook, is_functional_cron from platypush.utils import (
get_hash,
is_functional_procedure,
is_functional_hook,
is_functional_cron,
)
""" Config singleton instance """ """ Config singleton instance """
_default_config_instance = None _default_config_instance = None
class Config(object): class Config:
""" """
Configuration base class Configuration base class
Usage: Usage:
@ -45,7 +52,9 @@ class Config(object):
'now': datetime.datetime.now, 'now': datetime.datetime.now,
} }
_workdir_location = os.path.join(os.path.expanduser('~'), '.local', 'share', 'platypush') _workdir_location = os.path.join(
os.path.expanduser('~'), '.local', 'share', 'platypush'
)
_included_files = set() _included_files = set()
def __init__(self, cfgfile=None): def __init__(self, cfgfile=None):
@ -61,14 +70,12 @@ class Config(object):
cfgfile = self._get_default_cfgfile() cfgfile = self._get_default_cfgfile()
if cfgfile is None: if cfgfile is None:
raise RuntimeError('No config file specified and nothing found in {}' cfgfile = self._create_default_config()
.format(self._cfgfile_locations))
self._cfgfile = os.path.abspath(os.path.expanduser(cfgfile)) self._cfgfile = os.path.abspath(os.path.expanduser(cfgfile))
self._config = self._read_config_file(self._cfgfile) self._config = self._read_config_file(self._cfgfile)
if 'token' in self._config: if 'token' in self._config:
self._config['token'] = self._config['token']
self._config['token_hash'] = get_hash(self._config['token']) self._config['token_hash'] = get_hash(self._config['token'])
if 'workdir' not in self._config: if 'workdir' not in self._config:
@ -76,11 +83,15 @@ class Config(object):
os.makedirs(self._config['workdir'], exist_ok=True) os.makedirs(self._config['workdir'], exist_ok=True)
if 'scripts_dir' not in self._config: if 'scripts_dir' not in self._config:
self._config['scripts_dir'] = os.path.join(os.path.dirname(cfgfile), 'scripts') self._config['scripts_dir'] = os.path.join(
os.path.dirname(cfgfile), 'scripts'
)
os.makedirs(self._config['scripts_dir'], mode=0o755, exist_ok=True) os.makedirs(self._config['scripts_dir'], mode=0o755, exist_ok=True)
if 'dashboards_dir' not in self._config: if 'dashboards_dir' not in self._config:
self._config['dashboards_dir'] = os.path.join(os.path.dirname(cfgfile), 'dashboards') self._config['dashboards_dir'] = os.path.join(
os.path.dirname(cfgfile), 'dashboards'
)
os.makedirs(self._config['dashboards_dir'], mode=0o755, exist_ok=True) os.makedirs(self._config['dashboards_dir'], mode=0o755, exist_ok=True)
init_py = os.path.join(self._config['scripts_dir'], '__init__.py') init_py = os.path.join(self._config['scripts_dir'], '__init__.py')
@ -90,13 +101,20 @@ class Config(object):
# Include scripts_dir parent in sys.path so members can be imported in scripts # Include scripts_dir parent in sys.path so members can be imported in scripts
# through the `scripts` package # through the `scripts` package
scripts_parent_dir = str(pathlib.Path(self._config['scripts_dir']).absolute().parent) scripts_parent_dir = str(
pathlib.Path(self._config['scripts_dir']).absolute().parent
)
sys.path = [scripts_parent_dir] + sys.path sys.path = [scripts_parent_dir] + sys.path
self._config['db'] = self._config.get('main.db', { self._config['db'] = self._config.get(
'engine': 'sqlite:///' + os.path.join( 'main.db',
os.path.expanduser('~'), '.local', 'share', 'platypush', 'main.db') {
}) 'engine': 'sqlite:///'
+ os.path.join(
os.path.expanduser('~'), '.local', 'share', 'platypush', 'main.db'
)
},
)
logging_config = { logging_config = {
'level': logging.INFO, 'level': logging.INFO,
@ -112,8 +130,11 @@ class Config(object):
try: try:
os.makedirs(logdir, exist_ok=True) os.makedirs(logdir, exist_ok=True)
except Exception as e: except Exception as e:
print('Unable to create logs directory {}: {}'.format( print(
logdir, str(e))) 'Unable to create logs directory {}: {}'.format(
logdir, str(e)
)
)
v = logfile v = logfile
del logging_config['stream'] del logging_config['stream']
@ -150,9 +171,18 @@ class Config(object):
self._init_components() self._init_components()
self._init_dashboards(self._config['dashboards_dir']) self._init_dashboards(self._config['dashboards_dir'])
def _create_default_config(self):
cfg_mod_dir = os.path.dirname(os.path.abspath(__file__))
cfgfile = self._cfgfile_locations[0]
cfgdir = pathlib.Path(cfgfile).parent
cfgdir.mkdir(parents=True, exist_ok=True)
for cfgfile in glob.glob(os.path.join(cfg_mod_dir, 'config*.yaml')):
shutil.copy(cfgfile, str(cfgdir))
return cfgfile
def _read_config_file(self, cfgfile): def _read_config_file(self, cfgfile):
cfgfile_dir = os.path.dirname(os.path.abspath( cfgfile_dir = os.path.dirname(os.path.abspath(os.path.expanduser(cfgfile)))
os.path.expanduser(cfgfile)))
config = {} config = {}
@ -164,9 +194,11 @@ class Config(object):
for section in file_config: for section in file_config:
if section == 'include': if section == 'include':
include_files = file_config[section] \ include_files = (
if isinstance(file_config[section], list) \ file_config[section]
if isinstance(file_config[section], list)
else [file_config[section]] else [file_config[section]]
)
for include_file in include_files: for include_file in include_files:
if not os.path.isabs(include_file): if not os.path.isabs(include_file):
@ -178,9 +210,13 @@ class Config(object):
config[incl_section] = included_config[incl_section] config[incl_section] = included_config[incl_section]
elif section == 'scripts_dir': elif section == 'scripts_dir':
assert isinstance(file_config[section], str) assert isinstance(file_config[section], str)
config['scripts_dir'] = os.path.abspath(os.path.expanduser(file_config[section])) config['scripts_dir'] = os.path.abspath(
elif 'disabled' not in file_config[section] \ os.path.expanduser(file_config[section])
or file_config[section]['disabled'] is False: )
elif (
'disabled' not in file_config[section]
or file_config[section]['disabled'] is False
):
config[section] = file_config[section] config[section] = file_config[section]
return config return config
@ -189,27 +225,37 @@ class Config(object):
try: try:
module = importlib.import_module(modname) module = importlib.import_module(modname)
except Exception as e: except Exception as e:
print('Unhandled exception while importing module {}: {}'.format(modname, str(e))) print(
'Unhandled exception while importing module {}: {}'.format(
modname, str(e)
)
)
return return
prefix = modname + '.' if prefix is None else prefix prefix = modname + '.' if prefix is None else prefix
self.procedures.update(**{ self.procedures.update(
prefix + name: obj **{
for name, obj in inspect.getmembers(module) prefix + name: obj
if is_functional_procedure(obj) for name, obj in inspect.getmembers(module)
}) if is_functional_procedure(obj)
}
)
self.event_hooks.update(**{ self.event_hooks.update(
prefix + name: obj **{
for name, obj in inspect.getmembers(module) prefix + name: obj
if is_functional_hook(obj) for name, obj in inspect.getmembers(module)
}) if is_functional_hook(obj)
}
)
self.cronjobs.update(**{ self.cronjobs.update(
prefix + name: obj **{
for name, obj in inspect.getmembers(module) prefix + name: obj
if is_functional_cron(obj) for name, obj in inspect.getmembers(module)
}) if is_functional_cron(obj)
}
)
def _load_scripts(self): def _load_scripts(self):
scripts_dir = self._config['scripts_dir'] scripts_dir = self._config['scripts_dir']
@ -218,14 +264,19 @@ class Config(object):
scripts_modname = os.path.basename(scripts_dir) scripts_modname = os.path.basename(scripts_dir)
self._load_module(scripts_modname, prefix='') self._load_module(scripts_modname, prefix='')
for _, modname, _ in pkgutil.walk_packages(path=[scripts_dir], onerror=lambda x: None): for _, modname, _ in pkgutil.walk_packages(
path=[scripts_dir], onerror=lambda _: None
):
self._load_module(modname) self._load_module(modname)
sys.path = sys_path sys.path = sys_path
def _init_components(self): def _init_components(self):
for key in self._config.keys(): for key in self._config.keys():
if key.startswith('backend.') and '.'.join(key.split('.')[1:]) in self._backend_manifests: if (
key.startswith('backend.')
and '.'.join(key.split('.')[1:]) in self._backend_manifests
):
backend_name = '.'.join(key.split('.')[1:]) backend_name = '.'.join(key.split('.')[1:])
self.backends[backend_name] = self._config[key] self.backends[backend_name] = self._config[key]
elif key.startswith('event.hook.'): elif key.startswith('event.hook.'):
@ -236,7 +287,7 @@ class Config(object):
self.cronjobs[cron_name] = self._config[key] self.cronjobs[cron_name] = self._config[key]
elif key.startswith('procedure.'): elif key.startswith('procedure.'):
tokens = key.split('.') tokens = key.split('.')
_async = True if len(tokens) > 2 and tokens[1] == 'async' else False _async = bool(len(tokens) > 2 and tokens[1] == 'async')
procedure_name = '.'.join(tokens[2:] if len(tokens) > 2 else tokens[1:]) procedure_name = '.'.join(tokens[2:] if len(tokens) > 2 else tokens[1:])
args = [] args = []
m = re.match(r'^([^(]+)\(([^)]+)\)\s*', procedure_name) m = re.match(r'^([^(]+)\(([^)]+)\)\s*', procedure_name)
@ -265,7 +316,11 @@ class Config(object):
self._init_manifests(plugins_dir) self._init_manifests(plugins_dir)
self._init_manifests(backends_dir) self._init_manifests(backends_dir)
else: else:
manifests_map = self._plugin_manifests if base_dir.endswith('plugins') else self._backend_manifests manifests_map = (
self._plugin_manifests
if base_dir.endswith('plugins')
else self._backend_manifests
)
for mf in pathlib.Path(base_dir).rglob('manifest.yaml'): for mf in pathlib.Path(base_dir).rglob('manifest.yaml'):
with open(mf, 'r') as f: with open(mf, 'r') as f:
manifest = yaml.safe_load(f)['manifest'] manifest = yaml.safe_load(f)['manifest']
@ -279,12 +334,11 @@ class Config(object):
for (key, value) in self._default_constants.items(): for (key, value) in self._default_constants.items():
self.constants[key] = value self.constants[key] = value
@staticmethod def _get_dashboard(
def get_dashboard(name: str, dashboards_dir: Optional[str] = None) -> Optional[str]: self, name: str, dashboards_dir: Optional[str] = None
global _default_config_instance ) -> Optional[str]:
dashboards_dir = dashboards_dir or self._config['dashboards_dir']
# noinspection PyProtectedMember,PyProtectedMember,PyUnresolvedReferences assert dashboards_dir
dashboards_dir = dashboards_dir or _default_config_instance._config['dashboards_dir']
abspath = os.path.join(dashboards_dir, name + '.xml') abspath = os.path.join(dashboards_dir, name + '.xml')
if not os.path.isfile(abspath): if not os.path.isfile(abspath):
return return
@ -292,24 +346,37 @@ class Config(object):
with open(abspath, 'r') as fp: with open(abspath, 'r') as fp:
return fp.read() return fp.read()
@classmethod def _get_dashboards(self, dashboards_dir: Optional[str] = None) -> dict:
def get_dashboards(cls, dashboards_dir: Optional[str] = None) -> dict:
global _default_config_instance
dashboards = {} dashboards = {}
# noinspection PyProtectedMember,PyProtectedMember,PyUnresolvedReferences dashboards_dir = dashboards_dir or self._config['dashboards_dir']
dashboards_dir = dashboards_dir or _default_config_instance._config['dashboards_dir'] assert dashboards_dir
for f in os.listdir(dashboards_dir): for f in os.listdir(dashboards_dir):
abspath = os.path.join(dashboards_dir, f) abspath = os.path.join(dashboards_dir, f)
if not os.path.isfile(abspath) or not abspath.endswith('.xml'): if not os.path.isfile(abspath) or not abspath.endswith('.xml'):
continue continue
name = f.split('.xml')[0] name = f.split('.xml')[0]
dashboards[name] = cls.get_dashboard(name, dashboards_dir) dashboards[name] = self._get_dashboard(name, dashboards_dir)
return dashboards return dashboards
@staticmethod
def get_dashboard(name: str, dashboards_dir: Optional[str] = None) -> Optional[str]:
global _default_config_instance
if _default_config_instance is None:
_default_config_instance = Config()
return _default_config_instance._get_dashboard(name, dashboards_dir)
@classmethod
def get_dashboards(cls, dashboards_dir: Optional[str] = None) -> dict:
global _default_config_instance
if _default_config_instance is None:
_default_config_instance = Config()
return _default_config_instance._get_dashboards(dashboards_dir)
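A brief usage sketch of the static accessors above; the import path follows this module (platypush/config/__init__.py) and the dashboard name is a hypothetical example:

from platypush.config import Config

# Works even if no Config() object was built beforehand: the static methods
# above lazily create the default instance before delegating to the private
# _get_dashboard()/_get_dashboards() helpers.
all_dashboards = Config.get_dashboards()
main_dashboard = Config.get_dashboard('main')  # 'main' is a hypothetical dashboard name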
def _init_dashboards(self, dashboards_dir: str): def _init_dashboards(self, dashboards_dir: str):
self.dashboards = self.get_dashboards(dashboards_dir) self.dashboards = self._get_dashboards(dashboards_dir)
@staticmethod @staticmethod
def get_backends(): def get_backends():
@ -400,4 +467,5 @@ class Config(object):
return _default_config_instance._config return _default_config_instance._config
# vim:sw=4:ts=4:et: # vim:sw=4:ts=4:et:


@ -0,0 +1,6 @@
# Auto-generated configuration file.
# Do not edit manually - use the config.yaml file for manual modifications
# instead
backend.http:
enabled: True


@ -0,0 +1,2 @@
include:
- config.auto.yaml
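Together with the new Config._create_default_config method, the first-run behaviour sketched by these two files roughly amounts to the following (both paths are illustrative assumptions, not taken from this diff):

import glob
import os
import pathlib
import shutil

cfgfile = os.path.expanduser('~/.config/platypush/config.yaml')  # hypothetical default location
templates_dir = '/path/to/site-packages/platypush/config'        # hypothetical installed package dir

if not os.path.isfile(cfgfile):
    cfgdir = pathlib.Path(cfgfile).parent
    cfgdir.mkdir(parents=True, exist_ok=True)
    # Copy the bundled config.yaml and config.auto.yaml templates shown above,
    # which now ship with the package via the new MANIFEST.in include rule
    for template in glob.glob(os.path.join(templates_dir, 'config*.yaml')):
        shutil.copy(template, str(cfgdir))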


@ -26,14 +26,14 @@ main_bus = None
def register_backends(bus=None, global_scope=False, **kwargs): def register_backends(bus=None, global_scope=False, **kwargs):
""" Initialize the backend objects based on the configuration and returns """Initialize the backend objects based on the configuration and returns
a name -> backend_instance map. a name -> backend_instance map.
Params: Params:
bus -- If specific (it usually should), the messages processed by the bus -- If specific (it usually should), the messages processed by the
backends will be posted on this bus. backends will be posted on this bus.
kwargs -- Any additional key-value parameters required to initialize the backends kwargs -- Any additional key-value parameters required to initialize the backends
""" """
global main_bus global main_bus
if bus: if bus:
@ -57,8 +57,7 @@ def register_backends(bus=None, global_scope=False, **kwargs):
b = getattr(module, cls_name)(bus=bus, **cfg, **kwargs) b = getattr(module, cls_name)(bus=bus, **cfg, **kwargs)
backends[name] = b backends[name] = b
except AttributeError as e: except AttributeError as e:
logger.warning('No such class in {}: {}'.format( logger.warning('No such class in {}: {}'.format(module.__name__, cls_name))
module.__name__, cls_name))
raise RuntimeError(e) raise RuntimeError(e)
return backends return backends
@ -74,15 +73,15 @@ def register_plugins(bus=None):
def get_backend(name): def get_backend(name):
""" Returns the backend instance identified by name if it exists """ """Returns the backend instance identified by name if it exists"""
global backends global backends
return backends.get(name) return backends.get(name)
def get_plugin(plugin_name, reload=False): def get_plugin(plugin_name, reload=False):
""" Registers a plugin instance by name if not registered already, or """Registers a plugin instance by name if not registered already, or
returns the registered plugin instance""" returns the registered plugin instance"""
global plugins global plugins
global plugins_init_locks global plugins_init_locks
@ -104,8 +103,9 @@ def get_plugin(plugin_name, reload=False):
cls_name += token.title() cls_name += token.title()
cls_name += 'Plugin' cls_name += 'Plugin'
plugin_conf = Config.get_plugins()[plugin_name] \ plugin_conf = (
if plugin_name in Config.get_plugins() else {} Config.get_plugins()[plugin_name] if plugin_name in Config.get_plugins() else {}
)
if 'disabled' in plugin_conf: if 'disabled' in plugin_conf:
if plugin_conf['disabled'] is True: if plugin_conf['disabled'] is True:
@ -120,7 +120,9 @@ def get_plugin(plugin_name, reload=False):
try: try:
plugin_class = getattr(plugin, cls_name) plugin_class = getattr(plugin, cls_name)
except AttributeError as e: except AttributeError as e:
logger.warning('No such class in {}: {} [error: {}]'.format(plugin_name, cls_name, str(e))) logger.warning(
'No such class in {}: {} [error: {}]'.format(plugin_name, cls_name, str(e))
)
raise RuntimeError(e) raise RuntimeError(e)
with plugins_init_locks[plugin_name]: with plugins_init_locks[plugin_name]:
@@ -137,13 +139,14 @@ def get_bus() -> Bus:
         return main_bus

     from platypush.bus.redis import RedisBus
     return RedisBus()


 def get_or_create_event_loop():
     try:
         loop = asyncio.get_event_loop()
-    except RuntimeError:
+    except (DeprecationWarning, RuntimeError):
         loop = asyncio.new_event_loop()
         asyncio.set_event_loop(loop)
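A short usage sketch, assuming this module is importable as platypush.context: on newer Python versions asyncio.get_event_loop() warns or raises when the calling thread has no event loop, which is why DeprecationWarning is now caught alongside RuntimeError.

import asyncio
import threading

from platypush.context import get_or_create_event_loop  # assumed import path


def worker():
    # This thread has no event loop yet, so asyncio.get_event_loop() fails
    # (RuntimeError, or a DeprecationWarning on newer interpreters) and a new
    # loop is created and registered for the thread instead.
    loop = get_or_create_event_loop()
    loop.run_until_complete(asyncio.sleep(0))


threading.Thread(target=worker).start()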


@@ -7,11 +7,20 @@ import croniter
 from dateutil.tz import gettz

 from platypush.procedure import Procedure
-from platypush.utils import is_functional_cron
+from platypush.utils import is_functional_cron, set_thread_name

 logger = logging.getLogger('platypush:cron')


+def get_now() -> datetime.datetime:
+    """
+    :return: A timezone-aware representation of `now`
+    """
+    return datetime.datetime.now().replace(
+        tzinfo=gettz()  # lgtm [py/call-to-non-callable]
+    )
+
+
 class CronjobState(enum.IntEnum):
     IDLE = 0
     WAIT = 1
@@ -20,21 +29,36 @@ class CronjobState(enum.IntEnum):
     ERROR = 4


+class CronjobEvent(enum.IntEnum):
+    NONE = 0
+    STOP = 1
+    TIME_SYNC = 2
+
+
 class Cronjob(threading.Thread):
     def __init__(self, name, cron_expression, actions):
         super().__init__()
         self.cron_expression = cron_expression
         self.name = name
         self.state = CronjobState.IDLE
-        self._should_stop = threading.Event()
+        self._event = threading.Event()
+        self._event_type = CronjobEvent.NONE
+        self._event_lock = threading.RLock()

-        if isinstance(actions, dict) or isinstance(actions, list):
-            self.actions = Procedure.build(name=name + '__Cron', _async=False, requests=actions)
+        if isinstance(actions, (list, dict)):
+            self.actions = Procedure.build(
+                name=name + '__Cron', _async=False, requests=actions
+            )
         else:
             self.actions = actions

+    def notify(self, event: CronjobEvent):
+        with self._event_lock:
+            self._event_type = event
+            self._event.set()
+
     def run(self):
-        self.state = CronjobState.WAIT
+        set_thread_name(f'cron:{self.name}')
         self.wait()
         if self.should_stop():
             return
@@ -57,26 +81,38 @@ class Cronjob(threading.Thread):
             self.state = CronjobState.ERROR

     def wait(self):
-        now = datetime.datetime.now().replace(tzinfo=gettz())  # lgtm [py/call-to-non-callable]
-        cron = croniter.croniter(self.cron_expression, now)
-        next_run = cron.get_next()
-        self._should_stop.wait(next_run - now.timestamp())
+        with self._event_lock:
+            self.state = CronjobState.WAIT
+            self._event.clear()
+            self._event_type = CronjobEvent.TIME_SYNC
+
+        while self._event_type == CronjobEvent.TIME_SYNC:
+            now = get_now()
+            self._event_type = CronjobEvent.NONE
+            cron = croniter.croniter(self.cron_expression, now)
+            next_run = cron.get_next()
+            self._event.wait(max(0, next_run - now.timestamp()))

     def stop(self):
-        self._should_stop.set()
+        self._event_type = CronjobEvent.STOP
+        self._event.set()

     def should_stop(self):
-        return self._should_stop.is_set()
+        return self._event_type == CronjobEvent.STOP


 class CronScheduler(threading.Thread):
-    def __init__(self, jobs):
+    def __init__(self, jobs, poll_seconds: float = 0.5):
         super().__init__()
         self.jobs_config = jobs
         self._jobs = {}
+        self._poll_seconds = max(1e-3, poll_seconds)
         self._should_stop = threading.Event()
-        logger.info('Cron scheduler initialized with {} jobs'.
-                    format(len(self.jobs_config.keys())))
+        logger.info(
+            'Cron scheduler initialized with {} jobs'.format(
+                len(self.jobs_config.keys())
+            )
+        )

     def _get_job(self, name, config):
         job = self._jobs.get(name)
@@ -84,14 +120,21 @@ class CronScheduler(threading.Thread):
             return job

         if isinstance(config, dict):
-            self._jobs[name] = Cronjob(name=name, cron_expression=config['cron_expression'],
-                                       actions=config['actions'])
+            self._jobs[name] = Cronjob(
+                name=name,
+                cron_expression=config['cron_expression'],
+                actions=config['actions'],
+            )
         elif is_functional_cron(config):
-            self._jobs[name] = Cronjob(name=name, cron_expression=config.cron_expression,
-                                       actions=config)
+            self._jobs[name] = Cronjob(
+                name=name, cron_expression=config.cron_expression, actions=config
+            )
         else:
-            raise AssertionError('Expected type dict or function for cron {}, got {}'.format(
-                name, type(config)))
+            raise AssertionError(
+                'Expected type dict or function for cron {}, got {}'.format(
+                    name, type(config)
+                )
+            )

         return self._jobs[name]
@@ -112,7 +155,22 @@ class CronScheduler(threading.Thread):
                 if job.state == CronjobState.IDLE:
                     job.start()

-            self._should_stop.wait(timeout=0.5)
+            t_before_wait = get_now().timestamp()
+            self._should_stop.wait(timeout=self._poll_seconds)
+            t_after_wait = get_now().timestamp()
+            time_drift = abs(t_after_wait - t_before_wait) - self._poll_seconds
+
+            if not self.should_stop() and time_drift > 1:
+                # If the system clock has been adjusted by more than one second
+                # (e.g. because of DST change or NTP sync) then ensure that the
+                # registered cronjobs are synchronized with the new datetime
+                logger.info(
+                    'System clock drift detected: %f secs. Synchronizing the cronjobs',
+                    time_drift,
+                )
+
+                for job in self._jobs.values():
+                    job.notify(CronjobEvent.TIME_SYNC)

         logger.info('Terminating cron scheduler')
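To make the clock-drift check above concrete, here is a small self-contained sketch of the same idea (the values are illustrative): Event.wait() sleeps on the monotonic clock, while the wall clock read before and after may jump, so a large apparent drift signals a system clock change.

import threading
import time

poll_seconds = 0.5
evt = threading.Event()

t_before = time.time()          # wall-clock time, affected by DST/NTP adjustments
evt.wait(timeout=poll_seconds)  # sleeps ~0.5s of monotonic time regardless of the wall clock
t_after = time.time()

time_drift = abs(t_after - t_before) - poll_seconds
if time_drift > 1:
    print(f'System clock drift detected: {time_drift:.2f} secs')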


@ -50,8 +50,13 @@ class NextcloudPlugin(Plugin):
""" """
def __init__(self, url: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, def __init__(
**kwargs): self,
url: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
**kwargs
):
""" """
:param url: URL to the index of your default NextCloud instance. :param url: URL to the index of your default NextCloud instance.
:param username: Default NextCloud username. :param username: Default NextCloud username.
@ -61,8 +66,13 @@ class NextcloudPlugin(Plugin):
self.conf = ClientConfig(url=url, username=username, password=password) self.conf = ClientConfig(url=url, username=username, password=password)
self._client = self._get_client(**self.conf.to_dict()) self._client = self._get_client(**self.conf.to_dict())
def _get_client(self, url: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, def _get_client(
raise_on_empty: bool = False): self,
url: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
raise_on_empty: bool = False,
):
from nextcloud import NextCloud from nextcloud import NextCloud
if not url: if not url:
@ -71,19 +81,25 @@ class NextcloudPlugin(Plugin):
raise AssertionError('No url/username/password provided') raise AssertionError('No url/username/password provided')
return None return None
return NextCloud(endpoint=self.conf.url, user=self.conf.username, password=self.conf.password, return NextCloud(
json_output=True) endpoint=self.conf.url,
user=self.conf.username,
password=self.conf.password,
)
return NextCloud(endpoint=url, user=username, password=password, json_output=True) return NextCloud(endpoint=url, user=username, password=password)
@staticmethod @staticmethod
def _get_permissions(permissions: Optional[List[str]]) -> int: def _get_permissions(permissions: Optional[List[str]]) -> int:
int_perm = 0 int_perm = 0
for perm in (permissions or []): for perm in permissions or []:
perm = perm.upper() perm = perm.upper()
assert hasattr(Permission, perm), 'Unknown permissions type: {}. Supported permissions: {}'.format( assert hasattr(
perm, [p.name.lower() for p in Permission]) Permission, perm
), 'Unknown permissions type: {}. Supported permissions: {}'.format(
perm, [p.name.lower() for p in Permission]
)
if perm == 'ALL': if perm == 'ALL':
int_perm = Permission.ALL.value int_perm = Permission.ALL.value
@ -96,8 +112,11 @@ class NextcloudPlugin(Plugin):
@staticmethod @staticmethod
def _get_share_type(share_type: str) -> int: def _get_share_type(share_type: str) -> int:
share_type = share_type.upper() share_type = share_type.upper()
assert hasattr(ShareType, share_type), 'Unknown share type: {}. Supported share types: {}'.format( assert hasattr(
share_type, [s.name.lower() for s in ShareType]) ShareType, share_type
), 'Unknown share type: {}. Supported share types: {}'.format(
share_type, [s.name.lower() for s in ShareType]
)
return getattr(ShareType, share_type).value return getattr(ShareType, share_type).value
@ -114,13 +133,23 @@ class NextcloudPlugin(Plugin):
args=', '.join(args), args=', '.join(args),
sep=', ' if args and kwargs else '', sep=', ' if args and kwargs else '',
kwargs=', '.join(['{}={}'.format(k, v) for k, v in kwargs.items()]), kwargs=', '.join(['{}={}'.format(k, v) for k, v in kwargs.items()]),
error=response.meta.get('message', '[No message]') if hasattr(response, 'meta') else response.raw.reason) error=response.meta.get('message', '[No message]')
if hasattr(response, 'meta')
else response.raw.reason,
)
return response.data return response.json_data
@action @action
def get_activities(self, since: Optional[id] = None, limit: Optional[int] = None, object_type: Optional[str] = None, def get_activities(
object_id: Optional[int] = None, sort: str = 'desc', **server_args) -> List[str]: self,
since: Optional[id] = None,
limit: Optional[int] = None,
object_type: Optional[str] = None,
object_id: Optional[int] = None,
sort: str = 'desc',
**server_args
) -> List[str]:
""" """
Get the list of recent activities on an instance. Get the list of recent activities on an instance.
@ -132,9 +161,15 @@ class NextcloudPlugin(Plugin):
:param server_args: Override the default server settings (see :meth:`._get_client` arguments). :param server_args: Override the default server settings (see :meth:`._get_client` arguments).
:return: The list of selected activities. :return: The list of selected activities.
""" """
return self._execute(server_args, 'get_activities', since=since, limit=limit, object_type=object_type, return self._execute(
object_id=object_id, server_args,
sort=sort) 'get_activities',
since=since,
limit=limit,
object_type=object_type,
object_id=object_id,
sort=sort,
)
@action @action
def get_apps(self, **server_args) -> List[str]: def get_apps(self, **server_args) -> List[str]:
@ -216,8 +251,13 @@ class NextcloudPlugin(Plugin):
return self._execute(server_args, 'get_group', group_id) return self._execute(server_args, 'get_group', group_id)
@action @action
def get_groups(self, search: Optional[str] = None, limit: Optional[int] = None, offset: Optional[int] = None, def get_groups(
**server_args) -> List[str]: self,
search: Optional[str] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
**server_args
) -> List[str]:
""" """
Search for groups. Search for groups.
@ -226,7 +266,9 @@ class NextcloudPlugin(Plugin):
:param offset: Start offset. :param offset: Start offset.
:param server_args: Override the default server settings (see :meth:`._get_client` arguments). :param server_args: Override the default server settings (see :meth:`._get_client` arguments).
""" """
return self._execute(server_args, 'get_groups', search=search, limit=limit, offset=offset).get('groups', []) return self._execute(
server_args, 'get_groups', search=search, limit=limit, offset=offset
).get('groups', [])
@action @action
def create_group_folder(self, name: str, **server_args): def create_group_folder(self, name: str, **server_args):
@ -268,7 +310,9 @@ class NextcloudPlugin(Plugin):
return self._execute(server_args, 'get_group_folders') return self._execute(server_args, 'get_group_folders')
@action @action
def rename_group_folder(self, folder_id: Union[int, str], new_name: str, **server_args): def rename_group_folder(
self, folder_id: Union[int, str], new_name: str, **server_args
):
""" """
Rename a group folder. Rename a group folder.
@ -279,7 +323,9 @@ class NextcloudPlugin(Plugin):
self._execute(server_args, 'rename_group_folder', folder_id, new_name) self._execute(server_args, 'rename_group_folder', folder_id, new_name)
@action @action
def grant_access_to_group_folder(self, folder_id: Union[int, str], group_id: str, **server_args): def grant_access_to_group_folder(
self, folder_id: Union[int, str], group_id: str, **server_args
):
""" """
Grant access to a group folder to a given group. Grant access to a group folder to a given group.
@ -290,7 +336,9 @@ class NextcloudPlugin(Plugin):
self._execute(server_args, 'grant_access_to_group_folder', folder_id, group_id) self._execute(server_args, 'grant_access_to_group_folder', folder_id, group_id)
@action @action
def revoke_access_to_group_folder(self, folder_id: Union[int, str], group_id: str, **server_args): def revoke_access_to_group_folder(
self, folder_id: Union[int, str], group_id: str, **server_args
):
""" """
Revoke access to a group folder to a given group. Revoke access to a group folder to a given group.
@ -301,7 +349,9 @@ class NextcloudPlugin(Plugin):
self._execute(server_args, 'revoke_access_to_group_folder', folder_id, group_id) self._execute(server_args, 'revoke_access_to_group_folder', folder_id, group_id)
@action @action
def set_group_folder_quota(self, folder_id: Union[int, str], quota: Optional[int], **server_args): def set_group_folder_quota(
self, folder_id: Union[int, str], quota: Optional[int], **server_args
):
""" """
Set the quota of a group folder. Set the quota of a group folder.
@ -309,11 +359,21 @@ class NextcloudPlugin(Plugin):
:param quota: Quota in bytes - set None for unlimited. :param quota: Quota in bytes - set None for unlimited.
:param server_args: Override the default server settings (see :meth:`._get_client` arguments). :param server_args: Override the default server settings (see :meth:`._get_client` arguments).
""" """
self._execute(server_args, 'set_quota_of_group_folder', folder_id, quota if quota is not None else -3) self._execute(
server_args,
'set_quota_of_group_folder',
folder_id,
quota if quota is not None else -3,
)
@action @action
def set_group_folder_permissions(self, folder_id: Union[int, str], group_id: str, permissions: List[str], def set_group_folder_permissions(
**server_args): self,
folder_id: Union[int, str],
group_id: str,
permissions: List[str],
**server_args
):
""" """
Set the permissions on a folder for a group. Set the permissions on a folder for a group.
@ -330,8 +390,13 @@ class NextcloudPlugin(Plugin):
:param server_args: Override the default server settings (see :meth:`._get_client` arguments). :param server_args: Override the default server settings (see :meth:`._get_client` arguments).
""" """
self._execute(server_args, 'set_permissions_to_group_folder', folder_id, group_id, self._execute(
self._get_permissions(permissions)) server_args,
'set_permissions_to_group_folder',
folder_id,
group_id,
self._get_permissions(permissions),
)
@action @action
def get_notifications(self, **server_args) -> list: def get_notifications(self, **server_args) -> list:
@ -372,8 +437,16 @@ class NextcloudPlugin(Plugin):
self._execute(server_args, 'delete_notification', notification_id) self._execute(server_args, 'delete_notification', notification_id)
@action @action
def create_share(self, path: str, share_type: str, share_with: Optional[str] = None, public_upload: bool = False, def create_share(
password: Optional[str] = None, permissions: Optional[List[str]] = None, **server_args) -> dict: self,
path: str,
share_type: str,
share_with: Optional[str] = None,
public_upload: bool = False,
password: Optional[str] = None,
permissions: Optional[List[str]] = None,
**server_args
) -> dict:
""" """
Share a file/folder with a user/group or a public link. Share a file/folder with a user/group or a public link.
@ -442,9 +515,16 @@ class NextcloudPlugin(Plugin):
""" """
share_type = self._get_share_type(share_type) share_type = self._get_share_type(share_type)
permissions = self._get_permissions(permissions or ['read']) permissions = self._get_permissions(permissions or ['read'])
return self._execute(server_args, 'create_share', path, share_type=share_type, share_with=share_with, return self._execute(
public_upload=public_upload, server_args,
password=password, permissions=permissions) 'create_share',
path,
share_type=share_type,
share_with=share_with,
public_upload=public_upload,
password=password,
permissions=permissions,
)
@action @action
def get_shares(self, **server_args) -> List[dict]: def get_shares(self, **server_args) -> List[dict]:
@ -516,8 +596,15 @@ class NextcloudPlugin(Plugin):
return self._execute(server_args, 'get_share_info', str(share_id)) return self._execute(server_args, 'get_share_info', str(share_id))
@action @action
def update_share(self, share_id: int, public_upload: Optional[bool] = None, password: Optional[str] = None, def update_share(
permissions: Optional[List[str]] = None, expire_date: Optional[str] = None, **server_args): self,
share_id: int,
public_upload: Optional[bool] = None,
password: Optional[str] = None,
permissions: Optional[List[str]] = None,
expire_date: Optional[str] = None,
**server_args
):
""" """
Update the permissions of a shared resource. Update the permissions of a shared resource.
@ -539,8 +626,15 @@ class NextcloudPlugin(Plugin):
if permissions: if permissions:
permissions = self._get_permissions(permissions) permissions = self._get_permissions(permissions)
self._execute(server_args, 'update_share', share_id, public_upload=public_upload, password=password, self._execute(
permissions=permissions, expire_date=expire_date) server_args,
'update_share',
share_id,
public_upload=public_upload,
password=password,
permissions=permissions,
expire_date=expire_date,
)
@action @action
def create_user(self, user_id: str, password: str, **server_args): def create_user(self, user_id: str, password: str, **server_args):
@ -611,8 +705,13 @@ class NextcloudPlugin(Plugin):
return self._execute(server_args, 'get_user', user_id) return self._execute(server_args, 'get_user', user_id)
@action @action
def get_users(self, search: Optional[str] = None, limit: Optional[int] = None, offset: Optional[int] = None, def get_users(
**server_args) -> List[str]: self,
search: Optional[str] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
**server_args
) -> List[str]:
""" """
Get the list of users matching some search criteria. Get the list of users matching some search criteria.
@ -621,7 +720,9 @@ class NextcloudPlugin(Plugin):
:param offset: Search results offset (default: None). :param offset: Search results offset (default: None).
:return: List of the matched user IDs. :return: List of the matched user IDs.
""" """
return self._execute(server_args, 'get_users', search=search, limit=limit, offset=offset) return self._execute(
server_args, 'get_users', search=search, limit=limit, offset=offset
)
@action @action
def delete_user(self, user_id: str, **server_args): def delete_user(self, user_id: str, **server_args):
@ -733,8 +834,15 @@ class NextcloudPlugin(Plugin):
self._execute(server_args, 'delete_path', user_id, path) self._execute(server_args, 'delete_path', user_id, path)
@action @action
def upload_file(self, remote_path: str, local_path: Optional[str] = None, content: Optional[str] = None, def upload_file(
user_id: Optional[str] = None, timestamp: Optional[Union[datetime, int, str]] = None, **server_args): self,
remote_path: str,
local_path: Optional[str] = None,
content: Optional[str] = None,
user_id: Optional[str] = None,
timestamp: Optional[Union[datetime, int, str]] = None,
**server_args
):
""" """
Upload a file. Upload a file.
@ -753,17 +861,32 @@ class NextcloudPlugin(Plugin):
if isinstance(timestamp, datetime): if isinstance(timestamp, datetime):
timestamp = int(timestamp.timestamp()) timestamp = int(timestamp.timestamp())
assert (local_path or content) and not (local_path and content), 'Please specify either local_path or content' assert (local_path or content) and not (
local_path and content
), 'Please specify either local_path or content'
if local_path: if local_path:
method = 'upload_file' method = 'upload_file'
local_path = os.path.abspath(os.path.expanduser(local_path)) local_path = os.path.abspath(os.path.expanduser(local_path))
else: else:
method = 'upload_file_contents' method = 'upload_file_contents'
return self._execute(server_args, method, user_id, local_path or content, remote_path, timestamp=timestamp) return self._execute(
server_args,
method,
user_id,
local_path or content,
remote_path,
timestamp=timestamp,
)
@action @action
def download_file(self, remote_path: str, local_path: str, user_id: Optional[str] = None, **server_args): def download_file(
self,
remote_path: str,
local_path: str,
user_id: Optional[str] = None,
**server_args
):
""" """
Download a file. Download a file.
@ -783,8 +906,14 @@ class NextcloudPlugin(Plugin):
os.chdir(cur_dir) os.chdir(cur_dir)
@action @action
def list(self, path: str, user_id: Optional[str] = None, depth: int = 1, all_properties: bool = False, def list(
**server_args) -> List[dict]: self,
path: str,
user_id: Optional[str] = None,
depth: int = 1,
all_properties: bool = False,
**server_args
) -> List[dict]:
""" """
List the content of a folder on the NextCloud instance. List the content of a folder on the NextCloud instance.
@ -795,10 +924,19 @@ class NextcloudPlugin(Plugin):
:param server_args: Override the default server settings (see :meth:`._get_client` arguments). :param server_args: Override the default server settings (see :meth:`._get_client` arguments).
""" """
user_id = user_id or server_args.get('username', self.conf.username) user_id = user_id or server_args.get('username', self.conf.username)
return self._execute(server_args, 'list_folders', user_id, path, depth=depth, all_properties=all_properties) return self._execute(
server_args,
'list_folders',
user_id,
path,
depth=depth,
all_properties=all_properties,
)
@action @action
def list_favorites(self, path: Optional[str] = None, user_id: Optional[str] = None, **server_args) -> List[dict]: def list_favorites(
self, path: Optional[str] = None, user_id: Optional[str] = None, **server_args
) -> List[dict]:
""" """
List the favorite items for a user. List the favorite items for a user.
@ -810,7 +948,9 @@ class NextcloudPlugin(Plugin):
return self._execute(server_args, 'list_folders', user_id, path) return self._execute(server_args, 'list_folders', user_id, path)
@action @action
def mark_favorite(self, path: Optional[str] = None, user_id: Optional[str] = None, **server_args): def mark_favorite(
self, path: Optional[str] = None, user_id: Optional[str] = None, **server_args
):
""" """
Add a path to a user's favorites. Add a path to a user's favorites.
@ -822,7 +962,14 @@ class NextcloudPlugin(Plugin):
self._execute(server_args, 'set_favorites', user_id, path) self._execute(server_args, 'set_favorites', user_id, path)
@action @action
def copy(self, path: str, destination: str, user_id: Optional[str] = None, overwrite: bool = False, **server_args): def copy(
self,
path: str,
destination: str,
user_id: Optional[str] = None,
overwrite: bool = False,
**server_args
):
""" """
Copy a resource to another path. Copy a resource to another path.
@ -833,10 +980,19 @@ class NextcloudPlugin(Plugin):
:param server_args: Override the default server settings (see :meth:`._get_client` arguments). :param server_args: Override the default server settings (see :meth:`._get_client` arguments).
""" """
user_id = user_id or server_args.get('username', self.conf.username) user_id = user_id or server_args.get('username', self.conf.username)
self._execute(server_args, 'copy_path', user_id, path, destination, overwrite=overwrite) self._execute(
server_args, 'copy_path', user_id, path, destination, overwrite=overwrite
)
@action @action
def move(self, path: str, destination: str, user_id: Optional[str] = None, overwrite: bool = False, **server_args): def move(
self,
path: str,
destination: str,
user_id: Optional[str] = None,
overwrite: bool = False,
**server_args
):
""" """
Move a resource to another path. Move a resource to another path.
@ -847,7 +1003,9 @@ class NextcloudPlugin(Plugin):
:param server_args: Override the default server settings (see :meth:`._get_client` arguments). :param server_args: Override the default server settings (see :meth:`._get_client` arguments).
""" """
user_id = user_id or server_args.get('username', self.conf.username) user_id = user_id or server_args.get('username', self.conf.username)
self._execute(server_args, 'move_path', user_id, path, destination, overwrite=overwrite) self._execute(
server_args, 'move_path', user_id, path, destination, overwrite=overwrite
)
# vim:sw=4:ts=4:et: # vim:sw=4:ts=4:et:


@@ -1,3 +1,8 @@
 [tool.black]
 skip-string-normalization = true
 skip-numeric-underscore-normalization = true
+
+[tool.pytest.ini_options]
+filterwarnings = [
+    'ignore:There is no current event loop:DeprecationWarning',
+]


@@ -20,3 +20,4 @@ zeroconf
 paho-mqtt
 websocket-client
 croniter
+python-magic


@ -17,7 +17,7 @@ def readfile(fname):
def pkg_files(dir): def pkg_files(dir):
paths = [] paths = []
# noinspection PyShadowingNames # noinspection PyShadowingNames
for (path, dirs, files) in os.walk(dir): for (path, _, files) in os.walk(dir):
for file in files: for file in files:
paths.append(os.path.join('..', path, file)) paths.append(os.path.join('..', path, file))
return paths return paths
@ -68,17 +68,21 @@ setup(
'pyjwt', 'pyjwt',
'marshmallow', 'marshmallow',
'frozendict', 'frozendict',
'flask',
'bcrypt',
'python-magic',
], ],
extras_require={ extras_require={
# Support for thread custom name # Support for thread custom name
'threadname': ['python-prctl'], 'threadname': ['python-prctl'],
# Support for Kafka backend and plugin # Support for Kafka backend and plugin
'kafka': ['kafka-python'], 'kafka': ['kafka-python'],
# Support for Pushbullet backend and plugin # Support for Pushbullet backend and plugin
'pushbullet': ['pushbullet.py @ https://github.com/rbrcsk/pushbullet.py/tarball/master'], 'pushbullet': [
# Support for HTTP backend 'pushbullet.py @ https://github.com/rbrcsk/pushbullet.py/tarball/master'
'http': ['flask', 'bcrypt', 'python-magic', 'gunicorn'], ],
# Support for HTTP backend over uWSGI
'http': ['gunicorn'],
# Support for MQTT backends # Support for MQTT backends
'mqtt': ['paho-mqtt'], 'mqtt': ['paho-mqtt'],
# Support for RSS feeds parser # Support for RSS feeds parser
@ -90,7 +94,11 @@ setup(
# Support for MPD/Mopidy music server plugin and backend # Support for MPD/Mopidy music server plugin and backend
'mpd': ['python-mpd2'], 'mpd': ['python-mpd2'],
# Support for Google text2speech plugin # Support for Google text2speech plugin
'google-tts': ['oauth2client', 'google-api-python-client', 'google-cloud-texttospeech'], 'google-tts': [
'oauth2client',
'google-api-python-client',
'google-cloud-texttospeech',
],
# Support for OMXPlayer plugin # Support for OMXPlayer plugin
'omxplayer': ['omxplayer-wrapper'], 'omxplayer': ['omxplayer-wrapper'],
# Support for YouTube # Support for YouTube
@ -138,7 +146,8 @@ setup(
# Support for web media subtitles # Support for web media subtitles
'subtitles': [ 'subtitles': [
'webvtt-py', 'webvtt-py',
'python-opensubtitles @ https://github.com/agonzalezro/python-opensubtitles/tarball/master'], 'python-opensubtitles @ https://github.com/agonzalezro/python-opensubtitles/tarball/master',
],
# Support for mpv player plugin # Support for mpv player plugin
'mpv': ['python-mpv'], 'mpv': ['python-mpv'],
# Support for NFC tags # Support for NFC tags
@ -156,14 +165,21 @@ setup(
# Support for Dropbox integration # Support for Dropbox integration
'dropbox': ['dropbox'], 'dropbox': ['dropbox'],
# Support for Leap Motion backend # Support for Leap Motion backend
'leap': ['leap-sdk @ https://github.com/BlackLight/leap-sdk-python3/tarball/master'], 'leap': [
'leap-sdk @ https://github.com/BlackLight/leap-sdk-python3/tarball/master'
],
# Support for Flic buttons # Support for Flic buttons
'flic': ['flic @ https://github.com/50ButtonsEach/fliclib-linux-hci/tarball/master'], 'flic': [
'flic @ https://github.com/50ButtonsEach/fliclib-linux-hci/tarball/master'
],
# Support for Alexa/Echo plugin # Support for Alexa/Echo plugin
'alexa': ['avs @ https://github.com/BlackLight/avs/tarball/master'], 'alexa': ['avs @ https://github.com/BlackLight/avs/tarball/master'],
# Support for bluetooth devices # Support for bluetooth devices
'bluetooth': ['pybluez', 'gattlib', 'bluetooth': [
'pyobex @ https://github.com/BlackLight/PyOBEX/tarball/master'], 'pybluez',
'gattlib',
'pyobex @ https://github.com/BlackLight/PyOBEX/tarball/master',
],
# Support for TP-Link devices # Support for TP-Link devices
'tplink': ['pyHS100'], 'tplink': ['pyHS100'],
# Support for PMW3901 2-Dimensional Optical Flow Sensor # Support for PMW3901 2-Dimensional Optical Flow Sensor
@ -231,7 +247,9 @@ setup(
# Support for Twilio integration # Support for Twilio integration
'twilio': ['twilio'], 'twilio': ['twilio'],
# Support for DHT11/DHT22/AM2302 temperature/humidity sensors # Support for DHT11/DHT22/AM2302 temperature/humidity sensors
'dht': ['Adafruit_Python_DHT @ git+https://github.com/adafruit/Adafruit_Python_DHT'], 'dht': [
'Adafruit_Python_DHT @ git+https://github.com/adafruit/Adafruit_Python_DHT'
],
# Support for LCD display integration # Support for LCD display integration
'lcd': ['RPi.GPIO', 'RPLCD'], 'lcd': ['RPi.GPIO', 'RPLCD'],
# Support for IMAP mail integration # Support for IMAP mail integration


@@ -2,25 +2,34 @@ import datetime

 from platypush.cron import cron

-from tests.test_cron import tmp_files, tmp_files_ready, \
-    test_timeout, expected_cron_file_content
+from tests.test_cron import test_timeout, cron_queue
+
+
+def make_cron_expr(cron_time: datetime.datetime):
+    return '{min} {hour} {day} {month} * {sec}'.format(
+        min=cron_time.minute,
+        hour=cron_time.hour,
+        day=cron_time.day,
+        month=cron_time.month,
+        sec=cron_time.second,
+    )
+

 # Prepare a cronjob that should start test_timeout/2 seconds from the application start
-cron_time = datetime.datetime.now() + datetime.timedelta(seconds=test_timeout/2)
-cron_expr = '{min} {hour} {day} {month} * {sec}'.format(
-    min=cron_time.minute, hour=cron_time.hour, day=cron_time.day,
-    month=cron_time.month, sec=cron_time.second)
+cron_time = datetime.datetime.now() + datetime.timedelta(seconds=test_timeout / 2)


-@cron(cron_expr)
+@cron(make_cron_expr(cron_time))
 def cron_test(**_):
-    """
-    Simple cronjob that awaits for ``../test_cron.py`` to be ready and writes the expected
-    content to the monitored temporary file.
-    """
-    files_ready = tmp_files_ready.wait(timeout=test_timeout)
-    assert files_ready, \
-        'The test did not prepare the temporary files within {} seconds'.format(test_timeout)
-
-    with open(tmp_files[0], 'w') as f:
-        f.write(expected_cron_file_content)
+    cron_queue.put('cron_test')
+
+
+# Prepare another cronjob that should start 1hr + test_timeout/2 seconds from the application start
+cron_time = datetime.datetime.now() + datetime.timedelta(
+    hours=1, seconds=test_timeout / 2
+)
+
+
+@cron(make_cron_expr(cron_time))
+def cron_1hr_test(**_):
+    cron_queue.put('cron_1hr_test')
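For illustration, the make_cron_expr helper above produces a six-field expression whose trailing field carries the seconds; the timestamp below is an arbitrary example:

import datetime

# e.g. 2022-04-28 01:58:24 -> '58 1 28 4 * 24'
print(make_cron_expr(datetime.datetime(2022, 4, 28, 1, 58, 24)))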


@@ -1,43 +1,61 @@
-import os
+import datetime
+import queue
 import pytest
-import tempfile
-import threading
 import time

-tmp_files = []
-tmp_files_ready = threading.Event()
+from dateutil.tz import gettz
+from mock import patch
+
 test_timeout = 10
-expected_cron_file_content = 'The cronjob ran successfully!'
+cron_queue = queue.Queue()


-@pytest.fixture(scope='module', autouse=True)
-def tmp_file(*_):
-    tmp_file = tempfile.NamedTemporaryFile(prefix='platypush-test-cron-',
-                                           suffix='.txt', delete=False)
-    tmp_files.append(tmp_file.name)
-    tmp_files_ready.set()
-    yield tmp_file.name
+class MockDatetime(datetime.datetime):
+    timedelta = datetime.timedelta()

-    for f in tmp_files:
-        if os.path.isfile(f):
-            os.unlink(f)
+    @classmethod
+    def now(cls):
+        return super().now(tz=gettz()) + cls.timedelta


-def test_cron_execution(tmp_file):
+def _test_cron_queue(expected_msg: str):
+    msg = None
+    test_start = time.time()
+    while time.time() - test_start <= test_timeout and msg != expected_msg:
+        try:
+            msg = cron_queue.get(block=True, timeout=test_timeout)
+        except queue.Empty:
+            break
+
+    assert msg == expected_msg, 'The expected cronjob has not been executed'
+
+
+def test_cron_execution():
     """
     Test that the cronjob in ``../etc/scripts/test_cron.py`` runs successfully.
     """
-    actual_cron_file_content = None
-    test_start = time.time()
-
-    while actual_cron_file_content != expected_cron_file_content and \
-            time.time() - test_start < test_timeout:
-        with open(tmp_file, 'r') as f:
-            actual_cron_file_content = f.read()
-        time.sleep(0.5)
+    _test_cron_queue('cron_test')

-    assert actual_cron_file_content == expected_cron_file_content, \
-        'cron_test failed to run within {} seconds'.format(test_timeout)
+
+def test_cron_execution_upon_system_clock_change():
+    """
+    Test that the cronjob runs at the right time even upon DST or other
+    system clock changes.
+    """
+    # Mock datetime.datetime with a class that has overridable timedelta
+    patcher = patch('datetime.datetime', MockDatetime)
+
+    try:
+        patcher.start()
+        time.sleep(1)
+        # Simulate a +1hr shift on the system clock
+        MockDatetime.timedelta = datetime.timedelta(hours=1)
+        time.sleep(1)
+    finally:
+        patcher.stop()
+
+    # Ensure that the cronjob that was supposed to run in an hour is now running
+    _test_cron_queue('cron_1hr_test')


 if __name__ == '__main__':
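As a closing note, the clock-shift trick used by the test above can be reproduced in isolation with a sketch like this (using the standard library's unittest.mock in place of the mock package):

import datetime
from unittest.mock import patch

from dateutil.tz import gettz


class MockDatetime(datetime.datetime):
    timedelta = datetime.timedelta()

    @classmethod
    def now(cls):
        return super().now(tz=gettz()) + cls.timedelta


with patch('datetime.datetime', MockDatetime):
    MockDatetime.timedelta = datetime.timedelta(hours=1)
    # Any code calling datetime.datetime.now() inside this block sees a clock
    # shifted ~1 hour into the future
    print(datetime.datetime.now())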