Compare commits

12 Commits

c6e2390a6c ... 1e93af86f4

Author | SHA1 | Date
---|---|---
Fabio Manganiello | 1e93af86f4 | 
Fabio Manganiello | 53bdcb9604 | 
Fabio Manganiello | 9acd71944c | 
Fabio Manganiello | e5a5ac5ffb | 
Fabio Manganiello | d872835093 | 
Fabio Manganiello | 608844ca0c | 
Fabio Manganiello | 841643f3ff | 
Fabio Manganiello | 40557f5d5d | 
Fabio Manganiello | 4da3c13976 | 
Fabio Manganiello | 4d52fd35b9 | 
Fabio Manganiello | 40d3ad1150 | 
Fabio Manganiello | fd7037d048 | 
@@ -84,8 +84,11 @@ steps:
- git remote rm origin
- git remote add origin git@git.platypush.tech:platypush/platypush.git

# Push to the `stable` branch
# Merge and push to the `stable` branch
- git checkout stable
- git rebase master
- git push -u origin stable
- git checkout master

# Restore the original git configuration
- mv /tmp/git.config.orig $GIT_CONF

@@ -3,7 +3,6 @@ import os
import re
import sys
import textwrap as tw
from contextlib import contextmanager

from sphinx.application import Sphinx

@@ -13,14 +12,15 @@ base_path = os.path.abspath(

sys.path.insert(0, base_path)

from platypush.utils import get_plugin_name_by_class # noqa
from platypush.utils.mock import mock # noqa
from platypush.utils.reflection import IntegrationMetadata, import_file # noqa
from platypush.common.reflection import Integration # noqa
from platypush.utils import get_plugin_name_by_class, import_file # noqa
from platypush.utils.mock import auto_mocks # noqa
from platypush.utils.mock.modules import mock_imports # noqa


class IntegrationEnricher:
@staticmethod
def add_events(source: list[str], manifest: IntegrationMetadata, idx: int) -> int:
def add_events(source: list[str], manifest: Integration, idx: int) -> int:
if not manifest.events:
return idx

@@ -37,7 +37,7 @@ class IntegrationEnricher:
return idx + 1

@staticmethod
def add_actions(source: list[str], manifest: IntegrationMetadata, idx: int) -> int:
def add_actions(source: list[str], manifest: Integration, idx: int) -> int:
if not (manifest.actions and manifest.cls):
return idx

@@ -60,7 +60,7 @@ class IntegrationEnricher:

@classmethod
def add_install_deps(
cls, source: list[str], manifest: IntegrationMetadata, idx: int
cls, source: list[str], manifest: Integration, idx: int
) -> int:
deps = manifest.deps
parsed_deps = {

@@ -106,9 +106,7 @@ class IntegrationEnricher:
return idx

@classmethod
def add_description(
cls, source: list[str], manifest: IntegrationMetadata, idx: int
) -> int:
def add_description(cls, source: list[str], manifest: Integration, idx: int) -> int:
docs = (
doc
for doc in (

@@ -127,7 +125,7 @@ class IntegrationEnricher:

@classmethod
def add_conf_snippet(
cls, source: list[str], manifest: IntegrationMetadata, idx: int
cls, source: list[str], manifest: Integration, idx: int
) -> int:
source.insert(
idx,

@@ -163,8 +161,8 @@ class IntegrationEnricher:
if not os.path.isfile(manifest_file):
return

with mock_imports():
manifest = IntegrationMetadata.from_manifest(manifest_file)
with auto_mocks():
manifest = Integration.from_manifest(manifest_file)
idx = self.add_description(src, manifest, idx=3)
idx = self.add_conf_snippet(src, manifest, idx=idx)
idx = self.add_install_deps(src, manifest, idx=idx)

@@ -175,14 +173,6 @@ class IntegrationEnricher:
source[0] = '\n'.join(src)


@contextmanager
def mock_imports():
conf_mod = import_file(os.path.join(base_path, 'docs', 'source', 'conf.py'))
mock_mods = getattr(conf_mod, 'autodoc_mock_imports', [])
with mock(*mock_mods):
yield


def setup(app: Sphinx):
app.connect('source-read', IntegrationEnricher())
return {

@@ -163,9 +163,9 @@ latex_documents = [
man_pages = [(master_doc, 'platypush', 'platypush Documentation', [author], 1)]


# -- Options for Texinfo output ----------------------------------------------
# -- Options for TexInfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# Grouping the document tree into TexInfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [

@@ -193,126 +193,25 @@ autodoc_default_options = {
'show-inheritance': True,
}

autodoc_mock_imports = [
'gunicorn', 'googlesamples.assistant.grpc.audio_helpers', 'google.assistant.embedded',
'google.assistant.library', 'google.assistant.library.event', 'google.assistant.library.file_helpers',
'google.oauth2.credentials', 'oauth2client', 'apiclient', 'tenacity', 'smartcard', 'Leap',
'oauth2client', 'rtmidi', 'bluetooth', 'gevent.wsgi', 'Adafruit_IO', 'pyclip', 'pydbus',
'inputs', 'inotify', 'omxplayer', 'plexapi', 'cwiid', 'sounddevice', 'soundfile', 'numpy',
'cv2', 'nfc', 'ndef', 'bcrypt', 'google', 'feedparser', 'kafka', 'googlesamples', 'icalendar',
'httplib2', 'mpd', 'serial', 'pyHS100', 'grpc', 'envirophat', 'gps', 'picamera', 'pmw3901',
'PIL', 'croniter', 'pyaudio', 'avs', 'PyOBEX', 'PyOBEX.client', 'todoist', 'trello',
'telegram', 'telegram.ext', 'pyfirmata2', 'cups', 'graphyte', 'cpuinfo', 'psutil',
'openzwave', 'deepspeech', 'wave', 'pvporcupine ', 'pvcheetah', 'pyotp', 'linode_api4',
'pyzbar', 'tensorflow', 'keras', 'pandas', 'samsungtvws', 'paramiko', 'luma', 'zeroconf',
'dbus', 'gi', 'gi.repository', 'twilio', 'Adafruit_Python_DHT', 'RPi.GPIO', 'RPLCD',
'imapclient', 'pysmartthings', 'aiohttp', 'watchdog', 'pyngrok', 'irc', 'irc.bot',
'irc.strings', 'irc.client', 'irc.connection', 'irc.events', 'defusedxml', 'nio',
'aiofiles', 'aiofiles.os', 'async_lru', 'bleak', 'bluetooth_numbers', 'TheengsDecoder',
'simple_websocket', 'uvicorn', 'websockets', 'docutils', 'aioxmpp',
]

sys.path.insert(0, os.path.abspath('../..'))

from platypush.utils.mock.modules import mock_imports # noqa

def skip(app, what, name, obj, skip, options):
autodoc_mock_imports = [*mock_imports]


# _ = app
# __ = what
# ___ = obj
# ____ = options
def _skip(_, __, name, ___, skip, ____):
if name == "__init__":
return False
return skip


def setup(app):
app.connect("autodoc-skip-member", skip)
app.connect("autodoc-skip-member", _skip)


# vim:sw=4:ts=4:et:

@@ -1,18 +1,18 @@
import importlib
import inspect
import os
import sys
from typing import Iterable, Optional

import pkgutil

from platypush.backend import Backend
from platypush.context import get_plugin
from platypush.message.event import Event
from platypush.message.response import Response
from platypush.plugins import Plugin
from platypush.utils.manifest import Manifests


def _get_inspect_plugin():
p = get_plugin('inspect')
assert p, 'Could not load the `inspect` plugin'
return p


def get_all_plugins():
return sorted([mf.component_name for mf in Manifests.by_base_class(Plugin)])

@@ -22,11 +22,35 @@ def get_all_backends():


def get_all_events():
return _get_inspect_plugin().get_all_events().output
return _get_modules(Event)


def get_all_responses():
return _get_inspect_plugin().get_all_responses().output
return _get_modules(Response)


def _get_modules(base_type: type):
ret = set()
base_dir = os.path.dirname(inspect.getfile(base_type))
package = base_type.__module__

for _, mod_name, _ in pkgutil.walk_packages([base_dir], prefix=package + '.'):
try:
module = importlib.import_module(mod_name)
except Exception:
print('Could not import module', mod_name, file=sys.stderr)
continue

for _, obj_type in inspect.getmembers(module):
if (
inspect.isclass(obj_type)
and issubclass(obj_type, base_type)
# Exclude the base_type itself
and obj_type != base_type
):
ret.add(obj_type.__module__.replace(package + '.', '', 1))

return list(ret)


def _generate_components_doc(

@@ -122,7 +146,7 @@ def generate_events_doc():
_generate_components_doc(
index_name='events',
package_name='message.event',
components=sorted(event for event in get_all_events().keys() if event),
components=sorted(event for event in get_all_events() if event),
)

@@ -130,9 +154,7 @@ def generate_responses_doc():
_generate_components_doc(
index_name='responses',
package_name='message.response',
components=sorted(
response for response in get_all_responses().keys() if response
),
components=sorted(response for response in get_all_responses() if response),
)

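A minimal sketch of how the two refactored helpers above are now consumed inside the same script (the module names in the comments are illustrative, not an exhaustive list):

```python
# get_all_events()/get_all_responses() now return plain lists of module names
# relative to their base package, so the previous `.keys()` calls are no
# longer needed when building the documentation indexes.
events = sorted(event for event in get_all_events() if event)
# e.g. ['assistant', 'bluetooth', ...] (illustrative)
responses = sorted(response for response in get_all_responses() if response)
```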
@@ -42,6 +42,7 @@ class Application:
config_file: Optional[str] = None,
workdir: Optional[str] = None,
logsdir: Optional[str] = None,
cachedir: Optional[str] = None,
device_id: Optional[str] = None,
pidfile: Optional[str] = None,
requests_to_process: Optional[int] = None,

@@ -62,6 +63,8 @@ class Application:
``filename`` setting under the ``logging`` section of the
configuration file is used. If not set, logging will be sent to
stdout and stderr.
:param cachedir: Overrides the ``cachedir`` setting in the configuration
file (default: None).
:param device_id: Override the device ID used to identify this
instance. If not passed here, it is inferred from the configuration
(device_id field). If not present there either, it is inferred from

@@ -106,6 +109,9 @@ class Application:
self.config_file,
device_id=device_id,
workdir=os.path.abspath(os.path.expanduser(workdir)) if workdir else None,
cachedir=os.path.abspath(os.path.expanduser(cachedir))
if cachedir
else None,
ctrl_sock=os.path.abspath(os.path.expanduser(ctrl_sock))
if ctrl_sock
else None,

@@ -206,6 +212,7 @@ class Application:
return cls(
config_file=opts.config,
workdir=opts.workdir,
cachedir=opts.cachedir,
logsdir=opts.logsdir,
device_id=opts.device_id,
pidfile=opts.pidfile,

@@ -34,7 +34,8 @@ class GoogleFitBackend(Backend):
"""
:param data_sources: Google Fit data source IDs to monitor. You can
get a list of the available data sources through the
:meth:`platypush.plugins.google.fit.get_data_sources` action
:meth:`platypush.plugins.google.fit.GoogleFitPlugin.get_data_sources`
action
:type data_sources: list[str]

:param user_id: Google user ID to track (default: 'me')

@@ -23,7 +23,7 @@ class MidiBackend(Backend):
"""
:param device_name: Name of the MIDI device. *N.B.* either
`device_name` or `port_number` must be set.
Use :meth:`platypush.plugins.midi.query_ports` to get the
Use :meth:`platypush.plugins.midi.MidiPlugin.query_ports` to get the
available ports indices and names
:type device_name: str

@@ -32,6 +32,14 @@ def parse_cmdline(args: Sequence[str]) -> argparse.Namespace:
help='Custom working directory to be used for the application',
)

parser.add_argument(
'--cachedir',
dest='cachedir',
required=False,
default=None,
help='Custom cache directory',
)

parser.add_argument(
'--device-id',
'-d',

@@ -1,3 +1,6 @@
from contextlib import contextmanager
from dataclasses import dataclass

from sqlalchemy import __version__

sa_version = tuple(map(int, __version__.split('.')))

@@ -8,3 +11,38 @@ else:
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


@dataclass
class DbContext:
"""
Context flags for the database session.
"""

override_definitions: bool = False


_ctx = DbContext()


@contextmanager
def override_definitions():
"""
Temporarily override the definitions of the entities in the entities
registry.

This is useful when the entities are being imported off-context, like
e.g. in the `inspect` or `alembic` modules.
"""
_ctx.override_definitions = True
yield
_ctx.override_definitions = False


def is_defined(table_name: str) -> bool:
"""
Check if the given entity class is defined in the entities registry.

:param table_name: Name of the table associated to the entity class.
"""
return not _ctx.override_definitions and table_name in Base.metadata

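A minimal sketch of how the new `is_defined()` guard is meant to be used when declaring entity models — the same pattern applied to the entity modules later in this changeset (the `ExampleSensor` entity and its table name are hypothetical):

```python
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import is_defined
from platypush.entities.devices import Device


# Declare the mapped class only if its table isn't already registered on
# Base.metadata (and we aren't inside an override_definitions() context).
if not is_defined('example_sensor'):

    class ExampleSensor(Device):  # hypothetical entity, for illustration only
        __tablename__ = 'example_sensor'

        id = Column(
            Integer, ForeignKey(Device.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {'polymorphic_identity': __tablename__}
```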
@@ -0,0 +1,7 @@
from ._model import Integration, Message


__all__ = [
"Integration",
"Message",
]

@@ -0,0 +1,16 @@
from .action import Action
from .argument import Argument
from .constructor import Constructor
from .integration import Integration
from .message import Message
from .returns import ReturnValue


__all__ = [
"Action",
"Argument",
"Constructor",
"Integration",
"Message",
"ReturnValue",
]

@@ -0,0 +1,7 @@
from .._parser import DocstringParser


class Action(DocstringParser):
"""
Represents an integration action.
"""

@@ -0,0 +1,27 @@
from dataclasses import dataclass
from typing import Optional, Type

from .._serialize import Serializable
from .._utils import type_str


@dataclass
class Argument(Serializable):
"""
Represents an integration constructor/action parameter.
"""

name: str
required: bool = False
doc: Optional[str] = None
type: Optional[Type] = None
default: Optional[str] = None

def to_dict(self) -> dict:
return {
"name": self.name,
"required": self.required,
"doc": self.doc,
"type": type_str(self.type),
"default": self.default,
}

@@ -0,0 +1,68 @@
from abc import ABC, abstractmethod
from typing import Dict, Type

from .argument import Argument


class Component(ABC):
"""
Abstract interface for all the application components exposed through the
`inspect` plugin.

It includes integrations (plugins and backends) and messages (events and
responses).
"""

@staticmethod
def _merge_params(params: Dict[str, Argument], new_params: Dict[str, Argument]):
"""
Utility function to merge a new mapping of parameters into an existing one.
"""
for param_name, param in new_params.items():
# Set the parameter if it doesn't exist
if param_name not in params:
params[param_name] = param

# Set the parameter documentation if it's not set
if param.doc and not params[param_name].doc:
params[param_name].doc = param.doc

# If the new parameter has required=False,
# then that should also be the value for the current ones
if param.required is False:
params[param_name].required = False

# If the new parameter has a default value, and the current
# one doesn't, then the default value should be set as the new one.
if param.default is not None and params[param_name].default is None:
params[param_name].default = param.default

@classmethod
@abstractmethod
def by_name(cls, name: str) -> "Component":
"""
:param name: Component type name.
:return: A parsed component class given its name/type name.
"""

@classmethod
@abstractmethod
def by_type(cls, type: Type) -> "Component":
"""
:param type: Component type.
:return: A parsed component class given its type.
"""

@property
@abstractmethod
def cls(self) -> Type:
"""
:return: The class of a component.
"""

@property
@abstractmethod
def doc_url(self) -> str:
"""
:return: The URL of the documentation of the component.
"""

@@ -0,0 +1 @@
doc_base_url = 'https://docs.platypush.tech/platypush'

@@ -0,0 +1,23 @@
from typing import Union, Type, Callable

from .._parser import DocstringParser


class Constructor(DocstringParser):
"""
Represents an integration constructor.
"""

@classmethod
def parse(cls, obj: Union[Type, Callable]) -> "Constructor":
"""
Parse the parameters of a class constructor or action method.

:param obj: Base type of the object.
:return: The parsed parameters.
"""
init = getattr(obj, "__init__", None)
if init and callable(init):
return super().parse(init)

return super().parse(obj)

@@ -4,49 +4,25 @@ import os
import re
import textwrap as tw
from dataclasses import dataclass, field
from importlib.machinery import SourceFileLoader
from importlib.util import spec_from_loader, module_from_spec
from typing import Optional, Type, Union, Callable, Dict, Set
from typing import Type, Optional, Dict, Set

from platypush.utils import (
get_backend_class_by_name,
get_backend_name_by_class,
get_plugin_class_by_name,
get_plugin_name_by_class,
get_backend_name_by_class,
get_decorators,
)
from platypush.utils.manifest import Manifest, ManifestType, Dependencies
from platypush.utils.reflection._parser import DocstringParser, Parameter


class Action(DocstringParser):
"""
Represents an integration action.
"""


class Constructor(DocstringParser):
"""
Represents an integration constructor.
"""

@classmethod
def parse(cls, obj: Union[Type, Callable]) -> "Constructor":
"""
Parse the parameters of a class constructor or action method.

:param obj: Base type of the object.
:return: The parsed parameters.
"""
init = getattr(obj, "__init__", None)
if init and callable(init):
return super().parse(init)

return super().parse(obj)
from .._serialize import Serializable
from . import Constructor, Action
from .component import Component
from .constants import doc_base_url


@dataclass
class IntegrationMetadata:
class Integration(Component, Serializable):
"""
Represents the metadata of an integration (plugin or backend).
"""

@@ -62,32 +38,44 @@ class IntegrationMetadata:
_skip_manifest: bool = False

def __post_init__(self):
"""
Initialize the manifest object.
"""
if not self._skip_manifest:
self._init_manifest()

@staticmethod
def _merge_params(params: Dict[str, Parameter], new_params: Dict[str, Parameter]):
"""
Utility function to merge a new mapping of parameters into an existing one.
"""
for param_name, param in new_params.items():
# Set the parameter if it doesn't exist
if param_name not in params:
params[param_name] = param

# Set the parameter documentation if it's not set
if param.doc and not params[param_name].doc:
params[param_name].doc = param.doc

# If the new parameter has required=False,
# then that should also be the value for the current ones
if param.required is False:
params[param_name].required = False

# If the new parameter has a default value, and the current
# one doesn't, then the default value should be set as the new one.
if param.default is not None and params[param_name].default is None:
params[param_name].default = param.default
def to_dict(self) -> dict:
return {
"name": self.name,
"type": f"{self.type.__module__}.{self.type.__qualname__}",
"doc": self.doc,
"doc_url": self.doc_url,
"args": {
**(
{name: arg.to_dict() for name, arg in self.constructor.args.items()}
if self.constructor
else {}
),
},
"actions": {
k: {
"doc_url": f"{self.doc_url}#{self.cls.__module__}.{self.cls.__qualname__}.{k}",
**v.to_dict(),
}
for k, v in self.actions.items()
if self.cls
},
"events": {
f"{e.__module__}.{e.__qualname__}": {
"doc": inspect.getdoc(e),
"doc_url": f"{doc_base_url}/events/"
+ ".".join(e.__module__.split(".")[3:])
+ f".html#{e.__module__}.{e.__qualname__}",
}
for e in self.events
},
"deps": self.deps.to_dict(),
}

@classmethod
def _merge_actions(cls, actions: Dict[str, Action], new_actions: Dict[str, Action]):

@@ -104,7 +92,7 @@ class IntegrationMetadata:
actions[action_name].doc = action.doc

# Merge the parameters
cls._merge_params(actions[action_name].params, action.params)
cls._merge_params(actions[action_name].args, action.args)

@classmethod
def _merge_events(cls, events: Set[Type], new_events: Set[Type]):

@@ -114,7 +102,7 @@ class IntegrationMetadata:
events.update(new_events)

@classmethod
def by_name(cls, name: str) -> "IntegrationMetadata":
def by_name(cls, name: str) -> "Integration":
"""
:param name: Integration name.
:return: A parsed Integration class given its type.

@@ -127,7 +115,7 @@ class IntegrationMetadata:
return cls.by_type(type)

@classmethod
def by_type(cls, type: Type, _skip_manifest: bool = False) -> "IntegrationMetadata":
def by_type(cls, type: Type, _skip_manifest: bool = False) -> "Integration":
"""
:param type: Integration type (plugin or backend).
:param _skip_manifest: Whether we should skip parsing the manifest file for this integration

@@ -167,7 +155,7 @@ class IntegrationMetadata:
p_obj = cls.by_type(p_type, _skip_manifest=True)
# Merge constructor parameters
if obj.constructor and p_obj.constructor:
cls._merge_params(obj.constructor.params, p_obj.constructor.params)
cls._merge_params(obj.constructor.args, p_obj.constructor.args)

# Merge actions
cls._merge_actions(obj.actions, p_obj.actions)

@@ -194,8 +182,25 @@ class IntegrationMetadata:

return getter(".".join(self.manifest.package.split(".")[2:]))

@property
def base_type(self) -> Type:
"""
:return: The base type of this integration, either :class:`platypush.backend.Backend` or
:class:`platypush.plugins.Plugin`.
"""
from platypush.backend import Backend
from platypush.plugins import Plugin

assert self.cls, f'No class found for integration {self.name}'
if issubclass(self.cls, Plugin):
return Plugin
if issubclass(self.cls, Backend):
return Backend

raise AssertionError(f"Unknown base type for {self.cls}")

@classmethod
def from_manifest(cls, manifest_file: str) -> "IntegrationMetadata":
def from_manifest(cls, manifest_file: str) -> "Integration":
"""
Create an `IntegrationMetadata` object from a manifest file.

@@ -302,27 +307,26 @@ class IntegrationMetadata:
else ""
)
+ "\n"
for name, param in self.constructor.params.items()
for name, param in self.constructor.args.items()
)
if self.constructor and self.constructor.params
if self.constructor and self.constructor.args
else " # No configuration required\n"
)
)

@property
def doc_url(self) -> str:
"""
:return: URL of the documentation for the integration.
"""
from platypush.backend import Backend
from platypush.plugins import Plugin

def import_file(path: str, name: Optional[str] = None):
"""
Import a Python file as a module, even if no __init__.py is
defined in the directory.
if issubclass(self.type, Plugin):
section = 'plugins'
elif issubclass(self.type, Backend):
section = 'backend'
else:
raise AssertionError(f'Unknown integration type {self.type}')

:param path: Path of the file to import.
:param name: Custom name for the imported module (default: same as the file's basename).
:return: The imported module.
"""
name = name or re.split(r"\.py$", os.path.basename(path))[0]
loader = SourceFileLoader(name, os.path.expanduser(path))
mod_spec = spec_from_loader(name, loader)
assert mod_spec, f"Cannot create module specification for {path}"
mod = module_from_spec(mod_spec)
loader.exec_module(mod)
return mod
return f"{doc_base_url}/{section}/{self.name}.html"

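A brief usage sketch for the renamed `Integration` class (the `light.hue` integration name is only an example):

```python
from platypush.common.reflection import Integration

# Parse an integration by its configuration name and dump its metadata
# (constructor args, actions, events, dependencies, doc URLs) to a dict.
integration = Integration.by_name('light.hue')  # example name
metadata = integration.to_dict()
print(metadata['doc_url'], sorted(metadata['actions']))
```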
@@ -0,0 +1,109 @@
import contextlib
import importlib
import inspect
from dataclasses import dataclass
from typing import Type, Optional

from .._serialize import Serializable
from . import Constructor
from .component import Component
from .constants import doc_base_url


@dataclass
class Message(Component, Serializable):
"""
Represents the metadata of a message type (event or response).
"""

name: str
type: Type
doc: Optional[str] = None
constructor: Optional[Constructor] = None

def to_dict(self) -> dict:
return {
"name": self.name,
"type": f"{self.type.__module__}.{self.type.__qualname__}",
"doc": self.doc,
"doc_url": self.doc_url,
"args": {
**(
{name: arg.to_dict() for name, arg in self.constructor.args.items()}
if self.constructor
else {}
),
},
}

@classmethod
def by_name(cls, name: str) -> "Message":
"""
:param name: Message type name.
:return: A parsed message class given its type.
"""
return cls.by_type(cls._get_cls(name))

@classmethod
def by_type(cls, type: Type) -> "Message":
"""
:param type: Message type.
:return: A parsed message class given its type.
"""
from platypush.message import Message as MessageClass

assert issubclass(type, MessageClass), f"Expected a Message class, got {type}"
obj = cls(
name=f'{type.__module__}.{type.__qualname__}',
type=type,
doc=inspect.getdoc(type),
constructor=Constructor.parse(type),
)

for p_type in inspect.getmro(type)[1:]:
# Don't go upper in the hierarchy.
if p_type == type:
break

with contextlib.suppress(AssertionError):
p_obj = cls.by_type(p_type)
# Merge constructor parameters
if obj.constructor and p_obj.constructor:
cls._merge_params(obj.constructor.args, p_obj.constructor.args)

return obj

@property
def cls(self) -> Type:
"""
:return: The class of a message.
"""
return self._get_cls(self.name)

@staticmethod
def _get_cls(name: str) -> Type:
"""
:param name: Full qualified type name, module included.
:return: The associated class.
"""
tokens = name.split(".")
module = importlib.import_module(".".join(tokens[:-1]))
return getattr(module, tokens[-1])

@property
def doc_url(self) -> str:
"""
:return: URL of the documentation for the message.
"""
from platypush.message.event import Event
from platypush.message.response import Response

if issubclass(self.type, Event):
section = 'events'
elif issubclass(self.type, Response):
section = 'responses'
else:
raise AssertionError(f'Unknown message type {self.type}')

mod_name = '.'.join(self.name.split('.')[3:-1])
return f"{doc_base_url}/{section}/{mod_name}.html#{self.name}"

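A similar sketch for `Message` metadata, assuming an existing event class (the event class used here is only an illustration):

```python
from platypush.common.reflection import Message
from platypush.message.event.assistant import ConversationStartEvent

# Parse an event class and expose its constructor arguments and doc URL.
meta = Message.by_type(ConversationStartEvent)
print(meta.doc_url)
print(meta.to_dict()['args'])
```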
@@ -0,0 +1,21 @@
from dataclasses import dataclass
from typing import Optional, Type

from .._serialize import Serializable
from .._utils import type_str


@dataclass
class ReturnValue(Serializable):
"""
Represents the return value of an action.
"""

doc: Optional[str] = None
type: Optional[Type] = None

def to_dict(self) -> dict:
return {
"doc": self.doc,
"type": type_str(self.type),
}

@@ -0,0 +1,6 @@
from .docstring import DocstringParser


__all__ = [
"DocstringParser",
]

@@ -0,0 +1,76 @@
import inspect
import textwrap as tw
from dataclasses import dataclass, field
from typing import (
Any,
Callable,
Iterable,
List,
Optional,
Tuple,
Type,
get_type_hints,
)

from .._model.argument import Argument
from .._model.returns import ReturnValue
from .state import ParseState


@dataclass
class ParseContext:
"""
Runtime parsing context.
"""

obj: Callable
state: ParseState = ParseState.DOC
cur_param: Optional[str] = None
doc: Optional[str] = None
returns: ReturnValue = field(default_factory=ReturnValue)
parsed_params: dict[str, Argument] = field(default_factory=dict)

def __post_init__(self):
"""
Initialize the return type and parameters from the function annotations.
"""

# Initialize the return type from the annotations
annotations = getattr(self.obj, "__annotations__", {})
if annotations:
self.returns.type = annotations.get("return")

# Initialize the parameters from the signature
spec = inspect.getfullargspec(self.obj)
defaults = spec.defaults or ()
defaults = defaults + ((Any,) * (len(self.param_names) - len(defaults or ())))
self.parsed_params = {
name: Argument(
name=name,
type=self.param_types.get(name),
default=default if default is not Any else None,
required=default is Any,
)
for name, default in zip(self.param_names, defaults)
}

@property
def spec(self) -> inspect.FullArgSpec:
return inspect.getfullargspec(self.obj)

@property
def param_names(self) -> List[str]:
return list(self.spec.args[1:])

@property
def param_defaults(self) -> Tuple[Any]:
defaults = self.spec.defaults or ()
return ((Any,) * (len(self.spec.args[1:]) - len(defaults))) + defaults

@property
def param_types(self) -> dict[str, Type]:
return get_type_hints(self.obj)

@property
def doc_lines(self) -> Iterable[str]:
return tw.dedent(inspect.getdoc(self.obj) or "").split("\n")

@ -1,97 +1,17 @@
|
|||
import inspect
|
||||
import re
|
||||
import textwrap as tw
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass, field
|
||||
from enum import IntEnum
|
||||
from typing import (
|
||||
Any,
|
||||
Optional,
|
||||
Iterable,
|
||||
Type,
|
||||
get_type_hints,
|
||||
Callable,
|
||||
Tuple,
|
||||
Generator,
|
||||
Dict,
|
||||
)
|
||||
from typing import Callable, Dict, Generator, Optional
|
||||
|
||||
from .._model.argument import Argument
|
||||
from .._model.returns import ReturnValue
|
||||
from .._serialize import Serializable
|
||||
from .context import ParseContext
|
||||
from .rst import RstExtensionsMixin
|
||||
from .state import ParseState
|
||||
|
||||
|
||||
@dataclass
|
||||
class ReturnValue:
|
||||
"""
|
||||
Represents the return value of an action.
|
||||
"""
|
||||
|
||||
doc: Optional[str] = None
|
||||
type: Optional[Type] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class Parameter:
|
||||
"""
|
||||
Represents an integration constructor/action parameter.
|
||||
"""
|
||||
|
||||
name: str
|
||||
required: bool = False
|
||||
doc: Optional[str] = None
|
||||
type: Optional[Type] = None
|
||||
default: Optional[str] = None
|
||||
|
||||
|
||||
class ParseState(IntEnum):
|
||||
"""
|
||||
Parse state.
|
||||
"""
|
||||
|
||||
DOC = 0
|
||||
PARAM = 1
|
||||
TYPE = 2
|
||||
RETURN = 3
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParseContext:
|
||||
"""
|
||||
Runtime parsing context.
|
||||
"""
|
||||
|
||||
obj: Callable
|
||||
state: ParseState = ParseState.DOC
|
||||
cur_param: Optional[str] = None
|
||||
doc: Optional[str] = None
|
||||
returns: ReturnValue = field(default_factory=ReturnValue)
|
||||
parsed_params: dict[str, Parameter] = field(default_factory=dict)
|
||||
|
||||
def __post_init__(self):
|
||||
annotations = getattr(self.obj, "__annotations__", {})
|
||||
if annotations:
|
||||
self.returns.type = annotations.get("return")
|
||||
|
||||
@property
|
||||
def spec(self) -> inspect.FullArgSpec:
|
||||
return inspect.getfullargspec(self.obj)
|
||||
|
||||
@property
|
||||
def param_names(self) -> Iterable[str]:
|
||||
return self.spec.args[1:]
|
||||
|
||||
@property
|
||||
def param_defaults(self) -> Tuple[Any]:
|
||||
defaults = self.spec.defaults or ()
|
||||
return ((Any,) * (len(self.spec.args[1:]) - len(defaults))) + defaults
|
||||
|
||||
@property
|
||||
def param_types(self) -> dict[str, Type]:
|
||||
return get_type_hints(self.obj)
|
||||
|
||||
@property
|
||||
def doc_lines(self) -> Iterable[str]:
|
||||
return tw.dedent(inspect.getdoc(self.obj) or "").split("\n")
|
||||
|
||||
|
||||
class DocstringParser:
|
||||
class DocstringParser(Serializable, RstExtensionsMixin):
|
||||
"""
|
||||
Mixin for objects that can parse docstrings.
|
||||
"""
|
||||
|
@ -105,14 +25,42 @@ class DocstringParser:
|
|||
self,
|
||||
name: str,
|
||||
doc: Optional[str] = None,
|
||||
params: Optional[Dict[str, Parameter]] = None,
|
||||
args: Optional[Dict[str, Argument]] = None,
|
||||
has_varargs: bool = False,
|
||||
has_kwargs: bool = False,
|
||||
returns: Optional[ReturnValue] = None,
|
||||
):
|
||||
self.name = name
|
||||
self.doc = doc
|
||||
self.params = params or {}
|
||||
self.args = args or {}
|
||||
self.has_varargs = has_varargs
|
||||
self.has_kwargs = has_kwargs
|
||||
self.returns = returns
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
return {
|
||||
"name": self.name,
|
||||
"doc": self.doc,
|
||||
"args": {k: v.to_dict() for k, v in self.args.items()},
|
||||
"has_varargs": self.has_varargs,
|
||||
"has_kwargs": self.has_kwargs,
|
||||
"returns": self.returns.to_dict() if self.returns else None,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _norm_indent(text: Optional[str]) -> Optional[str]:
|
||||
"""
|
||||
Normalize the indentation of a docstring.
|
||||
|
||||
:param text: Input docstring
|
||||
:return: A representation of the docstring where all the leading spaces have been removed.
|
||||
"""
|
||||
if not text:
|
||||
return None
|
||||
|
||||
lines = text.split("\n")
|
||||
return (lines[0] + "\n" + tw.dedent("\n".join(lines[1:]) or "")).strip()
|
||||
|
||||
@classmethod
|
||||
@contextmanager
|
||||
def _parser(cls, obj: Callable) -> Generator[ParseContext, None, None]:
|
||||
|
@ -123,28 +71,15 @@ class DocstringParser:
|
|||
:return: The parsing context.
|
||||
"""
|
||||
|
||||
def norm_indent(text: Optional[str]) -> Optional[str]:
|
||||
"""
|
||||
Normalize the indentation of a docstring.
|
||||
|
||||
:param text: Input docstring
|
||||
:return: A representation of the docstring where all the leading spaces have been removed.
|
||||
"""
|
||||
if not text:
|
||||
return None
|
||||
|
||||
lines = text.split("\n")
|
||||
return (lines[0] + "\n" + tw.dedent("\n".join(lines[1:]) or "")).strip()
|
||||
|
||||
ctx = ParseContext(obj)
|
||||
yield ctx
|
||||
|
||||
# Normalize the parameters docstring indentation
|
||||
for param in ctx.parsed_params.values():
|
||||
param.doc = norm_indent(param.doc)
|
||||
param.doc = cls._norm_indent(param.doc)
|
||||
|
||||
# Normalize the return docstring indentation
|
||||
ctx.returns.doc = norm_indent(ctx.returns.doc)
|
||||
ctx.returns.doc = cls._norm_indent(ctx.returns.doc)
|
||||
|
||||
@staticmethod
|
||||
def _is_continuation_line(line: str) -> bool:
|
||||
|
@ -169,6 +104,9 @@ class DocstringParser:
|
|||
if cls._default_docstring.match(line):
|
||||
return
|
||||
|
||||
# Expand any custom RST extensions
|
||||
line = cls._expand_rst_extensions(line, ctx)
|
||||
|
||||
# Update the return type docstring if required
|
||||
m = cls._return_doc_re.match(line)
|
||||
if m or (ctx.state == ParseState.RETURN and cls._is_continuation_line(line)):
|
||||
|
@ -178,28 +116,17 @@ class DocstringParser:
|
|||
).rstrip()
|
||||
return
|
||||
|
||||
# Create a new parameter entry if the docstring says so
|
||||
# Initialize the documentation of a parameter on :param: docstring lines
|
||||
m = cls._param_doc_re.match(line)
|
||||
if m:
|
||||
if m and ctx.parsed_params.get(m.group("name")):
|
||||
ctx.state = ParseState.PARAM
|
||||
idx = len(ctx.parsed_params)
|
||||
ctx.cur_param = m.group("name")
|
||||
|
||||
# Skip vararg/var keyword parameters
|
||||
if ctx.cur_param in {ctx.spec.varkw, ctx.spec.varargs}:
|
||||
return
|
||||
|
||||
ctx.parsed_params[ctx.cur_param] = Parameter(
|
||||
name=ctx.cur_param,
|
||||
required=(
|
||||
idx >= len(ctx.param_defaults) or ctx.param_defaults[idx] is Any
|
||||
),
|
||||
doc=m.group("doc"),
|
||||
type=ctx.param_types.get(ctx.cur_param),
|
||||
default=ctx.param_defaults[idx]
|
||||
if idx < len(ctx.param_defaults) and ctx.param_defaults[idx] is not Any
|
||||
else None,
|
||||
)
|
||||
ctx.parsed_params[ctx.cur_param].doc = m.group("doc")
|
||||
return
|
||||
|
||||
# Update the current parameter docstring if required
|
||||
|
@ -236,6 +163,8 @@ class DocstringParser:
|
|||
return cls(
|
||||
name=obj.__name__,
|
||||
doc=ctx.doc,
|
||||
params=ctx.parsed_params,
|
||||
args=ctx.parsed_params,
|
||||
has_varargs=ctx.spec.varargs is not None,
|
||||
has_kwargs=ctx.spec.varkw is not None,
|
||||
returns=ctx.returns,
|
||||
)
|
|
@ -0,0 +1,162 @@
|
|||
import importlib
|
||||
import logging
|
||||
import re
|
||||
import textwrap as tw
|
||||
|
||||
from .._model.constants import doc_base_url
|
||||
from .context import ParseContext
|
||||
|
||||
|
||||
# pylint: disable=too-few-public-methods
|
||||
class RstExtensionsMixin:
|
||||
"""
|
||||
Mixin class for handling non-standard reStructuredText extensions.
|
||||
"""
|
||||
|
||||
_rst_extensions = {
|
||||
name: re.compile(regex)
|
||||
for name, regex in {
|
||||
"class": "(:class:`(?P<name>[^`]+)`)",
|
||||
"method": "(:meth:`(?P<name>[^`]+)`)",
|
||||
"function": "(:func:`(?P<name>[^`]+)`)",
|
||||
"schema": r"^((?P<indent>\s*)(?P<before>.*)"
|
||||
r"(\.\. schema:: (?P<name>[\w.]+)\s*"
|
||||
r"(\((?P<args>.+?)\))?)(?P<after>.*))$",
|
||||
}.items()
|
||||
}
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@classmethod
|
||||
def _expand_rst_extensions(cls, docstr: str, ctx: ParseContext) -> str:
|
||||
"""
|
||||
Expand the known reStructuredText extensions in a docstring.
|
||||
"""
|
||||
for ex_name, regex in cls._rst_extensions.items():
|
||||
match = regex.search(docstr)
|
||||
if not match:
|
||||
continue
|
||||
|
||||
try:
|
||||
docstr = (
|
||||
cls._expand_schema(docstr, match)
|
||||
if ex_name == "schema"
|
||||
else cls._expand_module(docstr, ex_name, match, ctx)
|
||||
)
|
||||
except Exception as e:
|
||||
cls.logger.warning(
|
||||
"Could not import module %s: %s", match.group("name"), e
|
||||
)
|
||||
continue
|
||||
|
||||
return docstr
|
||||
|
||||
@classmethod
|
||||
def _expand_schema(cls, docstr: str, match: re.Match) -> str:
|
||||
from marshmallow import missing
|
||||
from marshmallow.validate import OneOf
|
||||
|
||||
value = match.group("name")
|
||||
mod = importlib.import_module(
|
||||
"platypush.schemas." + ".".join(value.split(".")[:-1])
|
||||
)
|
||||
obj_cls = getattr(mod, value.split(".")[-1])
|
||||
schema_args = (
|
||||
eval(f'dict({match.group("args")})') if match.group("args") else {}
|
||||
)
|
||||
obj = obj_cls(**schema_args)
|
||||
|
||||
schema_doc = tw.indent(
|
||||
".. code-block:: python\n\n"
|
||||
+ tw.indent(
|
||||
("[" if obj.many else "")
|
||||
+ "{\n"
|
||||
+ tw.indent(
|
||||
"\n".join(
|
||||
(
|
||||
(
|
||||
"# " + field.metadata["description"] + "\n"
|
||||
if field.metadata.get("description")
|
||||
else ""
|
||||
)
|
||||
+ (
|
||||
"# Possible values: "
|
||||
+ str(field.validate.choices)
|
||||
+ "\n"
|
||||
if isinstance(field.validate, OneOf)
|
||||
else ""
|
||||
)
|
||||
+ f'"{field_name}": '
|
||||
+ (
|
||||
(
|
||||
'"'
|
||||
+ field.metadata.get("example", field.default)
|
||||
+ '"'
|
||||
if isinstance(
|
||||
field.metadata.get("example", field.default),
|
||||
str,
|
||||
)
|
||||
else str(
|
||||
field.metadata.get("example", field.default)
|
||||
)
|
||||
)
|
||||
if not (
|
||||
field.metadata.get("example") is None
|
||||
and field.default is missing
|
||||
)
|
||||
else "..."
|
||||
)
|
||||
)
|
||||
for field_name, field in obj.fields.items()
|
||||
),
|
||||
prefix=" ",
|
||||
)
|
||||
+ "\n}"
|
||||
+ ("]" if obj.many else ""),
|
||||
prefix=" ",
|
||||
),
|
||||
prefix=match.group("indent") + " ",
|
||||
)
|
||||
|
||||
docstr = docstr.replace(
|
||||
match.group(0),
|
||||
match.group("before") + "\n\n" + schema_doc + "\n\n" + match.group("after"),
|
||||
)
|
||||
|
||||
return docstr
|
||||
|
||||
@classmethod
|
||||
def _expand_module(
|
||||
cls, docstr: str, ex_name: str, match: re.Match, ctx: ParseContext
|
||||
) -> str:
|
||||
value = match.group("name")
|
||||
if value.startswith("."):
|
||||
modname = ctx.obj.__module__ # noqa
|
||||
obj_name = ctx.obj.__qualname__
|
||||
elif ex_name == "method":
|
||||
modname = ".".join(value.split(".")[:-2])
|
||||
obj_name = ".".join(value.split(".")[-2:])
|
||||
else:
|
||||
modname = ".".join(value.split(".")[:-1])
|
||||
obj_name = value.split(".")[-1]
|
||||
|
||||
url_path = None
|
||||
|
||||
if modname.startswith("platypush.plugins"):
|
||||
url_path = "plugins/" + ".".join(modname.split(".")[2:])
|
||||
elif modname.startswith("platypush.backend"):
|
||||
url_path = "backends/" + ".".join(modname.split(".")[2:])
|
||||
elif modname.startswith("platypush.message.event"):
|
||||
url_path = "events/" + ".".join(modname.split(".")[3:])
|
||||
elif modname.startswith("platypush.message.response"):
|
||||
url_path = "responses/" + ".".join(modname.split(".")[3:])
|
||||
|
||||
if url_path:
|
||||
docstr = docstr.replace(
|
||||
match.group(0),
|
||||
f"`{obj_name} <{doc_base_url}/{url_path}.html#{modname}.{obj_name}>`_",
|
||||
)
|
||||
else:
|
||||
docstr = docstr.replace(match.group(0), f"``{value}``")
|
||||
|
||||
return docstr
|
|
@@ -0,0 +1,12 @@
from enum import IntEnum


class ParseState(IntEnum):
"""
Parse state.
"""

DOC = 0
PARAM = 1
TYPE = 2
RETURN = 3

@@ -0,0 +1,14 @@
from abc import ABC, abstractmethod


class Serializable(ABC):
"""
Base class for reflection entities that can be serialized to JSON/YAML.
"""

@abstractmethod
def to_dict(self) -> dict:
"""
Serialize the entity to a string.
"""
raise NotImplementedError()

@@ -0,0 +1,12 @@
import re
from typing import Optional, Type


def type_str(t: Optional[Type]) -> Optional[str]:
"""
:return: A human-readable representation of a type.
"""
if not t:
return None

return re.sub(r"<class '(.*)'>", r'\1', str(t).replace('typing.', ''))

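A quick illustration of what `type_str` is expected to produce (the import path is inferred from the relative imports above and may differ):

```python
from typing import Optional

from platypush.common.reflection._utils import type_str  # inferred module path

print(type_str(int))            # 'int' - strips the "<class '...'>" wrapper
print(type_str(Optional[int]))  # 'Optional[int]' - drops the 'typing.' prefix
print(type_str(None))           # None
```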
@ -56,17 +56,48 @@ class Config:
|
|||
'now': datetime.datetime.now,
|
||||
}
|
||||
|
||||
# Default working directory:
|
||||
# - $XDG_DATA_HOME/platypush if XDG_DATA_HOME is set
|
||||
# - /var/lib/platypush if the user is root
|
||||
# - $HOME/.local/share/platypush otherwise
|
||||
_workdir_location = os.path.join(
|
||||
*(
|
||||
(os.environ['XDG_DATA_HOME'], 'platypush')
|
||||
(os.environ['XDG_DATA_HOME'],)
|
||||
if os.environ.get('XDG_DATA_HOME')
|
||||
else (os.path.expanduser('~'), '.local', 'share', 'platypush')
|
||||
)
|
||||
else (
|
||||
(os.sep, 'var', 'lib')
|
||||
if os.geteuid() == 0
|
||||
else (os.path.expanduser('~'), '.local', 'share')
|
||||
)
|
||||
),
|
||||
'platypush',
|
||||
)
|
||||
|
||||
# Default cache directory:
|
||||
# - $XDG_CACHE_DIR/platypush if XDG_CACHE_DIR is set
|
||||
# - /var/cache/platypush if the user is root
|
||||
# - $HOME/.cache/platypush otherwise
|
||||
_cachedir_location = os.path.join(
|
||||
*(
|
||||
(os.environ['XDG_CACHE_DIR'],)
|
||||
if os.environ.get('XDG_CACHE_DIR')
|
||||
else (
|
||||
(os.sep, 'var', 'cache')
|
||||
if os.geteuid() == 0
|
||||
else (os.path.expanduser('~'), '.cache')
|
||||
)
|
||||
),
|
||||
'platypush',
|
||||
)
|
||||
|
||||
_included_files: Set[str] = set()
|
||||
|
||||
def __init__(self, cfgfile: Optional[str] = None, workdir: Optional[str] = None):
|
||||
def __init__(
|
||||
self,
|
||||
cfgfile: Optional[str] = None,
|
||||
workdir: Optional[str] = None,
|
||||
cachedir: Optional[str] = None,
|
||||
):
|
||||
"""
|
||||
Constructor. Always use the class as a singleton (i.e. through
|
||||
Config.init), you won't probably need to call the constructor directly
|
||||
|
@ -74,6 +105,7 @@ class Config:
|
|||
:param cfgfile: Config file path (default: retrieve the first available
|
||||
location in _cfgfile_locations).
|
||||
:param workdir: Overrides the default working directory.
|
||||
:param cachedir: Overrides the default cache directory.
|
||||
"""
|
||||
|
||||
self.backends = {}
|
||||
|
@ -91,7 +123,7 @@ class Config:
|
|||
self._config = self._read_config_file(self.config_file)
|
||||
|
||||
self._init_secrets()
|
||||
self._init_dirs(workdir=workdir)
|
||||
self._init_dirs(workdir=workdir, cachedir=cachedir)
|
||||
self._init_db()
|
||||
self._init_logging()
|
||||
self._init_device_id()
|
||||
|
@ -168,29 +200,32 @@ class Config:
|
|||
for k, v in self._config['environment'].items():
|
||||
os.environ[k] = str(v)
|
||||
|
||||
def _init_dirs(self, workdir: Optional[str] = None):
|
||||
def _init_workdir(self, workdir: Optional[str] = None):
|
||||
if workdir:
|
||||
self._config['workdir'] = workdir
|
||||
if not self._config.get('workdir'):
|
||||
self._config['workdir'] = self._workdir_location
|
||||
|
||||
self._config['workdir'] = os.path.expanduser(
|
||||
os.path.expanduser(self._config['workdir'])
|
||||
)
|
||||
self._config['workdir'] = os.path.expanduser(self._config['workdir'])
|
||||
pathlib.Path(self._config['workdir']).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _init_cachedir(self, cachedir: Optional[str] = None):
|
||||
if cachedir:
|
||||
self._config['cachedir'] = cachedir
|
||||
if not self._config.get('cachedir'):
|
||||
self._config['cachedir'] = self._cachedir_location
|
||||
|
||||
self._config['cachedir'] = os.path.expanduser(self._config['cachedir'])
|
||||
pathlib.Path(self._config['cachedir']).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _init_scripts_dir(self):
|
||||
# Create the scripts directory if it doesn't exist
|
||||
if 'scripts_dir' not in self._config:
|
||||
self._config['scripts_dir'] = os.path.join(
|
||||
os.path.dirname(self.config_file), 'scripts'
|
||||
)
|
||||
os.makedirs(self._config['scripts_dir'], mode=0o755, exist_ok=True)
|
||||
|
||||
if 'dashboards_dir' not in self._config:
|
||||
self._config['dashboards_dir'] = os.path.join(
|
||||
os.path.dirname(self.config_file), 'dashboards'
|
||||
)
|
||||
os.makedirs(self._config['dashboards_dir'], mode=0o755, exist_ok=True)
|
||||
|
||||
# Create a default (empty) __init__.py in the scripts folder
|
||||
init_py = os.path.join(self._config['scripts_dir'], '__init__.py')
|
||||
if not os.path.isfile(init_py):
|
||||
|
@ -204,6 +239,19 @@ class Config:
|
|||
)
|
||||
sys.path = [scripts_parent_dir] + sys.path
|
||||
|
||||
def _init_dashboards_dir(self):
|
||||
if 'dashboards_dir' not in self._config:
|
||||
self._config['dashboards_dir'] = os.path.join(
|
||||
os.path.dirname(self.config_file), 'dashboards'
|
||||
)
|
||||
os.makedirs(self._config['dashboards_dir'], mode=0o755, exist_ok=True)
|
||||
|
||||
def _init_dirs(self, workdir: Optional[str] = None, cachedir: Optional[str] = None):
|
||||
self._init_workdir(workdir=workdir)
|
||||
self._init_cachedir(cachedir=cachedir)
|
||||
self._init_scripts_dir()
|
||||
self._init_dashboards_dir()
|
||||
|
||||
def _init_secrets(self):
|
||||
if 'token' in self._config:
|
||||
self._config['token_hash'] = get_hash(self._config['token'])
|
||||
|
@ -425,6 +473,7 @@ class Config:
|
|||
cls,
|
||||
cfgfile: Optional[str] = None,
|
||||
workdir: Optional[str] = None,
|
||||
cachedir: Optional[str] = None,
|
||||
force_reload: bool = False,
|
||||
) -> "Config":
|
||||
"""
|
||||
|
@ -432,7 +481,7 @@ class Config:
|
|||
"""
|
||||
if force_reload or cls._instance is None:
|
||||
cfg_args = [cfgfile] if cfgfile else []
|
||||
cls._instance = Config(*cfg_args, workdir=workdir)
|
||||
cls._instance = Config(*cfg_args, workdir=workdir, cachedir=cachedir)
|
||||
return cls._instance
|
||||
|
||||
@classmethod
|
||||
|
@ -496,6 +545,7 @@ class Config:
|
|||
cfgfile: Optional[str] = None,
|
||||
device_id: Optional[str] = None,
|
||||
workdir: Optional[str] = None,
|
||||
cachedir: Optional[str] = None,
|
||||
ctrl_sock: Optional[str] = None,
|
||||
**_,
|
||||
):
|
||||
|
@ -505,13 +555,18 @@ class Config:
|
|||
:param cfgfile: Path to the config file (default: _cfgfile_locations)
|
||||
:param device_id: Override the configured device_id.
|
||||
:param workdir: Override the configured working directory.
|
||||
:param cachedir: Override the configured cache directory.
|
||||
:param ctrl_sock: Override the configured control socket.
|
||||
"""
|
||||
cfg = cls._get_instance(cfgfile, workdir=workdir, force_reload=True)
|
||||
cfg = cls._get_instance(
|
||||
cfgfile, workdir=workdir, cachedir=cachedir, force_reload=True
|
||||
)
|
||||
if device_id:
|
||||
cfg.set('device_id', device_id)
|
||||
if workdir:
|
||||
cfg.set('workdir', workdir)
|
||||
if cachedir:
|
||||
cfg.set('cachedir', cachedir)
|
||||
if ctrl_sock:
|
||||
cfg.set('ctrl_sock', ctrl_sock)
|
||||
|
||||
|
@ -526,6 +581,15 @@ class Config:
|
|||
assert workdir
|
||||
return workdir # type: ignore
|
||||
|
||||
@classmethod
|
||||
def get_cachedir(cls) -> str:
|
||||
"""
|
||||
:return: The path of the configured cache directory.
|
||||
"""
|
||||
workdir = cls._get_instance().get('cachedir')
|
||||
assert workdir
|
||||
return workdir # type: ignore
|
||||
|
||||
@classmethod
|
||||
def get(cls, key: Optional[str] = None, default: Optional[Any] = None):
|
||||
"""
|
||||
|
|
|
@@ -55,11 +55,29 @@
# # If not specified, then one of the following will be used:
# #
# # - $XDG_DATA_HOME/platypush if the XDG_DATA_HOME environment variable is set.
# # - /var/lib/platypush if the user is root.
# # - $HOME/.local/share/platypush otherwise.
#
# workdir: ~/.local/share/platypush
###

### -----------------
### Cache directory
### -----------------

###
# # Note that the cache directory can also be specified at runtime using the
# # --cachedir option.
# #
# # If not specified, then one of the following will be used:
# #
# # - $XDG_CACHE_DIR/platypush if the XDG_CACHE_DIR environment variable is set.
# # - /var/cache/platypush if the user is root.
# # - $HOME/.cache/platypush otherwise.
#
# cachedir: ~/.cache/platypush
###

### ----------------------
### Database configuration
### ----------------------

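A minimal sketch of overriding the cache directory programmatically, assuming the `Config.init()`/`Config.get_cachedir()` additions shown in this changeset (the path is only an example):

```python
from platypush.config import Config

# Initialize the configuration with an explicit cache directory override;
# Config.get_cachedir() then returns the resolved path. Without the override,
# the defaults listed above apply ($XDG_CACHE_DIR/platypush,
# /var/cache/platypush for root, or ~/.cache/platypush).
Config.init(cachedir='/tmp/platypush-cache')  # example path
print(Config.get_cachedir())
```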
@@ -30,7 +30,7 @@ from sqlalchemy.orm.exc import ObjectDeletedError

import platypush
from platypush.config import Config
from platypush.common.db import Base
from platypush.common.db import Base, is_defined
from platypush.message import JSONAble, Message

EntityRegistryType = Dict[str, Type['Entity']]

@@ -52,7 +52,7 @@ fail.
logger = logging.getLogger(__name__)


if 'entity' not in Base.metadata:
if not is_defined('entity'):

class Entity(Base):
"""

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .three_axis import ThreeAxisSensor


if 'accelerometer' not in Base.metadata:
if not is_defined('accelerometer'):

class Accelerometer(ThreeAxisSensor):
"""

@@ -20,6 +20,7 @@ if 'accelerometer' not in Base.metadata:
primary_key=True,
)

__table_args__ = {'extend_existing': True}
__mapper_args__ = {
'polymorphic_identity': __tablename__,
}

@@ -1,12 +1,12 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .dimmers import Dimmer
from .switches import Switch


if 'volume' not in Base.metadata:
if not is_defined('volume'):

class Volume(Dimmer):
__tablename__ = 'volume'

@@ -15,12 +15,13 @@ if 'volume' not in Base.metadata:
Integer, ForeignKey(Dimmer.id, ondelete='CASCADE'), primary_key=True
)

__table_args__ = {'extend_existing': True}
__mapper_args__ = {
'polymorphic_identity': __tablename__,
}


if 'muted' not in Base.metadata:
if not is_defined('muted'):

class Muted(Switch):
__tablename__ = 'muted'

@@ -29,6 +30,7 @@ if 'muted' not in Base.metadata:
Integer, ForeignKey(Switch.id, ondelete='CASCADE'), primary_key=True
)

__table_args__ = {'extend_existing': True}
__mapper_args__ = {
'polymorphic_identity': __tablename__,
}

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'battery' not in Base.metadata:
if not is_defined('battery'):

class Battery(NumericSensor):
__tablename__ = 'battery'

@@ -19,6 +19,7 @@ if 'battery' not in Base.metadata:
Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
)

__table_args__ = {'extend_existing': True}
__mapper_args__ = {
'polymorphic_identity': __tablename__,
}

@@ -9,13 +9,13 @@ from sqlalchemy import (
    String,
)

from platypush.common.db import Base
from platypush.common.db import is_defined

from ..devices import Device
from ._service import BluetoothService


if 'bluetooth_device' not in Base.metadata:
if not is_defined('bluetooth_device'):

    class BluetoothDevice(Device):
        """

@@ -68,6 +68,7 @@ if 'bluetooth_device' not in Base.metadata:
        model_id = Column(String, default=None)
        """ Device model ID. """

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -8,10 +8,10 @@ from sqlalchemy import (
    String,
)

from platypush.common.db import Base
from platypush.common.db import is_defined
from platypush.entities import Entity

if 'bluetooth_service' not in Base.metadata:
if not is_defined('bluetooth_service'):

    class BluetoothService(Entity):
        """

@@ -44,6 +44,7 @@ if 'bluetooth_service' not in Base.metadata:
        connected = Column(Boolean, default=False)
        """ Whether an active connection exists to this service. """

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -6,14 +6,14 @@ from sqlalchemy import (
    Integer,
)

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import EnumSensor

logger = logging.getLogger(__name__)


if 'button' not in Base.metadata:
if not is_defined('button'):

    class Button(EnumSensor):
        __tablename__ = 'button'

@@ -22,6 +22,7 @@ if 'button' not in Base.metadata:
            Integer, ForeignKey(EnumSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -6,12 +6,12 @@ from sqlalchemy import (
    String,
)

from platypush.common.db import Base
from platypush.common.db import is_defined

from .devices import Device


if 'cloud_instance' not in Base.metadata:
if not is_defined('cloud_instance'):

    class CloudInstance(Device):
        """

@@ -38,6 +38,7 @@ if 'cloud_instance' not in Base.metadata:
        alerts = Column(JSON)
        backups = Column(JSON)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import BinarySensor


if 'contact_sensor' not in Base.metadata:
if not is_defined('contact_sensor'):

    class ContactSensor(BinarySensor):
        """

@@ -18,6 +18,7 @@ if 'contact_sensor' not in Base.metadata:
            Integer, ForeignKey(BinarySensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, Boolean, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from ._base import Entity


if 'device' not in Base.metadata:
if not is_defined('device'):

    class Device(Entity):
        """

@@ -19,6 +19,7 @@ if 'device' not in Base.metadata:
        )
        reachable = Column(Boolean, default=True)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey, Float, String

from platypush.common.db import Base
from platypush.common.db import is_defined

from .devices import Device


if 'dimmer' not in Base.metadata:
if not is_defined('dimmer'):

    class Dimmer(Device):
        """

@@ -24,6 +24,7 @@ if 'dimmer' not in Base.metadata:
        value = Column(Float)
        unit = Column(String)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'distance_sensor' not in Base.metadata:
if not is_defined('distance_sensor'):

    class DistanceSensor(NumericSensor):
        """

@@ -18,6 +18,7 @@ if 'distance_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'power_sensor' not in Base.metadata:
if not is_defined('power_sensor'):

    class PowerSensor(NumericSensor):
        __tablename__ = 'power_sensor'

@@ -14,12 +14,13 @@ if 'power_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'current_sensor' not in Base.metadata:
if not is_defined('current_sensor'):

    class CurrentSensor(NumericSensor):
        __tablename__ = 'current_sensor'

@@ -28,12 +29,13 @@ if 'current_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'voltage_sensor' not in Base.metadata:
if not is_defined('voltage_sensor'):

    class VoltageSensor(NumericSensor):
        __tablename__ = 'voltage_sensor'

@@ -42,12 +44,13 @@ if 'voltage_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'energy_sensor' not in Base.metadata:
if not is_defined('energy_sensor'):

    class EnergySensor(NumericSensor):
        __tablename__ = 'energy_sensor'

@@ -56,6 +59,7 @@ if 'energy_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'heart_rate_sensor' not in Base.metadata:
if not is_defined('heart_rate_sensor'):

    class HeartRateSensor(NumericSensor):
        """

@@ -18,6 +18,7 @@ if 'heart_rate_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'humidity_sensor' not in Base.metadata:
if not is_defined('humidity_sensor'):

    class HumiditySensor(NumericSensor):
        """

@@ -18,12 +18,13 @@ if 'humidity_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'dew_point_sensor' not in Base.metadata:
if not is_defined('dew_point_sensor'):

    class DewPointSensor(NumericSensor):
        """

@@ -36,6 +37,7 @@ if 'dew_point_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'illuminance_sensor' not in Base.metadata:
if not is_defined('illuminance_sensor'):

    class IlluminanceSensor(NumericSensor):
        __tablename__ = 'illuminance_sensor'

@@ -14,6 +14,7 @@ if 'illuminance_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, String, ForeignKey, Boolean, Float

from platypush.common.db import Base
from platypush.common.db import is_defined

from .devices import Device


if 'light' not in Base.metadata:
if not is_defined('light'):

    class Light(Device):
        __tablename__ = 'light'

@@ -34,6 +34,7 @@ if 'light' not in Base.metadata:
        temperature_min = Column(Float)
        temperature_max = Column(Float)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'link_quality' not in Base.metadata:
if not is_defined('link_quality'):

    class LinkQuality(NumericSensor):
        __tablename__ = 'link_quality'

@@ -19,6 +19,7 @@ if 'link_quality' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .three_axis import ThreeAxisSensor


if 'magnetometer' not in Base.metadata:
if not is_defined('magnetometer'):

    class Magnetometer(ThreeAxisSensor):
        """

@@ -20,6 +20,7 @@ if 'magnetometer' not in Base.metadata:
            primary_key=True,
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import BinarySensor


if 'motion_sensor' not in Base.metadata:
if not is_defined('motion_sensor'):

    class MotionSensor(BinarySensor):
        __tablename__ = 'motion_sensor'

@@ -14,6 +14,7 @@ if 'motion_sensor' not in Base.metadata:
            Integer, ForeignKey(BinarySensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import BinarySensor


if 'presence_sensor' not in Base.metadata:
if not is_defined('presence_sensor'):

    class PresenceSensor(BinarySensor):
        """

@@ -18,6 +18,7 @@ if 'presence_sensor' not in Base.metadata:
            Integer, ForeignKey(BinarySensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'pressure_sensor' not in Base.metadata:
if not is_defined('pressure_sensor'):

    class PressureSensor(NumericSensor):
        """

@@ -18,6 +18,7 @@ if 'pressure_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -12,7 +12,7 @@ from sqlalchemy import (
    String,
)

from platypush.common.db import Base
from platypush.common.db import is_defined

from .devices import Device

@@ -32,7 +32,7 @@ class Sensor(Device):
        super().__init__(*args, **kwargs)


if 'raw_sensor' not in Base.metadata:
if not is_defined('raw_sensor'):

    class RawSensor(Sensor):
        """

@@ -86,12 +86,13 @@ if 'raw_sensor' not in Base.metadata:
            self.is_binary = False
            self.is_json = False

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'numeric_sensor' not in Base.metadata and 'percent_sensor' not in Base.metadata:
if not is_defined('numeric_sensor') and not is_defined('percent_sensor'):

    class NumericSensor(Sensor):
        """

@@ -109,6 +110,7 @@ if 'numeric_sensor' not in Base.metadata and 'percent_sensor' not in Base.metadata:
        max = Column(Float)
        unit = Column(String)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -124,6 +126,7 @@ if 'numeric_sensor' not in Base.metadata and 'percent_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -135,7 +138,7 @@ if 'numeric_sensor' not in Base.metadata and 'percent_sensor' not in Base.metadata:
        super().__init__(*args, **kwargs)


if 'binary_sensor' not in Base.metadata:
if not is_defined('binary_sensor'):

    class BinarySensor(Sensor):
        """

@@ -163,12 +166,13 @@ if 'binary_sensor' not in Base.metadata:
        )
        value = Column(Boolean)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'enum_sensor' not in Base.metadata:
if not is_defined('enum_sensor'):

    class EnumSensor(Sensor):
        """

@@ -184,12 +188,13 @@ if 'enum_sensor' not in Base.metadata:
        values = Column(JSON)
        """ Possible values for the sensor, as a JSON array. """

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'composite_sensor' not in Base.metadata:
if not is_defined('composite_sensor'):

    class CompositeSensor(Sensor):
        """

@@ -204,6 +209,7 @@ if 'composite_sensor' not in Base.metadata:
        )
        value = Column(JSON)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'steps_sensor' not in Base.metadata:
if not is_defined('steps_sensor'):

    class StepsSensor(NumericSensor):
        """

@@ -18,6 +18,7 @@ if 'steps_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey, Boolean, String, JSON

from platypush.common.db import Base
from platypush.common.db import is_defined

from .devices import Device


if 'switch' not in Base.metadata:
if not is_defined('switch'):

    class Switch(Device):
        __tablename__ = 'switch'

@@ -15,12 +15,13 @@ if 'switch' not in Base.metadata:
        )
        state = Column(Boolean)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'enum_switch' not in Base.metadata:
if not is_defined('enum_switch'):

    class EnumSwitch(Device):
        __tablename__ = 'enum_switch'

@@ -31,6 +32,7 @@ if 'enum_switch' not in Base.metadata:
        value = Column(String)
        values = Column(JSON)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,6 +1,6 @@
from sqlalchemy import Boolean, Column, Float, ForeignKey, Integer, JSON, String

from platypush.common.db import Base
from platypush.common.db import is_defined

from . import Entity
from .devices import Device

@@ -8,7 +8,7 @@ from .sensors import NumericSensor, PercentSensor
from .temperature import TemperatureSensor


if 'cpu' not in Base.metadata:
if not is_defined('cpu'):

    class Cpu(Entity):
        """

@@ -23,12 +23,13 @@ if 'cpu' not in Base.metadata:

        percent = Column(Float)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'cpu_info' not in Base.metadata:
if not is_defined('cpu_info'):

    class CpuInfo(Entity):
        """

@@ -54,12 +55,13 @@ if 'cpu_info' not in Base.metadata:
        l2_cache_size = Column(Integer)
        l3_cache_size = Column(Integer)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'cpu_times' not in Base.metadata:
if not is_defined('cpu_times'):

    class CpuTimes(Entity):
        """

@@ -72,12 +74,13 @@ if 'cpu_times' not in Base.metadata:
            Integer, ForeignKey(Entity.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'cpu_stats' not in Base.metadata:
if not is_defined('cpu_stats'):

    class CpuStats(Entity):
        """

@@ -90,12 +93,13 @@ if 'cpu_stats' not in Base.metadata:
            Integer, ForeignKey(Entity.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'memory_stats' not in Base.metadata:
if not is_defined('memory_stats'):

    class MemoryStats(Entity):
        """

@@ -119,12 +123,13 @@ if 'memory_stats' not in Base.metadata:
        shared = Column(Integer)
        percent = Column(Float)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'swap_stats' not in Base.metadata:
if not is_defined('swap_stats'):

    class SwapStats(Entity):
        """

@@ -142,12 +147,13 @@ if 'swap_stats' not in Base.metadata:
        free = Column(Integer)
        percent = Column(Float)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'disk' not in Base.metadata:
if not is_defined('disk'):

    class Disk(Entity):
        """

@@ -175,12 +181,13 @@ if 'disk' not in Base.metadata:
        write_time = Column(Float)
        busy_time = Column(Float)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'network_interface' not in Base.metadata:
if not is_defined('network_interface'):

    class NetworkInterface(Device):
        """

@@ -207,12 +214,13 @@ if 'network_interface' not in Base.metadata:
        duplex = Column(String)
        flags = Column(JSON)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'system_temperature' not in Base.metadata:
if not is_defined('system_temperature'):

    class SystemTemperature(TemperatureSensor):
        """

@@ -230,12 +238,13 @@ if 'system_temperature' not in Base.metadata:
        high = Column(Float)
        critical = Column(Float)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'system_fan' not in Base.metadata:
if not is_defined('system_fan'):

    class SystemFan(NumericSensor):
        """

@@ -250,12 +259,13 @@ if 'system_fan' not in Base.metadata:
            primary_key=True,
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }


if 'system_battery' not in Base.metadata:
if not is_defined('system_battery'):

    class SystemBattery(PercentSensor):
        """

@@ -273,6 +283,7 @@ if 'system_battery' not in Base.metadata:
        seconds_left = Column(Float)
        power_plugged = Column(Boolean)

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'temperature_sensor' not in Base.metadata:
if not is_defined('temperature_sensor'):

    class TemperatureSensor(NumericSensor):
        __tablename__ = 'temperature_sensor'

@@ -14,6 +14,7 @@ if 'temperature_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,13 +1,13 @@
from typing import Iterable, Mapping, Optional, Union
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined
from platypush.common.sensors import Numeric

from .sensors import RawSensor


if 'three_axis_sensor' not in Base.metadata:
if not is_defined('three_axis_sensor'):

    class ThreeAxisSensor(RawSensor):
        """

@@ -20,6 +20,7 @@ if 'three_axis_sensor' not in Base.metadata:
            Integer, ForeignKey(RawSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'time_duration' not in Base.metadata:
if not is_defined('time_duration'):

    class TimeDuration(NumericSensor):
        """

@@ -18,6 +18,7 @@ if 'time_duration' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -2,14 +2,14 @@ import logging

from sqlalchemy import Column, ForeignKey, Integer, String

from platypush.common.db import Base
from platypush.common.db import is_defined

from . import Entity

logger = logging.getLogger(__name__)


if 'variable' not in Base.metadata:
if not is_defined('variable'):

    class Variable(Entity):
        """

@@ -1,11 +1,11 @@
from sqlalchemy import Column, Integer, ForeignKey

from platypush.common.db import Base
from platypush.common.db import is_defined

from .sensors import NumericSensor


if 'weight_sensor' not in Base.metadata:
if not is_defined('weight_sensor'):

    class WeightSensor(NumericSensor):
        """

@@ -18,6 +18,7 @@ if 'weight_sensor' not in Base.metadata:
            Integer, ForeignKey(NumericSensor.id, ondelete='CASCADE'), primary_key=True
        )

        __table_args__ = {'extend_existing': True}
        __mapper_args__ = {
            'polymorphic_identity': __tablename__,
        }

@@ -16,8 +16,10 @@ EXPOSE 8008

VOLUME /etc/platypush
VOLUME /var/lib/platypush
VOLUME /var/cache/platypush

CMD platypush \
    --start-redis \
    --config /etc/platypush/config.yaml \
    --workdir /var/lib/platypush
    --workdir /var/lib/platypush \
    --cachedir /var/cache/platypush

@@ -20,8 +20,10 @@ EXPOSE 8008

VOLUME /etc/platypush
VOLUME /var/lib/platypush
VOLUME /var/cache/platypush

CMD platypush \
    --start-redis \
    --config /etc/platypush/config.yaml \
    --workdir /var/lib/platypush
    --workdir /var/lib/platypush \
    --cachedir /var/cache/platypush

@@ -19,8 +19,10 @@ EXPOSE 8008

VOLUME /etc/platypush
VOLUME /var/lib/platypush
VOLUME /var/cache/platypush

CMD platypush \
    --start-redis \
    --config /etc/platypush/config.yaml \
    --workdir /var/lib/platypush
    --workdir /var/lib/platypush \
    --cachedir /var/cache/platypush

@@ -20,8 +20,10 @@ EXPOSE 8008

VOLUME /etc/platypush
VOLUME /var/lib/platypush
VOLUME /var/cache/platypush

CMD platypush \
    --start-redis \
    --config /etc/platypush/config.yaml \
    --workdir /var/lib/platypush
    --workdir /var/lib/platypush \
    --cachedir /var/cache/platypush

@@ -34,21 +34,21 @@ class Message:
    def parse_numpy(obj):
        try:
            import numpy as np
        except ImportError:
            return

        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        if isinstance(obj, (bytes, bytearray)):
            return '0x' + ''.join([f'{x:02x}' for x in obj])
        if callable(obj):
            return '<function at {}.{}>'.format(obj.__module__, obj.__name__)
            if isinstance(obj, np.floating):
                return float(obj)
            if isinstance(obj, np.integer):
                return int(obj)
            if isinstance(obj, np.ndarray):
                return obj.tolist()
            if isinstance(obj, decimal.Decimal):
                return float(obj)
            if isinstance(obj, (bytes, bytearray)):
                return '0x' + ''.join([f'{x:02x}' for x in obj])
            if callable(obj):
                return '<function at {}.{}>'.format(obj.__module__, obj.__name__)
        except (ImportError, TypeError):
            pass

        return

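The hunk above wraps the whole numpy/decimal/bytes handling in a single try/except, so serialization degrades gracefully when numpy is missing or a type check raises TypeError. Assuming parse_numpy is what Message.Encoder relies on for these types (the encoder itself is not shown in this diff), the observable effect would be along these lines:

```python
import json

from platypush.message import Message

# Illustrative payload: bytes should come out as a hex string ('0x0102');
# numpy scalars and arrays, if numpy is installed, become plain numbers and lists.
payload = {'blob': b'\x01\x02', 'value': 42}
print(json.dumps(payload, cls=Message.Encoder))
```
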
@@ -1,5 +1,5 @@
from datetime import datetime
from typing import Dict, Any
from typing import Any, Dict, Optional

from platypush.message.event import Event

@@ -13,13 +13,13 @@ class MatrixEvent(Event):
        self,
        *args,
        server_url: str,
        sender_id: str | None = None,
        sender_display_name: str | None = None,
        sender_avatar_url: str | None = None,
        room_id: str | None = None,
        room_name: str | None = None,
        room_topic: str | None = None,
        server_timestamp: datetime | None = None,
        sender_id: Optional[str] = None,
        sender_display_name: Optional[str] = None,
        sender_avatar_url: Optional[str] = None,
        room_id: Optional[str] = None,
        room_name: Optional[str] = None,
        room_topic: Optional[str] = None,
        server_timestamp: Optional[datetime] = None,
        **kwargs
    ):
        """

@@ -70,11 +70,11 @@ class MatrixMessageEvent(MatrixEvent):
        self,
        *args,
        body: str = '',
        url: str | None = None,
        thumbnail_url: str | None = None,
        mimetype: str | None = None,
        formatted_body: str | None = None,
        format: str | None = None,
        url: Optional[str] = None,
        thumbnail_url: Optional[str] = None,
        mimetype: Optional[str] = None,
        formatted_body: Optional[str] = None,
        format: Optional[str] = None,
        **kwargs
    ):
        """

@@ -148,7 +148,7 @@ class MatrixCallEvent(MatrixEvent):
    """

    def __init__(
        self, *args, call_id: str, version: int, sdp: str | None = None, **kwargs
        self, *args, call_id: str, version: int, sdp: Optional[str] = None, **kwargs
    ):
        """
        :param call_id: The unique ID of the call.

@@ -163,7 +163,7 @@ class MatrixCallInviteEvent(MatrixCallEvent):
    Event triggered when the user is invited to a call.
    """

    def __init__(self, *args, invite_validity: float | None = None, **kwargs):
    def __init__(self, *args, invite_validity: Optional[float] = None, **kwargs):
        """
        :param invite_validity: For how long the invite will be valid, in seconds.
        :param sdp: SDP text of the session description.

@@ -242,7 +242,9 @@ class MatrixUserPresenceEvent(MatrixEvent):
    Event triggered when a user comes online or goes offline.
    """

    def __init__(self, *args, is_active: bool, last_active: datetime | None, **kwargs):
    def __init__(
        self, *args, is_active: bool, last_active: Optional[datetime], **kwargs
    ):
        """
        :param is_active: True if the user is currently online.
        :param topic: When the user was last active.

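The hunks above consistently replace the PEP 604 ``str | None`` annotations with ``typing.Optional``; presumably this keeps the module importable on Python versions older than 3.10, where the ``X | Y`` union syntax is not available at runtime. Where both are supported, the two spellings are equivalent:

```python
from typing import Optional


def example(sender_id: Optional[str] = None) -> None:
    # On Python >= 3.10 this could equivalently be written as
    # `sender_id: str | None = None`.
    ...
```
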
@@ -73,7 +73,7 @@ class CalendarIcalPlugin(Plugin, CalendarInterface):
    def get_upcoming_events(self, *_, only_participating=True, **__):
        """
        Get the upcoming events. See
        :func:`~platypush.plugins.calendar.CalendarPlugin.get_upcoming_events`.
        :meth:`platypush.plugins.calendar.CalendarPlugin.get_upcoming_events`.
        """

        from icalendar import Calendar

@@ -49,7 +49,7 @@ class GoogleCalendarPlugin(GooglePlugin, CalendarInterface):
    def get_upcoming_events(self, max_results=10):
        """
        Get the upcoming events. See
        :func:`~platypush.plugins.calendar.CalendarPlugin.get_upcoming_events`.
        :meth:`platypush.plugins.calendar.CalendarPlugin.get_upcoming_events`.
        """

        now = datetime.datetime.utcnow().isoformat() + 'Z'

@ -1,36 +1,24 @@
|
|||
from collections import defaultdict
|
||||
import importlib
|
||||
import inspect
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import pickle
|
||||
import pkgutil
|
||||
from types import ModuleType
|
||||
from typing import Callable, Dict, Generator, Optional, Type, Union
|
||||
from concurrent.futures import Future, ThreadPoolExecutor
|
||||
from typing import List, Optional
|
||||
|
||||
from platypush.backend import Backend
|
||||
from platypush.common.db import override_definitions
|
||||
from platypush.common.reflection import Integration, Message as MessageMetadata
|
||||
from platypush.config import Config
|
||||
from platypush.plugins import Plugin, action
|
||||
from platypush.message import Message
|
||||
from platypush.message.event import Event
|
||||
from platypush.message.response import Response
|
||||
from platypush.utils import (
|
||||
get_backend_class_by_name,
|
||||
get_backend_name_by_class,
|
||||
get_plugin_class_by_name,
|
||||
get_plugin_name_by_class,
|
||||
)
|
||||
from platypush.utils.manifest import Manifests
|
||||
from platypush.utils.mock import auto_mocks
|
||||
from platypush.utils.manifest import Manifest, Manifests
|
||||
|
||||
from ._context import ComponentContext
|
||||
from ._model import (
|
||||
BackendModel,
|
||||
EventModel,
|
||||
Model,
|
||||
PluginModel,
|
||||
ResponseModel,
|
||||
)
|
||||
from ._cache import Cache
|
||||
from ._serialize import ProcedureEncoder
|
||||
|
||||
|
||||
|
@ -39,297 +27,211 @@ class InspectPlugin(Plugin):
|
|||
This plugin can be used to inspect platypush plugins and backends
|
||||
"""
|
||||
|
||||
_num_workers = 8
|
||||
"""Number of threads to use for the inspection."""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self._components_cache_file = os.path.join(
|
||||
Config.get('workdir'), # type: ignore
|
||||
'components.cache', # type: ignore
|
||||
)
|
||||
self._components_context: Dict[type, ComponentContext] = defaultdict(
|
||||
ComponentContext
|
||||
)
|
||||
self._components_cache: Dict[type, dict] = defaultdict(dict)
|
||||
self._load_components_cache()
|
||||
self._cache_file = os.path.join(Config.get_cachedir(), 'components.json')
|
||||
self._cache = Cache()
|
||||
self._load_cache()
|
||||
|
||||
def _load_components_cache(self):
|
||||
def _load_cache(self):
|
||||
"""
|
||||
Loads the components cache from disk.
|
||||
"""
|
||||
try:
|
||||
with open(self._components_cache_file, 'rb') as f:
|
||||
self._components_cache = pickle.load(f)
|
||||
except Exception as e:
|
||||
self.logger.warning('Could not initialize the components cache: %s', e)
|
||||
self.logger.info(
|
||||
'The plugin will initialize the cache by scanning '
|
||||
'the integrations at the next run. This may take a while'
|
||||
)
|
||||
|
||||
def _flush_components_cache(self):
|
||||
"""
|
||||
Flush the current components cache to disk.
|
||||
"""
|
||||
with open(self._components_cache_file, 'wb') as f:
|
||||
pickle.dump(self._components_cache, f)
|
||||
|
||||
def _get_cached_component(
|
||||
self, base_type: type, comp_type: type
|
||||
) -> Optional[Model]:
|
||||
"""
|
||||
Retrieve a cached component's ``Model``.
|
||||
|
||||
:param base_type: The base type of the component (e.g. ``Plugin`` or
|
||||
``Backend``).
|
||||
:param comp_type: The specific type of the component (e.g.
|
||||
``MusicMpdPlugin`` or ``HttpBackend``).
|
||||
:return: The cached component's ``Model`` if it exists, otherwise null.
|
||||
"""
|
||||
return self._components_cache.get(base_type, {}).get(comp_type)
|
||||
|
||||
def _cache_component(
|
||||
self,
|
||||
base_type: type,
|
||||
comp_type: type,
|
||||
model: Model,
|
||||
index_by_module: bool = False,
|
||||
):
|
||||
"""
|
||||
Cache the ``Model`` object for a component.
|
||||
|
||||
:param base_type: The base type of the component (e.g. ``Plugin`` or
|
||||
``Backend``).
|
||||
:param comp_type: The specific type of the component (e.g.
|
||||
``MusicMpdPlugin`` or ``HttpBackend``).
|
||||
:param model: The ``Model`` object to cache.
|
||||
:param index_by_module: If ``True``, the ``Model`` object will be
|
||||
indexed according to the ``base_type -> module -> comp_type``
|
||||
mapping, otherwise ``base_type -> comp_type``.
|
||||
"""
|
||||
if index_by_module:
|
||||
if not self._components_cache.get(base_type, {}).get(model.package):
|
||||
self._components_cache[base_type][model.package] = {}
|
||||
self._components_cache[base_type][model.package][comp_type] = model
|
||||
else:
|
||||
self._components_cache[base_type][comp_type] = model
|
||||
|
||||
def _scan_integrations(self, base_type: type):
|
||||
"""
|
||||
A generator that scans the manifest files given a ``base_type``
|
||||
(``Plugin`` or ``Backend``) and yields the parsed submodules.
|
||||
"""
|
||||
for manifest in Manifests.by_base_class(base_type):
|
||||
with self._cache.lock(), auto_mocks(), override_definitions():
|
||||
try:
|
||||
yield importlib.import_module(manifest.package)
|
||||
self._cache = Cache.load(self._cache_file)
|
||||
except Exception as e:
|
||||
self.logger.debug(
|
||||
'Could not import module %s: %s',
|
||||
manifest.package,
|
||||
self.logger.warning(
|
||||
'Could not initialize the components cache from %s: %s',
|
||||
self._cache_file,
|
||||
e,
|
||||
)
|
||||
continue
|
||||
self._cache = Cache()
|
||||
|
||||
def _scan_modules(self, base_type: type) -> Generator[ModuleType, None, None]:
|
||||
self._refresh_cache()
|
||||
|
||||
def _refresh_cache(self):
|
||||
"""
|
||||
A generator that scan the modules given a ``base_type`` (e.g. ``Event``).
|
||||
Refreshes the components cache.
|
||||
"""
|
||||
cache_version_differs = self._cache.version != Cache.cur_version
|
||||
|
||||
Unlike :meth:`._scan_integrations`, this method recursively scans the
|
||||
modules using ``pkgutil`` instead of using the information provided in
|
||||
the integrations' manifest files.
|
||||
with ThreadPoolExecutor(self._num_workers) as pool:
|
||||
futures = []
|
||||
|
||||
for base_type in [Plugin, Backend]:
|
||||
futures.append(
|
||||
pool.submit(
|
||||
self._scan_integrations,
|
||||
base_type,
|
||||
pool=pool,
|
||||
force_refresh=cache_version_differs,
|
||||
futures=futures,
|
||||
)
|
||||
)
|
||||
|
||||
for base_type in [Event, Response]:
|
||||
futures.append(
|
||||
pool.submit(
|
||||
self._scan_modules,
|
||||
base_type,
|
||||
pool=pool,
|
||||
force_refresh=cache_version_differs,
|
||||
futures=futures,
|
||||
)
|
||||
)
|
||||
|
||||
while futures:
|
||||
futures.pop().result()
|
||||
|
||||
if self._cache.has_changes:
|
||||
self.logger.info('Saving new components cache to %s', self._cache_file)
|
||||
self._cache.dump(self._cache_file)
|
||||
self._cache.loaded_at = self._cache.saved_at
|
||||
|
||||
def _scan_integration(self, manifest: Manifest):
|
||||
"""
|
||||
Scans a single integration from the manifest and adds it to the cache.
|
||||
"""
|
||||
try:
|
||||
self._cache_integration(Integration.from_manifest(manifest.file))
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
'Could not import module %s: %s',
|
||||
manifest.package,
|
||||
e,
|
||||
)
|
||||
|
||||
def _scan_integrations(
|
||||
self,
|
||||
base_type: type,
|
||||
pool: ThreadPoolExecutor,
|
||||
futures: List[Future],
|
||||
force_refresh: bool = False,
|
||||
):
|
||||
"""
|
||||
Scans the integrations with a manifest file (plugins and backends) and
|
||||
refreshes the cache.
|
||||
"""
|
||||
for manifest in Manifests.by_base_class(base_type):
|
||||
# An integration metadata needs to be refreshed if it's been
|
||||
# modified since it was last loaded, or if it's not in the
|
||||
# cache.
|
||||
if force_refresh or self._needs_refresh(manifest.file):
|
||||
futures.append(pool.submit(self._scan_integration, manifest))
|
||||
|
||||
def _scan_module(self, base_type: type, modname: str):
|
||||
"""
|
||||
Scans a single module for objects that match the given base_type and
|
||||
adds them to the cache.
|
||||
"""
|
||||
try:
|
||||
module = importlib.import_module(modname)
|
||||
except Exception as e:
|
||||
self.logger.warning('Could not import module %s: %s', modname, e)
|
||||
return
|
||||
|
||||
for _, obj_type in inspect.getmembers(module):
|
||||
if (
|
||||
inspect.isclass(obj_type)
|
||||
and issubclass(obj_type, base_type)
|
||||
# Exclude the base_type itself
|
||||
and obj_type != base_type
|
||||
):
|
||||
self.logger.info(
|
||||
'Scanned %s: %s',
|
||||
base_type.__name__,
|
||||
f'{module.__name__}.{obj_type.__name__}',
|
||||
)
|
||||
|
||||
self._cache.set(
|
||||
base_type, obj_type, MessageMetadata.by_type(obj_type).to_dict()
|
||||
)
|
||||
|
||||
def _scan_modules(
|
||||
self,
|
||||
base_type: type,
|
||||
pool: ThreadPoolExecutor,
|
||||
futures: List[Future],
|
||||
force_refresh: bool = False,
|
||||
):
|
||||
"""
|
||||
A generator that scans the modules given a ``base_type`` (e.g. ``Event``).
|
||||
|
||||
It's a bit more inefficient than :meth:`._scan_integrations` because it
|
||||
needs to inspect all the members of a module to find the ones that
|
||||
match the given ``base_type``, but it works fine for simple components
|
||||
(like messages) that don't require extra recursive parsing and don't
|
||||
have a manifest.
|
||||
"""
|
||||
prefix = base_type.__module__ + '.'
|
||||
path = str(pathlib.Path(inspect.getfile(base_type)).parent)
|
||||
|
||||
for _, modname, _ in pkgutil.walk_packages(
|
||||
for _, modname, __ in pkgutil.walk_packages(
|
||||
path=[path], prefix=prefix, onerror=lambda _: None
|
||||
):
|
||||
try:
|
||||
yield importlib.import_module(modname)
|
||||
filename = self._module_filename(path, '.'.join(modname.split('.')[3:]))
|
||||
if not (force_refresh or self._needs_refresh(filename)):
|
||||
continue
|
||||
except Exception as e:
|
||||
self.logger.debug('Could not import module %s: %s', modname, e)
|
||||
self.logger.warning('Could not scan module %s: %s', modname, e)
|
||||
continue
|
||||
|
||||
def _init_component(
|
||||
self,
|
||||
base_type: type,
|
||||
comp_type: type,
|
||||
model_type: Type[Model],
|
||||
index_by_module: bool = False,
|
||||
) -> Model:
|
||||
futures.append(pool.submit(self._scan_module, base_type, modname))
|
||||
|
||||
def _needs_refresh(self, filename: str) -> bool:
|
||||
"""
|
||||
Initialize a component's ``Model`` object and cache it.
|
||||
|
||||
:param base_type: The base type of the component (e.g. ``Plugin`` or
|
||||
``Backend``).
|
||||
:param comp_type: The specific type of the component (e.g.
|
||||
``MusicMpdPlugin`` or ``HttpBackend``).
|
||||
:param model_type: The type of the ``Model`` object that should be
|
||||
created.
|
||||
:param index_by_module: If ``True``, the ``Model`` object will be
|
||||
indexed according to the ``base_type -> module -> comp_type``
|
||||
mapping, otherwise ``base_type -> comp_type``.
|
||||
:return: The initialized component's ``Model`` object.
|
||||
:return: True if the given file needs to be refreshed in the cache.
|
||||
"""
|
||||
prefix = base_type.__module__ + '.'
|
||||
comp_file = inspect.getsourcefile(comp_type)
|
||||
model = None
|
||||
mtime = None
|
||||
|
||||
if comp_file:
|
||||
mtime = os.stat(comp_file).st_mtime
|
||||
cached_model = self._get_cached_component(base_type, comp_type)
|
||||
|
||||
# Only update the component model if its source file was
|
||||
# modified since the last time it was scanned
|
||||
if (
|
||||
cached_model
|
||||
and cached_model.last_modified
|
||||
and mtime <= cached_model.last_modified
|
||||
):
|
||||
model = cached_model
|
||||
|
||||
if not model:
|
||||
self.logger.info('Scanning component %s', comp_type.__name__)
|
||||
model = model_type(comp_type, prefix=prefix, last_modified=mtime)
|
||||
|
||||
self._cache_component(
|
||||
base_type, comp_type, model, index_by_module=index_by_module
|
||||
)
|
||||
return model
|
||||
|
||||
def _init_modules(
|
||||
self,
|
||||
base_type: type,
|
||||
model_type: Type[Model],
|
||||
):
|
||||
"""
|
||||
Initializes, parses and caches all the components of a given type.
|
||||
|
||||
Unlike :meth:`._scan_integrations`, this method inspects all the
|
||||
members of a ``module`` for those that match the given ``base_type``
|
||||
instead of relying on the information provided in the manifest.
|
||||
|
||||
It is a bit more inefficient, but it works fine for simple components
|
||||
(like entities and messages) that don't require extra recursive parsing
|
||||
logic for their docs (unlike plugins).
|
||||
"""
|
||||
for module in self._scan_modules(base_type):
|
||||
for _, obj_type in inspect.getmembers(module):
|
||||
if (
|
||||
inspect.isclass(obj_type)
|
||||
and issubclass(obj_type, base_type)
|
||||
# Exclude the base_type itself
|
||||
and obj_type != base_type
|
||||
):
|
||||
self._init_component(
|
||||
base_type=base_type,
|
||||
comp_type=obj_type,
|
||||
model_type=model_type,
|
||||
index_by_module=True,
|
||||
)
|
||||
|
||||
def _init_integrations(
|
||||
self,
|
||||
base_type: Type[Union[Plugin, Backend]],
|
||||
model_type: Type[Union[PluginModel, BackendModel]],
|
||||
class_by_name: Callable[[str], Optional[type]],
|
||||
):
|
||||
"""
|
||||
Initializes, parses and caches all the integrations of a given type.
|
||||
|
||||
:param base_type: The base type of the component (e.g. ``Plugin`` or
|
||||
``Backend``).
|
||||
:param model_type: The type of the ``Model`` objects that should be
|
||||
created.
|
||||
:param class_by_name: A function that returns the class of a given
|
||||
integration given its qualified name.
|
||||
"""
|
||||
for module in self._scan_integrations(base_type):
|
||||
comp_name = '.'.join(module.__name__.split('.')[2:])
|
||||
comp_type = class_by_name(comp_name)
|
||||
if not comp_type:
|
||||
continue
|
||||
|
||||
self._init_component(
|
||||
base_type=base_type,
|
||||
comp_type=comp_type,
|
||||
model_type=model_type,
|
||||
)
|
||||
|
||||
self._flush_components_cache()
|
||||
|
||||
def _init_plugins(self):
|
||||
"""
|
||||
Initializes and caches all the available plugins.
|
||||
"""
|
||||
self._init_integrations(
|
||||
base_type=Plugin,
|
||||
model_type=PluginModel,
|
||||
class_by_name=get_plugin_class_by_name,
|
||||
return os.lstat(os.path.dirname(filename)).st_mtime > (
|
||||
self._cache.saved_at or 0
|
||||
)
|
||||
|
||||
def _init_backends(self):
|
||||
@staticmethod
|
||||
def _module_filename(path: str, modname: str) -> str:
|
||||
"""
|
||||
Initializes and caches all the available backends.
|
||||
:param path: Path to the module.
|
||||
:param modname: Module name.
|
||||
:return: The full path to the module file.
|
||||
"""
|
||||
self._init_integrations(
|
||||
base_type=Backend,
|
||||
model_type=BackendModel,
|
||||
class_by_name=get_backend_class_by_name,
|
||||
)
|
||||
filename = os.path.join(path, *modname.split('.')) + '.py'
|
||||
|
||||
def _init_events(self):
|
||||
"""
|
||||
Initializes and caches all the available events.
|
||||
"""
|
||||
self._init_modules(
|
||||
base_type=Event,
|
||||
model_type=EventModel,
|
||||
)
|
||||
if not os.path.isfile(filename):
|
||||
filename = os.path.join(path, *modname.split('.'), '__init__.py')
|
||||
|
||||
def _init_responses(self):
|
||||
"""
|
||||
Initializes and caches all the available responses.
|
||||
"""
|
||||
self._init_modules(
|
||||
base_type=Response,
|
||||
model_type=ResponseModel,
|
||||
)
|
||||
assert os.path.isfile(filename), f'No such file or directory: {filename}'
|
||||
return filename
|
||||
|
||||
def _init_components(self, base_type: type, initializer: Callable[[], None]):
|
||||
def _cache_integration(self, integration: Integration) -> dict:
|
||||
"""
|
||||
Context manager boilerplate for the other ``_init_*`` methods.
|
||||
:param integration: The :class:`.IntegrationMetadata` object.
|
||||
:return: The initialized component's metadata dict.
|
||||
"""
|
||||
ctx = self._components_context[base_type]
|
||||
with ctx.init_lock:
|
||||
if not ctx.refreshed.is_set():
|
||||
initializer()
|
||||
ctx.refreshed.set()
|
||||
self.logger.info(
|
||||
'Scanned %s: %s', integration.base_type.__name__, integration.name
|
||||
)
|
||||
meta = integration.to_dict()
|
||||
self._cache.set(integration.base_type, integration.type, meta)
|
||||
return meta
|
||||
|
||||
@action
|
||||
def get_all_plugins(self):
|
||||
"""
|
||||
Get information about all the available plugins.
|
||||
"""
|
||||
self._init_components(Plugin, self._init_plugins)
|
||||
return json.dumps(
|
||||
{
|
||||
get_plugin_name_by_class(cls): dict(plugin)
|
||||
for cls, plugin in self._components_cache.get(Plugin, {}).items()
|
||||
},
|
||||
cls=Message.Encoder,
|
||||
)
|
||||
return json.dumps(self._cache.to_dict().get('plugins', {}), cls=Message.Encoder)
|
||||
|
||||
@action
|
||||
def get_all_backends(self):
|
||||
"""
|
||||
Get information about all the available backends.
|
||||
"""
|
||||
self._init_components(Backend, self._init_backends)
|
||||
return json.dumps(
|
||||
{
|
||||
get_backend_name_by_class(cls): dict(backend)
|
||||
for cls, backend in self._components_cache.get(Backend, {}).items()
|
||||
}
|
||||
self._cache.to_dict().get('backends', {}), cls=Message.Encoder
|
||||
)
|
||||
|
||||
@action
|
||||
|
@ -337,33 +239,15 @@ class InspectPlugin(Plugin):
|
|||
"""
|
||||
Get information about all the available events.
|
||||
"""
|
||||
self._init_components(Event, self._init_events)
|
||||
return json.dumps(
|
||||
{
|
||||
package: {
|
||||
obj_type.__name__: dict(event_model)
|
||||
for obj_type, event_model in events.items()
|
||||
}
|
||||
for package, events in self._components_cache.get(Event, {}).items()
|
||||
}
|
||||
)
|
||||
return json.dumps(self._cache.to_dict().get('events', {}), cls=Message.Encoder)
|
||||
|
||||
@action
|
||||
def get_all_responses(self):
|
||||
"""
|
||||
Get information about all the available responses.
|
||||
"""
|
||||
self._init_components(Response, self._init_responses)
|
||||
return json.dumps(
|
||||
{
|
||||
package: {
|
||||
obj_type.__name__: dict(response_model)
|
||||
for obj_type, response_model in responses.items()
|
||||
}
|
||||
for package, responses in self._components_cache.get(
|
||||
Response, {}
|
||||
).items()
|
||||
}
|
||||
self._cache.to_dict().get('responses', {}), cls=Message.Encoder
|
||||
)
|
||||
|
||||
@action
|
||||
|
|
|
@ -0,0 +1,248 @@
|
|||
from contextlib import contextmanager
|
||||
import json
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from time import time
|
||||
from threading import RLock
|
||||
from typing import Dict, Optional

from platypush.backend import Backend
from platypush.message.event import Event
from platypush.message.response import Response
from platypush.plugins import Plugin
from platypush.utils import (
    get_backend_class_by_name,
    get_backend_name_by_class,
    get_plugin_class_by_name,
    get_plugin_name_by_class,
)

logger = logging.getLogger(__name__)


class Cache:
    """
    A cache for the parsed integration metadata.

    Cache structure:

    .. code-block:: python

        {
            <integration_category>: {
                <integration_type>: {
                    'doc': <integration_docstring>,
                    'args': {
                        <arg_name>: {
                            'name': <arg_name>,
                            'type': <arg_type>,
                            'doc': <arg_docstring>,
                            'default': <arg_default_value>,
                            'required': <arg_required>,
                        },
                        ...
                    },
                    'actions': {
                        <action_name>: {
                            'name': <action_name>,
                            'doc': <action_docstring>,
                            'args': {
                                ...
                            },
                            'returns': {
                                'type': <return_type>,
                                'doc': <return_docstring>,
                            },
                        },
                        ...
                    },
                    'events': [
                        <event_type1>,
                        <event_type2>,
                        ...
                    ],
                },
                ...
            },
            ...
        }

    """

    cur_version = 1
    """
    Cache version, used to detect breaking changes in the cache logic that require a cache refresh.
    """

    def __init__(
        self,
        items: Optional[Dict[type, Dict[type, dict]]] = None,
        saved_at: Optional[float] = None,
        loaded_at: Optional[float] = None,
        version: int = cur_version,
    ):
        self.saved_at = saved_at
        self.loaded_at = loaded_at
        self._cache: Dict[type, Dict[type, dict]] = defaultdict(dict)
        self._lock = RLock()
        self.version = version
        self.has_changes = False

        if items:
            self._cache.update(items)
            self.loaded_at = time()

    @classmethod
    def load(cls, cache_file: str) -> 'Cache':
        """
        Loads the components cache from disk.

        :param cache_file: Cache file path.
        """
        with open(cache_file, 'r') as f:
            data = json.load(f)
        return cls.from_dict(data)

    def dump(self, cache_file: str):
        """
        Dumps the components cache to disk.

        :param cache_file: Cache file path.
        """
        from platypush.message import Message

        self.version = self.cur_version

        with open(cache_file, 'w') as f:
            self.saved_at = time()
            json.dump(
                {
                    'saved_at': self.saved_at,
                    'version': self.version,
                    'items': self.to_dict(),
                },
                f,
                cls=Message.Encoder,
            )

        self.has_changes = False

    @classmethod
    def from_dict(cls, data: dict) -> 'Cache':
        """
        Creates a cache from a JSON-serializable dictionary.
        """
        return cls(
            items={
                Backend: {
                    k: v
                    for k, v in {
                        get_backend_class_by_name(backend_type): backend_meta
                        for backend_type, backend_meta in data.get('items', {})
                        .get('backends', {})
                        .items()
                    }.items()
                    if k
                },
                Plugin: {
                    k: v
                    for k, v in {
                        get_plugin_class_by_name(plugin_type): plugin_meta
                        for plugin_type, plugin_meta in data.get('items', {})
                        .get('plugins', {})
                        .items()
                    }.items()
                    if k
                },
                Event: data.get('items', {}).get('events', {}),
                Response: data.get('items', {}).get('responses', {}),
            },
            loaded_at=time(),
            saved_at=data.get('saved_at'),
            version=data.get('version', cls.cur_version),
        )

    def to_dict(self) -> Dict[str, Dict[str, dict]]:
        """
        Converts the cache items to a JSON-serializable dictionary.
        """
        return {
            'backends': {
                k: v
                for k, v in {
                    get_backend_name_by_class(backend_type): backend_meta
                    for backend_type, backend_meta in self.backends.items()
                }.items()
                if k
            },
            'plugins': {
                k: v
                for k, v in {
                    get_plugin_name_by_class(plugin_type): plugin_meta
                    for plugin_type, plugin_meta in self.plugins.items()
                }.items()
                if k
            },
            'events': {
                (k if isinstance(k, str) else f'{k.__module__}.{k.__qualname__}'): v
                for k, v in self.events.items()
                if k
            },
            'responses': {
                (k if isinstance(k, str) else f'{k.__module__}.{k.__qualname__}'): v
                for k, v in self.responses.items()
                if k
            },
        }

    def get(self, category: type, obj_type: Optional[type] = None) -> Optional[dict]:
        """
        Retrieves an object from the cache.

        :param category: Category type.
        :param obj_type: Object type.
        :return: Object metadata.
        """
        collection = self._cache[category]
        if not obj_type:
            return collection
        return collection.get(obj_type)

    def set(self, category: type, obj_type: type, value: dict):
        """
        Set an object on the cache.

        :param category: Category type.
        :param obj_type: Object type.
        :param value: Value to set.
        """
        self._cache[category][obj_type] = value
        self.has_changes = True

    @property
    def plugins(self) -> Dict[type, dict]:
        """Plugins metadata."""
        return self._cache[Plugin]

    @property
    def backends(self) -> Dict[type, dict]:
        """Backends metadata."""
        return self._cache[Backend]

    @property
    def events(self) -> Dict[type, dict]:
        """Events metadata."""
        return self._cache[Event]

    @property
    def responses(self) -> Dict[type, dict]:
        """Responses metadata."""
        return self._cache[Response]

    @contextmanager
    def lock(self):
        """
        Context manager that acquires a lock on the cache.
        """
        with self._lock:
            yield
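The ``Cache`` class above is essentially a two-level dictionary keyed by component category (``Plugin``, ``Backend``, ``Event``, ``Response``) and component type, persisted as JSON. A minimal, hedged usage sketch (the cache path is just an example; ``LightHuePlugin`` is an existing plugin class used only as a sample key, and this snippet is not part of the changeset):

from platypush.plugins.light.hue import LightHuePlugin

cache = Cache()
with cache.lock():
    # Store the parsed metadata for a plugin under the Plugin category
    cache.set(Plugin, LightHuePlugin, {'doc': 'Philips Hue lights plugin', 'args': {}})

# Look the entry up again and persist everything to disk
assert cache.get(Plugin, LightHuePlugin) is not None
cache.dump('/tmp/components.json')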
@ -1,12 +0,0 @@
from dataclasses import dataclass, field
import threading


@dataclass
class ComponentContext:
    """
    This class is used to store the context of a component type.
    """

    init_lock: threading.RLock = field(default_factory=threading.RLock)
    refreshed: threading.Event = field(default_factory=threading.Event)
@ -1,262 +0,0 @@
import inspect
import json
import re
from typing import Callable, List, Optional, Type

from platypush.backend import Backend
from platypush.message.event import Event
from platypush.message.response import Response
from platypush.plugins import Plugin
from platypush.utils import get_decorators

from ._parsers import (
    BackendParser,
    EventParser,
    MethodParser,
    Parser,
    PluginParser,
    ResponseParser,
    SchemaParser,
)


class Model:
    """
    Base class for component models.
    """

    _parsers: List[Type[Parser]] = [
        BackendParser,
        EventParser,
        MethodParser,
        PluginParser,
        ResponseParser,
        SchemaParser,
    ]

    _param_docstring_re = re.compile(r'^\s*:param ([^:]+):\s*(.*)')
    _type_docstring_re = re.compile(r'^\s*:type ([^:]+):\s*([^\s]+).*')
    _return_docstring_re = re.compile(r'^\s*:return:\s+(.*)')

    def __init__(
        self,
        obj_type: type,
        name: Optional[str] = None,
        doc: Optional[str] = None,
        prefix: str = '',
        last_modified: Optional[float] = None,
    ) -> None:
        """
        :param obj_type: Type of the component.
        :param name: Name of the component.
        :param doc: Documentation of the component.
        :param last_modified: Last modified timestamp of the component.
        """
        self._obj_type = obj_type
        self.package = obj_type.__module__[len(prefix) :]
        self.name = name or self.package
        self.last_modified = last_modified

        docstring = doc or ''
        if obj_type.__doc__:
            docstring += '\n\n' + obj_type.__doc__

        if hasattr(obj_type, '__init__'):
            docstring += '\n\n' + (obj_type.__init__.__doc__ or '')

        self.doc, argsdoc = self._parse_docstring(docstring, obj_type=obj_type)
        self.args = {}
        self.has_kwargs = False
        self.has_varargs = False

        for arg in list(inspect.signature(obj_type).parameters.values())[1:]:
            if arg.kind == arg.VAR_KEYWORD:
                self.has_kwargs = True
                continue

            if arg.kind == arg.VAR_POSITIONAL:
                self.has_varargs = True
                continue

            self.args[arg.name] = {
                'default': (
                    arg.default if not issubclass(arg.default.__class__, type) else None
                ),
                'doc': argsdoc.get(arg.name, {}).get('name'),
                'required': arg.default is inspect._empty,
                'type': (
                    argsdoc.get(arg.name, {}).get('type')
                    or (
                        (
                            arg.annotation.__name__
                            if arg.annotation.__module__ == 'builtins'
                            else (
                                None
                                if arg.annotation is inspect._empty
                                else str(arg.annotation).replace('typing.', '')
                            )
                        )
                        if arg.annotation
                        else None
                    )
                ),
            }

    def __str__(self):
        """
        :return: JSON string representation of the model.
        """
        return json.dumps(dict(self), indent=2, sort_keys=True)

    def __repr__(self):
        """
        :return: JSON string representation of the model.
        """
        return json.dumps(dict(self))

    def __iter__(self):
        """
        Iterator for the model public attributes/values pairs.
        """
        for attr in ['name', 'args', 'doc', 'has_varargs', 'has_kwargs']:
            yield attr, getattr(self, attr)

    @classmethod
    def _parse_docstring(cls, docstring: str, obj_type: type):
        new_docstring = ''
        params = {}
        cur_param = None
        cur_param_docstring = ''
        param_types = {}

        if not docstring:
            return None, {}

        for line in docstring.split('\n'):
            m = cls._param_docstring_re.match(line)
            if m:
                if cur_param:
                    params[cur_param] = cur_param_docstring

                cur_param = m.group(1)
                cur_param_docstring = m.group(2)
                continue

            m = cls._type_docstring_re.match(line)
            if m:
                if cur_param:
                    param_types[cur_param] = m.group(2).strip()
                    params[cur_param] = cur_param_docstring

                cur_param = None
                continue

            m = cls._return_docstring_re.match(line)
            if m:
                if cur_param:
                    params[cur_param] = cur_param_docstring

                new_docstring += '\n\n**Returns:**\n\n' + m.group(1).strip() + ' '
                cur_param = None
                continue

            if cur_param:
                if not line.strip():
                    params[cur_param] = cur_param_docstring
                    cur_param = None
                    cur_param_docstring = ''
                else:
                    cur_param_docstring += '\n' + line.strip() + ' '
            else:
                new_docstring += line + '\n'

        if cur_param:
            params[cur_param] = cur_param_docstring

        for param, doc in params.items():
            params[param] = {
                'name': cls._post_process_docstring(doc, obj_type=obj_type)
            }

            param_type = param_types.pop(param, None)
            if param_type is not None:
                params[param]['type'] = param_type

        return cls._post_process_docstring(new_docstring, obj_type=obj_type), params

    @classmethod
    def _post_process_docstring(cls, docstring: str, obj_type: type) -> str:
        for parsers in cls._parsers:
            docstring = parsers.parse(docstring, obj_type=obj_type)
        return docstring.strip()


# pylint: disable=too-few-public-methods
class BackendModel(Model):
    """
    Model for backend components.
    """

    def __init__(self, obj_type: Type[Backend], *args, **kwargs):
        super().__init__(obj_type, *args, **kwargs)


# pylint: disable=too-few-public-methods
class PluginModel(Model):
    """
    Model for plugin components.
    """

    def __init__(self, obj_type: Type[Plugin], prefix: str = '', **kwargs):
        super().__init__(
            obj_type,
            name=re.sub(r'\._plugin$', '', obj_type.__module__[len(prefix) :]),
            **kwargs,
        )

        self.actions = {
            action_name: ActionModel(getattr(obj_type, action_name))
            for action_name in get_decorators(obj_type, climb_class_hierarchy=True).get(
                'action', []
            )
        }

    def __iter__(self):
        """
        Overrides the default implementation of ``__iter__`` to also include
        plugin actions.
        """
        for attr in ['name', 'args', 'actions', 'doc', 'has_varargs', 'has_kwargs']:
            if attr == 'actions':
                yield attr, {
                    name: dict(action) for name, action in self.actions.items()
                }
            else:
                yield attr, getattr(self, attr)


class EventModel(Model):
    """
    Model for event components.
    """

    def __init__(self, obj_type: Type[Event], **kwargs):
        super().__init__(obj_type, **kwargs)


class ResponseModel(Model):
    """
    Model for response components.
    """

    def __init__(self, obj_type: Type[Response], **kwargs):
        super().__init__(obj_type, **kwargs)


class ActionModel(Model):
    """
    Model for plugin action components.
    """

    def __init__(self, obj_type: Type[Callable], *args, **kwargs):
        super().__init__(obj_type, name=obj_type.__name__, *args, **kwargs)
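For context, ``Model._parse_docstring`` above only understands the reST field conventions matched by its three regexes (``:param:``, ``:type:`` and ``:return:``). A rough illustration of the input it expects and the output it would produce, on a made-up docstring that is not taken from the codebase:

doc = """
Example action.

:param timeout: How long to wait, in seconds.
:type timeout: float
:return: True on success.
"""

# _parse_docstring(doc, obj_type=SomePlugin) would return the cleaned-up
# docstring (with a "**Returns:**" section appended) plus, roughly, a mapping
# like {'timeout': {'name': 'How long to wait, in seconds.', 'type': 'float'}}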
@ -1,18 +0,0 @@
from ._backend import BackendParser
from ._base import Parser
from ._event import EventParser
from ._method import MethodParser
from ._plugin import PluginParser
from ._response import ResponseParser
from ._schema import SchemaParser


__all__ = [
    'BackendParser',
    'EventParser',
    'MethodParser',
    'Parser',
    'PluginParser',
    'ResponseParser',
    'SchemaParser',
]
@ -1,32 +0,0 @@
import re

from ._base import Parser


class BackendParser(Parser):
    """
    Parse backend references in the docstrings with rendered links to their
    respective documentation.
    """

    _backend_regex = re.compile(
        r'(\s*):class:`(platypush\.backend\.(.+?))`', re.MULTILINE
    )

    @classmethod
    def parse(cls, docstring: str, *_, **__) -> str:
        while True:
            m = cls._backend_regex.search(docstring)
            if not m:
                break

            class_name = m.group(3).split('.')[-1]
            package = '.'.join(m.group(3).split('.')[:-1])
            docstring = cls._backend_regex.sub(
                f'{m.group(1)}`{class_name} '
                f'<https://docs.platypush.tech/platypush/backend/{package}.html#{m.group(2)}>`_',
                docstring,
                count=1,
            )

        return docstring
@ -1,12 +0,0 @@
from abc import ABC, abstractmethod


class Parser(ABC):
    """
    Base class for parsers.
    """

    @classmethod
    @abstractmethod
    def parse(cls, docstring: str, obj_type: type) -> str:
        raise NotImplementedError()
@ -1,32 +0,0 @@
import re

from ._base import Parser


class EventParser(Parser):
    """
    Parse event references in the docstrings with rendered links to their
    respective documentation.
    """

    _event_regex = re.compile(
        r'(\s*):class:`(platypush\.message\.event\.(.+?))`', re.MULTILINE
    )

    @classmethod
    def parse(cls, docstring: str, *_, **__) -> str:
        while True:
            m = cls._event_regex.search(docstring)
            if not m:
                break

            class_name = m.group(3).split('.')[-1]
            package = '.'.join(m.group(3).split('.')[:-1])
            docstring = cls._event_regex.sub(
                f'{m.group(1)}`{class_name} '
                f'<https://docs.platypush.tech/platypush/events/{package}.html#{m.group(2)}>`_',
                docstring,
                count=1,
            )

        return docstring
@ -1,60 +0,0 @@
import re

from ._base import Parser


class MethodParser(Parser):
    """
    Parse method references in the docstrings with rendered links to their
    respective documentation.
    """

    _abs_method_regex = re.compile(
        r'(\s*):meth:`(platypush\.plugins\.(.+?))`', re.MULTILINE
    )

    _rel_method_regex = re.compile(r'(\s*):meth:`\.(.+?)`', re.MULTILINE)

    @classmethod
    def parse(cls, docstring: str, obj_type: type) -> str:
        while True:
            m = cls._rel_method_regex.search(docstring)
            if m:
                tokens = m.group(2).split('.')
                method = tokens[-1]
                package = obj_type.__module__
                rel_package = '.'.join(package.split('.')[2:])
                full_name = '.'.join(
                    [
                        package,
                        '.'.join(obj_type.__qualname__.split('.')[:-1]),
                        method,
                    ]
                )

                docstring = cls._rel_method_regex.sub(
                    f'{m.group(1)}`{package}.{method} '
                    f'<https://docs.platypush.tech/platypush/plugins/{rel_package}.html#{full_name}>`_',
                    docstring,
                    count=1,
                )

                continue

            m = cls._abs_method_regex.search(docstring)
            if m:
                tokens = m.group(3).split('.')
                method = tokens[-1]
                package = '.'.join(tokens[:-2])
                docstring = cls._abs_method_regex.sub(
                    f'{m.group(1)}`{package}.{method} '
                    f'<https://docs.platypush.tech/platypush/plugins/{package}.html#{m.group(2)}>`_',
                    docstring,
                    count=1,
                )

                continue

            break

        return docstring
@ -1,32 +0,0 @@
import re

from ._base import Parser


class PluginParser(Parser):
    """
    Parse plugin references in the docstrings with rendered links to their
    respective documentation.
    """

    _plugin_regex = re.compile(
        r'(\s*):class:`(platypush\.plugins\.(.+?))`', re.MULTILINE
    )

    @classmethod
    def parse(cls, docstring: str, *_, **__) -> str:
        while True:
            m = cls._plugin_regex.search(docstring)
            if not m:
                break

            class_name = m.group(3).split('.')[-1]
            package = '.'.join(m.group(3).split('.')[:-1])
            docstring = cls._plugin_regex.sub(
                f'{m.group(1)}`{class_name} '
                f'<https://docs.platypush.tech/platypush/plugins/{package}.html#{m.group(2)}>`_',
                docstring,
                count=1,
            )

        return docstring
@ -1,32 +0,0 @@
import re

from ._base import Parser


class ResponseParser(Parser):
    """
    Parse response references in the docstrings with rendered links to their
    respective documentation.
    """

    _response_regex = re.compile(
        r'(\s*):class:`(platypush\.message\.response\.(.+?))`', re.MULTILINE
    )

    @classmethod
    def parse(cls, docstring: str, *_, **__) -> str:
        while True:
            m = cls._response_regex.search(docstring)
            if not m:
                break

            class_name = m.group(3).split('.')[-1]
            package = '.'.join(m.group(3).split('.')[:-1])
            docstring = cls._response_regex.sub(
                f'{m.group(1)}`{class_name} '
                f'<https://docs.platypush.tech/platypush/responses/{package}.html#{m.group(2)}>`_',
                docstring,
                count=1,
            )

        return docstring
@ -1,95 +0,0 @@
import importlib
import inspect
import json
import os
from random import randint
import re
import textwrap

from marshmallow import fields

import platypush.schemas

from ._base import Parser


class SchemaParser(Parser):
    """
    Support for response/message schemas in the docs. Format: ``.. schema:: rel_path.SchemaClass(arg1=value1, ...)``,
    where ``rel_path`` is the path of the schema relative to ``platypush/schemas``.
    """

    _schemas_path = os.path.dirname(inspect.getfile(platypush.schemas))
    _schema_regex = re.compile(
        r'^(\s*)\.\.\s+schema::\s*([a-zA-Z0-9._]+)\s*(\((.+?)\))?', re.MULTILINE
    )

    @classmethod
    def _get_field_value(cls, field):
        metadata = getattr(field, 'metadata', {})
        if metadata.get('example'):
            return metadata['example']
        if metadata.get('description'):
            return metadata['description']

        if isinstance(field, fields.Number):
            return randint(1, 99)
        if isinstance(field, fields.Boolean):
            return bool(randint(0, 1))
        if isinstance(field, fields.URL):
            return 'https://example.org'
        if isinstance(field, fields.List):
            return [cls._get_field_value(field.inner)]
        if isinstance(field, fields.Dict):
            return {
                cls._get_field_value(field.key_field)
                if field.key_field
                else 'key': cls._get_field_value(field.value_field)
                if field.value_field
                else 'value'
            }
        if isinstance(field, fields.Nested):
            ret = {
                name: cls._get_field_value(f)
                for name, f in field.nested().fields.items()
            }

            return [ret] if field.many else ret

        return str(field.__class__.__name__).lower()

    @classmethod
    def parse(cls, docstring: str, *_, **__) -> str:
        while True:
            m = cls._schema_regex.search(docstring)
            if not m:
                break

            schema_module_name = '.'.join(
                ['platypush.schemas', *(m.group(2).split('.')[:-1])]
            )
            schema_module = importlib.import_module(schema_module_name)
            schema_class = getattr(schema_module, m.group(2).split('.')[-1])
            schema_args = eval(f'dict({m.group(4)})') if m.group(4) else {}
            schema = schema_class(**schema_args)
            parsed_schema = {
                name: cls._get_field_value(field)
                for name, field in schema.fields.items()
                if not field.load_only
            }

            if schema.many:
                parsed_schema = [parsed_schema]

            padding = m.group(1)
            docstring = cls._schema_regex.sub(
                textwrap.indent('\n\n.. code-block:: json\n\n', padding)
                + textwrap.indent(
                    json.dumps(parsed_schema, sort_keys=True, indent=2),
                    padding + ' ',
                ).replace('\n\n', '\n')
                + '\n\n',
                docstring,
            )

        return docstring
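As a rough sketch of what this parser does when invoked standalone (i.e. outside the Sphinx pipeline), using the same schema name that appears in the Matrix plugin changes further down, and assuming ``platypush.schemas.matrix`` and marshmallow are importable; note that the ``.. schema::`` directive must start a line for the regex to match:

docstring = "\n.. schema:: matrix.MatrixDeviceSchema\n"

expanded = SchemaParser.parse(docstring)
# `expanded` now embeds a `.. code-block:: json` section with example values
# generated field by field (random numbers for Number fields, booleans,
# 'https://example.org' for URL fields, nested objects for Nested fields, ...)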
@ -5,7 +5,7 @@ import pathlib
import re

from dataclasses import dataclass
from typing import Collection, Coroutine, Sequence
from typing import Collection, Coroutine, Optional, Sequence
from urllib.parse import urlparse

from nio import (

@ -47,7 +47,7 @@ class Credentials:
    server_url: str
    user_id: str
    access_token: str
    device_id: str | None
    device_id: Optional[str] = None

    def to_dict(self) -> dict:
        return {

@ -98,22 +98,22 @@ class MatrixPlugin(AsyncRunnablePlugin):
    def __init__(
        self,
        server_url: str = 'https://matrix-client.matrix.org',
        user_id: str | None = None,
        password: str | None = None,
        access_token: str | None = None,
        device_name: str | None = 'platypush',
        device_id: str | None = None,
        download_path: str | None = None,
        user_id: Optional[str] = None,
        password: Optional[str] = None,
        access_token: Optional[str] = None,
        device_name: Optional[str] = 'platypush',
        device_id: Optional[str] = None,
        download_path: Optional[str] = None,
        autojoin_on_invite: bool = True,
        autotrust_devices: bool = False,
        autotrust_devices_whitelist: Collection[str] | None = None,
        autotrust_users_whitelist: Collection[str] | None = None,
        autotrust_rooms_whitelist: Collection[str] | None = None,
        autotrust_devices_whitelist: Optional[Collection[str]] = None,
        autotrust_users_whitelist: Optional[Collection[str]] = None,
        autotrust_rooms_whitelist: Optional[Collection[str]] = None,
        **kwargs,
    ):
        """
        Authentication requires user_id/password on the first login.
        Afterwards, session credentials are stored under
        Afterward, session credentials are stored under
        ``<$PLATYPUSH_WORKDIR>/matrix/credentials.json`` (default:
        ``~/.local/share/platypush/matrix/credentials.json``), and you can
        remove the cleartext credentials from your configuration file.

@ -299,9 +299,9 @@ class MatrixPlugin(AsyncRunnablePlugin):
        self,
        room_id: str,
        message_type: str = 'text',
        body: str | None = None,
        attachment: str | None = None,
        tx_id: str | None = None,
        body: Optional[str] = None,
        attachment: Optional[str] = None,
        tx_id: Optional[str] = None,
        ignore_unverified_devices: bool = False,
    ):
        """

@ -388,8 +388,8 @@ class MatrixPlugin(AsyncRunnablePlugin):
    def get_messages(
        self,
        room_id: str,
        start: str | None = None,
        end: str | None = None,
        start: Optional[str] = None,
        end: Optional[str] = None,
        backwards: bool = True,
        limit: int = 10,
    ):

@ -442,10 +442,11 @@ class MatrixPlugin(AsyncRunnablePlugin):
        return MatrixDeviceSchema().dump(self._get_device(device_id))

    @action
    def update_device(self, device_id: str, display_name: str | None = None):
    def update_device(self, device_id: str, display_name: Optional[str] = None):
        """
        Update information about a user's device.

        :param device_id: Device ID.
        :param display_name: New display name.
        :return: .. schema:: matrix.MatrixDeviceSchema
        """

@ -460,8 +461,8 @@ class MatrixPlugin(AsyncRunnablePlugin):
    def delete_devices(
        self,
        devices: Sequence[str],
        username: str | None = None,
        password: str | None = None,
        username: Optional[str] = None,
        password: Optional[str] = None,
    ):
        """
        Delete a list of devices from the user's authorized list and invalidate

@ -564,7 +565,7 @@ class MatrixPlugin(AsyncRunnablePlugin):
            self.client.unverify_device(device)

    @action
    def mxc_to_http(self, url: str, homeserver: str | None = None) -> str:
    def mxc_to_http(self, url: str, homeserver: Optional[str] = None) -> str:
        """
        Convert a Matrix URL (in the format ``mxc://server/media_id``) to an
        HTTP URL.

@ -587,8 +588,8 @@ class MatrixPlugin(AsyncRunnablePlugin):
    def download(
        self,
        url: str,
        download_path: str | None = None,
        filename: str | None = None,
        download_path: Optional[str] = None,
        filename: Optional[str] = None,
        allow_remote=True,
    ):
        """

@ -641,8 +642,8 @@ class MatrixPlugin(AsyncRunnablePlugin):
    def upload(
        self,
        file: str,
        name: str | None = None,
        content_type: str | None = None,
        name: Optional[str] = None,
        content_type: Optional[str] = None,
        encrypt: bool = False,
    ) -> str:
        """

@ -665,9 +666,9 @@ class MatrixPlugin(AsyncRunnablePlugin):
    @action
    def create_room(
        self,
        name: str | None = None,
        alias: str | None = None,
        topic: str | None = None,
        name: Optional[str] = None,
        alias: Optional[str] = None,
        topic: Optional[str] = None,
        is_public: bool = False,
        is_direct: bool = False,
        federate: bool = True,

@ -729,7 +730,7 @@ class MatrixPlugin(AsyncRunnablePlugin):
        self._loop_execute(self.client.room_invite(room_id, user_id))

    @action
    def kick(self, room_id: str, user_id: str, reason: str | None = None):
    def kick(self, room_id: str, user_id: str, reason: Optional[str] = None):
        """
        Kick a user out of a room.


@ -740,7 +741,7 @@ class MatrixPlugin(AsyncRunnablePlugin):
        self._loop_execute(self.client.room_kick(room_id, user_id, reason))

    @action
    def ban(self, room_id: str, user_id: str, reason: str | None = None):
    def ban(self, room_id: str, user_id: str, reason: Optional[str] = None):
        """
        Ban a user from a room.

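The recurring change in the two Matrix modules above and below is purely syntactic: PEP 604 unions such as ``str | None`` in annotations require Python 3.10+ at import time (unless postponed annotation evaluation is enabled), while ``typing.Optional`` works on older interpreters, which is presumably the motivation here. A minimal illustration of the equivalence, not taken from the changeset:

from typing import Optional

# These two annotations describe the same type; only the spelling differs.
def f(x: Optional[str] = None) -> Optional[str]:
    return x

# Python >= 3.10 only:
# def f(x: str | None = None) -> str | None: ...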
@ -100,7 +100,7 @@ class Credentials:
    server_url: str
    user_id: str
    access_token: str
    device_id: str | None
    device_id: Optional[str] = None

    def to_dict(self) -> dict:
        return {

@ -116,13 +116,13 @@ class MatrixClient(AsyncClient):
        self,
        *args,
        credentials_file: str,
        store_path: str | None = None,
        store_path: Optional[str] = None,
        config: Optional[AsyncClientConfig] = None,
        autojoin_on_invite=True,
        autotrust_devices=False,
        autotrust_devices_whitelist: Collection[str] | None = None,
        autotrust_rooms_whitelist: Collection[str] | None = None,
        autotrust_users_whitelist: Collection[str] | None = None,
        autotrust_devices_whitelist: Optional[Collection[str]] = None,
        autotrust_rooms_whitelist: Optional[Collection[str]] = None,
        autotrust_users_whitelist: Optional[Collection[str]] = None,
        **kwargs,
    ):
        credentials_file = os.path.abspath(os.path.expanduser(credentials_file))

@ -158,7 +158,7 @@ class MatrixClient(AsyncClient):
                store_path, 'attachment_keys.json'
            )
        self._encrypted_attachments_keystore = {}
        self._sync_store_timer: threading.Timer | None = None
        self._sync_store_timer: Optional[threading.Timer] = None
        keystore = {}

        try:

@ -206,9 +206,9 @@ class MatrixClient(AsyncClient):

    async def login(
        self,
        password: str | None = None,
        device_name: str | None = None,
        token: str | None = None,
        password: Optional[str] = None,
        device_name: Optional[str] = None,
        token: Optional[str] = None,
    ) -> LoginResponse:
        self._load_from_file()
        login_res = None

@ -289,7 +289,7 @@ class MatrixClient(AsyncClient):

    @logged_in
    async def room_messages(
        self, room_id: str, start: str | None = None, *args, **kwargs
        self, room_id: str, start: Optional[str] = None, *args, **kwargs
    ) -> RoomMessagesResponse:
        if not start:
            start = self._last_batches_by_room.get(room_id, {}).get('prev_batch')

@ -351,9 +351,9 @@ class MatrixClient(AsyncClient):
        )

    def get_devices_by_user(
        self, user_id: str | None = None
        self, user_id: Optional[str] = None
    ) -> Dict[str, Dict[str, OlmDevice]] | Dict[str, OlmDevice]:
        devices = {user: devices for user, devices in self.device_store.items()}
        devices = dict(self.device_store.items())

        if user_id:
            devices = devices.get(user_id, {})

@ -370,7 +370,7 @@ class MatrixClient(AsyncClient):
        return self.get_devices().get(device_id)

    def get_devices_by_room(
        self, room_id: str | None = None
        self, room_id: Optional[str] = None
    ) -> Dict[str, Dict[str, OlmDevice]] | Dict[str, OlmDevice]:
        devices = {
            room_id: {

@ -432,7 +432,7 @@ class MatrixClient(AsyncClient):

    @alru_cache(maxsize=500)
    @client_session
    async def get_profile(self, user_id: str | None = None) -> ProfileGetResponse:
    async def get_profile(self, user_id: Optional[str] = None) -> ProfileGetResponse:
        """
        Cached version of get_profile.
        """

@ -459,7 +459,7 @@ class MatrixClient(AsyncClient):
        self,
        server_name: str,
        media_id: str,
        filename: str | None = None,
        filename: Optional[str] = None,
        allow_remote: bool = True,
    ):
        response = await super().download(
@ -245,7 +245,7 @@ class MediaMplayerPlugin(MediaPlugin):
        """
        Execute a raw MPlayer command. See
        https://www.mplayerhq.hu/DOCS/tech/slave.txt for a reference or call
        :meth:`platypush.plugins.media.mplayer.list_actions` to get a list
        :meth:`.list_actions` to get a list
        """

        args = args or []
@ -368,7 +368,7 @@ class RssPlugin(RunnablePlugin):
                responses[url] = response['content']

            responses = {
                k: v for k, v in responses.items() if not isinstance(v, Exception)
                k: v for k, v in responses.items() if v and not isinstance(v, Exception)
            }

            for url, response in responses.items():
@ -40,11 +40,11 @@ class SoundPlugin(RunnablePlugin):
    ):
        """
        :param input_device: Index or name of the default input device. Use
            :meth:`platypush.plugins.sound.query_devices` to get the
            available devices. Default: system default
            :meth:`.query_devices` to get the available devices. Default: system
            default
        :param output_device: Index or name of the default output device.
            Use :meth:`platypush.plugins.sound.query_devices` to get the
            available devices. Default: system default
            Use :meth:`.query_devices` to get the available devices. Default:
            system default
        :param input_blocksize: Blocksize to be applied to the input device.
            Try to increase this value if you get input overflow errors while
            recording. Default: 1024

@ -160,8 +160,7 @@ class SoundPlugin(RunnablePlugin):
            in the audio file in file mode, 1 if in synth mode
        :param volume: Playback volume, between 0 and 100. Default: 100.
        :param stream_index: If specified, play to an already active stream
            index (you can get them through
            :meth:`platypush.plugins.sound.query_streams`). Default:
            index (you can get them through :meth:`.query_streams`). Default:
            creates a new audio stream through PortAudio.
        :param stream_name: Name of the stream to play to. If set, the sound
            will be played to the specified stream name, or a stream with that
@ -13,6 +13,8 @@ import socket
import ssl
import time
import urllib.request
from importlib.machinery import SourceFileLoader
from importlib.util import spec_from_loader, module_from_spec
from multiprocessing import Lock as PLock
from tempfile import gettempdir
from threading import Lock as TLock

@ -86,7 +88,7 @@ def get_backend_module_by_name(backend_name):
    return None


def get_plugin_class_by_name(plugin_name):
def get_plugin_class_by_name(plugin_name) -> Optional[type]:
    """Gets the class of a plugin by name (e.g. "music.mpd" or "media.vlc")"""

    module = get_plugin_module_by_name(plugin_name)

@ -123,7 +125,7 @@ def get_plugin_name_by_class(plugin) -> Optional[str]:
    return '.'.join(class_tokens)


def get_backend_class_by_name(backend_name: str):
def get_backend_class_by_name(backend_name: str) -> Optional[type]:
    """Gets the class of a backend by name (e.g. "backend.http" or "backend.mqtt")"""

    module = get_backend_module_by_name(backend_name)

@ -685,4 +687,22 @@ def get_message_response(msg):
    return response


def import_file(path: str, name: Optional[str] = None):
    """
    Import a Python file as a module, even if no __init__.py is
    defined in the directory.

    :param path: Path of the file to import.
    :param name: Custom name for the imported module (default: same as the file's basename).
    :return: The imported module.
    """
    name = name or re.split(r"\.py$", os.path.basename(path))[0]
    loader = SourceFileLoader(name, os.path.expanduser(path))
    mod_spec = spec_from_loader(name, loader)
    assert mod_spec, f"Cannot create module specification for {path}"
    mod = module_from_spec(mod_spec)
    loader.exec_module(mod)
    return mod


# vim:sw=4:ts=4:et:
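A short usage sketch of the new ``import_file`` helper added above (the path and module name are made-up examples, not part of the changeset):

from platypush.utils import import_file

# Load a standalone script as a module, even though its directory
# has no __init__.py
hooks = import_file('/home/user/scripts/my_hooks.py', name='my_hooks')
print(hooks.__name__)  # -> 'my_hooks'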
@ -273,6 +273,14 @@ class Dependencies:
    by_pkg_manager: Dict[PackageManagers, Set[str]] = field(default_factory=dict)
    """ All system dependencies, grouped by package manager. """

    def to_dict(self):
        return {
            'before': self.before,
            'packages': list(self.packages),
            'pip': self.pip,
            'after': self.after,
        }

    @property
    def _is_venv(self) -> bool:
        """

@ -517,6 +525,17 @@ class Manifest(ABC):
        :return: The type of the manifest.
        """

    @property
    def file(self) -> str:
        """
        :return: The path to the manifest file.
        """
        return os.path.join(
            get_src_root(),
            *self.package.split('.')[1:],
            'manifest.yaml',
        )

    def _init_deps(self, install: Mapping[str, Iterable[str]]) -> Dependencies:
        deps = Dependencies()
        for key, items in install.items():
@ -6,6 +6,8 @@ from importlib.machinery import ModuleSpec
from types import ModuleType
from typing import Any, Iterator, Sequence, Generator, Optional, List

from .modules import mock_imports


class MockObject:
    """

@ -137,7 +139,7 @@ class MockModule(ModuleType):
class MockFinder(MetaPathFinder):
    """A finder for mocking."""

    def __init__(self, modules: Sequence[str]) -> None:
    def __init__(self, modules: Sequence[str]) -> None:  # noqa
        super().__init__()
        self.modules = modules
        self.loader = MockLoader(self)

@ -146,9 +148,9 @@ class MockFinder(MetaPathFinder):
    def find_spec(
        self,
        fullname: str,
        path: Sequence[Optional[bytes]] | None,
        path: Optional[Sequence[Optional[bytes]]] = None,
        target: Optional[ModuleType] = None,
    ) -> ModuleSpec | None:
    ) -> Optional[ModuleSpec]:
        for modname in self.modules:
            # check if fullname is (or is a descendant of) one of our targets
            if modname == fullname or fullname.startswith(modname + "."):

@ -178,7 +180,7 @@ class MockLoader(Loader):


@contextmanager
def mock(*modules: str) -> Generator[None, None, None]:
def mock(*mods: str) -> Generator[None, None, None]:
    """
    Insert mock modules during context::


@ -188,10 +190,25 @@ def mock(*modules: str) -> Generator[None, None, None]:
    """
    finder = None
    try:
        finder = MockFinder(modules)
        finder = MockFinder(mods)
        sys.meta_path.insert(0, finder)
        yield
    finally:
        if finder:
            sys.meta_path.remove(finder)
            finder.invalidate_caches()


@contextmanager
def auto_mocks():
    """
    Automatically mock all the modules listed in ``mock_imports``.
    """
    with mock(*mock_imports):
        yield


__all__ = [
    "auto_mocks",
    "mock",
]
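A minimal sketch of how ``mock`` and the new ``auto_mocks`` context manager are meant to be used, e.g. from a docs build or test script (the imported plugin module is only an illustration and assumes its remaining dependencies are available):

from platypush.utils.mock import auto_mocks, mock

# Mock a specific set of heavy/optional modules for the duration of the block...
with mock('cv2', 'numpy'):
    import platypush.plugins.qrcode  # example: imports succeed without the real deps

# ...or mock everything listed in mock_imports at once
with auto_mocks():
    import platypush.plugins.qrcode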
@ -0,0 +1,111 @@
mock_imports = [
    "Adafruit_IO",
    "Adafruit_Python_DHT",
    "Leap",
    "PIL",
    "PyOBEX",
    "PyOBEX.client",
    "RPLCD",
    "RPi.GPIO",
    "TheengsDecoder",
    "aiofiles",
    "aiofiles.os",
    "aiohttp",
    "aioxmpp",
    "apiclient",
    "async_lru",
    "avs",
    "bcrypt",
    "bleak",
    "bluetooth",
    "bluetooth_numbers",
    "cpuinfo",
    "croniter",
    "cups",
    "cv2",
    "cwiid",
    "dbus",
    "deepspeech",
    "defusedxml",
    "docutils",
    "envirophat",
    "feedparser",
    "gevent.wsgi",
    "gi",
    "gi.repository",
    "google",
    "google.assistant.embedded",
    "google.assistant.library",
    "google.assistant.library.event",
    "google.assistant.library.file_helpers",
    "google.oauth2.credentials",
    "googlesamples",
    "googlesamples.assistant.grpc.audio_helpers",
    "gps",
    "graphyte",
    "grpc",
    "gunicorn",
    "httplib2",
    "icalendar",
    "imapclient",
    "inotify",
    "inputs",
    "irc",
    "irc.bot",
    "irc.client",
    "irc.connection",
    "irc.events",
    "irc.strings",
    "kafka",
    "keras",
    "linode_api4",
    "luma",
    "mpd",
    "ndef",
    "nfc",
    "nio",
    "numpy",
    "oauth2client",
    "oauth2client",
    "omxplayer",
    "openzwave",
    "pandas",
    "paramiko",
    "picamera",
    "plexapi",
    "pmw3901",
    "psutil",
    "pvcheetah",
    "pvporcupine",
    "pyHS100",
    "pyaudio",
    "pyclip",
    "pydbus",
    "pyfirmata2",
    "pyngrok",
    "pyotp",
    "pysmartthings",
    "pyzbar",
    "rtmidi",
    "samsungtvws",
    "serial",
    "simple_websocket",
    "smartcard",
    "sounddevice",
    "soundfile",
    "telegram",
    "telegram.ext",
    "tenacity",
    "tensorflow",
    "todoist",
    "trello",
    "twilio",
    "uvicorn",
    "watchdog",
    "wave",
    "websockets",
    "zeroconf",
]
"""
List of modules that should be mocked when building the documentation or running tests.
"""