2019-12-08 16:25:03 +01:00
|
|
|
import importlib
|
|
|
|
import inspect
|
|
|
|
import json
|
2023-05-17 00:05:22 +02:00
|
|
|
import os
|
|
|
|
import pathlib
|
|
|
|
import pkgutil
|
2023-10-09 01:22:04 +02:00
|
|
|
from concurrent.futures import Future, ThreadPoolExecutor
|
|
|
|
from typing import List, Optional
|
2019-12-08 16:25:03 +01:00
|
|
|
|
2019-12-30 10:16:55 +01:00
|
|
|
from platypush.backend import Backend
|
2023-10-09 01:22:04 +02:00
|
|
|
from platypush.common.db import override_definitions
|
|
|
|
from platypush.common.reflection import Integration, Message as MessageMetadata
|
2020-06-17 22:31:04 +02:00
|
|
|
from platypush.config import Config
|
2019-12-08 16:25:03 +01:00
|
|
|
from platypush.plugins import Plugin, action
|
2023-09-17 02:11:52 +02:00
|
|
|
from platypush.message import Message
|
2019-12-30 18:50:01 +01:00
|
|
|
from platypush.message.event import Event
|
2020-03-05 17:39:50 +01:00
|
|
|
from platypush.message.response import Response
|
2023-10-18 22:10:32 +02:00
|
|
|
from platypush.utils import get_enabled_backends, get_enabled_plugins
|
2023-10-09 01:22:04 +02:00
|
|
|
from platypush.utils.mock import auto_mocks
|
2024-01-06 03:02:31 +01:00
|
|
|
from platypush.utils.manifest import Manifest, Manifests, PackageManagers
|
2023-10-09 01:22:04 +02:00
|
|
|
|
|
|
|
from ._cache import Cache
|
2023-05-17 00:05:22 +02:00
|
|
|
from ._serialize import ProcedureEncoder
|
2019-12-08 16:25:03 +01:00
|
|
|
|
|
|
|
|
|
|
|
class InspectPlugin(Plugin):
    """
    This plugin can be used to inspect platypush plugins and backends
    """

    _num_workers = 8
    """Number of threads to use for the inspection."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._cache = Cache()
        self._load_cache()

    @property
    def cache_file(self) -> str:
        """
        :return: The path to the components cache file.
        """
        import platypush

        return os.path.join(
            os.path.dirname(inspect.getfile(platypush)),
            'components.json.gz',
        )

    def _load_cache(self):
        """
        Loads the components cache from disk.
        """
        with self._cache.lock(), auto_mocks(), override_definitions():
            try:
                self._cache = Cache.load(self.cache_file)
            except Exception as e:
                # A missing/corrupted cache is not fatal: it will be rebuilt
                # on the next refresh_cache() call.
                self.logger.warning(
                    'Could not initialize the components cache from %s: %s',
                    self.cache_file,
                    e,
                )

    def refresh_cache(self, force: bool = False):
        """
        Refreshes the components cache.

        :param force: If True, rescan every component even if it hasn't been
            modified since the last cache update. A cache format version
            mismatch also forces a full refresh.
        :return: The refreshed cache object.
        """
        cache_version_differs = self._cache.version != Cache.cur_version
        force = force or cache_version_differs

        with self._cache.lock(), auto_mocks(), override_definitions(), ThreadPoolExecutor(
            self._num_workers
        ) as pool:
            futures = []

            # Plugins and backends ship a manifest file and are scanned
            # through it.
            for base_type in [Plugin, Backend]:
                futures.append(
                    pool.submit(
                        self._scan_integrations,
                        base_type,
                        pool=pool,
                        force_refresh=force,
                        futures=futures,
                    )
                )

            # Events and responses have no manifest and are discovered by
            # walking their modules.
            for base_type in [Event, Response]:
                futures.append(
                    pool.submit(
                        self._scan_modules,
                        base_type,
                        pool=pool,
                        force_refresh=force,
                        futures=futures,
                    )
                )

            # The scanners append new futures to the shared list as they go,
            # so keep draining until no pending job is left.
            while futures:
                futures.pop().result()

            if self._cache.has_changes:
                self.logger.info('Saving new components cache to %s', self.cache_file)
                self._cache.dump(self.cache_file)
                self._cache.loaded_at = self._cache.saved_at

        return self._cache

    def _scan_integration(self, manifest: Manifest):
        """
        Scans a single integration from the manifest and adds it to the cache.
        """
        try:
            self._cache_integration(Integration.from_manifest(manifest.file))
        except Exception as e:
            self.logger.warning(
                'Could not import module %s: %s',
                manifest.package,
                e,
            )

    def _scan_integrations(
        self,
        base_type: type,
        pool: ThreadPoolExecutor,
        futures: List[Future],
        force_refresh: bool = False,
    ):
        """
        Scans the integrations with a manifest file (plugins and backends) and
        refreshes the cache.

        :param base_type: Either ``Plugin`` or ``Backend``.
        :param pool: The thread pool the per-integration scans are submitted to.
        :param futures: Shared list the submitted futures are appended to.
        :param force_refresh: If True, rescan regardless of file mtimes.
        """
        for manifest in Manifests.by_base_class(base_type):
            # An integration metadata needs to be refreshed if it's been
            # modified since it was last loaded, or if it's not in the
            # cache.
            if force_refresh or self._needs_refresh(manifest.file):
                futures.append(pool.submit(self._scan_integration, manifest))

    def _scan_module(self, base_type: type, modname: str):
        """
        Scans a single module for objects that match the given base_type and
        adds them to the cache.
        """
        try:
            module = importlib.import_module(modname)
        except Exception as e:
            self.logger.warning('Could not import module %s: %s', modname, e)
            return

        for _, obj_type in inspect.getmembers(module):
            if (
                inspect.isclass(obj_type)
                and issubclass(obj_type, base_type)
                # Exclude the base_type itself
                and obj_type != base_type
            ):
                self.logger.info(
                    'Scanned %s: %s',
                    base_type.__name__,
                    f'{module.__name__}.{obj_type.__name__}',
                )

                self._cache.set(
                    base_type, obj_type, MessageMetadata.by_type(obj_type).to_dict()
                )

    def _scan_modules(
        self,
        base_type: type,
        pool: ThreadPoolExecutor,
        futures: List[Future],
        force_refresh: bool = False,
    ):
        """
        A generator that scans the modules given a ``base_type`` (e.g. ``Event``).

        It's a bit more inefficient than :meth:`._scan_integrations` because it
        needs to inspect all the members of a module to find the ones that
        match the given ``base_type``, but it works fine for simple components
        (like messages) that don't require extra recursive parsing and don't
        have a manifest.
        """
        prefix = base_type.__module__ + '.'
        path = str(pathlib.Path(inspect.getfile(base_type)).parent)

        for _, modname, __ in pkgutil.walk_packages(
            path=[path], prefix=prefix, onerror=lambda _: None
        ):
            try:
                # Strip the leading package components (e.g. the
                # ``platypush.message.event`` prefix) to resolve the module
                # file relative to ``path``.
                filename = self._module_filename(path, '.'.join(modname.split('.')[3:]))
                if not (force_refresh or self._needs_refresh(filename)):
                    continue
            except Exception as e:
                self.logger.warning('Could not scan module %s: %s', modname, e)
                continue

            futures.append(pool.submit(self._scan_module, base_type, modname))

    def _needs_refresh(self, filename: str) -> bool:
        """
        :return: True if the given file needs to be refreshed in the cache.
        """
        dirname = os.path.dirname(filename)
        if not os.path.isdir(dirname):
            return True

        # NOTE: this compares the mtime of the *containing directory* against
        # the cache save time, so a whole directory is refreshed at once.
        return os.lstat(dirname).st_mtime > (self._cache.saved_at or 0)

    @staticmethod
    def _module_filename(path: str, modname: str) -> str:
        """
        :param path: Path to the module.
        :param modname: Module name.
        :return: The full path to the module file.
        :raises AssertionError: If neither a ``<modname>.py`` file nor a
            package ``__init__.py`` exists under ``path``.
        """
        filename = os.path.join(path, *modname.split('.')) + '.py'

        # Fall back to the package's __init__.py if there's no plain module
        # file.
        if not os.path.isfile(filename):
            filename = os.path.join(path, *modname.split('.'), '__init__.py')

        # Fix: interpolate the offending path into the error message (the
        # f-string previously contained no placeholder).
        assert os.path.isfile(filename), f'No such file or directory: {filename}'
        return filename

    def _cache_integration(self, integration: Integration) -> dict:
        """
        :param integration: The :class:`.IntegrationMetadata` object.
        :return: The initialized component's metadata dict.
        """
        self.logger.info(
            'Scanned %s: %s', integration.base_type.__name__, integration.name
        )
        meta = integration.to_dict()
        self._cache.set(integration.base_type, integration.type, meta)
        return meta

    @action
    def get_all_plugins(self):
        """
        Get information about all the available plugins.
        """
        return json.dumps(self._cache.to_dict().get('plugins', {}), cls=Message.Encoder)

    @action
    def get_all_backends(self):
        """
        Get information about all the available backends.
        """
        return json.dumps(
            self._cache.to_dict().get('backends', {}), cls=Message.Encoder
        )

    @action
    def get_all_events(self):
        """
        Get information about all the available events.
        """
        return json.dumps(self._cache.to_dict().get('events', {}), cls=Message.Encoder)

    @action
    def get_all_responses(self):
        """
        Get information about all the available responses.
        """
        return json.dumps(
            self._cache.to_dict().get('responses', {}), cls=Message.Encoder
        )

    @action
    def get_procedures(self) -> dict:
        """
        Get the list of procedures installed on the device.
        """
        # Round-trip through JSON so the returned dict only contains
        # plain-serializable values.
        return json.loads(json.dumps(Config.get_procedures(), cls=ProcedureEncoder))

    @action
    def get_config(self, entry: Optional[str] = None) -> Optional[dict]:
        """
        Return the configuration of the application or of a section.

        :param entry: [Optional] configuration entry name to retrieve (e.g. ``workdir`` or ``backend.http``).
        :return: The requested configuration object.
        """
        if entry:
            return Config.get(entry)

        return Config.get()

    @action
    def get_enabled_plugins(self) -> List[str]:
        """
        Get the list of enabled plugins.
        """
        return list(get_enabled_plugins().keys())

    @action
    def get_enabled_backends(self) -> List[str]:
        """
        Get the list of enabled backends.
        """
        return list(get_enabled_backends().keys())

    @action
    def get_pkg_managers(self) -> dict:
        """
        Get the list of supported package managers. This is supposed to be an
        internal-only method, only used by the UI to populate the install
        commands.
        """
        pkg_manager = PackageManagers.scan()
        return {
            'items': {
                pkg.value.executable: {
                    'executable': pkg.value.executable,
                    'install': pkg.value.install,
                    'install_doc': pkg.value.install_doc,
                    'uninstall': pkg.value.uninstall,
                    'list': pkg.value.list,
                    'default_os': pkg.value.default_os,
                }
                for pkg in PackageManagers
            },
            'current': pkg_manager.value.executable if pkg_manager else None,
        }
|
|
|
|
|
2019-12-08 16:25:03 +01:00
|
|
|
|
|
|
|
# vim:sw=4:ts=4:et:
|