Merge branch 'integrations/joplin'
Commit d218b17cdf
6 changed files with 550 additions and 40 deletions
platypush
@@ -76,6 +76,7 @@ class Note(Storable):
    altitude: Optional[float] = None
    author: Optional[str] = None
    source: Optional[NoteSource] = None
    _path: Optional[str] = None

    def __post_init__(self):
        """
@@ -83,6 +84,28 @@ class Note(Storable):
        """
        self.digest = self._update_digest()

    @property
    def path(self) -> str:
        # If the path is already set, return it
        if self._path:
            return self._path

        # Recursively build the path by expanding the parent collections
        path = []
        parent = self.parent
        while parent:
            path.append(parent.title)
            parent = parent.parent

        return '/'.join(reversed(path)) + f'/{self.title}.md'

    @path.setter
    def path(self, value: str):
        """
        Set the path for the note.
        """
        self._path = value

    def _update_digest(self) -> Optional[str]:
        if self.content and not self.digest:
            self.digest = sha256(self.content.encode('utf-8')).hexdigest()

@@ -96,6 +119,7 @@ class Note(Storable):
                for field in self.__dataclass_fields__
                if not field.startswith('_') and field != 'parent'
            },
            'path': self.path,
            'parent': (
                {
                    'id': self.parent.id if self.parent else None,
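
The new `path` property resolves a note's virtual path by walking up its parent collections and appending the note title as a Markdown filename. A minimal, self-contained sketch of the same traversal (the classes below are illustrative stand-ins, not the actual platypush models):

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class FakeCollection:
        title: str
        parent: Optional['FakeCollection'] = None

    def note_path(title: str, parent: Optional[FakeCollection]) -> str:
        # Walk up the parent chain, then join the segments root-first
        segments = []
        while parent:
            segments.append(parent.title)
            parent = parent.parent
        return '/'.join(reversed(segments)) + f'/{title}.md'

    work = FakeCollection('Work', parent=FakeCollection('Notes'))
    print(note_path('Standup', work))  # -> Notes/Work/Standup.md
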
@@ -1,3 +1,4 @@
import re
from abc import ABC, abstractmethod
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
@@ -18,9 +19,19 @@ from platypush.message.event.notes import (
    CollectionDeletedEvent,
)
from platypush.plugins import RunnablePlugin, action
from platypush.utils import to_datetime

from .db import DbMixin
from ._model import CollectionsDelta, NotesDelta, StateDelta
from ._model import (
    ApiSettings,
    CollectionsDelta,
    Item,
    ItemType,
    NotesDelta,
    Results,
    ResultsType,
    StateDelta,
)


class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
@@ -28,15 +39,19 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
    Base class for note-taking plugins.
    """

    def __init__(self, *args, poll_interval: float = 300, **kwargs):
    def __init__(
        self, *args, poll_interval: float = 300, timeout: Optional[int] = 60, **kwargs
    ):
        """
        :param poll_interval: Poll interval in seconds to check for updates (default: 300).
            If set to zero or null, the plugin will not poll for updates,
            and events will be generated only when you manually call :meth:`.sync`.
        :param timeout: Timeout in seconds for the plugin operations (default: 60).
        """
        RunnablePlugin.__init__(self, *args, poll_interval=poll_interval, **kwargs)
        DbMixin.__init__(self, *args, **kwargs)
        self._sync_lock = RLock()
        self._timeout = timeout
        self.__last_sync_time: Optional[datetime] = None

    @property
@@ -81,7 +96,15 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
        """

    @abstractmethod
    def _fetch_notes(self, *args, **kwargs) -> Iterable[Note]:
    def _fetch_notes(
        self,
        *args,
        filter: Optional[Dict[str, Any]] = None, # pylint: disable=redefined-builtin
        sort: Optional[Dict[str, bool]] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        **kwargs,
    ) -> Iterable[Note]:
        """
        Don't call this directly if possible.
        Instead, use :meth:`.get_notes` method to retrieve notes and update the cache
@@ -144,7 +167,15 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
        """

    @abstractmethod
    def _fetch_collections(self, *args, **kwargs) -> Iterable[NoteCollection]:
    def _fetch_collections(
        self,
        *args,
        filter: Optional[Dict[str, Any]] = None, # pylint: disable=redefined-builtin
        sort: Optional[Dict[str, bool]] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        **kwargs,
    ) -> Iterable[NoteCollection]:
        """
        Don't call this directly if possible.
        Instead, use :meth:`.get_collections` to retrieve collections and update the cache
@@ -190,6 +221,7 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
    def _process_results( # pylint: disable=too-many-positional-arguments
        self,
        items: Iterable[Any],
        results_type: ResultsType,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        sort: Optional[Dict[str, bool]] = None,
@@ -202,7 +234,10 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
            items = [
                item
                for item in items
                if all(getattr(item, k) == v for k, v in filter.items())
                if all(
                    re.search(v, str(getattr(item, k, '')), re.IGNORECASE)
                    for k, v in filter.items()
                )
            ]

            items = sorted(
@@ -211,13 +246,30 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
                reverse=any(not ascending for ascending in sort.values()),
            )

        if offset is not None:
        supports_limit = False
        supports_offset = False

        if results_type == ResultsType.NOTES:
            supports_limit = self._api_settings.supports_notes_limit
            supports_offset = self._api_settings.supports_notes_offset
        elif results_type == ResultsType.COLLECTIONS:
            supports_limit = self._api_settings.supports_collections_limit
            supports_offset = self._api_settings.supports_collections_offset
        elif results_type == ResultsType.SEARCH:
            supports_limit = self._api_settings.supports_search_limit
            supports_offset = self._api_settings.supports_search_offset

        if offset is not None and not supports_offset:
            items = items[offset:]
        if limit is not None:
        if limit is not None and not supports_limit:
            items = items[:limit]

        return items

    @property
    def _api_settings(self) -> ApiSettings:
        return ApiSettings()

    def _dispatch_events(self, *events):
        """
        Dispatch the given events to the event bus.
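
For backends that cannot filter or paginate server-side, `_process_results` does the work client-side: the `filter` values are treated as case-insensitive regular expressions matched against the item fields, and `offset`/`limit` are only applied locally when the corresponding `ApiSettings` flags are off. A standalone sketch of that fallback, using a made-up item class instead of the plugin's cached models:

    import re
    from dataclasses import dataclass
    from typing import Any, Dict, List, Optional

    @dataclass
    class FakeNote:
        title: str
        author: str

    def post_process(
        items: List[Any],
        filter: Optional[Dict[str, str]] = None,
        offset: Optional[int] = None,
        limit: Optional[int] = None,
        supports_offset: bool = False,
        supports_limit: bool = False,
    ) -> List[Any]:
        if filter:
            # Keep items whose fields all match their regex filters (case-insensitive)
            items = [
                item
                for item in items
                if all(
                    re.search(v, str(getattr(item, k, '')), re.IGNORECASE)
                    for k, v in filter.items()
                )
            ]
        # Slice locally only if the backend didn't already apply offset/limit
        if offset is not None and not supports_offset:
            items = items[offset:]
        if limit is not None and not supports_limit:
            items = items[:limit]
        return items

    notes = [FakeNote('Groceries', 'alice'), FakeNote('Meeting notes', 'bob')]
    print(post_process(notes, filter={'title': 'meet', 'author': 'bob'}, limit=10))
    # -> [FakeNote(title='Meeting notes', author='bob')]
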
@@ -346,7 +398,14 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
        with self._sync_lock:
            self._notes = {
                note.id: self._merge_note(note)
                for note in self._fetch_notes(*args, **kwargs)
                for note in self._fetch_notes(
                    *args,
                    limit=limit,
                    offset=offset,
                    sort=sort,
                    filter=filter,
                    **kwargs,
                )
            }
            self._refresh_notes_cache()

@@ -356,6 +415,7 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
            offset=offset,
            sort=sort,
            filter=filter,
            results_type=ResultsType.NOTES,
        )

    def _get_collection(self, collection_id: Any, *args, **kwargs) -> NoteCollection:
@@ -390,7 +450,14 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
        with self._sync_lock:
            self._collections = {
                collection.id: collection
                for collection in self._fetch_collections(*args, **kwargs)
                for collection in self._fetch_collections(
                    *args,
                    limit=limit,
                    offset=offset,
                    sort=sort,
                    filter=filter,
                    **kwargs,
                )
            }
            self._refresh_collections_cache()

@@ -400,6 +467,7 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
            offset=offset,
            sort=sort,
            filter=filter,
            results_type=ResultsType.COLLECTIONS,
        )

    def _refresh_notes_cache(self):
@@ -533,6 +601,108 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
        )
        self._refresh_notes_cache()

    @abstractmethod
    def _search(
        self,
        query: str,
        *args,
        item_type: ItemType,
        include_terms: Optional[Dict[str, Any]] = None,
        exclude_terms: Optional[Dict[str, Any]] = None,
        created_before: Optional[datetime] = None,
        created_after: Optional[datetime] = None,
        updated_before: Optional[datetime] = None,
        updated_after: Optional[datetime] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = 0,
        **kwargs,
    ) -> Results:
        """
        Search for notes or collections based on the provided query and filters.
        """

    @action
    def search(
        self,
        *args,
        query: str,
        item_type: ItemType = ItemType.NOTE,
        include_terms: Optional[Dict[str, Any]] = None,
        exclude_terms: Optional[Dict[str, Any]] = None,
        created_before: Optional[datetime] = None,
        created_after: Optional[datetime] = None,
        updated_before: Optional[datetime] = None,
        updated_after: Optional[datetime] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = 0,
        **kwargs,
    ):
        """
        Search for notes or collections based on the provided query and filters.

        In most cases (but it depends on the backend) double-quoted
        search terms will match exact phrases, while unquoted queries will
        match any of the words in the query.

        Wildcards (again, depending on the backend) in the search terms are
        also supported.

        :param query: The search query string (it will be searched in all the
            fields).
        :param item_type: The type of items to search for - ``note``,
            ``collection``, or ``tag`` (default: ``note``).
        :param include_terms: Optional dictionary of terms to include in the search.
            The keys are field names and the values are strings to match against.
        :param exclude_terms: Optional dictionary of terms to exclude from the search.
            The keys are field names and the values are strings to exclude from the results.
        :param created_before: Optional datetime ISO string or UNIX timestamp
            to filter items created before this date.
        :param created_after: Optional datetime ISO string or UNIX timestamp
            to filter items created after this date.
        :param updated_before: Optional datetime ISO string or UNIX timestamp
            to filter items updated before this date.
        :param updated_after: Optional datetime ISO string or UNIX timestamp
            to filter items updated after this date.
        :param limit: Maximum number of items to retrieve (default: None,
            meaning no limit, or depending on the default limit of the backend).
        :param offset: Offset to start retrieving items from (default: 0).
        :return: An iterable of matching items, format:

        .. code-block:: javascript

            {
                "has_more": false,
                "results": [
                    {
                        "type": "note",
                        "item": {
                            "id": "note-id",
                            "title": "Note Title",
                            "content": "Note content...",
                            "created_at": "2023-10-01T12:00:00Z",
                            "updated_at": "2023-10-01T12:00:00Z",
                            // ...
                        }
                    }
                ]
            }

        """
        return self._search(
            query,
            *args,
            item_type=item_type,
            include_terms=include_terms,
            exclude_terms=exclude_terms,
            created_before=to_datetime(created_before) if created_before else None,
            created_after=to_datetime(created_after) if created_after else None,
            updated_before=to_datetime(updated_before) if updated_before else None,
            updated_after=to_datetime(updated_after) if updated_after else None,
            limit=limit,
            offset=offset,
            **kwargs,
        ).to_dict()

    @action
    def get_note(self, note_id: Any, *args, **kwargs) -> dict:
        """
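
The `search` action returns the plain dictionary produced by `Results.to_dict()`, so downstream code can consume it without importing the plugin classes. A small sketch that walks a response shaped like the docstring example above (the sample payload is made up):

    from typing import Any, Dict, List

    def note_titles(response: Dict[str, Any]) -> List[str]:
        # Collect the titles of the matched notes from a search() response
        return [
            result['item']['title']
            for result in response.get('results', [])
            if result.get('type') == 'note'
        ]

    sample = {
        'has_more': False,
        'results': [
            {
                'type': 'note',
                'item': {
                    'id': 'note-id',
                    'title': 'Note Title',
                    'content': 'Note content...',
                    'created_at': '2023-10-01T12:00:00Z',
                    'updated_at': '2023-10-01T12:00:00Z',
                },
            }
        ],
    }

    print(note_titles(sample))  # -> ['Note Title']
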
@@ -565,7 +735,9 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
        :param sort: A dictionary specifying the fields to sort by and their order.
            Example: {'created_at': True} sorts by creation date in ascending
            order, while {'created_at': False} sorts in descending order.
        :param filter: A dictionary specifying filters to apply to the collections.
        :param filter: A dictionary specifying filters to apply to the notes, in the form
            of a dictionary where the keys are field names and the values are regular expressions
            to match against the field values.
        :param fetch: If True, always fetch the latest collections from the backend,
            regardless of the cache state (default: False).
        :param kwargs: Additional keyword arguments to pass to the fetch method.
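
Since the `filter` values are regular expressions, a hypothetical call fetching the latest matching notes could look like the sketch below (the plugin name, field values and the `get_plugin` invocation assume a running platypush instance with the Joplin plugin configured):

    from platypush.context import get_plugin

    # Hypothetical invocation: case-insensitive regex filter on the title,
    # newest notes first, at most 10 results.
    notes = get_plugin('joplin').get_notes(
        filter={'title': 'shopping'},
        sort={'created_at': False},
        limit=10,
    )
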
@@ -769,7 +941,9 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
        :param sort: A dictionary specifying the fields to sort by and their order.
            Example: {'created_at': True} sorts by creation date in ascending
            order, while {'created_at': False} sorts in descending order.
        :param filter: A dictionary specifying filters to apply to the collections.
        :param filter: A dictionary specifying filters to apply to the collections, in the form
            of a dictionary where the keys are field names and the values are regular expressions
            to match against the field values.
        :param fetch: If True, always fetch the latest collections from the backend,
            regardless of the cache state (default: False).
        :param kwargs: Additional keyword arguments to pass to the fetch method.
@@ -973,6 +1147,9 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
        )

        # Update the local cache with the latest notes and collections
        if not state_delta.is_empty():
            self.logger.info('Synchronizing changes: %s', state_delta)

        self._db_sync(state_delta)
        self._last_sync_time = datetime.fromtimestamp(state_delta.latest_updated_at)
        self._process_events(state_delta)
@@ -1012,3 +1189,14 @@ class BaseNotePlugin(RunnablePlugin, DbMixin, ABC):
                self.logger.error('Error during sync: %s', e)
            finally:
                self.wait_stop(self.poll_interval)


__all__ = [
    'ApiSettings',
    'BaseNotePlugin',
    'Item',
    'ItemType',
    'Note',
    'NoteCollection',
    'NoteSource',
]

@@ -1,7 +1,8 @@
from dataclasses import dataclass, field
from typing import Any, Dict
from enum import Enum
from typing import Any, Dict, Iterable

from platypush.common.notes import Note, NoteCollection
from platypush.common.notes import Note, NoteCollection, Serializable, Storable


@dataclass
@@ -20,6 +21,16 @@ class NotesDelta:
        """
        return not (self.added or self.updated or self.deleted)

    def __str__(self):
        """
        String representation of the NotesDelta.
        """
        return (
            f'NotesDelta(added={len(self.added)}, '
            f'updated={len(self.updated)}, '
            f'deleted={len(self.deleted)})'
        )


@dataclass
class CollectionsDelta:
@@ -37,6 +48,16 @@ class CollectionsDelta:
        """
        return not (self.added or self.updated or self.deleted)

    def __str__(self):
        """
        String representation of the CollectionsDelta.
        """
        return (
            f'CollectionsDelta(added={len(self.added)}, '
            f'updated={len(self.updated)}, '
            f'deleted={len(self.deleted)})'
        )


@dataclass
class StateDelta:
@@ -53,3 +74,93 @@ class StateDelta:
        Check if the state delta is empty (no changes in notes or collections).
        """
        return self.notes.is_empty() and self.collections.is_empty()

    def __str__(self):
        """
        String representation of the StateDelta.
        """
        return (
            f'StateDelta(notes={self.notes}, '
            f'collections={self.collections}, '
            f'latest_updated_at={self.latest_updated_at})'
        )


class ItemType(Enum):
    """
    Enum representing the type of item.
    """

    NOTE = 'note'
    COLLECTION = 'collection'
    TAG = 'tag'


@dataclass
class Item(Serializable):
    """
    Represents a generic note item.
    """

    type: ItemType
    item: Storable

    def __post_init__(self):
        """
        Validate the item type after initialization.
        """
        if not isinstance(self.type, ItemType):
            raise ValueError(f'Invalid item type: {self.type}')

    def to_dict(self) -> Dict[str, Any]:
        """
        Convert the item to a dictionary representation.
        """
        return {
            'type': self.type.value,
            'item': self.item.to_dict(),
        }


@dataclass
class Results(Serializable):
    """
    Represents a collection of results, which can include notes, collections, and tags.
    """

    items: Iterable[Item] = field(default_factory=list)
    has_more: bool = False

    def to_dict(self) -> Dict[str, Any]:
        """
        Convert the results to a dictionary representation.
        """
        return {
            'results': [item.to_dict() for item in self.items],
            'has_more': self.has_more,
        }


@dataclass
class ApiSettings:
    """
    Represents plugin-specific API settings.
    """

    supports_notes_limit: bool = False
    supports_notes_offset: bool = False
    supports_collections_limit: bool = False
    supports_collections_offset: bool = False
    supports_search_limit: bool = False
    supports_search_offset: bool = False
    supports_search: bool = False


class ResultsType(Enum):
    """
    Enum representing the type of results.
    """

    NOTES = 'notes'
    COLLECTIONS = 'collections'
    SEARCH = 'search'

@@ -2,6 +2,7 @@ from contextlib import contextmanager
from threading import Event, RLock
from typing import Any, Dict, Generator

from sqlalchemy import and_
from sqlalchemy.orm import Session

from platypush.common.notes import Note, NoteCollection
@@ -16,7 +17,7 @@ from ._model import (
)


class DbMixin:
class DbMixin: # pylint: disable=too-few-public-methods
    """
    Mixin class for the database synchronization layer.
    """
@@ -73,6 +74,7 @@ class DbMixin:
        Convert a Note object to a DbNote object.
        """
        return DbNote(
            id=note._db_id, # pylint:disable=protected-access
            external_id=note.id,
            plugin=self._plugin_name,
            title=note.title,
@@ -183,6 +185,7 @@ class DbMixin:
            return

        with self._get_db_session(autoflush=False) as session:
            # Add new/updated collections
            for collection in [
                *state.collections.added.values(),
                *state.collections.updated.values(),
@@ -190,21 +193,36 @@ class DbMixin:
                db_collection = self._to_db_collection(collection)
                session.merge(db_collection)

            for collection in state.collections.deleted.values():
                session.query(DbNoteCollection).filter_by(
                    id=collection._db_id # pylint:disable=protected-access
                ).delete()
            # Delete removed collections
            session.query(DbNoteCollection).filter(
                and_(
                    DbNoteCollection.plugin == self._plugin_name,
                    DbNoteCollection.external_id.in_(
                        [
                            collection.id
                            for collection in state.collections.deleted.values()
                        ]
                    ),
                )
            ).delete()

            session.flush() # Ensure collections are saved before notes
            # Ensure that collections are saved before notes
            session.flush()

            # Add new/updated notes
            for note in [*state.notes.added.values(), *state.notes.updated.values()]:
                db_note = self._to_db_note(note)
                session.merge(db_note)

            for note in state.notes.deleted.values():
                session.query(DbNote).filter_by(
                    id=note._db_id # pylint:disable=protected-access
                ).delete()
            # Delete removed notes
            session.query(DbNote).filter(
                and_(
                    DbNote.plugin == self._plugin_name,
                    DbNote.external_id.in_(
                        [note.id for note in state.notes.deleted.values()]
                    ),
                )
            ).delete()

            session.commit()

@@ -1,11 +1,17 @@
import datetime
from typing import Any, List, Optional
from datetime import datetime
from typing import Any, Dict, List, Optional
from urllib.parse import urljoin

import requests

from platypush.common.notes import Note, NoteCollection, NoteSource
from platypush.plugins._notes import BaseNotePlugin
from platypush.plugins._notes import (
    ApiSettings,
    BaseNotePlugin,
    Item,
    ItemType,
    Results,
)


class JoplinPlugin(BaseNotePlugin):
@@ -141,6 +147,26 @@ class JoplinPlugin(BaseNotePlugin):
        'updated_time',
    )

    # Mapping of the internal note fields to the Joplin API fields.
    _joplin_search_fields = {
        'id': 'id',
        'title': 'title',
        'content': 'body',
        'type': 'type',
        'parent': 'notebook',
        'latitude': 'latitude',
        'longitude': 'longitude',
        'altitude': 'altitude',
        'source': 'sourceurl',
    }

    # Mapping of ItemType values to Joplin API item types.
    _joplin_item_types = {
        ItemType.NOTE: 'note',
        ItemType.COLLECTION: 'folder',
        ItemType.TAG: 'tag',
    }

    def __init__(self, *args, host: str, port: int = 41184, token: str, **kwargs):
        """
        :param host: The hostname or IP address of your Joplin application.
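
For reference, this is roughly how the constructor arguments documented here combine with the `poll_interval` and `timeout` options inherited from `BaseNotePlugin`. The values are placeholders, and in a real deployment these options would normally live in the `joplin` section of the platypush configuration rather than being passed to the class directly (constructing the plugin by hand also assumes a platypush environment is available):

    from platypush.plugins.joplin import JoplinPlugin

    joplin = JoplinPlugin(
        host='localhost',        # where the Joplin Web Clipper service listens
        port=41184,              # Joplin's default Web Clipper port
        token='YOUR_API_TOKEN',  # placeholder: copy it from Joplin's Web Clipper settings
        poll_interval=300,       # seconds between sync polls (from BaseNotePlugin)
        timeout=60,              # per-request timeout in seconds (from BaseNotePlugin)
    )
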
@@ -172,7 +198,9 @@ class JoplinPlugin(BaseNotePlugin):
        )

        params['token'] = self.token
        response = requests.request(method, url, params=params, timeout=10, **kwargs)
        response = requests.request(
            method, url, params=params, timeout=self._timeout, **kwargs
        )

        if not response.ok:
            err = response.text
@@ -206,13 +234,13 @@ class JoplinPlugin(BaseNotePlugin):
        )

    @staticmethod
    def _parse_time(t: Optional[int]) -> Optional[datetime.datetime]:
    def _parse_time(t: Optional[int]) -> Optional[datetime]:
        """
        Parse a Joplin timestamp (in milliseconds) into a datetime object.
        """
        if t is None:
            return None
        return datetime.datetime.fromtimestamp(t / 1000)
        return datetime.fromtimestamp(t / 1000)

    def _to_note(self, data: dict) -> Note:
        parent_id = data.get('parent_id')
@@ -252,6 +280,17 @@ class JoplinPlugin(BaseNotePlugin):
            updated_at=self._parse_time(data.get('updated_time')),
        )

    def _offset_to_page(
        self, offset: Optional[int], limit: Optional[int]
    ) -> Optional[int]:
        """
        Convert an offset to a page number for Joplin API requests.
        """
        limit = limit or 100 # Default limit if not provided
        if offset is None:
            return None
        return (offset // limit) + 1 if limit > 0 else 1

    def _fetch_note(self, note_id: Any, *_, **__) -> Optional[Note]:
        note = None
        err = None
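
`_offset_to_page` maps the generic offset/limit pagination of the base API onto Joplin's 1-indexed, page-based pagination. A few worked values using the same arithmetic (the default page size of 100 mirrors the fallback in the method above):

    def offset_to_page(offset, limit):
        # Mirrors JoplinPlugin._offset_to_page
        limit = limit or 100
        if offset is None:
            return None
        return (offset // limit) + 1 if limit > 0 else 1

    print(offset_to_page(None, 50))  # None -> let Joplin pick the page
    print(offset_to_page(0, 50))     # 1   -> first page
    print(offset_to_page(100, 50))   # 3   -> items 100-149 when limit=50
    print(offset_to_page(120, 50))   # 3   -> offsets are rounded down to a page boundary
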
@@ -282,17 +321,27 @@ class JoplinPlugin(BaseNotePlugin):

        return self._to_note(note) # type: ignore[return-value]

    def _fetch_notes(self, *_, **__) -> List[Note]:
    def _fetch_notes(
        self, *_, limit: Optional[int] = None, offset: Optional[int] = None, **__
    ) -> List[Note]:
        """
        Fetch notes from Joplin.
        """
        notes_data = (
            self._exec(
                'GET', 'notes', params={'fields': ','.join(self._default_note_fields)}
            )
            or {}
        ).get('items', [])
        return [self._to_note(note) for note in notes_data]
        return [
            self._to_note(note)
            for note in (
                self._exec(
                    'GET',
                    'notes',
                    params={
                        'fields': ','.join(self._default_note_fields),
                        'limit': limit,
                        'page': self._offset_to_page(offset=offset, limit=limit),
                    },
                )
                or {}
            ).get('items', [])
        ]

    def _create_note(
        self,
@@ -382,7 +431,9 @@ class JoplinPlugin(BaseNotePlugin):

        return self._to_collection(collection_data)

    def _fetch_collections(self, *_, **__) -> List[NoteCollection]:
    def _fetch_collections(
        self, *_, limit: Optional[int] = None, offset: Optional[int] = None, **__
    ) -> List[NoteCollection]:
        """
        Fetch collections (folders) from Joplin.
        """
@@ -390,7 +441,11 @@ class JoplinPlugin(BaseNotePlugin):
            self._exec(
                'GET',
                'folders',
                params={'fields': ','.join(self._default_collection_fields)},
                params={
                    'fields': ','.join(self._default_collection_fields),
                    'limit': limit,
                    'page': self._offset_to_page(offset=offset, limit=limit),
                },
            )
            or {}
        ).get('items', [])
@@ -440,5 +495,119 @@ class JoplinPlugin(BaseNotePlugin):
        """
        self._exec('DELETE', f'folders/{collection_id}')

    def _build_search_query(
        self,
        query: str,
        *,
        include_terms: Optional[Dict[str, Any]] = None,
        exclude_terms: Optional[Dict[str, Any]] = None,
        created_before: Optional[datetime] = None,
        created_after: Optional[datetime] = None,
        updated_before: Optional[datetime] = None,
        updated_after: Optional[datetime] = None,
    ) -> str:
        query += ' ' + ' '.join(
            [
                f'{self._joplin_search_fields.get(k, k)}:"{v}"'
                for k, v in (include_terms or {}).items()
            ]
        )

        query += ' ' + ' '.join(
            [
                f'-{self._joplin_search_fields.get(k, k)}:"{v}"'
                for k, v in (exclude_terms or {}).items()
            ]
        )

        if created_before:
            query += f' -created:{created_before.strftime("%Y%m%d")}'
        if created_after:
            query += f' created:{created_after.strftime("%Y%m%d")}'
        if updated_before:
            query += f' -updated:{updated_before.strftime("%Y%m%d")}'
        if updated_after:
            query += f' updated:{updated_after.strftime("%Y%m%d")}'

        return query.strip()

    @property
    def _api_settings(self) -> ApiSettings:
        return ApiSettings(
            supports_notes_limit=True,
            supports_notes_offset=True,
            supports_collections_limit=True,
            supports_collections_offset=True,
            supports_search_limit=True,
            supports_search_offset=True,
            supports_search=True,
        )

    def _search(
        self,
        query: str,
        *_,
        item_type: ItemType,
        include_terms: Optional[Dict[str, Any]] = None,
        exclude_terms: Optional[Dict[str, Any]] = None,
        created_before: Optional[datetime] = None,
        created_after: Optional[datetime] = None,
        updated_before: Optional[datetime] = None,
        updated_after: Optional[datetime] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = 0,
        **__,
    ) -> Results:
        """
        Search for notes or collections based on the provided query and filters.
        """
        api_item_type = self._joplin_item_types.get(item_type)
        assert (
            api_item_type
        ), f'Invalid item type: {item_type}. Supported types: {list(self._joplin_item_types.keys())}'

        limit = limit or 100
        results = (
            self._exec(
                'GET',
                'search',
                params={
                    'type': api_item_type,
                    'limit': limit,
                    'page': self._offset_to_page(offset=offset, limit=limit),
                    'fields': ','.join(
                        self._default_note_fields
                        if item_type == ItemType.NOTE
                        else self._default_collection_fields
                    ),
                    'query': self._build_search_query(
                        query,
                        include_terms=include_terms,
                        exclude_terms=exclude_terms,
                        created_before=created_before,
                        created_after=created_after,
                        updated_before=updated_before,
                        updated_after=updated_after,
                    ),
                },
            )
            or {}
        )

        return Results(
            has_more=bool(results.get('has_more')),
            items=[
                Item(
                    type=item_type,
                    item=(
                        self._to_note(result)
                        if item_type == ItemType.NOTE
                        else self._to_collection(result)
                    ),
                )
                for result in results.get('items', [])
            ],
        )


# vim:sw=4:ts=4:et:
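
To get a feel for the strings `_build_search_query` assembles, here is a hypothetical input/output pair. Joplin's search syntax uses `field:"value"` terms, a `-` prefix for negation and `YYYYMMDD` dates, and the field names go through the `_joplin_search_fields` mapping above (`content` becomes `body`, `parent` becomes `notebook`); the sample values are made up:

    from datetime import datetime

    # Hypothetical arguments, as they would arrive from the search action:
    #   query='meeting', include_terms={'content': 'budget'},
    #   exclude_terms={'parent': 'Archive'}, updated_after=datetime(2023, 10, 1)
    updated_after = datetime(2023, 10, 1)
    expected = 'meeting body:"budget" -notebook:"Archive" updated:' + updated_after.strftime('%Y%m%d')
    print(expected)  # meeting body:"budget" -notebook:"Archive" updated:20231001
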
@@ -700,7 +700,7 @@ def to_datetime(t: Union[str, int, float, datetime.datetime]) -> datetime.dateti
    if isinstance(t, (int, float)):
        return datetime.datetime.fromtimestamp(t, tz=tz.tzutc())
    if isinstance(t, str):
        return parser.parse(t)
        return parser.isoparse(t)
    return t
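
With this change, string inputs go through `dateutil`'s stricter `isoparse` instead of the permissive general parser; numeric inputs are still treated as UTC UNIX timestamps and `datetime` objects pass through unchanged. A hedged sketch of the expected behaviour (it assumes `python-dateutil` is available, which this utility already relies on):

    from datetime import datetime
    from dateutil import parser, tz

    def to_datetime_sketch(t):
        # Same shape as platypush.utils.to_datetime after this change
        if isinstance(t, (int, float)):
            return datetime.fromtimestamp(t, tz=tz.tzutc())
        if isinstance(t, str):
            return parser.isoparse(t)  # strict ISO-8601 parsing
        return t

    print(to_datetime_sketch(1696161600))              # 2023-10-01 12:00:00+00:00
    print(to_datetime_sketch('2023-10-01T12:00:00Z'))  # 2023-10-01 12:00:00+00:00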