diff --git a/platypush/backend/covid19/__init__.py b/platypush/backend/covid19/__init__.py index 598f10871..1be1db016 100644 --- a/platypush/backend/covid19/__init__.py +++ b/platypush/backend/covid19/__init__.py @@ -3,8 +3,7 @@ import os from typing import Optional, Union, List, Dict, Any from sqlalchemy import create_engine, Column, Integer, String, DateTime -from sqlalchemy.orm import sessionmaker, scoped_session -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker, scoped_session, declarative_base from platypush.backend import Backend from platypush.config import Config @@ -17,10 +16,10 @@ Session = scoped_session(sessionmaker()) class Covid19Update(Base): - """ Models the Covid19Data table """ + """Models the Covid19Data table""" __tablename__ = 'covid19data' - __table_args__ = ({'sqlite_autoincrement': True}) + __table_args__ = {'sqlite_autoincrement': True} country = Column(String, primary_key=True) confirmed = Column(Integer, nullable=False, default=0) @@ -40,7 +39,12 @@ class Covid19Backend(Backend): """ # noinspection PyProtectedMember - def __init__(self, country: Optional[Union[str, List[str]]], poll_seconds: Optional[float] = 3600.0, **kwargs): + def __init__( + self, + country: Optional[Union[str, List[str]]], + poll_seconds: Optional[float] = 3600.0, + **kwargs + ): """ :param country: Default country (or list of countries) to retrieve the stats for. It can either be the full country name or the country code. 
Special values: @@ -56,7 +60,9 @@ class Covid19Backend(Backend): super().__init__(poll_seconds=poll_seconds, **kwargs) self._plugin: Covid19Plugin = get_plugin('covid19') self.country: List[str] = self._plugin._get_countries(country) - self.workdir = os.path.join(os.path.expanduser(Config.get('workdir')), 'covid19') + self.workdir = os.path.join( + os.path.expanduser(Config.get('workdir')), 'covid19' + ) self.dbfile = os.path.join(self.workdir, 'data.db') os.makedirs(self.workdir, exist_ok=True) @@ -67,22 +73,30 @@ class Covid19Backend(Backend): self.logger.info('Stopped Covid19 backend') def _process_update(self, summary: Dict[str, Any], session: Session): - update_time = datetime.datetime.fromisoformat(summary['Date'].replace('Z', '+00:00')) + update_time = datetime.datetime.fromisoformat( + summary['Date'].replace('Z', '+00:00') + ) - self.bus.post(Covid19UpdateEvent( - country=summary['Country'], - country_code=summary['CountryCode'], - confirmed=summary['TotalConfirmed'], - deaths=summary['TotalDeaths'], - recovered=summary['TotalRecovered'], - update_time=update_time, - )) + self.bus.post( + Covid19UpdateEvent( + country=summary['Country'], + country_code=summary['CountryCode'], + confirmed=summary['TotalConfirmed'], + deaths=summary['TotalDeaths'], + recovered=summary['TotalRecovered'], + update_time=update_time, + ) + ) - session.merge(Covid19Update(country=summary['CountryCode'], - confirmed=summary['TotalConfirmed'], - deaths=summary['TotalDeaths'], - recovered=summary['TotalRecovered'], - last_updated_at=update_time)) + session.merge( + Covid19Update( + country=summary['CountryCode'], + confirmed=summary['TotalConfirmed'], + deaths=summary['TotalDeaths'], + recovered=summary['TotalRecovered'], + last_updated_at=update_time, + ) + ) def loop(self): # noinspection PyUnresolvedReferences @@ -90,23 +104,30 @@ class Covid19Backend(Backend): if not summaries: return - engine = create_engine('sqlite:///{}'.format(self.dbfile), connect_args={'check_same_thread': 
False}) + engine = create_engine( + 'sqlite:///{}'.format(self.dbfile), + connect_args={'check_same_thread': False}, + ) Base.metadata.create_all(engine) Session.configure(bind=engine) session = Session() last_records = { record.country: record - for record in session.query(Covid19Update).filter(Covid19Update.country.in_(self.country)).all() + for record in session.query(Covid19Update) + .filter(Covid19Update.country.in_(self.country)) + .all() } for summary in summaries: country = summary['CountryCode'] last_record = last_records.get(country) - if not last_record or \ - summary['TotalConfirmed'] != last_record.confirmed or \ - summary['TotalDeaths'] != last_record.deaths or \ - summary['TotalRecovered'] != last_record.recovered: + if ( + not last_record + or summary['TotalConfirmed'] != last_record.confirmed + or summary['TotalDeaths'] != last_record.deaths + or summary['TotalRecovered'] != last_record.recovered + ): self._process_update(summary=summary, session=session) session.commit() diff --git a/platypush/backend/github/__init__.py b/platypush/backend/github/__init__.py index ad49b73d4..0a1bc3e67 100644 --- a/platypush/backend/github/__init__.py +++ b/platypush/backend/github/__init__.py @@ -6,15 +6,28 @@ from typing import Optional, List import requests from sqlalchemy import create_engine, Column, String, DateTime -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import sessionmaker, scoped_session +from sqlalchemy.orm import sessionmaker, scoped_session, declarative_base from platypush.backend import Backend from platypush.config import Config -from platypush.message.event.github import GithubPushEvent, GithubCommitCommentEvent, GithubCreateEvent, \ - GithubDeleteEvent, GithubEvent, GithubForkEvent, GithubWikiEvent, GithubIssueCommentEvent, GithubIssueEvent, \ - GithubMemberEvent, GithubPublicEvent, GithubPullRequestEvent, GithubPullRequestReviewCommentEvent, \ - GithubReleaseEvent, GithubSponsorshipEvent, GithubWatchEvent +from 
platypush.message.event.github import ( + GithubPushEvent, + GithubCommitCommentEvent, + GithubCreateEvent, + GithubDeleteEvent, + GithubEvent, + GithubForkEvent, + GithubWikiEvent, + GithubIssueCommentEvent, + GithubIssueEvent, + GithubMemberEvent, + GithubPublicEvent, + GithubPullRequestEvent, + GithubPullRequestReviewCommentEvent, + GithubReleaseEvent, + GithubSponsorshipEvent, + GithubWatchEvent, +) Base = declarative_base() Session = scoped_session(sessionmaker()) @@ -71,8 +84,17 @@ class GithubBackend(Backend): _base_url = 'https://api.github.com' - def __init__(self, user: str, user_token: str, repos: Optional[List[str]] = None, org: Optional[str] = None, - poll_seconds: int = 60, max_events_per_scan: Optional[int] = 10, *args, **kwargs): + def __init__( + self, + user: str, + user_token: str, + repos: Optional[List[str]] = None, + org: Optional[str] = None, + poll_seconds: int = 60, + max_events_per_scan: Optional[int] = 10, + *args, + **kwargs + ): """ If neither ``repos`` nor ``org`` is specified then the backend will monitor all new events on user level. 
@@ -102,17 +124,23 @@ class GithubBackend(Backend): def _request(self, uri: str, method: str = 'get') -> dict: method = getattr(requests, method.lower()) - return method(self._base_url + uri, auth=(self.user, self.user_token), - headers={'Accept': 'application/vnd.github.v3+json'}).json() + return method( + self._base_url + uri, + auth=(self.user, self.user_token), + headers={'Accept': 'application/vnd.github.v3+json'}, + ).json() def _init_db(self): - engine = create_engine('sqlite:///{}'.format(self.dbfile), connect_args={'check_same_thread': False}) + engine = create_engine( + 'sqlite:///{}'.format(self.dbfile), + connect_args={'check_same_thread': False}, + ) Base.metadata.create_all(engine) Session.configure(bind=engine) @staticmethod def _to_datetime(time_string: str) -> datetime.datetime: - """ Convert ISO 8061 string format with leading 'Z' into something understandable by Python """ + """Convert ISO 8601 string format with leading 'Z' into something understandable by Python""" return datetime.datetime.fromisoformat(time_string[:-1] + '+00:00') @staticmethod @@ -128,7 +156,11 @@ class GithubBackend(Backend): def _get_last_event_time(self, uri: str): with self.db_lock: record = self._get_or_create_resource(uri=uri, session=Session()) - return record.last_updated_at.replace(tzinfo=datetime.timezone.utc) if record.last_updated_at else None + return ( + record.last_updated_at.replace(tzinfo=datetime.timezone.utc) + if record.last_updated_at + else None + ) def _update_last_event_time(self, uri: str, last_updated_at: datetime.datetime): with self.db_lock: @@ -158,9 +190,18 @@ class GithubBackend(Backend): 'WatchEvent': GithubWatchEvent, } - event_type = event_mapping[event['type']] if event['type'] in event_mapping else GithubEvent - return event_type(event_type=event['type'], actor=event['actor'], repo=event.get('repo', {}), - payload=event['payload'], created_at=cls._to_datetime(event['created_at'])) + event_type = ( + event_mapping[event['type']] + if 
event['type'] in event_mapping + else GithubEvent + ) + return event_type( + event_type=event['type'], + actor=event['actor'], + repo=event.get('repo', {}), + payload=event['payload'], + created_at=cls._to_datetime(event['created_at']), + ) def _events_monitor(self, uri: str, method: str = 'get'): def thread(): @@ -175,7 +216,10 @@ class GithubBackend(Backend): fired_events = [] for event in events: - if self.max_events_per_scan and len(fired_events) >= self.max_events_per_scan: + if ( + self.max_events_per_scan + and len(fired_events) >= self.max_events_per_scan + ): break event_time = self._to_datetime(event['created_at']) @@ -189,14 +233,19 @@ class GithubBackend(Backend): for event in fired_events: self.bus.post(event) - self._update_last_event_time(uri=uri, last_updated_at=new_last_event_time) + self._update_last_event_time( + uri=uri, last_updated_at=new_last_event_time + ) except Exception as e: - self.logger.warning('Encountered exception while fetching events from {}: {}'.format( - uri, str(e))) + self.logger.warning( + 'Encountered exception while fetching events from {}: {}'.format( + uri, str(e) + ) + ) self.logger.exception(e) - finally: - if self.wait_stop(timeout=self.poll_seconds): - break + + if self.wait_stop(timeout=self.poll_seconds): + break return thread @@ -206,12 +255,30 @@ class GithubBackend(Backend): if self.repos: for repo in self.repos: - monitors.append(threading.Thread(target=self._events_monitor('/networks/{repo}/events'.format(repo=repo)))) + monitors.append( + threading.Thread( + target=self._events_monitor( + '/networks/{repo}/events'.format(repo=repo) + ) + ) + ) if self.org: - monitors.append(threading.Thread(target=self._events_monitor('/orgs/{org}/events'.format(org=self.org)))) + monitors.append( + threading.Thread( + target=self._events_monitor( + '/orgs/{org}/events'.format(org=self.org) + ) + ) + ) if not (self.repos or self.org): - 
monitors.append(threading.Thread(target=self._events_monitor('/users/{user}/events'.format(user=self.user)))) + monitors.append( + threading.Thread( + target=self._events_monitor( + '/users/{user}/events'.format(user=self.user) + ) + ) + ) for monitor in monitors: monitor.start() @@ -222,4 +289,5 @@ class GithubBackend(Backend): self.logger.info('Github backend terminated') + # vim:sw=4:ts=4:et: diff --git a/platypush/backend/http/request/rss/__init__.py b/platypush/backend/http/request/rss/__init__.py index b16565dc5..7ca6d9c64 100644 --- a/platypush/backend/http/request/rss/__init__.py +++ b/platypush/backend/http/request/rss/__init__.py @@ -2,11 +2,17 @@ import datetime import enum import os -from sqlalchemy import create_engine, Column, Integer, String, DateTime, \ - Enum, ForeignKey +from sqlalchemy import ( + create_engine, + Column, + Integer, + String, + DateTime, + Enum, + ForeignKey, +) -from sqlalchemy.orm import sessionmaker, scoped_session -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker, scoped_session, declarative_base from sqlalchemy.sql.expression import func from platypush.backend.http.request import HttpRequest @@ -44,18 +50,31 @@ class RssUpdates(HttpRequest): """ - user_agent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' + \ - 'Chrome/62.0.3202.94 Safari/537.36' + user_agent = ( + 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' + + 'Chrome/62.0.3202.94 Safari/537.36' + ) - def __init__(self, url, title=None, headers=None, params=None, max_entries=None, - extract_content=False, digest_format=None, user_agent: str = user_agent, - body_style: str = 'font-size: 22px; ' + - 'font-family: "Merriweather", Georgia, "Times New Roman", Times, serif;', - title_style: str = 'margin-top: 30px', - subtitle_style: str = 'margin-top: 10px; page-break-after: always', - article_title_style: str = 'page-break-before: always', - article_link_style: str = 
'color: #555; text-decoration: none; border-bottom: 1px dotted', - article_content_style: str = '', *argv, **kwargs): + def __init__( + self, + url, + title=None, + headers=None, + params=None, + max_entries=None, + extract_content=False, + digest_format=None, + user_agent: str = user_agent, + body_style: str = 'font-size: 22px; ' + + 'font-family: "Merriweather", Georgia, "Times New Roman", Times, serif;', + title_style: str = 'margin-top: 30px', + subtitle_style: str = 'margin-top: 10px; page-break-after: always', + article_title_style: str = 'page-break-before: always', + article_link_style: str = 'color: #555; text-decoration: none; border-bottom: 1px dotted', + article_content_style: str = '', + *argv, + **kwargs, + ): """ :param url: URL to the RSS feed to be monitored. :param title: Optional title for the feed. @@ -91,7 +110,9 @@ class RssUpdates(HttpRequest): # If true, then the http.webpage plugin will be used to parse the content self.extract_content = extract_content - self.digest_format = digest_format.lower() if digest_format else None # Supported formats: html, pdf + self.digest_format = ( + digest_format.lower() if digest_format else None + ) # Supported formats: html, pdf os.makedirs(os.path.expanduser(os.path.dirname(self.dbfile)), exist_ok=True) @@ -119,7 +140,11 @@ class RssUpdates(HttpRequest): @staticmethod def _get_latest_update(session, source_id): - return session.query(func.max(FeedEntry.published)).filter_by(source_id=source_id).scalar() + return ( + session.query(func.max(FeedEntry.published)) + .filter_by(source_id=source_id) + .scalar() + ) def _parse_entry_content(self, link): self.logger.info('Extracting content from {}'.format(link)) @@ -130,14 +155,20 @@ class RssUpdates(HttpRequest): errors = response.errors if not output: - self.logger.warning('Mercury parser error: {}'.format(errors or '[unknown error]')) + self.logger.warning( + 'Mercury parser error: {}'.format(errors or '[unknown error]') + ) return return 
output.get('content') def get_new_items(self, response): import feedparser - engine = create_engine('sqlite:///{}'.format(self.dbfile), connect_args={'check_same_thread': False}) + + engine = create_engine( + 'sqlite:///{}'.format(self.dbfile), + connect_args={'check_same_thread': False}, + ) Base.metadata.create_all(engine) Session.configure(bind=engine) @@ -157,12 +188,16 @@ class RssUpdates(HttpRequest): content = u'''