import datetime
import enum
import os

from sqlalchemy import (
    create_engine,
    Column,
    Integer,
    String,
    DateTime,
    Enum,
    ForeignKey,
)

from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.sql.expression import func

from platypush.backend.http.request import HttpRequest
from platypush.common.db import declarative_base
from platypush.config import Config
from platypush.context import get_plugin
from platypush.message.event.http.rss import NewFeedEvent

Base = declarative_base()
Session = scoped_session(sessionmaker())
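
# Base and Session are kept at module level so that the ORM models defined
# below and the per-poll sessions opened by RssUpdates share the same metadata
# registry and engine binding (the engine is configured in get_new_items()).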


class RssUpdates(HttpRequest):
    """
    Gets new items in an RSS feed. You can use this type of object within the
    context of the :class:`platypush.backend.http.poll.HttpPollBackend`
    backend. Example:

    .. code-block:: yaml

        backend.http.poll:
            requests:
                - type: platypush.backend.http.request.rss.RssUpdates
                  url: https://www.technologyreview.com/feed/
                  title: MIT Technology Review
                  poll_seconds: 86400  # Poll once a day
                  digest_format: html  # Generate an HTML digest of the new items

    """

    user_agent = (
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/62.0.3202.94 Safari/537.36'
    )

    def __init__(
        self,
        url,
        title=None,
        headers=None,
        params=None,
        max_entries=None,
        extract_content=False,
        digest_format=None,
        user_agent: str = user_agent,
        body_style: str = 'font-size: 22px; '
        'font-family: "Merriweather", Georgia, "Times New Roman", Times, serif;',
        title_style: str = 'margin-top: 30px',
        subtitle_style: str = 'margin-top: 10px; page-break-after: always',
        article_title_style: str = 'page-break-before: always',
        article_link_style: str = 'color: #555; text-decoration: none; border-bottom: 1px dotted',
        article_content_style: str = '',
        *argv,
        **kwargs,
    ):
        """
        :param url: URL to the RSS feed to be monitored.
        :param title: Optional title for the feed.
        :param headers: Extra headers to be passed to the request.
        :param params: Extra GET parameters to be appended to the URL.
        :param max_entries: Maximum number of entries that will be returned in a single
            :class:`platypush.message.event.http.rss.NewFeedEvent` event.
        :param extract_content: Whether the full content of each item should also be
            extracted (through the :class:`platypush.plugins.http.webpage.HttpWebpagePlugin`
            plugin) (default: ``False``).
        :param digest_format: Format of the digest output file (default: None, i.e. plain
            text; other supported types: ``html`` and ``pdf``, the latter requiring the
            ``weasyprint`` module to be installed).
        :param user_agent: User agent string to be passed on the request.
        :param body_style: CSS style for the body.
        :param title_style: CSS style for the feed title.
        :param subtitle_style: CSS style for the feed subtitle.
        :param article_title_style: CSS style for the article titles.
        :param article_link_style: CSS style for the article links.
        :param article_content_style: CSS style for the article content.
        """
        self.workdir = os.path.join(os.path.expanduser(Config.get('workdir')), 'feeds')
        self.dbfile = os.path.join(self.workdir, 'rss.db')
        self.url = url
        self.title = title
        self.max_entries = max_entries
        self.user_agent = user_agent
        self.body_style = body_style
        self.title_style = title_style
        self.subtitle_style = subtitle_style
        self.article_title_style = article_title_style
        self.article_link_style = article_link_style
        self.article_content_style = article_content_style

        # If true, then the http.webpage plugin will be used to parse the content
        self.extract_content = extract_content

        self.digest_format = (
            digest_format.lower() if digest_format else None
        )  # Supported formats: html, pdf

        os.makedirs(os.path.expanduser(os.path.dirname(self.dbfile)), exist_ok=True)

        if headers is None:
            headers = {}
        headers['User-Agent'] = self.user_agent

        request_args = {
            'method': 'get',
            'url': self.url,
            'headers': headers,
            'params': params or {},
        }

        super().__init__(skip_first_call=False, args=request_args, *argv, **kwargs)

    def _get_or_create_source(self, session):
        record = session.query(FeedSource).filter_by(url=self.url).first()
        if record is None:
            record = FeedSource(url=self.url, title=self.title)
            session.add(record)

        session.commit()
        return record

    @staticmethod
    def _get_latest_update(session, source_id):
        return (
            session.query(func.max(FeedEntry.published))
            .filter_by(source_id=source_id)
            .scalar()
        )
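
    # A sketch of the SQL emitted by _get_latest_update (SQLite dialect):
    #
    #   SELECT max(published) FROM "FeedEntry" WHERE source_id = :source_id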

    def _parse_entry_content(self, link):
        self.logger.info('Extracting content from {}'.format(link))

        parser = get_plugin('http.webpage')
        response = parser.simplify(link)
        output = response.output
        errors = response.errors

        if not output:
            self.logger.warning(
                'Mercury parser error: {}'.format(errors or '[unknown error]')
            )
            return

        return output.get('content')

    def get_new_items(self, response):
        import feedparser

        engine = create_engine(
            'sqlite:///{}'.format(self.dbfile),
            connect_args={'check_same_thread': False},
        )
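
        # Note: SQLite connections are thread-bound by default;
        # check_same_thread=False allows the connection to be used from the
        # backend's worker threads, while the module-level scoped_session
        # gives each thread its own session.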

        Base.metadata.create_all(engine)
        Session.configure(bind=engine)
        self._get_or_create_source(session=Session())

        feed = feedparser.parse(response.text)
        session = Session()
        source_record = self._get_or_create_source(session=session)
        session.add(source_record)
        parse_start_time = datetime.datetime.utcnow()
        entries = []
        latest_update = self._get_latest_update(session, source_record.id)

        if not self.title and 'title' in feed.feed:
            self.title = feed.feed['title']
            source_record.title = self.title

        content = u'''
            <h1 style="{title_style}">{title}</h1>
            <h2 style="{subtitle_style}">Feeds digest generated on {creation_date}</h2>'''.format(
            title_style=self.title_style,
            title=self.title,
            subtitle_style=self.subtitle_style,
            creation_date=datetime.datetime.now().strftime('%d %B %Y, %H:%M'),
        )

        self.logger.info(
            'Parsed {:d} items from RSS feed <{}>'.format(len(feed.entries), self.url)
        )

        for entry in feed.entries:
            if not entry.published_parsed:
                continue

            try:
                entry_timestamp = datetime.datetime(*entry.published_parsed[:6])

                if latest_update is None or entry_timestamp > latest_update:
                    self.logger.info(
                        'Processed new item from RSS feed <{}>'.format(self.url)
                    )

                    entry.summary = entry.summary if hasattr(entry, 'summary') else None

                    if self.extract_content:
                        entry.content = self._parse_entry_content(entry.link)
                    elif hasattr(entry, 'summary'):
                        entry.content = entry.summary
                    else:
                        entry.content = None

                    content += u'''
                        <h1 style="{article_title_style}">
                            <a href="{link}" target="_blank" style="{article_link_style}">{title}</a>
                        </h1>
                        <div class="_parsed-content" style="{article_content_style}">{content}</div>'''.format(
                        article_title_style=self.article_title_style,
                        article_link_style=self.article_link_style,
                        article_content_style=self.article_content_style,
                        link=entry.link,
                        title=entry.title,
                        content=entry.content,
                    )

                    e = {
                        'entry_id': entry.id,
                        'title': entry.title,
                        'link': entry.link,
                        'summary': entry.summary,
                        'content': entry.content,
                        'source_id': source_record.id,
                        'published': entry_timestamp,
                    }

                    entries.append(e)
                    session.add(FeedEntry(**e))
                    if self.max_entries and len(entries) > self.max_entries:
                        break
            except Exception as e:
                self.logger.warning(
                    'Exception encountered while parsing RSS '
                    f'feed {entry.link}: {e}'
                )
                self.logger.exception(e)

        source_record.last_updated_at = parse_start_time
        digest_filename = None

        if entries:
            self.logger.info(
                'Parsed {} new entries from the RSS feed {}'.format(
                    len(entries), self.title
                )
            )

            if self.digest_format:
                digest_filename = os.path.join(
                    self.workdir,
                    'cache',
                    '{}_{}.{}'.format(
                        datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
                        self.title,
                        self.digest_format,
                    ),
                )

                os.makedirs(os.path.dirname(digest_filename), exist_ok=True)

                if self.digest_format == 'html':
                    content = '''
                        <html>
                            <head>
                                <title>{title}</title>
                            </head>
                            <body style="{body_style}">{content}</body>
                        </html>
                    '''.format(
                        title=self.title, body_style=self.body_style, content=content
                    )

                    with open(digest_filename, 'w', encoding='utf-8') as f:
                        f.write(content)
                elif self.digest_format == 'pdf':
                    from weasyprint import HTML, CSS

                    # FontConfiguration was moved to a different module in
                    # more recent versions of weasyprint
                    try:
                        from weasyprint.fonts import FontConfiguration
                    except ImportError:
                        from weasyprint.document import FontConfiguration

                    body_style = 'body { ' + self.body_style + ' }'
                    font_config = FontConfiguration()
                    css = [
                        CSS('https://fonts.googleapis.com/css?family=Merriweather'),
                        CSS(string=body_style, font_config=font_config),
                    ]

                    HTML(string=content).write_pdf(digest_filename, stylesheets=css)
                else:
                    raise RuntimeError(
                        f'Unsupported format: {self.digest_format}. Supported formats: html, pdf'
                    )

                digest_entry = FeedDigest(
                    source_id=source_record.id,
                    format=self.digest_format,
                    filename=digest_filename,
                )

                session.add(digest_entry)
                self.logger.info(
                    '{} digest ready: {}'.format(self.digest_format, digest_filename)
                )

        session.commit()
        self.logger.info('Parsing RSS feed {}: completed'.format(self.title))

        return NewFeedEvent(
            request=dict(self),
            response=entries,
            source_id=source_record.id,
            source_title=source_record.title,
            title=self.title,
            digest_format=self.digest_format,
            digest_filename=digest_filename,
        )
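

# The models below back the local SQLite cache (<workdir>/feeds/rss.db).
# A minimal query sketch, assuming a session bound as in get_new_items():
#
#   session = Session()
#   last_entries = (
#       session.query(FeedEntry)
#       .order_by(FeedEntry.published.desc())
#       .limit(10)
#       .all()
#   )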


class FeedSource(Base):
    """Models the FeedSource table, containing RSS sources to be parsed"""

    __tablename__ = 'FeedSource'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(Integer, primary_key=True)
    title = Column(String)
    url = Column(String, unique=True)
    last_updated_at = Column(DateTime)


class FeedEntry(Base):
    """Models the FeedEntry table, which contains RSS entries"""

    __tablename__ = 'FeedEntry'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(Integer, primary_key=True)
    entry_id = Column(String)
    source_id = Column(Integer, ForeignKey('FeedSource.id'), nullable=False)
    title = Column(String)
    link = Column(String)
    summary = Column(String)
    content = Column(String)
    published = Column(DateTime)


class FeedDigest(Base):
    """Models the FeedDigest table, containing feed digests either in HTML
    or PDF format"""

    class DigestFormat(enum.Enum):
        html = 1
        pdf = 2

    __tablename__ = 'FeedDigest'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(Integer, primary_key=True)
    source_id = Column(Integer, ForeignKey('FeedSource.id'), nullable=False)
    format = Column(Enum(DigestFormat), nullable=False)
    filename = Column(String, nullable=False)
    created_at = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
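
    # Note: format is declared as Enum(DigestFormat), so the plain
    # 'html'/'pdf' string set in get_new_items() is validated against the
    # DigestFormat member names and coerced by SQLAlchemy on insert.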


# vim:sw=4:ts=4:et: