Diffstat (limited to 'pyaggr3g470r/lib')
-rw-r--r--  pyaggr3g470r/lib/article_utils.py  14
-rw-r--r--  pyaggr3g470r/lib/crawler.py         1
2 files changed, 9 insertions(+), 6 deletions(-)
diff --git a/pyaggr3g470r/lib/article_utils.py b/pyaggr3g470r/lib/article_utils.py
index 3c642167..115b6058 100644
--- a/pyaggr3g470r/lib/article_utils.py
+++ b/pyaggr3g470r/lib/article_utils.py
@@ -35,8 +35,10 @@ def extract_id(entry, keys=[('link', 'link'),
 def construct_article(entry, feed):
+    if hasattr(feed, 'dump'):  # this way can be a sqlalchemy obj or a dict
+        feed = feed.dump()
     "Safe method to transorm a feedparser entry into an article"
-    now = datetime.now()
+    date = datetime.now()
     for date_key in ('published', 'updated'):
         if entry.get(date_key):
@@ -63,12 +65,12 @@ def construct_article(entry, feed):
                 logger.warning("Unable to get the real URL of %s. Error: %s",
                                article_link, error)
-    return {'feed_id': feed.id,
-            'user_id': feed.user_id,
+    return {'feed_id': feed['id'],
+            'user_id': feed['user_id'],
             'entry_id': extract_id(entry).get('entry_id', None),
-            'link': entry.get('link', feed.site_link),
+            'link': entry.get('link', feed['site_link']),
             'title': entry.get('title', 'No title'),
             'readed': False, 'like': False,
             'content': content,
-            'retrieved_date': now.isoformat(),
-            'date': (date or now).isoformat()}
+            'retrieved_date': date.isoformat(),
+            'date': date.isoformat()}
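
Editor's note: the dump() check added above is duck typing, so construct_article now accepts either a SQLAlchemy feed object or a plain dict, both ending up as a dict before the key lookups in the return statement. A minimal sketch of the pattern, with illustrative names (FeedModel and normalize_feed are not part of the repository):

class FeedModel:
    """Stand-in for a SQLAlchemy model that exposes a dump() method."""
    def __init__(self, id, user_id, site_link):
        self.id, self.user_id, self.site_link = id, user_id, site_link

    def dump(self):
        # serialize the ORM-style object into a plain dict
        return {'id': self.id, 'user_id': self.user_id,
                'site_link': self.site_link}


def normalize_feed(feed):
    # accept either an object providing dump() or an already-serialized dict
    if hasattr(feed, 'dump'):
        feed = feed.dump()
    return feed


as_dict = {'id': 1, 'user_id': 2, 'site_link': 'http://example.com'}
assert normalize_feed(as_dict) == normalize_feed(FeedModel(1, 2, 'http://example.com'))
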
diff --git a/pyaggr3g470r/lib/crawler.py b/pyaggr3g470r/lib/crawler.py
index 8d2de15f..de557e45 100644
--- a/pyaggr3g470r/lib/crawler.py
+++ b/pyaggr3g470r/lib/crawler.py
@@ -143,6 +143,7 @@ class PyAggUpdater(AbstractCrawler):
                 up_feed[key] = fresh_feed[key]
         if not self.feed.get('title'):
             up_feed['title'] = fresh_feed.get('title', '')
+        up_feed['user_id'] = self.feed['user_id']
         logger.info('%r %r - pushing feed attrs %r',
                     self.feed['id'], self.feed['title'],
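
Editor's note: the added line in PyAggUpdater ensures the owner's user_id travels with the feed attributes pushed back to the API. A rough sketch of how such a payload is assembled, with made-up values and an illustrative key list (self_feed and fresh_feed contents are not taken from a real crawl):

# illustrative data; in the crawler these come from the API and feedparser
self_feed = {'id': 42, 'user_id': 7, 'title': '', 'site_link': 'http://old.example'}
fresh_feed = {'title': 'Example feed', 'site_link': 'http://new.example'}

up_feed = {}
for key in ('description', 'site_link', 'icon_url'):  # key list assumed for the example
    if fresh_feed.get(key) and fresh_feed[key] != self_feed.get(key):
        up_feed[key] = fresh_feed[key]
if not self_feed.get('title'):
    up_feed['title'] = fresh_feed.get('title', '')
up_feed['user_id'] = self_feed['user_id']  # the added line: keep ownership in the payload

print(up_feed)
# {'site_link': 'http://new.example', 'title': 'Example feed', 'user_id': 7}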