From dba5533af05a63cd2cb8ad8bdaf62c38b19ea71b Mon Sep 17 00:00:00 2001
From: François Schmidts
Date: Tue, 7 Jul 2015 11:21:51 +0200
Subject: make the crawler fetch high-traffic feeds earlier

---
 pyaggr3g470r/lib/crawler.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/pyaggr3g470r/lib/crawler.py b/pyaggr3g470r/lib/crawler.py
index de557e45..e5998776 100644
--- a/pyaggr3g470r/lib/crawler.py
+++ b/pyaggr3g470r/lib/crawler.py
@@ -17,6 +17,7 @@ import conf
 import json
 import logging
 import feedparser
+from datetime import datetime, timedelta
 from functools import wraps
 from time import strftime, gmtime
 from concurrent.futures import ThreadPoolExecutor
@@ -118,7 +119,9 @@ class PyAggUpdater(AbstractCrawler):
         results = response.result().json()
         logger.debug('%r %r - %d entries were not matched and will be created',
                      self.feed['id'], self.feed['title'], len(results))
+        article_created = False
         for id_to_create in results:
+            article_created = True
             entry = construct_article(
                     self.entries[tuple(sorted(id_to_create.items()))],
                     self.feed)
@@ -144,6 +147,10 @@ class PyAggUpdater(AbstractCrawler):
         if not self.feed.get('title'):
             up_feed['title'] = fresh_feed.get('title', '')
         up_feed['user_id'] = self.feed['user_id']
+        # re-fetch this feed earlier since new entries appeared
+        if article_created:
+            up_feed['last_retrieved'] \
+                    = (datetime.now() - timedelta(minutes=45)).isoformat()
         logger.info('%r %r - pushing feed attrs %r',
                     self.feed['id'], self.feed['title'],
--
cgit
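
For context, here is a minimal, self-contained sketch of the scheduling idea
the patch relies on: a feed comes due for a new fetch once enough time has
passed since its 'last_retrieved' timestamp, so backdating that timestamp by
45 minutes gives a feed that just produced new articles a head start on its
next crawl. All names below (FETCH_INTERVAL, HEAD_START, is_due, mark_fetched)
are hypothetical illustrations, not part of pyaggr3g470r's API.

    from datetime import datetime, timedelta

    # Assumed delay between two fetches of the same feed (hypothetical value).
    FETCH_INTERVAL = timedelta(hours=1)
    # The head start the patch grants to feeds that yielded new articles.
    HEAD_START = timedelta(minutes=45)

    def is_due(feed, now=None):
        """A feed is due once FETCH_INTERVAL has elapsed since last_retrieved."""
        now = now or datetime.now()
        last = datetime.fromisoformat(feed['last_retrieved'])
        return now - last >= FETCH_INTERVAL

    def mark_fetched(feed, article_created):
        """Record a fetch; backdate busy feeds so they come due again sooner."""
        now = datetime.now()
        if article_created:
            now -= HEAD_START  # due again in ~15 minutes instead of an hour
        feed['last_retrieved'] = now.isoformat()

    # Example: a feed that yielded new articles is due again well before the
    # full interval has elapsed.
    feed = {'last_retrieved': None}
    mark_fetched(feed, article_created=True)
    print(is_due(feed, now=datetime.now() + timedelta(minutes=20)))  # True

Under these assumptions, storing the backdated timestamp (rather than keeping
a separate priority queue) keeps the change to a single field write, which is
why the patch only needs to touch the feed attributes it already pushes.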