From b1d92793268f9db737837c0899272d576c45c537 Mon Sep 17 00:00:00 2001
From: François Schmidts
Date: Fri, 15 Jan 2016 14:39:49 +0100
Subject: fixing logging

---
 src/web/lib/crawler.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to 'src/web/lib/crawler.py')

diff --git a/src/web/lib/crawler.py b/src/web/lib/crawler.py
index 979ccbfc..7343ea4d 100644
--- a/src/web/lib/crawler.py
+++ b/src/web/lib/crawler.py
@@ -155,9 +155,9 @@ class FeedCrawler(AbstractCrawler):
             response.raise_for_status()
         except Exception as error:
             error_count = self.feed['error_count'] + 1
-            logger.error('%r %r - an error occured while fetching '
-                         'feed; bumping error count to %r', self.feed['id'],
-                         self.feed['title'], error_count)
+            logger.exception('%r %r - an error occured while fetching '
+                             'feed; bumping error count to %r',
+                             self.feed['id'], self.feed['title'], error_count)
             future = self.query_pyagg('put', 'feed/%d' % self.feed['id'],
                                       {'error_count': error_count,
                                        'last_error': str(error),
--
cgit

From 462f6d3b21558ed0a283c24e0e0332eac6ccbbb3 Mon Sep 17 00:00:00 2001
From: François Schmidts
Date: Fri, 11 Sep 2015 18:28:12 +0200
Subject: base modification in model for category support

---
 src/web/lib/crawler.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

(limited to 'src/web/lib/crawler.py')

diff --git a/src/web/lib/crawler.py b/src/web/lib/crawler.py
index 7343ea4d..f480fe96 100644
--- a/src/web/lib/crawler.py
+++ b/src/web/lib/crawler.py
@@ -18,7 +18,6 @@ import json
 import logging
 import feedparser
 from datetime import datetime, timedelta
-from functools import wraps
 from time import strftime, gmtime
 from concurrent.futures import ThreadPoolExecutor
 from requests_futures.sessions import FuturesSession
@@ -132,7 +131,7 @@ class PyAggUpdater(AbstractCrawler):
                     {key: "%s -> %s" % (up_feed[key], self.feed.get(key))
                      for key in up_feed if up_feed[key] != self.feed.get(key)})
 
-        future = self.query_pyagg('put', 'feed/%d' % self.feed['id'], up_feed)
+        self.query_pyagg('put', 'feed/%d' % self.feed['id'], up_feed)
 
 
 class FeedCrawler(AbstractCrawler):
@@ -144,8 +143,8 @@
     def clean_feed(self):
         """Will reset the errors counters on a feed that have known errors"""
         if self.feed.get('error_count') or self.feed.get('last_error'):
-            future = self.query_pyagg('put', 'feed/%d' % self.feed['id'],
-                                      {'error_count': 0, 'last_error': ''})
+            self.query_pyagg('put', 'feed/%d' % self.feed['id'],
+                             {'error_count': 0, 'last_error': ''})
 
     def callback(self, response):
         """will fetch the feed and interprete results (304, etag) or will
--
cgit
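
Note on the first commit: Python's logging.Logger.exception() logs at ERROR level just like Logger.error(), but also appends the traceback of the exception currently being handled, which is why it is only meaningful inside an except block. A minimal standalone sketch of the difference is below; the fetch() helper and its URL are made up for illustration and are not part of crawler.py.

    import logging

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger(__name__)

    def fetch(url):
        # hypothetical stand-in for the feed request; fails like a bad fetch
        raise ValueError("boom")

    try:
        fetch('http://example.com/feed')
    except Exception as error:
        # logger.error() records only the formatted message
        logger.error('an error occurred while fetching feed: %r', error)
        # logger.exception() records the same ERROR-level message
        # plus the traceback of the exception being handled
        logger.exception('an error occurred while fetching feed')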