From ad67a17f02ee9ffca1e15f2aed93b9ea89531be6 Mon Sep 17 00:00:00 2001
From: Cédric Bonhomme
Date: Sat, 29 Oct 2016 14:46:10 +0200
Subject: Uses a stream handler for logging on Heroku.

---
 src/bootstrap.py               | 8 ++++++--
 src/crawler/classic_crawler.py | 1 +
 2 files changed, 7 insertions(+), 2 deletions(-)

(limited to 'src')

diff --git a/src/bootstrap.py b/src/bootstrap.py
index 9358e3d8..f4b4591c 100644
--- a/src/bootstrap.py
+++ b/src/bootstrap.py
@@ -13,9 +13,13 @@ from urllib.parse import urlsplit
 def set_logging(log_path, log_level=logging.INFO,
                 log_format='%(asctime)s %(levelname)s %(message)s'):
     formater = logging.Formatter(log_format)
-    handler = logging.FileHandler(log_path)
+    if conf.ON_HEROKU:
+        handler = logging.StreamHandler()
+    else:
+        handler = logging.FileHandler(log_path)
     handler.setFormatter(formater)
-    for logger_name in ('bootstrap', 'web', 'manager', 'runserver', 'classic_crawler'):
+    for logger_name in ('bootstrap', 'web', 'manager', 'runserver',
+                        'classic_crawler'):
         logger = logging.getLogger(logger_name)
         logger.addHandler(handler)
         logger.setLevel(log_level)
diff --git a/src/crawler/classic_crawler.py b/src/crawler/classic_crawler.py
index 4125dad7..f537cfd0 100644
--- a/src/crawler/classic_crawler.py
+++ b/src/crawler/classic_crawler.py
@@ -79,6 +79,7 @@ async def parse_feed(user, feed):
     except Exception as e:
         up_feed['last_error'] = str(e)
         up_feed['error_count'] = feed.error_count + 1
+        logger.exception("error when parsing feed: " + str(e))
     finally:
         up_feed['last_retrieved'] = datetime.now(dateutil.tz.tzlocal())
     if parsed_feed is None:
--
cgit
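
For context, the bootstrap.py change selects the logging handler based on where the
application runs: on Heroku the dyno filesystem is ephemeral and the platform collects
the process's output streams, so a StreamHandler is used instead of a FileHandler.
The following is a minimal, standalone sketch of that selection logic, not the project's
actual module: the conf.ON_HEROKU flag from the patch is simulated here with a check for
the DYNO environment variable, and the 'app.log' path is hypothetical.

import logging
import os


def set_logging(log_path, log_level=logging.INFO,
                log_format='%(asctime)s %(levelname)s %(message)s'):
    """Attach one shared handler to the application's named loggers (sketch)."""
    formatter = logging.Formatter(log_format)
    # Assumption: stand-in for conf.ON_HEROKU used in the patch.
    on_heroku = 'DYNO' in os.environ
    if on_heroku:
        # Heroku captures stderr/stdout, so log to a stream rather than a file.
        handler = logging.StreamHandler()
    else:
        handler = logging.FileHandler(log_path)
    handler.setFormatter(formatter)
    for logger_name in ('bootstrap', 'web', 'manager', 'runserver',
                        'classic_crawler'):
        logger = logging.getLogger(logger_name)
        logger.addHandler(handler)
        logger.setLevel(log_level)


if __name__ == '__main__':
    set_logging('app.log')
    logging.getLogger('bootstrap').info('logging configured')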