From 82bd60530546147df8cf4edd4cb3e844928888f2 Mon Sep 17 00:00:00 2001
From: Cédric Bonhomme
Date: Sun, 19 May 2019 15:14:08 +0200
Subject: fixed module name of the crawler in bootstrap.py

---
 src/bootstrap.py               | 2 +-
 src/crawler/default_crawler.py | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

(limited to 'src')

diff --git a/src/bootstrap.py b/src/bootstrap.py
index 8e337c4a..8e5413e0 100644
--- a/src/bootstrap.py
+++ b/src/bootstrap.py
@@ -14,7 +14,7 @@ def set_logging(log_path=None, log_level=logging.INFO, modules=(),
                 log_format='%(asctime)s %(levelname)s %(message)s'):
     if not modules:
         modules = ('root', 'bootstrap', 'runserver',
-                   'web', 'crawler.classic_crawler', 'manager', 'plugins')
+                   'web', 'crawler.default_crawler', 'manager', 'plugins')
     if conf.ON_HEROKU:
         log_format = '%(levelname)s %(message)s'
     if log_path:
diff --git a/src/crawler/default_crawler.py b/src/crawler/default_crawler.py
index e5df3c04..d4a8266b 100644
--- a/src/crawler/default_crawler.py
+++ b/src/crawler/default_crawler.py
@@ -59,6 +59,8 @@ else:
 async def get(*args, **kwargs):
     #kwargs["connector"] = aiohttp.TCPConnector(verify_ssl=False)
     try:
+        logger.info('Retrieving feed {}'.format(args[0]))
+        print('Retrieving feed {}'.format(args[0]))
         data = feedparser.parse(args[0])
         return data
     except Exception as e:
@@ -102,7 +104,9 @@ async def parse_feed(user, feed):

     # Feed information
     try:
+        print('contructing feed')
         construct_feed_from(feed.link, parsed_feed).update(up_feed)
+        print('constructing feed 1')
     except:
         logger.exception('error when constructing feed: {}'.format(feed.link))
     if feed.title and 'title' in up_feed:
--
cgit