author | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2019-05-19 15:14:08 +0200
committer | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2019-05-19 15:14:08 +0200
commit | 82bd60530546147df8cf4edd4cb3e844928888f2 (patch)
tree | e080f02c0dc0da80aae56bf9b2046dd39dd12424 /src/crawler/default_crawler.py
parent | Temporary fix for the logging module. Closes #44 (diff)
fixed module name of the crawler in bootstrap.py
Diffstat (limited to 'src/crawler/default_crawler.py')
-rw-r--r-- | src/crawler/default_crawler.py | 4 |
1 file changed, 4 insertions, 0 deletions
```diff
diff --git a/src/crawler/default_crawler.py b/src/crawler/default_crawler.py
index e5df3c04..d4a8266b 100644
--- a/src/crawler/default_crawler.py
+++ b/src/crawler/default_crawler.py
@@ -59,6 +59,8 @@ else:
 async def get(*args, **kwargs):
     #kwargs["connector"] = aiohttp.TCPConnector(verify_ssl=False)
     try:
+        logger.info('Retrieving feed {}'.format(args[0]))
+        print('Retrieving feed {}'.format(args[0]))
         data = feedparser.parse(args[0])
         return data
     except Exception as e:
@@ -102,7 +104,9 @@ async def parse_feed(user, feed):
     # Feed information
     try:
+        print('contructing feed')
         construct_feed_from(feed.link, parsed_feed).update(up_feed)
+        print('constructing feed 1')
     except:
         logger.exception('error when constructing feed: {}'.format(feed.link))
     if feed.title and 'title' in up_feed:
```
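For quick local verification, here is a minimal standalone sketch, not part of this commit, that mirrors the logging added to `get()`. It assumes feedparser is installed, uses a basic logging configuration, and the feed URL is a placeholder.

```python
# Illustrative sketch only, not code from the commit above: it mirrors
# the log line added to get() so the behaviour can be tried in isolation.
import asyncio
import logging

import feedparser  # third-party dependency, assumed installed

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


async def get(*args, **kwargs):
    """Parse a feed with feedparser, logging the URL first (as in the patch)."""
    try:
        logger.info('Retrieving feed {}'.format(args[0]))
        data = feedparser.parse(args[0])
        return data
    except Exception:
        logger.exception('Problem while retrieving feed {}'.format(args[0]))
        return None


if __name__ == '__main__':
    # Placeholder URL; replace with a real feed to test.
    parsed = asyncio.run(get('https://example.org/feed.xml'))
    if parsed is not None:
        print(parsed.feed.get('title', 'untitled feed'))
```

Running it emits the "Retrieving feed …" log line before feedparser parses the URL, which is the extra trace the patch introduces in the crawler's `get()` coroutine.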