author | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2019-05-19 15:15:21 +0200
---|---|---
committer | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2019-05-19 15:15:21 +0200
commit | 4fbc475bf60b47bf0b81e5b5cb2169550c0f874e (patch) |
tree | ed9d5f7fb08a5ca5d16153b24def124a203c8cad /src/crawler |
parent | fixed module name of the crawler in bootstrap.py (diff) |
Removed debug print
Diffstat (limited to 'src/crawler')
-rw-r--r-- | src/crawler/default_crawler.py | 3
1 file changed, 0 insertions, 3 deletions
```diff
diff --git a/src/crawler/default_crawler.py b/src/crawler/default_crawler.py
index d4a8266b..38810fc0 100644
--- a/src/crawler/default_crawler.py
+++ b/src/crawler/default_crawler.py
@@ -60,7 +60,6 @@ async def get(*args, **kwargs):
     #kwargs["connector"] = aiohttp.TCPConnector(verify_ssl=False)
     try:
         logger.info('Retrieving feed {}'.format(args[0]))
-        print('Retrieving feed {}'.format(args[0]))
         data = feedparser.parse(args[0])
         return data
     except Exception as e:
@@ -104,9 +103,7 @@ async def parse_feed(user, feed):
 
     # Feed information
     try:
-        print('contructing feed')
         construct_feed_from(feed.link, parsed_feed).update(up_feed)
-        print('constructing feed 1')
     except:
         logger.exception('error when constructing feed: {}'.format(feed.link))
     if feed.title and 'title' in up_feed:
```
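The removed `print()` calls duplicated information that the surrounding `logger.info()` and `logger.exception()` calls already emit. As a minimal sketch (assuming the module obtains its logger via `logging.getLogger(__name__)`, which is not shown in these hunks, so the logger name below is an assumption), the same trace output can be recovered at run time by raising the log level rather than reintroducing prints:

```python
import logging

# One-off debug configuration, e.g. in a small entry-point script:
# print INFO-level messages with timestamps instead of sprinkling
# print() calls through the crawler coroutines.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(name)s %(levelname)s: %(message)s",
)

# Assumed logger name, derived from the module path src/crawler/default_crawler.py.
logging.getLogger("crawler.default_crawler").setLevel(logging.INFO)
```

With this in place, the existing `logger.info('Retrieving feed {}'.format(args[0]))` line produces the same visibility the deleted debug prints provided, and it can be silenced again by raising the level to `WARNING`.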