author     Cédric Bonhomme <cedric@cedricbonhomme.org>  2019-05-12 19:27:01 +0200
committer  Cédric Bonhomme <cedric@cedricbonhomme.org>  2019-05-12 19:27:01 +0200
commit     3610fef4c379e9cf6c9dd9b60494663ded77c664 (patch)
tree       50b9196ea5cb00c54efe85ab3ee278ef6b051cf0 /src
parent     Now using psycopg2-binary instead of using psycopg. (diff)
download   newspipe-3610fef4c379e9cf6c9dd9b60494663ded77c664.tar.gz
           newspipe-3610fef4c379e9cf6c9dd9b60494663ded77c664.tar.bz2
           newspipe-3610fef4c379e9cf6c9dd9b60494663ded77c664.zip
Temporary fix for the logging module. Closes #44
Diffstat (limited to 'src')
-rw-r--r--  src/crawler/default_crawler.py  7
-rw-r--r--  src/lib/feed_utils.py           4
2 files changed, 8 insertions(+), 3 deletions(-)
diff --git a/src/crawler/default_crawler.py b/src/crawler/default_crawler.py
index 80cee92f..e5df3c04 100644
--- a/src/crawler/default_crawler.py
+++ b/src/crawler/default_crawler.py
@@ -101,7 +101,10 @@ async def parse_feed(user, feed):
     up_feed['last_error'] = ""

     # Feed information
-    construct_feed_from(feed.link, parsed_feed).update(up_feed)
+    try:
+        construct_feed_from(feed.link, parsed_feed).update(up_feed)
+    except:
+        logger.exception('error when constructing feed: {}'.format(feed.link))
     if feed.title and 'title' in up_feed:
         # do not override the title set by the user
         del up_feed['title']
@@ -198,7 +201,7 @@ def retrieve_feed(loop, user, feed_id=None):
     try:
         loop.run_until_complete(asyncio.wait(tasks))
-    except Exception:
+    except:
         logger.exception('an error occured')
     finally:
         logger.info('Articles retrieved for {}'.format(user.nickname))
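Both hunks above end up in a broad except clause whose body calls logger.exception(), which logs at ERROR level and appends the current traceback, so a single failing feed is recorded without aborting the crawl. Below is a minimal sketch of that catch-and-log pattern, assuming a hypothetical build_feed() stand-in for construct_feed_from() and an illustrative URL; it is not the project's code.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def build_feed(link):
    # hypothetical stand-in for construct_feed_from(feed.link, parsed_feed);
    # it always fails here so the handler below is exercised
    raise ValueError('malformed feed data for {}'.format(link))


def update_feed_info(link, up_feed):
    try:
        build_feed(link).update(up_feed)
    except Exception:
        # logger.exception() records the message plus the traceback of the
        # exception currently being handled, then the caller keeps going
        logger.exception('error when constructing feed: {}'.format(link))


update_feed_info('https://www.example.org/feed', {'last_error': ''})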
diff --git a/src/lib/feed_utils.py b/src/lib/feed_utils.py
index 4c2cee29..c2d4ca6e 100644
--- a/src/lib/feed_utils.py
+++ b/src/lib/feed_utils.py
@@ -73,7 +73,9 @@ def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
     try:
         response = requests.get(feed['site_link'], **requests_kwargs)
-    except Exception:
+    except requests.exceptions.InvalidSchema as e:
+        return feed
+    except:
         logger.exception('failed to retrieve %r', feed['site_link'])
         return feed
     bs_parsed = BeautifulSoup(response.content, 'html.parser',
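requests raises requests.exceptions.InvalidSchema when it has no connection adapter for the URL's scheme (for example an irc:// site link pulled out of a feed), so the new first branch returns the feed quietly while every other failure still gets a logged traceback. The sketch below shows that two-branch handling in isolation, assuming a hypothetical fetch_site() helper, an illustrative URL, and a made-up site_content_length key; only the exception handling mirrors the diff.

import logging

import requests

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def fetch_site(feed):
    try:
        response = requests.get(feed['site_link'], timeout=5)
    except requests.exceptions.InvalidSchema:
        # a scheme requests cannot handle: give up quietly, no traceback
        return feed
    except Exception:
        # any other failure (DNS error, timeout, ...) is logged with traceback
        logger.exception('failed to retrieve %r', feed['site_link'])
        return feed
    feed['site_content_length'] = len(response.content)
    return feed


print(fetch_site({'site_link': 'irc://irc.example.org/newspipe'}))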