| author | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2015-03-08 00:20:56 +0100 |
|---|---|---|
| committer | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2015-03-08 00:20:56 +0100 |
| commit | cc7cbac144891994f30c55b9bbea8a3465f9f091 (patch) | |
| tree | 4c07ad8b4ddb3fe64ffad35d99f813adf8675c8b | |
| parent | updated documentation (db migration section). (diff) | |
| download | newspipe-cc7cbac144891994f30c55b9bbea8a3465f9f091.tar.gz newspipe-cc7cbac144891994f30c55b9bbea8a3465f9f091.tar.bz2 newspipe-cc7cbac144891994f30c55b9bbea8a3465f9f091.zip | |
Better handling of the error logging in the crawler.
| -rw-r--r-- | pyaggr3g470r/crawler.py | 22 |
|---|---|---|

1 file changed, 13 insertions, 9 deletions
```diff
diff --git a/pyaggr3g470r/crawler.py b/pyaggr3g470r/crawler.py
index ebcb8ce4..ded9df6f 100644
--- a/pyaggr3g470r/crawler.py
+++ b/pyaggr3g470r/crawler.py
@@ -56,7 +56,7 @@ def get(*args, **kwargs):
         return (yield from response.read_and_close(decode=False))
     except Exception as e:
         #print(e)
-        return None
+        raise e
 
 @asyncio.coroutine
 def parse_feed(user, feed):
@@ -66,14 +66,17 @@ def parse_feed(user, feed):
     data = None
 
     with (yield from sem):
-        data = yield from get(feed.link)
-
-    if data is None:
-        feed.error_count += 1
-        if feed.error_count > 2:
-            feed.enabled = False
-        db.session.commit()
-        return
+        try:
+            data = yield from get(feed.link)
+        except Exception as e:
+            feed.last_error = str(e)
+        finally:
+            if data is None:
+                feed.error_count += 1
+                if feed.error_count > 2:
+                    feed.enabled = False
+                db.session.commit()
+                return
 
     a_feed = feedparser.parse(data)
     if a_feed['bozo'] == 1:
@@ -88,6 +91,7 @@ def parse_feed(user, feed):
 
     feed.last_retrieved = datetime.now(dateutil.tz.tzlocal())
     feed.error_count = 0
+    feed.last_error = ""
 
     # Feed informations
     if feed.title == "":
```
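For readers skimming the diff, the sketch below isolates the control flow the commit introduces in `parse_feed()`: the fetch is wrapped in `try`/`except` so the exception message is recorded in `feed.last_error`, while the `finally` block still increments `error_count` and disables the feed after repeated failures; on success both fields are reset. The `Feed` class, `fetch()` helper and `retrieve()` function here are hypothetical stand-ins, not part of the repository; the real code runs as an asyncio coroutine and persists the changes through `db.session.commit()`.

```python
# Minimal, synchronous sketch of the error-handling pattern from this commit.
# Feed, fetch() and retrieve() are simplified stand-ins (assumptions), not the
# real SQLAlchemy model or the asyncio-based get() coroutine.

class Feed:
    def __init__(self, link):
        self.link = link
        self.enabled = True
        self.error_count = 0
        self.last_error = ""


def fetch(link):
    """Placeholder for the HTTP fetch; raises on any network error."""
    raise ConnectionError("host unreachable")


def retrieve(feed):
    data = None
    try:
        data = fetch(feed.link)
    except Exception as e:
        # Keep the reason of the failure, as the patched crawler does.
        feed.last_error = str(e)
    finally:
        if data is None:
            feed.error_count += 1
            # After three consecutive failures the feed is disabled.
            if feed.error_count > 2:
                feed.enabled = False
            return
    # On success the counters are reset (third hunk of the diff).
    feed.error_count = 0
    feed.last_error = ""


if __name__ == "__main__":
    feed = Feed("http://example.org/rss")
    for _ in range(3):
        retrieve(feed)
    print(feed.enabled, feed.error_count, feed.last_error)
    # -> False 3 host unreachable
```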