author | François Schmidts <francois.schmidts@gmail.com> | 2015-09-11 18:28:12 +0200 |
---|---|---|
committer | François Schmidts <francois.schmidts@gmail.com> | 2016-01-26 23:46:30 +0100 |
commit | 462f6d3b21558ed0a283c24e0e0332eac6ccbbb3 (patch) | |
tree | 451c583b5f47bbf6e38743881c66f2f27371bd82 /src/crawler.py | |
parent | moving the root of source code from / to /src/ (diff) | |
download | newspipe-462f6d3b21558ed0a283c24e0e0332eac6ccbbb3.tar.gz newspipe-462f6d3b21558ed0a283c24e0e0332eac6ccbbb3.tar.bz2 newspipe-462f6d3b21558ed0a283c24e0e0332eac6ccbbb3.zip |
base modification in model for category support
Diffstat (limited to 'src/crawler.py')
-rw-r--r-- | src/crawler.py | 5 |
1 file changed, 1 insertion, 4 deletions
diff --git a/src/crawler.py b/src/crawler.py
index 1a759945..0598c418 100644
--- a/src/crawler.py
+++ b/src/crawler.py
@@ -58,9 +58,6 @@ else:
     async def get(*args, **kwargs):
         #kwargs["connector"] = aiohttp.TCPConnector(verify_ssl=False)
         try:
-            #logger.info("Fetching the feed: " + args[0])
-            #response = yield from aiohttp.request('GET', *args, **kwargs)
-            #return (yield from response.read_and_close(decode=False))
             data = feedparser.parse(args[0])
             return data
         except Exception as e:
@@ -131,7 +128,7 @@ async def insert_database(user, feed):
             new_articles.append(art_contr.create(**article))
             logger.info("New article % (%r) added.",
                         article['title'], article['link'])
-        except Exception as e:
+        except Exception:
             logger.exception("Error when inserting article in database:")
             continue
     return new_articles
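Read together, the two hunks leave the crawler parsing feeds directly with feedparser and logging insertion failures without binding an unused exception name: logger.exception() already records the active traceback, so a bare "except Exception:" is enough. The following is a minimal, self-contained sketch of those two patterns only, not the repository's code; the names fetch_feed, insert_articles and create, and the example URL, are illustrative placeholders.

import asyncio
import logging

import feedparser

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("crawler-sketch")


async def fetch_feed(url):
    # feedparser performs the HTTP request and the parsing itself, which is
    # why the commented-out aiohttp code path could be deleted outright.
    return feedparser.parse(url)


def insert_articles(articles, create):
    # Mirrors the second hunk: logger.exception() logs the active traceback
    # on its own, so the exception never needs to be bound to a name.
    new_articles = []
    for article in articles:
        try:
            new_articles.append(create(**article))
            logger.info("New article %r (%r) added.",
                        article['title'], article['link'])
        except Exception:
            logger.exception("Error when inserting article in database:")
            continue
    return new_articles


if __name__ == "__main__":
    feed = asyncio.run(fetch_feed("https://example.org/feed.xml"))
    print(len(feed.entries), "entries parsed")

Note that the repository's own logger.info call keeps the format string "New article % (%r) added."; the sketch uses two %r placeholders so that both arguments are actually formatted.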