author | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2015-06-02 10:00:51 +0200 |
---|---|---|
committer | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2015-06-02 10:00:51 +0200 |
commit | b07e05cc47b53c1b02392a5deb5b3fa6fa85569a (patch) | |
tree | d1490c7f601e641168ba9da0c0e70629461bf627 | |
parent | Bug fix: the 'expire_articles' function was deleting articles of all users! (diff) | |
download | newspipe-b07e05cc47b53c1b02392a5deb5b3fa6fa85569a.tar.gz newspipe-b07e05cc47b53c1b02392a5deb5b3fa6fa85569a.tar.bz2 newspipe-b07e05cc47b53c1b02392a5deb5b3fa6fa85569a.zip |
It is recommended to close the loop at the end of the process (https://groups.google.com/forum/#!topic/python-tulip/8bRLexUzeU4).
-rwxr-xr-x | manager.py | 6 |
-rw-r--r-- | pyaggr3g470r/crawler.py | 9 |
2 files changed, 9 insertions, 6 deletions
--- a/manager.py
+++ b/manager.py
@@ -1,6 +1,8 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
 
+import asyncio
+
 from bootstrap import application, db, populate_g
 from flask.ext.script import Manager
 from flask.ext.migrate import Migrate, MigrateCommand
@@ -55,10 +57,12 @@ def fetch_asyncio(user_id, feed_id):
     except:
         feed_id = None
 
+    loop = asyncio.get_event_loop()
     for user in users:
         if user.activation_key == "":
             print("Fetching articles for " + user.nickname)
-            feed_getter = crawler.retrieve_feed(user, feed_id)
+            feed_getter = crawler.retrieve_feed(loop, user, feed_id)
+    loop.close()
 
 if __name__ == '__main__':
     manager.run()
diff --git a/pyaggr3g470r/crawler.py b/pyaggr3g470r/crawler.py
index 8052fb63..6bf216f3 100644
--- a/pyaggr3g470r/crawler.py
+++ b/pyaggr3g470r/crawler.py
@@ -223,7 +223,7 @@ def init_process(user, feed):
         #print('inserted articles for {}'.format(feed.title))
     return articles
 
-def retrieve_feed(user, feed_id=None):
+def retrieve_feed(loop, user, feed_id=None):
     """
     Launch the processus.
     """
@@ -239,9 +239,8 @@ def retrieve_feed(user, feed_id=None):
     if feeds == []:
         return
-
+    import time
     # Launch the process for all the feeds
-    loop = asyncio.get_event_loop()
     tasks = []
     try:
         # Python 3.5 (test)
@@ -250,7 +249,7 @@
         tasks = [init_process(user, feed) for feed in feeds]
     try:
         loop.run_until_complete(asyncio.wait(tasks))
-    finally:
-        loop.close()
+    except Exception as e:
+        print(e)
     logger.info("All articles retrieved. End of the processus.")
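
For context, a minimal self-contained sketch of the pattern this commit moves towards: the entry point creates the event loop once, hands it to the crawler for each user, and closes it only when the whole process is finished, as suggested in the python-tulip thread linked above. The `fetch_feed` coroutine, the user and feed values, and the use of `asyncio.gather` are illustrative assumptions, not the project's code; the project itself runs `init_process` tasks through `asyncio.wait` inside `crawler.py`.

```python
import asyncio

async def fetch_feed(user, feed_id):
    # Placeholder for the real article-fetching coroutine in crawler.py.
    await asyncio.sleep(0)
    return (user, feed_id)

def retrieve_feed(loop, user, feed_id=None):
    # The loop is received from the caller instead of being created here,
    # so the function can be called repeatedly on the same loop.
    tasks = [fetch_feed(user, fid) for fid in (feed_id, "another-feed")]
    loop.run_until_complete(asyncio.gather(*tasks))

if __name__ == "__main__":
    loop = asyncio.get_event_loop()   # created once by the entry point
    try:
        for user in ("alice", "bob"):
            retrieve_feed(loop, user, feed_id="some-feed")
    finally:
        loop.close()                  # closed once, at the end of the process
```

Note that `manager.py` in this commit calls `loop.close()` right after the `for` loop rather than in a `try`/`finally` block; the `finally` in the sketch only keeps the example robust if one iteration raises.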