-rw-r--r-- | pyaggr3g470r/crawler.py | 10 |
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/pyaggr3g470r/crawler.py b/pyaggr3g470r/crawler.py
index 7815a631..1bc224fb 100644
--- a/pyaggr3g470r/crawler.py
+++ b/pyaggr3g470r/crawler.py
@@ -40,6 +40,12 @@ from pyaggr3g470r.models import User, Article
 
 logger = logging.getLogger(__name__)
 
+#
+# asyncio examples:
+# -http://compiletoi.net/fast-scraping-in-python-with-asyncio.html
+# - https://gist.github.com/kunev/f83146d407c81a2d64a6
+#
+
 @asyncio.coroutine
 def fetch(user, feed):
     """
@@ -167,8 +173,6 @@ def insert_database(user, feed):
     #db.session.close()
     return new_articles
 
-
-
 @asyncio.coroutine
 def done(feed):
     print('done {}'.format(feed.title))
@@ -178,8 +182,6 @@ def process_data(user, feed):
     data = yield from asyncio.async(insert_database(user, feed))
     print('inserted articles for {}'.format(feed.title))
 
-
-
 def retrieve_feed(user, feed_id=None):
     """
     Launch the processus.
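
Context for the asyncio links added in this commit: the crawler chains coroutines (fetch feeds, insert articles, report completion) and schedules one task per feed. Below is a minimal, self-contained sketch of that fan-out pattern, not the project's actual implementation: the function names mirror the crawler's, but the bodies are placeholders (asyncio.sleep stands in for the real HTTP fetch and database write), and it uses modern async/await syntax rather than the @asyncio.coroutine / yield from style visible in the diff.

import asyncio

# Sketch of the fetch -> insert -> process chain, with placeholder bodies.
# The real module targets Python 3.4 and spells the same pattern with
# @asyncio.coroutine and "yield from".

async def fetch(feed):
    # Stand-in for the real feed download performed by the crawler.
    await asyncio.sleep(0.1)
    return {"feed": feed, "entries": ["entry-1", "entry-2"]}

async def insert_database(feed):
    data = await fetch(feed)
    # The real implementation stores new articles in the database here.
    return data["entries"]

async def process_data(feed):
    new_articles = await insert_database(feed)
    print('inserted {} articles for {}'.format(len(new_articles), feed))

async def retrieve_feeds(feeds):
    # One task per feed, run concurrently; the diff's asyncio.async(...) call
    # plays the same scheduling role in the pre-3.5 API.
    await asyncio.gather(*(process_data(feed) for feed in feeds))

if __name__ == '__main__':
    asyncio.run(retrieve_feeds(['feed-a', 'feed-b']))

asyncio.async() was later renamed to asyncio.ensure_future() and removed in Python 3.10, and @asyncio.coroutine was removed in 3.11, which is why the modern spelling is shown here; the coroutine-chaining structure is the same as in the patched crawler.py.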