| author | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2015-01-21 08:09:19 +0100 |
|---|---|---|
| committer | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2015-01-21 08:09:19 +0100 |
| commit | 98efe42fcd56743083f52696aff65a15f6f88883 (patch) | |
| tree | a62a5012cf51b1e731e35766bdb8111b88180946 /pyaggr3g470r | |
| parent | First implementation with asyncio (not really async for the moment). (diff) | |
Added link to examples.
Diffstat (limited to 'pyaggr3g470r')
| -rw-r--r-- | pyaggr3g470r/crawler.py | 10 |

1 file changed, 6 insertions, 4 deletions
diff --git a/pyaggr3g470r/crawler.py b/pyaggr3g470r/crawler.py
index 7815a631..1bc224fb 100644
--- a/pyaggr3g470r/crawler.py
+++ b/pyaggr3g470r/crawler.py
@@ -40,6 +40,12 @@ from pyaggr3g470r.models import User, Article
 logger = logging.getLogger(__name__)
 
+#
+# asyncio examples:
+# -http://compiletoi.net/fast-scraping-in-python-with-asyncio.html
+# - https://gist.github.com/kunev/f83146d407c81a2d64a6
+#
+
 
 @asyncio.coroutine
 def fetch(user, feed):
     """
@@ -167,8 +173,6 @@ def insert_database(user, feed):
     #db.session.close()
     return new_articles
 
-
-
 @asyncio.coroutine
 def done(feed):
     print('done {}'.format(feed.title))
@@ -178,8 +182,6 @@ def process_data(user, feed):
     data = yield from asyncio.async(insert_database(user, feed))
     print('inserted articles for {}'.format(feed.title))
 
-
-
 def retrieve_feed(user, feed_id=None):
     """
     Launch the processus.
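For context, the hunk above links two asyncio scraping examples and shows the crawler's pipeline of generator-based coroutines (fetch, insert_database, process_data) launched per feed with `yield from` / `asyncio.async`. The sketch below restates that pipeline with the modern async/await syntax rather than the pre-Python-3.5 style used in the 2015 file; the feed URLs, the simulated I/O delays, and the `retrieve_feeds()` name are illustrative placeholders, not part of pyaggr3g470r.

```python
# Minimal sketch (assumptions noted above): the same fetch -> insert -> report
# pipeline as crawler.py, one coroutine chain per feed, run concurrently,
# written with async/await instead of @asyncio.coroutine / "yield from".
import asyncio


async def fetch(feed_url):
    # Placeholder for the network retrieval done in crawler.fetch().
    await asyncio.sleep(0.1)  # simulate I/O latency
    return {"url": feed_url, "articles": ["article-1", "article-2"]}


async def insert_database(feed_data):
    # Placeholder for persisting new articles (crawler.insert_database()).
    await asyncio.sleep(0.05)  # simulate the database round-trip
    return feed_data["articles"]


async def process_data(feed_url):
    # Chain the two steps for one feed, as crawler.process_data() does.
    feed_data = await fetch(feed_url)
    new_articles = await insert_database(feed_data)
    print("inserted {} articles for {}".format(len(new_articles), feed_url))


async def retrieve_feeds(feed_urls):
    # Launch every feed concurrently, mirroring crawler.retrieve_feed().
    await asyncio.gather(*(process_data(url) for url in feed_urls))


if __name__ == "__main__":
    asyncio.run(retrieve_feeds([
        "http://example.org/feed-a",
        "http://example.org/feed-b",
    ]))
```

The point this commit's code preserves is that each feed is an independent coroutine chain, so a slow feed does not block the others; only the scheduling syntax differs between the 2015 file and this sketch.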