diff options
author | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2014-04-13 12:56:50 +0200 |
---|---|---|
committer | Cédric Bonhomme <cedric@cedricbonhomme.org> | 2014-04-13 12:56:50 +0200 |
commit | 4e6c9024dddc5c09307fd06eb38ecd57916da102 (patch) | |
tree | 7b45b10f22dbadfdfec84e17f97b3a44a069967d | |
parent | Test of the new crawler with gevent. (diff) | |
download | newspipe-4e6c9024dddc5c09307fd06eb38ecd57916da102.tar.gz newspipe-4e6c9024dddc5c09307fd06eb38ecd57916da102.tar.bz2 newspipe-4e6c9024dddc5c09307fd06eb38ecd57916da102.zip |
The crawler is launched in another process.
-rw-r--r-- | conf.py | 7 | ||||
-rw-r--r-- | fetch.py | 6 | ||||
-rw-r--r-- | pyaggr3g470r/views.py | 10 |
3 files changed, 16 insertions, 7 deletions
@@ -8,6 +8,9 @@ This file contain the variables used by the application.
 import os, sys
+basedir = os.path.abspath(os.path.dirname(__file__))
+PATH = os.path.abspath(".")
+
 ON_HEROKU = int(os.environ.get('HEROKU', 0)) == 1
 if not ON_HEROKU:
@@ -41,9 +44,6 @@ if not ON_HEROKU:
     MAIL_PASSWORD = config.get('mail', 'password')
     MAIL_FROM = config.get('mail', 'mail_from')
     MAIL_TO = config.get('mail', 'mail_to')
-
-    basedir = os.path.abspath(os.path.dirname(__file__))
-    PATH = os.path.abspath(".")
     WEBZINE_ROOT = PATH + "/pyaggr3g470r/var/export/"
@@ -62,6 +62,7 @@ else:
     WEBZINE_ROOT = "/tmp/"
+    CSRF_ENABLED = True
 # slow database query threshold (in seconds)
 DATABASE_QUERY_TIMEOUT = 0.5
@@ -12,5 +12,9 @@ from pyaggr3g470r import feedgetter
 if __name__ == "__main__":
     # Point of entry in execution mode
+    try:
+        feed_id = int(sys.argv[2])
+    except:
+        feed_id = None
     feed_getter = feedgetter.FeedGetter(sys.argv[1])
-    feed_getter.retrieve_feed(None)
+    feed_getter.retrieve_feed(feed_id)
diff --git a/pyaggr3g470r/views.py b/pyaggr3g470r/views.py
index ddc0ddf0..c2f49717 100644
--- a/pyaggr3g470r/views.py
+++ b/pyaggr3g470r/views.py
@@ -188,9 +188,13 @@ def fetch(feed_id=None):
     Triggers the download of news.
     """
     #feed_getter = feedgetter.FeedGetter(g.user.email)
-    feed_getter = crawler.FeedGetter(g.user.email)
-    feed_getter.retrieve_feed(feed_id)
-    flash("New articles retrieved.", 'success')
+    import os, subprocess
+    cmd = ['python', conf.basedir+'/fetch.py', g.user.email, str(feed_id)]
+    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+
+    #feed_getter = crawler.FeedGetter(g.user.email)
+    #feed_getter.retrieve_feed(feed_id)
+    flash("Downloading articles...", 'success')
     return redirect(redirect_url())
 @app.route('/about/', methods=['GET'])