From 4a8438d7f2b7b16941240b91f39a9402c431ffc2 Mon Sep 17 00:00:00 2001
From: François Schmidts
Date: Tue, 2 Feb 2016 23:30:57 +0100
Subject: writing a bit of doc, moving crawler together

---
 src/manager.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to 'src/manager.py')

diff --git a/src/manager.py b/src/manager.py
index f7240670..781d742b 100755
--- a/src/manager.py
+++ b/src/manager.py
@@ -32,7 +32,7 @@ def db_create():
 @manager.command
 def fetch(limit=100, retreive_all=False):
     "Crawl the feeds with the client crawler."
-    from web.lib.crawler import CrawlerScheduler
+    from crawler.http_crawler import CrawlerScheduler
     scheduler = CrawlerScheduler(conf.API_LOGIN, conf.API_PASSWD)
     scheduler.run(limit=limit, retreive_all=retreive_all)
     scheduler.wait()
@@ -47,7 +47,7 @@ def fetch_asyncio(user_id, feed_id):
     populate_g()
     from flask import g
     from web.models import User
-    import crawler
+    from crawler import classic_crawler
     users = []
     try:
         users = User.query.filter(User.id == int(user_id)).all()
@@ -67,7 +67,7 @@ def fetch_asyncio(user_id, feed_id):
         if user.activation_key == "":
             print("Fetching articles for " + user.nickname)
             g.user = user
-            crawler.retrieve_feed(loop, g.user, feed_id)
+            classic_crawler.retrieve_feed(loop, g.user, feed_id)
     loop.close()

 from scripts.probes import ArticleProbe, FeedProbe
--
cgit