aboutsummaryrefslogtreecommitdiff
path: root/src/manager.py
diff options
context:
space:
mode:
authorCédric Bonhomme <cedric@cedricbonhomme.org>2016-02-03 07:11:37 +0100
committerCédric Bonhomme <cedric@cedricbonhomme.org>2016-02-03 07:11:37 +0100
commitda929a367c3f1fe5f3546be82e47111c2fa84ad3 (patch)
tree89380f41b802256d8fdbf724e7d9e63b48209b4a /src/manager.py
parentMerge pull request #30 from jaesivsm/master (diff)
parentwriting a bit of doc, moving crawler together (diff)
downloadnewspipe-da929a367c3f1fe5f3546be82e47111c2fa84ad3.tar.gz
newspipe-da929a367c3f1fe5f3546be82e47111c2fa84ad3.tar.bz2
newspipe-da929a367c3f1fe5f3546be82e47111c2fa84ad3.zip
Merge pull request #31 from jaesivsm/master
redoing UI
Diffstat (limited to 'src/manager.py')
-rwxr-xr-xsrc/manager.py10
1 file changed, 7 insertions, 3 deletions
diff --git a/src/manager.py b/src/manager.py
index e1f0878b..781d742b 100755
--- a/src/manager.py
+++ b/src/manager.py
@@ -12,6 +12,7 @@ Migrate(application, db)
manager = Manager(application)
manager.add_command('db', MigrateCommand)
+
@manager.command
def db_empty():
"Will drop every datas stocked in db."
@@ -19,6 +20,7 @@ def db_empty():
populate_g()
web.models.db_empty(db)
+
@manager.command
def db_create():
"Will create the database from conf parameters."
@@ -26,14 +28,16 @@ def db_create():
populate_g()
web.models.db_create(db)
+
@manager.command
def fetch(limit=100, retreive_all=False):
"Crawl the feeds with the client crawler."
- from web.lib.crawler import CrawlerScheduler
+ from crawler.http_crawler import CrawlerScheduler
scheduler = CrawlerScheduler(conf.API_LOGIN, conf.API_PASSWD)
scheduler.run(limit=limit, retreive_all=retreive_all)
scheduler.wait()
+
@manager.command
def fetch_asyncio(user_id, feed_id):
"Crawl the feeds with asyncio."
@@ -43,7 +47,7 @@ def fetch_asyncio(user_id, feed_id):
populate_g()
from flask import g
from web.models import User
- import crawler
+ from crawler import classic_crawler
users = []
try:
users = User.query.filter(User.id == int(user_id)).all()
@@ -63,7 +67,7 @@ def fetch_asyncio(user_id, feed_id):
if user.activation_key == "":
print("Fetching articles for " + user.nickname)
g.user = user
- feed_getter = crawler.retrieve_feed(loop, g.user, feed_id)
+ classic_crawler.retrieve_feed(loop, g.user, feed_id)
loop.close()
from scripts.probes import ArticleProbe, FeedProbe
bgstack15