path: root/manager.py
author Cédric Bonhomme <cedric@cedricbonhomme.org> 2020-03-09 23:16:05 +0100
committer Cédric Bonhomme <cedric@cedricbonhomme.org> 2020-03-09 23:16:05 +0100
commit 3ab6290d4994b33cdbf831523938cdb18a13bf49 (patch)
tree 685980f53aaa3eda4e27ddfc7032554f55528e57 /manager.py
parent Improved method to detect current version of the Newspipe instance. (diff)
Refactoring the backend.
Diffstat (limited to 'manager.py')
-rwxr-xr-x  manager.py  94
1 file changed, 94 insertions, 0 deletions
diff --git a/manager.py b/manager.py
new file mode 100755
index 00000000..bf935632
--- /dev/null
+++ b/manager.py
@@ -0,0 +1,94 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os
+import logging
+from datetime import datetime
+from werkzeug.security import generate_password_hash
+from flask_script import Manager
+from flask_migrate import Migrate, MigrateCommand
+
+import newspipe.models
+from newspipe.controllers import UserController
+from newspipe.bootstrap import application, db, set_logging
+
+logger = logging.getLogger("manager")
+
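+# Register Flask-Migrate on the application and database, and expose its
+# migration commands as the "db" sub-command of Flask-Script.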
+Migrate(application, db)
+
+manager = Manager(application)
+manager.add_command("db", MigrateCommand)
+
+
+@manager.command
+def db_empty():
+    "Will drop all data stored in the database."
+    with application.app_context():
+        newspipe.models.db_empty(db)
+
+
+@manager.command
+def db_create():
+    "Will create the database from the configuration parameters and add a default admin user."
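+    # The default admin password is read from the ADMIN_PASSWORD environment
+    # variable ("password" if unset).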
+    admin = {
+        "is_admin": True,
+        "is_api": True,
+        "is_active": True,
+        "nickname": "admin",
+        "pwdhash": generate_password_hash(os.environ.get("ADMIN_PASSWORD", "password")),
+    }
+    with application.app_context():
+        db.create_all()
+        UserController(ignore_context=True).create(**admin)
+
+
+@manager.command
+def create_admin(nickname, password):
+    "Will create an admin user."
+    admin = {
+        "is_admin": True,
+        "is_api": True,
+        "is_active": True,
+        "nickname": nickname,
+        "pwdhash": generate_password_hash(password),
+    }
+    with application.app_context():
+        UserController(ignore_context=True).create(**admin)
+
+
+@manager.command
+def fetch_asyncio(user_id=None, feed_id=None):
+    "Crawl the feeds with asyncio."
+    import asyncio
+
+    with application.app_context():
+        from newspipe.crawler import default_crawler
+
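+        # Only active users with automatic crawling enabled are considered;
+        # user_id optionally restricts the crawl to a single user.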
+        filters = {}
+        filters["is_active"] = True
+        filters["automatic_crawling"] = True
+        if user_id is not None:
+            filters["id"] = user_id
+        users = UserController().read(**filters).all()
+
+        try:
+            feed_id = int(feed_id)
+        except (TypeError, ValueError):
+            feed_id = None
+
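+        # Bounded queue (at most 3 items) between the producer and the consumer coroutines.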
+        loop = asyncio.get_event_loop()
+        queue = asyncio.Queue(maxsize=3, loop=loop)
+
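+        # The producer retrieves the feeds of the selected users; the consumer
+        # inserts the fetched articles.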
+        producer_coro = default_crawler.retrieve_feed(queue, users, feed_id)
+        consumer_coro = default_crawler.insert_articles(queue, 1)
+
+        logger.info("Starting crawler.")
+        start = datetime.now()
+        loop.run_until_complete(asyncio.gather(producer_coro, consumer_coro))
+        end = datetime.now()
+        loop.close()
+        logger.info("Crawler finished in {} seconds.".format((end - start).seconds))
+
+
+if __name__ == "__main__":
+    manager.run()