-rw-r--r--  migrations/versions/8bf5694c0b9e_add_column_automatic_crawling_to_the_.py  25
-rwxr-xr-x  src/manager.py                                                             22
-rw-r--r--  src/web/forms.py                                                            2
-rwxr-xr-x  src/web/lib/misc_utils.py                                                   2
-rw-r--r--  src/web/models/user.py                                                      2
-rw-r--r--  src/web/templates/profile.html                                              4
-rw-r--r--  src/web/views/user.py                                                       1
7 files changed, 44 insertions, 14 deletions
diff --git a/migrations/versions/8bf5694c0b9e_add_column_automatic_crawling_to_the_.py b/migrations/versions/8bf5694c0b9e_add_column_automatic_crawling_to_the_.py
new file mode 100644
index 00000000..5728449a
--- /dev/null
+++ b/migrations/versions/8bf5694c0b9e_add_column_automatic_crawling_to_the_.py
@@ -0,0 +1,25 @@
+"""add column automatic_crawling to the user table
+
+Revision ID: 8bf5694c0b9e
+Revises: 5553a6c05fa7
+Create Date: 2016-10-06 13:47:32.784711
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '8bf5694c0b9e'
+down_revision = '5553a6c05fa7'
+branch_labels = None
+depends_on = None
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+ op.add_column('user', sa.Column('automatic_crawling',
+ sa.Boolean(), default=True))
+
+
+def downgrade():
+ op.drop_column('user', 'automatic_crawling')
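A minimal sketch of applying (or reverting) this revision through Alembic's Python API, assuming an alembic.ini at the repository root that points at the migrations/ directory; the project may instead drive migrations through its own manager command:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")               # location of the config file is an assumption
command.upgrade(cfg, "8bf5694c0b9e")      # apply this revision
# command.downgrade(cfg, "5553a6c05fa7")  # revert to the previous revision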
diff --git a/src/manager.py b/src/manager.py
index a256684f..cd656226 100755
--- a/src/manager.py
+++ b/src/manager.py
@@ -49,21 +49,18 @@ def fetch(limit=100, retreive_all=False):
@manager.command
-def fetch_asyncio(user_id, feed_id):
+def fetch_asyncio(user_id=None, feed_id=None):
"Crawl the feeds with asyncio."
import asyncio
with application.app_context():
from crawler import classic_crawler
- ucontr = UserController()
- users = []
- try:
- users = ucontr.read(id=int(user_id)).all()
- except Exception as e:
- users = ucontr.read().all()
- finally:
- if users == []:
- users = ucontr.read().all()
+ filters = {}
+ filters['is_active'] = True
+ filters['automatic_crawling'] = True
+ if None is not user_id:
+ filters['id'] = user_id
+ users = UserController().read(**filters).all()
try:
feed_id = int(feed_id)
@@ -72,9 +69,8 @@ def fetch_asyncio(user_id, feed_id):
loop = asyncio.get_event_loop()
for user in users:
- if user.is_active:
- logger.info("Fetching articles for " + user.nickname)
- classic_crawler.retrieve_feed(loop, user, feed_id)
+ logger.info("Fetching articles for " + user.nickname)
+ classic_crawler.retrieve_feed(loop, user, feed_id)
loop.close()
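With both arguments now optional, the command can crawl every active user who has automatic_crawling enabled, or be narrowed to one user or one feed. A hypothetical in-process sketch of the three call shapes (fetch_asyncio is the function above; the numeric ids are placeholders):

fetch_asyncio()                        # all active users with automatic_crawling set
fetch_asyncio(user_id=42)              # only user 42's feeds
fetch_asyncio(user_id=42, feed_id=7)   # only feed 7 of user 42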
diff --git a/src/web/forms.py b/src/web/forms.py
index 69abac46..033ccbbe 100644
--- a/src/web/forms.py
+++ b/src/web/forms.py
@@ -156,6 +156,8 @@ class ProfileForm(Form):
validators.Required(lazy_gettext("Please enter your email."))])
password = PasswordField(lazy_gettext("Password"))
password_conf = PasswordField(lazy_gettext("Password Confirmation"))
+ automatic_crawling = BooleanField(lazy_gettext("Automatic crawling"),
+ default=True)
webpage = URLField(lazy_gettext("Webpage"))
twitter = URLField(lazy_gettext("Twitter"))
is_public_profile = BooleanField(lazy_gettext("Public profile"),
diff --git a/src/web/lib/misc_utils.py b/src/web/lib/misc_utils.py
index fc49dd6d..8166df29 100755
--- a/src/web/lib/misc_utils.py
+++ b/src/web/lib/misc_utils.py
@@ -108,7 +108,7 @@ def fetch(id, feed_id=None):
The "asyncio" crawler is launched with the manager.
"""
cmd = [sys.executable, conf.BASE_DIR + '/manager.py', 'fetch_asyncio',
- str(id), str(feed_id)]
+ '--user_id='+str(id)]
return subprocess.Popen(cmd, stdout=subprocess.PIPE)
def history(user_id, year=None, month=None):
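Because the manager command now takes keyword-style options, the helper forwards only --user_id and lets fetch_asyncio crawl all of that user's feeds. A hypothetical caller, with the import path assumed from the src/web/lib layout:

from web.lib.misc_utils import fetch   # import path is an assumption

proc = fetch(1)   # spawns: python manager.py fetch_asyncio --user_id=1
proc.wait()       # the caller may wait for the crawl or ignore the subprocess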
diff --git a/src/web/models/user.py b/src/web/models/user.py
index 59c4ad38..2b069728 100644
--- a/src/web/models/user.py
+++ b/src/web/models/user.py
@@ -48,6 +48,8 @@ class User(db.Model, UserMixin, RightMixin):
email = db.Column(db.String(254), index=True, unique=True)
pwdhash = db.Column(db.String())
+ automatic_crawling = db.Column(db.Boolean(), default=True)
+
is_public_profile = db.Column(db.Boolean(), default=False)
webpage = db.Column(db.String(), default="")
twitter = db.Column(db.String(), default="")
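The new column makes crawl eligibility queryable directly on the model. A hypothetical SQLAlchemy query equivalent to the filters built in manager.py above (the import locations of User and db are assumptions; the column names come from this diff):

from web.models.user import User   # import path is an assumption
from bootstrap import db           # hypothetical location of the db object

eligible_users = (db.session.query(User)
                  .filter(User.is_active.is_(True),
                          User.automatic_crawling.is_(True))
                  .all())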
diff --git a/src/web/templates/profile.html b/src/web/templates/profile.html
index d21608cf..39834ca5 100644
--- a/src/web/templates/profile.html
+++ b/src/web/templates/profile.html
@@ -42,6 +42,10 @@
{{ form.is_public_profile.label }}
{{ form.is_public_profile(class_="form-control") }} {% for error in form.is_public_profile.errors %} <span style="color: red;">{{ error }}<br /></span>{% endfor %}
<p>{{ _('Your profile will be available <a href="%(url)s">here</a>.', url=url_for('user.profile_public', nickname=user.nickname) ) }}</p>
+
+ {{ form.automatic_crawling.label }}
+ {{ form.automatic_crawling(class_="form-control") }} {% for error in form.automatic_crawling.errors %} <span style="color: red;">{{ error }}<br /></span>{% endfor %}
+ <p>{{ _('Uncheck if you are using your own crawler.') }}</p>
</div>
</div>
<div class="row">
diff --git a/src/web/views/user.py b/src/web/views/user.py
index bf568d69..26d65de3 100644
--- a/src/web/views/user.py
+++ b/src/web/views/user.py
@@ -107,6 +107,7 @@ def profile():
{'nickname': form.nickname.data,
'email': form.email.data,
'password': form.password.data,
+ 'automatic_crawling': form.automatic_crawling.data,
'is_public_profile': form.is_public_profile.data,
'webpage': form.webpage.data,
'twitter': form.twitter.data})