-rw-r--r--  src/conf.py                                                         2
-rw-r--r--  src/conf/conf.cfg-sample                                            2
-rw-r--r--  src/crawler/default_crawler.py (renamed from src/crawler/classic_crawler.py)  0
-rwxr-xr-x  src/lib/misc_utils.py                                               6
-rwxr-xr-x  src/manager.py                                                     13
-rw-r--r--  src/web/js/stores/MenuStore.js                                      2
-rw-r--r--  src/web/templates/layout.html                                       2
-rw-r--r--  src/web/views/home.py                                               4
8 files changed, 11 insertions, 20 deletions
diff --git a/src/conf.py b/src/conf.py
index f12854dd..70fde9c4 100644
--- a/src/conf.py
+++ b/src/conf.py
@@ -40,7 +40,7 @@ DEFAULTS = {"platform_url": "https://www.newspipe.org/",
             "ssl": "true",
             "host": "0.0.0.0",
             "port": "5000",
-            "crawling_method": "classic",
+            "crawling_method": "default",
             "crawler_user_agent": "Newspipe (https://github.com/newspipe)",
             "crawler_timeout": "30",
             "crawler_resolv": "false",
diff --git a/src/conf/conf.cfg-sample b/src/conf/conf.cfg-sample
index 24433517..b1dde26f 100644
--- a/src/conf/conf.cfg-sample
+++ b/src/conf/conf.cfg-sample
@@ -14,7 +14,7 @@ log_level = info
 [database]
 database_url = postgres://pgsqluser:pgsqlpwd@127.0.0.1:5432/aggregator
 [crawler]
-crawling_method = classic
+crawling_method = default
 default_max_error = 6
 user_agent = Newspipe (https://github.com/newspipe/newspipe)
 timeout = 30
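
The [crawler] section above pairs with the DEFAULTS dict changed in src/conf.py: values missing from the config file fall back to the renamed "default" method. A minimal sketch of that resolution, assuming conf.py reads the file with Python's configparser (the file name "conf.cfg" and the trimmed DEFAULTS dict here are illustrative, not the project's exact code):

# Sketch only: resolves "crawling_method" from conf.cfg, falling back
# to the DEFAULTS dict when the key is absent.
from configparser import ConfigParser

DEFAULTS = {"crawling_method": "default"}  # trimmed to the relevant key

parser = ConfigParser()
parser.read("conf.cfg")  # illustrative path

CRAWLING_METHOD = parser.get("crawler", "crawling_method",
                             fallback=DEFAULTS["crawling_method"])
print(CRAWLING_METHOD)  # -> "default" unless conf.cfg overrides it
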
diff --git a/src/crawler/classic_crawler.py b/src/crawler/default_crawler.py
index 34726a83..34726a83 100644
--- a/src/crawler/classic_crawler.py
+++ b/src/crawler/default_crawler.py
diff --git a/src/lib/misc_utils.py b/src/lib/misc_utils.py
index d594c01e..7d9f55fd 100755
--- a/src/lib/misc_utils.py
+++ b/src/lib/misc_utils.py
@@ -20,9 +20,9 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 __author__ = "Cedric Bonhomme"
-__version__ = "$Revision: 1.9 $"
+__version__ = "$Revision: 1.10 $"
 __date__ = "$Date: 2010/12/07 $"
-__revision__ = "$Date: 2016/01/17 $"
+__revision__ = "$Date: 2016/11/22 $"
 __copyright__ = "Copyright (c) Cedric Bonhomme"
 __license__ = "AGPLv3"
@@ -98,7 +98,7 @@ def opened_w_error(filename, mode="r"):
 def fetch(id, feed_id=None):
     """
     Fetch the feeds in a new processus.
-    The "asyncio" crawler is launched with the manager.
+    The default crawler ("asyncio") is launched with the manager.
     """
     cmd = [sys.executable, conf.BASE_DIR + '/manager.py', 'fetch_asyncio',
            '--user_id='+str(id)]
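
The hunk ends before the command list built above is actually started. A hedged sketch of how such a command is typically launched without blocking the web request; the --feed_id flag, the BASE_DIR stand-in, and the Popen call are assumptions here, not the project's confirmed code:

import subprocess
import sys

BASE_DIR = "/path/to/src"  # stands in for conf.BASE_DIR

def fetch(id, feed_id=None):
    """Spawn `manager.py fetch_asyncio` in a separate process (sketch)."""
    cmd = [sys.executable, BASE_DIR + '/manager.py', 'fetch_asyncio',
           '--user_id=' + str(id)]
    if feed_id is not None:
        cmd.append('--feed_id=' + str(feed_id))  # assumed flag, mirrors --user_id
    # Popen returns immediately; the crawl continues in the child process.
    return subprocess.Popen(cmd)
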
diff --git a/src/manager.py b/src/manager.py
index 46f8fe10..60e4c4f1 100755
--- a/src/manager.py
+++ b/src/manager.py
@@ -41,21 +41,12 @@ def db_create():
 @manager.command
-def fetch(limit=100, retreive_all=False):
-    "Crawl the feeds with the client crawler."
-    from crawler.http_crawler import CrawlerScheduler
-    scheduler = CrawlerScheduler(conf.API_LOGIN, conf.API_PASSWD)
-    scheduler.run(limit=limit, retreive_all=retreive_all)
-    scheduler.wait()
-
-
-@manager.command
 def fetch_asyncio(user_id=None, feed_id=None):
     "Crawl the feeds with asyncio."
     import asyncio
     with application.app_context():
-        from crawler import classic_crawler
+        from crawler import default_crawler
         filters = {}
         filters['is_active'] = True
         filters['automatic_crawling'] = True
@@ -73,7 +64,7 @@ def fetch_asyncio(user_id=None, feed_id=None):
         start = datetime.now()
         loop = asyncio.get_event_loop()
         for user in users:
-            classic_crawler.retrieve_feed(loop, user, feed_id)
+            default_crawler.retrieve_feed(loop, user, feed_id)
         loop.close()
         end = datetime.now()
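
fetch_asyncio reuses one event loop across all users: retrieve_feed receives the loop, runs it to completion for that user's feeds, and only after the last user does the command close the loop. A rough sketch of that pattern; the feed attributes (user.feeds, f.id) and the coroutine body are placeholders, as the real default_crawler does database and HTTP work here:

import asyncio

async def parse_feed(user, feed):
    """Placeholder coroutine; real code would download and parse the feed."""
    await asyncio.sleep(0)

def retrieve_feed(loop, user, feed_id=None):
    # Select this user's feeds, optionally narrowed to a single feed.
    feeds = [f for f in user.feeds if feed_id is None or f.id == feed_id]
    tasks = [loop.create_task(parse_feed(user, f)) for f in feeds]
    # run_until_complete() may be called repeatedly until loop.close().
    loop.run_until_complete(asyncio.gather(*tasks))
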
diff --git a/src/web/js/stores/MenuStore.js b/src/web/js/stores/MenuStore.js
index f1b83dce..770bc501 100644
--- a/src/web/js/stores/MenuStore.js
+++ b/src/web/js/stores/MenuStore.js
@@ -8,7 +8,7 @@ var assign = require('object-assign');
 var MenuStore = assign({}, EventEmitter.prototype, {
     _datas: {filter: 'unread', feeds: {}, categories: {}, categories_order: [],
              active_type: null, active_id: null,
-             is_admin: false, crawling_method: 'classic',
+             is_admin: false, crawling_method: 'default',
              all_unread_count: 0, max_error: 0, error_threshold: 0,
              all_folded: false},
     getAll: function() {
diff --git a/src/web/templates/layout.html b/src/web/templates/layout.html
index ad0b0ea0..5bd5c3d6 100644
--- a/src/web/templates/layout.html
+++ b/src/web/templates/layout.html
@@ -42,7 +42,7 @@
       <div id="navbar" class="navbar-collapse collapse">
         <ul class="nav navbar-nav navbar-right">
           {% if current_user.is_authenticated %}
-          {% if conf.CRAWLING_METHOD == "classic" and (not conf.ON_HEROKU or current_user.is_admin) %}
+          {% if conf.CRAWLING_METHOD == "default" and (not conf.ON_HEROKU or current_user.is_admin) %}
           <li><a href="/fetch"><span class="glyphicon glyphicon-import"></span> {{ _('Fetch') }}</a></li>
           {% endif %}
           <li class="dropdown">
diff --git a/src/web/views/home.py b/src/web/views/home.py
index 5274dc12..34ecb9fa 100644
--- a/src/web/views/home.py
+++ b/src/web/views/home.py
@@ -160,9 +160,9 @@ def mark_all_as_read():
 def fetch(feed_id=None):
     """
     Triggers the download of news.
-    News are downloaded in a separated process, mandatory for Heroku.
+    News are downloaded in a separate process.
     """
-    if conf.CRAWLING_METHOD == "classic" \
+    if conf.CRAWLING_METHOD == "default" \
             and (not conf.ON_HEROKU or current_user.is_admin):
         misc_utils.fetch(current_user.id, feed_id)
         flash(gettext("Downloading articles..."), "info")
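
The condition in this view (and the matching one in layout.html above) reads as a small predicate: manual fetching is only available with the default crawler, and on Heroku only to administrators. A distilled restatement of that logic, not the project's code:

def can_trigger_fetch(crawling_method, on_heroku, is_admin):
    # Mirrors the `if` in home.py: default crawler only, and on Heroku
    # the manual fetch is restricted to administrators.
    return crawling_method == "default" and (not on_heroku or is_admin)

assert can_trigger_fetch("default", on_heroku=False, is_admin=False)
assert not can_trigger_fetch("default", on_heroku=True, is_admin=False)
assert can_trigger_fetch("default", on_heroku=True, is_admin=True)
assert not can_trigger_fetch("classic", on_heroku=False, is_admin=True)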