about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author Cédric Bonhomme <cedric@cedricbonhomme.org> 2016-11-17 09:24:23 +0100
committer Cédric Bonhomme <cedric@cedricbonhomme.org> 2016-11-17 09:24:23 +0100
commit 85f6275906a604736e8f6dd67bb281889ec189cc (patch)
tree 9705fa208e7da6530e84e47d40a7adcfea39d852 /src
parent Updated CHANGELOG. (diff)
download newspipe-85f6275906a604736e8f6dd67bb281889ec189cc.tar.gz
newspipe-85f6275906a604736e8f6dd67bb281889ec189cc.tar.bz2
newspipe-85f6275906a604736e8f6dd67bb281889ec189cc.zip
Cleaning config variables.
Diffstat (limited to 'src')
-rw-r--r-- src/conf.py | 18
-rw-r--r-- src/conf/conf.cfg-sample | 3
2 files changed, 7 insertions, 14 deletions
diff --git a/src/conf.py b/src/conf.py
index 466d9c88..f12854dd 100644
--- a/src/conf.py
+++ b/src/conf.py
@@ -29,9 +29,6 @@ DEFAULTS = {"platform_url": "https://www.newspipe.org/",
"admin_email": "info@newspipe.org",
"postmark_api_key": "",
"token_validity_period": "3600",
- "nb_worker": "100",
- "api_login": "",
- "api_passwd": "",
"default_max_error": "3",
"log_path": "newspipe.log",
"log_level": "info",
@@ -73,12 +70,17 @@ else:
config = Config()
+WEBSERVER_HOST = config.get('webserver', 'host')
+WEBSERVER_PORT = config.getint('webserver', 'port')
+WEBSERVER_SECRET = config.get('webserver', 'secret_key')
+
+CDN_ADDRESS = config.get('cdn', 'cdn_address')
+
PLATFORM_URL = config.get('misc', 'platform_url')
ADMIN_EMAIL = config.get('misc', 'admin_email')
SELF_REGISTRATION = config.getboolean('misc', 'self_registration')
SECURITY_PASSWORD_SALT = config.get('misc', 'security_password_salt')
TOKEN_VALIDITY_PERIOD = config.getint('misc', 'token_validity_period')
-NB_WORKER = config.getint('misc', 'nb_worker')
if not ON_HEROKU:
LOG_PATH = os.path.abspath(config.get('misc', 'log_path'))
else:
@@ -92,8 +94,6 @@ LOG_LEVEL = {'debug': logging.DEBUG,
SQLALCHEMY_DATABASE_URI = config.get('database', 'database_url')
CRAWLING_METHOD = config.get('crawler', 'crawling_method')
-API_LOGIN = config.get('crawler', 'api_login')
-API_PASSWD = config.get('crawler', 'api_passwd')
CRAWLER_USER_AGENT = config.get('crawler', 'user_agent')
DEFAULT_MAX_ERROR = config.getint('crawler', 'default_max_error')
ERROR_THRESHOLD = int(DEFAULT_MAX_ERROR / 2)
@@ -101,12 +101,6 @@ CRAWLER_TIMEOUT = config.get('crawler', 'timeout')
CRAWLER_RESOLV = config.getboolean('crawler', 'resolv')
FEED_REFRESH_INTERVAL = config.getint('crawler', 'feed_refresh_interval')
-WEBSERVER_HOST = config.get('webserver', 'host')
-WEBSERVER_PORT = config.getint('webserver', 'port')
-WEBSERVER_SECRET = config.get('webserver', 'secret_key')
-
-CDN_ADDRESS = config.get('cdn', 'cdn_address')
-
NOTIFICATION_EMAIL = config.get('notification', 'notification_email')
NOTIFICATION_HOST = config.get('notification', 'host')
NOTIFICATION_PORT = config.getint('notification', 'port')
diff --git a/src/conf/conf.cfg-sample b/src/conf/conf.cfg-sample
index 7c4668af..24433517 100644
--- a/src/conf/conf.cfg-sample
+++ b/src/conf/conf.cfg-sample
@@ -9,8 +9,7 @@ platform_url = http://127.0.0.1:5000/
admin_email =
security_password_salt = a secret to confirm user account
token_validity_period = 3600
-log_path = ./var/newspipe.log
-nb_worker = 5
+log_path = ./var/log/newspipe.log
log_level = info
[database]
database_url = postgres://pgsqluser:pgsqlpwd@127.0.0.1:5432/aggregator
bgstack15