-rw-r--r--  instance/production.py   37
-rw-r--r--  newspipe/conf.py        100
2 files changed, 15 insertions, 122 deletions
diff --git a/instance/production.py b/instance/production.py
index 487c9cbe..76b0bcb6 100644
--- a/instance/production.py
+++ b/instance/production.py
@@ -1,24 +1,13 @@
-# webserver
+# Webserver
 HOST = "127.0.0.1"
 PORT = 5000
 DEBUG = False
-TESTING = False
 API_ROOT = "/api/v2.0"
 SECRET_KEY = "LCx3BchmHRxFzkEv4BqQJyeXRLXenf"
 SECURITY_PASSWORD_SALT = "L8gTsyrpRQEF8jNWQPyvRfv7U5kJkD"
-
-# misc
-ADMIN_EMAIL = "admin@admin.localhost"
-TOKEN_VALIDITY_PERIOD = 3600
-LOG_LEVEL = "info"
-LOG_PATH = "./var/newspipe.log"
-NB_WORKER = 5
-SELF_REGISTRATION = True
-
-
-# database
+# Database
 DB_CONFIG_DICT = {
     "user": "user",
     "password": "password",
@@ -29,21 +18,18 @@ DATABASE_NAME = "newspipe"
 SQLALCHEMY_DATABASE_URI = "postgres://{user}:{password}@{host}:{port}/{name}".format(
     name=DATABASE_NAME, **DB_CONFIG_DICT
 )
-SQLALCHEMY_TRACK_MODIFICATIONS = False
-
-
-# crawler
+# Crawler
 CRAWLING_METHOD = "default"
 DEFAULT_MAX_ERROR = 3
 HTTP_PROXY = ""
-USER_AGENT = "Newspipe (https://git.sr.ht/~cedric/newspipe)"
+CRAWLER_USER_AGENT = "Newspipe (https://git.sr.ht/~cedric/newspipe)"
+CRAWLER_TIMEOUT = 5
+CRAWLER_RESOLV = False
 RESOLVE_ARTICLE_URL = False
-TIMEOUT = 30
-RESOLV = False
-FEED_REFRESH_INTERVAL = 0
-
+FEED_REFRESH_INTERVAL = 100
 
-# notification
+# Notification
 MAIL_SERVER = "localhost"
 MAIL_PORT = 25
 MAIL_USE_TLS = False
@@ -52,3 +38,10 @@ MAIL_DEBUG = DEBUG
 MAIL_USERNAME = None
 MAIL_PASSWORD = None
 MAIL_DEFAULT_SENDER = ADMIN_EMAIL
+
+# Misc
+ADMIN_EMAIL = "admin@admin.localhost"
+TOKEN_VALIDITY_PERIOD = 3600
+LOG_LEVEL = "info"
+LOG_PATH = "./var/newspipe.log"
+SELF_REGISTRATION = True
diff --git a/newspipe/conf.py b/newspipe/conf.py
deleted file mode 100644
index 09a38be5..00000000
--- a/newspipe/conf.py
+++ /dev/null
@@ -1,100 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-""" Program variables.
-
-This file contain the variables used by the application.
-""" -import configparser as confparser -import os -import logging - -BASE_DIR = os.path.abspath(os.path.dirname(__file__)) -PATH = os.path.abspath(".") - - -# available languages -LANGUAGES = {"en": "English", "fr": "French"} - -TIME_ZONE = {"en": "US/Eastern", "fr": "Europe/Paris"} - -DEFAULTS = { - "platform_url": "https://www.newspipe.org/", - "self_registration": "false", - "cdn_address": "", - "admin_email": "info@newspipe.org", - "token_validity_period": "3600", - "default_max_error": "3", - "log_path": "newspipe.log", - "log_level": "info", - "secret_key": "", - "security_password_salt": "", - "enabled": "false", - "notification_email": "info@newspipe.org", - "tls": "false", - "ssl": "true", - "host": "0.0.0.0", - "port": "5000", - "crawling_method": "default", - "crawler_user_agent": "Newspipe (https://github.com/newspipe)", - "crawler_timeout": "30", - "crawler_resolv": "false", - "feed_refresh_interval": "120", -} - - -# load the configuration -config = confparser.SafeConfigParser(defaults=DEFAULTS) -config.read(os.path.join(BASE_DIR, "conf/conf.cfg")) - - -WEBSERVER_HOST = config.get("webserver", "host") -WEBSERVER_PORT = config.getint("webserver", "port") -WEBSERVER_SECRET = config.get("webserver", "secret_key") -WEBSERVER_DEBUG = config.getboolean("webserver", "debug") - -CDN_ADDRESS = config.get("cdn", "cdn_address") - -try: - PLATFORM_URL = config.get("misc", "platform_url") -except: - PLATFORM_URL = "https://www.newspipe.org/" -ADMIN_EMAIL = config.get("misc", "admin_email") -SELF_REGISTRATION = config.getboolean("misc", "self_registration") -SECURITY_PASSWORD_SALT = config.get("misc", "security_password_salt") -try: - TOKEN_VALIDITY_PERIOD = config.getint("misc", "token_validity_period") -except: - TOKEN_VALIDITY_PERIOD = int(config.get("misc", "token_validity_period")) -LOG_PATH = os.path.abspath(config.get("misc", "log_path")) -LOG_LEVEL = { - "debug": logging.DEBUG, - "info": logging.INFO, - "warn": logging.WARN, - "error": logging.ERROR, - "fatal": logging.FATAL, -}[config.get("misc", "log_level")] - -SQLALCHEMY_DATABASE_URI = config.get("database", "database_url") - -CRAWLING_METHOD = config.get("crawler", "crawling_method") -CRAWLER_USER_AGENT = config.get("crawler", "user_agent") -DEFAULT_MAX_ERROR = config.getint("crawler", "default_max_error") -ERROR_THRESHOLD = int(DEFAULT_MAX_ERROR / 2) -CRAWLER_TIMEOUT = config.get("crawler", "timeout") -CRAWLER_RESOLV = config.getboolean("crawler", "resolv") -try: - FEED_REFRESH_INTERVAL = config.getint("crawler", "feed_refresh_interval") -except: - FEED_REFRESH_INTERVAL = int(config.get("crawler", "feed_refresh_interval")) - -NOTIFICATION_EMAIL = config.get("notification", "notification_email") -NOTIFICATION_HOST = config.get("notification", "host") -NOTIFICATION_PORT = config.getint("notification", "port") -NOTIFICATION_TLS = config.getboolean("notification", "tls") -NOTIFICATION_SSL = config.getboolean("notification", "ssl") -NOTIFICATION_USERNAME = config.get("notification", "username") -NOTIFICATION_PASSWORD = config.get("notification", "password") - -CSRF_ENABLED = True -# slow database query threshold (in seconds) -DATABASE_QUERY_TIMEOUT = 0.5 |