author | François Schmidts <francois.schmidts@gmail.com> | 2015-04-16 17:28:37 +0200
---|---|---
committer | François Schmidts <francois.schmidts@gmail.com> | 2015-04-16 17:28:37 +0200
commit | 6119f21815e0fe00db04caff4272344fa10da0de |
tree | 1f5aad5dd88763f791c7d27a16ed44d1684f229d |
parent | misc bugfix (dict should be set at load time) and var renaming |
refacto on config and more options in log levels
-rw-r--r-- | bootstrap.py | 5
-rw-r--r-- | conf.py | 126
-rw-r--r-- | conf/conf.cfg-sample | 2
-rwxr-xr-x | runserver.py | 3
4 files changed, 65 insertions, 71 deletions
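The central change is that the webserver's `debug` flag is gone: `conf.py` now exposes a numeric `LOG_LEVEL` built from the new `log_level` option, and `bootstrap.py`/`runserver.py` enable Flask debug mode only when that level is at or below `logging.DEBUG`. A short illustration of the mapping introduced in the diff below (the `'info'` value is just the sample default):

```python
import logging

# Name-to-level mapping as added in conf.py.
LOG_LEVELS = {'debug': logging.DEBUG,
              'info': logging.INFO,
              'warn': logging.WARN,
              'error': logging.ERROR,
              'fatal': logging.FATAL}

log_level = LOG_LEVELS['info']      # e.g. conf.cfg: log_level = info
debug = log_level <= logging.DEBUG  # False here; only 'debug' (10) turns debug mode on
```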
diff --git a/bootstrap.py b/bootstrap.py
index 9c2ce049..7a5a9b6e 100644
--- a/bootstrap.py
+++ b/bootstrap.py
@@ -22,13 +22,12 @@ from flask.ext.sqlalchemy import SQLAlchemy
 
 # Create Flask application
 application = Flask('pyaggr3g470r')
-application.debug = conf.WEBSERVER_DEBUG
+application.debug = conf.LOG_LEVEL <= logging.DEBUG
 scheme, domain, _, _, _ = urlsplit(conf.PLATFORM_URL)
 application.config['SERVER_NAME'] = domain
 application.config['PREFERRED_URL_SCHEME'] = scheme
 
-set_logging(conf.LOG_PATH,
-            log_level=logging.DEBUG if conf.WEBSERVER_DEBUG else logging.INFO)
+set_logging(conf.LOG_PATH, log_level=conf.LOG_LEVEL)
 
 # Create dummy secrey key so we can use sessions
 application.config['SECRET_KEY'] = getattr(conf, 'WEBSERVER_SECRET', None)
diff --git a/conf.py b/conf.py
--- a/conf.py
+++ b/conf.py
@@ -5,6 +5,7 @@
 This file contain the variables used by the application.
 """
 import os
+import logging
 
 basedir = os.path.abspath(os.path.dirname(__file__))
 PATH = os.path.abspath(".")
@@ -27,11 +28,11 @@ DEFAULTS = {"python": "/usr/bin/python3.4",
             "nb_worker": "100",
             "default_max_error": "3",
             "log_path": "pyaggr3g470r.log",
+            "log_level": "info",
             "user_agent": "pyAggr3g470r "
                           "(https://bitbucket.org/cedricbonhomme/pyaggr3g470r)",
             "resolve_article_url": "false",
             "http_proxy": "",
-            "debug": "true",
             "secret": "",
             "enabled": "false",
             "email": "",
@@ -40,6 +41,7 @@ DEFAULTS = {"python": "/usr/bin/python3.4",
             "host": "0.0.0.0",
             "port": "5000",
             "crawling_method": "classic",
+            "webzine_root": "/tmp",
             }
 
 if not ON_HEROKU:
@@ -50,72 +52,64 @@ if not ON_HEROKU:
     # load the configuration
     config = confparser.SafeConfigParser(defaults=DEFAULTS)
     config.read(os.path.join(basedir, "conf/conf.cfg"))
-
-    PLATFORM_URL = config.get('misc', 'platform_url')
-    ADMIN_EMAIL = config.get('misc', 'admin_email')
-    RECAPTCHA_PUBLIC_KEY = config.get('misc', 'recaptcha_public_key')
-    RECAPTCHA_PRIVATE_KEY = config.get('misc',
-                                       'recaptcha_private_key')
-    LOG_PATH = config.get('misc', 'log_path')
-    PYTHON = config.get('misc', 'python')
-    NB_WORKER = config.getint('misc', 'nb_worker')
-
-    WHOOSH_ENABLED = True
-
-    SQLALCHEMY_DATABASE_URI = config.get('database', 'uri')
-
-    HTTP_PROXY = config.get('feedparser', 'http_proxy')
-    USER_AGENT = config.get('feedparser', 'user_agent')
-    RESOLVE_ARTICLE_URL = config.getboolean('feedparser',
-                                            'resolve_article_url')
-    DEFAULT_MAX_ERROR = config.getint('feedparser',
-                                      'default_max_error')
-    CRAWLING_METHOD = config.get('feedparser', 'crawling_method')
-
-    WEBSERVER_DEBUG = config.getboolean('webserver', 'debug')
-    WEBSERVER_HOST = config.get('webserver', 'host')
-    WEBSERVER_PORT = config.getint('webserver', 'port')
-    WEBSERVER_SECRET = config.get('webserver', 'secret')
-
-    NOTIFICATION_EMAIL = config.get('notification', 'email')
-    NOTIFICATION_HOST = config.get('notification', 'host')
-    NOTIFICATION_PORT = config.getint('notification', 'port')
-    NOTIFICATION_TLS = config.getboolean('notification', 'tls')
-    NOTIFICATION_SSL = config.getboolean('notification', 'ssl')
-    NOTIFICATION_USERNAME = config.get('notification', 'username')
-    NOTIFICATION_PASSWORD = config.get('notification', 'password')
-
-    WEBZINE_ROOT = PATH + "/pyaggr3g470r/var/export/"
-
 else:
-    PLATFORM_URL = os.environ.get('PLATFORM_URL',
-                                  'https://pyaggr3g470r.herokuapp.com/')
-    ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', '')
-    RECAPTCHA_PUBLIC_KEY = os.environ.get('RECAPTCHA_PUBLIC_KEY', '')
-    RECAPTCHA_PRIVATE_KEY = os.environ.get('RECAPTCHA_PRIVATE_KEY', '')
-    LOG_PATH = os.environ.get('LOG_PATH', 'pyaggr3g470r.log')
-    PYTHON = 'python'
-
-    SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
-
-    HTTP_PROXY = ""
-    USER_AGENT = "Mozilla/5.0 " \
-        "(X11; Debian; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0"
-    RESOLVE_ARTICLE_URL = int(os.environ.get('RESOLVE_ARTICLE_URL', 0)) == 1
-    DEFAULT_MAX_ERROR = int(os.environ.get('DEFAULT_MAX_ERROR', 6))
-    CRAWLING_METHOD = os.environ.get('CRAWLING_METHOD',
-                                     DEFAULTS['crawling_method'])
-
-    WEBSERVER_DEBUG = False
-    WEBSERVER_HOST = '0.0.0.0'
-    WEBSERVER_PORT = int(os.environ.get('PORT', 5000))
-    WEBSERVER_SECRET = os.environ.get('SECRET_KEY', None)
-
-    NOTIFICATION_EMAIL = os.environ.get('NOTIFICATION_EMAIL', '')
-    POSTMARK_API_KEY = os.environ.get('POSTMARK_API_KEY', '')
-
-    WEBZINE_ROOT = "/tmp/"
-
+    class Config(object):
+        def get(self, _, name):
+            return os.environ.get(name.upper(), DEFAULTS.get(name))
+
+        def getint(self, _, name):
+            return int(self.get(_, name))
+
+        def getboolean(self, _, name):
+            value = self.get(_, name)
+            if value == 'true':
+                return True
+            elif value == 'false':
+                return False
+            return None
+    config = Config()
+
+
+PLATFORM_URL = config.get('misc', 'platform_url')
+ADMIN_EMAIL = config.get('misc', 'admin_email')
+RECAPTCHA_PUBLIC_KEY = config.get('misc', 'recaptcha_public_key')
+RECAPTCHA_PRIVATE_KEY = config.get('misc',
+                                   'recaptcha_private_key')
+LOG_PATH = config.get('misc', 'log_path')
+PYTHON = config.get('misc', 'python')
+NB_WORKER = config.getint('misc', 'nb_worker')
+
+WHOOSH_ENABLED = True
+
+SQLALCHEMY_DATABASE_URI = config.get('database', 'uri')
+
+HTTP_PROXY = config.get('feedparser', 'http_proxy')
+USER_AGENT = config.get('feedparser', 'user_agent')
+RESOLVE_ARTICLE_URL = config.getboolean('feedparser',
+                                        'resolve_article_url')
+DEFAULT_MAX_ERROR = config.getint('feedparser',
+                                  'default_max_error')
+CRAWLING_METHOD = config.get('feedparser', 'crawling_method')
+
+LOG_LEVEL = {'debug': logging.DEBUG,
+             'info': logging.INFO,
+             'warn': logging.WARN,
+             'error': logging.ERROR,
+             'fatal': logging.FATAL}[config.get('misc', 'log_level')]
+
+WEBSERVER_HOST = config.get('webserver', 'host')
+WEBSERVER_PORT = config.getint('webserver', 'port')
+WEBSERVER_SECRET = config.get('webserver', 'secret')
+
+NOTIFICATION_EMAIL = config.get('notification', 'email')
+NOTIFICATION_HOST = config.get('notification', 'host')
+NOTIFICATION_PORT = config.getint('notification', 'port')
+NOTIFICATION_TLS = config.getboolean('notification', 'tls')
+NOTIFICATION_SSL = config.getboolean('notification', 'ssl')
+NOTIFICATION_USERNAME = config.get('notification', 'username')
+NOTIFICATION_PASSWORD = config.get('notification', 'password')
+
+WEBZINE_ROOT = config.get('webserver', 'webzine_root')
 
 CSRF_ENABLED = True
 # slow database query threshold (in seconds)
diff --git a/conf/conf.cfg-sample b/conf/conf.cfg-sample
index 159af449..c594cd4a 100644
--- a/conf/conf.cfg-sample
+++ b/conf/conf.cfg-sample
@@ -6,6 +6,7 @@ recaptcha_private_key =
 log_path = ./pyaggr3g470r/var/pyaggr3g470r.log
 python = python3.3
 nb_worker = 5
+log_level = info
 [database]
 uri = postgres://pgsqluser:pgsqlpwd@127.0.0.1:5432/aggregator
 [feedparser]
@@ -15,7 +16,6 @@ resolve_article_url = false
 default_max_error = 6
 crawling_method = classic
 [webserver]
-debug = true
 host = 0.0.0.0
 port = 5000
 secret = a secret only you know
diff --git a/runserver.py b/runserver.py
index a80b0c39..5f20ddd4 100755
--- a/runserver.py
+++ b/runserver.py
@@ -18,6 +18,7 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
+import logging
 import calendar
 from bootstrap import conf, application, populate_g
 from flask.ext.babel import Babel
@@ -55,4 +56,4 @@ with application.app_context():
 
 if __name__ == '__main__':
     application.run(host=conf.WEBSERVER_HOST, port=conf.WEBSERVER_PORT,
-                    debug=conf.WEBSERVER_DEBUG)
+                    debug=conf.LOG_LEVEL <= logging.DEBUG)
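The other half of the refactoring removes the duplicated Heroku branch in `conf.py`: a small `Config` object mimics the `ConfigParser` accessors (`get`, `getint`, `getboolean`) but reads upper-cased environment variables with `DEFAULTS` as fallback, so the module-level assignments work the same against a `conf/conf.cfg` file or Heroku's environment. A minimal sketch of that lookup, using an illustrative `DEFAULTS` subset and example environment values that are not part of the commit:

```python
import os

# Illustrative subset of the DEFAULTS dict from conf.py.
DEFAULTS = {"port": "5000", "log_level": "info"}

class Config(object):
    """Environment-backed stand-in for the ConfigParser accessors."""
    def get(self, _section, name):
        # The section is ignored; the env var is the upper-cased option name.
        return os.environ.get(name.upper(), DEFAULTS.get(name))

    def getint(self, _section, name):
        return int(self.get(_section, name))

config = Config()
os.environ["PORT"] = "8080"                 # e.g. set by Heroku
print(config.getint('webserver', 'port'))   # 8080, taken from the environment
print(config.get('misc', 'log_level'))      # 'info', from DEFAULTS when LOG_LEVEL is unset
```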