aboutsummaryrefslogtreecommitdiff
path: root/src/conf.py
blob: ced602ca02ac94ba5d2cef030d8ac4027dfbf0f6 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
#! /usr/bin/env python
# -*- coding: utf-8 -*-
""" Program variables.

This file contains the variables used by the application.
"""
import os
import logging

# Directory containing this file; used below to locate conf/conf.cfg.
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
# Absolute path of the working directory at import time.
PATH = os.path.abspath(".")
# URL prefix under which the REST API is served.
API_ROOT = '/api/v2.0'

# Languages the user interface can be displayed in,
# mapping locale code to display name.
LANGUAGES = dict(en='English', fr='French')

# Default time zone associated with each supported locale.
TIME_ZONE = dict(en="US/Eastern", fr="Europe/Paris")

# True when the HEROKU environment variable is set to 1.
ON_HEROKU = 1 == int(os.environ.get('HEROKU', 0))

# Fallback values used when an option is absent from the configuration
# file (or, on Heroku, from the environment). All values are strings,
# mirroring what a config file would provide.
DEFAULTS = {
    "platform_url": "https://www.newspipe.org/",
    "self_registration": "false",
    "cdn_address": "",
    "admin_email": "info@newspipe.org",
    "sendgrid_api_key": "",
    "token_validity_period": "3600",
    "default_max_error": "3",
    "log_path": "newspipe.log",
    "log_level": "info",
    "secret_key": "",
    "security_password_salt": "",
    "enabled": "false",
    "notification_email": "info@newspipe.org",
    "tls": "false",
    "ssl": "true",
    "host": "0.0.0.0",
    "port": "5000",
    "crawling_method": "default",
    "crawler_user_agent": "Newspipe (https://github.com/newspipe)",
    "crawler_timeout": "30",
    "crawler_resolv": "false",
    "feed_refresh_interval": "120",
}

if not ON_HEROKU:
    import configparser as confparser
    # Load the configuration file, with DEFAULTS supplying any missing
    # option. NOTE: SafeConfigParser was deprecated in Python 3.2 and
    # removed in 3.12; ConfigParser is its direct drop-in replacement.
    config = confparser.ConfigParser(defaults=DEFAULTS)
    config.read(os.path.join(BASE_DIR, "conf/conf.cfg"))
else:
    class Config(object):
        """Environment-backed stand-in mimicking the ConfigParser API.

        The section argument is ignored: values are looked up in the
        environment under the upper-cased option name, falling back to
        DEFAULTS.
        """

        def get(self, _, name):
            # Environment variable first, DEFAULTS second.
            return os.environ.get(name.upper(), DEFAULTS.get(name))

        def getint(self, _, name):
            # Raises TypeError/ValueError if the option is unset/invalid.
            return int(self.get(_, name))

        def getboolean(self, _, name):
            # Only the literal strings 'true'/'false' map to booleans;
            # anything else (including None) yields None.
            value = self.get(_, name)
            if value == 'true':
                return True
            elif value == 'false':
                return False
            return None
    config = Config()


WEBSERVER_HOST = config.get('webserver', 'host')
WEBSERVER_PORT = config.getint('webserver', 'port')
WEBSERVER_SECRET = config.get('webserver', 'secret_key')
WEBSERVER_DEBUG = config.getboolean('webserver', 'debug')

CDN_ADDRESS = config.get('cdn', 'cdn_address')

# --- miscellaneous settings, read from the [misc] section ---
# Exception handlers below were narrowed from bare "except:", which would
# also swallow SystemExit and KeyboardInterrupt.
try:
    PLATFORM_URL = config.get('misc', 'platform_url')
except Exception:
    # Option unavailable from any source: fall back to the public URL.
    PLATFORM_URL = "https://www.newspipe.org/"
ADMIN_EMAIL = config.get('misc', 'admin_email')
SELF_REGISTRATION = config.getboolean('misc', 'self_registration')
SECURITY_PASSWORD_SALT = config.get('misc', 'security_password_salt')
try:
    TOKEN_VALIDITY_PERIOD = config.getint('misc', 'token_validity_period')
except Exception:
    # Fallback: fetch the raw value and coerce it to int ourselves.
    TOKEN_VALIDITY_PERIOD = int(config.get('misc', 'token_validity_period'))
if not ON_HEROKU:
    LOG_PATH = os.path.abspath(config.get('misc', 'log_path'))
else:
    # No log file path on Heroku.
    LOG_PATH = ''
# Map the configured level name to the logging module's numeric constant.
# An unknown level name raises KeyError at import time.
LOG_LEVEL = {'debug': logging.DEBUG,
             'info': logging.INFO,
             'warn': logging.WARN,
             'error': logging.ERROR,
             'fatal': logging.FATAL}[config.get('misc', 'log_level')]

# Database connection string, consumed by SQLAlchemy.
SQLALCHEMY_DATABASE_URI = config.get('database', 'database_url')

# --- crawler settings, read from the [crawler] section ---
CRAWLING_METHOD = config.get('crawler', 'crawling_method')
CRAWLER_USER_AGENT = config.get('crawler', 'user_agent')
DEFAULT_MAX_ERROR = config.getint('crawler', 'default_max_error')
# Threshold at half of the error budget.
ERROR_THRESHOLD = int(DEFAULT_MAX_ERROR / 2)
# NOTE(review): read with get() (a string), unlike the other numeric
# options — kept as-is to preserve behavior; confirm whether consumers
# expect an int here.
CRAWLER_TIMEOUT = config.get('crawler', 'timeout')
CRAWLER_RESOLV = config.getboolean('crawler', 'resolv')
try:
    FEED_REFRESH_INTERVAL = config.getint('crawler', 'feed_refresh_interval')
except Exception:  # narrowed from bare except: don't swallow SystemExit
    # Fallback: fetch the raw value and coerce it to int ourselves.
    FEED_REFRESH_INTERVAL = int(config.get('crawler', 'feed_refresh_interval'))

# --- e-mail notification settings, read from the [notification] section ---
NOTIFICATION_EMAIL = config.get('notification', 'notification_email')
NOTIFICATION_HOST = config.get('notification', 'host')
NOTIFICATION_PORT = config.getint('notification', 'port')
NOTIFICATION_TLS = config.getboolean('notification', 'tls')
NOTIFICATION_SSL = config.getboolean('notification', 'ssl')
NOTIFICATION_USERNAME = config.get('notification', 'username')
NOTIFICATION_PASSWORD = config.get('notification', 'password')
SENDGRID_API_KEY = config.get('notification', 'sendgrid_api_key')
# Not configurable here; presumably set elsewhere or unused — TODO confirm.
POSTMARK_API_KEY = ''

# Enable CSRF protection in the web forms.
CSRF_ENABLED = True
# slow database query threshold (in seconds)
DATABASE_QUERY_TIMEOUT = 0.5
bgstack15