 src/conf.py      |  4 ----
 src/web/utils.py | 28 ----------------------------
 2 files changed, 0 insertions(+), 32 deletions(-)
diff --git a/src/conf.py b/src/conf.py
index 38dbee69..5bf831a7 100644
--- a/src/conf.py
+++ b/src/conf.py
@@ -36,7 +36,6 @@ DEFAULTS = {"platform_url": "https://JARR.herokuapp.com/",
             "log_level": "info",
             "user_agent": "JARR (https://github.com/JARR-aggregator)",
             "resolve_article_url": "false",
-            "http_proxy": "",
             "secret": "",
             "enabled": "false",
             "notification_email": "jarr@no-reply.com",
@@ -84,11 +83,8 @@ NB_WORKER = config.getint('misc', 'nb_worker')
 API_LOGIN = config.get('crawler', 'api_login')
 API_PASSWD = config.get('crawler', 'api_passwd')
 
-WHOOSH_ENABLED = True
-
 SQLALCHEMY_DATABASE_URI = config.get('database', 'database_url')
 
-HTTP_PROXY = config.get('crawler', 'http_proxy')
 USER_AGENT = config.get('crawler', 'user_agent')
 RESOLVE_ARTICLE_URL = config.getboolean('crawler', 'resolve_article_url')
 
diff --git a/src/web/utils.py b/src/web/utils.py
index 9e3ad3ed..f770f597 100755
--- a/src/web/utils.py
+++ b/src/web/utils.py
@@ -241,34 +241,6 @@ def clean_url(url):
                      parsed_url.fragment
     ]).rstrip('=')
 
-def open_url(url):
-    """
-    Open an URL with the proxy and the user-agent
-    specified in the configuration file.
-    """
-    if conf.HTTP_PROXY == "":
-        proxy = {}
-    else:
-        proxy = {"http": conf.HTTP_PROXY}
-    opener = urllib.request.FancyURLopener(proxy)
-    try:
-        opener = urllib.request.build_opener()
-        opener.addheaders = [('User-agent', conf.USER_AGENT)]
-        return (True, opener.open(url))
-    except urllib.error.HTTPError as e:
-        # server couldn't fulfill the request
-        error = (url, e.code,
-                 http.server.BaseHTTPRequestHandler.responses[e.code][1])
-        return (False, error)
-    except urllib.error.URLError as e:
-        # failed to reach the server
-        if type(e.reason) == str:
-            error = (url, e.reason, e.reason)
-        else:
-            error = (url, e.reason.errno, e.reason.strerror)
-        return (False, error)
-
-
 def load_stop_words():
     """
     Load the stop words and return them in a list.
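
Note, for reference only (not part of this commit): the removed open_url() built a FancyURLopener with the proxy dict but then rebound `opener` via build_opener(), so the proxy setting never actually took effect, and FancyURLopener is deprecated in any case. If an equivalent fetch with the configured user agent were still needed, a minimal sketch using only urllib.request handlers could look like the following; fetch_url and proxy_url are illustrative names, not JARR APIs, and only conf.USER_AGENT exists in src/conf.py.

# Hypothetical sketch, not part of the commit or the JARR code base.
import urllib.error
import urllib.request


def fetch_url(url, user_agent, proxy_url=None):
    """Open `url` with a custom User-Agent and an optional HTTP(S) proxy."""
    handlers = []
    if proxy_url:  # e.g. "http://127.0.0.1:3128" (illustrative value)
        handlers.append(urllib.request.ProxyHandler(
            {"http": proxy_url, "https": proxy_url}))
    opener = urllib.request.build_opener(*handlers)
    opener.addheaders = [("User-agent", user_agent)]
    try:
        return True, opener.open(url)
    except urllib.error.HTTPError as error:
        # The server answered, but with an error status.
        return False, (url, error.code, error.reason)
    except urllib.error.URLError as error:
        # The server could not be reached at all.
        return False, (url, None, str(error.reason))

A call such as fetch_url(url, conf.USER_AGENT) would mirror the (True, response) / (False, error) return shape of the removed helper.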