author     Cédric Bonhomme <cedric@cedricbonhomme.org>   2016-02-14 11:41:59 +0100
committer  Cédric Bonhomme <cedric@cedricbonhomme.org>   2016-02-14 11:41:59 +0100
commit     f9c61cbd2a4f44a84367f163d0258f676acb1f17 (patch)
tree       ca23e3c2b88b2959d4c5f82389a8f939b8a10aee
parent     Major changes to the right panel. (diff)
download   newspipe-f9c61cbd2a4f44a84367f163d0258f676acb1f17.tar.gz
           newspipe-f9c61cbd2a4f44a84367f163d0258f676acb1f17.tar.bz2
           newspipe-f9c61cbd2a4f44a84367f163d0258f676acb1f17.zip
Removed proxy support. It is not widely used and is better set at the system level.
-rw-r--r--  src/conf.py        4
-rwxr-xr-x  src/web/utils.py  28
2 files changed, 0 insertions, 32 deletions
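The commit message argues that proxies belong at the system level rather than in conf.py. As a hedged illustration, not part of this change, Python's urllib already honours proxy settings exported in the environment, so the crawler can still work behind a proxy without any application-level option; the proxy address below is a made-up example.

# Illustrative sketch only: the proxy URL is an assumption, and in practice
# the variables would be exported once at the system level (shell profile,
# systemd unit, container environment) rather than being set in-process.
import os
import urllib.request

os.environ.setdefault("http_proxy", "http://proxy.example.org:3128")
os.environ.setdefault("https_proxy", "http://proxy.example.org:3128")

# urlopen() installs a ProxyHandler built from getproxies(), which reads the
# *_proxy environment variables, so no per-application proxy code is needed.
print(urllib.request.getproxies())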
diff --git a/src/conf.py b/src/conf.py
index 38dbee69..5bf831a7 100644
--- a/src/conf.py
+++ b/src/conf.py
@@ -36,7 +36,6 @@ DEFAULTS = {"platform_url": "https://JARR.herokuapp.com/",
"log_level": "info",
"user_agent": "JARR (https://github.com/JARR-aggregator)",
"resolve_article_url": "false",
- "http_proxy": "",
"secret": "",
"enabled": "false",
"notification_email": "jarr@no-reply.com",
@@ -84,11 +83,8 @@ NB_WORKER = config.getint('misc', 'nb_worker')
API_LOGIN = config.get('crawler', 'api_login')
API_PASSWD = config.get('crawler', 'api_passwd')
-WHOOSH_ENABLED = True
-
SQLALCHEMY_DATABASE_URI = config.get('database', 'database_url')
-HTTP_PROXY = config.get('crawler', 'http_proxy')
USER_AGENT = config.get('crawler', 'user_agent')
RESOLVE_ARTICLE_URL = config.getboolean('crawler',
                                         'resolve_article_url')
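For context, conf.py builds its settings from the DEFAULTS dict plus configparser lookups, so after this hunk a leftover http_proxy line in an existing configuration file is simply never read. A minimal sketch of that pattern, with an assumed file name and a single illustrative key, not the project's actual loader:

# Sketch of the DEFAULTS + configparser pattern touched by this hunk; the
# path "conf/conf.cfg" and the reduced DEFAULTS dict are assumptions.
from configparser import ConfigParser

DEFAULTS = {"user_agent": "JARR (https://github.com/JARR-aggregator)"}

config = ConfigParser(defaults=DEFAULTS)
config.read("conf/conf.cfg")
if not config.has_section("crawler"):
    config.add_section("crawler")

# Only keys that are looked up ever matter: with the http_proxy lookup gone,
# an http_proxy entry left in conf.cfg is silently ignored.
USER_AGENT = config.get("crawler", "user_agent")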
diff --git a/src/web/utils.py b/src/web/utils.py
index 9e3ad3ed..f770f597 100755
--- a/src/web/utils.py
+++ b/src/web/utils.py
@@ -241,34 +241,6 @@ def clean_url(url):
        parsed_url.fragment
    ]).rstrip('=')
-def open_url(url):
-    """
-    Open an URL with the proxy and the user-agent
-    specified in the configuration file.
-    """
-    if conf.HTTP_PROXY == "":
-        proxy = {}
-    else:
-        proxy = {"http": conf.HTTP_PROXY}
-    opener = urllib.request.FancyURLopener(proxy)
-    try:
-        opener = urllib.request.build_opener()
-        opener.addheaders = [('User-agent', conf.USER_AGENT)]
-        return (True, opener.open(url))
-    except urllib.error.HTTPError as e:
-        # server couldn't fulfill the request
-        error = (url, e.code,
-                 http.server.BaseHTTPRequestHandler.responses[e.code][1])
-        return (False, error)
-    except urllib.error.URLError as e:
-        # failed to reach the server
-        if type(e.reason) == str:
-            error = (url, e.reason, e.reason)
-        else:
-            error = (url, e.reason.errno, e.reason.strerror)
-        return (False, error)
-
-
def load_stop_words():
"""
Load the stop words and return them in a list.
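With open_url() removed, a caller that still wants the configured user agent can build the request directly and let urllib pick up any proxy from the environment. The following is a hedged sketch under that assumption; the helper name fetch_url, the hard-coded user agent, the timeout and the error-tuple shape are illustrative, not code from this repository.

# Hedged sketch, not from this repository: fetch a URL with a custom
# User-Agent while relying on system-level proxy settings.
import urllib.error
import urllib.request

USER_AGENT = "JARR (https://github.com/JARR-aggregator)"  # mirrors the conf.py default above


def fetch_url(url):
    """Return (True, response) on success, (False, (url, code, reason)) on failure."""
    request = urllib.request.Request(url, headers={"User-Agent": USER_AGENT})
    try:
        # urlopen() uses a ProxyHandler built from the environment by default,
        # so http_proxy/https_proxy set at the system level are honoured.
        return (True, urllib.request.urlopen(request, timeout=30))
    except urllib.error.HTTPError as e:
        # the server answered but could not fulfil the request
        return (False, (url, e.code, e.reason))
    except urllib.error.URLError as e:
        # the server could not be reached at all
        return (False, (url, None, str(e.reason)))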