diff options
-rwxr-xr-x  source/feedgetter.py |  2 +-
-rwxr-xr-x  source/utils.py      | 15 ++++++++++++---
2 files changed, 13 insertions(+), 4 deletions(-)
diff --git a/source/feedgetter.py b/source/feedgetter.py
index a2cdb752..a27a068c 100755
--- a/source/feedgetter.py
+++ b/source/feedgetter.py
@@ -69,7 +69,7 @@ class FeedGetter(object):
         if conf.HTTP_PROXY == "":
             self.proxy = urllib.request.ProxyHandler({})
         else:
-            self.proxy = urllib.request.ProxyHandler({"http":conf.HTTP_PROXY})
+            self.proxy = urllib.request.ProxyHandler({"http" : conf.HTTP_PROXY})
         feedparser.USER_AGENT = conf.USER_AGENT
 
     def retrieve_feed(self, feed_url=None, feed_original=None):
diff --git a/source/utils.py b/source/utils.py
index aaaa558c..173b08cd 100755
--- a/source/utils.py
+++ b/source/utils.py
@@ -86,17 +86,26 @@ def detect_url_errors(list_of_urls):
     Return a list of error(s).
     """
     errors = []
+    if conf.HTTP_PROXY == "":
+        proxy = {}
+    else:
+        proxy = {"http" : conf.HTTP_PROXY}
+    opener = urllib.request.FancyURLopener(proxy)
     for url in list_of_urls:
-        req = urllib.request.Request(url)
         try:
-            urllib.request.urlopen(req)
+            opener = urllib.request.build_opener()
+            opener.addheaders = [('User-agent', conf.USER_AGENT)]
+            opener.open(url)
         except urllib.error.HTTPError as e:
             # server couldn't fulfill the request
             errors.append((url, e.code, \
                     http.server.BaseHTTPRequestHandler.responses[e.code][1]))
         except urllib.error.URLError as e:
             # failed to reach the server
-            errors.append((url, e.reason.errno ,e.reason.strerror))
+            if type(e.reason) == str:
+                errors.append((url, e.reason, e.reason))
+            else:
+                errors.append((url, e.reason.errno, e.reason.strerror))
     return errors
 
 def generate_qr_code(article):