author     François Schmidts <francois.schmidts@gmail.com>   2015-07-05 17:41:06 +0200
committer  François Schmidts <francois.schmidts@gmail.com>   2015-07-06 09:04:58 +0200
commit     c1551acb30513f96d0053b96e240da7ab68833d2 (patch)
tree       9b7beac4a521edf095d6c352cd2d0801428b521b /pyaggr3g470r
parent     constructing feed from normal url also (diff)
download   newspipe-c1551acb30513f96d0053b96e240da7ab68833d2.tar.gz
           newspipe-c1551acb30513f96d0053b96e240da7ab68833d2.tar.bz2
           newspipe-c1551acb30513f96d0053b96e240da7ab68833d2.zip
making bookmarklet work for any url
Diffstat (limited to 'pyaggr3g470r')
-rw-r--r--  pyaggr3g470r/lib/utils.py   21
-rw-r--r--  pyaggr3g470r/views/feed.py   5
2 files changed, 15 insertions, 11 deletions
diff --git a/pyaggr3g470r/lib/utils.py b/pyaggr3g470r/lib/utils.py
index 6d6725c8..a4f2e043 100644
--- a/pyaggr3g470r/lib/utils.py
+++ b/pyaggr3g470r/lib/utils.py
@@ -1,9 +1,12 @@
 import types
 import urllib
+import logging
 
 import requests
 import feedparser
 from bs4 import BeautifulSoup, SoupStrainer
 
+logger = logging.getLogger(__name__)
+
 def default_handler(obj):
     """JSON handler for default query formatting"""
@@ -48,8 +51,13 @@ def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
     if url is None and fp_parsed is not None:
         url = fp_parsed.get('url')
     if url is not None and fp_parsed is None:
-        response = requests.get(url, verify=False)
-        fp_parsed = feedparser.parse(response.content)
+        try:
+            response = requests.get(url, verify=False)
+            fp_parsed = feedparser.parse(response.content,
+                                         request_headers=response.headers)
+        except Exception:
+            logger.exception('failed to retreive that url')
+            fp_parsed = {'bozo': True}
     assert url is not None and fp_parsed is not None
     feed = feed or {}
     feed_split = urllib.parse.urlsplit(url)
@@ -106,17 +114,14 @@ def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
             if feed['icon'] is not None:
                 break
 
-        if feed['icon'] is None:
+        if feed.get('icon') is None:
             feed['icon'] = try_splits('/favicon.ico', site_split, feed_split)
-        if feed['icon'] is None:
+        if 'icon' in feed and feed['icon'] is None:
             del feed['icon']
 
         if not feed.get('link'):
             alternate = bs_parsed.find_all(check_keys(rel=['alternate'],
                                            type=['application/rss+xml']))
-            if len(alternate) == 1:
-                feed['link'] = alternate[0].attrs['href']
-            elif len(alternate) > 1:
+            if len(alternate) >= 1:
                 feed['link'] = alternate[0].attrs['href']
-                feed['other_link'] = [al.attrs['href'] for al in alternate[1:]]
     return feed
diff --git a/pyaggr3g470r/views/feed.py b/pyaggr3g470r/views/feed.py
index d31aa212..224e27fb 100644
--- a/pyaggr3g470r/views/feed.py
+++ b/pyaggr3g470r/views/feed.py
@@ -1,6 +1,6 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -
-
+import logging
 from datetime import datetime
 from sqlalchemy import desc
 from werkzeug.exceptions import BadRequest
@@ -99,8 +99,7 @@ def bookmarklet():
     if feed_exists:
         flash(gettext("Couldn't add feed: feed already exists."),
               "warning")
-        return redirect(url_for('feed.form',
-                                feed_id=existing_feeds[0].id))
+        return redirect(url_for('feed.form', feed_id=feed_exists[0].id))
 
     feed = feed_contr.create(**construct_feed_from(url))
     flash(gettext('Feed was successfully created.'), 'success')
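
The two files complement each other: in lib/utils.py, construct_feed_from() now tolerates any URL the bookmarklet sends it, because a failing requests.get() is caught, logged, and replaced by a bozo feedparser result, while in views/feed.py the already-exists redirect uses feed_exists[0].id instead of the apparently stale existing_feeds name. The sketch below is not part of the commit; it only illustrates, assuming the pyaggr3g470r package is importable and using made-up URLs, how the patched helper can be called for an arbitrary address:

# Illustrative sketch only -- hypothetical URLs, assumes pyaggr3g470r is
# on the import path.
from pyaggr3g470r.lib.utils import construct_feed_from

# Reachable page: the helper fetches it, hands the body plus the HTTP
# headers to feedparser, and fills in 'link'/'icon' from the HTML when
# the page exposes them.
feed = construct_feed_from(url='http://example.com/blog/')

# Unreachable address: requests.get() raises, the new except branch logs
# the failure and substitutes {'bozo': True}, so the caller gets a feed
# dict back instead of an unhandled exception in the bookmarklet view.
# query_site=False keeps the example within the code shown in the diff.
broken = construct_feed_from(url='http://unreachable.invalid/',
                             query_site=False)

print(feed.get('link'), broken.get('link'))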