aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorFrançois Schmidts <francois.schmidts@gmail.com>2016-01-11 10:16:41 +0100
committerFrançois Schmidts <francois.schmidts@gmail.com>2016-01-11 10:16:41 +0100
commit8a72e6ae786ffde8e841afbe725a19b3b874f87e (patch)
tree36d6ff44e909aa00bcb52adbe1f2fe5ede1f53f3
parentusing user agent in web crawler (diff)
downloadnewspipe-8a72e6ae786ffde8e841afbe725a19b3b874f87e.tar.gz
newspipe-8a72e6ae786ffde8e841afbe725a19b3b874f87e.tar.bz2
newspipe-8a72e6ae786ffde8e841afbe725a19b3b874f87e.zip
fixing stuff
* no more warnings when constructing feeds * using the configured user agent when constructing a feed * regrouping the logic for determining whether the parsing of a feed worked
-rw-r--r--src/crawler.py4
-rw-r--r--src/web/lib/feed_utils.py13
2 files changed, 12 insertions, 5 deletions
diff --git a/src/crawler.py b/src/crawler.py
index 22e73754..1a759945 100644
--- a/src/crawler.py
+++ b/src/crawler.py
@@ -37,7 +37,7 @@ import conf
from bootstrap import db
from web.models import User
from web.controllers import FeedController, ArticleController
-from web.lib.feed_utils import construct_feed_from
+from web.lib.feed_utils import construct_feed_from, is_parsing_ok
from web.lib.article_utils import construct_article, extract_id
logger = logging.getLogger(__name__)
@@ -87,7 +87,7 @@ async def parse_feed(user, feed):
FeedController().update({'id': feed.id}, up_feed)
return
- if parsed_feed['bozo'] == 1 and parsed_feed['entries'] == []:
+ if not is_parsing_ok(parsed_feed):
up_feed['last_error'] = str(parsed_feed['bozo_exception'])
up_feed['error_count'] = feed.error_count + 1
FeedController().update({'id': feed.id}, up_feed)
diff --git a/src/web/lib/feed_utils.py b/src/web/lib/feed_utils.py
index f3b18224..14e6b82b 100644
--- a/src/web/lib/feed_utils.py
+++ b/src/web/lib/feed_utils.py
@@ -2,19 +2,26 @@ import urllib
import logging
import requests
import feedparser
+from conf import USER_AGENT
from bs4 import BeautifulSoup, SoupStrainer
from web.lib.utils import try_keys, try_get_icon_url, rebuild_url
logger = logging.getLogger(__name__)
+logging.captureWarnings(True)
+
+
+def is_parsing_ok(parsed_feed):
+ return parsed_feed['entries'] or not parsed_feed['bozo']
def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
+ requests_kwargs = {'headers': {'User-Agent': USER_AGENT}, 'verify': False}
if url is None and fp_parsed is not None:
url = fp_parsed.get('url')
if url is not None and fp_parsed is None:
try:
- response = requests.get(url, verify=False)
+ response = requests.get(url, **requests_kwargs)
fp_parsed = feedparser.parse(response.content,
request_headers=response.headers)
except Exception:
@@ -24,7 +31,7 @@ def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
feed = feed or {}
feed_split = urllib.parse.urlsplit(url)
site_split = None
- if not fp_parsed['bozo']:
+ if is_parsing_ok(fp_parsed):
feed['link'] = url
feed['site_link'] = try_keys(fp_parsed['feed'], 'href', 'link')
feed['title'] = fp_parsed['feed'].get('title')
@@ -48,7 +55,7 @@ def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
return feed
try:
- response = requests.get(feed['site_link'], verify=False)
+ response = requests.get(feed['site_link'], **requests_kwargs)
except Exception:
logger.exception('failed to retreive %r', feed['site_link'])
return feed
bgstack15