author     Cédric Bonhomme <cedric@cedricbonhomme.org>  2018-03-31 14:55:04 +0200
committer  GitHub <noreply@github.com>                  2018-03-31 14:55:04 +0200
commit     1873ed81705399658ce7628f1148327a5c595909 (patch)
tree       7abea01f509a5efc136dfddbf260f60d80268186 /src/lib
parent     Renew certificate. (diff)
parent     Correct spelling mistakes. (diff)
Merge pull request #37 from EdwardBetts/spelling
Correct spelling mistakes.
Diffstat (limited to 'src/lib')
-rw-r--r--  src/lib/article_utils.py  2
-rw-r--r--  src/lib/feed_utils.py     4
2 files changed, 3 insertions, 3 deletions
diff --git a/src/lib/article_utils.py b/src/lib/article_utils.py
index 49494e85..9891e29f 100644
--- a/src/lib/article_utils.py
+++ b/src/lib/article_utils.py
@@ -23,7 +23,7 @@ def extract_id(entry):
 
 
 async def construct_article(entry, feed, fields=None, fetch=True):
-    "Safe method to transorm a feedparser entry into an article"
+    "Safe method to transform a feedparser entry into an article"
     now = datetime.utcnow()
     article = {}
     def push_in_article(key, value):
diff --git a/src/lib/feed_utils.py b/src/lib/feed_utils.py
index 492391aa..4c2cee29 100644
--- a/src/lib/feed_utils.py
+++ b/src/lib/feed_utils.py
@@ -42,7 +42,7 @@ def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
            fp_parsed = feedparser.parse(response.content,
                                         request_headers=response.headers)
        except Exception:
-            logger.exception('failed to retreive that url')
+            logger.exception('failed to retrieve that url')
            fp_parsed = {'bozo': True}
    assert url is not None and fp_parsed is not None
    feed = feed or {}
@@ -74,7 +74,7 @@ def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
        try:
            response = requests.get(feed['site_link'], **requests_kwargs)
        except Exception:
-            logger.exception('failed to retreive %r', feed['site_link'])
+            logger.exception('failed to retrieve %r', feed['site_link'])
            return feed
    bs_parsed = BeautifulSoup(response.content, 'html.parser',
                              parse_only=SoupStrainer('head'))
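
For context, both corrected strings are messages passed to logger.exception inside an except block, which logs the message together with the active traceback before the code falls back to a safe value. A minimal sketch of that pattern follows; the fetch_feed helper and its names are hypothetical illustrations, not the project's actual API:

    import logging

    import feedparser
    import requests

    logger = logging.getLogger(__name__)


    def fetch_feed(url, timeout=10):
        "Fetch and parse a feed, falling back to a 'bozo' result on failure."
        try:
            response = requests.get(url, timeout=timeout)
            return feedparser.parse(response.content)
        except Exception:
            # logger.exception records the message plus the current traceback,
            # mirroring the calls touched by this diff.
            logger.exception('failed to retrieve %r', url)
            return {'bozo': True}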