author     Edward Betts <edward@4angle.com>   2018-03-31 13:39:26 +0100
committer  Edward Betts <edward@4angle.com>   2018-03-31 13:39:26 +0100
commit     877eaa155fef72102a5bd10302ad958f487260f3 (patch)
tree       7abea01f509a5efc136dfddbf260f60d80268186 /src/lib
parent     Renew certificate. (diff)
download   newspipe-877eaa155fef72102a5bd10302ad958f487260f3.tar.gz
           newspipe-877eaa155fef72102a5bd10302ad958f487260f3.tar.bz2
           newspipe-877eaa155fef72102a5bd10302ad958f487260f3.zip
Correct spelling mistakes.
Diffstat (limited to 'src/lib')
-rw-r--r--   src/lib/article_utils.py   2
-rw-r--r--   src/lib/feed_utils.py      4
2 files changed, 3 insertions, 3 deletions
diff --git a/src/lib/article_utils.py b/src/lib/article_utils.py
index 49494e85..9891e29f 100644
--- a/src/lib/article_utils.py
+++ b/src/lib/article_utils.py
@@ -23,7 +23,7 @@ def extract_id(entry):
 async def construct_article(entry, feed, fields=None, fetch=True):
-    "Safe method to transorm a feedparser entry into an article"
+    "Safe method to transform a feedparser entry into an article"
     now = datetime.utcnow()
     article = {}
     def push_in_article(key, value):
diff --git a/src/lib/feed_utils.py b/src/lib/feed_utils.py
index 492391aa..4c2cee29 100644
--- a/src/lib/feed_utils.py
+++ b/src/lib/feed_utils.py
@@ -42,7 +42,7 @@ def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
             fp_parsed = feedparser.parse(response.content,
                                          request_headers=response.headers)
         except Exception:
-            logger.exception('failed to retreive that url')
+            logger.exception('failed to retrieve that url')
             fp_parsed = {'bozo': True}
     assert url is not None and fp_parsed is not None
     feed = feed or {}
@@ -74,7 +74,7 @@ def construct_feed_from(url=None, fp_parsed=None, feed=None, query_site=True):
         try:
             response = requests.get(feed['site_link'], **requests_kwargs)
         except Exception:
-            logger.exception('failed to retreive %r', feed['site_link'])
+            logger.exception('failed to retrieve %r', feed['site_link'])
             return feed
         bs_parsed = BeautifulSoup(response.content, 'html.parser',
                                   parse_only=SoupStrainer('head'))
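
For context, both corrected log messages live in the same fetch-and-fallback pattern: the feed is retrieved with requests, handed to feedparser, and on any failure the exception is logged and a 'bozo' stub is returned. A minimal, self-contained sketch of that pattern follows; parse_feed and the bare requests_kwargs are illustrative names for this sketch, not newspipe's actual API.

# Illustrative sketch of the fetch-and-fallback pattern touched by the hunks
# above. parse_feed and requests_kwargs are hypothetical; the real code builds
# its request arguments from application configuration.
import logging

import feedparser
import requests

logger = logging.getLogger(__name__)
requests_kwargs = {'timeout': 10}  # assumed: placeholder request options


def parse_feed(url):
    "Fetch a feed URL; on failure, log it and return a 'bozo' stub."
    try:
        response = requests.get(url, **requests_kwargs)
        fp_parsed = feedparser.parse(response.content,
                                     request_headers=response.headers)
    except Exception:
        logger.exception('failed to retrieve %r', url)
        fp_parsed = {'bozo': True}
    return fp_parsed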