path: root/source/utils.py
author     Cédric Bonhomme <kimble.mandel@gmail.com>  2013-07-24 08:53:38 +0200
committer  Cédric Bonhomme <kimble.mandel@gmail.com>  2013-07-24 08:53:38 +0200
commit     347d4830182b41f13a2b40bb76f4c731cccf598a (patch)
tree       0da9063e5eda2c4f137700bda2be3a9bcdd591d1 /source/utils.py
parent     Improved remove_feed function. (diff)
Better and safer file handling with a context manager.
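
The pattern this commit introduces is a small context manager that yields a (file, error) pair, so each caller can branch on the error instead of wrapping every open() in try/except. Below is a minimal, self-contained sketch: the opened_w_error() body mirrors the definition added in the diff, while the stop_words.txt path and the print() calls are illustrative assumptions, not part of the commit.

    from contextlib import contextmanager

    @contextmanager
    def opened_w_error(filename, mode="r"):
        # Yield (file, None) on success, or (None, error) if open() fails.
        try:
            f = open(filename, mode)
        except IOError as err:
            yield None, err
        else:
            try:
                yield f, None
            finally:
                f.close()

    # Illustrative usage (hypothetical file name):
    with opened_w_error("stop_words.txt", "r") as (f, err):
        if err:
            print("open failed:", err)
        else:
            print(f.read().split(";"))
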
Diffstat (limited to 'source/utils.py')
-rwxr-xr-x  source/utils.py  55
1 file changed, 43 insertions(+), 12 deletions(-)
diff --git a/source/utils.py b/source/utils.py
index 3f945a87..a099d52c 100755
--- a/source/utils.py
+++ b/source/utils.py
@@ -56,6 +56,7 @@ import http.server
from bs4 import BeautifulSoup
from collections import Counter
+from contextlib import contextmanager
import conf
@@ -67,6 +68,18 @@ url_finders = [ \
re.compile("'\\<((mailto:)|)[-A-Za-z0-9\\.]+@[-A-Za-z0-9\\.]+") \
]
+@contextmanager
+def opened_w_error(filename, mode="r"):
+ try:
+ f = open(filename, mode)
+ except IOError as err:
+ yield None, err
+ else:
+ try:
+ yield f, None
+ finally:
+ f.close()
+
def detect_url_errors(list_of_urls):
"""
Detect URL errors.
@@ -130,8 +143,11 @@ def load_stop_words():
stop_words = []
for stop_wods_list in stop_words_lists:
- with open(stop_wods_list, "r") as stop_wods_file:
- stop_words += stop_wods_file.read().split(";")
+ with opened_w_error(stop_wods_list, "r") as (stop_wods_file, err):
+ if err:
+ stop_words = []
+ else:
+ stop_words += stop_wods_file.read().split(";")
return stop_words
def top_words(articles, n=10, size=5):
@@ -206,8 +222,11 @@ def add_feed(feed_url):
"""
Add the URL feed_url in the file feed.lst.
"""
- with open(conf.FEED_LIST, "r") as f:
- lines = f.readlines()
+ with opened_w_error(conf.FEED_LIST, "r") as (f, err):
+ if err:
+ return False
+ else:
+ lines = f.readlines()
lines = list(map(str.strip, lines))
if feed_url in lines:
return False
@@ -221,30 +240,42 @@ def change_feed_url(old_feed_url, new_feed_url):
Change the URL of a feed given in parameter.
"""
# Replace the URL in the text file
- with open(conf.FEED_LIST, "r") as f:
- lines = f.readlines()
+ with opened_w_error(conf.FEED_LIST, "r") as (f, err):
+ if err:
+ return False
+ else:
+ lines = f.readlines()
lines = list(map(str.strip, lines))
try:
lines[lines.index(old_feed_url)] = new_feed_url
except:
return False
- with open(conf.FEED_LIST, "w") as f:
- f.write("\n".join(lines))
+ with opened_w_error(conf.FEED_LIST, "w") as (f, err):
+ if err:
+ return False
+ else:
+ f.write("\n".join(lines))
return True
def remove_feed(feed_url):
"""
Remove a feed from the file feed.lst and from the SQLite base.
"""
- with open(conf.FEED_LIST, "r") as f:
- lines = f.readlines()
+ with opened_w_error(conf.FEED_LIST, "r") as (f, err):
+ if err:
+ return False
+ else:
+ lines = f.readlines()
lines = list(map(str.strip, lines))
try:
del lines[lines.index(feed_url)]
except:
return False
- with open(conf.FEED_LIST, "w") as f:
- f.write("\n".join(lines))
+ with opened_w_error(conf.FEED_LIST, "w") as (f, err):
+ if err:
+ return False
+ else:
+ f.write("\n".join(lines))
return True
def search_feed(url):
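
A short usage sketch for the reworked feed helpers, assuming the module is imported as utils and that conf.FEED_LIST points at a feed.lst file; with this commit the helpers return False instead of raising when the feed list cannot be opened. The example URLs are placeholders.

    import utils

    # add_feed() returns False for a duplicate URL or an unreadable feed.lst.
    if not utils.add_feed("http://example.org/rss.xml"):
        print("feed not added: duplicate URL or feed.lst not readable")

    # change_feed_url() returns False for an unknown feed or an unwritable feed.lst.
    if not utils.change_feed_url("http://example.org/rss.xml",
                                 "https://example.org/rss.xml"):
        print("URL not changed: unknown feed or feed.lst not writable")
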