author     cedricbonhomme <devnull@localhost>   2010-07-03 12:03:17 +0200
committer  cedricbonhomme <devnull@localhost>   2010-07-03 12:03:17 +0200
commit     9eef4f7a81f06cba4ea008ff79a2bc5b6303b1f1 (patch)
tree       f25dd31bc148f5d972675383195fc71e7183ad6c
parent     Added link and title attribute to logo image. (diff)
download   newspipe-9eef4f7a81f06cba4ea008ff79a2bc5b6303b1f1.tar.gz
           newspipe-9eef4f7a81f06cba4ea008ff79a2bc5b6303b1f1.tar.bz2
           newspipe-9eef4f7a81f06cba4ea008ff79a2bc5b6303b1f1.zip
Improvements, exception management.
-rw-r--r--   README             1
-rwxr-xr-x   pyAggr3g470r.py    7
-rwxr-xr-x   utils.py          10
3 files changed, 14 insertions, 4 deletions
diff --git a/README b/README
index 6cc28c13..fcaf8b01 100644
--- a/README
+++ b/README
@@ -19,6 +19,7 @@ based on CherryPy. Articles are stored in a SQLite base.
* Python (tested with 2.4 to 2.7)
* CherryPy (version 3 and up)
* sqlite
+* BeautifulSoup
=== Optional module ===
diff --git a/pyAggr3g470r.py b/pyAggr3g470r.py
index 0d8c6874..66379d4b 100755
--- a/pyAggr3g470r.py
+++ b/pyAggr3g470r.py
@@ -87,6 +87,9 @@ class Root:
html += """<a href="/list_notification"><img src="/css/img/email.png" title="Active e-mail notifications (%s)" /></a>\n""" % \
(len([feed for feed in self.feeds.values() if feed[6] == "1"]),)
+ html += """<a href="/unread/All">Unread article(s): %s</a>\n""" % \
+ (sum([feed[1] for feed in self.feeds.values()]),)
+
for rss_feed_id in self.articles.keys():
html += """<h2><a name="%s"><a href="%s" rel="noreferrer"
target="_blank">%s</a></a>
@@ -760,8 +763,10 @@ class Root:
html += """<div class="left inner">"""
# search the feed in the HTML page with BeautifulSoup
feed_url = utils.search_feed(url)
+ if feed_url is None:
+ self.error_page("Impossible to find a feed at this URL.")
# if a feed exists
- if feed_url is not None:
+ else:
result = utils.add_feed(feed_url)
# if the feed is not already in the file feed.lst
if result is False:
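
For readability, here is roughly what the feed-adding block in pyAggr3g470r.py looks like once this hunk is applied; indentation is approximate, and error_page() and add_feed() are existing helpers whose bodies are not part of this patch:

    html += """<div class="left inner">"""
    # search the feed in the HTML page with BeautifulSoup
    feed_url = utils.search_feed(url)
    if feed_url is None:
        # feed discovery failed: render the error page instead of passing None on
        self.error_page("Impossible to find a feed at this URL.")
    else:
        result = utils.add_feed(feed_url)
        # ... handling of 'result' (feed already in feed.lst or not) is unchanged
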
diff --git a/utils.py b/utils.py
index e027f1dd..b3c464bb 100755
--- a/utils.py
+++ b/utils.py
@@ -212,7 +212,7 @@ def remove_feed(feed_url):
if feed_url not in line:
feeds.append(line.replace("\n", ""))
with open("./var/feed.lst", "w") as f:
- f.write("\n".join(feeds))
+ f.write("\n".join(feeds) + "\n")
# Remove articles from this feed from the SQLite base.
try:
conn = sqlite3.connect(sqlite_base, isolation_level = None)
@@ -228,8 +228,12 @@ def search_feed(url):
"""
Search a feed in a HTML page.
"""
- page = urllib2.urlopen(url)
- soup = BeautifulSoup(page)
+ soup = None
+ try:
+ page = urllib2.urlopen(url)
+ soup = BeautifulSoup(page)
+ except:
+ return None
feed_links = soup('link', type='application/atom+xml')
feed_links.extend(soup('link', type='application/rss+xml'))
for feed_link in feed_links:
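
Two notes on the utils.py hunks above, with hedged sketches in the project's Python 2 dialect.

The extra "\n" in remove_feed() keeps ./var/feed.lst newline-terminated; without it, a later append would glue the next URL onto the last line. A minimal, self-contained illustration (the path and URLs below are placeholders, and it assumes feeds are appended one per line, as add_feed() appears to do):

    feeds = ["http://a.example/rss", "http://b.example/rss"]
    with open("/tmp/feed.lst", "w") as f:
        f.write("\n".join(feeds))            # old behaviour: no final newline
    with open("/tmp/feed.lst", "a") as f:
        f.write("http://c.example/rss\n")    # gets glued onto the b.example line
    # With the patched write, "\n".join(feeds) + "\n", every URL stays on its own line.

search_feed() now returns None instead of letting urlopen()/BeautifulSoup errors propagate to the caller. The patch uses a bare except; a slightly narrower variant could look like the sketch below (catching urllib2.URLError and ValueError is an assumption, not part of this commit, and the feed-discovery tail is simplified):

    import urllib2
    from BeautifulSoup import BeautifulSoup

    def search_feed(url):
        """Return the first feed URL advertised by the page, or None on failure."""
        try:
            page = urllib2.urlopen(url)            # network and malformed-URL errors
            soup = BeautifulSoup(page)             # parse errors
        except (urllib2.URLError, ValueError):     # narrower than a bare except
            return None
        feed_links = soup('link', type='application/atom+xml')
        feed_links.extend(soup('link', type='application/rss+xml'))
        if feed_links:
            # 'href' holds the feed address; relative URLs are not resolved here
            return feed_links[0]['href']
        return None
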