author      cedricbonhomme <devnull@localhost>  2012-03-19 23:08:54 +0100
committer   cedricbonhomme <devnull@localhost>  2012-03-19 23:08:54 +0100
commit      a6dcc1081ecba7f3edd3cabd4a226bdafa6c594f (patch)
tree        1d819694a83aadefb8c9ceefe8bb5adcee18c98c
parent      Added script to convert the SQLite database to a MongoDB database for the new... (diff)
download    newspipe-a6dcc1081ecba7f3edd3cabd4a226bdafa6c594f.tar.gz
            newspipe-a6dcc1081ecba7f3edd3cabd4a226bdafa6c594f.tar.bz2
            newspipe-a6dcc1081ecba7f3edd3cabd4a226bdafa6c594f.zip
Removed the now useless function to load articles from a SQLite database.
-rwxr-xr-x  utils.py  94
1 files changed, 1 insertions, 93 deletions
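The function removed below read every feed and its articles out of SQLite and built the in-memory Feed/Article objects. Given the parent commit's SQLite-to-MongoDB conversion script, the equivalent lookup presumably moves to pymongo. A minimal, hypothetical sketch of such a replacement (the collection names "feeds" and "articles" and the field names are assumptions, not the project's actual schema):

    from pymongo import MongoClient

    def load_feeds_from_mongo(uri="mongodb://localhost:27017/", db_name="newspipe"):
        """Return each feed document together with its articles, newest first."""
        client = MongoClient(uri)
        db = client[db_name]
        feeds = {}
        for feed in db.feeds.find():
            # Articles are assumed to reference their feed by the feed link.
            articles = list(db.articles.find({"feed_link": feed["feed_link"]})
                                       .sort("article_date", -1))
            feeds[feed["feed_link"]] = (feed, articles)
        return feeds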
diff --git a/utils.py b/utils.py
index 2c0dc0d8..8a19ea75 100755
--- a/utils.py
+++ b/utils.py
@@ -350,96 +350,4 @@ def search_feed(url):
if url not in feed_link['href']:
return urlparse.urljoin(url, feed_link['href'])
return feed_link['href']
- return None
-
-def load_feed():
- """
- Load feeds and articles in a dictionary.
- """
- LOCKER.acquire()
- list_of_feeds = []
- list_of_articles = []
- try:
- conn = sqlite3.connect(sqlite_base, isolation_level = None)
- c = conn.cursor()
- list_of_feeds = c.execute("SELECT * FROM feeds").fetchall()
- except:
- pass
-
- nb_articles = 0
- nb_unread_articles = 0
- nb_mail_notifications = 0
- nb_favorites = 0
-
- # Contains the list of Feed object.
- feeds = OrderedDict()
-
- if list_of_feeds != []:
- # Case-insensitive sorting
- tupleList = [(x[0].lower(), x) for x in list_of_feeds]
- tupleList.sort(key=operator.itemgetter(0))
-
- # Walk through the list of feeds
- for feed in [x[1] for x in tupleList]:
- try:
- list_of_articles = c.execute(\
- "SELECT * FROM articles WHERE feed_link='" + \
- feed[2] + "'").fetchall()
- except:
- LOCKER.release()
- continue
- sha1_hash = hashlib.sha1()
- sha1_hash.update(feed[2].encode('utf-8'))
- feed_id = sha1_hash.hexdigest()
-
- # Current Feed object
- feed_object = articles.Feed()
- feed_object.feed_id = feed_id.encode('utf-8')
- feed_object.feed_title = feed[0].encode('utf-8')
- feed_object.feed_image = feed[3].encode('utf-8')
- feed_object.feed_link = feed[2].encode('utf-8')
- feed_object.feed_site_link = feed[1].encode('utf-8')
- feed_object.mail = feed[4]
-
- if list_of_articles != []:
- list_of_articles.sort(lambda x,y: compare(y[0], x[0]))
- if MAX_NB_ARTICLES != -1:
- list_of_articles = list_of_articles[:MAX_NB_ARTICLES]
- # Walk through the list of articles for the current feed.
- for article in list_of_articles:
- sha1_hash = hashlib.sha1()
- sha1_hash.update(article[2].encode('utf-8'))
- article_id = sha1_hash.hexdigest()
-
- # Current Article object
- article_object = articles.Article()
- article_object.article_id = article_id.encode('utf-8')
- article_object.article_date = article[0].encode('utf-8')
- article_object.article_title = unescape(article[1]).encode('utf-8')
- article_object.article_link = article[2].encode('utf-8')
- article_object.article_description = unescape(article[3]).encode('utf-8')
- article_object.article_readed = article[4]
- article_object.like = article[6]
-
- feed_object.articles[article_id] = article_object
-
- # update the number of favorites articles
- nb_favorites = nb_favorites + int(article[6])
-
-
- # informations about a feed
- feed_object.nb_articles = len(feed_object.articles)
- feed_object.nb_unread_articles = len([article for article in feed_object.articles.values() \
- if article.article_readed=="0"])
-
- feeds[feed_id] = feed_object
-
- nb_articles += feed_object.nb_articles
- nb_unread_articles += feed_object.nb_unread_articles
- nb_mail_notifications += int(feed_object.mail)
-
- c.close()
- LOCKER.release()
- return (feeds, nb_articles, nb_unread_articles, nb_favorites, nb_mail_notifications)
- LOCKER.release()
- return (feeds, nb_articles, nb_unread_articles, nb_favorites, nb_mail_notifications)
+ return None
\ No newline at end of file