author     cedricbonhomme <devnull@localhost>  2012-04-22 10:50:37 +0200
committer  cedricbonhomme <devnull@localhost>  2012-04-22 10:50:37 +0200
commit     b4ee6edb8a9be1f6e4ddc06bc39f8d29dd317ad0 (patch)
tree       d13d1f0a463030b3ca2ad88753719090a6d9de21
parent     Added comments. (diff)
download   newspipe-b4ee6edb8a9be1f6e4ddc06bc39f8d29dd317ad0.tar.gz
           newspipe-b4ee6edb8a9be1f6e4ddc06bc39f8d29dd317ad0.tar.bz2
           newspipe-b4ee6edb8a9be1f6e4ddc06bc39f8d29dd317ad0.zip
Minor refactoring with pylint.
-rwxr-xr-x  source/feedgetter.py     7
-rwxr-xr-x  source/pyAggr3g470r.py  22
-rwxr-xr-x  source/utils.py          6
3 files changed, 13 insertions, 22 deletions
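
The commit is a pylint-driven cleanup of the three files listed above: unused imports are removed, an old-style except clause loses its unused binding, and a few remaining references to the old in-memory feeds structure are redirected to the MongoDB layer. As a hedged illustration (assuming pylint is installed and paths are taken relative to the repository root), the warnings that prompted the cleanup could be reproduced with a short driver script:

# Hedged sketch: re-run pylint over the files touched by this commit.
# Assumes pylint is installed; the report is printed to stdout/stderr.
import subprocess

for path in ("source/feedgetter.py", "source/pyAggr3g470r.py", "source/utils.py"):
    subprocess.call(["pylint", path])
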
diff --git a/source/feedgetter.py b/source/feedgetter.py
index 14ef2edf..f713f672 100755
--- a/source/feedgetter.py
+++ b/source/feedgetter.py
@@ -26,11 +26,8 @@ __revision__ = "$Date: 2012/04/22 $"
__copyright__ = "Copyright (c) Cedric Bonhomme"
__license__ = "GPLv3"
-import os.path
-import traceback
import threading
import feedparser
-import hashlib
from BeautifulSoup import BeautifulSoup
from datetime import datetime
@@ -38,10 +35,8 @@ from datetime import datetime
import utils
import mongodb
-feeds_list = []
list_of_threads = []
-
class FeedGetter(object):
"""
This class is in charge of retrieving feeds listed in ./var/feed.lst.
@@ -123,7 +118,7 @@ class FeedGetter(object):
try:
# article description
description = article.description
- except Exception, e:
+ except Exception:
description = ""
description = str(BeautifulSoup(description))
article_title = str(BeautifulSoup(article.title))
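
The feedgetter.py hunk above is the typical pylint fix in this commit: the caught exception object e was never used, so the bare except form keeps the same fallback behaviour while silencing the warning. A minimal, self-contained sketch of the pattern, using a hypothetical Entry class that is not part of the repository:

# Minimal sketch of the exception-handling cleanup; Entry is hypothetical.
class Entry(object):
    """Stands in for a feedparser entry that may lack a description."""
    pass

entry = Entry()

# Before: "except Exception, e:" bound e without ever using it (pylint warning).
# After: identical fallback, no unused name.
try:
    description = entry.description
except Exception:
    description = ""

print(repr(description))  # prints ''
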
diff --git a/source/pyAggr3g470r.py b/source/pyAggr3g470r.py
index 9a506ffd..4b363cdb 100755
--- a/source/pyAggr3g470r.py
+++ b/source/pyAggr3g470r.py
@@ -40,7 +40,6 @@ __license__ = "GPLv3"
import os
import re
-import time
import cherrypy
import calendar
@@ -355,7 +354,7 @@ class Root:
html += """<h1>Articles containing the string <i>%s</i></h1><br />""" % (query,)
if feed_id is not None:
- for article in self.feeds[feed_id].articles.values():
+ for article in self.mongo.get_articles_from_collection(feed_id):
article_content = utils.clear_string(article.article_description)
if not article_content:
utils.clear_string(article.article_title)
@@ -1017,7 +1016,6 @@ class Root:
"""
try:
action, feed_id = param.split(':')
- feed = self.feeds[feed_id]
except:
return self.error_page("Bad URL. This feed do not exists.")
@@ -1205,8 +1203,8 @@ class Root:
"""
Delete all articles.
"""
- utils.drop_base()
- return self.management()
+ self.mongo.drop_database()
+ return self.index()
drop_base.exposed = True
@@ -1217,7 +1215,7 @@ class Root:
the appropriate function of the 'export' module.
"""
try:
- getattr(export, export_method)(self.feeds)
+ getattr(export, export_method)(self.mongo.get_all_articles())
except Exception, e:
return self.error_page(e)
return self.management()
@@ -1238,8 +1236,10 @@ class Root:
except:
return self.error_page("Bad URL.")
try:
- feed = self.feeds[feed_id]
- article = feed.articles[article_id]
+ feed_id, article_id = param.split(':')
+ feed = self.mongo.get_collection(feed_id)
+ articles = self.mongo.get_articles_from_collection(feed_id)
+ article = self.mongo.get_article(feed_id, article_id)
except:
self.error_page("This article do not exists.")
try:
@@ -1249,9 +1249,9 @@ class Root:
# directories already exists (not a problem)
pass
section = ez_epub.Section()
- section.title = article.article_title.decode('utf-8')
- section.paragraphs = [utils.clear_string(article.article_description).decode('utf-8')]
- ez_epub.makeBook(article.article_title.decode('utf-8'), [feed.feed_title.decode('utf-8')], [section], \
+ section.title = article["article_title"].decode('utf-8')
+ section.paragraphs = [utils.clear_string(article["article_description"]).decode('utf-8')]
+ ez_epub.makeBook(article["article_title"].decode('utf-8'), [feed["feed_title"].decode('utf-8')], [section], \
os.path.normpath(folder) + "article.epub", lang='en-US', cover=None)
return self.article(param)
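
The pyAggr3g470r.py hunks above replace lookups in the removed in-memory feeds dictionary with calls on self.mongo (get_collection, get_articles_from_collection, get_article, get_all_articles, drop_database) and switch article access from attributes to dictionary keys such as article["article_title"]. The diff shows only the call sites, not mongodb.py itself, so the dictionary-backed stand-in below is a hedged sketch of the interface shape those calls assume; the storage layout is an assumption, not the project's real implementation.

# Hedged, dictionary-backed stand-in for the interface used by the hunks above.
# The method names come from the diff; the storage layout is assumed.
class ArticleStoreSketch(object):
    def __init__(self):
        # feed_id -> {"feed_title": str, "articles": {article_id: article dict}}
        self._feeds = {}

    def get_collection(self, feed_id):
        return self._feeds[feed_id]

    def get_articles_from_collection(self, feed_id):
        return list(self._feeds[feed_id]["articles"].values())

    def get_article(self, feed_id, article_id):
        return self._feeds[feed_id]["articles"][article_id]

    def get_all_articles(self):
        return [article
                for feed in self._feeds.values()
                for article in feed["articles"].values()]

    def drop_database(self):
        self._feeds.clear()

Read against this sketch, the epub hunk simply fetches one feed document and one article document and indexes them by key rather than by attribute.
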
diff --git a/source/utils.py b/source/utils.py
index d6365073..0928160a 100755
--- a/source/utils.py
+++ b/source/utils.py
@@ -51,11 +51,7 @@ import BaseHTTPServer
from BeautifulSoup import BeautifulSoup
from datetime import datetime
-from string import punctuation
from collections import Counter
-from collections import OrderedDict
-
-from StringIO import StringIO
import os
import ConfigParser
@@ -116,7 +112,7 @@ def detect_url_errors(list_of_urls):
urllib2.urlopen(req)
except urllib2.HTTPError, e:
# server couldn't fulfill the request
- errors.append((url, e.code, \
+ errors.append((url, e.code, \
BaseHTTPServer.BaseHTTPRequestHandler.responses[e.code][1]))
except urllib2.URLError, e:
# failed to reach the server
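
The utils.py diff removes unused imports and makes an apparently whitespace-only change inside detect_url_errors, but its context lines show the URL-checking pattern: fetch each URL with urllib2 and record failures together with the human-readable status text from BaseHTTPServer. A hedged, self-contained Python 2 sketch of that pattern, simplified from the context above (the real detect_url_errors may differ beyond what the diff shows):

# Hedged Python 2 sketch of the URL-checking pattern visible in the hunk above.
import urllib2
import BaseHTTPServer

def check_urls(list_of_urls):
    """Return (url, code, message) tuples for URLs that could not be fetched."""
    errors = []
    for url in list_of_urls:
        req = urllib2.Request(url)
        try:
            urllib2.urlopen(req)
        except urllib2.HTTPError, e:
            # the server answered, but with an HTTP error status
            errors.append((url, e.code,
                    BaseHTTPServer.BaseHTTPRequestHandler.responses[e.code][1]))
        except urllib2.URLError, e:
            # the server could not be reached at all
            errors.append((url, None, str(e.reason)))
    return errors
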