diff options
-rwxr-xr-x | pyAggr3g470r.py | 26 |
-rwxr-xr-x | utils.py | 4 |
2 files changed, 27 insertions, 3 deletions
diff --git a/pyAggr3g470r.py b/pyAggr3g470r.py index eb7accce..f84a82d0 100755 --- a/pyAggr3g470r.py +++ b/pyAggr3g470r.py @@ -242,7 +242,7 @@ class Root: return html + "</div>" - def management(self, word_size=6): + def management(self, word_size=6, max_nb_articles=5): """ Management page. Allows adding and deleting feeds. Export functions of the SQLite data base @@ -277,9 +277,21 @@ class Root: html += """Database: %s.\n<br />Size: %s bytes.</p>\n""" % \ (os.path.abspath(utils.sqlite_base), os.path.getsize(utils.sqlite_base)) - # Export functions html += """<form method=get action="/fetch/">\n<input type="submit" value="Fetch all feeds"></form>\n""" html += """<form method=get action="/drop_base">\n<input type="submit" value="Delete all articles"></form>\n""" + + + html += '<form method=get action="/set_max_articles/">' + html += "For each feed only load the " + html += """<input type="number" name="max_nb_articles" value="%s" min="5" max="5000" step="1" size="2">\n""" % (max_nb_articles) + html += " last articles." + if utils.MAX_NB_ARTICLES == -1: + html += "<br />All articles are currently loaded." + else: + html += "<br />For each feed only " + str(utils.MAX_NB_ARTICLES) + " articles are currently loaded." + html += "</form>" + + # Export functions html += "<h1>Export articles</h1>\n\n" html += """<form method=get action="/export/"><select name="export_method">\n""" html += """\t<option value="export_HTML" selected='selected'>HTML</option>\n""" @@ -1071,6 +1083,16 @@ class Root: change_feed_url.exposed = True + def set_max_articles(self, max_nb_articles=0): + """ + """ + utils.MAX_NB_ARTICLES = int(max_nb_articles) + self.update() + return self.management() + + set_max_articles.exposed = True + + def delete_article(self, param): """ Delete an article. 
@@ -67,6 +67,8 @@ smtp_server = config.get('mail','smtp') username = config.get('mail','username') password = config.get('mail','password') +MAX_NB_ARTICLES = -1 + # regular expression to chech URL url_finders = [ \ re.compile("([0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}|(((news|telnet|nttp|file|http|ftp|https)://)|(www|ftp)[-A-Za-z0-9]*\\.)[-A-Za-z0-9\\.]+)(:[0-9]*)?/[-A-Za-z0-9_\\$\\.\\+\\!\\*\\(\\),;:@&=\\?/~\\#\\%]*[^]'\\.}>\\),\\\"]"), \ @@ -372,7 +374,7 @@ def load_feed(): if list_of_articles != []: list_of_articles.sort(lambda x,y: compare(y[0], x[0])) # Walk through the list of articles for the current feed. - for article in list_of_articles: + for article in list_of_articles[:MAX_NB_ARTICLES]: sha1_hash.update(article[2].encode('utf-8')) article_id = sha1_hash.hexdigest() |