#! /usr/bin/env python
#-*- coding: utf-8 -*-

import os
import hashlib

import utils

htmlheader = '<!DOCTYPE html>\n' + \
        '<html><head>' + \
        '\n\t<title>pyAggr3g470r - News aggregator</title>\n' + \
        '\t<link rel="stylesheet" type="text/css" href="/css/style.css" />' + \
        '\n\t<meta charset="utf-8" />\n' + \
        '</head>\n\t<body>\n' + \
        '\n'

htmlfooter = '<hr />\n' + \
        '<p>This software is under GPLv3 license. You are welcome to copy, modify or' + \
        ' redistribute the source code according to the' + \
        ' <a href="http://www.gnu.org/licenses/gpl-3.0.txt">GPLv3</a> license.</p>\n' + \
        '</body>\n' + \
        '</html>\n'


def export_webzine(feeds):
    """
    Export the feeds as a static HTML webzine (index page).
    """
    index = htmlheader
    index += "\n\n"
    index += htmlfooter
    with open(utils.path + "/var/export/webzine/" + "index.html", "w") as f:
        f.write(index)


def exports(feeds, export_method):
    """
    Export all articles of all feeds with the given export method
    (HTML, DokuWiki or raw text).
    """
    for feed in feeds.values():
        # create a folder for each stream
        folder = utils.path + "/var/export/" + \
                utils.normalize_filename(feed.feed_title.strip().replace(':', '').lower())
        try:
            os.makedirs(folder)
        except OSError:
            # directory already exists (not a problem)
            pass

        for article in feed.articles.values():
            name = article.article_date.strip().replace(' ', '_')

            # Export all articles in HTML format
            if export_method == "export_HTML":
                name = os.path.normpath(folder + "/" + name + ".html")
                content = htmlheader
                content += '\n<div>\n'
                content += """<h1><a href="%s">%s</a></h1>
<br />
""" % \
                        (article.article_link, article.article_title)
                content += article.article_description
                content += "\n</div>\n"
                content += htmlfooter

            # Export for dokuwiki
            # example: http://wiki.cedricbonhomme.org/doku.php/news-archives
            elif export_method == "export_dokuwiki":
                name = os.path.normpath(folder + "/" + name.replace(':', '-') + ".txt")
                content = ""
                content += '\n<html>\n'
                content += """<h1><a href="%s">%s</a></h1>
<br />
""" % \
                        (article.article_link, article.article_title)
                content += article.article_description
                content += '\n</html>\nGenerated with pyAggr3g470r\n'

            # Export all articles in raw text
            elif export_method == "export_TXT":
                content = "Title: " + article.article_title + "\n\n\n"
                content += utils.clear_string(article.article_description)
                name = os.path.normpath(folder + "/" + name + ".txt")

            with open(name, "w") as f:
                f.write(content)
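

# Hypothetical usage sketch (not part of the original module): the exact shape
# of `feeds` is assumed here. Elsewhere in pyAggr3g470r a feed object exposes
# `feed_title` and an `articles` mapping of article objects providing
# `article_date`, `article_title`, `article_link` and `article_description`.
#
#     import export
#     export.exports(feeds, "export_HTML")      # one HTML page per article
#     export.exports(feeds, "export_dokuwiki")  # DokuWiki-ready text pages
#     export.exports(feeds, "export_TXT")       # plain-text dumps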