path: root/feedgetter.py
blob: 631b63743d633d15f430c6f6f1b1703ecc2afd89 (plain)
#! /usr/local/bin/python
#-*- coding: utf-8 -*-

from __future__ import with_statement

__author__ = "Cedric Bonhomme"
__version__ = "$Revision: 0.6 $"
__date__ = "$Date: 2010/02/05 $"
__copyright__ = "Copyright (c) 2010 Cedric Bonhomme"
__license__ = "GPLv3"
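
# Usage sketch (assuming './var/feed.lst' lists one feed URL per line and the
# './var' directory is writable):
#
#   $ python feedgetter.py
#
# Each article is then stored in the 'rss_feed' table of './var/feed.db'.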

import re
import sqlite3
import hashlib
import threading
import feedparser

from datetime import datetime
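
# Patterns used to recognise a URL on each line of './var/feed.lst'; the first
# pattern that matches a line gives the feed URL (see retrieve_feed below).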

url_finders = [ \
    re.compile("([0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}|(((news|telnet|nttp|file|http|ftp|https)://)|(www|ftp)[-A-Za-z0-9]*\\.)[-A-Za-z0-9\\.]+)(:[0-9]*)?/[-A-Za-z0-9_\\$\\.\\+\\!\\*\\(\\),;:@&=\\?/~\\#\\%]*[^]'\\.}>\\),\\\"]"), \
    re.compile("([0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}|(((news|telnet|nttp|file|http|ftp|https)://)|(www|ftp)[-A-Za-z0-9]*\\.)[-A-Za-z0-9\\.]+)(:[0-9]*)?"), \
    re.compile("(~/|/|\\./)([-A-Za-z0-9_\\$\\.\\+\\!\\*\\(\\),;:@&=\\?/~\\#\\%]|\\\\)+"), \
    re.compile("'\\<((mailto:)|)[-A-Za-z0-9\\.]+@[-A-Za-z0-9\\.]+"), \
]

feeds_list = []
list_of_threads = []


class FeedGetter(object):
    """
    Fetch the RSS feeds listed in './var/feed.lst' and store their articles
    in the SQLite database './var/feed.db'.
    """
    def __init__(self):
        """
        Initialize the database and the variables.
        """
        # Create the database if it does not exist.
        sqlite3.register_adapter(str, lambda s : s.decode('utf-8'))
        self.conn = sqlite3.connect("./var/feed.db", isolation_level = None)
        self.c = self.conn.cursor()
        self.c.execute('''create table if not exists rss_feed
                    (article_date text, article_title text, \
                    article_link text PRIMARY KEY, article_description text, \
                    feed_title text, feed_site_link text, \
                    article_readed text)''')
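        # article_link is the PRIMARY KEY: inserting an article that is already
        # in the table raises sqlite3.IntegrityError (ignored in add_into_sqlite).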
        self.conn.commit()
        self.c.close()

        # mutex to protect access to the SQLite database
        self.locker = threading.Lock()

    def retrieve_feed(self):
        """
        Parse the file './var/feed.lst' and launch a thread for each RSS feed.
        """
        with open("./var/feed.lst") as f:
            for a_feed in f:
                # test whether the URL is well-formed
                for url_regexp in url_finders:
                    if url_regexp.match(a_feed):
                        the_good_url = url_regexp.match(a_feed).group(0).replace("\n", "")
                        try:
                            # launch a new thread for the RSS feed
                            thread = threading.Thread(None, self.process, \
                                                None, (the_good_url,))
                            thread.start()
                            list_of_threads.append(thread)
                        except Exception:
                            # the thread could not be started; skip this feed
                            pass
                        break

        # wait until all threads are done
        for th in list_of_threads:
            th.join()

    def process(self, the_good_url):
        """Request the URL

        Executed in a thread.
        SQLite objects created in a thread can only be used in that same thread !
        """
        # Protect this section with the mutex.
        self.locker.acquire()

        self.conn = sqlite3.connect("./var/feed.db", isolation_level = None)
        self.c = self.conn.cursor()

        # Add the articles to the database.
        self.add_into_sqlite(feedparser.parse(the_good_url))

        self.conn.commit()
        self.c.close()

        # Release the mutex.
        self.locker.release()

    def add_into_sqlite(self, a_feed):
        """
        Add the articles of the feed 'a_feed' to the SQLite database.
        """
        for article in a_feed['entries']:
            try:
                description = article.description.encode('utf-8')
            except Exception:
                description = ""

            sha256_hash = hashlib.sha256()
            sha256_hash.update(article.link.encode('utf-8'))
            article_id = sha256_hash.hexdigest()
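            # Note: article_id (SHA-256 of the link) is computed here but not
            # stored; the article link itself serves as the PRIMARY KEY.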

            try:
                self.c.execute('insert into rss_feed values (?,?,?,?,?,?,?)', (\
                        datetime(*article.updated_parsed[:6]), \
                        article.title.encode('utf-8'), \
                        article.link.encode('utf-8'), \
                        description, \
                        a_feed.feed.title.encode('utf-8'), \
                        a_feed.feed.link.encode('utf-8'), \
                        "0"))
            except sqlite3.IntegrityError:
                pass


if __name__ == "__main__":
    # Entry point when run as a script
    feed_getter = FeedGetter()
    feed_getter.retrieve_feed()
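
# A minimal sketch of how the stored articles could be read back (assumption:
# the database has been populated by a previous run of this script):
#
#   import sqlite3
#   conn = sqlite3.connect("./var/feed.db")
#   for row in conn.execute("select article_date, article_title from rss_feed"):
#       print row
#   conn.close()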