1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
#! /usr/local/bin/python
#-*- coding: utf-8 -*-
from __future__ import with_statement
__author__ = "Cedric Bonhomme"
__version__ = "$Revision: 0.8 $"
__date__ = "$Date: 2010/03/01 $"
__copyright__ = "Copyright (c) 2010 Cedric Bonhomme"
__license__ = "GPLv3"
import re
import sqlite3
import threading
import feedparser
from datetime import datetime
import utils
# Regular expressions used to recognize a well-formed URL on a line of the
# feed list.  Written as raw strings so the regex backslashes stay readable;
# the patterns are byte-identical to their previous escaped-string forms.
url_finders = [
    # Absolute URL (IP or host) with a path component.
    re.compile(r"([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}|(((news|telnet|nttp|file|http|ftp|https)://)|(www|ftp)[-A-Za-z0-9]*\.)[-A-Za-z0-9\.]+)(:[0-9]*)?/[-A-Za-z0-9_\$\.\+\!\*\(\),;:@&=\?/~\#\%]*[^]'\.}>\),\"]"),
    # Absolute URL (IP or host) with optional port, no path.
    re.compile(r"([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}|(((news|telnet|nttp|file|http|ftp|https)://)|(www|ftp)[-A-Za-z0-9]*\.)[-A-Za-z0-9\.]+)(:[0-9]*)?"),
    # Filesystem path: home-rooted, absolute, or relative.
    re.compile(r"(~/|/|\./)([-A-Za-z0-9_\$\.\+\!\*\(\),;:@&=\?/~\#\%]|\\)+"),
    # 'mailto:'-prefixed or bare e-mail address.
    re.compile(r"'\<((mailto:)|)[-A-Za-z0-9\.]+@[-A-Za-z0-9\.]+"),
]

# Module-level shared state.  list_of_threads collects the worker threads
# launched by FeedGetter.retrieve_feed() so they can be joined; feeds_list
# is unused in this part of the file (presumably filled elsewhere -- verify).
feeds_list = []
list_of_threads = []
class FeedGetter(object):
    """
    Fetch the RSS feeds listed in './var/feed.lst' and store their
    articles in the SQLite database './var/feed.db'.
    """
    def __init__(self):
        """
        Initializes the base and variables.
        """
        # Create the base if not exists.
        utils.create_base()
        # Mutex to protect the SQLite base: SQLite objects created in a
        # thread can only be used in that same thread, so every database
        # access is serialized through this lock.
        self.locker = threading.Lock()

    def retrieve_feed(self):
        """
        Parse the file './var/feed.lst' and launch a thread for each RSS feed.
        """
        with open("./var/feed.lst") as f:
            for a_feed in f:
                # Test if the line holds a well-formed URL.
                for url_regexp in url_finders:
                    match = url_regexp.match(a_feed)
                    if match is None:
                        continue
                    the_good_url = match.group(0).replace("\n", "")
                    try:
                        # Launch a new thread for the RSS feed.
                        thread = threading.Thread(None, self.process,
                                                  None, (the_good_url,))
                        thread.start()
                        list_of_threads.append(thread)
                    except Exception:
                        # Best effort: one feed that cannot be launched
                        # must not abort the whole run.
                        pass
                    break
        # Wait until all threads are done.
        for th in list_of_threads:
            th.join()

    def process(self, the_good_url):
        """
        Request the URL and store its articles.

        Executed in a thread.  SQLite objects created in a thread can only
        be used in that same thread, so the connection is opened here,
        under the lock, and always closed before the lock is released.
        """
        # Protect the whole database transaction.
        with self.locker:
            self.conn = sqlite3.connect("./var/feed.db", isolation_level=None)
            self.c = self.conn.cursor()
            try:
                # Add the articles in the base.
                self.add_into_sqlite(the_good_url)
                self.conn.commit()
            finally:
                # Release the SQLite resources even if parsing fails.
                self.c.close()
                self.conn.close()

    def add_into_sqlite(self, feed_link):
        """
        Add the articles of the feed at 'feed_link' in the SQLite base,
        mailing each new article when the feed's 'mail' flag is "1".
        """
        a_feed = feedparser.parse(feed_link)
        if a_feed['entries'] == []:
            return
        try:
            feed_image = a_feed.feed.image.href
        except Exception:
            # The feed advertises no image: fall back to the default icon.
            feed_image = "/css/img/feed-icon-28x28.png"
        try:
            self.c.execute('insert into feeds values (?,?,?,?,?)',
                           (a_feed.feed.title.encode('utf-8'),
                            a_feed.feed.link.encode('utf-8'),
                            feed_link,
                            feed_image,
                            "0"))
        except sqlite3.IntegrityError:
            # Feed already in the base.
            pass
        for article in a_feed['entries']:
            try:
                description = article.description.encode('utf-8')
            except Exception:
                # Article without a description.
                description = ""
            try:
                self.c.execute('insert into articles values (?,?,?,?,?,?)',
                               (datetime(*article.updated_parsed[:6]),
                                article.title.encode('utf-8'),
                                article.link.encode('utf-8'),
                                description,
                                "0",
                                feed_link))
                # Parameterized query: the feed link comes from remote
                # data and must never be concatenated into the SQL text.
                result = self.c.execute(
                        "SELECT mail from feeds WHERE feed_site_link=?",
                        (a_feed.feed.link.encode('utf-8'),)).fetchall()
                if result and result[0][0] == "1":
                    # Send the article by mail.
                    utils.send_mail(utils.mail_from, utils.mail_to,
                                    a_feed.feed.title.encode('utf-8'),
                                    description)
            except sqlite3.IntegrityError:
                # Article already in the base.
                pass
if __name__ == "__main__":
    # Point of entry in execution mode: build the getter and fetch the feeds.
    FeedGetter().retrieve_feed()