-rw-r--r--  NEWS.rst                        |  4
-rw-r--r--  documentation/conf.py           |  4
-rw-r--r--  documentation/index.rst         |  3
-rw-r--r--  documentation/requirements.rst  | 16
4 files changed, 24 insertions, 3 deletions
diff --git a/NEWS.rst b/NEWS.rst
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,5 +1,9 @@
 pyAggr3g470r project news
 
+6.2: 2015-02-26
+    The system of email notifications for new articles has been removed.
+    This feature was hardly used.
+
 6.1: 2015-02-23
     Improvements: articles are now identified with the id provided by the
     RSS/ATOM feed.
diff --git a/documentation/conf.py b/documentation/conf.py
index b148de9d..00190f45 100644
--- a/documentation/conf.py
+++ b/documentation/conf.py
@@ -48,9 +48,9 @@ copyright = u'2015, Cédric Bonhomme'
 # built documents.
 #
 # The short X.Y version.
-version = '6.0'
+version = '6.2'
 # The full version, including alpha/beta/rc tags.
-release = '6.0'
+release = '6.2'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/documentation/index.rst b/documentation/index.rst
index d01dc583..947612a1 100644
--- a/documentation/index.rst
+++ b/documentation/index.rst
@@ -7,12 +7,13 @@
 Welcome to pyAggr3g470r's documentation!
 ========================================
 
-Deployment and configuration
+Configuration and deployment
 ============================
 
 .. toctree::
    :maxdepth: 2
 
+   requirements
    deployment
 
 Web services
diff --git a/documentation/requirements.rst b/documentation/requirements.rst
new file mode 100644
index 00000000..a3630144
--- /dev/null
+++ b/documentation/requirements.rst
@@ -0,0 +1,16 @@
+Requirements
+============
+
+The complete list of required Python modules is in the file
+``requirements.txt``.
+
+The core technologies used are:
+
+* `Flask <http://flask.pocoo.org>`_ for the web backend;
+* `asyncio <https://www.python.org/dev/peps/pep-3156/>`_ for the crawler;
+* `SQLAlchemy <http://www.sqlalchemy.org>`_ for the database.
+
+Python 3.4 is highly recommended, especially for the feed crawler.
+The web server works with Python 2.7 and Python 3.
+
+It is possible to connect your own crawler to the RESTful API.
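
The new requirements page notes that an external crawler can feed the server
through its RESTful API. A minimal sketch of such a client follows; the base
URL, endpoint path, payload fields and credentials are illustrative
assumptions only and are not taken from the pyAggr3g470r code, so they must be
adapted to the actual API.

# Illustrative sketch: endpoint, fields and credentials below are hypothetical.
import requests

API_ROOT = "https://pyaggr3g470r.example.org/api/v2.0"  # hypothetical base URL
AUTH = ("user@example.org", "secret")                    # hypothetical credentials

def push_article(feed_id, title, link, content):
    """POST one crawled article to the server over the RESTful API."""
    payload = {
        "feed_id": feed_id,
        "title": title,
        "link": link,
        "content": content,
    }
    response = requests.post(API_ROOT + "/article", json=payload, auth=AUTH)
    response.raise_for_status()
    return response.json()

if __name__ == "__main__":
    article = push_article(1, "Hello", "http://example.org/post", "Example body.")
    print(article)

Any HTTP client would do here; the point is that the crawler is decoupled from
the web application and only needs to speak HTTP to the API.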