aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorCédric Bonhomme <cedric@cedricbonhomme.org>2016-11-03 08:09:08 +0100
committerCédric Bonhomme <cedric@cedricbonhomme.org>2016-11-03 08:09:08 +0100
commit9c36e268e3a7927aed7e5e3cb57000a6e39bbe73 (patch)
treec86e2b19553ac4dda28662c4c0dbb47d5a992f02 /src
parentuseless return (diff)
downloadnewspipe-9c36e268e3a7927aed7e5e3cb57000a6e39bbe73.tar.gz
newspipe-9c36e268e3a7927aed7e5e3cb57000a6e39bbe73.tar.bz2
newspipe-9c36e268e3a7927aed7e5e3cb57000a6e39bbe73.zip
Starting to improve the logging for Heroku (with logging.StreamHandler).
Diffstat (limited to 'src')
-rw-r--r--src/bootstrap.py18
-rw-r--r--src/conf.py20
-rw-r--r--src/crawler/classic_crawler.py14
-rwxr-xr-xsrc/manager.py4
4 files changed, 31 insertions, 25 deletions
diff --git a/src/bootstrap.py b/src/bootstrap.py
index 92399e74..2b94b1b9 100644
--- a/src/bootstrap.py
+++ b/src/bootstrap.py
@@ -10,18 +10,22 @@ import flask_restless
from urllib.parse import urlsplit
-def set_logging(log_path, log_level=logging.INFO,
+def set_logging(log_path=None, log_level=logging.INFO, modules=(),
log_format='%(asctime)s %(levelname)s %(message)s'):
- formater = logging.Formatter(log_format)
- if conf.ON_HEROKU:
- handler = logging.StreamHandler()
- else:
+ if not modules:
+ modules = ('root', 'bootstrap', 'runserver',
+ 'web', 'crawler.classic_crawler', 'manager', 'plugins')
+ if log_path:
handler = logging.FileHandler(log_path)
+ else:
+ handler = logging.StreamHandler()
+ formater = logging.Formatter(log_format)
handler.setFormatter(formater)
- for logger_name in ('bootstrap', 'web', 'manager', 'runserver',
- 'classic_crawler'):
+ for logger_name in modules:
logger = logging.getLogger(logger_name)
logger.addHandler(handler)
+ for handler in logger.handlers:
+ handler.setLevel(log_level)
logger.setLevel(log_level)
from flask import Flask
diff --git a/src/conf.py b/src/conf.py
index 9bb05094..9718f07c 100644
--- a/src/conf.py
+++ b/src/conf.py
@@ -75,13 +75,21 @@ ADMIN_EMAIL = config.get('misc', 'admin_email')
SELF_REGISTRATION = config.getboolean('misc', 'self_registration')
SECURITY_PASSWORD_SALT = config.get('misc', 'security_password_salt')
TOKEN_VALIDITY_PERIOD = config.getint('misc', 'token_validity_period')
-LOG_PATH = os.path.abspath(config.get('misc', 'log_path'))
NB_WORKER = config.getint('misc', 'nb_worker')
-API_LOGIN = config.get('crawler', 'api_login')
-API_PASSWD = config.get('crawler', 'api_passwd')
+if not ON_HEROKU:
+ LOG_PATH = os.path.abspath(config.get('misc', 'log_path'))
+else:
+ LOG_PATH = ''
+LOG_LEVEL = {'debug': logging.DEBUG,
+ 'info': logging.INFO,
+ 'warn': logging.WARN,
+ 'error': logging.ERROR,
+ 'fatal': logging.FATAL}[config.get('misc', 'log_level')]
SQLALCHEMY_DATABASE_URI = config.get('database', 'database_url')
+API_LOGIN = config.get('crawler', 'api_login')
+API_PASSWD = config.get('crawler', 'api_passwd')
USER_AGENT = config.get('crawler', 'user_agent')
DEFAULT_MAX_ERROR = config.getint('crawler',
'default_max_error')
@@ -89,11 +97,7 @@ ERROR_THRESHOLD = int(DEFAULT_MAX_ERROR / 2)
CRAWLING_METHOD = config.get('crawler', 'crawling_method')
-LOG_LEVEL = {'debug': logging.DEBUG,
- 'info': logging.INFO,
- 'warn': logging.WARN,
- 'error': logging.ERROR,
- 'fatal': logging.FATAL}[config.get('misc', 'log_level')]
+
WEBSERVER_HOST = config.get('webserver', 'host')
WEBSERVER_PORT = config.getint('webserver', 'port')
diff --git a/src/crawler/classic_crawler.py b/src/crawler/classic_crawler.py
index 60538581..610f3544 100644
--- a/src/crawler/classic_crawler.py
+++ b/src/crawler/classic_crawler.py
@@ -86,7 +86,7 @@ async def parse_feed(user, feed):
try:
FeedController().update({'id': feed.id}, up_feed)
except Exception as e:
- print('something bad here: ' + str(e))
+ logger.exception('something bad here: ' + str(e))
return
if not is_parsing_ok(parsed_feed):
@@ -116,9 +116,9 @@ async def insert_database(user, feed):
if None is articles:
return []
- logger.info('inserting articles for {}'.format(feed.title))
+ logger.info('Inserting articles for {}'.format(feed.title))
- logger.info("Database insertion...")
+ logger.info('Database insertion for {}'.format(feed.title))
new_articles = []
art_contr = ArticleController(user.id)
for article in articles:
@@ -141,9 +141,7 @@ async def insert_database(user, feed):
new_updated_date = dateutil.parser.parse(article['updated'])
except Exception as e:
new_updated_date = existing_article.date
- print(new_updated_date)
- logger.exception("new_updated_date failed: ")
- print("new_updated_date failed: ")
+ logger.exception('new_updated_date failed: {}'.format(e))
if None is existing_article.updated_date:
existing_article.updated_date = new_updated_date.replace(tzinfo=None)
@@ -180,14 +178,14 @@ async def init_process(user, feed):
logger.debug('inserted articles for %s', feed.title)
return articles
except Exception as e:
- print('init_process: ' + str(e))
+ logger.exception('init_process: ' + str(e))
def retrieve_feed(loop, user, feed_id=None):
"""
Launch the processus.
"""
- logger.info("Starting to retrieve feeds.")
+ logger.info('Starting to retrieve feeds for {}'.format(user.nickname))
# Get the list of feeds to fetch
user = User.query.filter(User.email == user.email).first()
diff --git a/src/manager.py b/src/manager.py
index 155e7c3d..47a88339 100755
--- a/src/manager.py
+++ b/src/manager.py
@@ -4,14 +4,14 @@
import os
import logging
from werkzeug import generate_password_hash
-from bootstrap import application, db, conf
+from bootstrap import application, db, conf, set_logging
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
import web.models
from web.controllers import UserController
-logger = logging.getLogger(__name__)
+logger = logging.getLogger('manager')
Migrate(application, db)
bgstack15