-rw-r--r--  README.md              | 14
-rw-r--r--  instance/production.py |  2
-rw-r--r--  instance/sqlite.py     |  2
3 files changed, 10 insertions(+), 8 deletions(-)
diff --git a/README.md b/README.md
index 069b8487..daf6d96b 100644
--- a/README.md
+++ b/README.md
@@ -37,7 +37,7 @@ Newspipe is really easy to deploy.
### Requirements
```bash
-$ sudo apt-get install npm postgresql
+$ sudo apt-get install npm
```
## Configure and install the application
@@ -47,19 +47,21 @@ $ git clone https://git.sr.ht/~cedric/newspipe
$ cd newspipe/
$ npm install
$ poetry install
-$ cp instance/production.py instance/development.py
+$ export Newspipe_CONFIG=sqlite.py
$ poetry shell
$ python manager.py db_create
$ python runserver.py
* Running on http://0.0.0.0:5000/ (Press CTRL+C to quit)
```
-If you want to use SQLite you do not need to install PostgreSQL. Simply use
-the provided configuration file (in ``instance/sqlite.py``) thank to this
-environment variable:
+If you want to use PostgreSQL, you can customize
+the provided example configuration file (``instance/production.py``):
```bash
-export Newspipe_CONFIG=sqlite.py
+$ sudo apt-get install postgresql
+$ cp instance/production.py instance/postgresql.py
+$ vim instance/postgresql.py # customize it
+$ export Newspipe_CONFIG=postgresql.py
```
## License
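For reference, here is a minimal sketch of what the resulting ``instance/postgresql.py`` could contain after copying and customizing ``instance/production.py``, as the new README instructions describe. Only the ``SQLALCHEMY_DATABASE_URI`` format string and the crawler keys are taken from the diffs below; the credential values are placeholders, and the real file contains additional settings not shown here.

```python
# Hypothetical instance/postgresql.py, copied from instance/production.py
# and then customized. Only settings visible in this commit are shown;
# user, password, host, port and database name are placeholders.
SQLALCHEMY_DATABASE_URI = "postgres://{user}:{password}@{host}:{port}/{name}".format(
    user="newspipe",
    password="changeme",
    host="localhost",
    port=5432,
    name="newspipe",
)

# Crawler settings, matching instance/production.py after this change.
CRAWLING_METHOD = "default"
DEFAULT_MAX_ERROR = 6
HTTP_PROXY = ""
CRAWLER_USER_AGENT = "Newspipe (https://git.sr.ht/~cedric/newspipe)"
CRAWLER_TIMEOUT = 30
```

Once the file is in place, point the application at it with ``export Newspipe_CONFIG=postgresql.py``, as shown in the README snippet above.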
diff --git a/instance/production.py b/instance/production.py
index 37f8ee4a..798c5a04 100644
--- a/instance/production.py
+++ b/instance/production.py
@@ -29,7 +29,7 @@ SQLALCHEMY_DATABASE_URI = "postgres://{user}:{password}@{host}:{port}/{name}".fo
# Crawler
CRAWLING_METHOD = "default"
-DEFAULT_MAX_ERROR = 3
+DEFAULT_MAX_ERROR = 6
HTTP_PROXY = ""
CRAWLER_USER_AGENT = "Newspipe (https://git.sr.ht/~cedric/newspipe)"
CRAWLER_TIMEOUT = 30
diff --git a/instance/sqlite.py b/instance/sqlite.py
index b307eaca..fd6438cb 100644
--- a/instance/sqlite.py
+++ b/instance/sqlite.py
@@ -20,7 +20,7 @@ SQLALCHEMY_DATABASE_URI = "sqlite:///newspipe.db"
# Crawler
CRAWLING_METHOD = "default"
-DEFAULT_MAX_ERROR = 3
+DEFAULT_MAX_ERROR = 6
HTTP_PROXY = ""
CRAWLER_USER_AGENT = "Newspipe (https://git.sr.ht/~cedric/newspipe)"
CRAWLER_TIMEOUT = 30
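Both example configurations raise ``DEFAULT_MAX_ERROR`` from 3 to 6, presumably letting a feed fail more consecutive times before the crawler gives up on it. The snippet below is only an illustration of how such a threshold is commonly applied; it does not reproduce Newspipe's crawler, and the ``error_count`` field and ``feeds_to_crawl`` helper are hypothetical.

```python
# Illustrative only: not Newspipe's actual crawler code. Shows how a
# threshold such as DEFAULT_MAX_ERROR is typically consumed: feeds whose
# consecutive error count has reached the limit are skipped.
DEFAULT_MAX_ERROR = 6

def feeds_to_crawl(feeds, max_error=DEFAULT_MAX_ERROR):
    """Keep only the feeds that have not failed too many times in a row."""
    return [feed for feed in feeds if feed.get("error_count", 0) < max_error]

feeds = [
    {"url": "https://example.org/rss", "error_count": 2},
    {"url": "https://example.net/atom", "error_count": 7},
]
print(feeds_to_crawl(feeds))  # only the first feed is retained
```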