aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorFrançois Schmidts <francois.schmidts@gmail.com>2015-03-03 23:05:21 +0100
committerFrançois Schmidts <francois.schmidts@gmail.com>2015-03-03 23:05:21 +0100
commit8e515cbf172f1aa7da37882fc3973f5a2dd70dd0 (patch)
treeae0a275bf82c62eca621a020a0e6ac1f6928e6d3
parentfixing documentation for the new (diff)
downloadnewspipe-8e515cbf172f1aa7da37882fc3973f5a2dd70dd0.tar.gz
newspipe-8e515cbf172f1aa7da37882fc3973f5a2dd70dd0.tar.bz2
newspipe-8e515cbf172f1aa7da37882fc3973f5a2dd70dd0.zip
last fixes
-rw-r--r--documentation/web-services.rst14
-rwxr-xr-xmanager.py2
-rw-r--r--pyaggr3g470r/lib/crawler.py5
3 files changed, 15 insertions, 6 deletions
diff --git a/documentation/web-services.rst b/documentation/web-services.rst
index d7a1b6aa..16a425ad 100644
--- a/documentation/web-services.rst
+++ b/documentation/web-services.rst
@@ -7,15 +7,23 @@ Articles
.. code-block:: python
>>> import requests, json
- >>> r = requests.get("https://pyaggr3g470r.herokuapp.com/api/v2.0/articles",
+ >>> r = requests.get("https://pyaggr3g470r.herokuapp.com/api/v2.0/article/1",
+ ... headers={'Content-type': 'application/json'},
... auth=("your-nickname", "your-password"))
>>> r.status_code
200 # OK
>>> rjson = r.json()
- >>> rjson[0]["title"]
+ >>> rjson["title"]
'Sponsors required for KDE code sprint in Randa'
- >>> rjson[0]["date"]
+ >>> rjson["date"]
'Wed, 18 Jun 2014 14:25:18 GMT'
+ >>> r = requests.get("https://pyaggr3g470r.herokuapp.com/api/v2.0/articles",
+ ... headers={'Content-type': 'application/json'},
+ ... auth=("your-nickname", "your-password"),
+ ... data=json.dumps({'id__in': [1, 2]}))
+ >>> r.json()
+ [{'id': 1, 'title': 'article1', [...]},
+ {'id': 2, 'title': 'article2', [...]}]
Add an article:
diff --git a/manager.py b/manager.py
index 904fccb0..89fd2bf1 100755
--- a/manager.py
+++ b/manager.py
@@ -84,7 +84,7 @@ def db_create():
@manager.command
-def fetch(user, password, limit=300):
+def fetch(user, password, limit=10):
from pyaggr3g470r.lib.crawler import CrawlerScheduler
scheduler = CrawlerScheduler(user, password)
scheduler.run(limit=limit)
diff --git a/pyaggr3g470r/lib/crawler.py b/pyaggr3g470r/lib/crawler.py
index c00b0dbf..64ef8b6d 100644
--- a/pyaggr3g470r/lib/crawler.py
+++ b/pyaggr3g470r/lib/crawler.py
@@ -148,9 +148,10 @@ class PyAggUpdater(AbstractCrawler):
logger.debug('%r %r - %d entries were not matched and will be created',
self.feed['id'], self.feed['title'], len(results))
for id_to_create in results:
- entry = self.entries[tuple(sorted(id_to_create.items()))]
+ entry = self.to_article(
+ self.entries[tuple(sorted(id_to_create.items()))])
logger.info('creating %r - %r', entry['title'], id_to_create)
- self.query_pyagg('post', 'article', self.to_article(entry))
+ self.query_pyagg('post', 'article', entry)
now = datetime.now()
logger.debug('%r %r - updating feed etag %r last_mod %r',
bgstack15