author     Cédric Bonhomme <cedric@cedricbonhomme.org>  2021-07-03 21:49:20 +0200
committer  Cédric Bonhomme <cedric@cedricbonhomme.org>  2021-07-03 21:49:20 +0200
commit     119a8f3e7c2dc0840312ee9e051b899e7b2c031d (patch)
tree       aa3167f93e823efa34894766a82d3d025cda8048 /newspipe/crawler
parent     delete read articles retrieved since more than 15 days. (diff)
download   newspipe-119a8f3e7c2dc0840312ee9e051b899e7b2c031d.tar.gz
           newspipe-119a8f3e7c2dc0840312ee9e051b899e7b2c031d.tar.bz2
           newspipe-119a8f3e7c2dc0840312ee9e051b899e7b2c031d.zip
delete read (and not liked) articles retrieved more than 15 days ago.
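The retention logic this commit message describes is not itself visible in the diff below, which only touches how the crawler selects feeds to refresh. As a rough sketch only, a cleanup of read, not-liked articles older than 15 days could be written with the same Django-style filter keywords the crawler already uses; the ArticleController import path and the readed/like/retrieved_date attribute names are assumptions here, not taken from this page:

    from datetime import datetime, timedelta

    from newspipe.controllers import ArticleController  # assumed import path

    def purge_read_articles(user_id, days=15):
        # Build filters in the same style as FeedController().read(**filters) below;
        # every attribute name here is an assumption about the Article model.
        filters = {
            "user_id": user_id,
            "readed": True,        # already read
            "like": False,         # not marked as liked
            "retrieved_date__lt": datetime.now() - timedelta(days=days),
        }
        # read() is assumed to return a SQLAlchemy query, so delete() removes the rows.
        ArticleController(user_id).read(**filters).delete()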
Diffstat (limited to 'newspipe/crawler')
-rw-r--r--  newspipe/crawler/default_crawler.py  14
1 file changed, 9 insertions, 5 deletions
diff --git a/newspipe/crawler/default_crawler.py b/newspipe/crawler/default_crawler.py
index 10ac1cc3..53b960ba 100644
--- a/newspipe/crawler/default_crawler.py
+++ b/newspipe/crawler/default_crawler.py
@@ -112,8 +112,7 @@ async def parse_feed(user, feed):
async def insert_articles(queue, nb_producers=1):
-    """Consumer coroutines.
-    """
+    """Consumer coroutines."""
    nb_producers_done = 0
    while True:
        item = await queue.get()
@@ -172,14 +171,19 @@ async def retrieve_feed(queue, users, feed_id=None):
filters["last_retrieved__lt"] = datetime.now() - timedelta(
minutes=application.config["FEED_REFRESH_INTERVAL"]
)
- #feeds = FeedController().read(**filters).all()
- feeds = [] # temporary fix for: sqlalchemy.exc.OperationalError: (psycopg2.OperationalError) SSL SYSCALL error: EOF detected
+ # feeds = FeedController().read(**filters).all()
+ feeds = (
+ []
+ ) # temporary fix for: sqlalchemy.exc.OperationalError: (psycopg2.OperationalError) SSL SYSCALL error: EOF detected
for feed in user.feeds:
if not feed.enabled:
continue
if feed.error_count > application.config["DEFAULT_MAX_ERROR"]:
continue
- if feed.last_retrieved > (datetime.now() - timedelta(minutes=application.config["FEED_REFRESH_INTERVAL"])):
+ if feed.last_retrieved > (
+ datetime.now()
+ - timedelta(minutes=application.config["FEED_REFRESH_INTERVAL"])
+ ):
continue
if None is feed_id or (feed_id and feed_id == feed.id):
feeds.append(feed)
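The feeds = [] workaround in the hunk above disables the FeedController query because the connection pool was handing out dead PostgreSQL connections, which surfaces as the quoted "SSL SYSCALL error: EOF detected" OperationalError. Not part of this commit, but a common mitigation, assuming the application configures its engine through Flask-SQLAlchemy, is to let the pool test and recycle connections before reuse:

    # Sketch under that assumption; these are standard SQLAlchemy pool options,
    # not settings taken from this repository.
    SQLALCHEMY_ENGINE_OPTIONS = {
        "pool_pre_ping": True,  # test each pooled connection before handing it out
        "pool_recycle": 300,    # drop and replace connections older than 300 seconds
    }

    # Equivalent with plain SQLAlchemy:
    # from sqlalchemy import create_engine
    # engine = create_engine(database_uri, pool_pre_ping=True, pool_recycle=300)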