Mirror of https://github.com/samuelclay/NewsBlur.git
Attempting to impose longer time limits for archive fetch.
commit c20eed028c (parent 9759bda53a)
2 changed files with 6 additions and 2 deletions
@@ -580,7 +580,8 @@ class UserSubscription(models.Model):
     def schedule_fetch_archive_feeds_for_user(cls, user_id):
         from apps.profile.tasks import FetchArchiveFeedsForUser
         FetchArchiveFeedsForUser.apply_async(kwargs=dict(user_id=user_id),
-                                             queue='search_indexer')
+                                             queue='search_indexer',
+                                             time_limit=settings.MAX_SECONDS_COMPLETE_ARCHIVE_FETCH)
 
     # Should be run as a background task
     @classmethod
@@ -611,7 +612,8 @@ class UserSubscription(models.Model):
 
         search_chunks = [FetchArchiveFeedsChunk.s(feed_ids=feed_id_chunk,
                                                   user_id=user_id
-                                                  ).set(queue='search_indexer').set(time_limit=1500)
+                                                  ).set(queue='search_indexer')
+                                                  .set(time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED)
                          for feed_id_chunk in feed_id_chunks]
         callback = FinishFetchArchiveFeeds.s(user_id=user_id,
                                              start_time=start_time,
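The two hunks above use Celery's standard per-call execution options: apply_async() accepts queue and time_limit directly, and task signatures built with .s() accept the same options through .set(). Below is a minimal sketch of the chunk-and-callback (chord) shape; the app, broker URL, task names, and chunk size are hypothetical stand-ins, not NewsBlur's actual code.

from celery import Celery, chord

app = Celery("sketch", broker="redis://localhost:6379/0")  # assumed broker

@app.task
def fetch_chunk(feed_ids, user_id):
    pass  # would fetch archive stories for one chunk of feeds

@app.task
def finish_fetch(results, user_id):
    pass  # chord callback: runs once every chunk task has finished

def schedule_archive_fetch(user_id, feed_ids, chunk_size=10):
    feed_id_chunks = [feed_ids[i:i + chunk_size]
                      for i in range(0, len(feed_ids), chunk_size)]
    # .set() attaches execution options to each signature, as in the
    # .set(queue='search_indexer').set(time_limit=...) chain above;
    # 600 mirrors MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED.
    search_chunks = [fetch_chunk.s(feed_ids=chunk, user_id=user_id)
                         .set(queue="search_indexer", time_limit=600)
                     for chunk in feed_id_chunks]
    callback = finish_fetch.s(user_id=user_id).set(queue="search_indexer")
    # chord(header)(callback) dispatches the chunks and wires the callback.
    return chord(search_chunks)(callback)

The whole-archive task in the first hunk passes the same time_limit option through apply_async() instead of .set().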
@@ -97,6 +97,8 @@ PAYPAL_TEST = False
 DATA_UPLOAD_MAX_MEMORY_SIZE = 5242880  # 5 MB
 FILE_UPLOAD_MAX_MEMORY_SIZE = 5242880  # 5 MB
 PROMETHEUS_EXPORT_MIGRATIONS = False
+MAX_SECONDS_COMPLETE_ARCHIVE_FETCH = 60 * 60 * 1  # 1 hour
+MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED = 60 * 10  # 10 minutes
 
 # Uncomment below to force all feeds to store this many stories. Default is to cut
 # off at 25 stories for single subscriber non-premium feeds and 500 for popular feeds.
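For reference, the new constants work out to 3600 seconds (1 hour) for the complete archive fetch and 600 seconds (10 minutes) per feed chunk, replacing the hardcoded 1500-second limit. Celery's time_limit is a hard cap: the worker terminates the task when it expires. This commit sets only hard limits; if graceful cleanup were wanted, Celery's companion soft_time_limit option raises SoftTimeLimitExceeded inside the task shortly before the kill. A minimal sketch of that pattern (the task name and limit values are illustrative, not from this commit):

from celery import Celery
from celery.exceptions import SoftTimeLimitExceeded

app = Celery("sketch")

# Hard limit at 3600 s (the worker kills the task); soft limit at 3500 s
# raises inside the task so it can save partial progress first.
@app.task(time_limit=3600, soft_time_limit=3500)
def fetch_archive(user_id):
    try:
        pass  # long-running archive fetch would go here
    except SoftTimeLimitExceeded:
        pass  # flush partial progress before the hard limit hits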