Handling soft time limits while fetching archives.
commit 81f51f1723
parent d47053d049
4 changed files with 15 additions and 5 deletions
@@ -613,7 +613,8 @@ class UserSubscription(models.Model):
         search_chunks = [FetchArchiveFeedsChunk.s(feed_ids=feed_id_chunk,
                                                   user_id=user_id
                                                   ).set(queue='search_indexer')
-                                                  .set(time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED)
+                                                  .set(time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED,
+                                                       soft_time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED-30)
                          for feed_id_chunk in feed_id_chunks]
         callback = FinishFetchArchiveFeeds.s(user_id=user_id,
                                              start_time=start_time,
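For context, Celery's two limits behave differently: the soft limit raises SoftTimeLimitExceeded inside the task, where it can be caught, while the hard limit terminates the worker process outright. The .set() calls above attach both options to each chunk signature. A minimal sketch of those semantics, with illustrative names (fetch_archive_chunk, the broker URL) that are not from this repo:

    # Sketch of Celery soft vs. hard time limits (illustrative, not repo code).
    from celery import Celery
    from celery.exceptions import SoftTimeLimitExceeded

    app = Celery('sketch', broker='redis://localhost:6379/0')

    @app.task(time_limit=60, soft_time_limit=30)
    def fetch_archive_chunk(feed_ids):
        try:
            for feed_id in feed_ids:
                ...  # fetch and index one feed's archive pages
        except SoftTimeLimitExceeded:
            # At 30s Celery raises this inside the task, leaving a window
            # to wind down before the 60s hard limit kills the process.
            return {'completed': False, 'remaining': feed_ids}

The same pair of options can be set per invocation on a signature, as the diff does with .set(time_limit=..., soft_time_limit=...), rather than on the task definition.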
@@ -98,7 +98,7 @@ DATA_UPLOAD_MAX_MEMORY_SIZE = 5242880  # 5 MB
 FILE_UPLOAD_MAX_MEMORY_SIZE = 5242880  # 5 MB
 PROMETHEUS_EXPORT_MIGRATIONS = False
 MAX_SECONDS_COMPLETE_ARCHIVE_FETCH = 60 * 60 * 1  # 1 hour
-MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED = 60 * 10  # 10 minutes
+MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED = 60 * 1  # 1 minute
 
 # Uncomment below to force all feeds to store this many stories. Default is to cut
 # off at 25 stories for single subscriber non-premium feeds and 500 for popular feeds.
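For reference, the per-chunk budget that results from the new value (the -30 offset comes from the first hunk above) works out as follows; this is worked arithmetic, not code from the repo:

    # Worked values under this diff (illustrative):
    MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED = 60 * 1                # hard limit: 60s
    soft_time_limit = MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED - 30  # soft limit: 30s
    # Each chunk task therefore has a 30-second window to handle
    # SoftTimeLimitExceeded before the worker is killed outright.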
@@ -5,9 +5,11 @@
 {% block body %}
 <h2 style="color: #282F33; font-size: 18px; font-weight: bold;">Now everything you read on the web is archived, searchable, shareable, and not going anywhere</h2>
 
-<img src="https://{% current_domain %}/media/img/email/datacenter.svg" style="width:180px;height:180px;margin: 24px auto;" alt="Your archive sits in the cloud">
+<center>
+    <img src="https://{% current_domain %}/media/img/email/datacenter.svg" style="width:180px;height:180px;margin: 24px auto 6px;" alt="Your archive sits in the cloud">
 
-<h2 style="font-size: 34px;text-align: center;display: block; margin: 0 auto 24px;">{{ total_story_count|commify }} stories across {{ feed_count|commify }} sites</h2>
+    <h2 style="font-size: 34px;text-align: center;display: block; margin: 0 auto 24px;">{{ total_story_count|commify }} stories across {{ feed_count|commify }} sites</h2>
+</center>
 
 <p style="line-height: 20px;">A <b>huge</b> thank you for subscribing to the premium archive tier. Your subscription helps pay for the machines which store and process your archive.</p>
 <p style="line-height: 20px;">NewsBlur is an archive for the web you follow. Search it, clip highlights and share notes, and rest assured that your data will remain even as the web it pulls from may not. Thanks again for supporting NewsBlur.</p>
@@ -874,7 +874,14 @@ class FeedFetcherWorker:
         if self.options.get('archive_page', None):
             for feed_id in feed_queue:
-                self.fetch_and_process_archive_pages(feed_id)
+                feed = self.refresh_feed(feed_id)
+                try:
+                    self.fetch_and_process_archive_pages(feed_id)
+                except SoftTimeLimitExceeded:
+                    logging.debug(
+                        '   ---> [%-30s] ~FRTime limit reached while fetching ~FGarchive pages~FR. Made it to ~SB%s'
+                        % (feed.log_title[:30], self.options['archive_page'])
+                    )
+                    pass
             if len(feed_queue) == 1:
                 feed = self.refresh_feed(feed_queue[0])
                 return feed
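The try/except here is wrapped tightly around the slow per-feed call because Celery's soft-limit exception can surface at any point inside the try block; catching it per feed lets the worker log how far it got and return normally instead of dying mid-loop. A simplified sketch of the pattern, with hypothetical names (process_archive_queue, fetch_pages, last_page) standing in for the real worker:

    # Generic sketch of the per-feed soft-limit guard (not the real worker).
    from celery.exceptions import SoftTimeLimitExceeded

    def process_archive_queue(feed_queue, fetch_pages, last_page):
        for feed_id in feed_queue:
            try:
                fetch_pages(feed_id)  # raises if the soft time limit fires here
            except SoftTimeLimitExceeded:
                print('time limit reached on feed %s, made it to page %s'
                      % (feed_id, last_page))
                # Celery raises SoftTimeLimitExceeded only once per task, so
                # after it is caught, only the hard time_limit still applies.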