Merge branch 'master' into discover

* master:
  iOS 15 screenshots
  Don't use the underscore cache buster anymore.
  Correct archive sub count
  Don't resync stories on new sub if it already has archive subscribers.
This commit is contained in:
Samuel Clay 2024-07-06 07:25:03 -04:00
commit c31893f669
4 changed files with 20 additions and 8 deletions

View file

@@ -465,6 +465,11 @@ class UserSubscription(models.Model):
else:
message = "This address does not point to an RSS feed or a website with an RSS feed."
else:
allow_skip_resync = False
if user.profile.is_archive and feed.active_premium_subscribers != 0:
# Skip resync if there are already active archive subscribers
allow_skip_resync = True
us, subscription_created = cls.objects.get_or_create(
feed=feed,
user=user,
@@ -499,7 +504,7 @@
MActivity.new_feed_subscription(user_id=user.pk, feed_id=feed.pk, feed_title=feed.title)
feed.setup_feed_for_premium_subscribers()
feed.setup_feed_for_premium_subscribers(allow_skip_resync=allow_skip_resync)
feed.count_subscribers()
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)

View file

@@ -379,8 +379,8 @@ class Feed(models.Model):
self.search_indexed = True
self.save()
def sync_redis(self):
return MStory.sync_feed_redis(self.pk)
def sync_redis(self, allow_skip_resync=False):
return MStory.sync_feed_redis(self.pk, allow_skip_resync=allow_skip_resync)
def expire_redis(self, r=None):
if not r:
@@ -724,11 +724,11 @@
for feed in feeds:
feed.setup_feed_for_premium_subscribers()
def setup_feed_for_premium_subscribers(self):
def setup_feed_for_premium_subscribers(self, allow_skip_resync=False):
self.count_subscribers()
self.count_similar_feeds()
self.set_next_scheduled_update(verbose=settings.DEBUG)
self.sync_redis()
self.sync_redis(allow_skip_resync=allow_skip_resync)
def check_feed_link_for_feed_address(self):
@timelimit(10)
@@ -3328,11 +3328,17 @@ class MStory(mongo.Document):
r.zrem("zF:%s" % self.story_feed_id, self.story_hash)
@classmethod
def sync_feed_redis(cls, story_feed_id):
def sync_feed_redis(cls, story_feed_id, allow_skip_resync=False):
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
feed = Feed.get_by_id(story_feed_id)
stories = cls.objects.filter(story_feed_id=story_feed_id, story_date__gte=feed.unread_cutoff)
if allow_skip_resync and stories.count() > 1000:
logging.debug(
f" ---> [{feed.log_title[:30]}] ~FYSkipping resync of ~SB{stories.count()}~SN stories because it already had archive subscribers"
)
return
# Don't delete redis keys because they take time to rebuild and subs can
# be counted incorrectly during that time.
# r.delete('F:%s' % story_feed_id)

Binary file not shown.

View file

@@ -113,8 +113,9 @@ class FetchFeed:
address = self.options["archive_page_link"]
elif self.options.get("archive_page", None):
address = qurl(address, add={self.options["archive_page_key"]: self.options["archive_page"]})
elif address.startswith("http") and not any(item in address for item in NO_UNDERSCORE_ADDRESSES):
address = qurl(address, add={"_": random.randint(0, 10000)})
# Don't use the underscore cache buster: https://forum.newsblur.com/t/jwz-feed-broken-hes-mad-about-url-parameters/10742/15
# elif address.startswith("http") and not any(item in address for item in NO_UNDERSCORE_ADDRESSES):
# address = qurl(address, add={"_": random.randint(0, 10000)})
logging.debug(" ---> [%-30s] ~FBForcing fetch: %s" % (self.feed.log_title[:30], address))
elif not self.feed.fetched_once or not self.feed.known_good:
modified = None