diff --git a/apps/reader/models.py b/apps/reader/models.py
index 7a06bce4d..6c89e888e 100644
--- a/apps/reader/models.py
+++ b/apps/reader/models.py
@@ -465,6 +465,11 @@ class UserSubscription(models.Model):
             else:
                 message = "This address does not point to an RSS feed or a website with an RSS feed."
         else:
+            allow_skip_resync = False
+            if user.profile.is_archive and feed.active_premium_subscribers != 0:
+                # Skip resync if there are already active archive subscribers
+                allow_skip_resync = True
+
             us, subscription_created = cls.objects.get_or_create(
                 feed=feed,
                 user=user,
@@ -499,7 +504,7 @@ class UserSubscription(models.Model):
             MActivity.new_feed_subscription(user_id=user.pk, feed_id=feed.pk, feed_title=feed.title)
 
-            feed.setup_feed_for_premium_subscribers()
+            feed.setup_feed_for_premium_subscribers(allow_skip_resync=allow_skip_resync)
             feed.count_subscribers()
 
             r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
diff --git a/apps/rss_feeds/models.py b/apps/rss_feeds/models.py
index 7b76cba6a..61a139e19 100755
--- a/apps/rss_feeds/models.py
+++ b/apps/rss_feeds/models.py
@@ -379,8 +379,8 @@ class Feed(models.Model):
             self.search_indexed = True
             self.save()
 
-    def sync_redis(self):
-        return MStory.sync_feed_redis(self.pk)
+    def sync_redis(self, allow_skip_resync=False):
+        return MStory.sync_feed_redis(self.pk, allow_skip_resync=allow_skip_resync)
 
     def expire_redis(self, r=None):
         if not r:
@@ -724,11 +724,11 @@ class Feed(models.Model):
         for feed in feeds:
             feed.setup_feed_for_premium_subscribers()
 
-    def setup_feed_for_premium_subscribers(self):
+    def setup_feed_for_premium_subscribers(self, allow_skip_resync=False):
         self.count_subscribers()
         self.count_similar_feeds()
         self.set_next_scheduled_update(verbose=settings.DEBUG)
-        self.sync_redis()
+        self.sync_redis(allow_skip_resync=allow_skip_resync)
 
     def check_feed_link_for_feed_address(self):
         @timelimit(10)
@@ -3328,11 +3328,17 @@ class MStory(mongo.Document):
             r.zrem("zF:%s" % self.story_feed_id, self.story_hash)
 
     @classmethod
-    def sync_feed_redis(cls, story_feed_id):
+    def sync_feed_redis(cls, story_feed_id, allow_skip_resync=False):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
         feed = Feed.get_by_id(story_feed_id)
         stories = cls.objects.filter(story_feed_id=story_feed_id, story_date__gte=feed.unread_cutoff)
+        if allow_skip_resync and stories.count() > 1000:
+            logging.debug(
+                f" ---> [{feed.log_title[:30]}] ~FYSkipping resync of ~SB{stories.count()}~SN stories because it already had archive subscribers"
+            )
+            return
+
         # Don't delete redis keys because they take time to rebuild and subs can
         # be counted incorrectly during that time.
         # r.delete('F:%s' % story_feed_id)
diff --git a/media/img/originals/iOS 15.sketch b/media/img/originals/iOS 15.sketch
new file mode 100644
index 000000000..e807f7aef
Binary files /dev/null and b/media/img/originals/iOS 15.sketch differ
diff --git a/utils/feed_fetcher.py b/utils/feed_fetcher.py
index 5b2c74701..0a7527f8a 100644
--- a/utils/feed_fetcher.py
+++ b/utils/feed_fetcher.py
@@ -113,8 +113,9 @@ class FetchFeed:
                 address = self.options["archive_page_link"]
             elif self.options.get("archive_page", None):
                 address = qurl(address, add={self.options["archive_page_key"]: self.options["archive_page"]})
-            elif address.startswith("http") and not any(item in address for item in NO_UNDERSCORE_ADDRESSES):
-                address = qurl(address, add={"_": random.randint(0, 10000)})
+            # Don't use the underscore cache buster: https://forum.newsblur.com/t/jwz-feed-broken-hes-mad-about-url-parameters/10742/15
+            # elif address.startswith("http") and not any(item in address for item in NO_UNDERSCORE_ADDRESSES):
+            #     address = qurl(address, add={"_": random.randint(0, 10000)})
             logging.debug(" ---> [%-30s] ~FBForcing fetch: %s" % (self.feed.log_title[:30], address))
         elif not self.feed.fetched_once or not self.feed.known_good:
             modified = None
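
A minimal standalone sketch (not part of this diff, and not NewsBlur API) of the skip-resync decision that the apps/reader and apps/rss_feeds hunks above encode: the flag is only raised when the subscribing user has archive access and the feed already has active premium subscribers, and sync_feed_redis only honors it for feeds with a story backlog above the 1,000-story threshold. The helper name and its parameters are illustrative only.

    # should_skip_resync is a hypothetical helper mirroring the two checks in the diff.
    def should_skip_resync(is_archive_user, active_premium_subscribers, story_count, threshold=1000):
        # UserSubscription.add_subscription: only an archive user joining a feed that
        # already has active premium subscribers may skip the redis story resync.
        allow_skip_resync = is_archive_user and active_premium_subscribers != 0
        # MStory.sync_feed_redis: the skip is only taken when the backlog is large
        # enough that a resync would be expensive.
        return allow_skip_resync and story_count > threshold

    if __name__ == "__main__":
        # First archive subscriber on a feed: resync still runs.
        print(should_skip_resync(True, 0, 5000))   # False
        # Feed already has premium/archive subscribers and a large backlog: resync skipped.
        print(should_skip_resync(True, 3, 5000))   # True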