Mirror of https://github.com/samuelclay/NewsBlur.git, synced 2025-08-05 16:58:59 +00:00

commit 1dd921fa61 (parent 548ea15a9f)

Adding DAYS_OF_UNREAD_ARCHIVE and updating all the unread_cutoffs I could find. It doesn't seem to work yet when marking an older story as unread; there's probably a line somewhere that's still preventing the unread from taking effect.

5 changed files with 66 additions and 76 deletions
apps/profile/models.py

@@ -69,7 +69,9 @@ class Profile(models.Model):
     )
 
     @property
-    def unread_cutoff(self, force_premium=False):
+    def unread_cutoff(self, force_premium=False, force_archive=False):
+        if self.is_archive or force_archive:
+            return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_ARCHIVE)
         if self.is_premium or force_premium:
             return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
 
@@ -79,6 +81,12 @@ class Profile(models.Model):
     def unread_cutoff_premium(self):
         return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
 
+    @property
+    def days_of_story_hashes(self):
+        if self.is_archive:
+            return settings.DAYS_OF_STORY_HASHES_ARCHIVE
+        return settings.DAYS_OF_STORY_HASHES
+
     def canonical(self):
         return {
             'is_premium': self.is_premium,
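A minimal sketch of how the three cutoff tiers now resolve (standalone Python; the tier constants come from this commit's settings change, the helper itself is illustrative, not part of the diff):

    import datetime

    DAYS_OF_UNREAD_FREE = 14
    DAYS_OF_UNREAD = 30
    DAYS_OF_UNREAD_ARCHIVE = 9999

    def unread_cutoff(is_archive=False, is_premium=False):
        # Mirrors Profile.unread_cutoff: archive beats premium, premium beats free
        if is_archive:
            return datetime.datetime.utcnow() - datetime.timedelta(days=DAYS_OF_UNREAD_ARCHIVE)
        if is_premium:
            return datetime.datetime.utcnow() - datetime.timedelta(days=DAYS_OF_UNREAD)
        return datetime.datetime.utcnow() - datetime.timedelta(days=DAYS_OF_UNREAD_FREE)

    print(unread_cutoff(is_archive=True))   # ~27 years back, effectively unbounded
    print(unread_cutoff(is_premium=True))   # 30 days back
    print(unread_cutoff())                  # 14 days back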
apps/reader/models.py

@@ -126,7 +126,7 @@ class UserSubscription(models.Model):
 
         current_time = int(time.time() + 60*60*24)
         if not cutoff_date:
-            cutoff_date = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
+            cutoff_date = datetime.datetime.now() - datetime.timedelta(days=UserSubscription.days_of_story_hashes_for_user(user_id))
         feed_counter = 0
 
         read_dates = dict()
@@ -216,7 +216,7 @@ class UserSubscription(models.Model):
 
         current_time = int(time.time() + 60*60*24)
         if not cutoff_date:
-            cutoff_date = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
+            cutoff_date = datetime.datetime.now() - datetime.timedelta(days=self.user.profile.days_of_story_hashes)
         if read_filter == "unread":
             cutoff_date = max(cutoff_date, self.mark_read_date)
         elif default_cutoff_date:
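Worth noting: in the unread path the cutoff is still clamped by mark_read_date, so even a 9999-day window never reaches past the moment the user last marked everything read. A quick standalone illustration (the dates are invented):

    import datetime

    now = datetime.datetime.now()
    profile_cutoff = now - datetime.timedelta(days=9999)   # archive-tier window
    mark_read_date = now - datetime.timedelta(days=45)     # user cleared unreads 45 days ago

    cutoff_date = max(profile_cutoff, mark_read_date)
    assert cutoff_date == mark_read_date                   # the more recent bound wins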
@@ -334,6 +334,11 @@ class UserSubscription(models.Model):
 
         return story_hashes, unread_feed_story_hashes
 
+    @classmethod
+    def days_of_story_hashes_for_user(cls, user_id):
+        user = User.objects.get(pk=user_id)
+        return user.profile.days_of_story_hashes
+
     @classmethod
     def truncate_river(cls, user_id, feed_ids, read_filter, cache_prefix=""):
         rt = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_TEMP_POOL)
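The new classmethod lets callers that only hold a user_id resolve the per-user retention window, at the cost of one extra User query per call. A hedged usage sketch (the user_id is hypothetical):

    days = UserSubscription.days_of_story_hashes_for_user(user_id=42)
    cutoff_date = datetime.datetime.now() - datetime.timedelta(days=days)
    # 30 days for free and premium users, 9999 for archive subscribers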
@@ -1051,11 +1056,8 @@ class RUserStory:
         ps = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
         if not username:
             username = User.objects.get(pk=user_id).username
-        # if not r2:
-        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
 
         p = r.pipeline()
-        # p2 = r2.pipeline()
         feed_ids = set()
         friend_ids = set()
 
@@ -1079,7 +1081,6 @@ class RUserStory:
             cls.mark_read(user_id, feed_id, story_hash, social_user_ids=friends_with_shares, r=p, username=username, ps=ps)
 
         p.execute()
-        # p2.execute()
 
         return list(feed_ids), list(friend_ids)
 
@@ -1091,8 +1092,6 @@ class RUserStory:
         s = redis.Redis(connection_pool=settings.REDIS_POOL)
         if not ps:
             ps = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
-        # if not r2:
-        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
 
         friend_ids = set()
         feed_id, _ = MStory.split_story_hash(story_hash)
@@ -1118,6 +1117,8 @@ class RUserStory:
         feed_read_key = "fR:%s:%s" % (feed_id, week_of_year)
 
         r.incr(feed_read_key)
+        # This settings.DAYS_OF_STORY_HASHES doesn't need to consider potential pro subscribers
+        # because the feed_read_key is really only used for statistics and not unreads
         r.expire(feed_read_key, 2*settings.DAYS_OF_STORY_HASHES*24*60*60)
 
     @classmethod
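Since the statistics key intentionally keeps the global constant, its TTL stays at twice the base window no matter who subscribes. With this commit's values that works out to 60 days:

    DAYS_OF_STORY_HASHES = 30                  # base value from settings.py
    ttl_seconds = 2 * DAYS_OF_STORY_HASHES * 24 * 60 * 60
    print(ttl_seconds)                         # 5184000
    print(ttl_seconds // (24 * 60 * 60))       # 60 days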
@@ -1125,8 +1126,6 @@ class RUserStory:
                   aggregated=False, r=None, username=None, ps=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        # if not r2:
-        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
 
         story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=story_feed_id)
 
@@ -1134,9 +1133,7 @@ class RUserStory:
 
         def redis_commands(key):
             r.sadd(key, story_hash)
-            # r2.sadd(key, story_hash)
-            r.expire(key, settings.DAYS_OF_STORY_HASHES*24*60*60)
-            # r2.expire(key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+            r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id)*24*60*60)
 
         all_read_stories_key = 'RS:%s' % (user_id)
         redis_commands(all_read_stories_key)
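The expiry now depends on the feed rather than a global constant, so a feed with even one archive subscriber keeps its read-story hashes for the archive window. A standalone sketch of that decision (constants from this commit; the function mirrors the new Feed.days_of_story_hashes property):

    DAYS_OF_STORY_HASHES = 30
    DAYS_OF_STORY_HASHES_ARCHIVE = 9999

    def days_of_story_hashes(archive_subscribers):
        if archive_subscribers > 0:
            return DAYS_OF_STORY_HASHES_ARCHIVE
        return DAYS_OF_STORY_HASHES

    print(days_of_story_hashes(0) * 24 * 60 * 60)  # 2592000 seconds, 30 days
    print(days_of_story_hashes(2) * 24 * 60 * 60)  # 863913600 seconds, ~27 years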
@@ -1156,18 +1153,21 @@ class RUserStory:
         key = 'lRS:%s' % user_id
         r.lpush(key, story_hash)
         r.ltrim(key, 0, 1000)
-        r.expire(key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id)*24*60*60)
 
     @staticmethod
     def story_can_be_marked_read_by_user(story, user):
         message = None
         if story.story_date < user.profile.unread_cutoff:
-            if user.profile.is_premium:
+            if user.profile.is_archive:
+                message = "Story is more than %s days old, cannot mark as unread." % (
+                    settings.DAYS_OF_UNREAD_ARCHIVE)
+            elif user.profile.is_premium:
                 message = "Story is more than %s days old, cannot mark as unread." % (
                     settings.DAYS_OF_UNREAD)
             elif story.story_date > user.profile.unread_cutoff_premium:
-                message = "Story is more than %s days old. Premiums can mark unread up to 30 days." % (
-                    settings.DAYS_OF_UNREAD_FREE)
+                message = "Story is more than %s days old. Premium accounts can mark unread up to %s days, and Premium Archive accounts can mark any story as unread." % (
+                    settings.DAYS_OF_UNREAD_FREE, settings.DAYS_OF_UNREAD)
             else:
                 message = "Story is more than %s days old, cannot mark as unread." % (
                     settings.DAYS_OF_UNREAD_FREE)
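Tracing the new guard for an archive user suggests this function now permits the mark-unread that the commit message says is still failing, so the blocking line is probably elsewhere. A hedged trace with invented dates:

    import datetime

    now = datetime.datetime.utcnow()
    story_date = now - datetime.timedelta(days=400)        # an "older story"
    archive_cutoff = now - datetime.timedelta(days=9999)   # unread_cutoff for an archive user

    print(story_date < archive_cutoff)   # False -> message stays None, marking unread is allowed here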
@@ -1177,7 +1177,6 @@ class RUserStory:
     def mark_unread(user_id, story_feed_id, story_hash, social_user_ids=None, r=None, username=None, ps=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
 
         story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=story_feed_id)
 
@@ -1185,9 +1184,7 @@ class RUserStory:
 
         def redis_commands(key):
             r.srem(key, story_hash)
-            # r2.srem(key, story_hash)
-            r.expire(key, settings.DAYS_OF_STORY_HASHES*24*60*60)
-            # r2.expire(key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+            r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id)*24*60*60)
 
         all_read_stories_key = 'RS:%s' % (user_id)
         redis_commands(all_read_stories_key)
@@ -1231,9 +1228,7 @@ class RUserStory:
     @classmethod
     def switch_feed(cls, user_id, old_feed_id, new_feed_id):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         p = r.pipeline()
-        # p2 = r2.pipeline()
         story_hashes = cls.get_stories(user_id, old_feed_id, r=r)
 
         for story_hash in story_hashes:
@@ -1241,18 +1236,13 @@ class RUserStory:
             new_story_hash = "%s:%s" % (new_feed_id, hash_story)
             read_feed_key = "RS:%s:%s" % (user_id, new_feed_id)
             p.sadd(read_feed_key, new_story_hash)
-            # p2.sadd(read_feed_key, new_story_hash)
-            p.expire(read_feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
-            # p2.expire(read_feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+            p.expire(read_feed_key, Feed.days_of_story_hashes_for_feed(new_feed_id)*24*60*60)
 
             read_user_key = "RS:%s" % (user_id)
             p.sadd(read_user_key, new_story_hash)
-            # p2.sadd(read_user_key, new_story_hash)
-            p.expire(read_user_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
-            # p2.expire(read_user_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+            p.expire(read_user_key, Feed.days_of_story_hashes_for_feed(new_feed_id)*24*60*60)
 
         p.execute()
-        # p2.execute()
 
         if len(story_hashes) > 0:
             logging.info(" ---> %s read stories" % len(story_hashes))
@@ -1260,9 +1250,7 @@ class RUserStory:
     @classmethod
     def switch_hash(cls, feed, old_hash, new_hash):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         p = r.pipeline()
-        # p2 = r2.pipeline()
 
         usersubs = UserSubscription.objects.filter(feed_id=feed.pk, last_read_date__gte=feed.unread_cutoff)
         logging.info(" ---> ~SB%s usersubs~SN to switch read story hashes..." % len(usersubs))
@@ -1271,18 +1259,13 @@ class RUserStory:
             read = r.sismember(rs_key, old_hash)
             if read:
                 p.sadd(rs_key, new_hash)
-                # p2.sadd(rs_key, new_hash)
-                p.expire(rs_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
-                # p2.expire(rs_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+                p.expire(rs_key, feed.days_of_story_hashes*24*60*60)
 
                 read_user_key = "RS:%s" % sub.user.pk
                 p.sadd(read_user_key, new_hash)
-                # p2.sadd(read_user_key, new_hash)
-                p.expire(read_user_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
-                # p2.expire(read_user_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+                p.expire(read_user_key, feed.days_of_story_hashes*24*60*60)
 
         p.execute()
-        # p2.execute()
 
     @classmethod
     def read_story_count(cls, user_id):
apps/rss_feeds/models.py

@@ -154,12 +154,26 @@ class Feed(models.Model):
     @property
     def unread_cutoff(self):
         if self.archive_subscribers > 0:
-            return datetime.datetime.utcnow() - datetime.timedelta(days=9999)
-        if self.active_premium_subscribers > 0:
+            return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_ARCHIVE)
+        if self.premium_subscribers > 0:
             return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
 
         return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_FREE)
 
+    @classmethod
+    def days_of_story_hashes_for_feed(cls, feed_id):
+        try:
+            feed = cls.objects.only('archive_subscribers').get(pk=feed_id)
+            return feed.days_of_story_hashes
+        except cls.DoesNotExist:
+            return settings.DAYS_OF_STORY_HASHES
+
+    @property
+    def days_of_story_hashes(self):
+        if self.archive_subscribers > 0:
+            return settings.DAYS_OF_STORY_HASHES_ARCHIVE
+        return settings.DAYS_OF_STORY_HASHES
+
     @property
     def story_hashes_in_unread_cutoff(self):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
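The classmethod exists for Redis-side callers that only hold a feed_id, and it falls back to the base retention when the feed row is missing. A standalone mirror of that fallback (the in-memory dict is a hypothetical stand-in for the Feed table):

    DAYS_OF_STORY_HASHES = 30
    DAYS_OF_STORY_HASHES_ARCHIVE = 9999

    feeds = {123: {'archive_subscribers': 1}}   # invented stand-in for Feed rows

    def days_of_story_hashes_for_feed(feed_id):
        feed = feeds.get(feed_id)
        if feed is None:                        # plays the role of Feed.DoesNotExist
            return DAYS_OF_STORY_HASHES
        if feed['archive_subscribers'] > 0:
            return DAYS_OF_STORY_HASHES_ARCHIVE
        return DAYS_OF_STORY_HASHES

    print(days_of_story_hashes_for_feed(123))   # 9999
    print(days_of_story_hashes_for_feed(999))   # 30, a missing feed falls back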
@@ -328,13 +342,9 @@ class Feed(models.Model):
     def expire_redis(self, r=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        # if not r2:
-        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
 
-        r.expire('F:%s' % self.pk, settings.DAYS_OF_STORY_HASHES*24*60*60)
-        # r2.expire('F:%s' % self.pk, settings.DAYS_OF_STORY_HASHES*24*60*60)
-        r.expire('zF:%s' % self.pk, settings.DAYS_OF_STORY_HASHES*24*60*60)
-        # r2.expire('zF:%s' % self.pk, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        r.expire('F:%s' % self.pk, self.days_of_story_hashes*24*60*60)
+        r.expire('zF:%s' % self.pk, self.days_of_story_hashes*24*60*60)
 
     @classmethod
     def low_volume_feeds(cls, feed_ids, stories_per_month=30):
@@ -1517,7 +1527,9 @@ class Feed(models.Model):
             feed = Feed.objects.get(pk=feed_id)
         except Feed.DoesNotExist:
             continue
-        if feed.active_subscribers <= 0 and (not feed.last_story_date or feed.last_story_date < month_ago):
+        if (feed.active_subscribers <= 0 and
+            feed.archive_subscribers <= 0 and
+            (not feed.last_story_date or feed.last_story_date < month_ago)):
             months_ago = 6
             if feed.last_story_date:
                 months_ago = int((now - feed.last_story_date).days / 30.0)
@@ -1537,7 +1549,7 @@ class Feed(models.Model):
 
     @property
     def story_cutoff(self):
-        if self.archive_subscribers >= 1:
+        if self.archive_subscribers > 0:
             return 10000
 
         cutoff = 500
@@ -2777,52 +2789,36 @@ class MStory(mongo.Document):
     def sync_redis(self, r=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        # if not r2:
-        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
-        UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
+        feed = Feed.get_by_id(self.story_feed_id)
 
-        if self.id and self.story_date > UNREAD_CUTOFF:
+        if self.id and self.story_date > feed.unread_cutoff:
             feed_key = 'F:%s' % self.story_feed_id
             r.sadd(feed_key, self.story_hash)
-            r.expire(feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
-            # r2.sadd(feed_key, self.story_hash)
-            # r2.expire(feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+            r.expire(feed_key, feed.days_of_story_hashes*24*60*60)
 
             r.zadd('z' + feed_key, { self.story_hash: time.mktime(self.story_date.timetuple()) })
-            r.expire('z' + feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
-            # r2.zadd('z' + feed_key, self.story_hash, time.mktime(self.story_date.timetuple()))
-            # r2.expire('z' + feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+            r.expire('z' + feed_key, feed.days_of_story_hashes*24*60*60)
 
     def remove_from_redis(self, r=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        # if not r2:
-        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         if self.id:
             r.srem('F:%s' % self.story_feed_id, self.story_hash)
-            # r2.srem('F:%s' % self.story_feed_id, self.story_hash)
             r.zrem('zF:%s' % self.story_feed_id, self.story_hash)
-            # r2.zrem('zF:%s' % self.story_feed_id, self.story_hash)
 
     @classmethod
     def sync_feed_redis(cls, story_feed_id):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
-        UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
         feed = Feed.get_by_id(story_feed_id)
-        stories = cls.objects.filter(story_feed_id=story_feed_id, story_date__gte=UNREAD_CUTOFF)
+        stories = cls.objects.filter(story_feed_id=story_feed_id, story_date__gte=feed.unread_cutoff)
         r.delete('F:%s' % story_feed_id)
-        # r2.delete('F:%s' % story_feed_id)
         r.delete('zF:%s' % story_feed_id)
-        # r2.delete('zF:%s' % story_feed_id)
 
         logging.info(" ---> [%-30s] ~FMSyncing ~SB%s~SN stories to redis" % (feed and feed.log_title[:30] or story_feed_id, stories.count()))
         p = r.pipeline()
-        # p2 = r2.pipeline()
         for story in stories:
             story.sync_redis(r=p)
         p.execute()
-        # p2.execute()
 
     def count_comments(self):
         from apps.social.models import MSharedStory
newsblur_web/docker_local_settings.py

@@ -20,7 +20,7 @@ SESSION_COOKIE_DOMAIN = 'localhost'
 
 DOCKERBUILD = True
 DEBUG = False
-# DEBUG = True
+DEBUG = True
 
 # DEBUG_ASSETS controls JS/CSS asset packaging. Turning this off requires you to run
 # `./manage.py collectstatic` first. Turn this on for development so you can see
@@ -33,7 +33,7 @@ DEBUG_ASSETS = True
 # down verbosity.
 DEBUG_QUERIES = DEBUG
 DEBUG_QUERIES_SUMMARY_ONLY = True
-# DEBUG_QUERIES_SUMMARY_ONLY = False
+DEBUG_QUERIES_SUMMARY_ONLY = False
 
 MEDIA_URL = '/media/'
 IMAGES_URL = '/imageproxy'
newsblur_web/settings.py

@@ -251,9 +251,12 @@ logging.getLogger("urllib3").setLevel(logging.WARNING)
 
 DAYS_OF_UNREAD = 30
 DAYS_OF_UNREAD_FREE = 14
+DAYS_OF_UNREAD_ARCHIVE = 9999
 # DoSH can be more, since you can up this value by N, and after N days,
 # you can then up the DAYS_OF_UNREAD value with no impact.
-DAYS_OF_STORY_HASHES = 30
+# The max is for archive subscribers.
+DAYS_OF_STORY_HASHES = DAYS_OF_UNREAD
+DAYS_OF_STORY_HASHES_ARCHIVE = DAYS_OF_UNREAD_ARCHIVE
 
 # SUBSCRIBER_EXPIRE sets the number of days after which a user who hasn't logged in
 # is no longer considered an active subscriber
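Deriving the hash-retention constants from the unread constants means a future bump to DAYS_OF_UNREAD or DAYS_OF_UNREAD_ARCHIVE propagates automatically. A quick standalone check of the resulting values:

    DAYS_OF_UNREAD = 30
    DAYS_OF_UNREAD_ARCHIVE = 9999

    DAYS_OF_STORY_HASHES = DAYS_OF_UNREAD
    DAYS_OF_STORY_HASHES_ARCHIVE = DAYS_OF_UNREAD_ARCHIVE

    print(DAYS_OF_STORY_HASHES, DAYS_OF_STORY_HASHES_ARCHIVE)   # 30 9999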