Mirror of https://github.com/viq/NewsBlur.git

commit 06eeaaba0b (parent 4ae66cfdb0)

    User-dependent days of unread. 14 for free users, 30 for premium.

9 changed files with 121 additions and 91 deletions

@@ -56,6 +56,13 @@ class Profile(models.Model):
     def __unicode__(self):
         return "%s <%s> (Premium: %s)" % (self.user, self.user.email, self.is_premium)
     
+    @property
+    def unread_cutoff(self):
+        if self.is_premium:
+            return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
+        
+        return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_FREE)
+    
     def canonical(self):
         return {
             'is_premium': self.is_premium,

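This new Profile.unread_cutoff property is the heart of the commit: every unread computation now asks the user's profile for its cutoff instead of sharing one site-wide constant. A minimal standalone sketch of the same selection rule (the constants mirror the settings values added near the bottom of this diff; the free-function form is ours, not NewsBlur's):

    import datetime

    DAYS_OF_UNREAD = 30       # premium window, as in settings.py
    DAYS_OF_UNREAD_FREE = 14  # free window, as in settings.py

    def unread_cutoff(is_premium, now=None):
        # Oldest datetime a story may carry and still count as unread.
        now = now or datetime.datetime.utcnow()
        days = DAYS_OF_UNREAD if is_premium else DAYS_OF_UNREAD_FREE
        return now - datetime.timedelta(days=days)
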
@@ -104,7 +104,7 @@ class UserSubscription(models.Model):
     
     @classmethod
     def story_hashes(cls, user_id, feed_ids=None, usersubs=None, read_filter="unread", order="newest",
-                     include_timestamps=False, group_by_feed=True):
+                     include_timestamps=False, group_by_feed=True, cutoff_date=None):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
         pipeline = r.pipeline()
         story_hashes = {} if group_by_feed else []

@@ -117,8 +117,9 @@ class UserSubscription(models.Model):
         
         read_dates = dict((us.feed_id, int(us.mark_read_date.strftime('%s'))) for us in usersubs)
         current_time = int(time.time() + 60*60*24)
-        unread_interval = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_NEW)
-        unread_timestamp = int(time.mktime(unread_interval.timetuple()))-1000
+        if not cutoff_date:
+            cutoff_date = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
+        unread_timestamp = int(time.mktime(cutoff_date.timetuple()))-1000
         feed_counter = 0
         
         for feed_id_group in chunks(feed_ids, 20):

@@ -165,7 +166,8 @@ class UserSubscription(models.Model):
         
         return story_hashes
     
-    def get_stories(self, offset=0, limit=6, order='newest', read_filter='all', withscores=False, hashes_only=False):
+    def get_stories(self, offset=0, limit=6, order='newest', read_filter='all', withscores=False,
+                    hashes_only=False, cutoff_date=None):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
         ignore_user_stories = False
         

@@ -191,14 +193,15 @@ class UserSubscription(models.Model):
             r.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key])
         
         current_time = int(time.time() + 60*60*24)
+        if not cutoff_date:
+            cutoff_date = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
+        
         if order == 'oldest':
             byscorefunc = r.zrangebyscore
             if read_filter == 'unread':
                 min_score = int(time.mktime(self.mark_read_date.timetuple())) + 1
             else:
-                now = datetime.datetime.now()
-                unread_cutoff = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD_NEW)
-                min_score = int(time.mktime(unread_cutoff.timetuple()))-1000
+                min_score = int(time.mktime(cutoff_date.timetuple()))-1000
             max_score = current_time
         else:
             byscorefunc = r.zrevrangebyscore

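story_hashes() and get_stories() both turn the cutoff into a score window over Redis sorted sets whose scores are epoch seconds. A rough sketch of the windowing under that assumption (the helper name is ours; the -1000 fudge mirrors the padding applied to cutoff timestamps above):

    import datetime
    import time

    def score_window(cutoff_date, fudge=1000):
        # Candidates live in [cutoff - fudge, now + 1 day], in epoch seconds.
        min_score = int(time.mktime(cutoff_date.timetuple())) - fudge
        max_score = int(time.time() + 60*60*24)
        return min_score, max_score

    lo, hi = score_window(datetime.datetime.now() - datetime.timedelta(days=14))
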
@@ -238,7 +241,7 @@ class UserSubscription(models.Model):
     
     @classmethod
     def feed_stories(cls, user_id, feed_ids=None, offset=0, limit=6, 
-                     order='newest', read_filter='all', usersubs=None):
+                     order='newest', read_filter='all', usersubs=None, cutoff_date=None):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
         
         if order == 'oldest':

@@ -269,7 +272,8 @@ class UserSubscription(models.Model):
             story_hashes = cls.story_hashes(user_id, feed_ids=feed_ids,
                                             read_filter=read_filter, order=order,
                                             include_timestamps=True,
-                                            group_by_feed=False, usersubs=usersubs)
+                                            group_by_feed=False, usersubs=usersubs,
+                                            cutoff_date=cutoff_date)
             if not story_hashes:
                 return [], []
         

@@ -283,7 +287,8 @@ class UserSubscription(models.Model):
         unread_story_hashes = cls.story_hashes(user_id, feed_ids=feed_ids,
                                                read_filter="unread", order=order,
                                                include_timestamps=True,
-                                               group_by_feed=False)
+                                               group_by_feed=False,
+                                               cutoff_date=cutoff_date)
         if unread_story_hashes:
             for unread_story_hash_group in chunks(unread_story_hashes, 100):
                 r.zadd(unread_ranked_stories_keys, **dict(unread_story_hash_group))

@@ -360,14 +365,12 @@ class UserSubscription(models.Model):
         if feed_ids:
             user_subs = user_subs.filter(feed__in=feed_ids)
         
-        UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
-        
         for i, sub in enumerate(user_subs):
             # Count unreads if subscription is stale.
             if (force or 
                 sub.needs_unread_recalc or
-                sub.unread_count_updated < UNREAD_CUTOFF or 
-                sub.oldest_unread_story_date < UNREAD_CUTOFF):
+                sub.unread_count_updated < user.profile.unread_cutoff or 
+                sub.oldest_unread_story_date < user.profile.unread_cutoff):
                 sub = sub.calculate_feed_scores(silent=True, force=force)
             if not sub: continue # TODO: Figure out the correct sub and give it a new feed_id
         

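The staleness test above now compares against the calling user's own cutoff, so a free user's counts go stale on a 14-day horizon and a premium user's on a 30-day one. Condensed, the predicate is roughly (a sketch with our own names):

    def needs_recount(force, needs_unread_recalc,
                      unread_count_updated, oldest_unread_story_date, cutoff):
        # Recount when forced, flagged, or when the cached counts predate
        # the account's unread window.
        return (force or needs_unread_recalc or
                unread_count_updated < cutoff or
                oldest_unread_story_date < cutoff)
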
@@ -463,10 +466,9 @@ class UserSubscription(models.Model):
     def calculate_feed_scores(self, silent=False, stories=None, force=False):
         # now = datetime.datetime.strptime("2009-07-06 22:30:03", "%Y-%m-%d %H:%M:%S")
         now = datetime.datetime.now()
-        UNREAD_CUTOFF = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
         oldest_unread_story_date = now
         
-        if self.user.profile.last_seen_on < UNREAD_CUTOFF and not force:
+        if self.user.profile.last_seen_on < self.user.profile.unread_cutoff and not force:
             # if not silent:
             #     logging.info(' ---> [%s] SKIPPING Computing scores: %s (1 week+)' % (self.user, self.feed))
             return self

@@ -484,7 +486,7 @@ class UserSubscription(models.Model):
         feed_scores = dict(negative=0, neutral=0, positive=0)
         
         # Two weeks in age. If mark_read_date is older, mark old stories as read.
-        date_delta = UNREAD_CUTOFF
+        date_delta = self.user.profile.unread_cutoff
         if date_delta < self.mark_read_date:
             date_delta = self.mark_read_date
         else:

@@ -494,7 +496,8 @@ class UserSubscription(models.Model):
         if not stories:
             stories = cache.get('S:%s' % self.feed_id)
         
-        unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True)
+        unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True,
+                                               cutoff_date=self.user.profile.unread_cutoff)
         
         if not stories:
             stories_db = MStory.objects(story_hash__in=unread_story_hashes)

@@ -551,7 +554,9 @@ class UserSubscription(models.Model):
                 else:
                     feed_scores['neutral'] += 1
         else:
-            unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True, withscores=True)
+            unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True,
+                                                   withscores=True,
+                                                   cutoff_date=self.user.profile.unread_cutoff)
             feed_scores['neutral'] = len(unread_story_hashes)
             if feed_scores['neutral']:
                 oldest_unread_story_date = datetime.datetime.fromtimestamp(unread_story_hashes[-1][1])

@@ -710,8 +715,8 @@ class RUserStory:
         def redis_commands(key):
             r.sadd(key, story_hash)
             # r2.sadd(key, story_hash)
-            r.expire(key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-            # r2.expire(key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            r.expire(key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+            # r2.expire(key, settings.DAYS_OF_STORY_HASHES*24*60*60)
         
         all_read_stories_key = 'RS:%s' % (user_id)
         redis_commands(all_read_stories_key)

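From this hunk down, the commit also renames the Redis TTL constant: keys holding read-story hashes now expire after DAYS_OF_STORY_HASHES days rather than the old DAYS_OF_UNREAD_NEW, decoupling hash retention from the unread window. The TTL arithmetic is easy to check by hand (a standalone sketch; the constant matches the settings hunk near the end of this diff):

    DAYS_OF_STORY_HASHES = 30

    # r.expire() takes seconds: 30 days * 24 h * 60 min * 60 s.
    STORY_HASH_TTL = DAYS_OF_STORY_HASHES * 24 * 60 * 60
    assert STORY_HASH_TTL == 2592000
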
@@ -766,14 +771,14 @@ class RUserStory:
         read_feed_key = "RS:%s:%s" % (user_id, new_feed_id)
         p.sadd(read_feed_key, new_story_hash)
         # p2.sadd(read_feed_key, new_story_hash)
-        p.expire(read_feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        # p2.expire(read_feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+        p.expire(read_feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        # p2.expire(read_feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
         
         read_user_key = "RS:%s" % (user_id)
         p.sadd(read_user_key, new_story_hash)
         # p2.sadd(read_user_key, new_story_hash)
-        p.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        # p2.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+        p.expire(read_user_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        # p2.expire(read_user_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
         
         p.execute()
         # p2.execute()

@@ -787,7 +792,7 @@ class RUserStory:
         # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         p = r.pipeline()
         # p2 = r2.pipeline()
-        UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_NEW)
+        UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
         
         usersubs = UserSubscription.objects.filter(feed_id=feed_id, last_read_date__gte=UNREAD_CUTOFF)
         logging.info(" ---> ~SB%s usersubs~SN to switch read story hashes..." % len(usersubs))

@@ -797,14 +802,14 @@ class RUserStory:
             if read:
                 p.sadd(rs_key, new_hash)
                 # p2.sadd(rs_key, new_hash)
-                p.expire(rs_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-                # p2.expire(rs_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+                p.expire(rs_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+                # p2.expire(rs_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
                 
                 read_user_key = "RS:%s" % sub.user.pk
                 p.sadd(read_user_key, new_hash)
                 # p2.sadd(read_user_key, new_hash)
-                p.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-                # p2.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+                p.expire(read_user_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
+                # p2.expire(read_user_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
         
         p.execute()
         # p2.execute()

@@ -548,7 +548,8 @@ def load_single_feed(request, feed_id):
             stories = []
             message = "You must be a premium subscriber to search."
     elif usersub and (read_filter == 'unread' or order == 'oldest'):
-        stories = usersub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit)
+        stories = usersub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit,
+                                      cutoff_date=user.profile.unread_cutoff)
     else:
         stories = feed.get_stories(offset, limit)
     

@@ -583,7 +584,8 @@ def load_single_feed(request, feed_id):
     unread_story_hashes = []
     if stories:
         if (read_filter == 'all' or query) and usersub:
-            unread_story_hashes = usersub.get_stories(read_filter='unread', limit=500, hashes_only=True)
+            unread_story_hashes = usersub.get_stories(read_filter='unread', limit=500, hashes_only=True,
+                                                      cutoff_date=user.profile.unread_cutoff)
         story_hashes = [story['story_hash'] for story in stories]
         starred_stories = MStarredStory.objects(user_id=user.pk,
                                                 story_feed_id=feed.pk,

@@ -875,7 +877,8 @@ def load_river_stories__redis(request):
             mstories = stories
             unread_feed_story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
                                                                      read_filter="unread", order=order,
-                                                                     group_by_feed=False)
+                                                                     group_by_feed=False,
+                                                                     cutoff_date=user.profile.unread_cutoff)
         else:
             stories = []
             message = "You must be a premium subscriber to search."

@@ -892,6 +895,7 @@ def load_river_stories__redis(request):
                 "order": order,
                 "read_filter": read_filter,
                 "usersubs": usersubs,
+                "cutoff_date": user.profile.unread_cutoff,
             }
             story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
         else:

@@ -1023,7 +1027,8 @@ def unread_story_hashes__old(request):
             continue
         unread_feed_story_hashes[feed_id] = us.get_stories(read_filter='unread', limit=500,
                                                            withscores=include_timestamps,
-                                                           hashes_only=True)
+                                                           hashes_only=True,
+                                                           cutoff_date=user.profile.unread_cutoff)
         story_hash_count += len(unread_feed_story_hashes[feed_id])
     
     logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %

@@ -1041,7 +1046,8 @@ def unread_story_hashes(request):
     
     story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
                                                  order=order, read_filter=read_filter,
-                                                 include_timestamps=include_timestamps)
+                                                 include_timestamps=include_timestamps,
+                                                 cutoff_date=user.profile.unread_cutoff)
     logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %
                  (len(feed_ids), len(story_hashes)))
     return dict(unread_feed_story_hashes=story_hashes)

@@ -1261,12 +1267,11 @@ def mark_story_as_unread(request):
         # these would be ignored.
         data = usersub.mark_story_ids_as_read(newer_stories, request=request)
     
-    UNREAD_CUTOFF = (datetime.datetime.utcnow() -
-                     datetime.timedelta(days=settings.DAYS_OF_UNREAD))
-    if story.story_date < UNREAD_CUTOFF:
+    if story.story_date < request.user.profile.unread_cutoff:
         data['code'] = -1
         data['message'] = "Story is more than %s days old, cannot mark as unread." % (
-                          settings.DAYS_OF_UNREAD)
+                          settings.DAYS_OF_UNREAD if request.user.profile.is_premium else
+                          settings.DAYS_OF_UNREAD_FREE)
     
     social_subs = MSocialSubscription.mark_dirty_sharing_story(user_id=request.user.pk,
                                                                story_feed_id=feed_id,

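mark_story_as_unread enforces the per-account window too, and its error message now reports the right day count for the account type. The selection reduces to (a sketch; the helper name is ours):

    DAYS_OF_UNREAD = 30
    DAYS_OF_UNREAD_FREE = 14

    def too_old_message(is_premium):
        days = DAYS_OF_UNREAD if is_premium else DAYS_OF_UNREAD_FREE
        return "Story is more than %s days old, cannot mark as unread." % days

    assert "14 days" in too_old_message(False)
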
@@ -101,7 +101,10 @@ class Feed(models.Model):
     
     @property
     def title(self):
-        return self.feed_title or "[Untitled]"
+        title = self.feed_title or "[Untitled]"
+        if self.active_premium_subscribers >= 1:
+            title = "%s*" % title[:29]
+        return title
     
     @property
     def permalink(self):

@@ -130,6 +133,13 @@ class Feed(models.Model):
     def s3_icons_key(self):
         return "%s.png" % self.pk
     
+    @property
+    def unread_cutoff(self):
+        if self.active_premium_subscribers > 0:
+            return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
+        
+        return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_FREE)
+    
     def canonical(self, full=False, include_favicon=True):
         feed = {
             'id': self.pk,

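Note the asymmetry with Profile.unread_cutoff: the feed-level property keys off active_premium_subscribers, so one active premium subscriber is enough for the whole feed to keep the longer 30-day window (the feed fetcher below leans on this when scoring stories). Side by side, the two rules are roughly (a sketch, with our own names):

    def profile_cutoff_days(is_premium):
        # Per-user rule: premium accounts get the long window.
        return 30 if is_premium else 14

    def feed_cutoff_days(active_premium_subscribers):
        # Per-feed rule: any active premium subscriber upgrades the feed.
        return 30 if active_premium_subscribers > 0 else 14
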
@@ -243,10 +253,10 @@ class Feed(models.Model):
         # if not r2:
         #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         
-        r.expire('F:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        # r2.expire('F:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        r.expire('zF:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        # r2.expire('zF:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+        r.expire('F:%s' % self.pk, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        # r2.expire('F:%s' % self.pk, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        r.expire('zF:%s' % self.pk, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        # r2.expire('zF:%s' % self.pk, settings.DAYS_OF_STORY_HASHES*24*60*60)
     
     @classmethod
     def autocomplete(self, prefix, limit=5):

@@ -1072,7 +1082,7 @@ class Feed(models.Model):
     @classmethod
     def trim_old_stories(cls, start=0, verbose=True, dryrun=False):
         now = datetime.datetime.now()
-        month_ago = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD_NEW)
+        month_ago = now - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
         feed_count = Feed.objects.latest('pk').pk
         total = 0
         for feed_id in xrange(start, feed_count):

@@ -1803,19 +1813,19 @@ class MStory(mongo.Document):
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
         # if not r2:
         #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
-        UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_NEW)
+        UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
         
         if self.id and self.story_date > UNREAD_CUTOFF:
             feed_key = 'F:%s' % self.story_feed_id
             r.sadd(feed_key, self.story_hash)
-            r.expire(feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            r.expire(feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
             # r2.sadd(feed_key, self.story_hash)
-            # r2.expire(feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            # r2.expire(feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
             
             r.zadd('z' + feed_key, self.story_hash, time.mktime(self.story_date.timetuple()))
-            r.expire('z' + feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            r.expire('z' + feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
             # r2.zadd('z' + feed_key, self.story_hash, time.mktime(self.story_date.timetuple()))
-            # r2.expire('z' + feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            # r2.expire('z' + feed_key, settings.DAYS_OF_STORY_HASHES*24*60*60)
     
     def remove_from_redis(self, r=None):
         if not r:

@@ -1832,7 +1842,7 @@ class MStory(mongo.Document):
     def sync_feed_redis(cls, story_feed_id):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
         # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
-        UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_NEW)
+        UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
         feed = Feed.get_by_id(story_feed_id)
         stories = cls.objects.filter(story_feed_id=story_feed_id, story_date__gte=UNREAD_CUTOFF)
         r.delete('F:%s' % story_feed_id)

@@ -832,15 +832,13 @@ class MSocialSubscription(mongo.Document):
         profiles = MSocialProfile.objects.filter(user_id__in=social_user_ids)
         profiles = dict((p.user_id, p) for p in profiles)
         
-        UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
-        
         for i, sub in enumerate(user_subs):
             # Count unreads if subscription is stale.
             if (sub.needs_unread_recalc or
                 (sub.unread_count_updated and
-                 sub.unread_count_updated < UNREAD_CUTOFF) or
+                 sub.unread_count_updated < user.profile.unread_cutoff) or
                 (sub.oldest_unread_story_date and
-                 sub.oldest_unread_story_date < UNREAD_CUTOFF)):
+                 sub.oldest_unread_story_date < user.profile.unread_cutoff)):
                 sub = sub.calculate_feed_scores(force=True, silent=True)
             
             feed_id = "social:%s" % sub.subscription_user_id

@@ -899,12 +897,14 @@ class MSocialSubscription(mongo.Document):
             byscorefunc = r.zrangebyscore
             min_score = mark_read_time
             max_score = current_time
-        else:
+        else: # newest
             byscorefunc = r.zrevrangebyscore
             min_score = current_time
             now = datetime.datetime.now()
-            unread_cutoff = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
-            max_score = int(time.mktime(unread_cutoff.timetuple()))-1000
+            unread_cutoff = cutoff_date
+            if not unread_cutoff:
+                unread_cutoff = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
+            max_score = int(time.mktime(unread_cutoff.timetuple()))-1
         story_ids = byscorefunc(unread_ranked_stories_key, min_score,
                                 max_score, start=offset, num=limit,
                                 withscores=withscores)

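A subtle tweak hides in this hunk: the slack below the cutoff shrinks from 1000 seconds to 1, so roughly seventeen minutes of stories older than the unread window appear to no longer leak into newest-ordered blurblog results. In epoch-second terms (a worked check, not NewsBlur code):

    import datetime
    import time

    cutoff = datetime.datetime(2013, 1, 1)
    ts = int(time.mktime(cutoff.timetuple()))
    old_bound = ts - 1000  # admitted ~17 extra minutes of older stories
    new_bound = ts - 1     # effectively an exclusive boundary at the cutoff
    print(ts - old_bound, ts - new_bound)
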
@@ -920,7 +920,9 @@ class MSocialSubscription(mongo.Document):
         return story_ids
     
     @classmethod
-    def feed_stories(cls, user_id, social_user_ids, offset=0, limit=6, order='newest', read_filter='all', relative_user_id=None, cache=True):
+    def feed_stories(cls, user_id, social_user_ids, offset=0, limit=6,
+                     order='newest', read_filter='all', relative_user_id=None, cache=True,
+                     cutoff_date=None):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
         
         if not relative_user_id:

@@ -954,7 +956,7 @@ class MSocialSubscription(mongo.Document):
                 us = cls.objects.get(user_id=relative_user_id, subscription_user_id=social_user_id)
                 story_hashes = us.get_stories(offset=0, limit=100,
                                               order=order, read_filter=read_filter,
-                                              withscores=True)
+                                              withscores=True, cutoff_date=cutoff_date)
                 if story_hashes:
                     r.zadd(ranked_stories_keys, **dict(story_hashes))
             

@@ -1062,7 +1064,7 @@ class MSocialSubscription(mongo.Document):
         return data
     
     def mark_feed_read(self, cutoff_date=None):
-        UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
+        user_profile = Profile.objects.get(user_id=self.user_id)
         recount = True
         
         if cutoff_date:

@@ -1070,7 +1072,7 @@ class MSocialSubscription(mongo.Document):
         else:
             # Use the latest story to get last read time.
             latest_shared_story = MSharedStory.objects(user_id=self.subscription_user_id,
-                                                       shared_date__gte=UNREAD_CUTOFF
+                                                       shared_date__gte=user_profile.unread_cutoff
                                                        ).order_by('shared_date').only('shared_date').first()
             if latest_shared_story:
                 cutoff_date = latest_shared_story['shared_date'] + datetime.timedelta(seconds=1)

@@ -1102,10 +1104,9 @@ class MSocialSubscription(mongo.Document):
             return self
         
         now = datetime.datetime.now()
-        UNREAD_CUTOFF = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
-        user = User.objects.get(pk=self.user_id)
+        user_profile = Profile.objects.get(user_id=self.user_id)
         
-        if user.profile.last_seen_on < UNREAD_CUTOFF:
+        if user_profile.last_seen_on < user_profile.unread_cutoff:
             # if not silent:
             #     logging.info(' ---> [%s] SKIPPING Computing scores: %s (1 week+)' % (self.user, self.feed))
             return self

@@ -1113,13 +1114,14 @@ class MSocialSubscription(mongo.Document):
         feed_scores = dict(negative=0, neutral=0, positive=0)
         
         # Two weeks in age. If mark_read_date is older, mark old stories as read.
-        date_delta = UNREAD_CUTOFF
+        date_delta = user_profile.unread_cutoff
         if date_delta < self.mark_read_date:
             date_delta = self.mark_read_date
         else:
             self.mark_read_date = date_delta
         
-        unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True)
+        unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True,
+                                               cutoff_date=user_profile.unread_cutoff)
         stories_db = MSharedStory.objects(user_id=self.subscription_user_id,
                                           story_hash__in=unread_story_hashes)
         story_feed_ids = set()

@@ -1199,7 +1201,7 @@ class MSocialSubscription(mongo.Document):
             self.mark_feed_read()
         
         if not silent:
-            logging.info(' ---> [%s] Computing social scores: %s (%s/%s/%s)' % (user.username, self.subscription_user_id, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive']))
+            logging.info(' ---> [%s] Computing social scores: %s (%s/%s/%s)' % (user_profile, self.subscription_user_id, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive']))
         
         return self
     

@@ -1601,10 +1603,10 @@ class MSharedStory(mongo.Document):
                 time.mktime(self.shared_date.timetuple()))
         # r2.zadd('zB:%s' % self.user_id, self.feed_guid_hash,
         #         time.mktime(self.shared_date.timetuple()))
-        r.expire('B:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        # r2.expire('B:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        r.expire('zB:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        # r2.expire('zB:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+        r.expire('B:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        # r2.expire('B:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        r.expire('zB:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60)
+        # r2.expire('zB:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60)
     
     def remove_from_redis(self):
         r = redis.Redis(connection_pool=settings.REDIS_POOL)

@@ -51,7 +51,6 @@ def load_social_stories(request, user_id, username=None):
     
     if page: offset = limit * (int(page) - 1)
     now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
-    UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
     
     social_profile = MSocialProfile.get_user(social_user.pk)
     try:

@@ -68,7 +67,7 @@ def load_social_stories(request, user_id, username=None):
             stories = []
             message = "You must be a premium subscriber to search."
     elif socialsub and (read_filter == 'unread' or order == 'oldest'):
-        story_hashes = socialsub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit)
+        story_hashes = socialsub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit, cutoff_date=user.profile.unread_cutoff)
         story_date_order = "%sshared_date" % ('' if order == 'oldest' else '-')
         if story_hashes:
             mstories = MSharedStory.objects(user_id=social_user.pk,

@@ -89,7 +88,7 @@ def load_social_stories(request, user_id, username=None):
         unsub_feed_ids = list(set(story_feed_ids).difference(set(usersubs_map.keys())))
         unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids)
         unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds]
-        date_delta = UNREAD_CUTOFF
+        date_delta = user.profile.unread_cutoff
         if socialsub and date_delta < socialsub.mark_read_date:
             date_delta = socialsub.mark_read_date
     

@@ -106,7 +105,7 @@ def load_social_stories(request, user_id, username=None):
     
     unread_story_hashes = []
     if (read_filter == 'all' or query) and socialsub:
-        unread_story_hashes = socialsub.get_stories(read_filter='unread', limit=500)
+        unread_story_hashes = socialsub.get_stories(read_filter='unread', limit=500, cutoff_date=user.profile.unread_cutoff)
     story_hashes = [story['story_hash'] for story in stories]
     
     starred_stories = MStarredStory.objects(user_id=user.pk,

@@ -188,7 +187,6 @@ def load_river_blurblog(request):
     relative_user_id = request.REQUEST.get('relative_user_id', None)
     global_feed = request.REQUEST.get('global_feed', None)
     now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
-    UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
     
     if global_feed:
         global_user = User.objects.get(username='popular')

@@ -208,7 +206,8 @@ def load_river_blurblog(request):
                                                             user.pk, social_user_ids,
                                                             offset=offset, limit=limit,
                                                             order=order, read_filter=read_filter,
-                                                            relative_user_id=relative_user_id)
+                                                            relative_user_id=relative_user_id,
+                                                            cutoff_date=user.profile.unread_cutoff)
     mstories = MStory.find_by_story_hashes(story_hashes)
     story_hashes_to_dates = dict(zip(story_hashes, story_dates))
     def sort_stories_by_hash(a, b):

@@ -296,7 +295,7 @@ def load_river_blurblog(request):
                                                  user.profile.timezone)
             story['shared_date'] = format_story_link_date__long(shared_date, now)
             story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
-            if (shared_stories[story['story_hash']]['shared_date'] < UNREAD_CUTOFF or
+            if (shared_stories[story['story_hash']]['shared_date'] < user.profile.unread_cutoff or
                 story['story_hash'] in read_feed_story_hashes):
                 story['read_status'] = 1
         

@@ -367,7 +366,8 @@ def load_social_page(request, user_id, username=None, **kwargs):
                                                       offset=offset, limit=limit+1,
                                                       # order=order, read_filter=read_filter,
                                                       relative_user_id=relative_user_id,
-                                                      cache=request.user.is_authenticated())
+                                                      cache=request.user.is_authenticated(),
+                                                      cutoff_date=user.profile.unread_cutoff)
     if len(story_ids) > limit:
         has_next_page = True
         story_ids = story_ids[:-1]

@@ -187,7 +187,11 @@ LOGGING = {
 # ==========================
 
 DAYS_OF_UNREAD = 30
 DAYS_OF_UNREAD_NEW = 30
+DAYS_OF_UNREAD_FREE = 14
+# DoSH can be more, since you can up this value by N, and after N days,
+# you can then up the DAYS_OF_UNREAD value with no impact.
+DAYS_OF_STORY_HASHES = 30
 
 SUBSCRIBER_EXPIRE = 2
 
 AUTH_PROFILE_MODULE = 'newsblur.UserProfile'

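The comment on DAYS_OF_STORY_HASHES spells out the migration invariant: hash retention must cover the unread window, so you raise DAYS_OF_STORY_HASHES first, wait N days, and only then raise DAYS_OF_UNREAD. A tiny sanity check in the spirit of that comment (ours, not part of the commit):

    DAYS_OF_UNREAD = 30
    DAYS_OF_UNREAD_FREE = 14
    DAYS_OF_STORY_HASHES = 30

    # Unread windows must never outlive the story hashes that back them.
    assert DAYS_OF_STORY_HASHES >= DAYS_OF_UNREAD >= DAYS_OF_UNREAD_FREE
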
|
|||
|
||||
ret_values = dict(new=0, updated=0, same=0, error=0)
|
||||
|
||||
# logging.debug(u' ---> [%d] Processing %s' % (self.feed.id, self.feed.feed_title))
|
||||
|
||||
if hasattr(self.fpf, 'status'):
|
||||
if self.options['verbose']:
|
||||
if self.fpf.bozo and self.fpf.status != 304:
|
||||
|
@@ -507,7 +505,7 @@ class Dispatcher:
                 self.publish_to_subscribers(feed)
             
             done_msg = (u'%2s ---> [%-30s] ~FYProcessed in ~FM~SB%.4ss~FY~SN (~FB%s~FY) [%s]' % (
-                identity, feed.feed_title[:30], delta,
+                identity, feed.title[:30], delta,
                 feed.pk, self.feed_trans[ret_feed],))
             logging.debug(done_msg)
             total_duration = time.time() - start_duration

@@ -533,10 +531,9 @@ class Dispatcher:
                 logging.debug("   ***> [%-30s] ~BMRedis is unavailable for real-time." % (feed.title[:30],))
     
     def count_unreads_for_subscribers(self, feed):
-        UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
         user_subs = UserSubscription.objects.filter(feed=feed,
                                                     active=True,
-                                                    user__profile__last_seen_on__gte=UNREAD_CUTOFF)\
+                                                    user__profile__last_seen_on__gte=feed.unread_cutoff)\
                                             .order_by('-last_read_date')
         
         if not user_subs.count():

@@ -550,9 +547,9 @@ class Dispatcher:
         if self.options['compute_scores']:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
             stories = MStory.objects(story_feed_id=feed.pk,
-                                     story_date__gte=UNREAD_CUTOFF)
+                                     story_date__gte=feed.unread_cutoff)
             stories = Feed.format_stories(stories, feed.pk)
-            story_hashes = r.zrangebyscore('zF:%s' % feed.pk, int(UNREAD_CUTOFF.strftime('%s')),
+            story_hashes = r.zrangebyscore('zF:%s' % feed.pk, int(feed.unread_cutoff.strftime('%s')),
                                            int(time.time() + 60*60*24))
             missing_story_hashes = set(story_hashes) - set([s['story_hash'] for s in stories])
             if missing_story_hashes:

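Because the fetcher works per feed rather than per user, it uses the feed-level cutoff: only subscribers seen since feed.unread_cutoff are recounted, and only stories newer than that cutoff are pulled for scoring. The epoch conversion for the zrangebyscore call relies on strftime('%s'), a glibc extension the code above already uses; a sketch:

    import datetime
    import time

    def score_range(unread_cutoff):
        # Same epoch-seconds window as the zrangebyscore call above.
        return int(unread_cutoff.strftime('%s')), int(time.time() + 60*60*24)

    lo, hi = score_range(datetime.datetime.utcnow() - datetime.timedelta(days=30))
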
@@ -80,7 +80,7 @@ def colorize(msg):
         r'\*\*\*>' : '~FB~SB~BB--->~BT~FW',
         r'\[' : '~SB~FB[~SN~FM',
         r'AnonymousUser' : '~FBAnonymousUser',
-        r'\*\]' : '~SN~FR*]',
+        r'\*(\s*)~FB~SB\]' : r'~SN~FR*\1~FB~SB]',
         r'\]' : '~FB~SB]~FW~SN',
     }
     colors = {