diff --git a/apps/reader/models.py b/apps/reader/models.py
index c73249ade..3d8f6c72b 100644
--- a/apps/reader/models.py
+++ b/apps/reader/models.py
@@ -188,7 +188,8 @@ class UserSubscription(models.Model):
             story_hashes = us.get_stories(offset=0, limit=200,
                                           order=order, read_filter=read_filter,
                                           withscores=True)
-            unread_feed_story_hashes[feed_id] = us.get_stories(read_filter='unread', limit=500, fetch_stories=False)
+            unread_feed_story_hashes[feed_id] = us.get_stories(read_filter='unread', limit=500,
+                                                               fetch_stories=False)
             if story_hashes:
                 r.zadd(ranked_stories_keys, **dict(story_hashes))
 
diff --git a/apps/social/models.py b/apps/social/models.py
index 003ec7811..b929cee01 100644
--- a/apps/social/models.py
+++ b/apps/social/models.py
@@ -18,6 +18,7 @@ from django.core.urlresolvers import reverse
 from django.template.loader import render_to_string
 from django.template.defaultfilters import slugify
 from django.core.mail import EmailMultiAlternatives
+from django.core.cache import cache
 from apps.reader.models import UserSubscription, MUserStory
 from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle
 from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags
@@ -791,7 +792,7 @@ class MSocialSubscription(mongo.Document):
         }
 
     def get_stories(self, offset=0, limit=6, order='newest', read_filter='all',
-                    withscores=False, everything_unread=False):
+                    withscores=False, everything_unread=False, fetch_stories=True):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
         ignore_user_stories = False
 
@@ -808,7 +809,8 @@ class MSocialSubscription(mongo.Document):
             r.sdiffstore(unread_stories_key, stories_key, read_stories_key)
 
         sorted_stories_key = 'zB:%s' % (self.subscription_user_id)
-        unread_ranked_stories_key = 'zUB:%s:%s' % (self.user_id, self.subscription_user_id)
+        unread_ranked_stories_key = 'z%sUB:%s:%s' % ('f' if fetch_stories else '',
+                                                     self.user_id, self.subscription_user_id)
         r.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key])
 
         current_time = int(time.time() + 60*60*24)
@@ -849,31 +851,40 @@ class MSocialSubscription(mongo.Document):
         if not isinstance(social_user_ids, list):
             social_user_ids = [social_user_ids]
 
-        unread_ranked_stories_keys = 'zU:%s:social' % (user_id)
-        if offset and r.exists(unread_ranked_stories_keys):
-            story_hashes = range_func(unread_ranked_stories_keys, offset, limit, withscores=True)
+        ranked_stories_keys = 'zU:%s:social' % (user_id)
+        unread_ranked_stories_keys = 'zfU:%s:social' % (user_id)
+        unread_story_hashes = cache.get(unread_ranked_stories_keys)
+        if offset and r.exists(ranked_stories_keys) and unread_story_hashes:
+            story_hashes = range_func(ranked_stories_keys, offset, limit, withscores=True)
             if story_hashes:
-                return zip(*story_hashes)
+                story_hashes, story_dates = zip(*story_hashes)
+                return story_hashes, story_dates, unread_story_hashes
             else:
-                return [], []
+                return [], [], {}
         else:
-            r.delete(unread_ranked_stories_keys)
-
+            r.delete(ranked_stories_keys)
+            cache.delete(unread_ranked_stories_keys)
+
+        unread_feed_story_hashes = {}
         for social_user_id in social_user_ids:
            us = cls.objects.get(user_id=relative_user_id, subscription_user_id=social_user_id)
            story_hashes = us.get_stories(offset=0, limit=100,
                                          order=order, read_filter=read_filter,
                                          withscores=True, everything_unread=everything_unread)
+           unread_feed_story_hashes[social_user_id] = us.get_stories(read_filter='unread', limit=500,
+                                                                     fetch_stories=False)
            if story_hashes:
-               r.zadd(unread_ranked_stories_keys, **dict(story_hashes))
+               r.zadd(ranked_stories_keys, **dict(story_hashes))
 
-        story_hashes = range_func(unread_ranked_stories_keys, offset, limit, withscores=True)
-        r.expire(unread_ranked_stories_keys, 24*60*60)
-
+        story_hashes = range_func(ranked_stories_keys, offset, limit, withscores=True)
+        r.expire(ranked_stories_keys, 24*60*60)
+        cache.set(unread_ranked_stories_keys, unread_feed_story_hashes, 24*60*60)
+
         if story_hashes:
-            return zip(*story_hashes)
+            story_hashes, story_dates = zip(*story_hashes)
+            return story_hashes, story_dates, unread_feed_story_hashes
         else:
-            return [], []
+            return [], [], {}
 
     def mark_story_ids_as_read(self, story_ids, feed_id=None, mark_all_read=False, request=None):
         data = dict(code=0, payload=story_ids)
diff --git a/apps/social/views.py b/apps/social/views.py
index ea6888863..a9d51d46f 100644
--- a/apps/social/views.py
+++ b/apps/social/views.py
@@ -177,7 +177,6 @@ def load_river_blurblog(request):
     relative_user_id = request.REQUEST.get('relative_user_id', None)
     global_feed = request.REQUEST.get('global_feed', None)
     now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
-    UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
 
     if global_feed:
         global_user = User.objects.get(username='popular')
@@ -193,11 +192,12 @@ def load_river_blurblog(request):
     offset = (page-1) * limit
     limit = page * limit - 1
 
-    story_hashes, story_dates = MSocialSubscription.feed_stories(user.pk, social_user_ids,
-                                                                 offset=offset, limit=limit,
-                                                                 order=order, read_filter=read_filter,
-                                                                 relative_user_id=relative_user_id,
-                                                                 everything_unread=global_feed)
+    story_hashes, story_dates, unread_feed_story_hashes = MSocialSubscription.feed_stories(
+        user.pk, social_user_ids,
+        offset=offset, limit=limit,
+        order=order, read_filter=read_filter,
+        relative_user_id=relative_user_id,
+        everything_unread=global_feed)
     mstories = MStory.find_by_story_hashes(story_hashes)
     story_hashes_to_dates = dict(zip(story_hashes, story_dates))
     def sort_stories_by_hash(a, b):
@@ -222,7 +222,6 @@ def load_river_blurblog(request):
         unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids)
         unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds]
-
     # Find starred stories
     if story_feed_ids:
         story_hashes = [story['story_hash'] for story in stories]
         starred_stories = MStarredStory.objects(
@@ -236,17 +235,10 @@ def load_river_blurblog(request):
                                              .only('story_hash', 'shared_date', 'comments')
         shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
                                                        comments=story.comments))
-                               for story in shared_stories])
-
-        userstories_db = MUserStory.objects(user_id=user.pk,
-                                            feed_id__in=story_feed_ids,
-                                            story_id__in=story_ids).only('story_id')
-        userstories = set(us.story_id for us in userstories_db)
-
+                               for story in shared_stories])
     else:
         starred_stories = {}
         shared_stories = {}
-        userstories = []
 
     # Intelligence classifiers for all feeds involved
     if story_feed_ids:
@@ -268,12 +260,12 @@ def load_river_blurblog(request):
 
     # Just need to format stories
     for story in stories:
-        if story['id'] in userstories:
-            story['read_status'] = 1
-        elif story['story_date'] < UNREAD_CUTOFF:
-            story['read_status'] = 1
-        else:
-            story['read_status'] = 0
+        story['read_status'] = 1
+        print unread_feed_story_hashes
+        for social_user_id in unread_feed_story_hashes.keys():
+            if story['story_hash'] in unread_feed_story_hashes[social_user_id]:
+                story['read_status'] = 0
+                break
         story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
         story['short_parsed_date'] = format_story_link_date__short(story_date, now)
         story['long_parsed_date'] = format_story_link_date__long(story_date, now)
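
Note on the new return value (a minimal sketch, not part of the diff): MSocialSubscription.feed_stories() now returns a third value, a dict mapping each followed social_user_id to that user's unread story hashes, cached for 24 hours, and load_river_blurblog() uses it to set read_status without querying MUserStory. The sketch below mirrors the read_status loop added above; the helper name mark_read_status and the sample data are hypothetical illustrations only.

    # Hypothetical helper mirroring the loop added to load_river_blurblog():
    # a story counts as unread if any followed user's unread set contains its hash.
    def mark_read_status(stories, unread_feed_story_hashes):
        for story in stories:
            story['read_status'] = 1
            for social_user_id in unread_feed_story_hashes:
                if story['story_hash'] in unread_feed_story_hashes[social_user_id]:
                    story['read_status'] = 0
                    break
        return stories

    # Example with made-up data: user 12 has not yet read the first story.
    stories = [{'story_hash': '42:deadbeef'}, {'story_hash': '42:cafebabe'}]
    unread = {12: ['42:deadbeef'], 99: []}
    mark_read_status(stories, unread)
    # -> stories[0]['read_status'] == 0, stories[1]['read_status'] == 1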