Only reload dashboard stories when new stories arrive, not when stories are merely marked as read. Also sync read/unread story state through pubsub.

This commit is contained in:
Samuel Clay 2016-12-05 17:10:47 -08:00
parent a68713ece1
commit 10675090ef
5 changed files with 66 additions and 22 deletions

View file

@ -2,13 +2,12 @@ import datetime
import time
import re
import redis
from collections import defaultdict
from operator import itemgetter
from pprint import pprint
from utils import log as logging
from utils import json_functions as json
from django.db import models, IntegrityError
from django.db.models import Q, F
from django.db.models import Q
from django.db.models import Count
from django.conf import settings
from django.contrib.auth.models import User
@ -125,7 +124,6 @@ class UserSubscription(models.Model):
current_time = int(time.time() + 60*60*24)
if not cutoff_date:
cutoff_date = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
unread_timestamp = int(time.mktime(cutoff_date.timetuple()))-1000
feed_counter = 0
read_dates = dict()
@ -644,6 +642,7 @@ class UserSubscription(models.Model):
def mark_story_ids_as_read(self, story_hashes, request=None, aggregated=False):
data = dict(code=0, payload=story_hashes)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
if not request:
request = self.user
@ -660,6 +659,9 @@ class UserSubscription(models.Model):
for story_hash in set(story_hashes):
RUserStory.mark_read(self.user_id, self.feed_id, story_hash, aggregated=aggregated)
r.publish(self.user.username, 'story:read:%s' % story_hash)
r.publish(self.user.username, 'feed:%s' % self.feed_id)
return data
@ -936,13 +938,12 @@ class UserSubscription(models.Model):
results_queued = p.execute()
except:
results_queued = map(lambda x: False, range(len(feed_ids)))
safety_net = []
for f, feed_id in enumerate(feed_ids):
scheduled_updates = results[f*2]
error_feeds = results[f*2+1]
queued_feeds = results[f]
queued_feeds = results_queued[f]
if not scheduled_updates and not queued_feeds and not error_feeds:
safety_net.append(feed_id)
@ -997,11 +998,14 @@ class UserSubscription(models.Model):
class RUserStory:
@classmethod
def mark_story_hashes_read(cls, user_id, story_hashes, r=None, s=None):
def mark_story_hashes_read(cls, user_id, story_hashes, username=None, r=None, s=None):
if not r:
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
if not s:
s = redis.Redis(connection_pool=settings.REDIS_POOL)
ps = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
if not username:
username = User.objects.get(pk=user_id).username
# if not r2:
# r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
@ -1027,7 +1031,7 @@ class RUserStory:
share_key = "S:%s" % (story_hash)
friends_with_shares = [int(f) for f in s.sinter(share_key, friend_key)]
friend_ids.update(friends_with_shares)
cls.mark_read(user_id, feed_id, story_hash, social_user_ids=friends_with_shares, r=p)
cls.mark_read(user_id, feed_id, story_hash, social_user_ids=friends_with_shares, r=p, username=username, ps=ps)
p.execute()
# p2.execute()
@ -1035,11 +1039,13 @@ class RUserStory:
return list(feed_ids), list(friend_ids)
@classmethod
def mark_story_hash_unread(cls, user_id, story_hash, r=None, s=None):
def mark_story_hash_unread(cls, user, story_hash, r=None, s=None, ps=None):
if not r:
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
if not s:
s = redis.Redis(connection_pool=settings.REDIS_POOL)
if not ps:
ps = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
# if not r2:
# r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
@ -1047,11 +1053,12 @@ class RUserStory:
feed_id, _ = MStory.split_story_hash(story_hash)
# Find other social feeds with this story to update their counts
friend_key = "F:%s:F" % (user_id)
friend_key = "F:%s:F" % (user.pk)
share_key = "S:%s" % (story_hash)
friends_with_shares = [int(f) for f in s.sinter(share_key, friend_key)]
friend_ids.update(friends_with_shares)
cls.mark_unread(user_id, feed_id, story_hash, social_user_ids=friends_with_shares, r=r)
cls.mark_unread(user.pk, feed_id, story_hash, social_user_ids=friends_with_shares, r=r,
username=user.username, ps=ps)
return feed_id, list(friend_ids)
@ -1070,7 +1077,7 @@ class RUserStory:
@classmethod
def mark_read(cls, user_id, story_feed_id, story_hash, social_user_ids=None,
aggregated=False, r=None):
aggregated=False, r=None, username=None, ps=None):
if not r:
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
# if not r2:
@ -1092,6 +1099,9 @@ class RUserStory:
read_story_key = 'RS:%s:%s' % (user_id, story_feed_id)
redis_commands(read_story_key)
if ps and username:
ps.publish(username, 'story:read:%s' % story_hash)
if social_user_ids:
for social_user_id in social_user_ids:
social_read_story_key = 'RS:%s:B:%s' % (user_id, social_user_id)
@ -1119,7 +1129,7 @@ class RUserStory:
return message
@staticmethod
def mark_unread(user_id, story_feed_id, story_hash, social_user_ids=None, r=None):
def mark_unread(user_id, story_feed_id, story_hash, social_user_ids=None, r=None, username=None, ps=None):
if not r:
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
# r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
@ -1143,6 +1153,9 @@ class RUserStory:
read_stories_list_key = 'lRS:%s' % user_id
r.lrem(read_stories_list_key, story_hash)
if ps and username:
ps.publish(username, 'story:unread:%s' % story_hash)
if social_user_ids:
for social_user_id in social_user_ids:
social_read_story_key = 'RS:%s:B:%s' % (user_id, social_user_id)

View file

@ -1524,9 +1524,6 @@ def mark_story_as_read(request):
else:
data = dict(code=-1, errors=["User is not subscribed to this feed."])
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'feed:%s' % feed_id)
return data
@ajax_login_required
@ -1538,7 +1535,7 @@ def mark_story_hashes_as_read(request):
except UnreadablePostError:
return dict(code=-1, message="Missing `story_hash` list parameter.")
feed_ids, friend_ids = RUserStory.mark_story_hashes_read(request.user.pk, story_hashes)
feed_ids, friend_ids = RUserStory.mark_story_hashes_read(request.user.pk, story_hashes, username=request.user.username)
if friend_ids:
socialsubs = MSocialSubscription.objects.filter(
@ -1684,7 +1681,7 @@ def mark_story_as_unread(request):
story_guid_hash=story.guid_hash)
dirty_count = social_subs and social_subs.count()
dirty_count = ("(%s social_subs)" % dirty_count) if dirty_count else ""
RUserStory.mark_story_hash_unread(user_id=request.user.pk, story_hash=story.story_hash)
RUserStory.mark_story_hash_unread(request.user, story_hash=story.story_hash)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'feed:%s' % feed_id)
@ -1719,7 +1716,7 @@ def mark_story_hash_as_unread(request):
data = usersub.invert_read_stories_after_unread_story(story, request)
r.publish(request.user.username, 'feed:%s' % feed_id)
feed_id, friend_ids = RUserStory.mark_story_hash_unread(request.user.pk, story_hash)
feed_id, friend_ids = RUserStory.mark_story_hash_unread(request.user, story_hash)
if friend_ids:
socialsubs = MSocialSubscription.objects.filter(

View file

@ -2094,6 +2094,10 @@ a img {
.NB-empty .NB-end-line {
display: none;
}
.NB-module-river .NB-end-line {
display: none;
}
/* ============================= */
/* = Story Detail in List View = */
/* ============================= */

View file

@ -310,6 +310,18 @@ NEWSBLUR.Collections.Stories = Backbone.Collection.extend({
}
},
mark_read_pubsub: function(story_hash) {
var story = this.get_by_story_hash(story_hash);
if (!story) return;
story.set('read_status', 1);
},
mark_unread_pubsub: function(story_hash) {
var story = this.get_by_story_hash(story_hash);
if (!story) return;
story.set('read_status', 0);
},
mark_unread: function(story, options) {
options = options || {};
NEWSBLUR.assets.mark_story_as_unread(story.id, story.get('story_feed_id'), _.bind(function(read) {

View file

@ -4731,11 +4731,17 @@
this.socket.removeAllListeners('feed:update');
this.socket.on('feed:update', _.bind(function(feed_id, message) {
NEWSBLUR.log(['Real-time feed update', feed_id, message]);
this.feed_unread_count(feed_id, {realtime: true});
this.feed_unread_count(feed_id, {
realtime: true,
new_story: true
});
}, this));
this.socket.removeAllListeners(NEWSBLUR.Globals.username);
this.socket.removeAllListeners("user:update");
this.socket.on('user:update', _.bind(function(username, message) {
NEWSBLUR.log(['Real-time user update', username, message]);
if (this.flags.social_view) return;
if (_.string.startsWith(message, 'feed:')) {
feed_id = parseInt(message.replace('feed:', ''), 10);
@ -4745,12 +4751,22 @@
}
if (feed_id != this.active_feed &&
!_.contains(active_feed_ids, feed_id)) {
NEWSBLUR.log(['Real-time user update', username, feed_id]);
NEWSBLUR.log(['Real-time user update for feed', username, feed_id]);
this.feed_unread_count(feed_id);
}
} else if (_.string.startsWith(message, 'story:read')) {
NEWSBLUR.log(['Real-time user update for read story', username, message]);
var story_hash = message.replace('story:read:', '');
NEWSBLUR.assets.stories.mark_read_pubsub(story_hash);
NEWSBLUR.assets.dashboard_stories.mark_read_pubsub(story_hash);
} else if (_.string.startsWith(message, 'story:unread')) {
NEWSBLUR.log(['Real-time user update for unread story', username, message]);
var story_hash = message.replace('story:unread:', '');
NEWSBLUR.assets.stories.mark_unread_pubsub(story_hash);
NEWSBLUR.assets.dashboard_stories.mark_unread_pubsub(story_hash);
} else if (_.string.startsWith(message, 'social:')) {
if (message != this.active_feed) {
NEWSBLUR.log(['Real-time user update', username, message]);
NEWSBLUR.log(['Real-time user update for social', username, message]);
this.feed_unread_count(message);
}
} else if (message == "interaction:new") {
@ -4931,7 +4947,9 @@
_.delay(_.bind(function() {
this.model.feed_unread_count(feed_id, options.callback);
NEWSBLUR.app.dashboard_river.load_stories();
if (options.new_story) {
NEWSBLUR.app.dashboard_river.load_stories();
}
}, this), Math.random() * delay);
},