Merge branch 'story_hash_2' into circular

* story_hash_2:
  Another bug found via @kyleniemeyer's story: a - b was supposed to be b - a.
  Fixing Global shared stories by properly checking for missing story hashes.
  Using an index in the MSharedStory search.
  Using new story_hashes instead of story object ids.

Conflicts:
	apps/social/models.py
Samuel Clay 2013-05-01 13:47:57 -07:00
commit b6e84c1a9a
5 changed files with 64 additions and 44 deletions
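
Before the diffs, a brief sketch (not part of the commit) of the two ideas named in the commit message: stories are now keyed by a story_hash that prefixes the feed id, i.e. "<feed_id>:<short hash>" (the MStory hunk below splits on ':' to recover the feed id), and the date comparator's operands had been swapped, so the buggy a - b (oldest first) becomes b - a (newest first). The hash values and timestamps below are hypothetical; the comparator mirrors sort_stories_by_hash in the last hunk.

    # Hypothetical story hashes ("<feed_id>:<short hash>") with UNIX-timestamp scores,
    # mirroring what the redis sorted sets in this commit store per member.
    story_hashes = ['42:6bf810', '42:a91c2e', '99:03d7f1']
    story_dates = [1367439000, 1367442600, 1367431200]
    hash_to_date = dict(zip(story_hashes, story_dates))

    def sort_newest_first(a, b):
        # b - a sorts newest first; the earlier a - b sorted oldest first by mistake.
        return int(hash_to_date[b]) - int(hash_to_date[a])

    newest_first = sorted(story_hashes, cmp=sort_newest_first)  # Python 2 cmp sort, as in the diff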

View file

@@ -112,9 +112,9 @@ class UserSubscription(models.Model):
hashpipe.execute()
def get_stories(self, offset=0, limit=6, order='newest', read_filter='all', withscores=False):
- r = redis.Redis(connection_pool=settings.REDIS_STORY_POOL)
+ r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
ignore_user_stories = False
stories_key = 'F:%s' % (self.feed_id)
read_stories_key = 'RS:%s:%s' % (self.user_id, self.feed_id)
unread_stories_key = 'U:%s:%s' % (self.user_id, self.feed_id)
@@ -169,14 +169,11 @@ class UserSubscription(models.Model):
if not ignore_user_stories:
r.delete(unread_stories_key)
- # XXX TODO: Remove below line after combing redis for these None's.
- story_ids = [s for s in story_ids if s and s != 'None'] # ugh, hack
if withscores:
return story_ids
elif story_ids:
story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-')
- mstories = MStory.objects(id__in=story_ids).order_by(story_date_order)
+ mstories = MStory.objects(story_hash__in=story_ids).order_by(story_date_order)
stories = Feed.format_stories(mstories)
return stories
else:
@@ -184,7 +181,7 @@ class UserSubscription(models.Model):
@classmethod
def feed_stories(cls, user_id, feed_ids, offset=0, limit=6, order='newest', read_filter='all'):
- r = redis.Redis(connection_pool=settings.REDIS_STORY_POOL)
+ r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
if order == 'oldest':
range_func = r.zrange
@@ -196,8 +193,8 @@ class UserSubscription(models.Model):
unread_ranked_stories_keys = 'zU:%s:feeds' % (user_id)
if offset and r.exists(unread_ranked_stories_keys):
- story_guids = range_func(unread_ranked_stories_keys, offset, limit)
- return story_guids
+ story_hashes = range_func(unread_ranked_stories_keys, offset, limit)
+ return story_hashes
else:
r.delete(unread_ranked_stories_keys)
@@ -206,17 +203,17 @@ class UserSubscription(models.Model):
us = cls.objects.get(user=user_id, feed=feed_id)
except cls.DoesNotExist:
continue
- story_guids = us.get_stories(offset=0, limit=200,
- order=order, read_filter=read_filter,
- withscores=True)
+ story_hashes = us.get_stories(offset=0, limit=200,
+ order=order, read_filter=read_filter,
+ withscores=True)
- if story_guids:
- r.zadd(unread_ranked_stories_keys, **dict(story_guids))
+ if story_hashes:
+ r.zadd(unread_ranked_stories_keys, **dict(story_hashes))
- story_guids = range_func(unread_ranked_stories_keys, offset, limit)
+ story_hashes = range_func(unread_ranked_stories_keys, offset, limit)
r.expire(unread_ranked_stories_keys, 24*60*60)
- return story_guids
+ return story_hashes
@classmethod
def add_subscription(cls, user, feed_address, folder=None, bookmarklet=False, auto_active=True,

View file

@@ -769,10 +769,10 @@ def load_river_stories__redis(request):
offset = (page-1) * limit
limit = page * limit - 1
- story_ids = UserSubscription.feed_stories(user.pk, feed_ids, offset=offset, limit=limit,
- order=order, read_filter=read_filter)
+ story_hashes = UserSubscription.feed_stories(user.pk, feed_ids, offset=offset, limit=limit,
+ order=order, read_filter=read_filter)
story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-')
- mstories = MStory.objects(id__in=story_ids).order_by(story_date_order)
+ mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
stories = Feed.format_stories(mstories)
found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)

View file

@@ -1129,6 +1129,7 @@ class Feed(models.Model):
story_content = story_db.story_content_z and zlib.decompress(story_db.story_content_z) or ''
story = {}
+ story['story_hash'] = getattr(story_db, 'story_hash', None)
story['story_tags'] = story_db.story_tags or []
story['story_date'] = story_db.story_date.replace(tzinfo=None)
story['story_authors'] = story_db.story_author_name
@@ -1625,7 +1626,27 @@ class MStory(mongo.Document):
if len(stories) < count:
shared_stories = list(MSharedStory.objects(id__in=story_ids))
stories.extend(shared_stories)
print stories, multiple, story_ids, type(story_ids)
if not multiple:
stories = stories[0]
return stories
+ @classmethod
+ def find_by_story_hashes(cls, story_hashes):
+ from apps.social.models import MSharedStory
+ count = len(story_hashes)
+ multiple = isinstance(story_hashes, list) or isinstance(story_hashes, tuple)
+ stories = list(cls.objects(story_hash__in=story_hashes))
+ if len(stories) < count:
+ hashes_found = [s.story_hash for s in stories]
+ remaining_hashes = list(set(story_hashes) - set(hashes_found))
+ story_feed_ids = [h.split(':')[0] for h in remaining_hashes]
+ shared_stories = list(MSharedStory.objects(story_feed_id__in=story_feed_ids,
+ story_hash__in=remaining_hashes))
+ stories.extend(shared_stories)
+ if not multiple:
+ stories = stories[0]
+ return stories

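For orientation, a hedged usage sketch of the classmethod added above (the hash values are hypothetical): MStory is queried first, and any hashes that come back missing are retried against MSharedStory, scoped by the feed ids recovered from the hash prefix so the index is used.

    # Hypothetical hashes; a list goes in, a list of MStory/MSharedStory documents comes out.
    hashes = ['42:6bf810', '99:a91c2e']
    stories = MStory.find_by_story_hashes(hashes)
    # Hashes missing from MStory are looked up in MSharedStory, filtered by
    # story_feed_id (the part of the hash before the ':').
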
View file

@@ -853,7 +853,7 @@ class MSocialSubscription(mongo.Document):
def get_stories(self, offset=0, limit=6, order='newest', read_filter='all',
withscores=False, everything_unread=False):
- r = redis.Redis(connection_pool=settings.REDIS_STORY_POOL)
+ r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
ignore_user_stories = False
stories_key = 'B:%s' % (self.subscription_user_id)
@@ -893,11 +893,11 @@ class MSocialSubscription(mongo.Document):
if not ignore_user_stories:
r.delete(unread_stories_key)
- return [story_id for story_id in story_ids if story_id and story_id != 'None']
+ return story_ids
@classmethod
def feed_stories(cls, user_id, social_user_ids, offset=0, limit=6, order='newest', read_filter='all', relative_user_id=None, everything_unread=False, cache=True):
- r = redis.Redis(connection_pool=settings.REDIS_STORY_POOL)
+ r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
if not relative_user_id:
relative_user_id = user_id
@@ -912,9 +912,9 @@ class MSocialSubscription(mongo.Document):
unread_ranked_stories_keys = 'zU:%s:social' % (user_id)
if offset and r.exists(unread_ranked_stories_keys) and cache:
- story_guids = range_func(unread_ranked_stories_keys, offset, offset+limit, withscores=True)
- if story_guids:
- return zip(*story_guids)
+ story_hashes = range_func(unread_ranked_stories_keys, offset, offset+limit, withscores=True)
+ if story_hashes:
+ return zip(*story_hashes)
else:
return [], []
else:
@@ -922,17 +922,17 @@ class MSocialSubscription(mongo.Document):
for social_user_id in social_user_ids:
us = cls.objects.get(user_id=relative_user_id, subscription_user_id=social_user_id)
- story_guids = us.get_stories(offset=0, limit=50,
- order=order, read_filter=read_filter,
- withscores=True, everything_unread=everything_unread)
- if story_guids:
- r.zadd(unread_ranked_stories_keys, **dict(story_guids))
+ story_hashes = us.get_stories(offset=0, limit=50,
+ order=order, read_filter=read_filter,
+ withscores=True, everything_unread=everything_unread)
+ if story_hashes:
+ r.zadd(unread_ranked_stories_keys, **dict(story_hashes))
- story_guids = range_func(unread_ranked_stories_keys, offset, offset+limit, withscores=True)
+ story_hashes = range_func(unread_ranked_stories_keys, offset, offset+limit, withscores=True)
r.expire(unread_ranked_stories_keys, 24*60*60)
- if story_guids:
- return zip(*story_guids)
+ if story_hashes:
+ return zip(*story_hashes)
else:
return [], []

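The feed_stories method above returns redis (member, score) pairs when withscores=True, and zip(*pairs) splits them into parallel hash and date sequences, which is what the return zip(*story_hashes) lines and the return [], [] fallback hand back. A small illustration with hypothetical values:

    # (story_hash, score) pairs as returned by zrange/zrevrange with withscores=True.
    pairs = [('42:6bf810', 1367442600.0), ('42:a91c2e', 1367439000.0)]
    story_hashes, story_dates = zip(*pairs)
    # story_hashes == ('42:6bf810', '42:a91c2e')
    # story_dates == (1367442600.0, 1367439000.0)
    # r.zadd(key, **dict(pairs)) writes the same mapping back as member -> score.
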
View file

@@ -61,11 +61,11 @@ def load_social_stories(request, user_id, username=None):
if social_profile.private and not social_profile.is_followed_by_user(user.pk):
message = "%s has a private blurblog and you must be following them in order to read it." % social_profile.username
elif socialsub and (read_filter == 'unread' or order == 'oldest'):
- story_ids = socialsub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit)
+ story_hashes = socialsub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit)
story_date_order = "%sshared_date" % ('' if order == 'oldest' else '-')
- if story_ids:
+ if story_hashes:
mstories = MSharedStory.objects(user_id=social_user.pk,
- story_db_id__in=story_ids).order_by(story_date_order)
+ story_hash__in=story_hashes).order_by(story_date_order)
stories = Feed.format_stories(mstories)
else:
mstories = MSharedStory.objects(user_id=social_user.pk).order_by('-shared_date')[offset:offset+limit]
@@ -203,19 +203,21 @@ def load_river_blurblog(request):
offset = (page-1) * limit
limit = page * limit - 1
- story_ids, story_dates = MSocialSubscription.feed_stories(user.pk, social_user_ids,
+ story_hashes, story_dates = MSocialSubscription.feed_stories(user.pk, social_user_ids,
offset=offset, limit=limit,
order=order, read_filter=read_filter,
relative_user_id=relative_user_id,
everything_unread=global_feed)
- mstories = MStory.find_by_id(story_ids)
- story_id_to_dates = dict(zip(story_ids, story_dates))
- def sort_stories_by_id(a, b):
- return int(story_id_to_dates[str(b.id)]) - int(story_id_to_dates[str(a.id)])
- sorted_mstories = sorted(mstories, cmp=sort_stories_by_id)
+ mstories = MStory.find_by_story_hashes(story_hashes)
+ story_hashes_to_dates = dict(zip(story_hashes, story_dates))
+ def sort_stories_by_hash(a, b):
+ return (int(story_hashes_to_dates[str(b.story_hash)]) -
+ int(story_hashes_to_dates[str(a.story_hash)]))
+ sorted_mstories = sorted(mstories, cmp=sort_stories_by_hash)
stories = Feed.format_stories(sorted_mstories)
for s, story in enumerate(stories):
- story['story_date'] = datetime.datetime.fromtimestamp(story_dates[s])
+ timestamp = story_hashes_to_dates[story['story_hash']]
+ story['story_date'] = datetime.datetime.fromtimestamp(timestamp)
share_relative_user_id = relative_user_id
if global_feed:
share_relative_user_id = user.pk