Turning off redis dupe story id writer.

Samuel Clay 2013-08-14 14:32:50 -07:00
parent c238973417
commit aab1f39df1
4 changed files with 74 additions and 81 deletions
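The change is mechanical throughout: every method that previously mirrored story-hash writes to the secondary pool (REDIS_STORY_HASH_POOL2, reached through the r2/p2/h2 handles) now writes only to the primary REDIS_STORY_HASH_POOL, and the duplicate commands are commented out rather than deleted. A minimal sketch of the single-writer shape the code moves to (a simplified, illustrative helper, not an actual NewsBlur method):

import redis
from django.conf import settings

def mark_story_read(user_id, story_hash, r=None):
    # Illustrative only: shows the pattern after this commit. Only the
    # primary story-hash pool is written; the secondary r2 writer remains
    # in the source as commented-out lines.
    if not r:
        r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
    # if not r2:
    #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
    key = 'RS:%s' % user_id  # set of story hashes this user has read
    r.sadd(key, story_hash)
    r.expire(key, settings.DAYS_OF_UNREAD_NEW * 24 * 60 * 60)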

@@ -653,14 +653,14 @@ class UserSubscription(models.Model):
 class RUserStory:
     @classmethod
-    def mark_story_hashes_read(cls, user_id, story_hashes, r=None, r2=None):
+    def mark_story_hashes_read(cls, user_id, story_hashes, r=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        if not r2:
-            r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # if not r2:
+        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         p = r.pipeline()
-        p2 = r2.pipeline()
+        # p2 = r2.pipeline()
         feed_ids = set()
         friend_ids = set()
@@ -676,19 +676,19 @@ class RUserStory:
             share_key = "S:%s" % (story_hash)
             friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)]
             friend_ids.update(friends_with_shares)
-            cls.mark_read(user_id, feed_id, story_hash, social_user_ids=friends_with_shares, r=p, r2=p2)
+            cls.mark_read(user_id, feed_id, story_hash, social_user_ids=friends_with_shares, r=p)
         p.execute()
-        p2.execute()
+        # p2.execute()
         return list(feed_ids), list(friend_ids)
     @classmethod
-    def mark_read(cls, user_id, story_feed_id, story_hash, social_user_ids=None, r=None, r2=None):
+    def mark_read(cls, user_id, story_feed_id, story_hash, social_user_ids=None, r=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        if not r2:
-            r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # if not r2:
+        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=story_feed_id)
@@ -696,9 +696,9 @@ class RUserStory:
         def redis_commands(key):
             r.sadd(key, story_hash)
-            r2.sadd(key, story_hash)
+            # r2.sadd(key, story_hash)
             r.expire(key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-            r2.expire(key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            # r2.expire(key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
         all_read_stories_key = 'RS:%s' % (user_id)
         redis_commands(all_read_stories_key)
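With the r2 parameter gone, callers hand mark_read a single connection or pipeline; mark_story_hashes_read above buffers every SADD/EXPIRE on one pipeline and flushes it once. A hedged usage sketch (the import path and the literal story hashes are assumptions for illustration):

import redis
from django.conf import settings
from apps.reader.models import RUserStory  # module path assumed

r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
p = r.pipeline()
for story_hash in ['42:deadbee', '42:cafef00']:  # illustrative feed_id:hash values
    RUserStory.mark_read(user_id=1, story_feed_id=42, story_hash=story_hash, r=p)
p.execute()  # one round trip for all buffered commands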
@@ -715,12 +715,12 @@ class RUserStory:
     def mark_unread(user_id, story_feed_id, story_hash, social_user_ids=None):
         r = redis.Redis(connection_pool=settings.REDIS_POOL)
         h = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         h.srem('RS:%s' % user_id, story_hash)
-        h2.srem('RS:%s' % user_id, story_hash)
+        # h2.srem('RS:%s' % user_id, story_hash)
         h.srem('RS:%s:%s' % (user_id, story_feed_id), story_hash)
-        h2.srem('RS:%s:%s' % (user_id, story_feed_id), story_hash)
+        # h2.srem('RS:%s:%s' % (user_id, story_feed_id), story_hash)
         # Find other social feeds with this story to update their counts
         friend_key = "F:%s:F" % (user_id)
@@ -730,7 +730,7 @@ class RUserStory:
         if friends_with_shares:
             for social_user_id in friends_with_shares:
                 h.srem('RS:%s:B:%s' % (user_id, social_user_id), story_hash)
-                h2.srem('RS:%s:B:%s' % (user_id, social_user_id), story_hash)
+                # h2.srem('RS:%s:B:%s' % (user_id, social_user_id), story_hash)
     @staticmethod
     def get_stories(user_id, feed_id, r=None):
@@ -742,9 +742,9 @@ class RUserStory:
     @classmethod
     def switch_feed(cls, user_id, old_feed_id, new_feed_id):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         p = r.pipeline()
-        p2 = r2.pipeline()
+        # p2 = r2.pipeline()
         story_hashes = cls.get_stories(user_id, old_feed_id, r=r)
         for story_hash in story_hashes:
@@ -752,18 +752,18 @@ class RUserStory:
             new_story_hash = "%s:%s" % (new_feed_id, hash_story)
             read_feed_key = "RS:%s:%s" % (user_id, new_feed_id)
             p.sadd(read_feed_key, new_story_hash)
-            p2.sadd(read_feed_key, new_story_hash)
+            # p2.sadd(read_feed_key, new_story_hash)
             p.expire(read_feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-            p2.expire(read_feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            # p2.expire(read_feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
             read_user_key = "RS:%s" % (user_id)
             p.sadd(read_user_key, new_story_hash)
-            p2.sadd(read_user_key, new_story_hash)
+            # p2.sadd(read_user_key, new_story_hash)
             p.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-            p2.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            # p2.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
         p.execute()
-        p2.execute()
+        # p2.execute()
         if len(story_hashes) > 0:
             logging.info(" ---> %s read stories" % len(story_hashes))
@@ -771,11 +771,10 @@ class RUserStory:
     @classmethod
     def switch_hash(cls, feed_id, old_hash, new_hash):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         p = r.pipeline()
-        p2 = r2.pipeline()
+        # p2 = r2.pipeline()
         UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_NEW)
-        now = int(time.time())
         usersubs = UserSubscription.objects.filter(feed_id=feed_id, last_read_date__gte=UNREAD_CUTOFF)
         logging.info(" ---> ~SB%s usersubs~SN to switch read story hashes..." % len(usersubs))
@@ -784,22 +783,18 @@ class RUserStory:
             read = r.sismember(rs_key, old_hash)
             if read:
                 p.sadd(rs_key, new_hash)
-                p2.sadd(rs_key, new_hash)
-                p2.zadd('z' + rs_key, new_hash, now)
+                # p2.sadd(rs_key, new_hash)
                 p.expire(rs_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-                p2.expire(rs_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-                p2.expire('z' + rs_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+                # p2.expire(rs_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
                 read_user_key = "RS:%s" % sub.user.pk
                 p.sadd(read_user_key, new_hash)
-                p2.sadd(read_user_key, new_hash)
-                p2.zadd('z' + read_user_key, new_hash, now)
+                # p2.sadd(read_user_key, new_hash)
                 p.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-                p2.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-                p2.expire('z' + read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+                # p2.expire(read_user_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
         p.execute()
-        p2.execute()
+        # p2.execute()
 class UserSubscriptionFolders(models.Model):
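One non-mechanical detail in switch_hash: the now = int(time.time()) timestamp is dropped outright along with the p2.zadd(...) calls that consumed it, since only the secondary writer maintained 'z'-prefixed sorted-set copies of the read-story keys. On the primary pool, read state remains a plain set-membership check, sketched here with illustrative ids:

import redis
from django.conf import settings

r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
rs_key = 'RS:%s:%s' % (1, 42)                # per-user, per-feed read-story set
is_read = r.sismember(rs_key, '42:deadbee')  # no zRS:* sorted set is consulted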

@@ -236,16 +236,16 @@ class Feed(models.Model):
     def sync_redis(self):
         return MStory.sync_feed_redis(self.pk)
-    def expire_redis(self, r=None, r2=None):
+    def expire_redis(self, r=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        if not r2:
-            r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # if not r2:
+        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         r.expire('F:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        r2.expire('F:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+        # r2.expire('F:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
         r.expire('zF:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        r2.expire('zF:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+        # r2.expire('zF:%s' % self.pk, settings.DAYS_OF_UNREAD_NEW*24*60*60)
     @classmethod
     def autocomplete(self, prefix, limit=5):
@@ -1785,55 +1785,55 @@ class MStory(mongo.Document):
         return story_hashes
-    def sync_redis(self, r=None, r2=None):
+    def sync_redis(self, r=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        if not r2:
-            r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # if not r2:
+        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_NEW)
         if self.id and self.story_date > UNREAD_CUTOFF:
             feed_key = 'F:%s' % self.story_feed_id
             r.sadd(feed_key, self.story_hash)
             r.expire(feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-            r2.sadd(feed_key, self.story_hash)
-            r2.expire(feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            # r2.sadd(feed_key, self.story_hash)
+            # r2.expire(feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
             r.zadd('z' + feed_key, self.story_hash, time.mktime(self.story_date.timetuple()))
             r.expire('z' + feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-            r2.zadd('z' + feed_key, self.story_hash, time.mktime(self.story_date.timetuple()))
-            r2.expire('z' + feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+            # r2.zadd('z' + feed_key, self.story_hash, time.mktime(self.story_date.timetuple()))
+            # r2.expire('z' + feed_key, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-    def remove_from_redis(self, r=None, r2=None):
+    def remove_from_redis(self, r=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        if not r2:
-            r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # if not r2:
+        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         if self.id:
             r.srem('F:%s' % self.story_feed_id, self.story_hash)
-            r2.srem('F:%s' % self.story_feed_id, self.story_hash)
+            # r2.srem('F:%s' % self.story_feed_id, self.story_hash)
             r.zrem('zF:%s' % self.story_feed_id, self.story_hash)
-            r2.zrem('zF:%s' % self.story_feed_id, self.story_hash)
+            # r2.zrem('zF:%s' % self.story_feed_id, self.story_hash)
     @classmethod
     def sync_feed_redis(cls, story_feed_id):
         r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         UNREAD_CUTOFF = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_NEW)
         feed = Feed.get_by_id(story_feed_id)
         stories = cls.objects.filter(story_feed_id=story_feed_id, story_date__gte=UNREAD_CUTOFF)
         r.delete('F:%s' % story_feed_id)
-        r2.delete('F:%s' % story_feed_id)
+        # r2.delete('F:%s' % story_feed_id)
         r.delete('zF:%s' % story_feed_id)
-        r2.delete('zF:%s' % story_feed_id)
+        # r2.delete('zF:%s' % story_feed_id)
         logging.info(" ---> [%-30s] ~FMSyncing ~SB%s~SN stories to redis" % (feed and feed.title[:30] or story_feed_id, stories.count()))
         p = r.pipeline()
-        p2 = r2.pipeline()
+        # p2 = r2.pipeline()
         for story in stories:
-            story.sync_redis(r=p, r2=p2)
+            story.sync_redis(r=p)
         p.execute()
-        p2.execute()
+        # p2.execute()
     def count_comments(self):
         from apps.social.models import MSharedStory
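With the r2/p2 halves disabled, sync_feed_redis rebuilds only the primary pool's F:<feed_id> set and zF:<feed_id> sorted set (story hashes scored by story date). A hedged usage sketch; the MStory import path and the feed id are assumptions:

import redis
from django.conf import settings
from apps.rss_feeds.models import MStory  # module path assumed

MStory.sync_feed_redis(story_feed_id=42)  # repopulates F:42 and zF:42 on the primary pool

r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
newest_hashes = r.zrevrange('zF:42', 0, 9)  # ten most recent story hashes by date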

@@ -1536,7 +1536,7 @@ class MSharedStory(mongo.Document):
     def sync_all_redis(cls, drop=False):
         r = redis.Redis(connection_pool=settings.REDIS_POOL)
         h = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         if drop:
             for key_name in ["C", "S"]:
                 keys = r.keys("%s:*" % key_name)
@@ -1545,7 +1545,7 @@ class MSharedStory(mongo.Document):
                     r.delete(key)
         for story in cls.objects.all():
             story.sync_redis_shares(r=r)
-            story.sync_redis_story(r=h, r2=h2)
+            story.sync_redis_story(r=h)
     def sync_redis(self):
         self.sync_redis_shares()
@@ -1563,22 +1563,22 @@ class MSharedStory(mongo.Document):
         else:
             r.srem(comment_key, self.user_id)
-    def sync_redis_story(self, r=None, r2=None):
+    def sync_redis_story(self, r=None):
         if not r:
             r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        if not r2:
-            r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # if not r2:
+        #     r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         r.sadd('B:%s' % self.user_id, self.feed_guid_hash)
-        r2.sadd('B:%s' % self.user_id, self.feed_guid_hash)
+        # r2.sadd('B:%s' % self.user_id, self.feed_guid_hash)
         r.zadd('zB:%s' % self.user_id, self.feed_guid_hash,
                time.mktime(self.shared_date.timetuple()))
-        r2.zadd('zB:%s' % self.user_id, self.feed_guid_hash,
-                time.mktime(self.shared_date.timetuple()))
+        # r2.zadd('zB:%s' % self.user_id, self.feed_guid_hash,
+        #         time.mktime(self.shared_date.timetuple()))
         r.expire('B:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        r2.expire('B:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+        # r2.expire('B:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
         r.expire('zB:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
-        r2.expire('zB:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
+        # r2.expire('zB:%s' % self.user_id, settings.DAYS_OF_UNREAD_NEW*24*60*60)
     def remove_from_redis(self):
         r = redis.Redis(connection_pool=settings.REDIS_POOL)
@@ -1589,11 +1589,11 @@ class MSharedStory(mongo.Document):
         r.srem(comment_key, self.user_id)
         h = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
-        h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
+        # h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
         h.srem('B:%s' % self.user_id, self.feed_guid_hash)
-        h2.srem('B:%s' % self.user_id, self.feed_guid_hash)
+        # h2.srem('B:%s' % self.user_id, self.feed_guid_hash)
         h.zrem('zB:%s' % self.user_id, self.feed_guid_hash)
-        h2.zrem('zB:%s' % self.user_id, self.feed_guid_hash)
+        # h2.zrem('zB:%s' % self.user_id, self.feed_guid_hash)
     def publish_update_to_subscribers(self):
         try:
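The shared-story methods follow the same pattern: sync_redis_story and remove_from_redis now touch only the B:<user_id> set and zB:<user_id> sorted set on the primary pool. A hedged sketch of a full resync after the change; the module path is taken from the import shown above, and drop=False mirrors the sync_all_redis signature:

from apps.social.models import MSharedStory

# Rebuild share/comment keys and each shared story's B:/zB: entries on the
# single remaining story-hash pool.
MSharedStory.sync_all_redis(drop=False)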

@@ -580,17 +580,15 @@ MONGOANALYTICSDB = connect(MONGO_ANALYTICS_DB.pop('name'), **MONGO_ANALYTICS_DB)
 # = Redis =
 # =========
-REDIS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=0)
-REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=2)
-REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=3)
-REDIS_FEED_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=4)
-REDIS_SESSION_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=5)
-# REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=6) # Duped in CACHES
-REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=6379, db=1)
-REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'], port=6379, db=0)
-REDIS_STORY_HASH_POOL2 = redis.ConnectionPool(host=REDIS['host'], port=6379, db=8)
+REDIS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=0)
+REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=2)
+REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=3)
+REDIS_FEED_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=4)
+REDIS_SESSION_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=5)
+# REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=6) # Duped in CACHES
+REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'], port=6379, db=0)
+REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=6379, db=1)
+# REDIS_STORY_HASH_POOL2 = redis.ConnectionPool(host=REDIS['host'], port=6379, db=8)
 # ==========
 # = Assets =
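On the settings side, REDIS_STORY_HASH_POOL2 (db 8 on the main Redis host) is commented out and the remaining pool definitions are reordered, so all story-hash traffic is served by REDIS_STORY_HASH_POOL on REDIS_STORY['host'], db 1. A small hedged sanity check:

import redis
from django.conf import settings

# With the secondary pool gone, every story-hash read and write goes
# through the one remaining connection pool.
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
r.ping()  # raises redis.ConnectionError if the story-hash host is unreachable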