Renaming redis feed pool to redis feed update pool in prep for redis feed sub pool.

This commit is contained in:
Samuel Clay 2015-07-27 18:35:25 -07:00
parent 037a82164a
commit 6a20478a7f
9 changed files with 25 additions and 24 deletions

View file

@@ -1074,28 +1074,28 @@ class RNewUserQueue:
@classmethod
def add_user(cls, user_id):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
now = time.time()
r.zadd(cls.KEY, user_id, now)
@classmethod
def user_count(cls):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
count = r.zcard(cls.KEY)
return count
@classmethod
def user_position(cls, user_id):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
position = r.zrank(cls.KEY, user_id)
if position >= 0:
return position + 1
@classmethod
def pop_user(cls):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
user = r.zrange(cls.KEY, 0, 0)[0]
r.zrem(cls.KEY, user)

View file

@@ -847,7 +847,7 @@ class UserSubscription(models.Model):
@classmethod
def verify_feeds_scheduled(cls, user_id):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
user = User.objects.get(pk=user_id)
subs = cls.objects.filter(user=user)
feed_ids = [sub.feed.pk for sub in subs]

View file

@@ -3,5 +3,5 @@ import redis
from apps.social.models import *
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
print "Redis: %s" % r

View file

@@ -7,7 +7,7 @@ from apps.rss_feeds.models import Feed
class Migration(DataMigration):
def forwards(self, orm):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
start = 0
for f in xrange(start, Feed.objects.latest('pk').pk, 1000):
print " ---> %s" % f

View file

@@ -427,7 +427,7 @@ class Feed(models.Model):
@classmethod
def task_feeds(cls, feeds, queue_size=12, verbose=True):
if not feeds: return
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
if isinstance(feeds, Feed):
if verbose:
@@ -452,7 +452,7 @@ class Feed(models.Model):
@classmethod
def drain_task_feeds(cls):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
tasked_feeds = r.zrange('tasked_feeds', 0, -1)
logging.debug(" ---> ~FRDraining %s tasked feeds..." % len(tasked_feeds))
@@ -903,7 +903,7 @@ class Feed(models.Model):
def update(self, **kwargs):
from utils import feed_fetcher
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
original_feed_id = int(self.pk)
if getattr(settings, 'TEST_DEBUG', False):
@@ -1551,7 +1551,7 @@ class Feed(models.Model):
return total
def set_next_scheduled_update(self, verbose=False, skip_scheduling=False):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
total = self.get_next_scheduled_update(force=True, verbose=verbose)
error_count = self.error_count
@@ -1585,13 +1585,13 @@ class Feed(models.Model):
@property
def error_count(self):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
fetch_errors = int(r.zscore('error_feeds', self.pk) or 0)
return fetch_errors + self.errors_since_good
def schedule_feed_fetch_immediately(self, verbose=True):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
if verbose:
logging.debug(' ---> [%-30s] Scheduling feed fetch immediately...' % (unicode(self)[:30]))
@@ -1611,7 +1611,7 @@ class Feed(models.Model):
self.save()
def queue_pushed_feed_xml(self, xml, latest_push_date_delta=None):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
queue_size = r.llen("push_feeds")
if latest_push_date_delta:

View file

@@ -18,7 +18,7 @@ class TaskFeeds(Task):
settings.LOG_TO_STREAM = True
now = datetime.datetime.utcnow()
start = time.time()
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
tasked_feeds_size = r.zcard('tasked_feeds')
hour_ago = now - datetime.timedelta(hours=1)
@@ -60,7 +60,7 @@ class TaskBrokenFeeds(Task):
settings.LOG_TO_STREAM = True
now = datetime.datetime.utcnow()
start = time.time()
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
logging.debug(" ---> ~SN~FBQueuing broken feeds...")
@@ -126,7 +126,7 @@ class UpdateFeeds(Task):
def run(self, feed_pks, **kwargs):
from apps.rss_feeds.models import Feed
from apps.statistics.models import MStatistics
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0))
compute_scores = bool(mongodb_replication_lag < 10)

View file

@@ -652,13 +652,14 @@ CACHES = {
REDIS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=0)
REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=2)
REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=3)
REDIS_FEED_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=4)
REDIS_SESSION_POOL = redis.ConnectionPool(host=SESSION_REDIS_HOST, port=6379, db=5)
# REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=6) # Duped in CACHES
REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'], port=6379, db=0)
REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=6379, db=1)
REDIS_FEED_UPDATE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=4)
# REDIS_STORY_HASH_POOL2 = redis.ConnectionPool(host=REDIS['host'], port=6379, db=8)
REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=10)
# REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=6) # Duped in CACHES
REDIS_SESSION_POOL = redis.ConnectionPool(host=SESSION_REDIS_HOST, port=6379, db=5)
REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=6379, db=1)
REDIS_FEED_SUB_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=6379, db=2)
REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'], port=6379, db=0)
# ==========
# = Assets =

View file

@@ -50,7 +50,7 @@ class NBMuninGraph(MuninGraph):
push_feeds = PushSubscription.objects.filter(verified=True).count()
MStatistics.set('munin:push_feeds', push_feeds, 60*60*12)
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
return {
'scheduled_feeds': r.zcard('scheduled_updates'),

View file

@@ -26,7 +26,7 @@ class NBMuninGraph(MuninGraph):
def calculate_metrics(self):
from django.conf import settings
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
return {
'update_queue': r.scard("queued_feeds"),