Cleaning redis stories for 1% of all feed fetches.

This commit is contained in:
Samuel Clay 2012-11-26 10:39:10 -08:00
parent 6b9c737366
commit cf554bb598
2 changed files with 16 additions and 0 deletions

14
fabfile.py vendored
View file

@@ -192,11 +192,19 @@ def staging_full():
@parallel
def celery():
    """Default celery deploy task: delegates to the slow (stop/start) path.

    Runs in parallel across all targeted hosts (Fabric @parallel).
    """
    celery_slow()
def celery_slow():
    """Full celery redeploy: pull the latest code, then stop and start workers.

    Slower than celery_fast() because workers are fully stopped before being
    started again, rather than reloaded in place.
    """
    with cd(env.NEWSBLUR_PATH):
        run('git pull')
    celery_stop()
    celery_start()
def celery_fast():
    """Fast celery redeploy: pull the latest code, then reload workers in place.

    Counterpart to celery_slow(), skipping the explicit stop/start cycle.
    """
    with cd(env.NEWSBLUR_PATH):
        run('git pull')
    celery_reload()
@parallel
def celery_stop():
with cd(env.NEWSBLUR_PATH):
@@ -210,6 +218,12 @@ def celery_start():
run('sudo supervisorctl start celery')
run('tail logs/newsblur.log')
@parallel
def celery_reload():
    """Restart only the celery program under supervisord and tail the log.

    Runs in parallel across all targeted hosts (Fabric @parallel).

    Bug fix: `supervisorctl reload` ignores any program argument — it reloads
    the whole supervisord configuration and bounces EVERY supervised program.
    `restart celery` is the command that restarts just the celery program,
    which is what the fast-deploy path (celery_fast) intends.
    """
    with cd(env.NEWSBLUR_PATH):
        # `restart <name>`, not `reload`: reload would take down all programs.
        run('sudo supervisorctl restart celery')
        run('tail logs/newsblur.log')
def kill_celery():
    """Forcibly kill all celeryd worker processes on each host.

    Greps the process table for `celeryd`, filters out the grep itself, and
    sends SIGKILL to the matching PIDs. Last-resort cleanup for workers that
    did not shut down via supervisorctl.
    NOTE(review): `pkill -9 -f celeryd` would be equivalent and simpler —
    confirm pkill is available on the target hosts before switching.
    """
    with cd(env.NEWSBLUR_PATH):
        run('ps aux | grep celeryd | egrep -v grep | awk \'{print $2}\' | sudo xargs kill -9')

View file

@@ -366,6 +366,8 @@ class Dispatcher:
feed.fetched_once = True
feed = feed.save()
# MUserStory.delete_old_stories(feed_id=feed.pk)
if random.random() <= 0.01:
feed.sync_redis()
try:
self.count_unreads_for_subscribers(feed)
except TimeoutError: