Adding clean stories task. Updating log colors.

Samuel Clay 2012-12-26 20:00:38 -08:00
parent 70f2da6218
commit c223fa892c
6 changed files with 38 additions and 26 deletions

@@ -584,6 +584,7 @@ class MUserStory(mongo.Document):
    story_date = mongo.DateTimeField()
    story = mongo.ReferenceField(MStory, dbref=True)
    found_story = mongo.GenericReferenceField()
    shared = mongo.BooleanField()
    meta = {
        'collection': 'userstories',
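
The new shared flag lets a read-story record remember whether the user also shared that story. A minimal sketch of how it might be flipped, assuming a hypothetical helper and that records are looked up by user_id and the story reference (only the shared field itself comes from this commit):

    def mark_story_shared(user_id, story):
        # Hypothetical helper, not part of this commit: flag an existing
        # read-story record as shared so it can be treated differently later.
        userstory = MUserStory.objects.filter(user_id=user_id, story=story).first()
        if userstory:
            userstory.shared = True
            userstory.save()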

@@ -3,7 +3,7 @@ from celery.task import Task
from utils import log as logging
from django.contrib.auth.models import User
from django.conf import settings
from apps.reader.models import UserSubscription
from apps.reader.models import UserSubscription, MUserStory
from apps.social.models import MSocialSubscription
@@ -46,4 +46,21 @@ class CleanAnalytics(Task):
        })
        settings.MONGOANALYTICSDB.nbanalytics.page_loads.remove({
            "date": {"$lt": day_ago},
        })
        })


class CleanStories(Task):
    name = 'clean-stories'
    def run(self, **kwargs):
        days_ago = (datetime.datetime.utcnow() -
                    datetime.timedelta(days=settings.DAYS_OF_UNREAD*5))
        old_stories = MUserStory.objects.filter(read_date__lte=days_ago)
        logging.debug(" ---> Cleaning stories from %s days ago... %s/%s read stories" % (
            settings.DAYS_OF_UNREAD*5,
            MUserStory.objects.count(),
            old_stories.count()
        ))
        for s, story in enumerate(old_stories):
            if (s+1) % 1000 == 0:
                logging.debug(" ---> %s stories removed..." % (s+1))
            story.delete()
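
A minimal sketch of exercising the new task by hand, synchronously and without the broker, before celerybeat starts scheduling it (the import path is an assumption):

    from apps.reader.tasks import CleanStories  # assumed module path

    # Deletes MUserStory records whose read_date is older than DAYS_OF_UNREAD * 5 days.
    CleanStories().run()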

@@ -249,8 +249,8 @@ def load_feeds(request):
    if not user_subs:
        categories = MCategory.serialize()
    logging.user(request, "~FBLoading ~SB~FY%s~FB~SN/~SB~FM%s~FB~SN feeds/socials. %s" % (
        len(feeds.keys()), len(social_feeds), '~SBUpdating counts.' if update_counts else ''))
    logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" % (
        len(feeds.keys()), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))
    data = {
        'feeds': feeds.values() if version == 2 else feeds,
@@ -341,8 +341,8 @@ def load_feeds_flat(request):
    if not user_subs:
        categories = MCategory.serialize()
    logging.user(request, "~FBLoading ~SB~FY%s~FB~SN/~SB~FM%s~FB~SN feeds/socials ~FMflat~FB. %s" % (
        len(feeds.keys()), len(social_feeds), '~SBUpdating counts.' if update_counts else ''))
    logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials ~FMflat~FB%s" % (
        len(feeds.keys()), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))
    data = {
        "flat_folders": flat_folders,

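Both log lines are rebuilt around NewsBlur's tilde color codes; assuming the usual mapping in utils/log.py (~SB bold, ~SN back to normal weight, ~FB blue, ~FY yellow, ~FM magenta, ~FC cyan), the change bolds the whole "Loading" prefix once instead of bolding each count, and renders the optional "Updating counts." suffix in cyan. A sketch of the new call with made-up counts:

    # Illustration only: 42 feeds, 3 social feeds, counts being recalculated.
    logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" % (
        42, 3, '. ~FCUpdating counts.'))
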
@@ -36,7 +36,6 @@ from vendor.timezones.utilities import localtime_for_timezone
@json.json_view
def load_social_stories(request, user_id, username=None):
    start = time.time()
    user = get_user(request)
    social_user_id = int(user_id)
    social_user = get_object_or_404(User, pk=social_user_id)
@@ -74,8 +73,6 @@ def load_social_stories(request, user_id, username=None):
    if not stories:
        return dict(stories=[], message=message)
    checkpoint1 = time.time()
    stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True)
    story_feed_ids = list(set(s['story_feed_id'] for s in stories))
@@ -99,8 +96,6 @@ def load_social_stories(request, user_id, username=None):
    classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=user.pk, feed_id__in=story_feed_ids))
    classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=user.pk, feed_id__in=story_feed_ids))
    checkpoint2 = time.time()
    story_ids = [story['id'] for story in stories]
    userstories_db = MUserStory.objects(user_id=user.pk,
                                        feed_id__in=story_feed_ids,
@@ -168,10 +163,8 @@ def load_social_stories(request, user_id, username=None):
        socialsub.needs_unread_recalc = True
        socialsub.save()
    diff1 = checkpoint1-start
    diff2 = checkpoint2-start
    logging.user(request, "~FYLoading ~FMshared stories~FY: ~SB%s%s ~SN(~SB%.4ss/%.4ss~SN)" % (
        social_profile.title[:22], ('~SN/p%s' % page) if page > 1 else '', diff1, diff2))
    logging.user(request, "~FYLoading ~FMshared stories~FY: ~SB%s%s" % (
        social_profile.title[:22], ('~SN/p%s' % page) if page > 1 else ''))
    return {
        "stories": stories,
@@ -331,7 +324,6 @@ def load_river_blurblog(request):
    }

def load_social_page(request, user_id, username=None, **kwargs):
    start = time.time()
    user = request.user
    social_user_id = int(user_id)
    social_user = get_object_or_404(User, pk=social_user_id)
@@ -365,8 +357,6 @@ def load_social_page(request, user_id, username=None, **kwargs):
        has_next_page = True
        stories = stories[:-1]
    checkpoint1 = time.time()
    if not stories:
        params = {
            "user": user,
@@ -394,8 +384,6 @@ def load_social_page(request, user_id, username=None, **kwargs):
    stories, profiles = MSharedStory.stories_with_comments_and_profiles(stories, social_user.pk,
                                                                        check_all=True)
    checkpoint2 = time.time()
    if user.is_authenticated():
        for story in stories:
            if user.pk in story['share_user_ids']:
@@ -446,12 +434,8 @@ def load_social_page(request, user_id, username=None, **kwargs):
        'active_story' : active_story,
    }
    diff1 = checkpoint1-start
    diff2 = checkpoint2-start
    timediff = time.time()-start
    logging.user(request, "~FYLoading ~FMsocial page~FY: ~SB%s%s ~SN(%.4s seconds, ~SB%.4s/%.4s~SN)" % (
        social_profile.title[:22], ('~SN/p%s' % page) if page > 1 else '', timediff,
        diff1, diff2))
    logging.user(request, "~FYLoading ~FMsocial page~FY: ~SB%s%s" % (
        social_profile.title[:22], ('~SN/p%s' % page) if page > 1 else ''))
    if format == 'html':
        template = 'social/social_stories.xhtml'
    else:

fabfile.py

@@ -157,6 +157,11 @@ def deploy_code(copy_assets=False, full=False):
        run('curl -s http://%s > /dev/null' % env.host)
        run('curl -s http://%s/api/add_site_load_script/ABCDEF > /dev/null' % env.host)

@parallel
def kill():
    sudo('supervisorctl reload')
    run('pkill -c gunicorn')

def deploy_node():
    with cd(env.NEWSBLUR_PATH):
        run('sudo supervisorctl restart node_unread')
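
A sketch of driving the new task from Python with Fabric's execute() (available in Fabric 1.3+; the host names are hypothetical), equivalent to running "fab kill" against the app servers:

    from fabric.api import execute
    from fabfile import kill  # the @parallel task added above

    # Reload supervisor and kill gunicorn across the app servers in parallel.
    execute(kill, hosts=['app01.newsblur.com', 'app02.newsblur.com'])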

@@ -362,6 +362,11 @@ CELERYBEAT_SCHEDULE = {
        'schedule': datetime.timedelta(hours=12),
        'options': {'queue': 'beat_tasks'},
    },
    'clean-stories': {
        'task': 'clean-stories',
        'schedule': datetime.timedelta(hours=24),
        'options': {'queue': 'beat_tasks'},
    },
    'premium-expire': {
        'task': 'premium-expire',
        'schedule': datetime.timedelta(hours=24),
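
Because the beat entry refers to the task by its registered name ('clean-stories', matching the name attribute on the class above), the same cleanup can also be dispatched on demand through the broker. A minimal sketch, assuming a Celery version that exposes current_app:

    from celery import current_app

    # One-off asynchronous run on the same queue celerybeat uses for the nightly job.
    current_app.send_task('clean-stories', queue='beat_tasks')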