Migrating to celery 4.0

Samuel Clay 2020-11-13 12:14:37 -05:00
parent 6990e0844d
commit 0f77d1c441
8 changed files with 384 additions and 452 deletions

apps/analyzer/tasks.py  View file

@@ -1,13 +1,12 @@
-from celery.task import Task
+from celery.task import task
 from utils import log as logging
 
-class EmailPopularityQuery(Task):
-    
-    def run(self, pk):
+@task()
+def EmailPopularityQuery(pk):
     from apps.analyzer.models import MPopularityQuery
     query = MPopularityQuery.objects.get(pk=pk)
     logging.debug(" -> ~BB~FCRunning popularity query: ~SB%s" % query)
     query.send_email()

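The calling convention survives this migration: a function decorated with @task exposes the same .delay() and .apply_async() methods that the class-based Task provided, so call sites elsewhere in the codebase don't need to change. A minimal sketch (the pk value is illustrative):

    # Hedged sketch: the old class-based task and the new decorated
    # function are invoked identically; only the definition style changed.
    from apps.analyzer.tasks import EmailPopularityQuery

    EmailPopularityQuery.delay(pk=42)                    # fire-and-forget
    EmailPopularityQuery.apply_async(kwargs={'pk': 42})  # with routing options
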
apps/feed_import/tasks.py  View file

@@ -1,21 +1,20 @@
-from celery.task import Task
+from celery.task import task
 from django.contrib.auth.models import User
 from apps.feed_import.models import UploadedOPML, OPMLImporter
 from apps.reader.models import UserSubscription
 from utils import log as logging
 
-class ProcessOPML(Task):
-    
-    def run(self, user_id):
+@task()
+def ProcessOPML(user_id):
     user = User.objects.get(pk=user_id)
     logging.user(user, "~FR~SBOPML upload (task) starting...")
     opml = UploadedOPML.objects.filter(user_id=user_id).first()
     opml_importer = OPMLImporter(opml.opml_file, user)
     opml_importer.process()
     feed_count = UserSubscription.objects.filter(user=user).count()
     user.profile.send_upload_opml_finished_email(feed_count)
     logging.user(user, "~FR~SBOPML upload (task): ~SK%s~SN~SB~FR feeds" % (feed_count))

apps/notifications/tasks.py  View file

@@ -1,10 +1,9 @@
-from celery.task import Task
+from celery.task import task
 from django.contrib.auth.models import User
 from apps.notifications.models import MUserFeedNotification
 from utils import log as logging
 
-class QueueNotifications(Task):
-    
-    def run(self, feed_id, new_stories):
+@task()
+def QueueNotifications(feed_id, new_stories):
     MUserFeedNotification.push_feed_notifications(feed_id, new_stories)

apps/profile/tasks.py  View file

@@ -1,90 +1,76 @@
 import datetime
-from celery.task import Task
+from celery.task import task
 from apps.profile.models import Profile, RNewUserQueue
 from utils import log as logging
 from apps.reader.models import UserSubscription, UserSubscriptionFolders
 from apps.social.models import MSocialServices, MActivity, MInteraction
 
-class EmailNewUser(Task):
-    
-    def run(self, user_id):
+@task(name="email-new-user")
+def EmailNewUser(user_id):
     user_profile = Profile.objects.get(user__pk=user_id)
     user_profile.send_new_user_email()
 
-class EmailNewPremium(Task):
-    
-    def run(self, user_id):
+@task(name="email-new-premium")
+def EmailNewPremium(user_id):
     user_profile = Profile.objects.get(user__pk=user_id)
     user_profile.send_new_premium_email()
 
-class PremiumExpire(Task):
-    name = 'premium-expire'
-    
-    def run(self, **kwargs):
+@task(name="premium-expire")
+def PremiumExpire(**kwargs):
     # Get expired but grace period users
     two_days_ago = datetime.datetime.now() - datetime.timedelta(days=2)
     thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=30)
     expired_profiles = Profile.objects.filter(is_premium=True,
                                               premium_expire__lte=two_days_ago,
                                               premium_expire__gt=thirty_days_ago)
     logging.debug(" ---> %s users have expired premiums, emailing grace..." % expired_profiles.count())
     for profile in expired_profiles:
         if profile.grace_period_email_sent():
             continue
         profile.setup_premium_history()
         if profile.premium_expire < two_days_ago:
             profile.send_premium_expire_grace_period_email()
 
     # Get fully expired users
     expired_profiles = Profile.objects.filter(is_premium=True,
                                               premium_expire__lte=thirty_days_ago)
     logging.debug(" ---> %s users have expired premiums, deactivating and emailing..." % expired_profiles.count())
     for profile in expired_profiles:
         profile.setup_premium_history()
         if profile.premium_expire < thirty_days_ago:
             profile.send_premium_expire_email()
             profile.deactivate_premium()
 
-class ActivateNextNewUser(Task):
-    name = 'activate-next-new-user'
-    
-    def run(self):
+@task(name="activate-next-new-user")
+def ActivateNextNewUser():
     RNewUserQueue.activate_next()
 
-class CleanupUser(Task):
-    name = 'cleanup-user'
-    
-    def run(self, user_id):
+@task(name="cleanup-user")
+def CleanupUser(user_id):
     UserSubscription.trim_user_read_stories(user_id)
     UserSubscription.verify_feeds_scheduled(user_id)
     Profile.count_all_feed_subscribers_for_user(user_id)
     MInteraction.trim(user_id)
     MActivity.trim(user_id)
     UserSubscriptionFolders.add_missing_feeds_for_user(user_id)
     UserSubscriptionFolders.compact_for_user(user_id)
     # UserSubscription.refresh_stale_feeds(user_id)
 
     try:
         ss = MSocialServices.objects.get(user_id=user_id)
     except MSocialServices.DoesNotExist:
         logging.debug(" ---> ~FRCleaning up user, can't find social_services for user_id: ~SB%s" % user_id)
         return
     ss.sync_twitter_photo()
 
-class CleanSpam(Task):
-    name = 'clean-spam'
-    
-    def run(self, **kwargs):
+@task(name="clean-spam")
+def CleanSpam():
     logging.debug(" ---> Finding spammers...")
     Profile.clear_dead_spammers(confirm=True)
 
-class ReimportStripeHistory(Task):
-    name = 'reimport-stripe-history'
-    
-    def run(self, **kwargs):
+@task(name="reimport-stripe-history")
+def ReimportStripeHistory():
     logging.debug(" ---> Reimporting Stripe history...")
     Profile.reimport_stripe_history(limit=10, days=1)

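The explicit name= arguments matter here: the class-based tasks registered under their name attribute (e.g. 'premium-expire'), and pinning the same string in the decorator keeps any by-name references resolving, such as a celerybeat schedule. A hedged sketch of what such a schedule entry might look like (the crontab timing is illustrative, not taken from this commit):

    # Hypothetical beat schedule entry; the 'task' string must match the
    # name pinned in @task(name="premium-expire") above.
    from celery.schedules import crontab

    CELERYBEAT_SCHEDULE = {
        'premium-expire': {
            'task': 'premium-expire',
            'schedule': crontab(hour=0, minute=0),
        },
    }
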
apps/rss_feeds/tasks.py  View file

@@ -3,7 +3,7 @@ import os
 import shutil
 import time
 import redis
-from celery.task import Task
+from celery.task import task
 from celery.exceptions import SoftTimeLimitExceeded
 from utils import log as logging
 from utils import s3_utils as s3
@@ -13,259 +13,230 @@ from utils.mongo_raw_log_middleware import MongoDumpMiddleware
 from utils.redis_raw_log_middleware import RedisDumpMiddleware
 
 FEED_TASKING_MAX = 10000
 
-class TaskFeeds(Task):
-    name = 'task-feeds'
-    
-    def run(self, **kwargs):
+@task(name='task-feeds')
+def TaskFeeds():
     from apps.rss_feeds.models import Feed
     settings.LOG_TO_STREAM = True
     now = datetime.datetime.utcnow()
     start = time.time()
     r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
     tasked_feeds_size = r.zcard('tasked_feeds')
 
     hour_ago = now - datetime.timedelta(hours=1)
     r.zremrangebyscore('fetched_feeds_last_hour', 0, int(hour_ago.strftime('%s')))
 
     now_timestamp = int(now.strftime("%s"))
     queued_feeds = r.zrangebyscore('scheduled_updates', 0, now_timestamp)
     r.zremrangebyscore('scheduled_updates', 0, now_timestamp)
     if not queued_feeds:
         logging.debug(" ---> ~SN~FB~BMNo feeds to queue! Exiting...")
         return
 
     r.sadd('queued_feeds', *queued_feeds)
     logging.debug(" ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
         len(queued_feeds),
         r.zcard('tasked_feeds'),
         r.scard('queued_feeds'),
         r.zcard('scheduled_updates')))
 
     # Regular feeds
     if tasked_feeds_size < FEED_TASKING_MAX:
         feeds = r.srandmember('queued_feeds', FEED_TASKING_MAX)
         Feed.task_feeds(feeds, verbose=True)
         active_count = len(feeds)
     else:
         logging.debug(" ---> ~SN~FBToo many tasked feeds. ~SB%s~SN tasked." % tasked_feeds_size)
         active_count = 0
 
     logging.debug(" ---> ~SN~FBTasking %s feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
         active_count,
         int((time.time() - start)),
         r.zcard('tasked_feeds'),
         r.scard('queued_feeds'),
         r.zcard('scheduled_updates')))
 
-class TaskBrokenFeeds(Task):
-    name = 'task-broken-feeds'
-    max_retries = 0
-    ignore_result = True
-    
-    def run(self, **kwargs):
+@task(name='task-broken-feeds')
+def TaskBrokenFeeds():
     from apps.rss_feeds.models import Feed
     settings.LOG_TO_STREAM = True
     now = datetime.datetime.utcnow()
     start = time.time()
     r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
 
     logging.debug(" ---> ~SN~FBQueuing broken feeds...")
 
     # Force refresh feeds
     refresh_feeds = Feed.objects.filter(
         active=True,
         fetched_once=False,
         active_subscribers__gte=1
     ).order_by('?')[:100]
     refresh_count = refresh_feeds.count()
     cp1 = time.time()
 
     logging.debug(" ---> ~SN~FBFound %s active, unfetched broken feeds" % refresh_count)
 
     # Mistakenly inactive feeds
     hours_ago = (now - datetime.timedelta(minutes=10)).strftime('%s')
     old_tasked_feeds = r.zrangebyscore('tasked_feeds', 0, hours_ago)
     inactive_count = len(old_tasked_feeds)
     if inactive_count:
         r.zremrangebyscore('tasked_feeds', 0, hours_ago)
         # r.sadd('queued_feeds', *old_tasked_feeds)
         for feed_id in old_tasked_feeds:
             r.zincrby('error_feeds', 1, feed_id)
             feed = Feed.get_by_id(feed_id)
             feed.set_next_scheduled_update()
         logging.debug(" ---> ~SN~FBRe-queuing ~SB%s~SN dropped/broken feeds (~SB%s/%s~SN queued/tasked)" % (
             inactive_count,
             r.scard('queued_feeds'),
             r.zcard('tasked_feeds')))
     cp2 = time.time()
 
     old = now - datetime.timedelta(days=1)
     old_feeds = Feed.objects.filter(
         next_scheduled_update__lte=old,
         active_subscribers__gte=1
     ).order_by('?')[:500]
     old_count = old_feeds.count()
     cp3 = time.time()
 
     logging.debug(" ---> ~SN~FBTasking ~SBrefresh:~FC%s~FB inactive:~FC%s~FB old:~FC%s~SN~FB broken feeds... (%.4s/%.4s/%.4s)" % (
         refresh_count,
         inactive_count,
         old_count,
         cp1 - start,
         cp2 - cp1,
         cp3 - cp2,
     ))
 
     Feed.task_feeds(refresh_feeds, verbose=False)
     Feed.task_feeds(old_feeds, verbose=False)
 
     logging.debug(" ---> ~SN~FBTasking broken feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
         int((time.time() - start)),
         r.zcard('tasked_feeds'),
         r.scard('queued_feeds'),
         r.zcard('scheduled_updates')))
 
-class UpdateFeeds(Task):
-    name = 'update-feeds'
-    max_retries = 0
-    ignore_result = True
-    time_limit = 10*60
-    soft_time_limit = 9*60
-    
-    def run(self, feed_pks, **kwargs):
+@task(name='update-feeds', time_limit=10*60, soft_time_limit=9*60, ignore_result=True)
+def UpdateFeeds(feed_pks):
     from apps.rss_feeds.models import Feed
     from apps.statistics.models import MStatistics
     r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
 
     mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0))
     compute_scores = bool(mongodb_replication_lag < 10)
 
     profiler = DBProfilerMiddleware()
     profiler_activated = profiler.process_celery()
     if profiler_activated:
         mongo_middleware = MongoDumpMiddleware()
         mongo_middleware.process_celery(profiler)
         redis_middleware = RedisDumpMiddleware()
         redis_middleware.process_celery(profiler)
 
     options = {
         'quick': float(MStatistics.get('quick_fetch', 0)),
         'updates_off': MStatistics.get('updates_off', False),
         'compute_scores': compute_scores,
         'mongodb_replication_lag': mongodb_replication_lag,
     }
 
     if not isinstance(feed_pks, list):
         feed_pks = [feed_pks]
 
     for feed_pk in feed_pks:
         feed = Feed.get_by_id(feed_pk)
         if not feed or feed.pk != int(feed_pk):
             logging.info(" ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..." % (feed_pk, feed and feed.pk))
             r.zrem('tasked_feeds', feed_pk)
         if not feed:
             continue
         try:
             feed.update(**options)
         except SoftTimeLimitExceeded, e:
             feed.save_feed_history(505, 'Timeout', e)
             logging.info(" ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." % feed)
         if profiler_activated: profiler.process_celery_finished()
 
-class NewFeeds(Task):
-    name = 'new-feeds'
-    max_retries = 0
-    ignore_result = True
-    time_limit = 10*60
-    soft_time_limit = 9*60
-    
-    def run(self, feed_pks, **kwargs):
+@task(name='new-feeds', time_limit=10*60, soft_time_limit=9*60, ignore_result=True)
+def NewFeeds(feed_pks):
     from apps.rss_feeds.models import Feed
     if not isinstance(feed_pks, list):
         feed_pks = [feed_pks]
 
     options = {}
     for feed_pk in feed_pks:
         feed = Feed.get_by_id(feed_pk)
         if not feed: continue
         feed.update(options=options)
 
-class PushFeeds(Task):
-    name = 'push-feeds'
-    max_retries = 0
-    ignore_result = True
-    
-    def run(self, feed_id, xml, **kwargs):
+@task(name='push-feeds', ignore_result=True)
+def PushFeeds(feed_id, xml):
     from apps.rss_feeds.models import Feed
     from apps.statistics.models import MStatistics
 
     mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0))
     compute_scores = bool(mongodb_replication_lag < 60)
 
     options = {
         'feed_xml': xml,
         'compute_scores': compute_scores,
         'mongodb_replication_lag': mongodb_replication_lag,
     }
     feed = Feed.get_by_id(feed_id)
     if feed:
         feed.update(options=options)
 
-class BackupMongo(Task):
-    name = 'backup-mongo'
-    max_retries = 0
-    ignore_result = True
-    
-    def run(self, **kwargs):
+@task(name='backup-mongo', ignore_result=True)
def BackupMongo():
     COLLECTIONS = "classifier_tag classifier_author classifier_feed classifier_title userstories starred_stories shared_stories category category_site sent_emails social_profile social_subscription social_services statistics feedback"
 
     date = time.strftime('%Y-%m-%d-%H-%M')
     collections = COLLECTIONS.split(' ')
     db_name = 'newsblur'
     dir_name = 'backup_mongo_%s' % date
     filename = '%s.tgz' % dir_name
     os.mkdir(dir_name)
 
     for collection in collections:
         cmd = 'mongodump --db %s --collection %s -o %s' % (db_name, collection, dir_name)
         logging.debug(' ---> ~FMDumping ~SB%s~SN: %s' % (collection, cmd))
         os.system(cmd)
 
     cmd = 'tar -jcf %s %s' % (filename, dir_name)
     os.system(cmd)
 
     logging.debug(' ---> ~FRUploading ~SB~FM%s~SN~FR to S3...' % filename)
     s3.save_file_in_s3(filename)
     shutil.rmtree(dir_name)
     os.remove(filename)
     logging.debug(' ---> ~FRFinished uploading ~SB~FM%s~SN~FR to S3.' % filename)
 
-class ScheduleImmediateFetches(Task):
-    
-    def run(self, feed_ids, user_id=None, **kwargs):
+@task()
+def ScheduleImmediateFetches(feed_ids, user_id=None):
     from apps.rss_feeds.models import Feed
 
     if not isinstance(feed_ids, list):
         feed_ids = [feed_ids]
     Feed.schedule_feed_fetches_immediately(feed_ids, user_id=user_id)
 
-class SchedulePremiumSetup(Task):
-    
-    def run(self, feed_ids, **kwargs):
+@task()
+def SchedulePremiumSetup(feed_ids):
     from apps.rss_feeds.models import Feed
 
     if not isinstance(feed_ids, list):
         feed_ids = [feed_ids]
     Feed.setup_feeds_for_premium_subscribers(feed_ids)
 
-class ScheduleCountTagsForUser(Task):
-    
-    def run(self, user_id):
+@task()
+def ScheduleCountTagsForUser(user_id):
     from apps.rss_feeds.models import MStarredStoryCounts
     MStarredStoryCounts.count_for_user(user_id)

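Worth noting in UpdateFeeds and NewFeeds: the time_limit, soft_time_limit, and ignore_result class attributes become keyword arguments on the decorator. The soft limit still surfaces as a SoftTimeLimitExceeded exception inside the task body, which is what lets UpdateFeeds record a 505 Timeout for one feed and move on to the next. A minimal standalone sketch of that pattern (process_one and log_timeout are hypothetical helpers, not from this commit):

    from celery.task import task
    from celery.exceptions import SoftTimeLimitExceeded

    @task(time_limit=10*60, soft_time_limit=9*60, ignore_result=True)
    def process_batch(item_ids):
        for item_id in item_ids:
            try:
                process_one(item_id)  # hypothetical per-item work
            except SoftTimeLimitExceeded:
                # Raised once the 9-minute soft limit passes, leaving the
                # final minute before the hard limit to record the failure.
                log_timeout(item_id)  # hypothetical
                break
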
apps/search/tasks.py  View file

@@ -1,26 +1,21 @@
-from celery.task import Task
+from celery.task import task
 
-class IndexSubscriptionsForSearch(Task):
-    
-    def run(self, user_id):
+@task()
+def IndexSubscriptionsForSearch(user_id):
     from apps.search.models import MUserSearch
     user_search = MUserSearch.get_user(user_id)
     user_search.index_subscriptions_for_search()
 
-class IndexSubscriptionsChunkForSearch(Task):
-    
-    ignore_result = False
-    
-    def run(self, feed_ids, user_id):
+@task()
+def IndexSubscriptionsChunkForSearch(feed_ids, user_id):
     from apps.search.models import MUserSearch
     user_search = MUserSearch.get_user(user_id)
     user_search.index_subscriptions_chunk_for_search(feed_ids)
 
-class IndexFeedsForSearch(Task):
-    
-    def run(self, feed_ids, user_id):
+@task()
+def IndexFeedsForSearch(feed_ids, user_id):
     from apps.search.models import MUserSearch
     MUserSearch.index_feeds_for_search(feed_ids, user_id)

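The chunked variant takes a slice of feed_ids plus the user, which suggests the caller fans indexing out across several of these tasks. A hedged sketch of what that fan-out could look like (the chunk size and variable names are illustrative, not from this commit):

    # Hypothetical fan-out: queue one IndexSubscriptionsChunkForSearch
    # task per slice of the user's subscribed feeds.
    CHUNK_SIZE = 100
    for i in range(0, len(feed_ids), CHUNK_SIZE):
        IndexSubscriptionsChunkForSearch.delay(feed_ids[i:i + CHUNK_SIZE], user_id)
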
apps/social/tasks.py  View file

@@ -1,92 +1,79 @@
 from bson.objectid import ObjectId
-from celery.task import Task
+from celery.task import task
 from apps.social.models import MSharedStory, MSocialProfile, MSocialServices, MSocialSubscription
 from django.contrib.auth.models import User
 from utils import log as logging
 
-class PostToService(Task):
-    
-    def run(self, shared_story_id, service):
+@task()
+def PostToService(shared_story_id, service):
     try:
         shared_story = MSharedStory.objects.get(id=ObjectId(shared_story_id))
         shared_story.post_to_service(service)
     except MSharedStory.DoesNotExist:
         logging.debug(" ---> Shared story not found (%s). Can't post to: %s" % (shared_story_id, service))
 
-class EmailNewFollower(Task):
-    
-    def run(self, follower_user_id, followee_user_id):
+@task()
+def EmailNewFollower(follower_user_id, followee_user_id):
     user_profile = MSocialProfile.get_user(followee_user_id)
     user_profile.send_email_for_new_follower(follower_user_id)
 
-class EmailFollowRequest(Task):
-    
-    def run(self, follower_user_id, followee_user_id):
+@task()
+def EmailFollowRequest(follower_user_id, followee_user_id):
     user_profile = MSocialProfile.get_user(followee_user_id)
     user_profile.send_email_for_follow_request(follower_user_id)
 
-class EmailFirstShare(Task):
-    
-    def run(self, user_id):
+@task()
+def EmailFirstShare(user_id):
     user = User.objects.get(pk=user_id)
     user.profile.send_first_share_to_blurblog_email()
 
-class EmailCommentReplies(Task):
-    
-    def run(self, shared_story_id, reply_id):
+@task()
+def EmailCommentReplies(shared_story_id, reply_id):
     shared_story = MSharedStory.objects.get(id=ObjectId(shared_story_id))
     shared_story.send_emails_for_new_reply(ObjectId(reply_id))
 
-class EmailStoryReshares(Task):
-    
-    def run(self, shared_story_id):
+@task
+def EmailStoryReshares(shared_story_id):
     shared_story = MSharedStory.objects.get(id=ObjectId(shared_story_id))
     shared_story.send_email_for_reshare()
 
-class SyncTwitterFriends(Task):
-    
-    def run(self, user_id):
+@task()
+def SyncTwitterFriends(user_id):
     social_services = MSocialServices.objects.get(user_id=user_id)
     social_services.sync_twitter_friends()
 
-class SyncFacebookFriends(Task):
-    
-    def run(self, user_id):
+@task()
+def SyncFacebookFriends(user_id):
     social_services = MSocialServices.objects.get(user_id=user_id)
     social_services.sync_facebook_friends()
 
-class SharePopularStories(Task):
-    name = 'share-popular-stories'
-    
-    def run(self, **kwargs):
+@task(name="share-popular-stories")
+def SharePopularStories():
     logging.debug(" ---> Sharing popular stories...")
     MSharedStory.share_popular_stories(interactive=False)
 
-class CleanSocialSpam(Task):
-    name = 'clean-social-spam'
-    
-    def run(self, **kwargs):
+@task(name='clean-social-spam')
+def CleanSocialSpam():
     logging.debug(" ---> Finding social spammers...")
     MSharedStory.count_potential_spammers(destroy=True)
 
-class UpdateRecalcForSubscription(Task):
-    
-    def run(self, subscription_user_id, shared_story_id):
+@task()
+def UpdateRecalcForSubscription(subscription_user_id, shared_story_id):
     user = User.objects.get(pk=subscription_user_id)
     socialsubs = MSocialSubscription.objects.filter(subscription_user_id=subscription_user_id)
     try:
         shared_story = MSharedStory.objects.get(id=ObjectId(shared_story_id))
     except MSharedStory.DoesNotExist:
         return
     logging.debug(" ---> ~FM~SNFlipping unread recalc for ~SB%s~SN subscriptions to ~SB%s's blurblog~SN" % (
         socialsubs.count(),
         user.username
     ))
     for socialsub in socialsubs:
         socialsub.needs_unread_recalc = True
         socialsub.save()
 
     shared_story.publish_update_to_subscribers()

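One inconsistency worth flagging in review: EmailStoryReshares uses bare @task where its siblings use @task(). Celery's decorator accepts both spellings, applied directly to the function or called first with no options, so the behavior is identical; a tiny sketch:

    from celery.task import task

    @task          # decorator applied directly to the function
    def bare(x):
        return x

    @task()        # decorator factory called with no options
    def called(x):
        return x
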
apps/statistics/tasks.py  View file

@@ -1,21 +1,17 @@
-from celery.task import Task
+from celery.task import task
 from apps.statistics.models import MStatistics
 from apps.statistics.models import MFeedback
-# from utils import log as logging
+from utils import log as logging
 
-class CollectStats(Task):
-    name = 'collect-stats'
-    
-    def run(self, **kwargs):
-        # logging.debug(" ---> ~FBCollecting stats...")
+@task(name='collect-stats')
+def CollectStats():
+    logging.debug(" ---> ~FBCollecting stats...")
     MStatistics.collect_statistics()
 
-class CollectFeedback(Task):
-    name = 'collect-feedback'
-    
-    def run(self, **kwargs):
-        # logging.debug(" ---> ~FBCollecting feedback...")
+@task(name='collect-feedback')
+def CollectFeedback():
+    logging.debug(" ---> ~FBCollecting feedback...")
     MFeedback.collect_feedback()