import datetime
import urllib2

import mongoengine as mongo

from django.db.models import Avg, Count

from apps.rss_feeds.models import MFeedFetchHistory, MPageFetchHistory, FeedLoadtime
from apps.profile.models import Profile
from utils import json_functions as json

class MStatistics(mongo.Document):
    """Site-wide statistics, stored as simple key/value pairs and recomputed
    periodically over a rolling 24-hour window."""
    key   = mongo.StringField(unique=True)
    value = mongo.StringField()

    meta = {
        'collection': 'statistics',
        'allow_inheritance': False,
        'indexes': ['key'],
    }

    def __unicode__(self):
        return "%s: %s" % (self.key, self.value)

    @classmethod
    def all(cls):
        # Values are stored as strings; coerce each known key back to its
        # natural type (JSON-encoded list, int, or float) before returning.
        values = dict([(stat.key, stat.value) for stat in cls.objects.all()])
        for key, value in values.items():
            if key in ('avg_time_taken', 'sites_loaded'):
                values[key] = json.decode(value)
            elif key in ('feeds_fetched', 'premium_users', 'standard_users',
                         'latest_sites_loaded', 'max_sites_loaded'):
                values[key] = int(value)
            elif key in ('latest_avg_time_taken', 'max_avg_time_taken'):
                values[key] = float(value)
        return values

    @classmethod
    def collect_statistics(cls):
        now = datetime.datetime.now()
        last_day = now - datetime.timedelta(hours=24)
        cls.collect_statistics_feeds_fetched(last_day)
        print "Feeds Fetched: %s" % (datetime.datetime.now() - now)
        cls.collect_statistics_premium_users(last_day)
        print "Premiums: %s" % (datetime.datetime.now() - now)
        cls.collect_statistics_standard_users(last_day)
        print "Standard users: %s" % (datetime.datetime.now() - now)
        cls.collect_statistics_sites_loaded(last_day)
        print "Sites loaded: %s" % (datetime.datetime.now() - now)

    @classmethod
    def collect_statistics_feeds_fetched(cls, last_day=None):
        if not last_day:
            last_day = datetime.datetime.now() - datetime.timedelta(hours=24)

        feeds_fetched = MFeedFetchHistory.objects.count()
        cls.objects(key='feeds_fetched').update_one(upsert=True, key='feeds_fetched', value=feeds_fetched)
        pages_fetched = MPageFetchHistory.objects.count()
        cls.objects(key='pages_fetched').update_one(upsert=True, key='pages_fetched', value=pages_fetched)

        # Trim fetch history older than a day so the counts above stay
        # bounded to a rolling 24-hour window. Deleting can be slow, so
        # give up after 60 seconds rather than hold up the whole run.
        from utils.feed_functions import timelimit, TimeoutError
        @timelimit(60)
        def delete_old_history():
            MFeedFetchHistory.objects(fetch_date__lt=last_day).delete()
            MPageFetchHistory.objects(fetch_date__lt=last_day).delete()
        try:
            delete_old_history()
        except TimeoutError:
            print "Timed out on deleting old history. Shit."

        return feeds_fetched

    @classmethod
    def collect_statistics_premium_users(cls, last_day=None):
        if not last_day:
            last_day = datetime.datetime.now() - datetime.timedelta(hours=24)

        premium_users = Profile.objects.filter(last_seen_on__gte=last_day, is_premium=True).count()
        cls.objects(key='premium_users').update_one(upsert=True, key='premium_users', value=premium_users)

        return premium_users

    @classmethod
    def collect_statistics_standard_users(cls, last_day=None):
        if not last_day:
            last_day = datetime.datetime.now() - datetime.timedelta(hours=24)

        standard_users = Profile.objects.filter(last_seen_on__gte=last_day, is_premium=False).count()
        cls.objects(key='standard_users').update_one(upsert=True, key='standard_users', value=standard_users)

        return standard_users

    @classmethod
    def collect_statistics_sites_loaded(cls, last_day=None):
        if not last_day:
            last_day = datetime.datetime.now() - datetime.timedelta(hours=24)
        now = datetime.datetime.now()
        sites_loaded = []
        avg_time_taken = []

        # Bucket the last 24 hours of feed loads into hourly counts and
        # average load times, then reverse so the oldest hour comes first.
        for hour in range(24):
            start_hours_ago = now - datetime.timedelta(hours=hour)
            end_hours_ago = now - datetime.timedelta(hours=hour+1)
            aggregates = dict(count=Count('loadtime'), avg=Avg('loadtime'))
            load_times = FeedLoadtime.objects.filter(
                date_accessed__lte=start_hours_ago,
                date_accessed__gte=end_hours_ago
            ).aggregate(**aggregates)
            sites_loaded.append(load_times['count'] or 0)
            avg_time_taken.append(load_times['avg'] or 0)
        sites_loaded.reverse()
        avg_time_taken.reverse()

        values = (
            ('sites_loaded', json.encode(sites_loaded)),
            ('avg_time_taken', json.encode(avg_time_taken)),
            ('latest_sites_loaded', sites_loaded[-1]),
            ('latest_avg_time_taken', avg_time_taken[-1]),
            ('max_sites_loaded', max(sites_loaded)),
            ('max_avg_time_taken', max(1, max(avg_time_taken))),
        )
        for key, value in values:
            cls.objects(key=key).update_one(upsert=True, key=key, value=value)
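
# Usage sketch (illustrative, not part of the original module): these
# collectors are meant to run on a schedule. A cron-driven Django management
# command is one plausible wiring; the command module below is hypothetical.
#
#     # apps/statistics/management/commands/collect_stats.py (hypothetical)
#     from django.core.management.base import BaseCommand
#     from apps.statistics.models import MStatistics
#
#     class Command(BaseCommand):
#         def handle(self, *args, **options):
#             MStatistics.collect_statistics()  # recompute all 24-hour stats
#             stats = MStatistics.all()         # {key: typed value}
#             print "Feeds fetched in the last day: %s" % stats.get('feeds_fetched', 0)
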
class MFeedback(mongo.Document):
    date    = mongo.StringField()
    summary = mongo.StringField()
    subject = mongo.StringField()
    url     = mongo.StringField()
    style   = mongo.StringField()
    order   = mongo.IntField()

    meta = {
        'collection': 'feedback',
        'allow_inheritance': False,
        'indexes': ['style'],
        'ordering': ['order'],
    }

    def __unicode__(self):
        return "%s: (%s) %s" % (self.style, self.date, self.subject)

    @classmethod
    def collect_feedback(cls):
        # The Get Satisfaction widget responds with JSONP; strip the wrapping
        # parentheses before decoding the JSON payload.
        data = urllib2.urlopen('https://getsatisfaction.com/newsblur/topics.widget').read()
        data = json.decode(data[1:-1])
        i = 0
        if len(data):
            # Replace the stored feedback wholesale with the fresh topics.
            cls.objects.delete()
            for feedback in data:
                # Preserve the widget's ordering and tidy relative dates
                # such as "about 2 hours" and "less than a minute".
                feedback['order'] = i
                i += 1
                for removal in ['about', 'less than']:
                    if removal in feedback['date']:
                        feedback['date'] = feedback['date'].replace(removal, '')
            for feedback in data:
                # Convert unicode keys to strings so they can be passed
                # as keyword arguments.
                fb = dict([(str(k), v) for k, v in feedback.items()])
                cls.objects.create(**fb)

    @classmethod
    def all(cls):
        feedbacks = cls.objects.all()[:5]

        return feedbacks
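
# Usage sketch (illustrative, not part of the original module): collect_feedback()
# would likewise run on a schedule, while all() supplies the five most recent
# topics (ordered by 'order') for display. The surrounding code is hypothetical.
#
#     from apps.statistics.models import MFeedback
#
#     MFeedback.collect_feedback()   # refresh topics from Get Satisfaction
#     for fb in MFeedback.all():     # five topics, honoring the meta ordering
#         print "%s: %s (%s)" % (fb.style, fb.subject, fb.date)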