import datetime
import mongoengine as mongo
import urllib.request, urllib.error, urllib.parse
import redis
import dateutil.parser
from django.conf import settings
from apps.social.models import MSharedStory
from apps.profile.models import Profile
from apps.statistics.rstats import RStats, round_time
from utils.story_functions import relative_date
from utils import json_functions as json
from utils import db_functions
from utils import log as logging


class MStatistics(mongo.Document):
    key = mongo.StringField(unique=True)
    value = mongo.DynamicField()
    expiration_date = mongo.DateTimeField()

    meta = {
        'collection': 'statistics',
        'allow_inheritance': False,
        'indexes': ['key'],
    }

    def __str__(self):
        return "%s: %s" % (self.key, self.value)

    @classmethod
    def get(cls, key, default=None):
        obj = cls.objects.filter(key=key).first()
        if not obj:
            return default
        if obj.expiration_date and obj.expiration_date < datetime.datetime.now():
            obj.delete()
            return default
        return obj.value

    @classmethod
    def set(cls, key, value, expiration_sec=None):
        try:
            obj = cls.objects.get(key=key)
        except cls.DoesNotExist:
            obj = cls.objects.create(key=key)
        obj.value = value
        if expiration_sec:
            obj.expiration_date = datetime.datetime.now() + datetime.timedelta(seconds=expiration_sec)
        obj.save()
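
    # Illustrative usage (hypothetical key and values, not part of this module):
    #   MStatistics.set('cached_count', 42, expiration_sec=60 * 60)
    #   MStatistics.get('cached_count', default=0)
    # get() lazily deletes an expired value and returns the default instead.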

    @classmethod
    def all(cls):
        stats = cls.objects.all()
        values = dict([(stat.key, stat.value) for stat in stats])
        for key, value in list(values.items()):
            if key in ('avg_time_taken', 'sites_loaded', 'stories_shared'):
                values[key] = json.decode(value)
            elif key in ('feeds_fetched', 'premium_users', 'standard_users', 'latest_sites_loaded',
                         'max_sites_loaded', 'max_stories_shared'):
                values[key] = int(value)
            elif key in ('latest_avg_time_taken', 'max_avg_time_taken', 'last_5_min_time_taken'):
                values[key] = float(value)

        values['total_sites_loaded'] = sum(values['sites_loaded']) if 'sites_loaded' in values else 0
        values['total_stories_shared'] = sum(values['stories_shared']) if 'stories_shared' in values else 0

        return values

    @classmethod
    def collect_statistics(cls):
        now = datetime.datetime.now()
        cls.collect_statistics_premium_users()
        print("Premiums: %s" % (datetime.datetime.now() - now))
        cls.collect_statistics_standard_users()
        print("Standard users: %s" % (datetime.datetime.now() - now))
        cls.collect_statistics_sites_loaded()
        print("Sites loaded: %s" % (datetime.datetime.now() - now))
        cls.collect_statistics_stories_shared()
        print("Stories shared: %s" % (datetime.datetime.now() - now))
        cls.collect_statistics_for_db()
        print("DB Stats: %s" % (datetime.datetime.now() - now))
        cls.collect_statistics_feeds_fetched()
        print("Feeds Fetched: %s" % (datetime.datetime.now() - now))

    @classmethod
    def collect_statistics_feeds_fetched(cls):
        feeds_fetched = RStats.count('feed_fetch', hours=24)
        cls.objects(key='feeds_fetched').update_one(upsert=True,
                                                    set__key='feeds_fetched',
                                                    set__value=feeds_fetched)

        return feeds_fetched
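
    # RStats.count('feed_fetch', hours=24) is expected to tally feed-fetch events
    # recorded in Redis over the trailing 24 hours; the result is upserted under
    # the 'feeds_fetched' key for the dashboard.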

    @classmethod
    def collect_statistics_premium_users(cls):
        last_day = datetime.datetime.now() - datetime.timedelta(hours=24)

        premium_users = Profile.objects.filter(last_seen_on__gte=last_day, is_premium=True).count()
        cls.objects(key='premium_users').update_one(upsert=True, set__key='premium_users', set__value=premium_users)

        return premium_users

    @classmethod
    def collect_statistics_standard_users(cls):
        last_day = datetime.datetime.now() - datetime.timedelta(hours=24)

        standard_users = Profile.objects.filter(last_seen_on__gte=last_day, is_premium=False).count()
        cls.objects(key='standard_users').update_one(upsert=True, set__key='standard_users', set__value=standard_users)

        return standard_users

    @classmethod
    def collect_statistics_sites_loaded(cls):
        now = round_time(datetime.datetime.now(), round_to=60)
        sites_loaded = []
        avg_time_taken = []
        last_5_min_time_taken = 0
        r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)

        for hour in range(24):
            start_hours_ago = now - datetime.timedelta(hours=hour+1)

            pipe = r.pipeline()
            for m in range(60):
                minute = start_hours_ago + datetime.timedelta(minutes=m)
                key = "%s:%s" % (RStats.stats_type('page_load'), minute.strftime('%s'))
                pipe.get("%s:s" % key)
                pipe.get("%s:a" % key)

            times = pipe.execute()

            counts = [int(c) for c in times[::2] if c]
            avgs = [float(a) for a in times[1::2] if a]

            if hour == 0:
                last_5_min_time_taken = round(sum(avgs[:1]) / max(1, sum(counts[:1])), 2)

            if counts and avgs:
                count = max(1, sum(counts))
                avg = round(sum(avgs) / count, 3)
            else:
                count = 0
                avg = 0

            sites_loaded.append(count)
            avg_time_taken.append(avg)

        sites_loaded.reverse()
        avg_time_taken.reverse()

        values = (
            ('sites_loaded', json.encode(sites_loaded)),
            ('avg_time_taken', json.encode(avg_time_taken)),
            ('latest_sites_loaded', sites_loaded[-1]),
            ('latest_avg_time_taken', avg_time_taken[-1]),
            ('max_sites_loaded', max(sites_loaded)),
            ('max_avg_time_taken', max(1, max(avg_time_taken))),
            ('last_5_min_time_taken', last_5_min_time_taken),
        )
        for key, value in values:
            cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
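
    # The page-load figures read above are assumed to live in per-minute Redis
    # buckets keyed "<stats_type>:<unix minute>", where the ":s" suffix holds the
    # number of page loads in that minute and ":a" the accumulated load time, so
    # sum(:a) / sum(:s) yields the average time taken for each hour.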

    @classmethod
    def collect_statistics_stories_shared(cls):
        now = datetime.datetime.now()
        stories_shared = []

        for hour in range(24):
            start_hours_ago = now - datetime.timedelta(hours=hour)
            end_hours_ago = now - datetime.timedelta(hours=hour+1)
            shares = MSharedStory.objects.filter(
                shared_date__lte=start_hours_ago,
                shared_date__gte=end_hours_ago
            ).count()
            stories_shared.append(shares)

        stories_shared.reverse()

        values = (
            ('stories_shared', json.encode(stories_shared)),
            ('latest_stories_shared', stories_shared[-1]),
            ('max_stories_shared', max(stories_shared)),
        )
        for key, value in values:
            cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
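
    # Hourly buckets are collected newest-first and then reversed, so index -1
    # (the 'latest_*' values) always refers to the most recent hour of each series.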

    @classmethod
    def collect_statistics_for_db(cls, debug=False):
        lag = db_functions.mongo_max_replication_lag(settings.MONGODB)
        cls.set('mongodb_replication_lag', lag)

        now = round_time(datetime.datetime.now(), round_to=60)
        r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)
        db_times = {}
        latest_db_times = {}

        for db in ['sql', 'mongo', 'redis', 'task_sql', 'task_mongo', 'task_redis']:
            db_times[db] = []
            for hour in range(24):
                start_hours_ago = now - datetime.timedelta(hours=hour+1)

                pipe = r.pipeline()
                for m in range(60):
                    minute = start_hours_ago + datetime.timedelta(minutes=m)
                    key = "DB:%s:%s" % (db, minute.strftime('%s'))
                    if debug:
                        print(" -> %s:c" % key)
                    pipe.get("%s:c" % key)
                    pipe.get("%s:t" % key)

                times = pipe.execute()

                counts = [int(c or 0) for c in times[::2]]
                avgs = [float(a or 0) for a in times[1::2]]
                if counts and avgs:
                    count = sum(counts)
                    avg = round(sum(avgs) / count, 3) if count else 0
                else:
                    count = 0
                    avg = 0

                if hour == 0:
                    latest_count = float(counts[-1]) if len(counts) else 0
                    latest_avg = float(avgs[-1]) if len(avgs) else 0
                    latest_db_times[db] = latest_avg / latest_count if latest_count else 0
                db_times[db].append(avg)

            db_times[db].reverse()

        values = (
            ('avg_sql_times', json.encode(db_times['sql'])),
            ('avg_mongo_times', json.encode(db_times['mongo'])),
            ('avg_redis_times', json.encode(db_times['redis'])),
            ('latest_sql_avg', latest_db_times['sql']),
            ('latest_mongo_avg', latest_db_times['mongo']),
            ('latest_redis_avg', latest_db_times['redis']),
            ('latest_task_sql_avg', latest_db_times['task_sql']),
            ('latest_task_mongo_avg', latest_db_times['task_mongo']),
            ('latest_task_redis_avg', latest_db_times['task_redis']),
        )
        for key, value in values:
            cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
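
    # The database timings consumed here are assumed to be written to per-minute
    # Redis buckets keyed "DB:<backend>:<unix minute>", where ":c" holds the call
    # count for that minute and ":t" the accumulated time, mirroring the
    # page-load buckets used by collect_statistics_sites_loaded().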


class MFeedback(mongo.Document):
    date = mongo.DateTimeField()
    date_short = mongo.StringField()
    subject = mongo.StringField()
    url = mongo.StringField()
    style = mongo.StringField()
    order = mongo.IntField()

    meta = {
        'collection': 'feedback',
        'allow_inheritance': False,
        'indexes': ['style'],
        'ordering': ['order'],
    }

    CATEGORIES = {
        5: 'idea',
        6: 'problem',
        7: 'praise',
        8: 'question',
    }

    def __str__(self):
        return "%s: (%s) %s" % (self.style, self.date, self.subject)

    @classmethod
    def collect_feedback(cls):
        seen_posts = set()
        try:
            data = urllib.request.urlopen('https://forum.newsblur.com/posts.json').read()
        except urllib.error.HTTPError as e:
            logging.debug(" ***> Failed to collect feedback: %s" % e)
            return
        data = json.decode(data).get('latest_posts', "")

        if not len(data):
            print("No data!")
            return

        cls.objects.delete()
        post_count = 0
        for post in data:
            if post['topic_id'] in seen_posts: continue
            seen_posts.add(post['topic_id'])
            feedback = {}
            feedback['order'] = post_count
            post_count += 1
            feedback['date'] = dateutil.parser.parse(post['created_at']).replace(tzinfo=None)
            feedback['date_short'] = relative_date(feedback['date'])
            feedback['subject'] = post['topic_title']
            feedback['url'] = "https://forum.newsblur.com/t/%s/%s/%s" % (post['topic_slug'], post['topic_id'], post['post_number'])
            feedback['style'] = cls.CATEGORIES[post['category_id']]
            cls.objects.create(**feedback)
            print("%s: %s (%s)" % (feedback['style'], feedback['subject'], feedback['date_short']))
            if post_count >= 4: break
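
    # collect_feedback() assumes a Discourse-style /posts.json payload: each entry
    # in 'latest_posts' carries topic_id, topic_slug, topic_title, post_number,
    # created_at, and a category_id that maps into CATEGORIES above. Only the four
    # most recent distinct topics are stored.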

    @classmethod
    def all(cls):
        feedbacks = cls.objects.all()[:4]

        return feedbacks


class MAnalyticsFetcher(mongo.Document):
    date = mongo.DateTimeField(default=datetime.datetime.now)
    feed_id = mongo.IntField()
    feed_fetch = mongo.FloatField()
    feed_process = mongo.FloatField()
    page = mongo.FloatField()
    icon = mongo.FloatField()
    total = mongo.FloatField()
    server = mongo.StringField()
    feed_code = mongo.IntField()

    meta = {
        'db_alias': 'nbanalytics',
        'collection': 'feed_fetches',
        'allow_inheritance': False,
        'indexes': ['date', 'feed_id', 'server', 'feed_code'],
        'ordering': ['date'],
    }

    def __str__(self):
        return "%s: %.4s+%.4s+%.4s+%.4s = %.4ss" % (self.feed_id, self.feed_fetch,
                                                     self.feed_process,
                                                     self.page,
                                                     self.icon,
                                                     self.total)

    @classmethod
    def add(cls, feed_id, feed_fetch, feed_process,
            page, icon, total, feed_code):
        server_name = settings.SERVER_NAME
        if 'app' in server_name: return

        if icon and page:
            icon -= page
        if page and feed_process:
            page -= feed_process
        elif page and feed_fetch:
            page -= feed_fetch
        if feed_process and feed_fetch:
            feed_process -= feed_fetch

        cls.objects.create(feed_id=feed_id, feed_fetch=feed_fetch,
                           feed_process=feed_process,
                           page=page, icon=icon, total=total,
                           server=server_name, feed_code=feed_code)
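
    # The incoming timings appear to be cumulative checkpoints measured from the
    # start of a fetch (feed_fetch <= feed_process <= page <= icon), so the
    # subtractions above convert them into per-stage durations before saving.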

    @classmethod
    def calculate_stats(cls, stats):
        return cls.aggregate(**stats)


class MAnalyticsLoader(mongo.Document):
    date = mongo.DateTimeField(default=datetime.datetime.now)
    page_load = mongo.FloatField()
    server = mongo.StringField()

    meta = {
        'db_alias': 'nbanalytics',
        'collection': 'page_loads',
        'allow_inheritance': False,
        'indexes': ['date', 'server'],
        'ordering': ['date'],
    }

    def __str__(self):
        return "%s: %.4ss" % (self.server, self.page_load)

    @classmethod
    def add(cls, page_load):
        server_name = settings.SERVER_NAME

        cls.objects.create(page_load=page_load, server=server_name)

    @classmethod
    def calculate_stats(cls, stats):
        return cls.aggregate(**stats)