Merge branch 'dblog'

* dblog:
  Bumping db logging down to 1% of all requests.
  Adding special DB request logs for sql, mongo, and redis.
Author: Samuel Clay
Date:   2014-03-19 14:22:41 -07:00
Commit: 7a1f19dccb
6 changed files with 138 additions and 19 deletions
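
The mechanics, in brief: DBProfilerMiddleware samples 1% of requests, tags them with a 'db_profiler' segment, and on response writes per-backend timings into per-minute Redis buckets. A rough sketch of the key scheme used in _save_times below (a plain local Redis stands in for settings.REDIS_STATISTICS_POOL, and the sample timing is made up):

    # Sketch of the per-minute bucket writes done in _save_times below.
    # "DB:<db>:<unix minute>" gets two keys: ":c" counts sampled requests,
    # ":t" accumulates seconds; both expire after two days.
    import datetime
    import redis

    r = redis.Redis()  # assumption: local Redis in place of REDIS_STATISTICS_POOL
    minute = datetime.datetime.now().replace(second=0, microsecond=0)
    key = "DB:%s:%s" % ('sql', minute.strftime('%s'))
    expires = int((minute + datetime.timedelta(days=2)).strftime('%s'))
    r.incr("%s:c" % key)                 # one more sampled request this minute
    r.incrbyfloat("%s:t" % key, 0.042)   # add this request's SQL seconds
    r.expireat("%s:c" % key, expires)
    r.expireat("%s:t" % key, expires)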

apps/profile/middleware.py

@@ -2,12 +2,14 @@ import datetime
 import re
 import random
 import time
+import redis
 from utils import log as logging
 from django.http import HttpResponse
 from django.conf import settings
 from django.db import connection
 from django.template import Template, Context
 from apps.profile.tasks import CleanupUser
+from apps.statistics.rstats import round_time
 from utils import json_functions as json
 
 class LastSeenMiddleware(object):
@@ -36,10 +38,39 @@ class LastSeenMiddleware(object):
         return response
 
+
+class DBProfilerMiddleware:
+    def process_request(self, request):
+        setattr(request, 'activated_segments', [])
+        if random.random() < .01:
+            request.activated_segments.append('db_profiler')
+            connection.use_debug_cursor = True
+
+    def process_exception(self, request, exception):
+        if hasattr(request, 'sql_times_elapsed'):
+            self._save_times(request.sql_times_elapsed)
+
+    def process_response(self, request, response):
+        if hasattr(request, 'sql_times_elapsed'):
+            self._save_times(request.sql_times_elapsed)
+        return response
+
+    def _save_times(self, db_times):
+        r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)
+        pipe = r.pipeline()
+        minute = round_time(round_to=60)
+        for db, duration in db_times.items():
+            key = "DB:%s:%s" % (db, minute.strftime('%s'))
+            pipe.incr("%s:c" % key)
+            pipe.expireat("%s:c" % key, (minute + datetime.timedelta(days=2)).strftime("%s"))
+            if duration:
+                pipe.incrbyfloat("%s:t" % key, duration)
+                pipe.expireat("%s:t" % key, (minute + datetime.timedelta(days=2)).strftime("%s"))
+        pipe.execute()
+
+
 class SQLLogToConsoleMiddleware:
     def process_response(self, request, response):
-        if settings.DEBUG and connection.queries:
+        if (settings.DEBUG or 'db_profiler' in request.activated_segments) and connection.queries:
             time_elapsed = sum([float(q['time']) for q in connection.queries])
             queries = connection.queries
             for query in queries:
@@ -54,7 +85,20 @@ class SQLLogToConsoleMiddleware:
                 query['sql'] = re.sub(r'UPDATE', '~FY~SBUPDATE', query['sql'])
                 query['sql'] = re.sub(r'DELETE', '~FR~SBDELETE', query['sql'])
             t = Template("{% for sql in sqllog %}{% if not forloop.first %} {% endif %}[{{forloop.counter}}] ~FC{{sql.time}}s~FW: {{sql.sql|safe}}{% if not forloop.last %}\n{% endif %}{% endfor %}")
-            logging.debug(t.render(Context({'sqllog':queries,'count':len(queries),'time':time_elapsed})))
+            if settings.DEBUG:
+                logging.debug(t.render(Context({
+                    'sqllog': queries,
+                    'count': len(queries),
+                    'time': time_elapsed,
+                })))
+            times_elapsed = {
+                'sql': sum([float(q['time'])
+                            for q in queries if not q.get('mongo') and
+                                                not q.get('redis')]),
+                'mongo': sum([float(q['time']) for q in queries if q.get('mongo')]),
+                'redis': sum([float(q['time']) for q in queries if q.get('redis')]),
+            }
+            setattr(request, 'sql_times_elapsed', times_elapsed)
         return response
 
 SIMPSONS_QUOTES = [
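
Reading a bucket back is the mirror image: the average for a minute is the summed seconds divided by the sampled-request count. A minimal sketch under the same assumptions as above:

    # Sketch: recover one minute's average DB time from its bucket.
    import datetime
    import redis

    r = redis.Redis()  # assumption: same stand-in connection as above
    minute = datetime.datetime.now().replace(second=0, microsecond=0)
    key = "DB:%s:%s" % ('sql', minute.strftime('%s'))
    count = int(r.get("%s:c" % key) or 0)
    total = float(r.get("%s:t" % key) or 0)
    avg = total / count if count else 0  # seconds per sampled request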

apps/statistics/models.py

@@ -170,6 +170,53 @@ class MStatistics(mongo.Document):
     def collect_statistics_for_db(cls):
         lag = db_functions.mongo_max_replication_lag(settings.MONGODB)
         cls.set('mongodb_replication_lag', lag)
+
+        now = round_time(datetime.datetime.now(), round_to=60)
+        r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)
+        db_times = {}
+        latest_db_times = {}
+        for db in ['sql', 'mongo', 'redis']:
+            db_times[db] = []
+            for hour in range(24):
+                start_hours_ago = now - datetime.timedelta(hours=hour+1)
+                pipe = r.pipeline()
+                for m in range(60):
+                    minute = start_hours_ago + datetime.timedelta(minutes=m)
+                    key = "DB:%s:%s" % (db, minute.strftime('%s'))
+                    pipe.get("%s:c" % key)
+                    pipe.get("%s:t" % key)
+                times = pipe.execute()
+                counts = [int(c or 0) for c in times[::2]]
+                avgs = [float(a or 0) for a in times[1::2]]
+                if counts and avgs:
+                    count = sum(counts)
+                    avg = round(sum(avgs) / count, 3) if count else 0
+                else:
+                    count = 0
+                    avg = 0
+                if hour == 0:
+                    latest_count = float(counts[-1]) if len(counts) else 0
+                    latest_avg = float(avgs[-1]) if len(avgs) else 0
+                    latest_db_times[db] = latest_avg / latest_count if latest_count else 0
+                db_times[db].append(avg)
+            db_times[db].reverse()
+
+        values = (
+            ('avg_sql_times', json.encode(db_times['sql'])),
+            ('avg_mongo_times', json.encode(db_times['mongo'])),
+            ('avg_redis_times', json.encode(db_times['redis'])),
+            ('latest_sql_avg', latest_db_times['sql']),
+            ('latest_mongo_avg', latest_db_times['mongo']),
+            ('latest_redis_avg', latest_db_times['redis']),
+        )
+        for key, value in values:
+            cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
 
 class MFeedback(mongo.Document):
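
A subtlety in the aggregation above: each minute contributes two pipelined GETs, so the flat result list interleaves counters and totals, and times[::2] picks the counts while times[1::2] picks the summed seconds (despite its name, avgs holds totals until the division by count). A toy illustration of that slicing:

    # Toy example of the pipeline-result slicing in collect_statistics_for_db;
    # the values are made up. Redis returns strings or None for missing keys.
    times = ['3', '0.12', None, None, '1', '0.08']  # c, t, c, t, c, t for 3 minutes
    counts = [int(c or 0) for c in times[::2]]      # [3, 0, 1]
    totals = [float(a or 0) for a in times[1::2]]   # [0.12, 0.0, 0.08]
    avg = round(sum(totals) / sum(counts), 3)       # 0.05 seconds per request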

settings.py

@@ -112,6 +112,10 @@ MIDDLEWARE_CLASSES = (
     'corsheaders.middleware.CorsMiddleware',
     'oauth2_provider.middleware.OAuth2TokenMiddleware',
     # 'debug_toolbar.middleware.DebugToolbarMiddleware',
+    'apps.profile.middleware.DBProfilerMiddleware',
+    'apps.profile.middleware.SQLLogToConsoleMiddleware',
+    'utils.mongo_raw_log_middleware.MongoDumpMiddleware',
+    'utils.redis_raw_log_middleware.RedisDumpMiddleware',
 )
 
 AUTHENTICATION_BACKENDS = (
@@ -622,9 +626,6 @@ REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379,
 JAMMIT = jammit.JammitAssets(NEWSBLUR_DIR)
 
 if DEBUG:
-    # MIDDLEWARE_CLASSES += ('apps.profile.middleware.SQLLogToConsoleMiddleware',)
-    # MIDDLEWARE_CLASSES += ('utils.mongo_raw_log_middleware.MongoDumpMiddleware',)
-    # MIDDLEWARE_CLASSES += ('utils.redis_raw_log_middleware.RedisDumpMiddleware',)
     MIDDLEWARE_CLASSES += ('utils.request_introspection_middleware.DumpRequestMiddleware',)
     MIDDLEWARE_CLASSES += ('utils.exception_middleware.ConsoleExceptionMiddleware',)
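
One ordering note: DBProfilerMiddleware has to sit above SQLLogToConsoleMiddleware and both dump middlewares, because they all read request.activated_segments, which only its process_request creates. A defensive variant of the activation check (hypothetical; the commit relies on ordering instead) would tolerate a missing attribute:

    # Hypothetical hardening, not in this commit: default to an empty
    # segment list so a misordered stack fails soft instead of raising
    # AttributeError on request.activated_segments.
    def activated(self, request):
        return (settings.DEBUG or
                'db_profiler' in getattr(request, 'activated_segments', []))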

utils/mongo_raw_log_middleware.py

@@ -8,14 +8,14 @@ import struct
 import bson
 from bson.errors import InvalidBSON
 
 class MongoDumpMiddleware(object):
-    def __init__(self):
-        if not settings.DEBUG:
-            raise MiddlewareNotUsed()
+    def activated(self, request):
+        return (settings.DEBUG or 'db_profiler' in request.activated_segments)
 
     def process_view(self, request, callback, callback_args, callback_kwargs):
+        if not self.activated(request): return
         self._used_msg_ids = []
-        if settings.DEBUG and not getattr(MongoClient, '_logging', False):
+        if not getattr(MongoClient, '_logging', False):
             # save old methods
             setattr(MongoClient, '_logging', True)
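
The _instrument helper referenced by these middlewares is defined further down the file, outside the hunk, so its shape is not shown in this diff. A plausible sketch of the pattern (names and the exact record format are assumptions) is a timing wrapper that appends to Django's connection.queries with a backend marker, which is what lets SQLLogToConsoleMiddleware split totals per backend:

    # Assumed shape of the _instrument wrapper (the real one is outside
    # this hunk): time each driver call and record it alongside Django's
    # SQL queries, flagged so it can be summed separately.
    from time import time
    from django.db import connection

    def _instrument(self, original_method):
        def instrumented_method(*args, **kwargs):
            start = time()
            result = original_method(*args, **kwargs)
            connection.queries.append({
                'mongo': True,                      # marker checked via q.get('mongo')
                'time': '%.6f' % (time() - start),  # connection.queries stores strings
            })
            return result
        return instrumented_method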

utils/munin/ (new file)

@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+from utils.munin.base import MuninGraph
+
+class NBMuninGraph(MuninGraph):
+
+    @property
+    def graph_config(self):
+        return {
+            'graph_category' : 'NewsBlur',
+            'graph_title' : 'NewsBlur DB Times',
+            'graph_vlabel' : 'Database times (seconds)',
+            'graph_args' : '-l 0',
+            'sql_avg.label': 'SQL avg times (5m)',
+            'mongo_avg.label': 'Mongo avg times (5m)',
+            'redis_avg.label': 'Redis avg times (5m)',
+        }
+
+    def calculate_metrics(self):
+        from apps.statistics.models import MStatistics
+        return {
+            'sql_avg': MStatistics.get('latest_sql_avg'),
+            'mongo_avg': MStatistics.get('latest_mongo_avg'),
+            'redis_avg': MStatistics.get('latest_redis_avg'),
+        }
+
+if __name__ == '__main__':
+    NBMuninGraph().run()
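
For reference, Munin calls a plugin with the config argument for the graph definition and with no argument for current values; assuming MuninGraph.run() follows that protocol, a poll would look roughly like this (plugin filename and numbers illustrative):

    # Illustrative Munin exchange; values are made up.
    #
    #   $ munin-run newsblur_dbtimes config
    #   graph_title NewsBlur DB Times
    #   sql_avg.label SQL avg times (5m)
    #   ...
    #   $ munin-run newsblur_dbtimes
    #   sql_avg.value 0.012
    #   mongo_avg.value 0.004
    #   redis_avg.value 0.001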

utils/redis_raw_log_middleware.py

@@ -4,13 +4,13 @@ from django.db import connection
 from redis.connection import Connection
 from time import time
 
 class RedisDumpMiddleware(object):
-    def __init__(self):
-        if not settings.DEBUG:
-            raise MiddlewareNotUsed()
+    def activated(self, request):
+        return (settings.DEBUG or 'db_profiler' in request.activated_segments)
 
     def process_view(self, request, callback, callback_args, callback_kwargs):
-        if settings.DEBUG and not getattr(Connection, '_logging', False):
+        if not self.activated(request): return
+        if not getattr(Connection, '_logging', False):
             # save old methods
             setattr(Connection, '_logging', True)
             # self.orig_pack_command = \
@@ -18,7 +18,6 @@ class RedisDumpMiddleware(object):
             # instrument methods to record messages
             Connection.pack_command = \
                 self._instrument(Connection.pack_command)
-        return None
 
     def process_response(self, request, response):
         # if settings.DEBUG and hasattr(self, 'orig_pack_command'):