NewsBlur/config/munin/postgres_commits_
#!/srv/newsblur/venv/newsblur/bin/python
# -*- coding: utf-8 -*-
"""
Based on a plugin by Bjørn Ruberg.
Plugin to monitor PostgreSQL commits/rollbacks.
"Why should I care?"
- Too many commits can really bog down the database, as it checks all
  the tables for consistency after each change.
- Software is often set to 'AutoCommit = 1', meaning a commit is issued
  after every single statement (see the example below). That can make
  sense with brittle code, so at least some of the work survives a
  failure, but when you're inserting 10,000 rows it really hurts.
- If you see a spike in rollbacks, some db programmer is probably
  abusing their session, or a stored procedure has gone horribly wrong
  and isn't leaving a trace. Time for the rolled-up newspaper.
Find out more at
http://www.postgresql.org/docs/8.2/interactive/monitoring-stats.html
(where "8.2" can be the version of PostgreSQL you have installed)
"""
from vendor.munin.postgres import MuninPostgresPlugin


class MuninPostgresCommitsPlugin(MuninPostgresPlugin):
    dbname_in_args = True
    args = "--base 1000"
    vlabel = "Sessions per ${graph_period}"
    info = "Shows number of commits and rollbacks"
    # Both counters only ever grow, so they are exposed as DERIVE fields with
    # a minimum of 0: munin graphs the rate of change per ${graph_period}, and
    # a stats reset doesn't show up as a huge negative spike.
    fields = (
        ('commits', dict(
            label="commits",
            info="SQL sessions terminated with a commit command",
            type="DERIVE",
            min="0",
        )),
        ('rollbacks', dict(
            label="rollbacks",
            info="SQL sessions terminated with a rollback command",
            type="DERIVE",
            min="0",
        )),
    )

    @property
    def title(self):
        return "Postgres commits/rollbacks on %s" % self.dbname

    def execute(self):
        # pg_stat_database keeps one row per database with cumulative counters;
        # grab the commit and rollback totals for the monitored database.
        c = self.cursor()
        c.execute("SELECT xact_commit, xact_rollback FROM pg_stat_database WHERE datname = %s", (self.dbname,))
        values = {}
        for row in c.fetchall():
            values["commits"] = row[0]
            values["rollbacks"] = row[1]
        return values
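
# Usage note: munin-node runs the plugin twice per poll, once with a "config"
# argument for the graph/field definitions and once with no argument for the
# values ("commits.value N" / "rollbacks.value N"). This assumes the
# vendor.munin base class follows the standard munin plugin protocol; with
# dbname_in_args, the file is presumably linked as e.g.
# postgres_commits_newsblur so the suffix names the database to monitor.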


if __name__ == "__main__":
    MuninPostgresCommitsPlugin().run()