fix flask metrics python files
This commit is contained in:
parent dcf7364f6b
commit 7fa72f5be3

4 changed files with 50 additions and 41 deletions
@@ -4,18 +4,17 @@ from newsblur_web import settings
 import sentry_sdk
 from sentry_sdk.integrations.flask import FlaskIntegration
 
-sentry_sdk.init(
-    dsn=settings.FLASK_SENTRY_DSN,
-    integrations=[FlaskIntegration()],
-    traces_sample_rate=1.0,
-)
+if settings.FLASK_SENTRY_DSN is not None:
+    sentry_sdk.init(
+        dsn=settings.FLASK_SENTRY_DSN,
+        integrations=[FlaskIntegration()],
+        traces_sample_rate=1.0,
+    )
 
 app = Flask(__name__)
 
-if settings.MONGO_DB['host'] == 'db_mongo:29019':
-    host = settings.MONGO_DB['host'].split(":")[0]
-    port = int(settings.MONGO_DB['host'].split(":")[1])
-    connection = pymongo.MongoClient(host, port)
+if settings.DOCKERBUILD:
+    connection = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['host']}")
 else:
     connection = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}/?authSource=admin")
 
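A minimal standalone sketch of the MongoDB connection selection added above, using made-up settings values rather than NewsBlur's real configuration: the DOCKERBUILD flag picks between a bare host:port URL and an authenticated connection against the admin database.

# Sketch only: hypothetical settings values, not the actual NewsBlur config.
class FakeSettings:
    DOCKERBUILD = True                      # set when running inside docker-compose
    MONGO_DB = {'host': 'db_mongo:29019',
                'username': 'nb', 'password': 'secret'}
    SERVER_NAME = 'db-mongo.example.com'

def mongo_url(settings):
    # Mirrors the branch introduced in this commit: docker builds connect by
    # host:port only, everything else authenticates against the admin DB.
    if settings.DOCKERBUILD:
        return f"mongodb://{settings.MONGO_DB['host']}"
    return (f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}"
            f"@{settings.SERVER_NAME}/?authSource=admin")

print(mongo_url(FakeSettings()))  # mongodb://db_mongo:29019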
@@ -1,22 +1,23 @@
 from flask import Flask, render_template, Response
 from newsblur_web import settings
-#import sentry_sdk
-#from sentry_sdk.integrations.flask import FlaskIntegration
+import sentry_sdk
+from sentry_sdk.integrations.flask import FlaskIntegration
 import redis
 
-#sentry_sdk.init(
-#    dsn=settings.FLASK_SENTRY_DSN,
-#    integrations=[FlaskIntegration()],
-#    traces_sample_rate=1.0,
-#)
+if settings.FLASK_SENTRY_DSN is not None:
+    sentry_sdk.init(
+        dsn=settings.FLASK_SENTRY_DSN,
+        integrations=[FlaskIntegration()],
+        traces_sample_rate=1.0,
+    )
 
 app = Flask(__name__)
 
 INSTANCES = {
-    'redis-sessions': settings.REDIS_SESSIONS,
-    'redis-story': settings.REDIS_STORY,
-    'redis-pubsub': settings.REDIS_PUBSUB,
-    'redis-user': settings.REDIS,
+    'db-redis-sessions': settings.REDIS_SESSIONS,
+    'db-redis-story': settings.REDIS_STORY,
+    'db-redis-pubsub': settings.REDIS_PUBSUB,
+    'db-redis-user': settings.REDIS_USER,
 }
 
 class RedisMetric(object):
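The same Sentry guard appears in both metrics apps. As a minimal self-contained sketch (the DSN here is a placeholder, not the real settings module): initialisation only runs when a DSN is actually configured, so local setups with FLASK_SENTRY_DSN = None skip it entirely.

import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration

FLASK_SENTRY_DSN = None  # placeholder; the real value comes from newsblur_web.settings

if FLASK_SENTRY_DSN is not None:
    sentry_sdk.init(
        dsn=FLASK_SENTRY_DSN,
        integrations=[FlaskIntegration()],
        traces_sample_rate=1.0,
    )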
@@ -29,13 +30,18 @@ class RedisMetric(object):
         r = redis.Redis(host, port)
         return r.info()
 
-    def execute(self):
-        data = {}
+    def redis_servers_stats(self):
         for instance, redis_config in INSTANCES.items():
+            if not settings.DOCKERBUILD and settings.SERVER_NAME != instance:
+                continue
             host = redis_config['host']
             port = redis_config['port']
             stats = self.get_info(host, port)
 
+            yield instance, stats
+
+    def execute(self):
+        data = {}
+        for instance, stats in self.redis_servers_stats():
             values = {}
             for k in self.fields:
                 try:
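A standalone sketch of the generator pattern introduced in this hunk, with illustrative instance names and a stubbed get_info(); the real code calls redis.Redis(host, port).info() against the configured servers. A single redis_servers_stats() generator yields (instance, info) pairs so both the gauge endpoints and the DB-size endpoint share the iteration and the DOCKERBUILD/SERVER_NAME filter.

# Illustrative only: fake INSTANCES and a stubbed get_info().
DOCKERBUILD = True
SERVER_NAME = 'db-redis-user'
INSTANCES = {
    'db-redis-user': {'host': 'localhost', 'port': 6379},
    'db-redis-story': {'host': 'localhost', 'port': 6380},
}

def get_info(host, port):
    return {'connected_clients': 1, 'db0': {'keys': 42}}

def redis_servers_stats():
    for instance, conf in INSTANCES.items():
        # Outside of docker, each metrics container only reports on itself.
        if not DOCKERBUILD and SERVER_NAME != instance:
            continue
        yield instance, get_info(conf['host'], conf['port'])

for instance, stats in redis_servers_stats():
    print(instance, stats['connected_clients'])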
@@ -55,17 +61,14 @@ class RedisMetric(object):
 
     def get_db_size_data(self):
         data = {}
-        for instance, redis_config in INSTANCES.items():
-            host = redis_config['host']
-            port = redis_config['port']
-            stats = self.get_info(host, port)
+        for instance, stats in self.redis_servers_stats():
             dbs = [stat for stat in stats.keys() if stat.startswith('db')]
             for db in dbs:
-                data[f'{instance}-{db}'] = f'size {{db="{db}", instance="{instance}"}} {stats[db]["keys"]}'
+                data[f'{instance}-{db}'] = f' size {{db="{db}", instance="{instance}"}} {stats[db]["keys"]}'
         return data
 
     def get_context(self):
-        if self.fields[0][0] == 'db-size':
+        if self.fields[0][0] == 'size':
             formatted_data = self.get_db_size_data()
         else:
             values = self.execute()
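For reference, a tiny sketch of what the reworked f-string renders to (sample values are invented); the leading space is presumably significant because the metric name gets prepended elsewhere in the template.

db, instance, keys = 'db0', 'db-redis-story', 12345
line = f' size {{db="{db}", instance="{instance}"}} {keys}'
print(line)  # ' size {db="db0", instance="db-redis-story"} 12345'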
@@ -137,8 +140,8 @@ def size():
     conf = {
         'title': "Redis DB size",
         'fields': (
-            ('db-size', dict(
-                label="db-size",
+            ('size', dict(
+                label="size",
                 type="gauge",
             )),
         )
@@ -107,7 +107,7 @@ BROKER_URL = "redis://db_redis:6579/0"
 CELERY_RESULT_BACKEND = BROKER_URL
 CELERY_WORKER_CONCURRENCY = 1
 
-REDIS = {
+REDIS_USER = {
     'host': 'db_redis',
     'port': 6579
 }
@@ -57,6 +57,9 @@ RECAPTCHA_SECRET_KEY = "YOUR_RECAPTCHA_KEY"
 YOUTUBE_API_KEY = "YOUR_YOUTUBE_API_KEY"
 IMAGES_SECRET_KEY = "YOUR_IMAGES_SECRET_KEY"
 DOCKERBUILD = os.getenv("DOCKERBUILD")
+REDIS_USER = None
+FLASK_SENTRY_DSN = None
+
 # ===================
 # = Global Settings =
 # ===================
@@ -714,7 +717,7 @@ else:
 
 CELERY_REDIS_DB_NUM = 4
 SESSION_REDIS_DB = 5
-CELERY_BROKER_URL = "redis://%s:%s/%s" % (REDIS['host'], REDIS_PORT,CELERY_REDIS_DB_NUM)
+CELERY_BROKER_URL = "redis://%s:%s/%s" % (REDIS_USER['host'], REDIS_PORT,CELERY_REDIS_DB_NUM)
 CELERY_RESULT_BACKEND = CELERY_BROKER_URL
 BROKER_TRANSPORT_OPTIONS = {
     "max_retries": 3,
@@ -733,10 +736,14 @@ SESSION_REDIS = {
     'retry_on_timeout': True
 }
 
+if REDIS_USER is None:
+    # REDIS has been renamed to REDIS_USER.
+    REDIS_USER = REDIS
+
 CACHES = {
     'default': {
         'BACKEND': 'redis_cache.RedisCache',
-        'LOCATION': '%s:%s' % (REDIS['host'], REDIS_PORT),
+        'LOCATION': '%s:%s' % (REDIS_USER['host'], REDIS_PORT),
         'OPTIONS': {
             'DB': 6,
             'PARSER_CLASS': 'redis.connection.HiredisParser',
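A small sketch of the backwards-compatibility shim added above (values here are illustrative): a local settings file that still defines only the legacy REDIS dict keeps working, because REDIS_USER falls back to it when left at its new None default.

# Illustrative fallback, mirroring the settings shim.
REDIS = {'host': 'db_redis', 'port': 6579}   # legacy name from a local settings file
REDIS_USER = None                            # new default added in settings.py

if REDIS_USER is None:
    # REDIS has been renamed to REDIS_USER.
    REDIS_USER = REDIS

assert REDIS_USER['host'] == 'db_redis'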
@@ -745,13 +752,13 @@ CACHES = {
     },
 }
 
-REDIS_POOL = redis.ConnectionPool(host=REDIS['host'], port=REDIS_PORT, db=0, decode_responses=True)
-REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=REDIS_PORT, db=2, decode_responses=True)
-REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=REDIS_PORT, db=3, decode_responses=True)
-REDIS_FEED_UPDATE_POOL = redis.ConnectionPool(host=REDIS['host'], port=REDIS_PORT, db=4, decode_responses=True)
-# REDIS_STORY_HASH_POOL2 = redis.ConnectionPool(host=REDIS['host'], port=REDIS_PORT, db=8) # Only used when changing DAYS_OF_UNREAD
-REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool(host=REDIS['host'], port=REDIS_PORT, db=10, decode_responses=True)
-# REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS['host'], port=REDIS_PORT, db=6) # Duped in CACHES
+REDIS_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_PORT, db=0, decode_responses=True)
+REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_PORT, db=2, decode_responses=True)
+REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_PORT, db=3, decode_responses=True)
+REDIS_FEED_UPDATE_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_PORT, db=4, decode_responses=True)
+# REDIS_STORY_HASH_POOL2 = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_PORT, db=8) # Only used when changing DAYS_OF_UNREAD
+REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_PORT, db=10, decode_responses=True)
+# REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_PORT, db=6) # Duped in CACHES
 REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=REDIS_PORT, db=1, decode_responses=True)
 REDIS_STORY_HASH_POOL_ENCODED = redis.ConnectionPool(host=REDIS_STORY['host'], port=REDIS_PORT, db=1, decode_responses=False)
 REDIS_FEED_READ_POOL = redis.ConnectionPool(host=REDIS_SESSIONS['host'], port=REDIS_PORT, db=1, decode_responses=True)