Merge branch 'sictiru' of github.com:samuelclay/NewsBlur into sictiru

Andrei 2022-10-28 16:10:49 -07:00
commit b5040d8134
36 changed files with 839 additions and 265 deletions

View file

@ -133,10 +133,16 @@ You got the downtime message either through email or SMS. This is the order of o
When the new redis server is connected to the primary redis server:
# db-redis-story2 = moving to new server
- # db-redis-story = old server about to be shut down
+ # db-redis-story1 = old server about to be shut down
# Edit digitalocean.tf to change db-redis-story count to 2
make plan
make apply
make firewall
# Wait for redis to sync, takes 5-10 minutes
# Edit redis/consul_service.json to switch primary to db-redis-story2
make celery_stop
make maintenance_on
apd -l db-redis-story2 -t replicaofnoone
- aps -l db-redis-story,db-redis-story2 -t consul
+ aps -l db-redis-story1,db-redis-story2 -t consul
make maintenance_off
make task
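For reference, the `replicaofnoone` play ultimately issues Redis's REPLICAOF NO ONE against the new primary. A minimal redis-py sketch of that promotion step, with the hostname and default port assumed from the runbook above:

import redis

# Promote db-redis-story2: stop replicating and start accepting writes.
# (The apd play above is the real mechanism; this is only an illustration.)
r = redis.Redis(host="db-redis-story2", port=6379)
r.slaveof()  # with no arguments, redis-py sends REPLICAOF NO ONE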

View file

@ -16,6 +16,9 @@ groups:
node: inventory_hostname.startswith('node')
node_socket: inventory_hostname.startswith('node-socket')
node_images: inventory_hostname.startswith('node-images')
node_text: inventory_hostname.startswith('node-text')
node_page: inventory_hostname.startswith('node-page')
node_favicons: inventory_hostname.startswith('node-favicons')
# debugs: inventory_hostname.startswith('debug')

View file

@ -6,7 +6,7 @@
],
"checks": [{
"id": "consul-manager",
"http": "http://{{ ansible_ssh_host }}:8500",
"http": "http://{{ ansible_host }}:8500",
"interval": "15s",
"failures_before_critical": 4
}],
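The only change in these Consul check templates is swapping `ansible_ssh_host` for `ansible_host`, its replacement since the old variable was deprecated in Ansible 2.0; the same substitution repeats in every service file below. A quick Jinja2 sketch of what the check URL renders to, with a made-up address:

from jinja2 import Template

# ansible_host carries the same connection address the deprecated
# ansible_ssh_host used to; the rendered check URL is unchanged.
check = Template("http://{{ ansible_host }}:8500").render(ansible_host="10.10.0.5")
print(check)  # http://10.10.0.5:8500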

View file

@ -7,7 +7,7 @@
"port": 9200,
"checks": [{
"id": "es-ping",
"http": "http://{{ ansible_ssh_host }}:5579/db_check/elasticsearch",
"http": "http://{{ ansible_host }}:5579/db_check/elasticsearch",
"interval": "15s",
"failures_before_critical": 4
}]

View file

@ -6,7 +6,7 @@
],
"checks": [{
"id": "grafana-ping",
"http": "http://{{ ansible_ssh_host }}:3000/api/health",
"http": "http://{{ ansible_host }}:3000/api/health",
"interval": "15s",
"failures_before_critical": 4
}],

View file

@ -9,10 +9,10 @@
"checks": [
{
"id": "{{inventory_hostname}}-exporter-ping",
"http": "http://{{ ansible_ssh_host }}:9216",
"http": "http://{{ ansible_host }}:9216",
"interval": "15s",
"failures_before_critical": 4
}
]
}
}
}

View file

@ -8,7 +8,7 @@
"port": 27017,
"checks": [{
"id": "mongo-analytics-ping",
"http": "http://{{ ansible_ssh_host }}:5579/db_check/mongo_analytics?consul=1",
"http": "http://{{ ansible_host }}:5579/db_check/mongo_analytics?consul=1",
"interval": "15s"
}]
}

View file

@ -8,7 +8,7 @@
"port": 27017,
"checks": [{
"id": "mongo-ping",
"http": "http://{{ ansible_ssh_host }}:5579/db_check/mongo?consul=1",
"http": "http://{{ ansible_host }}:5579/db_check/mongo?consul=1",
"interval": "15s",
"failures_before_critical": 4
}]

View file

@ -9,10 +9,10 @@
"checks": [
{
"id": "{{inventory_hostname}}-node-exporter-ping",
"http": "http://{{ ansible_ssh_host }}:9100",
"http": "http://{{ ansible_host }}:9100",
"interval": "15s",
"failures_before_critical": 4
}
]
}
}
}

View file

@ -13,13 +13,13 @@
"checks": [{
"id": "{{inventory_hostname}}-ping",
{% if item.target_host == "node-images" %}
"http": "http://{{ ansible_ssh_host }}:{{ item.port }}/sc,seLJDaKBog3LLEMDe8cjBefMhnVSibO4RA5boZhWcVZ0=/https://samuelclay.com/static/images/2019%20-%20Cuba.jpg",
"http": "http://{{ ansible_host }}:{{ item.port }}/sc,seLJDaKBog3LLEMDe8cjBefMhnVSibO4RA5boZhWcVZ0=/https://samuelclay.com/static/images/2019%20-%20Cuba.jpg",
{% elif item.target_host == "node-favicons" %}
"http": "http://{{ ansible_ssh_host }}:{{ item.port }}/rss_feeds/icon/1",
"http": "http://{{ ansible_host }}:{{ item.port }}/rss_feeds/icon/1",
{% elif item.target_host == "node-text" %}
"http": "http://{{ ansible_ssh_host }}:{{ item.port }}/rss_feeds/original_text_fetcher?test=1",
"http": "http://{{ ansible_host }}:{{ item.port }}/rss_feeds/original_text_fetcher?test=1",
{% elif item.target_host == "node-page" %}
"http": "http://{{ ansible_ssh_host }}:{{ item.port }}/original_page/1?test=1",
"http": "http://{{ ansible_host }}:{{ item.port }}/original_page/1?test=1",
{% endif %}
"interval": "15s"
}]

View file

@ -9,10 +9,10 @@
"checks": [
{
"id": "{{inventory_hostname}}-exporter-ping",
"http": "http://{{ ansible_ssh_host }}:9187",
"http": "http://{{ ansible_host }}:9187",
"interval": "15s",
"failures_before_critical": 4
}
]
}
}
}

View file

@ -11,7 +11,7 @@
"port": 5432,
"checks": [{
"id": "postgres-ping",
"http": "http://{{ ansible_ssh_host }}:5579/db_check/postgres?consul=1",
"http": "http://{{ ansible_host }}:5579/db_check/postgres?consul=1",
"interval": "15s",
"failures_before_critical": 4
}]

View file

@ -6,10 +6,10 @@
],
"checks": [{
"id": "prometheus-ping",
"http": "http://{{ ansible_ssh_host }}:9090/metrics",
"http": "http://{{ ansible_host }}:9090/metrics",
"interval": "15s",
"failures_before_critical": 4
}],
"port": 9090
}
}
}

View file

@ -9,10 +9,10 @@
"checks": [
{
"id": "{{ item.redis_target }}-exporter-ping",
"http": "http://{{ ansible_ssh_host }}:{{ item.port }}",
"http": "http://{{ ansible_host }}:{{ item.port }}",
"interval": "15s",
"failures_before_critical": 4
}
]
}
}
}

View file

@ -21,6 +21,13 @@
become: yes
sysctl: name=vm.overcommit_memory value=1 state=present reload=yes
- name: Template redis.conf file
copy:
src: /srv/newsblur/docker/redis/redis.conf
dest: /srv/newsblur/docker/redis/redis.conf
notify: restart redis
register: updated_config
- name: Template redis_replica.conf file
template:
src: /srv/newsblur/docker/redis/redis_replica.conf.j2
@ -40,7 +47,7 @@
become: yes
docker_container:
name: redis
- image: redis:6.2.7
+ image: redis:7
state: started
command: /usr/local/etc/redis/redis_server.conf
container_default_behavior: no_defaults
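Note the image pin loosens from the exact 6.2.7 tag to the redis:7 major-version tag, so the container now tracks 7.x point releases. A small redis-py sketch for verifying the running version after the container restarts (hostname assumed):

import redis

# Sanity check after the 6.2.7 -> 7 bump: confirm the server reports 7.x.
r = redis.Redis(host="db-redis-story2", port=6379)
print(r.info("server")["redis_version"])  # expect something like 7.0.5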

View file

@ -1,6 +1,6 @@
{
"service": {
- {% if inventory_hostname in ["db-redis-user", "db-redis-story1", "db-redis-session", "db-redis-pubsub"] %}
+ {% if inventory_hostname in ["db-redis-user", "db-redis-story2", "db-redis-session", "db-redis-pubsub"] %}
"name": "{{ inventory_hostname|regex_replace('\d+', '') }}",
{% else %}
"name": "{{ inventory_hostname|regex_replace('\d+', '') }}-staging",
@ -13,15 +13,15 @@
"checks": [{
"id": "{{inventory_hostname}}-ping",
{% if inventory_hostname.startswith('db-redis-story') %}
"http": "http://{{ ansible_ssh_host }}:5579/db_check/redis_story?consul=1",
"http": "http://{{ ansible_host }}:5579/db_check/redis_story?consul=1",
{% elif inventory_hostname.startswith('db-redis-user') %}
"http": "http://{{ ansible_ssh_host }}:5579/db_check/redis_user?consul=1",
"http": "http://{{ ansible_host }}:5579/db_check/redis_user?consul=1",
{% elif inventory_hostname.startswith('db-redis-pubsub') %}
"http": "http://{{ ansible_ssh_host }}:5579/db_check/redis_pubsub?consul=1",
"http": "http://{{ ansible_host }}:5579/db_check/redis_pubsub?consul=1",
{% elif inventory_hostname.startswith('db-redis-sessions') %}
"http": "http://{{ ansible_ssh_host }}:5579/db_check/redis_sessions?consul=1",
"http": "http://{{ ansible_host }}:5579/db_check/redis_sessions?consul=1",
{% else %}
"http": "http://{{ ansible_ssh_host }}:5000/db_check/redis?consul=1",
"http": "http://{{ ansible_host }}:5000/db_check/redis?consul=1",
{% endif %}
"interval": "15s",
"failures_before_critical": 4

View file

@ -1028,7 +1028,9 @@ class Profile(models.Model):
        self.setup_premium_history()
        if not self.is_premium:
            if order_id == "nb.premium.archive.99":
                self.activate_archive()
            elif not self.is_premium:
                self.activate_premium()
        logging.user(self.user, "~FG~BBNew Android premium subscription: $%s~FW" % amount)
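Since the outer branch has already established `not self.is_premium`, the `elif not self.is_premium` re-check is always true when reached and behaves like a plain `else`. A hypothetical condensation of the same dispatch:

def activate_by_order(profile, order_id):
    # Hypothetical helper, equivalent to the hunk above: the archive SKU gets
    # the archive tier, any other order falls through to regular premium.
    if not profile.is_premium:
        if order_id == "nb.premium.archive.99":
            profile.activate_archive()
        else:
            profile.activate_premium()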

View file

@ -1159,7 +1159,7 @@ def starred_stories_rss_feed_tag(request, user_id, secret_token, tag_slug):
def folder_rss_feed(request, user_id, secret_token, unread_filter, folder_slug):
domain = Site.objects.get_current().domain
date_hack_2023 = (datetime.datetime.now() > datetime.datetime(2023, 7, 1))
try:
user = User.objects.get(pk=user_id)
except User.DoesNotExist:
@ -1169,7 +1169,7 @@ def folder_rss_feed(request, user_id, secret_token, unread_filter, folder_slug):
feed_ids, folder_title = user_sub_folders.feed_ids_under_folder_slug(folder_slug)
usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids)
- if feed_ids and user.profile.is_archive:
+ if feed_ids and ((user.profile.is_archive and date_hack_2023) or (not date_hack_2023)):
params = {
"user_id": user.pk,
"feed_ids": feed_ids,
@ -1268,7 +1268,7 @@ def folder_rss_feed(request, user_id, secret_token, unread_filter, folder_slug):
rss.add_item(**story_data)
# TODO: Remove below date hack to accommodate users who paid for premium but want folder rss
- if not user.profile.is_archive and (datetime.datetime.now() > datetime.datetime(2023, 7, 1)):
+ if not user.profile.is_archive and date_hack_2023:
story_data = {
'title': "You must have a premium archive subscription on NewsBlur to have RSS feeds for folders.",
'link': "https://%s/?next=premium" % domain,
@ -1404,7 +1404,12 @@ def load_river_stories__redis(request):
user_search = None
offset = (page-1) * limit
story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-')
if user.pk == 86178:
# Disable Michael_Novakhov account
logging.user(request, "~FCLoading ~FMMichael_Novakhov~SN's river, resource usage too high, ignoring.")
return HttpResponse("Resource usage too high", status=429)
if infrequent:
feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent)
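The date_hack_2023 flag introduced above gates folder RSS in two places with the same rule: before July 1, 2023 every premium user keeps folder feeds, afterwards only archive-tier accounts do. A hypothetical condensation of that gate:

import datetime

def folder_rss_allowed(is_archive, now=None):
    # Hypothetical condensation of the date-hack condition used above.
    now = now or datetime.datetime.now()
    date_hack_2023 = now > datetime.datetime(2023, 7, 1)
    return (is_archive and date_hack_2023) or (not date_hack_2023)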

View file

@ -52,6 +52,7 @@ def TaskFeeds():
r.zcard('tasked_feeds'),
r.scard('queued_feeds'),
r.zcard('scheduled_updates')))
logging.debug(" ---> ~FBFeeds being tasked: ~SB%s" % feeds)
@app.task(name='task-broken-feeds')
def TaskBrokenFeeds():

View file

@ -8,6 +8,7 @@ from django.views.decorators.http import condition
from django.http import HttpResponseForbidden, HttpResponseRedirect, HttpResponse, Http404
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
# from django.db import IntegrityError
from apps.rss_feeds.models import Feed, merge_feeds
from apps.rss_feeds.models import MFetchHistory
@ -510,19 +511,21 @@ def status(request):
return HttpResponseForbidden()
minutes = int(request.GET.get('minutes', 1))
now = datetime.datetime.now()
+    hour_ago = now + datetime.timedelta(minutes=minutes)
     username = request.GET.get('user', '') or request.GET.get('username', '')
-    if username:
-        user = User.objects.get(username=username)
+    if username == "all":
+        feeds = Feed.objects.filter(next_scheduled_update__lte=hour_ago).order_by('next_scheduled_update')
     else:
-        user = request.user
-    usersubs = UserSubscription.objects.filter(user=user)
-    feed_ids = usersubs.values('feed_id')
-    if minutes > 0:
-        hour_ago = now + datetime.timedelta(minutes=minutes)
-        feeds = Feed.objects.filter(pk__in=feed_ids, next_scheduled_update__lte=hour_ago).order_by('next_scheduled_update')
-    else:
-        hour_ago = now + datetime.timedelta(minutes=minutes)
-        feeds = Feed.objects.filter(pk__in=feed_ids, last_update__gte=hour_ago).order_by('-last_update')
+        if username:
+            user = User.objects.get(username=username)
+        else:
+            user = request.user
+        usersubs = UserSubscription.objects.filter(user=user)
+        feed_ids = usersubs.values('feed_id')
+        if minutes > 0:
+            feeds = Feed.objects.filter(pk__in=feed_ids, next_scheduled_update__lte=hour_ago).order_by('next_scheduled_update')
+        else:
+            feeds = Feed.objects.filter(pk__in=feed_ids, last_update__gte=hour_ago).order_by('-last_update')
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
queues = {

View file

@ -5,4 +5,5 @@ urlpatterns = [
url(r'^dashboard_graphs', views.dashboard_graphs, name='statistics-graphs'),
url(r'^feedback_table', views.feedback_table, name='feedback-table'),
url(r'^revenue', views.revenue, name='revenue'),
url(r'^slow', views.slow, name='slow'),
]

View file

@ -1,12 +1,22 @@
import base64
import pickle
import redis
import datetime
from operator import countOf
from collections import defaultdict
from django.http import HttpResponse
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
from django.conf import settings
from django.utils import feedgenerator
from django.http import HttpResponseForbidden
from apps.statistics.models import MStatistics, MFeedback
from apps.statistics.rstats import round_time
from apps.profile.models import PaymentHistory
from utils import log as logging
def dashboard_graphs(request):
    statistics = MStatistics.all()
    return render(
@ -49,4 +59,60 @@ def revenue(request):
        request.META.get('HTTP_USER_AGENT', "")[:24]
    ))
    return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml')
@login_required
def slow(request):
    r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)
    if not request.user.is_staff and not settings.DEBUG:
        logging.user(request, "~SKNON-STAFF VIEWING SLOW STATUS!")
        assert False
        return HttpResponseForbidden()
    now = datetime.datetime.now()
    all_queries = {}
    user_id_counts = {}
    path_counts = {}
    users = {}
    for minutes_ago in range(60*6):
        dt_ago = now - datetime.timedelta(minutes=minutes_ago)
        minute = round_time(dt_ago, round_to=60)
        dt_ago_str = minute.strftime("%a %b %-d, %Y %H:%M")
        name = f"SLOW:{minute.strftime('%s')}"
        minute_queries = r.lrange(name, 0, -1)
        for query_raw in minute_queries:
            query = pickle.loads(base64.b64decode(query_raw))
            user_id = query['user_id']
            if dt_ago_str not in all_queries:
                all_queries[dt_ago_str] = []
            if user_id in users:
                user = users[user_id]
            elif int(user_id) != 0:
                try:
                    user = User.objects.get(pk=user_id)
                except User.DoesNotExist:
                    continue
                users[user_id] = user
            else:
                user = AnonymousUser()
                users[user_id] = user
            query['user'] = user
            query['datetime'] = minute
            all_queries[dt_ago_str].append(query)
            if user_id not in user_id_counts:
                user_id_counts[user_id] = 0
            user_id_counts[user_id] += 1
            if query['path'] not in path_counts:
                path_counts[query['path']] = 0
            path_counts[query['path']] += 1
    user_counts = []
    for user_id, count in user_id_counts.items():
        user_counts.append({'user': users[user_id], 'count': count})
    return render(request, 'statistics/slow.xhtml', {
        'all_queries': all_queries,
        'user_counts': user_counts,
        'path_counts': path_counts,
    })
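The view walks one Redis list per minute over the last six hours, keyed by the minute's Unix timestamp. A sketch of that key enumeration, using a plain floor-to-minute in place of round_time, and noting that strftime('%s') is a glibc extension (fine on the Linux hosts here):

import datetime

# Enumerate the per-minute SLOW:<unix-timestamp> keys the view reads.
now = datetime.datetime(2022, 10, 28, 16, 10, 37)  # made-up time
for minutes_ago in range(3):  # the view uses range(60 * 6)
    minute = (now - datetime.timedelta(minutes=minutes_ago)).replace(second=0, microsecond=0)
    print(f"SLOW:{minute.strftime('%s')}")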

View file

@ -54,8 +54,8 @@ android {
applicationId "com.newsblur"
minSdkVersion 21
targetSdkVersion 31
- versionCode 205
- versionName "12.0.1"
+ versionCode 207
+ versionName "12.1.1"
}
compileOptions.with {
sourceCompatibility = JavaVersion.VERSION_1_8

View file

@ -63,7 +63,7 @@ frontend public
use_backend app_push if { hdr_end(host) -i push.newsblur.com }
use_backend node_socket if { path_beg /v3/socket.io/ }
- use_backend node_favicon if { path_beg /rss_feeds/icon/ }
+ use_backend node_favicons if { path_beg /rss_feeds/icon/ }
use_backend node_text if { path_beg /rss_feeds/original_text_fetcher }
use_backend node_images if { hdr_end(host) -i imageproxy.newsblur.com }
use_backend node_images if { hdr_end(host) -i imageproxy2.newsblur.com }
@ -174,7 +174,7 @@ backend node_socket
server {{host}} {{host}}.node.nyc1.consul:8008
{% endfor %}
- backend node_favicon
+ backend node_favicons
http-check expect rstatus 200|503
option httpchk GET /rss_feeds/icon/1
balance roundrobin
@ -184,7 +184,10 @@ backend node_text
http-check expect rstatus 200|503
option httpchk GET /rss_feeds/original_text_fetcher?test=1
balance roundrobin
- server-template node-text 1 _node-text._tcp.service.nyc1.consul:8008 check inter 2000ms resolvers consul resolve-prefer ipv4 resolve-opts allow-dup-ip init-addr none
+ default-server check inter 2000ms resolvers consul resolve-prefer ipv4 resolve-opts allow-dup-ip init-addr none
+ {% for host in groups.node_text %}
+ server {{host}} {{host}}.node.nyc1.consul:8008
+ {% endfor %}
backend node_page
http-check expect rstatus 200|503

File diff suppressed because it is too large.

View file

@ -19,6 +19,6 @@
.NB-status td {
border-top: 1px solid #F0F0F0;
margin: 0;
- padding: 0 0;
+ padding: 0 6px 0 0;
max-width: 300px;
}

View file

@ -45,7 +45,7 @@ AUTO_PREMIUM_NEW_USERS = True
AUTO_PREMIUM_ARCHIVE_NEW_USERS = True
AUTO_PREMIUM_PRO_NEW_USERS = True
AUTO_PREMIUM = True
- AUTO_PREMIUM = False
+ # AUTO_PREMIUM = False
if not AUTO_PREMIUM:
AUTO_PREMIUM_NEW_USERS = False
AUTO_PREMIUM_ARCHIVE_NEW_USERS = False

View file

@ -71,6 +71,6 @@ favicons = (app) =>
if ENV_DEV or ENV_DOCKER
res.redirect '/media/img/icons/nouns/world.svg'
else
- res.redirect 'https://beta.newsblur.com/media/img/icons/nouns/world.svg'
+ res.redirect 'https://newsblur.com/media/img/icons/nouns/world.svg'
exports.favicons = favicons

View file

@ -88,7 +88,7 @@
if (ENV_DEV || ENV_DOCKER) {
return res.redirect('/media/img/icons/nouns/world.svg');
} else {
- return res.redirect('https://beta.newsblur.com/media/img/icons/nouns/world.svg');
+ return res.redirect('https://newsblur.com/media/img/icons/nouns/world.svg');
}
}
});

View file

@ -85,7 +85,7 @@
<img src="/media/img/logo_512.png" class="logo">
<h1>NewsBlur is in <span class="error404">maintenance mode</span></h1>
<div class="description">
- <p>Moving to a larger Redis story DB, since the existing DB is buckling under the new load from the requirements of the new NewsBlur Premium Archive subscription.</p>
+ <p>Moving to another Redis story DB, since the existing DB is having some issues. Check the daily load time graph to see the impact. Expect fast load times once this is done.</p>
<p>To pass the time, <a href="http://mltshp.com/popular">check out what's popular on MLTSHP</a>.</p>
</div>
</div>

View file

@ -2,7 +2,7 @@
{% load utils_tags tz %}
- {% block bodyclass %}NB-body-status{% endblock %}
+ {% block bodyclass %}NB-body-status NB-static{% endblock %}
{% block content %}
@ -21,6 +21,7 @@
<th style="white-space: nowrap">Last Update<br>Next Update</th>
<th>Min to<br>next update</th>
<th>Decay</th>
<th>Last fetch</th>
<th>Subs</th>
<th>Active</th>
<th>Premium</th>
@ -29,11 +30,13 @@
<th>Act. Prem</th>
<th>Per Month</th>
<th>Last Month</th>
<th>In Archive</th>
<th>File size (b)</th>
</tr>
{% for feed in feeds %}
<tr>
<td>{{ feed.pk }}</td>
<td><img class="NB-favicon" src="/rss_feeds/icon/{{ feed.pk }}" /> {{ feed.feed_title|truncatewords:4 }}</td>
<td title="{{ feed.feed_address }}"><img class="NB-favicon" src="/rss_feeds/icon/{{ feed.pk }}" /> {{ feed.feed_title|truncatewords:4 }}</td>
<td>{{ feed.last_update|smooth_timedelta }}</td>
<td class="NB-status-update" style="white-space: nowrap">
{% localdatetime feed.last_update "%b %d, %Y %H:%M:%S" %}
@ -42,6 +45,7 @@
</td>
<td>{{ feed.next_scheduled_update|smooth_timedelta }}</td>
<td>{{ feed.min_to_decay }}</td>
<td>{{ feed.last_load_time }}</td>
<td>{{ feed.num_subscribers }}</td>
<td style="color: {% if feed.active_subscribers == 0 %}lightgrey{% else %}darkblue{% endif %}">{{ feed.active_subscribers }}</td>
<td style="color: {% if feed.premium_subscribers == 0 %}lightgrey{% else %}darkblue{% endif %}">{{ feed.premium_subscribers }}</td>
@ -50,6 +54,8 @@
<td style="color: {% if feed.active_premium_subscribers == 0 %}lightgrey{% else %}darkblue{% endif %}">{{ feed.active_premium_subscribers }}</td>
<td style="color: {% if feed.average_stories_per_month == 0 %}lightgrey{% else %}{% endif %}">{{ feed.average_stories_per_month }}</td>
<td style="color: {% if feed.stories_last_month == 0 %}lightgrey{% else %}{% endif %}">{{ feed.stories_last_month }}</td>
<td style="color: {% if feed.archive_count == 0 %}lightgrey{% else %}{% endif %}">{{ feed.archive_count }}</td>
<td style="color: {% if feed.fs_size_bytes == 0 %}lightgrey{% else %}{% endif %}">{{ feed.fs_size_bytes|commify }}</td>
</tr>
{% endfor %}

View file

@ -0,0 +1,54 @@
{% extends 'base.html' %}
{% load utils_tags tz %}
{% block bodyclass %}NB-body-status NB-static{% endblock %}
{% block content %}
<div class="NB-module">
<div class="queries">
<table class="NB-status">
{% for user_count in user_counts %}
<tr>
{% if forloop.first %}<td rowspan={{user_counts|length}} valign=top><b>Users</b>{% endif %}
<td><b>{{ user_count.user }}</b></td>
<td>{{ user_count.count }}</td>
</tr>
{% endfor %}
</table>
</div>
<div class="queries">
<table class="NB-status">
{% for path, count in path_counts.items %}
<tr>
{% if forloop.first %}<td rowspan={{path_counts|length}} valign=top><b>Paths</b>{% endif %}
<td><b>{{ path }}</b></td>
<td>{{ count }}</td>
</tr>
{% endfor %}
</table>
</div>
</div>
<table class="NB-status">
{% for dt_str, queries in all_queries.items %}
{% for query in queries %}
<tr>
{% if forloop.first %}
<td rowspan={{ queries|length }} valign=top> <b>
{% localdatetime query.datetime "%a %b %d, %Y %H:%M" %}
</b></td>
{% endif %}
<td>{{ query.user }}</td>
<td>{{ query.time }}</td>
<td>{{ query.method }}</td>
<td>{{ query.path }}</td>
<td>{% if query.data %}{{ query.data }}{% endif %}</td>
</tr>
{% endfor %}
{% endfor %}
</table>
{% endblock content %}

View file

@ -228,8 +228,9 @@ resource "digitalocean_droplet" "discovery" {
}
resource "digitalocean_droplet" "node-text" {
count = 2
image = var.droplet_os
name = "node-text"
name = contains([0], count.index) ? "node-text" : "node-text${count.index+1}"
region = var.droplet_region
size = var.droplet_size
ssh_keys = [digitalocean_ssh_key.default.fingerprint]
@ -263,8 +264,9 @@ resource "digitalocean_droplet" "node-socket" {
}
resource "digitalocean_droplet" "node-favicons" {
count = 2
image = var.droplet_os
name = "node-favicons"
name = "node-favicons${count.index+1}"
region = var.droplet_region
size = var.droplet_size
ssh_keys = [digitalocean_ssh_key.default.fingerprint]
@ -378,11 +380,12 @@ resource "digitalocean_droplet" "db-redis-sessions" {
}
resource "digitalocean_droplet" "db-redis-story" {
- count = 1
+ count = 2
image = var.droplet_os
name = "db-redis-story${count.index+1}"
region = var.droplet_region
size = contains([1], count.index) ? "m-8vcpu-64gb" : var.redis_story_droplet_size
# size = var.redis_story_droplet_size
ssh_keys = [digitalocean_ssh_key.default.fingerprint]
provisioner "local-exec" {
command = "/srv/newsblur/ansible/utils/generate_inventory.py; sleep 120"
@ -464,12 +467,13 @@ resource "digitalocean_droplet" "db-postgres" {
# servers=$(for i in {1..9}; do echo -n "-target=\"digitalocean_droplet.db-mongo-primary[$i]\" " ; done); tf plan -refresh=false `eval echo $servers`
#
resource "digitalocean_droplet" "db-mongo-primary" {
- count = 1
+ count = 2
backups = contains([0], count.index) ? false : true
image = var.droplet_os
name = "db-mongo-primary${count.index+1}"
region = var.droplet_region
size = contains([1], count.index) ? "m3-8vcpu-64gb" : var.mongo_primary_droplet_size
# size = contains([1], count.index) ? "m3-8vcpu-64gb" : var.mongo_primary_droplet_size
size = var.mongo_primary_droplet_size
ssh_keys = [digitalocean_ssh_key.default.fingerprint]
provisioner "local-exec" {
command = "/srv/newsblur/ansible/utils/generate_inventory.py; sleep 120"

View file

@ -54,7 +54,7 @@ variable "droplet_os" {
variable "sentry_droplet_size" {
type = string
default = "s-4vcpu-8gb"
default = "s-8vcpu-16gb"
}
variable "metrics_droplet_size" {
@ -89,5 +89,5 @@ variable "elasticsearch_droplet_size" {
variable "redis_story_droplet_size" {
type = string
default = "m-8vcpu-64gb"
default = "m-4vcpu-32gb"
}

View file

@ -1,11 +1,17 @@
from django.conf import settings
from utils import log as logging
from apps.statistics.rstats import round_time
import pickle
import base64
import time
import redis
IGNORE_PATHS = [
"/_haproxychk",
]
RECORD_SLOW_REQUESTS_ABOVE_SECONDS = 10
class DumpRequestMiddleware:
def process_request(self, request):
if settings.DEBUG and request.path not in IGNORE_PATHS:
@ -40,22 +46,31 @@ class DumpRequestMiddleware:
redis_log
))
return response
def elapsed_time(self, request):
time_elapsed = ""
if hasattr(request, 'start_time'):
seconds = time.time() - request.start_time
color = '~FB'
if seconds >= 1:
color = '~FR'
elif seconds > .2:
color = '~SB~FK'
time_elapsed = "[%s%.4ss~SB] " % (
color,
seconds,
)
return time_elapsed
if seconds > RECORD_SLOW_REQUESTS_ABOVE_SECONDS:
r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)
pipe = r.pipeline()
minute = round_time(round_to=60)
name = f"SLOW:{minute.strftime('%s')}"
user_id = request.user.pk if request.user.is_authenticated else "0"
data_string = None
if request.method == "GET":
data_string = ' '.join([f"{key}={value}" for key, value in request.GET.items()])
elif request.method == "GET":
data_string = ' '.join([f"{key}={value}" for key, value in request.POST.items()])
data = {
"user_id": user_id,
"time": round(seconds, 2),
"path": request.path,
"method": request.method,
"data": data_string,
}
pipe.lpush(name, base64.b64encode(pickle.dumps(data)).decode('utf-8'))
pipe.expire(name, 60*60*12) # 12 hours
pipe.execute()
return response
def color_db(self, seconds, default):
color = default
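End to end, the middleware and the /statistics/slow view share one record format: a dict pickled, base64-encoded, and LPUSHed onto the minute's key with a 12-hour expiry. A self-contained roundtrip of that encoding, with made-up values:

import base64
import pickle

# Encode exactly as the middleware does, decode as the slow view does.
data = {"user_id": 42, "time": 12.31, "path": "/reader/river_stories",
        "method": "GET", "data": "page=2 limit=12"}
encoded = base64.b64encode(pickle.dumps(data)).decode('utf-8')
assert pickle.loads(base64.b64decode(encoded)) == data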