Merge branch 'master' into docker_django3.1

* master:
  Upgrading to mongo 3.4.
  Use email if username doesn't work.
  Removing old types field. Needed to upgrade to mongo 3.4.
  Checking if a feed has any notifications on it and, if so, setting a max of 30 min between fetches (yes, this is a pretty good way of getting your feeds to fetch faster, but you'll have to deal with the notifications; a sketch of this cap follows the commit summary below).
  Fix task fetch monitor.
  Attempting to see iOS premium status over email.
  Handling work queue missing error.
  Limiting work queue checks.
  Making the work queue monitor executable.
  Adding monitor for work queue.
  Pointing to the new ssl cert.
  More descriptive failure email
  No default value
  Spitting out error on failure.
  Handling missing values
  Needs redis
  Only warn about falling feed fetches when actually falling.
  Revert "Removing unused types key from index."
  Don't need full health checks in debug.
Samuel Clay 2021-01-23 19:29:11 -05:00
commit 3ad5295a33
9 changed files with 101 additions and 24 deletions
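
To make the notification-based fetch cap from the change list above concrete, here is a minimal illustrative sketch; the function name and parameters are placeholders, not NewsBlur's actual Feed API (the real change is in the Feed model diff further down):

def capped_fetch_interval(total_minutes, notification_count, subs):
    # Any notification on the feed caps the interval at 30 minutes between fetches.
    if notification_count > 0:
        total_minutes = min(total_minutes, 30)
    # At least one (weighted) subscriber caps the interval at 4 hours, per the same diff.
    if subs >= 1:
        total_minutes = min(total_minutes, 60 * 4)
    return total_minutes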

View file

@@ -68,7 +68,7 @@ class MUserFeedNotification(mongo.Document):
'collection': 'notifications',
'indexes': ['feed_id',
{'fields': ['user_id', 'feed_id'],
'unique': True
'unique': True,
}],
'allow_inheritance': False,
}

View file

@@ -488,6 +488,8 @@ class Profile(models.Model):
@property
def latest_paypal_email(self):
ipn = PayPalIPN.objects.filter(custom=self.user.username)
if not len(ipn):
ipn = PayPalIPN.objects.filter(payer_email=self.user.email)
if not len(ipn):
return
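
As a self-contained sketch of the fallback this hunk adds (the hunk cuts off before the property's return value, so the final lines here are assumptions, not part of the diff):

from paypal.standard.ipn.models import PayPalIPN  # django-paypal's IPN model

def latest_paypal_email(user):
    # Look up IPNs by username first, then fall back to the account email
    # when the username was never recorded in the IPN's custom field.
    ipn = PayPalIPN.objects.filter(custom=user.username)
    if not len(ipn):
        ipn = PayPalIPN.objects.filter(payer_email=user.email)
    if not len(ipn):
        return None
    return ipn[0].payer_email  # assumed; not shown in the hunk above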

View file

@@ -710,8 +710,8 @@ def email_optout(request):
@json.json_view
def ios_subscription_status(request):
logging.debug(" ---> iOS Subscription Status: %s" % request.body)
subject = "iOS Subscription Status"
data = json.decode(request.body)
subject = "iOS Subscription Status: %s" % data.get('notification_type', "[missing]")
message = """%s""" % (request.body)
mail_admins(subject, message)

View file

@@ -2098,12 +2098,15 @@ class Feed(models.Model):
if self.min_to_decay and not force and not premium_speed:
return self.min_to_decay
from apps.notifications.models import MUserFeedNotification
if premium_speed:
self.active_premium_subscribers += 1
spd = self.stories_last_month / 30.0
subs = (self.active_premium_subscribers +
((self.active_subscribers - self.active_premium_subscribers) / 10.0))
notification_count = MUserFeedNotification.objects.filter(feed_id=self.pk).count()
# Calculate sub counts:
# SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 10 AND stories_last_month >= 30;
# SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 1 AND active_premium_subscribers < 10 AND stories_last_month >= 30;
@@ -2162,6 +2165,10 @@ class Feed(models.Model):
if len(fetch_history['push_history']):
total = total * 12
# Any notification means a 30 min maximum between fetches
if notification_count > 0:
total = min(total, 30)
# 4 hour max for premiums, 48 hour max for free
if subs >= 1:
total = min(total, 60*4*1)

View file

@@ -74,8 +74,8 @@ backend imageproxy
server imageproxy01 imageproxy:80 check inter 2000ms
backend push
option httpchk GET /_haproxychk
http-check expect rstatus 200|503
# option httpchk GET /_haproxychk
# http-check expect rstatus 200|503
server push 127.0.0.1:8000 check inter 2000ms
backend node_socket

View file

@@ -166,7 +166,7 @@ backend maintenance
{{ maintenance }}
listen stats
bind :1936 ssl crt newsblur.pem
bind :1936 ssl crt newsblur.com.crt
stats enable
stats hide-version
stats realm Haproxy\ Statistics

fabfile.py (vendored, 28 lines changed)
View file

@@ -529,8 +529,8 @@ def virtualenv():
def setup_pip():
with cd(env.VENDOR_PATH), settings(warn_only=True):
run('curl https://bootstrap.pypa.io/get-pip.py --output get-pip.py')
sudo('python2 get-pip.py')
run('curl https://bootstrap.pypa.io/2.6/get-pip.py | sudo python2')
# sudo('python2 get-pip.py')
@parallel
@@ -546,9 +546,9 @@ def pip():
sudo('mkswap /swapfile')
sudo('swapon /swapfile')
sudo('chown %s.%s -R %s' % (env.user, env.user, os.path.join(env.NEWSBLUR_PATH, 'venv')))
run('easy_install -U pip')
run('pip install --upgrade pip')
run('pip install --upgrade setuptools')
# run('easy_install -U pip')
# run('pip install --upgrade pip')
# run('pip install --upgrade setuptools')
run('pip install -r requirements.txt')
if role == "task":
with settings(warn_only=True):
@@ -865,15 +865,14 @@ def copy_certificates():
run('ln -fs %s %s' % (privkey_path, os.path.join(cert_path, 'newsblur.com.crt.key'))) # HAProxy
put(os.path.join(env.SECRETS_PATH, 'certificates/comodo/dhparams.pem'), cert_path)
put(os.path.join(env.SECRETS_PATH, 'certificates/ios/aps_development.pem'), cert_path)
# Export aps.cer from Apple issued certificate using Keychain Assistant
# openssl x509 -in aps.cer -inform DER -outform PEM -out aps.pem
put(os.path.join(env.SECRETS_PATH, 'certificates/ios/aps.pem'), cert_path)
# Export aps.p12 from aps.cer using Keychain Assistant
# openssl pkcs12 -in aps.p12 -out aps.p12.pem -nodes
put(os.path.join(env.SECRETS_PATH, 'certificates/ios/aps.p12.pem'), cert_path)
# run('cat %s/newsblur.com.crt > %s/newsblur.pem' % (cert_path, cert_path))
# run('echo "\n" >> %s/newsblur.pem' % (cert_path))
# run('cat %s/newsblur.com.key >> %s/newsblur.pem' % (cert_path, cert_path))
def setup_certbot():
sudo('snap install --classic certbot')
sudo('snap set certbot trust-plugin-with-root=ok')
@@ -1252,7 +1251,7 @@ def disable_thp():
sudo('update-rc.d disable-transparent-hugepages defaults')
def setup_mongo():
MONGODB_VERSION = "3.2.22"
MONGODB_VERSION = "3.4.24"
pull()
disable_thp()
sudo('systemctl enable rc-local.service') # Enable rc.local
@@ -1263,11 +1262,11 @@ def setup_mongo():
echo never > /sys/kernel/mm/transparent_hugepage/defrag\n\
fi\n\n\
exit 0" | sudo tee /etc/rc.local')
sudo('curl -fsSL https://www.mongodb.org/static/pgp/server-3.2.asc | sudo apt-key add -')
sudo('curl -fsSL https://www.mongodb.org/static/pgp/server-3.4.asc | sudo apt-key add -')
# sudo('echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list')
# sudo('echo "\ndeb http://downloads-distro.mongodb.org/repo/debian-sysvinit dist 10gen" | sudo tee -a /etc/apt/sources.list')
# sudo('echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list')
sudo('echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list')
sudo('echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list')
sudo('apt-get update')
sudo('apt-get install -y mongodb-org=%s mongodb-org-server=%s mongodb-org-shell=%s mongodb-org-mongos=%s mongodb-org-tools=%s' %
(MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION))
@@ -1529,6 +1528,11 @@ def setup_newsletter_monitor():
sudo('ln -fs %s/utils/monitor_newsletter_delivery.py /etc/cron.hourly/monitor_newsletter_delivery' % env.NEWSBLUR_PATH)
sudo('/etc/cron.hourly/monitor_newsletter_delivery')
@parallel
def setup_queue_monitor():
sudo('ln -fs %s/utils/monitor_work_queue.py /etc/cron.hourly/monitor_work_queue' % env.NEWSBLUR_PATH)
sudo('/etc/cron.hourly/monitor_work_queue')
@parallel
def setup_redis_monitor():
run('sleep 5') # Wait for redis to startup so the log file is there

View file

@@ -7,6 +7,7 @@ import subprocess
import requests
from newsblur import settings
import socket
import redis
import pymongo
def main():
@@ -18,25 +19,34 @@ def main():
admin_email = settings.ADMINS[0][1]
failed = False
feeds_fetched = 0
FETCHES_DROP_AMOUNT = 0
redis_task_fetches = 0
monitor_key = "Monitor:task_fetches"
r = redis.Redis(connection_pool=settings.REDIS_ANALYTICS_POOL)
try:
client = pymongo.MongoClient('mongodb://%s' % settings.MONGO_DB['host'])
feeds_fetched = client.newsblur.statistics.find_one({"key": "feeds_fetched"})['value']
redis_task_fetches = int(r.get(monitor_key) or 0)
except Exception as e:
failed = e
if feeds_fetched < 5000000:
if feeds_fetched < 5000000 and feeds_fetched <= (redis_task_fetches - FETCHES_DROP_AMOUNT):
failed = True
if failed:
requests.post(
"https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME,
auth=("api", settings.MAILGUN_ACCESS_KEY),
data={"from": "NewsBlur Task Monitor: %s <admin@%s.newsblur.com>" % (hostname, hostname),
"to": [admin_email],
"subject": "%s feeds fetched falling: %s" % (hostname, feeds_fetched),
"text": "Feed fetches are falling: %s" % (feeds_fetched)})
print(" ---> Feeds fetched falling! %s" % (feeds_fetched))
"subject": "%s feeds fetched falling: %s (from %s)" % (hostname, feeds_fetched, redis_task_fetches),
"text": "Feed fetches are falling: %s (from %s) %s" % (feeds_fetched, redis_task_fetches, failed)})
r.set(monitor_key, feeds_fetched)
r.expire(monitor_key, 60*60*3) # 3 hours
print(" ---> Feeds fetched falling! %s %s" % (feeds_fetched, failed))
else:
print(" ---> Feeds fetched OK: %s" % (feeds_fetched))
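
To make the new alert condition concrete, here is a small sketch of the "only warn when actually falling" predicate; the names mirror the script above and the 5,000,000 floor comes from the diff, but the helper itself is illustrative:

FETCHES_FLOOR = 5000000
FETCHES_DROP_AMOUNT = 0

def fetches_falling(feeds_fetched, last_recorded):
    # Warn only when the count is below the absolute floor AND has not risen
    # past the value recorded on the previous run (stored in redis above).
    return feeds_fetched < FETCHES_FLOOR and feeds_fetched <= (last_recorded - FETCHES_DROP_AMOUNT)

assert fetches_falling(4700000, 4800000)        # below floor and fell: alert
assert not fetches_falling(4900000, 4800000)    # below floor but rising: no alert
assert not fetches_falling(5100000, 4800000)    # above floor: no alert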

utils/monitor_work_queue.py (new executable file, 54 lines added)
View file

@@ -0,0 +1,54 @@
#!/srv/newsblur/venv/newsblur/bin/python
import sys
sys.path.append('/srv/newsblur')
import subprocess
import requests
from newsblur import settings
import socket
import redis
import pymongo
def main():
df = subprocess.Popen(["df", "/"], stdout=subprocess.PIPE)
output = df.communicate()[0]
device, size, used, available, percent, mountpoint = output.split("\n")[1].split()
hostname = socket.gethostname()
percent = int(percent.strip('%'))
admin_email = settings.ADMINS[0][1]
failed = False
work_queue_size = 0
QUEUE_DROP_AMOUNT = 0
redis_work_queue = 0
monitor_key = "Monitor:work_queue"
r_monitor = redis.Redis(connection_pool=settings.REDIS_ANALYTICS_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
try:
work_queue_size = int(r.llen("work_queue"))
redis_work_queue = int(r_monitor.get(monitor_key) or 0)
except Exception as e:
failed = e
if work_queue_size > 100 and work_queue_size > (redis_work_queue + QUEUE_DROP_AMOUNT):
failed = True
if failed:
requests.post(
"https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME,
auth=("api", settings.MAILGUN_ACCESS_KEY),
data={"from": "NewsBlur Queue Monitor: %s <admin@%s.newsblur.com>" % (hostname, hostname),
"to": [admin_email],
"subject": "%s work queue rising: %s (from %s)" % (hostname, work_queue_size, redis_work_queue),
"text": "Work queue is rising: %s (from %s) %s" % (work_queue_size, redis_work_queue, failed)})
r_monitor.set(monitor_key, work_queue_size)
r_monitor.expire(monitor_key, 60*60*3) # 3 hours
print(" ---> Work queue rising! %s %s" % (work_queue_size, failed))
else:
print(" ---> Work queue OK: %s" % (work_queue_size))
if __name__ == '__main__':
main()