fix the monitors to work in a cronjob and remove dead code

Jonathan Math 2021-06-19 09:56:35 -06:00
parent 65bcb5e9e1
commit ad85403ab7
4 changed files with 4 additions and 15 deletions
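The heart of the cron fix shows up in the first hunk of every script below: cron runs jobs with a minimal environment and never activates the project virtualenv, so each monitor now pins a fixed interpreter in its shebang and puts the project root on sys.path itself. A minimal sketch of that header, using only the paths that appear in the diff:

#!/usr/local/bin/python3
# Cron does not source a shell profile or activate a virtualenv, so the
# interpreter is pinned in the shebang and the project root is appended to
# sys.path by hand before any project imports (e.g. newsblur_web.settings).
import sys
sys.path.append('/srv/newsblur')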

View file

@@ -1,4 +1,4 @@
-#!/srv/newsblur/venv/newsblur3/bin/python
+#!/usr/local/bin/python3
import sys
sys.path.append('/srv/newsblur')
@@ -9,9 +9,6 @@ from newsblur_web import settings
import socket
def main():
-df = subprocess.Popen(["df", "/"], stdout=subprocess.PIPE)
-output = df.communicate()[0]
-device, size, used, available, percent, mountpoint = output.split("\n")[1].split()
hostname = socket.gethostname()
admin_email = settings.ADMINS[0][1]
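The three deleted lines in the second hunk are the dead code named in the commit message: a df / call whose parsed columns this script does not go on to use. For reference, a runnable sketch of that disk-usage pattern on its own; text=True is added here so the output arrives as str, since as written the removed lines would split bytes with a str separator under Python 3:

import subprocess

# Run `df /` and split the single data row into its six columns.
df = subprocess.Popen(["df", "/"], stdout=subprocess.PIPE, text=True)
output = df.communicate()[0]
device, size, used, available, percent, mountpoint = output.split("\n")[1].split()
disk_usage = int(percent.strip('%'))  # e.g. 42 for "42%"
print(f"{mountpoint} on {device} is {disk_usage}% full")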

View file

@@ -16,7 +16,7 @@ def main():
ten_min_ago = datetime.datetime.now() - datetime.timedelta(minutes=10)
hostname = socket.gethostname()
modified_minutes = datetime.datetime.now() - modified
-log_tail = os.popen(os.getenv('redis_log_path')).read()
+log_tail = os.popen("tail -n 100 $(docker inspect --format='{{.LogPath}}' redis)").read()
if True:
#if modified < ten_min_ago:
requests.post(
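The one-line change in this hunk swaps an environment-variable lookup (redis_log_path, typically absent from cron's environment) for a command that asks Docker where the redis container's log lives. The surrounding if True:, with the original ten-minute freshness check commented out, means the post below currently fires on every run. A hedged sketch of the same log-tail idea without the shell one-liner; the container name redis and the 100-line tail come from the diff, everything else is illustrative:

import subprocess

# Ask Docker for the path of the redis container's JSON log file.
log_path = subprocess.run(
    ["docker", "inspect", "--format", "{{.LogPath}}", "redis"],
    capture_output=True, text=True, check=True,
).stdout.strip()

# Read the last 100 lines of that log, as the monitor's os.popen() call does.
log_tail = subprocess.run(
    ["tail", "-n", "100", log_path],
    capture_output=True, text=True, check=True,
).stdout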

View file

@@ -1,4 +1,4 @@
-#!/srv/newsblur/venv/newsblur3/bin/python
+#!/usr/local/bin/python3
import sys
sys.path.append('/srv/newsblur')
@@ -11,11 +11,7 @@ import redis
import pymongo
def main():
-df = subprocess.Popen(["df", "/"], stdout=subprocess.PIPE)
-output = df.communicate()[0]
-device, size, used, available, percent, mountpoint = output.split("\n")[1].split()
hostname = socket.gethostname()
-percent = int(percent.strip('%'))
admin_email = settings.ADMINS[0][1]
failed = False
feeds_fetched = 0
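With the disk check removed, what is left of this monitor's preamble is the alerting skeleton these scripts share: the hostname to identify the machine, the first admin address from Django's settings.ADMINS (a list of (name, email) pairs), and a failed flag plus a feeds_fetched counter for the rest of the script to fill in. A schematic sketch of that shape, assuming /srv/newsblur is on sys.path as in the scripts; the check and the alert step here are placeholders, not NewsBlur code:

import socket

from newsblur_web import settings

def main():
    hostname = socket.gethostname()
    admin_email = settings.ADMINS[0][1]  # ADMINS holds (name, email) tuples
    failed = False
    feeds_fetched = 0

    # ... read fetch counters from redis/mongo and set failed on a bad value ...

    if failed:
        # a real monitor would notify admin_email; print keeps the sketch runnable
        print(f"ALERT for {admin_email}: {hostname} fetched only {feeds_fetched} feeds")

if __name__ == "__main__":
    main()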

View file

@@ -1,4 +1,4 @@
-#!/srv/newsblur/venv/newsblur/bin/python
+#!/usr/local/bin/python3
import sys
sys.path.append('/srv/newsblur')
@@ -11,11 +11,7 @@ import redis
import pymongo
def main():
-df = subprocess.Popen(["df", "/"], stdout=subprocess.PIPE)
-output = df.communicate()[0]
-device, size, used, available, percent, mountpoint = output.split("\n")[1].split()
hostname = socket.gethostname()
-percent = int(percent.strip('%'))
admin_email = settings.ADMINS[0][1]
failed = False
work_queue_size = 0
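This last monitor tracks a work_queue_size gauge rather than fetch counts, and like the previous one it talks to redis. A hedged sketch of how such a gauge can be read with redis-py; the connection details and queue key names are assumptions for illustration, not values taken from the repository:

import redis

# Connect to the redis instance that backs the task queues (host/db assumed).
r = redis.Redis(host="localhost", port=6379, db=0)

# Sum pending jobs across the list keys of interest (key names assumed).
work_queue_size = sum(r.llen(key) for key in ("work_queue", "new_fetches"))

if work_queue_size > 10_000:  # threshold is illustrative only
    print(f"work queue backed up: {work_queue_size} pending jobs")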