add mongo backup cronjob for ansible and integrate backup_mongo.py with docker setup

This commit is contained in:
Jonathan Math 2021-06-19 12:02:29 -06:00
parent 8d6a9732b3
commit a71069994c
2 changed files with 55 additions and 35 deletions

View file

@@ -31,6 +31,7 @@
- /mnt/{{ inventory_hostname | regex_replace('db-|-', '') }}:/data/db
- /srv/newsblur/ansible/roles/mongo/templates/mongo.conf:/etc/mongod.conf
- /var/log/mongodb/:/var/log/mongodb/
- /opt/mongo/newsblur/backup:/backup/
- name: Register mongo in consul
tags: consul
@@ -65,3 +66,44 @@
newsblur/newsblur_python3 /srv/newsblur/utils/monitor_disk_usage.py $output
tags:
- sanity-checker
- name: Add mongo backup
  cron:
    name: mongo backup
    minute: "0"
    hour: "4"
    job: >-
      collections="classifier_tag classifier_author classifier_feed classifier_title
      userstories shared_stories category category_site sent_emails social_profile
      social_subscription social_services statistics user_search feedback";
      now=$(date '+\%Y-\%m-\%d-\%H-\%M');
      for collection in $collections; do
      echo Dumping $collection;
      docker exec mongo mongodump --db newsblur --collection $collection -o /backup/backup_mongo_${now};
      done;
      echo Compressing /opt/mongo/newsblur/backup/backup_mongo_${now}.tgz;
      tar -zcf /opt/mongo/newsblur/backup/backup_mongo_${now}.tgz /opt/mongo/newsblur/backup/backup_mongo_${now};
      echo Uploading backups to S3;
      docker run --rm
      -v /srv/newsblur:/srv/newsblur
      -v /opt/mongo/newsblur/backup/:/opt/mongo/newsblur/backup/
      --network=newsblurnet
      newsblur/newsblur_python3
      /srv/newsblur/utils/backups/backup_mongo.py
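
For reference, restoring a collection from one of the archives this cron job produces could look roughly like the sketch below. It is not part of this commit; the archive name, the mongo container name, and the /backup bind mount are taken from the task above, and it relies on mongodump's standard <db>/<collection>.bson output layout.

import subprocess
import tarfile

# Hypothetical values: pick the archive and collection you actually want to restore.
BACKUP_DIR = "/opt/mongo/newsblur/backup"
ARCHIVE = BACKUP_DIR + "/backup_mongo_2021-06-19-04-00.tgz"  # assumed timestamp
COLLECTION = "classifier_tag"

# tar stripped the leading "/" when the cron job created the archive, so extracting
# at "/" recreates the dump under its original /opt/mongo/newsblur/backup/ path.
with tarfile.open(ARCHIVE) as tar:
    tar.extractall("/")

# mongodump wrote <out_dir>/newsblur/<collection>.bson; the same directory is visible
# inside the container as /backup (see the volume mount added in this commit).
dump_dir = ARCHIVE[: -len(".tgz")].replace(BACKUP_DIR, "/backup")
subprocess.run(
    ["docker", "exec", "mongo", "mongorestore", "--db", "newsblur",
     "--collection", COLLECTION, dump_dir + "/newsblur/%s.bson" % COLLECTION],
    check=True,
)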

View file

@@ -1,40 +1,18 @@
import os
import sys
import shutil
CURRENT_DIR = os.path.dirname(__file__)
NEWSBLUR_DIR = ''.join([CURRENT_DIR, '/../../'])
sys.path.insert(0, NEWSBLUR_DIR)
os.environ['DJANGO_SETTINGS_MODULE'] = 'newsblur_web.settings'
from newsblur_web import settings
import boto3
import time
import s3
filenames = [f for f in os.listdir('/opt/mongo/newsblur/backup/') if '.tgz' in f]
COLLECTIONS = "classifier_tag classifier_author classifier_feed classifier_title userstories shared_stories category category_site sent_emails social_profile social_subscription social_services statistics user_search feedback"
if False:
    COLLECTIONS += " starred_stories"
date = time.strftime('%Y-%m-%d-%H-%M')
collections = COLLECTIONS.split(' ')
db_name = 'newsblur'
dir_name = 'backup_mongo_%s' % date
filename = '%s.tgz' % dir_name
os.mkdir(dir_name)
for collection in collections:
    cmd = 'mongodump --db %s --collection %s -o %s' % (db_name, collection, dir_name)
    print("Dumping %s: %s" % (collection, cmd))
    os.system(cmd)
print("Compressing %s..." % filename)
cmd = 'tar -zcf %s %s' % (filename, dir_name)
os.system(cmd)
print('Uploading %s to S3...' % filename)
try:
    s3.save_file_in_s3(filename, name="mongo/%s" % (filename))
except Exception as e:
    print(" ****> Exceptions: %s" % e)
shutil.rmtree(dir_name)
os.remove(filename)
for filename in filenames:
    print('Uploading %s to S3...' % filename)
    path = '/opt/mongo/newsblur/backup/%s' % filename
    try:
        s3 = boto3.resource('s3')
        bucket = s3.Bucket(settings.S3_BACKUP_BUCKET)
        bucket.upload_file(path, "mongo/%s" % filename)
    except Exception as e:
        print(" ****> Exceptions: %s" % e)
    shutil.rmtree(path[:-4])
    os.remove(path)
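
As a quick sanity check on the new upload path, the boto3 call shape can be exercised outside Django with a short sketch like this one; the bucket name and file path below are placeholders, and credentials come from the usual boto3 environment/config chain.

import boto3

# Placeholder values: the real script takes the bucket from newsblur_web settings
# and loops over every .tgz in /opt/mongo/newsblur/backup/.
BUCKET_NAME = "my-backup-bucket"
LOCAL_PATH = "/opt/mongo/newsblur/backup/backup_mongo_2021-06-19-04-00.tgz"
KEY = "mongo/" + LOCAL_PATH.rsplit("/", 1)[-1]

s3 = boto3.resource("s3")
bucket = s3.Bucket(BUCKET_NAME)

# Bucket.upload_file(Filename, Key) performs a managed (multipart if needed) upload.
bucket.upload_file(LOCAL_PATH, KEY)
print("Uploaded %s to s3://%s/%s" % (LOCAL_PATH, BUCKET_NAME, KEY))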