diff --git a/docker/mongo/backup_mongo.sh b/docker/mongo/backup_mongo.sh
index b83de5593..c3b97c66f 100755
--- a/docker/mongo/backup_mongo.sh
+++ b/docker/mongo/backup_mongo.sh
@@ -56,7 +56,7 @@ for collection in ${collections[@]}; do
     docker exec -it mongo mongodump -d newsblur -c $collection -o /backup
 done;
 
-echo " ---> Compressing /srv/newsblur/backup/newsblur into /srv/newsblur/backup/backup_mongo.tgz"
+echo " ---> Compressing /srv/newsblur/backup/newsblur into /srv/newsblur/backup/backup_mongo_${now}.tgz"
 tar -zcf /srv/newsblur/backup/backup_mongo_${now}.tgz -C / srv/newsblur/backup/newsblur
 
 echo " ---> Uploading backups to S3"
diff --git a/docker/postgres/backup_postgres.sh b/docker/postgres/backup_postgres.sh
new file mode 100644
index 000000000..7a4a6c5df
--- /dev/null
+++ b/docker/postgres/backup_postgres.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+
+now=$(date '+%Y-%m-%d-%H-%M')
+
+echo " ---> PG dumping - ${now}"
+# Write into /srv/newsblur/backups/ so the uploader container (below) can see it.
+BACKUP_FILE=/srv/newsblur/backups/backup_postgresql_${now}.sql
+# NOTE: -i only (no -t): a TTY would CRLF-mangle the binary -Fc dump and break under cron.
+sudo docker exec -i postgres /usr/lib/postgresql/13/bin/pg_dump -U newsblur -h 127.0.0.1 -Fc newsblur > "$BACKUP_FILE"
+
+echo " ---> Compressing $BACKUP_FILE"
+gzip "$BACKUP_FILE"
+
+echo " ---> Uploading postgres backup to S3"
+sudo docker run --user 1000:1001 --rm \
+    -v /srv/newsblur:/srv/newsblur \
+    -v /srv/newsblur/backups/:/srv/newsblur/backups/ \
+    --network=host \
+    newsblur/newsblur_python3 \
+    python /srv/newsblur/utils/backups/backup_psql.py
+
+# Don't delete the dump here; backup_psql.py cleans up after the S3 upload.
+echo " ---> Finished uploading backups to S3: ${BACKUP_FILE}.gz"
diff --git a/utils/backups/backup_psql.py b/utils/backups/backup_psql.py
index f00ecca02..f3ab9919e 100644
--- a/utils/backups/backup_psql.py
+++ b/utils/backups/backup_psql.py
@@ -35,8 +35,8 @@ from django.conf import settings
 s3 = boto3.client('s3', aws_access_key_id=settings.S3_ACCESS_KEY,
                   aws_secret_access_key=settings.S3_SECRET)
 
 hostname = socket.gethostname().replace('-','_')
-s3_object_name = f'backup_{hostname}/backup_{hostname}_{time.strftime("%Y-%m-%d-%H-%M")}.rdb.gz'
-path = os.listdir('/backup')[0]
-print('Uploading %s (from %s) to S3...' % (s3_object_name, path))
+s3_object_name = f'backup_{hostname}/backup_{hostname}_{time.strftime("%Y-%m-%d-%H-%M")}.sql.gz'
+path = os.path.join('/srv/newsblur/backups/', os.listdir('/srv/newsblur/backups/')[0])
+print('Uploading %s to %s on S3 bucket %s' % (path, s3_object_name, settings.S3_BACKUP_BUCKET))
 s3.upload_file(path, settings.S3_BACKUP_BUCKET, s3_object_name, Callback=ProgressPercentage(path))