Compressing Postgres db sql backup.

This commit is contained in:
Samuel Clay 2022-02-03 15:39:36 -05:00
parent 1397693d54
commit 5bde153147
3 changed files with 27 additions and 4 deletions

View file

@@ -56,7 +56,7 @@ for collection in ${collections[@]}; do
     docker exec -it mongo mongodump -d newsblur -c $collection -o /backup
 done;
-echo " ---> Compressing /srv/newsblur/backup/newsblur into /srv/newsblur/backup/backup_mongo.tgz"
+echo " ---> Compressing /srv/newsblur/backup/newsblur into /srv/newsblur/backup/backup_mongo_${now}.tgz"
 tar -zcf /srv/newsblur/backup/backup_mongo_${now}.tgz -C / srv/newsblur/backup/newsblur
 echo " ---> Uploading backups to S3"

View file

@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+now=$(date '+%Y-%m-%d-%H-%M')
+echo "---> PG dumping - ${now}"
+BACKUP_FILE=/var/lib/postgresql/backup/backup_postgresql_${now}.sql;
+sudo docker exec -it postgres /usr/lib/postgresql/13/bin/pg_dump -U newsblur -h 127.0.0.1 -Fc newsblur > $BACKUP_FILE
+echo " ---> Compressing $BACKUP_FILE"
+gzip $BACKUP_FILE
+echo " ---> Uploading postgres backup to S3"
+sudo docker run --user 1000:1001 --rm \
+    -v /srv/newsblur:/srv/newsblur \
+    -v /srv/newsblur/backups/:/srv/newsblur/backups/ \
+    --network=host \
+    newsblur/newsblur_python3 \
+    python /srv/newsblur/utils/backups/backup_psql.py
+# Don't delete backup since the backup_mongo.py script will rm them
+## rm /opt/mongo/newsblur/backup/backup_mongo_${now}.tgz
+## rm /opt/mongo/newsblur/backup/backup_mongo_${now}
+echo " ---> Finished uploading backups to S3: "

View file

@@ -35,8 +35,8 @@ from django.conf import settings
 s3 = boto3.client('s3', aws_access_key_id=settings.S3_ACCESS_KEY, aws_secret_access_key=settings.S3_SECRET)
 hostname = socket.gethostname().replace('-','_')
-s3_object_name = f'backup_{hostname}/backup_{hostname}_{time.strftime("%Y-%m-%d-%H-%M")}.rdb.gz'
-path = os.listdir('/backup')[0]
-print('Uploading %s (from %s) to S3...' % (s3_object_name, path))
+s3_object_name = f'backup_{hostname}/backup_{hostname}_{time.strftime("%Y-%m-%d-%H-%M")}.sql.gz'
+path = os.listdir('/srv/newsblur/backups/')[0]
+print('Uploading %s to %s on S3 bucket %s' % (path, s3_object_name, settings.S3_BACKUP_BUCKET))
 s3.upload_file(path, settings.S3_BACKUP_BUCKET, s3_object_name, Callback=ProgressPercentage(path))
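A quick way to confirm the renamed .sql.gz object actually landed in the bucket; the bucket name below is only a placeholder for settings.S3_BACKUP_BUCKET, and the hostname transform mirrors the replace('-','_') above:

aws s3 ls "s3://<S3_BACKUP_BUCKET>/backup_$(hostname | tr '-' '_')/" | tail -n 5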