- Fill in the variables.
- Put the database password in /root/.pgpass (if the backup runs as root; otherwise, in the $HOME/.pgpass of the user that will run the script), as shown below.
- Profit.
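A minimal .pgpass sketch with placeholder values (the file holds one credential per line, in the format hostname:port:database:username:password):

    <db_host>:<db_port>:<db_name>:<db_user>:<db_password>

libpq ignores the file unless its permissions are restricted:

    chmod 0600 $HOME/.pgpass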
#!/bin/bash
#
# Author: Bruno Coimbra <bbcoimbra@gmail.com>
#
# Backs up the database DB_NAME, located at DB_HOST:DB_PORT
# and accessible as DB_USER. The password should be kept in
# $HOME/.pgpass, and that file must have mode 0600 [1].
#
# The target bucket should be set in the BACKUP_BUCKET variable.
#
# AWS credentials must be available to aws-cli [2].
#
# Dependencies:
#
# * pg_dump executable (found in the postgresql-client-<version> package)
# * aws-cli (with a Python environment configured, run 'pip install awscli')
#
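# For example, on a Debian-like system the dependencies could be
# installed with (package names are an assumption for your distro):
#
#   apt-get install postgresql-client
#   pip install awscli
#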
#
# References
# [1] - http://www.postgresql.org/docs/9.3/static/libpq-pgpass.html
# [2] - http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-welcome.html
#
#
###############
### Variables
export AWS_ACCESS_KEY_ID="<your_KEY_ID>"
export AWS_SECRET_ACCESS_KEY="<your_SECRET_KEY>"
DB_HOST="<db_host>"
DB_PORT="<db_port>"
DB_NAME="<db_name>"
DB_USER="<db_user>"
BACKUP_BUCKET="<bucket_name>"
###############
#
# **RISK ZONE** DON'T TOUCH below this line unless you know
# exactly what you are doing.
#
###############
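# Abort immediately if any command below fails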
set -e
export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
### Variables
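# Fall back to a default bucket if BACKUP_BUCKET was left unset or empty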
S3_BACKUP_BUCKET=${BACKUP_BUCKET:-test-db-backup-bucket}
TEMPFILE_PREFIX="db-$DB_NAME-backup"
TEMPFILE="$(mktemp -t "$TEMPFILE_PREFIX-XXXXXXXX")"
DATE="$(date +%Y-%m-%d)"
TIMESTAMP="$(date +%s)"
BACKUPFILE="backup-$DB_NAME-$TIMESTAMP.sql.gz"
LOGTAG="DB $DB_NAME Backup"
### Validations
if [[ ! -r "$HOME/.pgpass" ]]; then
logger -t "$LOGTAG" "$0: Can't find database credentials. $HOME/.pgpass file isn't readable. Aborted."
exit 1
fi
if ! command -v pg_dump > /dev/null; then
logger -t "$LOGTAG" "$0: Can't find 'pg_dump' executable. Aborted."
exit 1
fi
if ! command -v aws > /dev/null; then
logger -t "$LOGTAG" "$0: Can't find 'aws cli' executable. Aborted."
exit 1
fi
logger -t "$LOGTAG" "$0: remove any previous dirty backup file"
rm -f /tmp/$TEMPFILE_PREFIX*
### Generate dump and compress it
logger -t "$LOGTAG" "Dumping Database..."
pg_dump -O -x -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -w "$DB_NAME" > "$TEMPFILE"
logger -t "$LOGTAG" "Dumped."
logger -t "$LOGTAG" "Compressing file..."
nice gzip -9 "$TEMPFILE"
logger -t "$LOGTAG" "Compressed."
mv "$TEMPFILE.gz" "$BACKUPFILE"
### Upload it to S3 Bucket and cleanup
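# Objects are keyed by date: s3://<bucket>/YYYY-MM-DD/backup-<db_name>-<unix_timestamp>.sql.gz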
logger -t "$LOGTAG" "Uploading '$BACKUPFILE' to S3..."
aws s3 cp "$BACKUPFILE" "s3://$S3_BACKUP_BUCKET/$DATE/$BACKUPFILE"
logger -t "$LOGTAG" "Uploaded."
logger -t "$LOGTAG" "Clean-up..."
rm -f "$TEMPFILE" "$BACKUPFILE"
rm -f "${TMPDIR:-/tmp}/$TEMPFILE_PREFIX"*
logger -t "$LOGTAG" "Finished."
exit 0
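
To run the backup unattended, a crontab entry along these lines could be used (the script path and schedule here are assumptions; adjust them to your setup):

    # dump the database to S3 every day at 02:00
    0 2 * * * /usr/local/bin/pg-backup-to-s3.sh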