#!/bin/sh

# Make sure to:
# 1) Name this file `backup.sh` and place it in /home/ec2-user (or symlink it)
# 2) Run `sudo apt-get install awscli` to install the AWS CLI
# 3) Run aws configure (enter s3-authorized IAM user and specify region)
# 4) Fill in DB host + name
# 5) Create an S3 bucket for the backups and fill it in below (set a lifecycle rule to expire files older than X days in the bucket; a rough sketch of the commands follows this list)
# 6) Run chmod +x backup.sh
# 7) Test it out via ./backup.sh
# 8) Set up a daily backup at 8AM UTC via `crontab -e`:
# 0 8 * * * /home/ec2-user/backup.sh > /home/ec2-user/backup.log
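#
# For step 5, a rough sketch of the one-time bucket setup (the rule ID and the
# 30-day expiry are placeholders; pick whatever retention you need):
#
#   aws s3 mb s3://p5js-web-editor-backup
#   aws s3api put-bucket-lifecycle-configuration --bucket p5js-web-editor-backup \
#     --lifecycle-configuration '{"Rules":[{"ID":"expire-old-backups","Status":"Enabled","Filter":{"Prefix":""},"Expiration":{"Days":30}}]}'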

# DB host (secondary preferred, so as to avoid impacting primary performance)
HOST=localhost:27017

# DB name
DBNAME=p5js-web-editor

# S3 bucket name
BUCKET=p5js-web-editor-backup

# Linux user account
USER=ec2-user

# Current time
TIME=$(/bin/date +%d-%m-%Y-%T)

# Backup directory
DEST=/home/$USER/tmp

# Tar file of backup directory
TAR=$DEST/../$TIME.tar
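# (with the values above, TAR resolves to something like
#  /home/ec2-user/tmp/../16-11-2016-20:41:40.tar, i.e. a timestamped tar file
#  written directly under /home/ec2-user)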

# Create backup dir (-p avoids an error if it already exists)
/bin/mkdir -p "$DEST"

# Log
echo "Backing up $HOST/$DBNAME to s3://$BUCKET/ on $TIME"

# Dump from MongoDB host into backup directory
/usr/bin/mongodump -h "$HOST" -d "$DBNAME" -o "$DEST"
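# (mongodump should write the dump as BSON and metadata files under $DEST/$DBNAME/)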

# Create tar of backup directory
/bin/tar cvf "$TAR" -C "$DEST" .
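# (-C "$DEST" . archives the dump directory's contents with paths relative to
#  $DEST, so no absolute /home/... prefix ends up inside the tar)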

# Upload tar to s3
/usr/bin/aws s3 cp "$TAR" "s3://$BUCKET/"
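# (the script never deletes anything from the bucket; the lifecycle rule from
#  step 5 is what eventually expires old backups)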

# Remove tar file locally
/bin/rm -f "$TAR"

# Remove backup directory
/bin/rm -rf "$DEST"

# All done
echo "Backup available at https://s3.amazonaws.com/$BUCKET/$TIME.tar"