#!/bin/sh
#
# Backup script for mysql and local files.
# Last update: 2016-03-15
#
# This script uses gsutil to upload files to Google Cloud Storage.
#
# Install notes for cron:
# - copy this script into /etc/cron.daily (or anywhere you like).
# - make sure gsutil is on PATH for cron execution (read note below).
# - make sure /etc/boto.cfg exists (gsutil config file)
#
# Note about PATH: cron PATH is set in /etc/crontab
#
# Fail on variable unset (-u) and on error (-e).
set -o nounset
set -o errexit
# Edit these variables to match your environment.
DB_USERNAME=username
DB_PASSWORD=password
DB_NAME=database
SQL_FILE_PREFIX=backup-
LOCAL_FILE=/my/app/files/
LOCAL_FILE_PREFIX=files-
BACKUP_DIR=/var/backups
GOOGLE_BUCKET=gs://my-bucket/backups
TMP_DIR=/tmp/

# Default gsutil config file in /etc/boto.cfg
#export BOTO_CONFIG=/root/.boto

# Script variables. Do not edit.
# Date in format YYYYMMDD_HHMMSS
NOW=$(date +"%Y%m%d_%H%M%S")
SQL_FILE_NAME=$SQL_FILE_PREFIX$NOW.sql
SQL_FILE=$TMP_DIR/$SQL_FILE_NAME
SQL_COMPRESSED_FILE_NAME=$SQL_FILE_NAME.tar.gz
SQL_COMPRESSED_FILE=$TMP_DIR/$SQL_COMPRESSED_FILE_NAME
LOCAL_FILE_NAME=$LOCAL_FILE_PREFIX$NOW.tar.gz
# Backup sequence:
#   1. dump the database to a temp .sql file
#   2. compress the dump
#   3. copy the compressed dump into the local backup dir
#   4. archive the application files into the backup dir
#   5. upload both archives to Google Cloud Storage
#   6. clean up temp files and prune backups older than 5 days
echo "Creating working dir..."
mkdir -p -v "$TMP_DIR"

echo "Creating backup for database $DB_NAME to file $SQL_FILE..."
# NOTE(review): passing the password on the command line leaks it via 'ps';
# consider --defaults-extra-file or ~/.my.cnf instead.
mysqldump -u "$DB_USERNAME" -p"$DB_PASSWORD" "$DB_NAME" > "$SQL_FILE"

echo "Compressing SQL backup..."
# -C switches into the temp dir so the archive stores only the file name,
# not the full /tmp path.
tar -zcf "$SQL_COMPRESSED_FILE" -C "$TMP_DIR" "$SQL_FILE_NAME"

echo "Copying SQL backup to $BACKUP_DIR..."
cp "$SQL_COMPRESSED_FILE" "$BACKUP_DIR"

echo "Compressing local files to $BACKUP_DIR..."
tar zcf "$BACKUP_DIR/$LOCAL_FILE_NAME" "$LOCAL_FILE"

echo "Uploading to Google Cloud Storage..."
gsutil cp "$SQL_COMPRESSED_FILE" "$GOOGLE_BUCKET"
gsutil cp "$BACKUP_DIR/$LOCAL_FILE_NAME" "$GOOGLE_BUCKET"

echo "Cleaning up and deleting backup files older than 5 days..."
rm "$SQL_FILE"
rm "$SQL_COMPRESSED_FILE"
# Prune old archives. The prefix is quoted but the trailing * is left
# unquoted on purpose so the shell expands it to the matching backups.
find "$BACKUP_DIR"/"$LOCAL_FILE_PREFIX"* -mtime +5 -exec rm {} \;
find "$BACKUP_DIR"/"$SQL_FILE_PREFIX"* -mtime +5 -exec rm {} \;

echo "Backup finished."
# NOTE(review): the lines below are web-page artifacts from the snippet host
# that were pasted along with the script; commented out so the script parses.
# Comments (0)
# HTTPSSSH
# You can clone a snippet to your computer for local editing.
# Learn more.