#!/bin/sh
# This script will back up one or more MySQL databases
# and then optionally email them and/or FTP them.
# It creates a separate backup file for each database, named by day of the week,
# e.g. 1-dbname1.sql.gz for database=dbname1 on Monday (day=1).
# This is a trick so that you never have more than 7 days' worth of backups on your FTP server:
# as the weeks rotate, the files from the same day of the previous week are overwritten.
############################################################
#===> site-specific variables - customize for your site
# List all of the MySQL databases that you want to back up here,
# each separated by a space
databases="db_name"
# Directory where you want the backup files to be placed
backupdir=/home/username/backup
# MySQL dump command, use the full path name here
mysqldumpcmd=/usr/bin/mysqldump
# MySQL Username and password
userpassword=" --user=user_name --password=pass_here"
# MySQL dump options
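# --quick dumps rows one at a time instead of buffering whole tables in memory,
# --add-drop-table writes a DROP TABLE before each CREATE TABLE (clean restores),
# --add-locks wraps each table's INSERTs in LOCK/UNLOCK TABLES for faster reloads,
# --extended-insert uses multi-row INSERT statements,
# --lock-tables locks the tables of each database for a consistent dump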
dumpoptions=" --quick --add-drop-table --add-locks --extended-insert --lock-tables"
# Unix Commands
gzip=/bin/gzip
uuencode=/usr/bin/uuencode
mail=/bin/mail
# Send Backup? Would you like the backup emailed to you?
# Set to "y" if you do
sendbackup="n"
subject="Backup is done"
mailto="email@domain.tld"
#===> site-specific variables for FTP - use an FQDN or IP address for the server
ftpbackup="y"
ftpserver="ftp.domain.tld"
ftpuser="ftp_username"
ftppasswd="ftp_pass"
# If you are keeping the backups in a subdirectory of your FTP root
ftpdir="/"
#===> END site-specific variables - customize for your site
############################################################
# Get the day of the week (0-6, Sunday = 0)
# This lets us keep one backup for each day of the week
# Just alter the date command if you want to use a timestamp instead
DOW=`date +%w`
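# Illustrative alternative (not in the original script): use a full date stamp
# to keep one dump per calendar day instead of the 7-day rotation
# DOW=`date +%Y-%m-%d`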
# Create our backup directory if not already there
mkdir -p ${backupdir}
if [ ! -d ${backupdir} ]
then
echo "Not a directory: ${backupdir}"
exit 1
fi
# Dump all of our databases
echo "Dumping MySQL Databases"
for database in $databases
do
$mysqldumpcmd $userpassword $dumpoptions $database > ${backupdir}/${DOW}-${database}.sql
done
# Compress all of our backup files
echo "Compressing Dump Files"
for database in $databases
do
rm -f ${backupdir}/${DOW}-${database}.sql.gz
$gzip ${backupdir}/${DOW}-${database}.sql
done
# Send the backups via email
if [ $sendbackup = "y" ]
then
for database in $databases
do
$uuencode ${backupdir}/${DOW}-${database}.sql.gz > ${backupdir}/${DOW}-${database}.sql.gz.uu
$mail -s "$subject : $database" $mailto < ${backupdir}/${DOW}-${database}.sql.gz.uu
done
fi
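# Note: the emailed file is uuencoded; on the receiving side it can be
# restored to a .sql.gz file with the companion uudecode utility.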
# FTP the backups to the off-site server
if [ $ftpbackup = "y" ]
then
echo "FTP files to $ftpserver FTP server"
for database in $databases
do
echo "==> ${backupdir}/${DOW}-${database}.sql.gz"
ftp -n $ftpserver <<EOF
user $ftpuser $ftppasswd
bin
prompt
cd $ftpdir
lcd ${backupdir}
put ${DOW}-${database}.sql.gz
quit
EOF
done
fi
# And we're done
ls -l ${backupdir}
echo "Dump Complete!"
exit
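For the day-of-week rotation to work, the script has to run once per day. A minimal crontab entry, assuming the script is saved as /home/username/mysql_backup.sh (an illustrative path, not part of the script above), could look like this:

# Run the MySQL backup every day at 02:30 and log the output
30 2 * * * /home/username/mysql_backup.sh >> /home/username/backup/backup.log 2>&1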
Creating a MySQL backup - variant 2
#!/bin/bash
# Destination folder where backups are stored
DEST=/tmp/bacula/server01
CURRDATE=$(date +"%F")
# Hostname where MySQL is running
HOSTNAME="srv-mysql"
# Username used to make the backup
USER="root"
# File that contains the MySQL user password
PASS="$(cat /root/etc/mysqlpass)"
DATABASES=$(mysql -h $HOSTNAME -u $USER -p$PASS -e "SHOW DATABASES;" | tr -d "| " | grep -v Database)
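# Optional, illustrative refinement (not part of the original script): exclude
# MySQL's internal schemas from the list, since they usually need no backup
# DATABASES=$(echo "$DATABASES" | grep -Ev "^(information_schema|performance_schema)$")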
[ ! -d $DEST ] && mkdir -p $DEST
for db in $DATABASES; do
FILE="${DEST}/$db.sql.gz"
FILEDATE=
# Only take one backup per day: skip this database if today's dump already exists
[ -f $FILE ] && FILEDATE=$(date -r $FILE +"%F")
[ "$FILEDATE" == "$CURRDATE" ] && continue
[ -f $FILE ] && mv "$FILE" "${FILE}.old"
mysqldump --single-transaction --routines --quick -h $HOSTNAME -u $USER -p$PASS -B $db | gzip > "$FILE"
chown bacula:disk "$FILE"
rm -f "${FILE}.old"
done
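Because the script compares each dump file's modification date with the current date, it is safe to schedule it more than once a day; databases already dumped today are simply skipped. A minimal crontab entry, assuming the script is saved as /root/bin/mysql_backup2.sh (an illustrative path):

# Attempt the backup every hour; already-dumped databases are skipped
0 * * * * /root/bin/mysql_backup2.sh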