From e7ed2be473d6137123cae24dcaaced430bd0b2d1 Mon Sep 17 00:00:00 2001 From: caos30 Date: Mon, 17 Oct 2022 20:29:21 -0700 Subject: [PATCH 1/5] Use the DB_HOST parameter on wp-config to run mysqldump database Hi. Thanks for this extremely simple and useful script. I modified 2 lines on the script to be able to use the DB_HOST parameter defined on wp-config.php ;-) Cheers! Sergi --- WordPressBackup.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/WordPressBackup.sh b/WordPressBackup.sh index 168f31e..5817a32 100755 --- a/WordPressBackup.sh +++ b/WordPressBackup.sh @@ -62,6 +62,7 @@ for backupprofile in $profile ; do db_name=$(grep DB_NAME "${wp_config}" | cut -f4 -d"'") db_user=$(grep DB_USER "${wp_config}" | cut -f4 -d"'") db_pass=$(grep DB_PASSWORD "${wp_config}" | cut -f4 -d"'") + db_host=$(grep DB_HOST "${wp_config}" | cut -f4 -d"'") table_prefix=$(grep table_prefix "${wp_config}" | cut -f2 -d"'") # Creates a Backup Directory if one does not exist. @@ -71,7 +72,7 @@ for backupprofile in $profile ; do cd ${backup_location}/${user}/${wp_domain} # MySQL Takes a Dump and compress the Home Directory - mysqldump -u ${db_user} -p${db_pass} ${db_name} | gzip > ./${backupname}-DB.sql.gz && + mysqldump -u ${db_user} --host ${db_host} -p${db_pass} ${db_name} | gzip > ./${backupname}-DB.sql.gz && tar zcPf ./${backupname}-FILES.tar.gz ${wp_root} # Compresses the MySQL Dump and the Home Directory From ee3a6bd1babcd318221e4dca6452d7f1f9cbb194 Mon Sep 17 00:00:00 2001 From: caos30 Date: Tue, 18 Oct 2022 00:46:01 -0700 Subject: [PATCH 2/5] Backup only a list of files and subdirectories In certain hosting configurations i have a wordpress site in the document root of the domain name and i have in subdirectories other sites belonging to subdomains of the same domain name. So i need to backup only the index.php file and the wp-* directories and files of the root directory. 
Furthermore, I usually have other directories storing other data in the same root directory of the main WP site... This change in the SH script needs an optional new parameter called file_list in the profile file for the WP site. Something like this: file_list="wp-* favicon.ico index*" If you comment out this parameter then the default behaviour of the script is the same as until now: back up all the directories and files inside the document root (wp_root parameter on the profile file). --- WordPressBackup.sh | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/WordPressBackup.sh b/WordPressBackup.sh index 5817a32..a0edbcd 100755 --- a/WordPressBackup.sh +++ b/WordPressBackup.sh @@ -65,6 +65,19 @@ for backupprofile in $profile ; do db_host=$(grep DB_HOST "${wp_config}" | cut -f4 -d"'") table_prefix=$(grep table_prefix "${wp_config}" | cut -f2 -d"'") + # Which files & sub-directories to backup of the root directory + if [ "${file_list}" = "" ]; then + file_list_bckp = ${wp_root} + else + files=(${file_list}) + file_list_absolute="" + for i in "${!files[@]}" + do + file_list_absolute="${file_list_absolute} ${wp_root}/${files[i]}"; + done + file_list_bckp=${file_list_absolute} + fi + + # Creates a Backup Directory if one does not exist. 
mkdir -p ${backup_location}/${user}/${wp_domain}/ @@ -73,7 +86,7 @@ for backupprofile in $profile ; do # MySQL Takes a Dump and compress the Home Directory mysqldump -u ${db_user} --host ${db_host} -p${db_pass} ${db_name} | gzip > ./${backupname}-DB.sql.gz && - tar zcPf ./${backupname}-FILES.tar.gz ${wp_root} + tar zcPf ./${backupname}-FILES.tar.gz ${file_list_bckp} # Compresses the MySQL Dump and the Home Directory tar zcPf ./${wp_domain}-${backupname}.tar.gz ./${backupname}-FILES.tar.gz ./${backupname}-DB.sql.gz From de6b8f52be12124196eb81b1b4fc96a240535c9d Mon Sep 17 00:00:00 2001 From: caos30 Date: Tue, 18 Oct 2022 00:48:09 -0700 Subject: [PATCH 3/5] Backup only a list of files and subdirectories (II) This is the change required on the profile to be able to backup only some files/directories on the wp_root path. --- backup.profile.example | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/backup.profile.example b/backup.profile.example index 69c609d..97eaf12 100644 --- a/backup.profile.example +++ b/backup.profile.example @@ -12,6 +12,11 @@ wp_domain=example.com # Directory that WordPress is installed at wp_root=/home/admin/example.com/public_html +# Optional: backup only these files & directories to be included +# Example (all WP): wp-* favicon.ico index* +# Example (only user content): wp-content wp-config* +file_list="wp-* favicon.ico index*" + # Backup location backup_location=/backups From 1b232709e2d61dc41a2e31d0681ed33be3a7f4ac Mon Sep 17 00:00:00 2001 From: caos30 Date: Tue, 18 Oct 2022 02:11:59 -0700 Subject: [PATCH 4/5] Final optimization of CPU & time consuming, not compressing I'm sorry, because i've needed to refactor my 2 previous commits. But finally it save almost 10 times the time needed to make a backup of about 7Gb of space. The solution was not only not merge the DB backup with the backup of the Home directory, but also NOT COMPRESS that Home directory copy. 
So instead of get a compressed TAR.GZ of the Home Directory we get a TAR file containing the Home Directory (or the files and subdirectories you specify in the profile). Furthermore, when you specify not to compress i choosed to not to join both copies (DB + HOME DIR) and leave it as 2 different files per daily backup. At least for me has another advantage: i can get a copy of the database without decompress the daily backup. My change introduced in this commit needs this in the profile (i will commit it in the next commit): # Optional: if your files & directories to be backed up are too bigger in size, you can set this to false # and then that backup will not be compressed and the the daily backup were composed by 2 files (SQL.GZ for DB and TAR for files&dirs) compressed_tar_file=false --- WordPressBackup.sh | 53 ++++++++++++++++++++++++++++++++-------------- 1 file changed, 37 insertions(+), 16 deletions(-) diff --git a/WordPressBackup.sh b/WordPressBackup.sh index a0edbcd..7661865 100755 --- a/WordPressBackup.sh +++ b/WordPressBackup.sh @@ -67,7 +67,7 @@ for backupprofile in $profile ; do # Which files & sub-directories to backup of the root directory if [ "${file_list}" = "" ]; then - file_list_bckp = ${wp_root} + file_list_bckp=${wp_root} else files=(${file_list}) file_list_absolute="" @@ -85,23 +85,44 @@ for backupprofile in $profile ; do cd ${backup_location}/${user}/${wp_domain} # MySQL Takes a Dump and compress the Home Directory - mysqldump -u ${db_user} --host ${db_host} -p${db_pass} ${db_name} | gzip > ./${backupname}-DB.sql.gz && - tar zcPf ./${backupname}-FILES.tar.gz ${file_list_bckp} - - # Compresses the MySQL Dump and the Home Directory - tar zcPf ./${wp_domain}-${backupname}.tar.gz ./${backupname}-FILES.tar.gz ./${backupname}-DB.sql.gz - chmod 600 ./${wp_domain}-${backupname}.tar.gz - - # Generates the Backup Size - FILENAME=${backup_location}/${user}/${wp_domain}/${wp_domain}-${backupname}.tar.gz - FILESIZE=$(du -h "$FILENAME") - if [ "${quiet}" 
= "0" ]; then - echo "$FILESIZE" - fi + if [ "${compressed_tar_file}" != false ]; then + mysqldump -u ${db_user} --host ${db_host} -p${db_pass} ${db_name} | gzip > ./${backupname}-DB.sql.gz && + tar zcPf ./${backupname}-FILES.tar.gz ${file_list_bckp} + + # Compresses the MySQL Dump and the Home Directory + tar zcPf ./${wp_domain}-${backupname}.tar.gz ./${backupname}-FILES.tar.gz ./${backupname}-DB.sql.gz + chmod 600 ./${wp_domain}-${backupname}.tar.gz + + # Generates the Backup Size + #FILENAME=${backup_location}/${user}/${wp_domain}/${wp_domain}-${backupname}.tar.gz + FILENAME=${wp_domain}-${backupname}.tar.gz + FILESIZE=$(du -h "$FILENAME") + if [ "${quiet}" = "0" ]; then + echo "$FILESIZE" + fi + + #Removes the SQL dump and Home DIR to conserve space + rm -rf ./${backupname}-FILES.tar.gz ./${backupname}-DB.sql.gz - #Removes the SQL dump and Home DIR to conserve space - rm -rf ./${backupname}-FILES.tar.gz ./${backupname}-DB.sql.gz + else + mysqldump -u ${db_user} --host ${db_host} -p${db_pass} ${db_name} | gzip > ./${backupname}-DB.sql.gz && + tar -cPf ./${backupname}-FILES.tar ${file_list_bckp} + + # Generates the Backup files Size + if [ "${quiet}" = "0" ]; then + #FILENAME=${backup_location}/${user}/${wp_domain}/${backupname}-DB.sql.gz + FILENAME=${backupname}-DB.sql.gz + FILESIZE=$(du -h "$FILENAME") + echo "$FILESIZE" + + #FILENAME=${backup_location}/${user}/${wp_domain}/${backupname}-FILES.tar + FILENAME=${backupname}-FILES.tar + FILESIZE=$(du -h "$FILENAME") + echo "$FILESIZE" + fi + fi + #Deletes any Backup older than X days find ${backup_location}/${user}/${wp_domain}/ -type f -mtime +${keepdays} -exec rm {} \; fi From 4ad9a5942e31c5c24199d24e15aedf4ed7a488b3 Mon Sep 17 00:00:00 2001 From: caos30 Date: Tue, 18 Oct 2022 02:16:29 -0700 Subject: [PATCH 5/5] Optional parameter on profile: NO COMPRESSED TAR This new OPTIONAL parameter is to make run my other previous commit, to get 2 FILES each day per WP site backed up (instead a unique compressed TAR.GZ), 
and the one containing the Home Directory is not compressed (so it is a TAR file, not TAR.GZ) and it saves almost 10 times the processing time for a Home directory of about 7Gb with a lot of videos and photos. The resulting TAR file is maybe 20-35% bigger, but I prefer to get it faster rather than smaller. Space is not such a problem. --- backup.profile.example | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/backup.profile.example b/backup.profile.example index 97eaf12..1d51f8a 100644 --- a/backup.profile.example +++ b/backup.profile.example @@ -17,6 +17,10 @@ wp_root=/home/admin/example.com/public_html # Example (only user content): wp-content wp-config* file_list="wp-* favicon.ico index*" +# Optional: if your files & directories to be backed up are too big in size, you can set this to false +# and then that backup will not be compressed and the daily backup will be composed of 2 files (SQL.GZ for DB and TAR for files&dirs) +compressed_tar_file=false + # Backup location backup_location=/backups