Skip to content

Commit 2073900

Browse files
committed
Added support for sorting backups by subdirectories
1 parent b3afc7b commit 2073900

File tree

1 file changed

+26
-10
lines changed

backup.sh

Lines changed: 26 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -29,6 +29,9 @@ pushToLog()
2929

3030
CUR_PATH=$(dirname "$0")
3131
SCRIPT_LOG_PATH=
32+
SORT_BACKUPS=
33+
CLOUD_SUBDIR_FILES=
34+
CLOUD_SUBDIR_BASES=
3235
SCRIPT_INSTANCE_KEY=$(tr -cd 'a-zA-Z0-9' < /dev/urandom | head -c 10)
3336
SCRIPT_ERRORS_TMP="/tmp/wscb.tmp.${SCRIPT_INSTANCE_KEY}"
3437

@@ -159,6 +162,11 @@ fi
159162
SCRIPT_INSTANCE_KEY=$(tr -cd 'a-zA-Z0-9' < /dev/urandom | head -c 10)
160163
RSYNC_EXCLUDE_LIST_FILE="${TMP_PATH}/WebServerCloudBackups.tmp.rsync_exclude.${SCRIPT_INSTANCE_KEY}"
161164

165+
if [[ "$SORT_BACKUPS" == "true" ]]; then
166+
CLOUD_SUBDIR_FILES="/files"
167+
CLOUD_SUBDIR_BASES="/databases"
168+
fi
169+
162170
# projects loop
163171
for i in "${!projects[@]}"
164172
do
@@ -258,12 +266,16 @@ do
258266

259267
touch "$CHECK_FILE"
260268

261-
CLOUD_FOLDER_CHECK=$(curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -T "$CHECK_FILE" "${PROJECT_CLOUD_PATH}/" 2>&1 >/dev/null)
269+
CLOUD_FOLDER_CHECK=$(curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -T "$CHECK_FILE" "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_FILES}/" 2>&1 >/dev/null)
262270

263271
if [ -n "$CLOUD_FOLDER_CHECK" ]; then
264272
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -X MKCOL "$PROJECT_CLOUD_PATH" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Can't create directory for $PROJECT_NAME files in cloud (proto: ${CLOUD_PROTO_PROJECT_FILES}; period: ${PERIOD}; cloud path: ${PROJECT_CLOUD_PATH})"; CLOUD_CHECK_FAIL=1; }
273+
274+
if [[ "$SORT_BACKUPS" == "true" ]]; then
275+
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -X MKCOL "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_FILES}" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Can't create subdirectory for $PROJECT_NAME files in cloud (proto: ${CLOUD_PROTO_PROJECT_FILES}; period: ${PERIOD}; cloud path: ${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_FILES})"; CLOUD_CHECK_FAIL=1; }
276+
fi
265277
else
266-
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -X DELETE "${PROJECT_CLOUD_PATH}/check_folder_in_cloud" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Can't remove check file for $PROJECT_NAME files in cloud (proto: ${CLOUD_PROTO_PROJECT_FILES}; period: ${PERIOD}; check file cloud path: ${PROJECT_CLOUD_PATH}/check_folder_in_cloud)"; CLOUD_CHECK_FAIL=1; }
278+
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -X DELETE "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_FILES}/check_folder_in_cloud" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Can't remove check file for $PROJECT_NAME files in cloud (proto: ${CLOUD_PROTO_PROJECT_FILES}; period: ${PERIOD}; check file cloud path: ${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_FILES}/check_folder_in_cloud)"; CLOUD_CHECK_FAIL=1; }
267279
fi
268280

269281
rm "$CHECK_FILE"
@@ -420,17 +432,17 @@ do
420432
UPLOAD_FAIL=0
421433

422434
if [[ $CLOUD_PROTO_PROJECT_FILES == "webdav" ]]; then
423-
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -T "{$(ls $ARCHIVE_PATH* | tr '\n' ',' | sed 's/,$//g')}" "${PROJECT_CLOUD_PATH}/" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Error occurred while uploading $PROJECT_NAME files archive (proto: ${CLOUD_PROTO_PROJECT_FILES}; period: ${PERIOD})"; UPLOAD_FAIL=1; }
435+
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -T "{$(ls $ARCHIVE_PATH* | tr '\n' ',' | sed 's/,$//g')}" "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_FILES}/" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Error occurred while uploading $PROJECT_NAME files archive (proto: ${CLOUD_PROTO_PROJECT_FILES}; period: ${PERIOD})"; UPLOAD_FAIL=1; }
424436
elif [[ $CLOUD_PROTO_PROJECT_FILES == "s3" ]]; then
425-
s3cmd put $(ls "$ARCHIVE_PATH"* | tr '\n' ' ') "${PROJECT_CLOUD_PATH}/" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Error occurred while uploading $PROJECT_NAME files archive (proto: ${CLOUD_PROTO_PROJECT_FILES}; period: ${PERIOD})"; UPLOAD_FAIL=1; }
437+
s3cmd put $(ls "$ARCHIVE_PATH"* | tr '\n' ' ') "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_FILES}/" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Error occurred while uploading $PROJECT_NAME files archive (proto: ${CLOUD_PROTO_PROJECT_FILES}; period: ${PERIOD})"; UPLOAD_FAIL=1; }
426438
fi
427439

428440
if [ "$UPLOAD_FAIL" -eq 0 ]; then
429441
$SETCOLOR_SUCCESS
430442
echo "[OK]"
431443
$SETCOLOR_NORMAL
432444

433-
NEW_BACKUP_FILES="${PROJECT_CLOUD_PATH}/"$(ls "$ARCHIVE_PATH"* | sed 's/.*\///g' | tr '\n' ',' | sed 's/,$//g' | sed "s|,|,$PROJECT_CLOUD_PATH/|g")
445+
NEW_BACKUP_FILES="${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_FILES}/"$(ls "$ARCHIVE_PATH"* | sed 's/.*\///g' | tr '\n' ',' | sed 's/,$//g' | sed "s|,|,${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_FILES}/|g")
434446

435447
echo "$NEW_BACKUP_FILES" > "${LAST_BACKUPS_PATH}/${PROJECT_NAME}_files_${PERIOD}"
436448
else
@@ -635,12 +647,16 @@ do
635647

636648
touch "$CHECK_FILE"
637649

638-
CLOUD_FOLDER_CHECK=$(curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -T "$CHECK_FILE" "${PROJECT_CLOUD_PATH}/" 2>&1 >/dev/null)
650+
CLOUD_FOLDER_CHECK=$(curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -T "$CHECK_FILE" "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_BASES}/" 2>&1 >/dev/null)
639651

640652
if [ -n "$CLOUD_FOLDER_CHECK" ]; then
641653
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -X MKCOL "$PROJECT_CLOUD_PATH" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Can't create directory for $PROJECT_NAME databases in cloud (proto: ${CLOUD_PROTO_PROJECT_DB}; period: ${PERIOD}; cloud path: ${PROJECT_CLOUD_PATH})"; CLOUD_CHECK_FAIL=1; }
654+
655+
if [[ "$SORT_BACKUPS" == "true" ]]; then
656+
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -X MKCOL "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_BASES}" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Can't create subdirectory for $PROJECT_NAME databases in cloud (proto: ${CLOUD_PROTO_PROJECT_DB}; period: ${PERIOD}; cloud path: ${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_BASES})"; CLOUD_CHECK_FAIL=1; }
657+
fi
642658
else
643-
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -X DELETE "${PROJECT_CLOUD_PATH}/check_folder_in_cloud" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Can't remove check file for $PROJECT_NAME databases in cloud (proto: ${CLOUD_PROTO_PROJECT_DB}; period: ${PERIOD}; check file cloud path: ${PROJECT_CLOUD_PATH}/check_folder_in_cloud)"; CLOUD_CHECK_FAIL=1; }
659+
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -X DELETE "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_BASES}/check_folder_in_cloud" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Can't remove check file for $PROJECT_NAME databases in cloud (proto: ${CLOUD_PROTO_PROJECT_DB}; period: ${PERIOD}; check file cloud path: ${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_BASES}/check_folder_in_cloud)"; CLOUD_CHECK_FAIL=1; }
644660
fi
645661

646662
rm "$CHECK_FILE"
@@ -740,17 +756,17 @@ do
740756
UPLOAD_FAIL=0
741757

742758
if [[ $CLOUD_PROTO_PROJECT_DB == "webdav" ]]; then
743-
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -T "{$(ls $ARCHIVE_PATH* | tr '\n' ',' | sed 's/,$//g')}" "${PROJECT_CLOUD_PATH}/" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Error occurred while uploading $PROJECT_NAME database archive (proto: ${CLOUD_PROTO_PROJECT_DB}; period: ${PERIOD})"; UPLOAD_FAIL=1; }
759+
curl -fsS --user "$CLOUD_USER":"$CLOUD_PASS" -T "{$(ls $ARCHIVE_PATH* | tr '\n' ',' | sed 's/,$//g')}" "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_BASES}/" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Error occurred while uploading $PROJECT_NAME database archive (proto: ${CLOUD_PROTO_PROJECT_DB}; period: ${PERIOD})"; UPLOAD_FAIL=1; }
744760
elif [[ $CLOUD_PROTO_PROJECT_DB == "s3" ]]; then
745-
s3cmd put $(ls "$ARCHIVE_PATH"* | tr '\n' ' ') "${PROJECT_CLOUD_PATH}/" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Error occurred while uploading $PROJECT_NAME database archive (proto: ${CLOUD_PROTO_PROJECT_DB}; period: ${PERIOD})"; UPLOAD_FAIL=1; }
761+
s3cmd put $(ls "$ARCHIVE_PATH"* | tr '\n' ' ') "${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_BASES}/" > /dev/null 2>"$SCRIPT_ERRORS_TMP" || { pushToLog "[ERROR] - Error occurred while uploading $PROJECT_NAME database archive (proto: ${CLOUD_PROTO_PROJECT_DB}; period: ${PERIOD})"; UPLOAD_FAIL=1; }
746762
fi
747763

748764
if [ "$UPLOAD_FAIL" -eq 0 ]; then
749765
$SETCOLOR_SUCCESS
750766
echo "[OK]"
751767
$SETCOLOR_NORMAL
752768

753-
NEW_BACKUP_FILES="${PROJECT_CLOUD_PATH}"/$(ls "$ARCHIVE_PATH"* | sed 's/.*\///g' | tr '\n' ',' | sed 's/,$//g' | sed "s|,|,$PROJECT_CLOUD_PATH/|g")
769+
NEW_BACKUP_FILES="${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_BASES}"/$(ls "$ARCHIVE_PATH"* | sed 's/.*\///g' | tr '\n' ',' | sed 's/,$//g' | sed "s|,|,${PROJECT_CLOUD_PATH}${CLOUD_SUBDIR_BASES}/|g")
754770

755771
echo "$NEW_BACKUP_FILES" > "${LAST_BACKUPS_PATH}/${PROJECT_NAME}_base_${PERIOD}"
756772
else

0 commit comments

Comments
 (0)