@dertin
Last active March 14, 2019 16:34
Revisions

  1. dertin revised this gist Mar 14, 2019. 1 changed file with 2 additions and 2 deletions.
    4 changes: 2 additions & 2 deletions s3BackUp.sh
    @@ -100,7 +100,7 @@ function s3-multipart-upload() {
         else
             echo "Something went wrong! $filePath was not uploaded to S3 bucket: $bucket"
             # SEND FAULT REPORT
    -        s3-send-mail "[email protected]" "[email protected]" "ALERT BACKUP FAULT" "Verify the sending of file parts to the AWS S3 service"
    +        s3-send-mail "${MAIL_FROM}" "${MAIL_TO}" "ALERT BACKUP FAULT - ${BACKUP_NAME}" "Verify the sending of file parts to the AWS S3 service"
             exit 1
         fi

    @@ -124,7 +124,7 @@ function main() {
         if [ $? != 0 ]; then
             echo "File is corrupted ... ${BACKUP_PATH_FILE}"
             # SEND FAULT REPORT
    -        s3-send-mail "[email protected]" "[email protected]" "ALERT BACKUP FAULT" "Check backup compression"
    +        s3-send-mail "${MAIL_FROM}" "${MAIL_TO}" "ALERT BACKUP FAULT - ${BACKUP_NAME}" "Check backup compression"
             exit 1
         fi
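    With this change the alert sender and recipient come from the script's
    arguments instead of hard-coded addresses. A minimal smoke test of the new
    call shape (all values are placeholders, and the sender must be an address
    verified in SES):

        MAIL_FROM="[email protected]"
        MAIL_TO="[email protected]"
        BACKUP_NAME="example"
        s3-send-mail "${MAIL_FROM}" "${MAIL_TO}" "ALERT BACKUP FAULT - ${BACKUP_NAME}" "test message"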

  2. dertin revised this gist Mar 14, 2019. 1 changed file with 12 additions and 5 deletions.
    17 changes: 12 additions & 5 deletions s3BackUp.sh
    @@ -1,16 +1,23 @@
     #!/bin/bash
     
    -# CONFIG
    +# Usage:
    +# bash s3BackUp.sh YOUR_BACKUP_DIRECTORY BACKUP_NAME YOUR_BUCKET MAIL_FROM MAIL_TO (OPTIONAL: S3_FOLDER PROFILE)
    +# bash s3BackUp.sh /var/www/webdisk/example.com/ example my_bucket [email protected] [email protected] backup default
    +
    +# Arguments:
     readonly BACKUP_PATH_NO_REMOVE=$1
     readonly BACKUP_NAME=$2
     readonly S3_BUCKET_NAME=$3
    -readonly S3_FOLDER=${4-backup}
    +readonly MAIL_FROM=${4}
    +readonly MAIL_TO=${5}
    +readonly S3_FOLDER=${6-backup}
    +readonly PROFILE=${7-default}
     
    +# Default:
     readonly PREFIX=backup_
     readonly DATE=`date +%d-%m-%Y`
     readonly BACKUP_FILE_NAME=${PREFIX}${BACKUP_NAME}_${DATE}.tgz
     readonly BACKUP_PATH_FILE=${HOME}/${BACKUP_FILE_NAME}
     
     readonly S3_BUCKET_BACKUP=s3://${S3_BUCKET_NAME}/${S3_FOLDER}/
     readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_FILE_NAME}
     readonly TEMP_PARTS=${HOME}/temp-parts
    @@ -124,7 +131,7 @@ function main() {
         rm -r ${TEMP_EXTRACT}
     
         # SEND NEW BACKUP TO S3
    -    s3-multipart-upload ${BACKUP_PATH_FILE} ${S3_BUCKET_NAME} ${S3_OUTPUT_BACKUP} ${TEMP_PARTS}
    +    s3-multipart-upload ${BACKUP_PATH_FILE} ${S3_BUCKET_NAME} ${S3_OUTPUT_BACKUP} ${TEMP_PARTS} ${PROFILE}
     
         # DELETE OLD BACKUP IN S3
         aws s3 ls ${S3_BUCKET_BACKUP} | while read -r line;
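    The optional arguments rely on bash's default-value expansion; a short
    sketch of how `${6-backup}` behaves:

        # If $6 is unset, S3_FOLDER falls back to "backup"; an explicitly empty
        # sixth argument ("") is kept as an empty string. The colon form would
        # also treat empty as unset:
        readonly S3_FOLDER=${6-backup}    # unset -> "backup", "" -> ""
        # readonly S3_FOLDER=${6:-backup} # unset -> "backup", "" -> "backup"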
  3. dertin revised this gist Mar 14, 2019. 1 changed file with 15 additions and 13 deletions.
    28 changes: 15 additions & 13 deletions s3BackUp.sh
    @@ -1,24 +1,26 @@
     #!/bin/bash
     
    -# DIRECTORY TO BACK UP
    -readonly BACKUP_PATH_NO_REMOVE=/var/www/webdisk/example.com/
    +# CONFIG
    +readonly BACKUP_PATH_NO_REMOVE=$1
    +readonly BACKUP_NAME=$2
    +readonly S3_BUCKET_NAME=$3
    +readonly S3_FOLDER=${4-backup}
     
    -readonly DATE=`date +%d-%m-%Y`
     readonly PREFIX=backup_
    -readonly S3_FOLDER=backup
    -readonly BACKUP_NAME=${PREFIX}example.com_${DATE}.tgz
    -readonly BACKUP_FILE=${HOME}/${BACKUP_NAME}
    -readonly S3_BUCKET_NAME=example.com
    +readonly DATE=`date +%d-%m-%Y`
    +readonly BACKUP_FILE_NAME=${PREFIX}${BACKUP_NAME}_${DATE}.tgz
    +readonly BACKUP_PATH_FILE=${HOME}/${BACKUP_FILE_NAME}
     
     readonly S3_BUCKET_BACKUP=s3://${S3_BUCKET_NAME}/${S3_FOLDER}/
    -readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_NAME}
    +readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_FILE_NAME}
     readonly TEMP_PARTS=${HOME}/temp-parts
     readonly TEMP_EXTRACT=${HOME}/temp-extract
     
     #####
     function finish() {
         rm -r ${TEMP_PARTS} 2> /dev/null
         rm -r ${TEMP_EXTRACT} 2> /dev/null
    -    rm ${BACKUP_FILE} 2> /dev/null
    +    rm ${BACKUP_PATH_FILE} 2> /dev/null
     }
     trap finish EXIT
     #####
    @@ -102,16 +104,16 @@ function s3-multipart-upload() {
     #####
     function main() {
         # Release file in local and remote destination
    -    rm ${BACKUP_FILE} 2> /dev/null
    +    rm ${BACKUP_PATH_FILE} 2> /dev/null
     
         # CREATE BACKUP ARCHIVE
    -    tar czvf ${BACKUP_FILE} ${BACKUP_PATH_NO_REMOVE}
    +    tar czvf ${BACKUP_PATH_FILE} ${BACKUP_PATH_NO_REMOVE}
     
         # VERIFY BACKUP ARCHIVE
         rm -r ${TEMP_EXTRACT}
         mkdir -p ${TEMP_EXTRACT}
     
    -    tar xzf ${BACKUP_FILE} --directory ${TEMP_EXTRACT} > /dev/null
    +    tar xzf ${BACKUP_PATH_FILE} --directory ${TEMP_EXTRACT} > /dev/null
         if [ $? != 0 ]; then
             echo "File is corrupted ... ${BACKUP_PATH_FILE}"
             # SEND FAULT REPORT
    @@ -122,7 +124,7 @@ function main() {
         rm -r ${TEMP_EXTRACT}
     
         # SEND NEW BACKUP TO S3
    -    s3-multipart-upload ${BACKUP_FILE} ${S3_BUCKET_NAME} ${S3_OUTPUT_BACKUP} ${TEMP_PARTS}
    +    s3-multipart-upload ${BACKUP_PATH_FILE} ${S3_BUCKET_NAME} ${S3_OUTPUT_BACKUP} ${TEMP_PARTS}
     
         # DELETE OLD BACKUP IN S3
         aws s3 ls ${S3_BUCKET_BACKUP} | while read -r line;
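    With the rename, the archive name is assembled from the script arguments
    rather than hard-coded; for instance, a run with BACKUP_NAME=example on
    14 March 2019 would produce:

        # ${PREFIX}${BACKUP_NAME}_${DATE}.tgz, with DATE from `date +%d-%m-%Y`
        # -> ${HOME}/backup_example_14-03-2019.tgz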
  4. dertin revised this gist Mar 14, 2019. 1 changed file with 2 additions and 2 deletions.
    4 changes: 2 additions & 2 deletions s3BackUp.sh
    @@ -9,7 +9,7 @@ readonly S3_FOLDER=backup
     readonly BACKUP_NAME=${PREFIX}example.com_${DATE}.tgz
     readonly BACKUP_FILE=${HOME}/${BACKUP_NAME}
     readonly S3_BUCKET_NAME=example.com
    -readonly S3_BUCKET_BACKUP=s3://${S3_BUCKET_NAME}/${S3_FOLDER}
    +readonly S3_BUCKET_BACKUP=s3://${S3_BUCKET_NAME}/${S3_FOLDER}/
     readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_NAME}
     readonly TEMP_PARTS=${HOME}/temp-parts
     readonly TEMP_EXTRACT=${HOME}/temp-extract
    @@ -138,7 +138,7 @@ function main() {
     
         if [[ $filePath != "" ]]
         then
    -        aws s3 rm ${S3_BUCKET_BACKUP}/$filePath
    +        aws s3 rm ${S3_BUCKET_BACKUP}$filePath
         fi
         fi
         fi
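    Keeping the trailing slash inside S3_BUCKET_BACKUP means the plain
    concatenation in the delete loop still forms a valid object URI.
    Illustratively (bucket and file name are placeholders):

        # With S3_BUCKET_BACKUP=s3://my_bucket/backup/ and filePath=backup_example_01-03-2019.tgz:
        aws s3 rm ${S3_BUCKET_BACKUP}$filePath
        # expands to: aws s3 rm s3://my_bucket/backup/backup_example_01-03-2019.tgz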
  5. dertin revised this gist Mar 14, 2019. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion s3BackUp.sh
    @@ -9,7 +9,7 @@ readonly S3_FOLDER=backup
     readonly BACKUP_NAME=${PREFIX}example.com_${DATE}.tgz
     readonly BACKUP_FILE=${HOME}/${BACKUP_NAME}
     readonly S3_BUCKET_NAME=example.com
    -readonly S3_BUCKET_BACKUP=s3://${S3_BUCKET_NAME}/${S3_FOLDER}/
    +readonly S3_BUCKET_BACKUP=s3://${S3_BUCKET_NAME}/${S3_FOLDER}
     readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_NAME}
     readonly TEMP_PARTS=${HOME}/temp-parts
     readonly TEMP_EXTRACT=${HOME}/temp-extract
  6. dertin revised this gist Mar 14, 2019. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion s3BackUp.sh
    @@ -9,7 +9,7 @@ readonly S3_FOLDER=backup
     readonly BACKUP_NAME=${PREFIX}example.com_${DATE}.tgz
     readonly BACKUP_FILE=${HOME}/${BACKUP_NAME}
     readonly S3_BUCKET_NAME=example.com
    -readonly S3_BUCKET_BACKUP=${S3_BUCKET_NAME}/${S3_FOLDER}/
    +readonly S3_BUCKET_BACKUP=s3://${S3_BUCKET_NAME}/${S3_FOLDER}/
     readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_NAME}
     readonly TEMP_PARTS=${HOME}/temp-parts
     readonly TEMP_EXTRACT=${HOME}/temp-extract
  7. dertin revised this gist Mar 13, 2019. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion s3BackUp.sh
    @@ -9,7 +9,7 @@ readonly S3_FOLDER=backup
     readonly BACKUP_NAME=${PREFIX}example.com_${DATE}.tgz
     readonly BACKUP_FILE=${HOME}/${BACKUP_NAME}
     readonly S3_BUCKET_NAME=example.com
    -readonly S3_BUCKET_BACKUP=s3:://${S3_BUCKET_NAME}/${S3_FOLDER}/
    +readonly S3_BUCKET_BACKUP=${S3_BUCKET_NAME}/${S3_FOLDER}/
     readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_NAME}
     readonly TEMP_PARTS=${HOME}/temp-parts
     readonly TEMP_EXTRACT=${HOME}/temp-extract
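    The successive one-line fixes above converge on the s3:// URI scheme that
    the AWS CLI expects for bucket paths, e.g. (bucket name is a placeholder):

        aws s3 ls s3://my_bucket/backup/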
  8. dertin created this gist Mar 13, 2019.
    149 changes: 149 additions & 0 deletions s3BackUp.sh
    @@ -0,0 +1,149 @@
    #!/bin/bash
    
    # DIRECTORY TO BACK UP
    readonly BACKUP_PATH_NO_REMOVE=/var/www/webdisk/example.com/
    
    readonly DATE=`date +%d-%m-%Y`
    readonly PREFIX=backup_
    readonly S3_FOLDER=backup
    readonly BACKUP_NAME=${PREFIX}example.com_${DATE}.tgz
    readonly BACKUP_FILE=${HOME}/${BACKUP_NAME}
    readonly S3_BUCKET_NAME=example.com
    readonly S3_BUCKET_BACKUP=s3:://${S3_BUCKET_NAME}/${S3_FOLDER}/
    readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_NAME}
    readonly TEMP_PARTS=${HOME}/temp-parts
    readonly TEMP_EXTRACT=${HOME}/temp-extract
    
    #####
    function finish() {
        rm -r ${TEMP_PARTS} 2> /dev/null
        rm -r ${TEMP_EXTRACT} 2> /dev/null
        rm ${BACKUP_FILE} 2> /dev/null
    }
    trap finish EXIT
    #####
    function s3-send-mail() {
        local from=$1
        local to=$2
        local subject=$3
        local text=$4
        aws ses send-email \
            --from $from \
            --destination "ToAddresses=${to}" \
            --message "Subject={Data=${subject},Charset=utf8},Body={Text={Data=${text},Charset=utf8},Html={Data=${text},Charset=utf8}}"
    }
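    # Note: `aws ses send-email` succeeds only if the from address is a
    # SES-verified identity; with no --profile flag it uses the default
    # AWS CLI credentials and region.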
    #####
    function s3-multipart-upload() {
        sudo apt-get install -y -qq jq
        local filePath=$1 # file to upload
        local bucket=$2 # name of S3 bucket
        local s3Folder=$3 # destination of the file in S3
        local dirParts=$4 # local folder where you create the parts of the file to send
        local profile=${5-default} # configuration profile of aws-cli
    
        # Split into 90 MB parts; AWS recommends multipart uploads for objects
        # larger than 100 MB (individual parts may be 5 MB to 5 GB).
        mbSplitSize=90
        ((partSize = $mbSplitSize * 1000000))
    
        # Get main file size
        echo "Preparing $filePath for multipart upload"
        fileSize=`wc -c $filePath | awk '{print $1}'`
        ((parts = ($fileSize+$partSize-1) / partSize))
    
        # Get main file hash
        mainMd5Hash=`openssl md5 -binary $filePath | base64`
    
        # Make directory to store temporary parts
        echo "Splitting $filePath into $parts temporary parts"
        rm -r ${dirParts}
        mkdir -p ${dirParts}
        cd ${dirParts}
        split -b $partSize $filePath
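        # split(1) names the parts xaa, xab, xac, ..., so the `for file in *`
        # glob below iterates over them in the original byte order.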

        # Create multipart upload
        echo "Initiating multipart upload for $filePath"
        uploadId=`aws s3api create-multipart-upload --bucket $bucket --key $s3Folder --metadata md5=$mainMd5Hash --profile $profile | jq -r '.UploadId'`
    
        # Generate fileparts.json file that will be used at the end of the multipart upload
        jsonData="{\"Parts\":["
        local index=0 # part counter, reset explicitly so reruns start from 1
        for file in *
        do
            ((index++))
            echo "Uploading part $index of $parts..."
            hashData=`openssl md5 -binary $file | base64`
            eTag=`aws s3api upload-part --bucket $bucket --key $s3Folder --part-number $index --body $file --upload-id $uploadId --profile $profile | jq -r '.ETag'`
            jsonData+="{\"ETag\":$eTag,\"PartNumber\":$index}"
    
            if (( $index == $parts ))
            then
                jsonData+="]}"
            else
                jsonData+=","
            fi
        done
        jq -n $jsonData > fileparts.json
    
        # Complete multipart upload, check ETag to verify success
        mainEtag=`aws s3api complete-multipart-upload --multipart-upload file://fileparts.json --bucket $bucket --key $s3Folder --upload-id $uploadId --profile $profile | jq -r '.ETag'`
        if [[ $mainEtag != "" ]];
        then
            echo "Successfully uploaded: $filePath to S3 bucket: $bucket"
        else
            echo "Something went wrong! $filePath was not uploaded to S3 bucket: $bucket"
            # SEND FAULT REPORT
            s3-send-mail "[email protected]" "[email protected]" "ALERT BACKUP FAULT" "Verify the sending of file parts to the AWS S3 service"
            exit 1
        fi
    
        # Clean up files
        rm -r ${TEMP_PARTS}
        cd ..
    }
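    # A possible sanity check after the upload (a sketch, not part of the
    # original script): fetch the stored object's metadata and compare its md5.
    #   aws s3api head-object --bucket "$bucket" --key "$s3Folder" --profile "$profile"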
    #####
    function main() {
        # Release file in local and remote destination
        rm ${BACKUP_FILE} 2> /dev/null
    
        # CREATE BACKUP ARCHIVE
        tar czvf ${BACKUP_FILE} ${BACKUP_PATH_NO_REMOVE}
    
        # VERIFY BACKUP ARCHIVE
        rm -r ${TEMP_EXTRACT}
        mkdir -p ${TEMP_EXTRACT}
    
        tar xzf ${BACKUP_FILE} --directory ${TEMP_EXTRACT} > /dev/null
        if [ $? != 0 ]; then
            echo "File is corrupted ... ${BACKUP_FILE}"
            # SEND FAULT REPORT
            s3-send-mail "[email protected]" "[email protected]" "ALERT BACKUP FAULT" "Check backup compression"
            exit 1
        fi
    
        rm -r ${TEMP_EXTRACT}
    
        # SEND NEW BACKUP TO S3
        s3-multipart-upload ${BACKUP_FILE} ${S3_BUCKET_NAME} ${S3_OUTPUT_BACKUP} ${TEMP_PARTS}
    
        # DELETE OLD BACKUP IN S3 (objects older than 7 days)
        aws s3 ls ${S3_BUCKET_BACKUP} | while read -r line;
        do
            strCreateDate=`echo $line|awk {'print $1" "$2'}`
            if date -d "${strCreateDate}" >/dev/null 2>&1
            then
                createDate=`date --date "$strCreateDate" +%s`
                olderThan=`date --date "7 days ago" +%s`
                if [[ $createDate -lt $olderThan ]]
                then
                    filePath=`echo $line|awk {'print $4'}`
    
                    if [[ $filePath != "" ]]
                    then
                        aws s3 rm ${S3_BUCKET_BACKUP}/$filePath
                    fi
                fi
            fi
        done;
    }
    #####

    main
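    A script like this is typically driven from cron; one possible entry, where
    the install path and every argument are assumptions for illustration:

        # Daily backup at 03:00, appending output to a log file
        0 3 * * * /bin/bash /usr/local/bin/s3BackUp.sh /var/www/webdisk/example.com/ example my_bucket [email protected] [email protected] backup default >> /var/log/s3BackUp.log 2>&1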