Skip to content

Instantly share code, notes, and snippets.

@smarteist
Last active September 6, 2024 15:53
Show Gist options
  • Select an option

  • Save smarteist/bdc2b3a9f3c74ad1b848355f6257c9d5 to your computer and use it in GitHub Desktop.

Select an option

Save smarteist/bdc2b3a9f3c74ad1b848355f6257c9d5 to your computer and use it in GitHub Desktop.

Revisions

  1. smarteist revised this gist Sep 6, 2024. 1 changed file with 21 additions and 34 deletions.
    55 changes: 21 additions & 34 deletions downloader.sh
    Original file line number Diff line number Diff line change
    @@ -6,45 +6,32 @@ urls=(
    "https://example.com/file2.mp3"
    )

    # Number of retries on failure
    RETRY_LIMIT=3

    # Define the resumable downloader function
    # Resumable downloader function
    download_file() {
    url="$1"
    fileName=$(basename "$url")
    retries=0

    echo "Starting download: $fileName from $url"

    while [ $retries -lt $RETRY_LIMIT ]; do
    if [ -f "$fileName" ]; then
    from=$(stat -c%s "$fileName")
    echo "Resuming download from byte $from..."
    curl -L --progress-bar -C $from -o "$fileName" "$url"
    else
    curl -L --progress-bar -o "$fileName" "$url"
    fi

    if [ $? -eq 0 ]; then
    echo "Download finished: $fileName"
    return 0
    else
    retries=$((retries + 1))
    echo "Failed to download $fileName. Retrying ($retries/$RETRY_LIMIT)..."
    sleep 2 # Optional sleep before retrying
    fi
    done

    echo "Download failed after $RETRY_LIMIT attempts: $fileName"
    return 1
    fileName=$(basename "$url") # Fixed the missing closing parenthesis

    # Get the file size directly using curl's -I (HEAD request) and grep
    totalSize=$(curl -sI "$url" | grep -i Content-Length | awk '{print $2}' | tr -d '\r')
    [ -z "$totalSize" ] && totalSize="unknown"

    echo "File: $fileName | Total Size: $totalSize bytes"

    if [ -f "$fileName" ]; then
    from=$(stat -c%s "$fileName")
    echo "Resuming from byte $from..."
    curl -L --progress-bar -C $from -o "$fileName" "$url"
    else
    curl -L --progress-bar -o "$fileName" "$url"
    fi

    echo "Finished: $fileName"
    }

    # Iterate over the URLs and download the files (in parallel if needed)
    # Download each file
    for url in "${urls[@]}"; do
    download_file "$url" &
    download_file "$url"
    echo
    done
    # Wait for all background jobs to finish
    wait

    echo "All downloads complete."
  2. smarteist revised this gist Sep 6, 2024. 1 changed file with 34 additions and 12 deletions.
    46 changes: 34 additions & 12 deletions downloader.sh
    Original file line number Diff line number Diff line change
    @@ -2,27 +2,49 @@

    # Define the list of URLs
    urls=(
    "https://.mkv"
    "https://.mkv"
    "https://example.com/file1.mkv"
    "https://example.com/file2.mp3"
    )

    # Number of retries on failure
    RETRY_LIMIT=3

    # Define the resumable downloader function
    download_file() {
    url="$1"
    fileName=$(basename "$url")
    retries=0

    echo "Starting download: $fileName from $url"

    if [ -f "$fileName" ]; then
    from=$(stat -c%s "$fileName")
    curl -L -C $from -o "$fileName" "$url"
    else
    curl -L -o "$fileName" "$url"
    fi
    while [ $retries -lt $RETRY_LIMIT ]; do
    if [ -f "$fileName" ]; then
    from=$(stat -c%s "$fileName")
    echo "Resuming download from byte $from..."
    curl -L --progress-bar -C $from -o "$fileName" "$url"
    else
    curl -L --progress-bar -o "$fileName" "$url"
    fi

    echo "Download finished: $fileName"
    if [ $? -eq 0 ]; then
    echo "Download finished: $fileName"
    return 0
    else
    retries=$((retries + 1))
    echo "Failed to download $fileName. Retrying ($retries/$RETRY_LIMIT)..."
    sleep 2 # Optional sleep before retrying
    fi
    done

    echo "Download failed after $RETRY_LIMIT attempts: $fileName"
    return 1
    }

    # Iterate over the URLs and download the files
    # Iterate over the URLs and download the files (in parallel if needed)
    for url in "${urls[@]}"; do
    download_file "$url"
    echo
    download_file "$url" &
    done
    # Wait for all background jobs to finish
    wait

    echo "All downloads complete."
  3. smarteist revised this gist Feb 2, 2024. 1 changed file with 1 addition and 0 deletions.
    1 change: 1 addition & 0 deletions downloader.sh
    Original file line number Diff line number Diff line change
    @@ -3,6 +3,7 @@
    # Define the list of URLs
    urls=(
    "https://.mkv"
    "https://.mkv"
    )

    # Define the resumable downloader function
  4. smarteist created this gist Feb 2, 2024.
    27 changes: 27 additions & 0 deletions downloader.sh
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,27 @@
    #!/bin/bash
    # Download each URL in `urls` to the current directory, resuming any
    # partially-downloaded file instead of starting over.
    set -u

    # List of URLs to download (placeholder — fill in real URLs).
    urls=(
    "https://.mkv"
    )

    #######################################
    # Resumable downloader.
    # Arguments: $1 - URL to fetch; saved as its basename in $PWD
    # Outputs:   progress/status messages (errors go to stderr)
    # Returns:   0 on success, curl's exit status on failure
    #######################################
    download_file() {
      local url="$1"
      local fileName
      fileName=$(basename "$url")

      # -f: fail on HTTP errors so a 404/500 page is not saved as the file.
      # -C -: curl inspects the existing file size and resumes automatically
      #       (portable; avoids the GNU-only `stat -c%s`).
      if [ -f "$fileName" ]; then
        echo "Resuming download: $fileName"
        curl -f -L -C - -o "$fileName" "$url"
      else
        curl -f -L -o "$fileName" "$url"
      fi

      # curl is the last command of whichever branch ran, so $? is its status.
      local status=$?
      if [ "$status" -eq 0 ]; then
        echo "Download finished: $fileName"
      else
        echo "Download failed (curl exit $status): $fileName" >&2
      fi
      return "$status"
    }

    # Iterate over the URLs and download the files
    for url in "${urls[@]}"; do
      download_file "$url"
      echo
    done