@gphg
Last active October 17, 2025 06:15
Revisions

  1. gphg revised this gist Oct 17, 2025. 1 changed file with 18 additions and 14 deletions.
    32 changes: 18 additions & 14 deletions download_x_video.sh
    @@ -1,40 +1,46 @@
    -#!/usr/bin/bash
    +#!/bin/bash
    +
    +# Exit immediately if a command exits with a non-zero status or an unset variable is used.
     set -euo pipefail

     # A script to convert Twitter/X status URLs to the fixupx.com direct video link format
     # and initiate a download using wget.

     # Function: convert_x_link
    -# Purpose: Takes a full Twitter/X status URL and converts it to a fixupx.com
    -# direct video link by extracting the status ID.
    +# Purpose: Takes a full Twitter/X status URL, strips tracking parameters, and
    +# converts it to a fixupx.com direct video link by extracting the status ID.
     # Arguments:
    -# $1 - The full URL (e.g., https://x.com/user/status/1971608893710361043)
    +# $1 - The full URL (e.g., https://x.com/user/status/12345?t=tracking)
     # Output: The converted fixupx URL.
     convert_x_link() {
         local url="$1"

         # 1. Check if the input is empty
         if [[ -z "$url" ]]; then
             echo ""
             return
         fi

    -    # 2. Extract the status ID using Bash parameter expansion.
    +    # 2. Strip any query parameters (everything from the first '?' to the end).
    +    # This uses Bash parameter expansion: '%%' removes the longest match from the end.
    +    local clean_url="${url%%\?*}"
    +
    +    # 3. Extract the status ID using Bash parameter expansion.
         # This removes the longest matching pattern from the beginning of the string up to the last '/'.
         # Example: https://x.com/.../status/12345 becomes 12345
    -    local status_id="${url##*/}"
    +    local status_id="${clean_url##*/}"

    -    # 3. Basic validation for the extracted ID (should be purely numeric and long)
    +    # 4. Basic validation for the extracted ID (should be purely numeric and long)
         if ! [[ "$status_id" =~ ^[0-9]{15,}$ ]]; then
             # Print error to standard error stream
             echo "Error: Skipped invalid status link: $url (ID: $status_id)" >&2
             echo ""
             return
         fi

    -    # 4. Construct the new target URL for wget
    +    # 5. Construct the new target URL for wget
         local fixupx_url="https://fixupx.com/i/status/${status_id}.mp4"

         # Return the new URL
         echo "$fixupx_url"
     }
    @@ -47,14 +53,13 @@ if [[ $# -eq 0 ]]; then
         exit 1
     fi

    -# Array to store the final converted URLs. We must use an array for safe passing to exec.
    -# The user requested '$ARRAY', but the variable name used here is DOWNLOAD_URLS for clarity.
    +# Array to store the final converted URLs.
     DOWNLOAD_URLS=()

     # Loop through all command-line arguments passed to the script ($@)
     for link in "$@"; do
         converted_link=$(convert_x_link "$link")

         # Only add successful conversions to the array
         if [[ -n "$converted_link" ]]; then
             DOWNLOAD_URLS+=("$converted_link")
    @@ -75,5 +80,4 @@ echo "Initiating 'wget' to download..."
     # Execute wget with the array of converted URLs.
     # 'exec' replaces the current shell process with the wget process.
     # The quoting "${DOWNLOAD_URLS[@]}" ensures each element is passed as a separate, safe argument.
    -# This fulfills the user's request to pass the array to wget.
     exec wget "${DOWNLOAD_URLS[@]}"
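
    The two parameter expansions introduced in this revision can be traced on a sample link. The snippet below is a minimal, illustrative sketch: the user name, status ID, and query string are made-up placeholders, not values from the gist.

    #!/bin/bash
    # Illustrative only: demonstrates the '%%' and '##' expansions the revised script relies on.
    url="https://x.com/someuser/status/123456789012345678?t=abc&s=19"   # hypothetical input

    # '${url%%\?*}' removes the longest suffix starting at the first '?',
    # which strips tracking query parameters.
    clean_url="${url%%\?*}"          # -> https://x.com/someuser/status/123456789012345678

    # '${clean_url##*/}' removes the longest prefix ending at the last '/',
    # leaving only the numeric status ID.
    status_id="${clean_url##*/}"     # -> 123456789012345678

    # Same validation the script applies: digits only, at least 15 of them.
    if [[ "$status_id" =~ ^[0-9]{15,}$ ]]; then
        echo "https://fixupx.com/i/status/${status_id}.mp4"
    fi
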
  2. gphg created this gist Oct 15, 2025.
    79 changes: 79 additions & 0 deletions download_x_video.sh
    @@ -0,0 +1,79 @@
    #!/usr/bin/bash
    set -euo pipefail

    # A script to convert Twitter/X status URLs to the fixupx.com direct video link format
    # and initiate a download using wget.

    # Function: convert_x_link
    # Purpose: Takes a full Twitter/X status URL and converts it to a fixupx.com
    # direct video link by extracting the status ID.
    # Arguments:
    # $1 - The full URL (e.g., https://x.com/user/status/1971608893710361043)
    # Output: The converted fixupx URL.
    convert_x_link() {
        local url="$1"

        # 1. Check if the input is empty
        if [[ -z "$url" ]]; then
            echo ""
            return
        fi

        # 2. Extract the status ID using Bash parameter expansion.
        # This removes the longest matching pattern from the beginning of the string up to the last '/'.
        # Example: https://x.com/.../status/12345 becomes 12345
        local status_id="${url##*/}"

        # 3. Basic validation for the extracted ID (should be purely numeric and long)
        if ! [[ "$status_id" =~ ^[0-9]{15,}$ ]]; then
            # Print error to standard error stream
            echo "Error: Skipped invalid status link: $url (ID: $status_id)" >&2
            echo ""
            return
        fi

        # 4. Construct the new target URL for wget
        local fixupx_url="https://fixupx.com/i/status/${status_id}.mp4"

        # Return the new URL
        echo "$fixupx_url"
    }

    # --- Main Script Execution ---

    # Check if any arguments were provided
    if [[ $# -eq 0 ]]; then
        echo "Usage: $0 <x_link_1> [x_link_2]..."
        exit 1
    fi

    # Array to store the final converted URLs. We must use an array for safe passing to exec.
    # The user requested '$ARRAY', but the variable name used here is DOWNLOAD_URLS for clarity.
    DOWNLOAD_URLS=()

    # Loop through all command-line arguments passed to the script ($@)
    for link in "$@"; do
        converted_link=$(convert_x_link "$link")

        # Only add successful conversions to the array
        if [[ -n "$converted_link" ]]; then
            DOWNLOAD_URLS+=("$converted_link")
        fi
    done

    # Check if any valid URLs were collected
    if [[ ${#DOWNLOAD_URLS[@]} -eq 0 ]]; then
        echo "Aborting: No valid X/Twitter status links were converted."
        exit 1
    fi

    echo "--- Converted URLs Ready for Download ---"
    echo "Target URLs: ${DOWNLOAD_URLS[@]}"
    echo ""
    echo "Initiating 'wget' to download..."

    # Execute wget with the array of converted URLs.
    # 'exec' replaces the current shell process with the wget process.
    # The quoting "${DOWNLOAD_URLS[@]}" ensures each element is passed as a separate, safe argument.
    # This fulfills the user's request to pass the array to wget.
    exec wget "${DOWNLOAD_URLS[@]}"
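
    A usage sketch for the script above (the status links are placeholders; under these assumptions the final exec replaces the shell with a single wget call):

    # Hypothetical invocation with two status links:
    ./download_x_video.sh \
        "https://x.com/someuser/status/111111111111111111" \
        "https://x.com/otheruser/status/222222222222222222"

    # With those inputs, the exec line at the end amounts to:
    #   wget https://fixupx.com/i/status/111111111111111111.mp4 \
    #        https://fixupx.com/i/status/222222222222222222.mp4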