#!/bin/bash
#
# A script to convert Twitter/X status URLs to the fixupx.com direct video link
# format and initiate a download using wget.

# Exit immediately if a command exits with a non-zero status, an unset variable
# is used, or any command in a pipeline fails.
set -euo pipefail
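
# Example invocation (the file name used here is only a placeholder for
# wherever this script is saved):
#   ./x-to-fixupx.sh "https://x.com/someuser/status/1234567890123456789?s=20"
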
# Function: convert_x_link
# Purpose: Takes a full Twitter/X status URL, strips tracking parameters, and
# converts it to a fixupx.com direct video link by extracting the status ID.
# Arguments:
# $1 - The full URL (e.g., https://x.com/user/status/12345?t=tracking)
# Output: The converted fixupx URL.
convert_x_link() {
    local url="$1"

    # 1. Check if the input is empty.
    if [[ -z "$url" ]]; then
        echo ""
        return
    fi

    # 2. Strip any query parameters (everything from the first '?' to the end).
    #    This uses Bash parameter expansion: '%%' removes the longest match
    #    from the end.
    local clean_url="${url%%\?*}"
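
    # For example (with a made-up user and ID), a link such as
    #   https://x.com/someuser/status/1234567890123456789?t=tracking&s=19
    # is reduced by the expansion above to
    #   https://x.com/someuser/status/1234567890123456789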

    # 3. Extract the status ID using Bash parameter expansion.
    #    This removes the longest matching pattern from the beginning of the
    #    string up to the last '/'.
    #    Example: https://x.com/.../status/12345 becomes 12345
    local status_id="${clean_url##*/}"

    # 4. Basic validation for the extracted ID (should be purely numeric and long).
    if ! [[ "$status_id" =~ ^[0-9]{15,}$ ]]; then
        # Print the error to the standard error stream.
        echo "Error: Skipped invalid status link: $url (ID: $status_id)" >&2
        echo ""
        return
    fi
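
    # As an illustration, a non-status link such as https://x.com/someuser
    # would leave status_id as "someuser" and fail the check above, so it is
    # skipped with a warning.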

    # 5. Construct the new target URL for wget.
    local fixupx_url="https://fixupx.com/i/status/${status_id}.mp4"

    # Return the new URL.
    echo "$fixupx_url"
}
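
# Illustration of what convert_x_link produces (the user name and ID below are
# made up for the example):
#   convert_x_link "https://x.com/someuser/status/1234567890123456789?s=20"
#   # -> https://fixupx.com/i/status/1234567890123456789.mp4
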
# --- Main Script Execution ---
# Check if any arguments were provided
if [[ $# -eq 0 ]]; then
    echo "Usage: $0 <x_link_1> [x_link_2]..." >&2
    exit 1
fi
# Array to store the final converted URLs.
DOWNLOAD_URLS=()
# Loop through all command-line arguments passed to the script ($@)
for link in "$@"; do
    converted_link=$(convert_x_link "$link")

    # Only add successful conversions to the array.
    if [[ -n "$converted_link" ]]; then
        DOWNLOAD_URLS+=("$converted_link")
    fi
done
# Check if any valid URLs were collected
if [[ ${#DOWNLOAD_URLS[@]} -eq 0 ]]; then
    echo "Aborting: No valid X/Twitter status links were converted." >&2
    exit 1
fi
echo "--- Converted URLs Ready for Download ---"
echo "Target URLs: ${DOWNLOAD_URLS[@]}"
echo ""
echo "Initiating 'wget' to download..."
# Execute wget with the array of converted URLs.
# 'exec' replaces the current shell process with the wget process.
# The quoting "${DOWNLOAD_URLS[@]}" ensures each element is passed as a separate, safe argument.
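# Note: by default wget names each file after the last path component of the
# URL, so the downloads should land in the current directory as <status_id>.mp4.
# (A download directory could be chosen with wget's -P option if desired; it is
# left out here to keep the invocation minimal.)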
exec wget "${DOWNLOAD_URLS[@]}"