• ☆ Yσɠƚԋσʂ ☆@lemmygrad.mlOP
    link
    fedilink
    English
    arrow-up
    6
    ·
    4 days ago

    A script to download all the images, courtesy of DeepSeek :)

    #!/usr/bin/env bash
    # Script to download multiple URLs from a text file with improved line handling
    # Usage: ./download_urls.sh urls.txt [output_directory]

    # Fail fast: abort on unhandled errors, unset variables, and pipeline failures.
    set -euo pipefail

    # Check if input file is provided (${1:-} keeps the test safe under set -u)
    if [[ -z "${1:-}" ]]; then
        echo "Error: Please provide a text file containing URLs" >&2
        echo "Usage: $0 <input_file> [output_directory]" >&2
        exit 1
    fi

    input_file="$1"
    output_dir="${2:-./downloads}"

    # Check if input file exists and is a regular file
    if [[ ! -f "$input_file" ]]; then
        echo "Error: Input file '$input_file' not found" >&2
        exit 1
    fi

    # Create output directory if it doesn't exist
    mkdir -p "$output_dir"
    
    # Read and process valid URLs into an array.
    # The `|| [[ -n "$line" ]]` clause keeps the last line even when the file
    # has no trailing newline.
    urls=()
    while IFS= read -r line || [[ -n "$line" ]]; do
        # Remove CR characters (Windows line endings), then trim leading and
        # trailing whitespace -- pure parameter expansion, no per-line
        # echo|sed|tr subprocesses.
        line="${line//$'\r'/}"
        trimmed_line="${line#"${line%%[![:space:]]*}"}"                  # ltrim
        trimmed_line="${trimmed_line%"${trimmed_line##*[![:space:]]}"}"  # rtrim

        # Skip lines that are empty after trimming
        [[ -z "$trimmed_line" ]] && continue

        # Keep only http(s) URLs; report anything else on stderr
        if [[ "$trimmed_line" =~ ^https?:// ]]; then
            urls+=("$trimmed_line")
        else
            echo "Skipping invalid URL: $trimmed_line" >&2
        fi
    done < "$input_file"

    total_urls=${#urls[@]}

    if [[ $total_urls -eq 0 ]]; then
        echo "Error: No valid URLs found in input file" >&2
        exit 1
    fi
    
    echo "Starting download of $total_urls files to $output_dir"
    current=1

    # Derive a local filename for a URL: drop any query string and fragment
    # (so "img.jpg?format=webp" saves as "img.jpg"), take the last path
    # component, and fall back to a unique generated name when nothing
    # usable remains.
    # Arguments: $1 - URL, $2 - current index (used in the fallback name)
    # Outputs:   filename on stdout
    derive_filename() {
        local stripped="${1%%\?*}"   # drop query string
        stripped="${stripped%%#*}"   # drop fragment
        local name
        name=$(basename "$stripped")
        if [[ -z "$name" || "$name" == "/" || "$name" == "." ]]; then
            name="file_$(date +%s%N)_${2}.download"
        fi
        printf '%s\n' "$name"
    }

    # Download each URL from the array
    for url in "${urls[@]}"; do
        filename=$(derive_filename "$url" "$current")

        echo "[$current/$total_urls] Downloading $url"

        # --fail makes curl return non-zero on HTTP errors so the partial or
        # error-page body can be detected and removed.
        if ! curl -L --progress-bar --fail "$url" -o "$output_dir/$filename"; then
            echo "Warning: Failed to download $url" >&2
            rm -f "$output_dir/$filename"   # -f is already silent when the file is missing
        fi

        ((current++))
    done

    echo "Download complete. Files saved to $output_dir"