#!/bin/bash
# Update gallery-dl downloads for every URL found in todo.md.
# Flags:
#   --abort-on-found|-a      stop each gallery once an already-downloaded file is hit
#   --parallel|-p            run downloads as background jobs (waits for all at the end)
#   --custom-grep-search|-c  only process URLs containing the given substring
set -u

show_help() {
  echo "Usage: $0 [--abort-on-found|-a] [--parallel|-p] [--custom-grep-search|-c <string>]"
  echo "Options:"
  echo "  --abort-on-found|-a (optional) The script will update each gallery download until it finds an already downloaded file."
  echo "  --parallel|-p (optional) The script will update each gallery download in parallel, with background jobs."
  echo "  --custom-grep-search|-c <string> (optional) Includes a string to filter the URLs."
}

should_abort_on_found=false
should_parallel_download=false
custom_grep_search=''

while [ "$#" -gt 0 ]; do
  case "$1" in
    --help|-h) show_help; exit ;;
    --abort-on-found|-a) should_abort_on_found=true; shift ;;
    --parallel|-p) should_parallel_download=true; shift ;;
    --custom-grep-search|-c)
      # Fail loudly instead of silently shifting past a missing value.
      custom_grep_search="${2:?--custom-grep-search requires an argument}"
      shift 2
      ;;
    *) shift ;;
  esac
done

gallery_dl_path=/mnt/e/home/Documents/data-hoarding
cd "$gallery_dl_path" || { echo "cannot cd to $gallery_dl_path" >&2; exit 1; }

# Extract URLs from todo.md, skipping lines that mention "filter" (case-insensitive).
# mapfile keeps one URL per array element without word-splitting or globbing.
mapfile -t urls < <(
  grep -iv "filter" "$gallery_dl_path/todo.md" \
    | grep -Eo "(http|https)://[a-zA-Z0-9./?~?=_%:-]*"
)

# Optionally keep only URLs containing the custom search string.
if [ -n "$custom_grep_search" ]; then
  filtered_urls=()
  for url in "${urls[@]}"; do
    if [[ "$url" == *"$custom_grep_search"* ]]; then
      filtered_urls+=("$url")
    fi
  done
  urls=("${filtered_urls[@]}")
fi

# Build the gallery-dl argument list as an array — no eval, no string
# concatenation, so URLs with shell metacharacters cannot inject commands.
gdl_args=()
if $should_abort_on_found; then
  gdl_args+=(--abort 1)
fi

for url in "${urls[@]}"; do
  if $should_parallel_download; then
    gallery-dl "${gdl_args[@]}" "$url" &
  else
    gallery-dl "${gdl_args[@]}" "$url"
  fi
done

# Reap all background downloads before exiting in parallel mode.
if $should_parallel_download; then
  wait
fi