Files
dotfiles/scripts/shell/gdl-update-todo.sh
2024-06-20 05:13:56 -03:00

89 lines
1.9 KiB
Bash
Executable File

#!/bin/bash
# TODO / WARNING: this script is fragile and insecure in how it builds and launches background download jobs.
# Print CLI usage and the available options to stdout.
show_help() {
  printf '%s\n' \
    "Usage: $0 [--no-abort-on-found] [--no-parallel] [--custom-grep-search|-c <string>]" \
    "Options:" \
    " --no-abort-on-found (optional) The script will not update each gallery download until it finds an already downloaded file." \
    " --no-parallel (optional) The script will not update each gallery download in parallel, with background jobs." \
    " --custom-grep-search|-c (optional) Includes a string to filter the URLs."
}
# Defaults: abort each gallery at the first already-downloaded file, and run
# the downloads in parallel as background jobs.
should_abort_on_found=true
should_parallel_download=true
custom_grep_search=''
while [ "$#" -gt 0 ]; do
  case "$1" in
    --help|-h) show_help; exit ;;
    # These two flags are documented in show_help, so the parser must honor
    # them (they were previously commented out and silently ignored).
    --no-abort-on-found) should_abort_on_found=false; shift ;;
    --no-parallel) should_parallel_download=false; shift ;;
    --custom-grep-search|-c)
      # Guard the missing-argument case: 'shift 2' with only one argument
      # left does not shift at all, which would loop forever.
      [ "$#" -ge 2 ] || { echo "Error: $1 requires an argument" >&2; exit 1; }
      custom_grep_search="$2"; shift 2 ;;
    *) shift ;;  # unknown arguments are ignored
  esac
done
# Directory that holds todo.md and receives the downloads.
gallery_dl_path=/mnt/e/home/Documents/data-hoarding
# Fail fast if the data directory is unavailable (SC2164); downloads would
# otherwise land in whatever directory the script was started from.
cd "$gallery_dl_path" || { echo "Error: cannot cd to $gallery_dl_path" >&2; exit 1; }
# Collect every http(s) URL from todo.md, skipping lines that mention
# "filter" (case-insensitive). mapfile avoids the unquoted $(...) array
# assignment, which word-splits and glob-expands (SC2207).
mapfile -t urls < <(
  grep -iv "filter" -- "$gallery_dl_path/todo.md" \
    | grep -Eo "(http|https)://[a-zA-Z0-9./?~?=_%:-]*"
)
# If a custom search string was given, keep only the URLs that contain it.
if [ -n "$custom_grep_search" ]; then
  filtered_urls=()
  for url in "${urls[@]}"; do
    if [[ "$url" == *"$custom_grep_search"* ]]; then
      filtered_urls+=("$url")
    fi
  done
  # Quote the expansion (SC2068): URLs extracted above may contain '?',
  # which would otherwise be treated as a glob pattern.
  urls=("${filtered_urls[@]}")
  # debug — flip to true to inspect the filtered list and stop
  if false; then
    echo "Filtered URLs:"
    for url in "${filtered_urls[@]}"; do
      echo " $url"
    done
    exit 0
  fi
fi
# debug — flip to true to print every collected URL and stop before
# any download starts.
if false; then
  echo "URLs:"
  # Quoted expansion (SC2068): unquoted "${urls[@]}" would word-split and
  # glob-expand each URL.
  for url in "${urls[@]}"; do
    echo " $url"
  done
  exit 0
fi
# Build the base command as an argv array instead of a string. The previous
# version concatenated the URL (and a literal ' &') into a string and ran it
# through 'eval' — any shell metacharacter in a todo.md URL was executed as
# code. An array invoked directly cannot be injected into.
gdl_command_base=(gallery-dl)
if $should_abort_on_found; then
  # gallery-dl: stop a gallery as soon as 1 already-downloaded file is seen.
  gdl_command_base+=(--abort 1)
fi
# (The old 'sleep 5 &' was removed: it was backgrounded and never waited on,
# so it had no effect.)
for url in "${urls[@]}"; do
  # debug — flip to true to print each command before it runs
  if false; then
    printf 'Command: %s %s\n' "${gdl_command_base[*]}" "$url"
  fi
  if $should_parallel_download; then
    "${gdl_command_base[@]}" "$url" &
  else
    "${gdl_command_base[@]}" "$url"
  fi
done
# Barrier: wait for all background downloads so the script does not exit
# (and lose job status) while downloads are still running.
wait