Add my-scripts and docker folders
.gitignore  (vendored, Normal file, 1 line added)
@@ -0,0 +1 @@
**/volumes
docker/composes/linuxserver/piwigo/docker-compose.yml  (Normal file, 41 lines added)
@@ -0,0 +1,41 @@
---
version: "2.1"
services:
  piwigo:
    image: lscr.io/linuxserver/piwigo:13.8.0
    container_name: piwigo-app
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Etc/UTC
    volumes:
      - ./volumes/piwigo/config:/config
      - ./volumes/piwigo/gallery:/gallery
    ports:
      - 80:80
    restart: unless-stopped
  nginx:
    image: linuxserver/nginx:1.24.0
    container_name: piwigo-nginx
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Etc/UTC
    volumes:
      - ./volumes/nginx/config:/config
    ports:
      - 81:80
      - 444:443
    restart: unless-stopped
  mariadb:
    image: linuxserver/mariadb:10.11.5
    container_name: piwigo-mariadb
    environment:
      - PUID=1000
      - PGID=1000
      - MYSQL_ROOT_PASSWORD=admin
      - TZ=Etc/UTC
    volumes:
      - ./volumes/mariadb/config:/config
    ports:
      - 3306:3306
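A minimal usage sketch for this stack (assuming it is started from the compose file's own directory so the relative ./volumes paths resolve there; MYSQL_ROOT_PASSWORD=admin is clearly a placeholder to change). The ./volumes directories it creates are exactly what the new .gitignore rule (**/volumes) excludes:

    cd docker/composes/linuxserver/piwigo
    docker compose up -d              # starts piwigo-app, piwigo-nginx and piwigo-mariadb
    docker compose logs -f piwigo     # follow the Piwigo container's logs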
my-scripts/shell/dockerStart.sh  (Executable file, 35 lines added)
@@ -0,0 +1,35 @@
#!/bin/bash

# Function to display usage information
# show_help() {
#     echo "Usage: $0 [options]"
#     echo "Options:"
#     echo "  --compose Start compose containers"
#     echo "  --help Display this help message"
#     exit 1
# }

# docker without docker desktop yippie!!!
# source: https://gbbigardi.medium.com/wsl-2-arch-docker-e-um-pouco-mais-do-meu-ambiente-de-desenvolvimento-42adc48368d4

if [ ! -S "$DOCKER_SOCK" ]; then
    mkdir -pm o=,ug=rwx "$DOCKER_DIR"
    chgrp docker "$DOCKER_DIR"
    sudo /mnt/c/Windows/System32/wsl.exe -d $DOCKER_DISTRO sh -c "nohup sudo -b dockerd < /dev/null > $DOCKER_DIR/dockerd.log 2>&1"
fi

# Check for options
# case "$1" in
#     --compose)
#         sleep 2
#         cd ~/documents/repos/zaks-clouds-manager/
#         docker compose up -d
#         ;;
#     --help)
#         show_help
#         ;;
#     *)
#         echo "Unknown option: $1"
#         show_help
#         ;;
# esac
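Note that DOCKER_SOCK, DOCKER_DIR and DOCKER_DISTRO are never defined in this script, so it relies on them already being set in the environment. A minimal sketch of one way to supply them, reusing the values from my-scripts/shell/runDockerWsl.sh added later in this commit (the placement is hypothetical, not part of the committed file):

    # hypothetical addition near the top of dockerStart.sh
    DOCKER_DISTRO="Arch"
    DOCKER_DIR=/mnt/wsl/shared-docker
    DOCKER_SOCK="$DOCKER_DIR/docker.sock"
    export DOCKER_HOST="unix://$DOCKER_SOCK"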
my-scripts/shell/ffmpeg_get_video_resolution.sh  (Executable file, 10 lines added)
@@ -0,0 +1,10 @@
#!/bin/bash

if [ $# -ne 1 ]; then
    echo "Usage: $0 <input_file>"
    exit 1
fi

input_file="$1"

ffprobe -v error -select_streams v:0 -show_entries stream=width,height -of csv=s=x:p=0 "$input_file"
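A quick usage sketch (the filename is hypothetical; the csv=s=x:p=0 format prints width and height joined by an "x"):

    ./ffmpeg_get_video_resolution.sh clip.mp4
    # example output: 1920x1080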
my-scripts/shell/ffmpeg_helper.sh  (Executable file, 128 lines added)
@@ -0,0 +1,128 @@
#!/bin/bash

show_help() {
    echo "Usage: $0 [--input|-i input_file] [--crop|-c width:height] [--trim-start start] [--trim-end end] [--crf crf_value] [--fps new_fps] [--scale640] [--merge-audio] [--remove-audio] --output|-o output_file"
    echo "Options:"
    echo "  --input|-i Specify the input video file (required)."
    echo "  --crop|-c Specify the width:height for video cropping."
    echo "  --trim-start Specify the start timestamp for video trimming."
    echo "  --trim-end Specify the end timestamp for video trimming."
    echo "  --crf Specify the CRF value for video compressing."
    echo "  --fps Specify the new frames per second for the video."
    echo "  --scale640 Scale the video to 640x320."
    echo "  --merge-audio Merge all audio tracks into the main track."
    echo "  --remove-audio Suppress all audio and remove audio tracks."
    echo "  --output|-o Specify the output filename (required)."
}

remove_audio() {
    input_file="$1"
    output_file="$2"

    ffmpeg -i "$input_file" -an "$output_file"
}

if ! command -v ffmpeg &> /dev/null; then
    echo "Error: 'ffmpeg' command not found. Please install ffmpeg."
    exit 1
fi

input_file=""
crop=""
trim_start=""
trim_end=""
crf=""
new_fps=""
scale640=false
merge_audio=false
remove_audio=false
output_file=""

while [ "$#" -gt 0 ]; do
    case "$1" in
        --help|-h) show_help; exit ;;
        --input|-i) input_file="$2"; shift 2 ;;
        --crop|-c) crop="$2"; shift 2 ;;
        --trim-start) trim_start="$2"; shift 2 ;;
        --trim-end) trim_end="$2"; shift 2 ;;
        --crf) crf="$2"; shift 2 ;;
        --fps) new_fps="$2"; shift 2 ;;
        --scale640) scale640=true; shift ;;
        --merge-audio) merge_audio=true; shift ;;
        --remove-audio) remove_audio=true; shift ;;
        --output|-o) output_file="$2"; shift 2 ;;
        *) shift ;;
    esac
done

if [ -z "$input_file" ]; then
    echo "Error: Input filename (--input or -i) is required."
    exit 1
fi

if [ -z "$output_file" ]; then
    echo "Error: Output filename (--output or -o) is required."
    exit 1
fi

if [ -z "$crop" ] && [ -z "$trim_start" ] && [ -z "$trim_end" ] && [ -z "$crf" ] && [ -z "$new_fps" ] && [ "$merge_audio" = false ] && [ "$scale640" = false ] && [ "$remove_audio" = false ]; then
    echo "Error: At least one optional parameter is required."
    exit 1
fi

if [ -n "$trim_start" ] && [ -z "$trim_end" ]; then
    echo "Error: If using --trim-start, you must also specify --trim-end."
    exit 1
fi

if [ -n "$trim_end" ] && [ -z "$trim_start" ]; then
    echo "Error: If using --trim-end, you must also specify --trim-start."
    exit 1
fi

if [ -n "$crop" ] && [ "$scale640" = true ]; then
    echo "Error: Cannot use both --crop and --scale640 together."
    exit 1
fi

if [ "$merge_audio" = true ] && [ "$remove_audio" = true ]; then
    echo "Error: Cannot use both --merge-audio and --remove-audio together."
    exit 1
fi

ffmpeg_command="ffmpeg -i \"$input_file\""

if [ "$remove_audio" = true ]; then
    ffmpeg_command+=" -an"
fi

if [ -n "$crop" ]; then
    ffmpeg_command+=" -vf crop=$crop"
fi

if [ -n "$trim_start" ] && [ -n "$trim_end" ]; then
    ffmpeg_command+=" -ss $trim_start -to $trim_end"
fi

if [ -n "$crf" ]; then
    ffmpeg_command+=" -c:v libx264 -crf $crf"
fi

if [ -n "$new_fps" ]; then
    ffmpeg_command+=" -r $new_fps"
fi

if [ "$scale640" = true ]; then
    ffmpeg_command+=" -vf scale=640:320"
fi

if [ "$merge_audio" = true ]; then
    num_audio_streams=$(ffprobe -loglevel error -select_streams a -show_entries stream=codec_type -of csv=p=0 "$input_file" | wc -l)
    ffmpeg_command+=" -filter_complex amerge=inputs=$num_audio_streams"
fi

ffmpeg_command+=" \"$output_file\""

echo "Running ffmpeg command:"
echo "$ffmpeg_command"
eval "$ffmpeg_command"
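A usage sketch for the helper (filenames and values are hypothetical): trim a clip to the 5s to 20s range, re-encode at CRF 23 and cap it at 30 fps:

    ./ffmpeg_helper.sh -i input.mp4 --trim-start 00:00:05 --trim-end 00:00:20 --crf 23 --fps 30 -o output.mp4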
my-scripts/shell/get_rewritteninrust_package_names.sh  (Executable file, 76 lines added)
@@ -0,0 +1,76 @@
#!/bin/bash

# Step 1: Get HTML content of the page
html_content=$(curl -s https://zaiste.net/posts/shell-commands-rust/)

# Step 2: Extract href attribute values of "GitHub" links
github_links=$(echo "$html_content" | grep -oE '<a [^>]+>GitHub<\/a>' | grep -oE 'href="[^"]+"' | sed 's/href="//;s/"$//')

# Step 3: Transform URLs
transformed_urls=()
for link in $github_links; do
    if [[ "$link" != *"github"* ]]; then
        continue
    fi

    transformed_url=$(echo "$link" | sed 's/github.com/raw.githubusercontent.com/;s#/$##')
    transformed_urls+=("$transformed_url/master/Cargo.toml")
done

echo "Transformed URLs:"
for url in "${transformed_urls[@]}"; do
    echo "$url"
done

# Step 4: Test URLs and gather results
existing_urls=()
non_existing_urls=()
for url in "${transformed_urls[@]}"; do
    response=$(curl -s --head -w %{http_code} "$url" -o /dev/null)
    if [ "$response" -eq 200 ]; then
        existing_urls+=("$url")
    else
        non_existing_urls+=("$url")
    fi
done

# Step 5 and 6: Search for "name" property and categorize URLs
successful_search=()
unsuccessful_search=()
for url in "${existing_urls[@]}"; do
    html_content=$(curl -s "$url")
    if grep -qE "\[package\]" <<< "$html_content"; then
        # name_line=$(grep -A 1 "\[package\]" <<< "$html_content" | grep -E '^name = "[^"]+"' | sed 's/name = "//;s/"$//')
        name_line=$(awk -F'"' '/^\[package\]/ { in_package = 1 } in_package && /name =/ { print $2; exit }' <<< "$html_content")
        echo "name_line: ${name_line}"
        if [[ "$name_line" != "[package]" ]]; then
            successful_search+=("$url $name_line")
        else
            unsuccessful_search+=("$url")
        fi
    fi
done

# Step 7: Print results
echo -e "\n\n\nExisting URLs:"
for url in "${existing_urls[@]}"; do
    echo "$url"
done

echo -e "\n\n\nNon-existing URLs:"
for url in "${non_existing_urls[@]}"; do
    echo "$url"
done

# echo -e "\n\n\nSuccessful search for 'name' property:"
echo -ne "\n\n\ncargo install"
for result in "${successful_search[@]}"; do
    # url=$(echo "$result" | cut -d' ' -f1)
    name=$(echo "$result" | cut -d' ' -f2)
    echo -n " $name"
done

# echo -e "\n\n\nUnsuccessful search for 'name' property:"
# for url in "${unsuccessful_search[@]}"; do
#     echo "$url"
# done
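One caveat: the generated URLs hard-code the master branch, so repositories whose default branch is main will land in non_existing_urls even though a Cargo.toml exists. A minimal fallback sketch (hypothetical, not part of the committed script) that also tries main before giving up on a URL:

    for url in "${transformed_urls[@]}"; do
        # try the master branch first, then fall back to main
        for candidate in "$url" "${url%/master/Cargo.toml}/main/Cargo.toml"; do
            response=$(curl -s --head -w '%{http_code}' "$candidate" -o /dev/null)
            if [ "$response" -eq 200 ]; then
                existing_urls+=("$candidate")
                break
            fi
        done
    done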
my-scripts/shell/list_dockerhub_image_tags.sh  (Executable file, 48 lines added)
@@ -0,0 +1,48 @@
#!/bin/bash

check_command_availability() {
    if ! command -v "$1" &> /dev/null; then
        echo "Error: '$1' command not found. Please install $1."
        exit 1
    fi
}

if [ -z "$1" ]; then
    echo "Usage: $0 <image> [-s] [-o <output_file>]"
    exit 1
fi

IMAGE="$1"
SORT_FLAG=""
OUTPUT_FILE=""

while [ "$#" -gt 0 ]; do
    case "$1" in
        -s) SORT_FLAG="-s"; shift ;;
        -o) OUTPUT_FILE="$2"; shift 2 ;;
        *) shift ;;
    esac
done

# Check command availability
check_command_availability skopeo
check_command_availability jq

# Store the result of skopeo command
skopeo_result=$(skopeo inspect docker://docker.io/${IMAGE} | jq -r '.RepoTags[]')

# Determine if sorting is needed
sort_command="cat"
if [ "$SORT_FLAG" = "-s" ]; then
    sort_command="sort"
fi

# Pipe the result through the sort command (conditionally)
sorted_result=$(echo "$skopeo_result" | $sort_command)

# Determine if output file is needed
if [ -z "$OUTPUT_FILE" ]; then
    echo "$sorted_result"
else
    echo "$sorted_result" > "$OUTPUT_FILE"
fi
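A usage sketch, listing tags for one of the images used in the compose file above and writing the sorted list to a hypothetical output file:

    ./list_dockerhub_image_tags.sh linuxserver/mariadb -s -o mariadb-tags.txt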
my-scripts/shell/rclone-syncs.sh  (Executable file, 5 lines added)
@@ -0,0 +1,5 @@
#!/bin/bash

SYNCS_PATH=/mnt/e/Windows/Documents/Sync
rclone sync -P $SYNCS_PATH/google-drive google-drive-zakdragonbites:
rclone sync -P $SYNCS_PATH/nextcloud nextcloud-renner-cloud:
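Because rclone sync makes the destination match the source (including deletions), a --dry-run pass is a cheap safety check before the real run; a sketch using one of the remotes defined in this script:

    rclone sync -P --dry-run /mnt/e/Windows/Documents/Sync/google-drive google-drive-zakdragonbites: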
my-scripts/shell/runDockerWsl.sh  (Executable file, 10 lines added)
@@ -0,0 +1,10 @@
# docker
DOCKER_DISTRO="Arch"
DOCKER_DIR=/mnt/wsl/shared-docker
DOCKER_SOCK="$DOCKER_DIR/docker.sock"
export DOCKER_HOST="unix://$DOCKER_SOCK"
if [ ! -S "$DOCKER_SOCK" ]; then
    mkdir -pm o=,ug=rwx "$DOCKER_DIR"
    chgrp docker "$DOCKER_DIR"
    /mnt/c/Windows/System32/wsl.exe -d $DOCKER_DISTRO sh -c "nohup sudo -b dockerd < /dev/null > $DOCKER_DIR/dockerd.log 2>&1"
fi
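Since the useful side effect here is the exported DOCKER_HOST, this snippet presumably needs to be sourced rather than executed so the variable survives in the calling shell; a sketch using the repository path mentioned in dockerStart.sh (the exact location is an assumption):

    # e.g. in ~/.bashrc
    source ~/documents/repos/zaks-clouds-manager/my-scripts/shell/runDockerWsl.sh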