#!/bin/bash
# Import a URL, a list of URLs, or a local file into the Stash import
# directory, then trigger a Stash metadata scan over the new files.

# Defaults; override these in a local .env file.
STASH_IMPORT_DIR=""
STASH_API_KEY=""
STASH_HOST=""
STASH_PORT=0
STASH_YTDLP_FORMAT="%(title)s [%(id)s].%(ext)s" # https://github.com/yt-dlp/yt-dlp#output-template
STASH_PRINT_PREFIX="Get: "
STASH_URL_FIXERS=()

source .env

# Group downloads into per-month subdirectories named after the current year and month (YYYYMM)
TARGET_DIR=$(readlink -m "$STASH_IMPORT_DIR/$(date +%Y%m)")
mkdir -p "$TARGET_DIR"

# download_file <url> <target dir> <yt-dlp output template>
# Direct links to image/video files are fetched with curl; everything else is
# handed to yt-dlp. donmai.us post pages are first resolved to the original media URL.
download_file() {
    FILE_URL="$1"
    extensions="(jpg|JPG|jpeg|JPEG|png|PNG|gif|GIF|mp4|MP4)"
    rgx_file="^.*\.$extensions$"
    rgx_filename="[A-Za-z0-9_]*\.$extensions"
    rgx_dbu='http(s?)://.*donmai.us.*/posts/'

    # donmai.us post page: scrape the page for the original ("__"-prefixed) media URL
    if [[ $FILE_URL =~ $rgx_dbu ]]; then
        FILE_URL=$(curl -s "$1" | grep -Eo "http(s?)://.*donmai.us.*/original/[A-Za-z0-9/_]*\.(jpg|jpeg|png|gif|mp4)" | grep '__' -m1)
    fi

    if [[ $FILE_URL =~ $rgx_file ]]; then
        # Direct file link: download with curl, keeping the remote filename
        echo "$STASH_PRINT_PREFIX$(echo "$FILE_URL" | grep -Eo "$rgx_filename")"
        curl -sO "$FILE_URL" --output-dir "$2/"
    else
        # Anything else: let yt-dlp handle it with the configured output template
        echo "$STASH_PRINT_PREFIX$FILE_URL"
        yt-dlp "$FILE_URL" -o "$2/$3"
    fi
}

rgx_url='^http(s?):\/\/[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b[-a-zA-Z0-9@:%_\+.~#?&\/\/=]*$'

if [[ $1 =~ $rgx_url ]]; then
    # Argument is a single URL
    download_file "$1" "$TARGET_DIR" "$STASH_YTDLP_FORMAT"
else
    isFile=$(file -0 "$1" | cut -d $'\0' -f2)
    case "$isFile" in
        (*text*)
            # Text file: treat it as a list of URLs, one per line
            echo "Reading list of $(wc -l "$1" | awk '{print $1}') URL(s)"
            while read -r p; do
                download_file "$p" "$TARGET_DIR" "$STASH_YTDLP_FORMAT"
            done < "$1"
            ;;
        (*)
            # Anything else: copy the local file straight into the import directory
            rsync "$1" "$TARGET_DIR/"
            ;;
    esac
fi

# Trigger a Stash metadata scan so the new files show up
echo "Updating Stash..."
curl -S -s -o /dev/null -X POST \
    -H "ApiKey: $STASH_API_KEY" \
    -H "Content-Type: application/json" \
    --data '{ "query": "mutation { metadataScan (input:{useFileMetadata: false})}" }' \
    "$STASH_HOST:$STASH_PORT/graphql"
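
# Illustrative invocations, matching the three argument branches above.
# The filename "stash-import.sh" and the example arguments are assumptions,
# not defined anywhere in this script:
#   ./stash-import.sh "https://example.com/watch?v=abc123"   # single URL -> download_file
#   ./stash-import.sh urls.txt                                # text file: one URL per line
#   ./stash-import.sh /path/to/local/video.mp4                # local file -> rsync into TARGET_DIR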