Updated Stasher to support multiple (one or more) URLs as arguments
This commit is contained in:
parent
70ec8c4e78
commit
99caef1de0
61
stasher.py
61
stasher.py
|
@ -39,6 +39,7 @@ def find_booru_artist(page_url):
|
||||||
# Clean up and format the artist name
|
# Clean up and format the artist name
|
||||||
artist_name = artist_name[0].strip()
|
artist_name = artist_name[0].strip()
|
||||||
artist_name = ''.join(c if c.isalnum() or c.isspace() else '_' for c in artist_name).lower().strip()
|
artist_name = ''.join(c if c.isalnum() or c.isspace() else '_' for c in artist_name).lower().strip()
|
||||||
|
artist_name = artist_name.replace(' ', '_')
|
||||||
|
|
||||||
return artist_name
|
return artist_name
|
||||||
|
|
||||||
|
@ -73,7 +74,6 @@ def download_file(file_url, download_dir, ytdlp_prefix):
|
||||||
artist = None
|
artist = None
|
||||||
if re.match(rgx_booru, file_url):
|
if re.match(rgx_booru, file_url):
|
||||||
artist = find_booru_artist(file_url)
|
artist = find_booru_artist(file_url)
|
||||||
print("Artist is:", artist)
|
|
||||||
file_url = re.search(rgx_booru_v2, requests.get(file_url).text).group(1).strip()
|
file_url = re.search(rgx_booru_v2, requests.get(file_url).text).group(1).strip()
|
||||||
|
|
||||||
if not file_url:
|
if not file_url:
|
||||||
|
@ -81,6 +81,8 @@ def download_file(file_url, download_dir, ytdlp_prefix):
|
||||||
|
|
||||||
if re.match(rgx_file, file_url):
|
if re.match(rgx_file, file_url):
|
||||||
print(STASH_PRINT_PREFIX, file_url)
|
print(STASH_PRINT_PREFIX, file_url)
|
||||||
|
#if artist:
|
||||||
|
#print("Artist is:", artist)
|
||||||
try:
|
try:
|
||||||
response = requests.get(file_url, stream=True)
|
response = requests.get(file_url, stream=True)
|
||||||
if response.status_code == 200:
|
if response.status_code == 200:
|
||||||
|
@ -118,23 +120,23 @@ def download_file(file_url, download_dir, ytdlp_prefix):
|
||||||
print(f"Failed to run yt-dlp command. Error: {e}")
|
print(f"Failed to run yt-dlp command. Error: {e}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def is_path_or_url(arg):
    """Return *arg* if it is an existing filesystem path or a URL, else None.

    URL detection uses the module-level ``url_pattern`` regex; path detection
    uses ``os.path.exists``, so a relative path is resolved against the
    current working directory.
    """
    # An existing file or directory path wins over URL matching: a local
    # path is checked first so a file named like a URL is treated as a file.
    if os.path.exists(arg):
        return arg

    # Fall back to the URL regex (defined at module level).
    # NOTE: no `global` statement is needed — reading a module-level name
    # never requires one; the old version only declared it while iterating
    # sys.argv itself.
    if re.match(url_pattern, arg):
        return arg

    # Neither an existing path nor a URL.
    return None
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
parser = argparse.ArgumentParser(description="Download files or update stash.")
|
parser = argparse.ArgumentParser(description="Download files or update stash.")
|
||||||
parser.add_argument("url_or_path", metavar="URL_or_path", nargs="?", help="URL or file path to download")
|
parser.add_argument("url_or_path", metavar="URL_or_path", nargs="+", help="URL or file path to download")
|
||||||
parser.add_argument("-o", "--overwrite", action="store_true", help="Overwrite existing files if present")
|
parser.add_argument("-o", "--overwrite", action="store_true", help="Overwrite existing files if present")
|
||||||
parser.add_argument("-u", "--update", action="store_true", help="Update stash")
|
parser.add_argument("-u", "--update", action="store_true", help="Update stash")
|
||||||
parser.add_argument("-n", "--no-update", action="store_true", help="Do not update stash")
|
parser.add_argument("-n", "--no-update", action="store_true", help="Do not update stash")
|
||||||
|
@ -150,24 +152,31 @@ if __name__ == "__main__":
|
||||||
exit(0)
|
exit(0)
|
||||||
|
|
||||||
url_or_path = args.url_or_path
|
url_or_path = args.url_or_path
|
||||||
if url_or_path is None:
|
|
||||||
|
valid_args = []
|
||||||
|
if not args.url_or_path is None:
|
||||||
|
valid_args = [arg for arg in args.url_or_path if is_path_or_url(arg) is not None]
|
||||||
|
|
||||||
|
if valid_args is None or len(valid_args) == 0:
|
||||||
print("Valid URL or file path required")
|
print("Valid URL or file path required")
|
||||||
exit(1)
|
exit(1)
|
||||||
elif re.match(url_pattern, url_or_path):
|
|
||||||
# Download using yt-dlp
|
for valid_url in valid_args:
|
||||||
if not download_file(url_or_path, STASH_IMPORT_DIR, STASH_YTDLP_FORMAT):
|
if re.match(url_pattern, valid_url):
|
||||||
print("Stopped")
|
# Download using yt-dlp
|
||||||
exit(1)
|
if not download_file(valid_url, STASH_IMPORT_DIR, STASH_YTDLP_FORMAT):
|
||||||
else:
|
print("Stopped")
|
||||||
is_file = subprocess.check_output(["file", "-0", url_or_path]).decode().split("\x00")[1]
|
exit(1)
|
||||||
if "text" in is_file:
|
|
||||||
# Download as multiple URLs from the provided source file
|
|
||||||
print(f"Reading list of {sum(1 for _ in open(url_or_path))} URL(s)")
|
|
||||||
with open(url_or_path) as source_file:
|
|
||||||
for url in source_file:
|
|
||||||
download_file(url.strip(), STASH_IMPORT_DIR, STASH_YTDLP_FORMAT)
|
|
||||||
else:
|
else:
|
||||||
subprocess.run(["rsync", url_or_path, STASH_IMPORT_DIR], check=True)
|
is_file = subprocess.check_output(["file", "-0", valid_url]).decode().split("\x00")[1]
|
||||||
|
if "text" in is_file:
|
||||||
|
# Download as multiple URLs from the provided source file
|
||||||
|
print(f"Reading list of {sum(1 for _ in open(valid_url))} URL(s)")
|
||||||
|
with open(valid_url) as source_file:
|
||||||
|
for url in source_file:
|
||||||
|
download_file(url.strip(), STASH_IMPORT_DIR, STASH_YTDLP_FORMAT)
|
||||||
|
else:
|
||||||
|
subprocess.run(["rsync", valid_url, STASH_IMPORT_DIR], check=True)
|
||||||
|
|
||||||
# Update stash
|
# Update stash
|
||||||
if not args.no_update:
|
if not args.no_update:
|
||||||
|
|
Loading…
Reference in New Issue