This repository has been archived by the owner on Nov 8, 2023. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathget_gifs.py
56 lines (38 loc) · 1.64 KB
/
get_gifs.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
from pathlib import Path
import json
import requests
import argparse
def download_gifs(json_file_path: str, dont_rename_files: bool):
    """Download every favorited GIF listed in a GIFFavoritesStore JSON dump.

    Files are written to ``./output/``. When *dont_rename_files* is True the
    remote file name is kept; otherwise files are renamed to a hex counter
    (original extension preserved). URLs that fail to download are collected
    into ``failed.txt`` in the current directory.

    :param json_file_path: path to a JSON file shaped like
        ``{"_state": {"favorites": [{"src": <url>}, ...]}}``
    :param dont_rename_files: keep the remote file name instead of renaming
    """
    with open(json_file_path, "r") as json_file:
        data = json.load(json_file)
    gifs = data["_state"]["favorites"]

    # BUG FIX: create the output directory up front — the open() below would
    # otherwise raise FileNotFoundError on a fresh checkout.
    Path("output").mkdir(parents=True, exist_ok=True)

    failed_urls = []
    for i, gif in enumerate(reversed(gifs)):
        url = gif["src"].strip("/")
        if not url.startswith(("http://", "https://")):
            url = f"http://{url}"
        remote_name = url.rsplit("/", 1)[-1]
        if dont_rename_files:
            file_name = remote_name
        else:
            # Hex counter plus the original extension, e.g. "1a.gif".
            extension = remote_name.rsplit(".", 1)[-1]
            file_name = f"{hex(i)[2:]}.{extension}"
        path = Path(f"output/{file_name}")
        progress = f"[{i + 1}/{len(gifs)}] {path} "
        print(progress, end="")
        if path.exists():
            print("[already downloaded]")
            continue
        # BUG FIX: add a timeout so one stalled server cannot hang the whole
        # run, and record network errors as failures instead of crashing.
        try:
            response = requests.get(url, timeout=30)
        except requests.RequestException:
            failed_urls.append(f"{progress}- {url}")
            print("❌")
            continue
        content = response.content
        if not (response.ok and content):
            failed_urls.append(f"{progress}- {url}")
            print("❌")
            continue
        with open(path, "wb") as gif_file:
            gif_file.write(content)
        print("✔️")
    if failed_urls:
        with open("failed.txt", "w") as text_file:
            text_file.write("\n".join(failed_urls))
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--json-file-path", "-j", type=str, required=True,
                        help="Path to GIFFavoritesStore JSON file")
    # BUG FIX: `type=bool` is broken in argparse — bool("False") is True, so
    # the flag could never be disabled. BooleanOptionalAction (Python 3.9+)
    # provides --dont-rename-files / --no-dont-rename-files with a real default.
    parser.add_argument("--dont-rename-files", "-d",
                        action=argparse.BooleanOptionalAction, default=True,
                        help="Whether to keep original file names")
    args = parser.parse_args()
    # BUG FIX: the keyword was `rename_files`, which is not a parameter of
    # download_gifs and raised TypeError on every invocation.
    download_gifs(json_file_path=args.json_file_path,
                  dont_rename_files=args.dont_rename_files)