-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy pathYTBackup.py
142 lines (112 loc) · 4.23 KB
/
YTBackup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
import json
import os
import subprocess
import sys
import time
import urllib.parse
import urllib.request

from bs4 import BeautifulSoup
from pytube import YouTube
# Raw command-line arguments; argv[1] selects the mode (--backup / --restore).
COMMAND_TYPE = sys.argv
# Module-level default. NOTE(review): get_media_metadata() assigns a *local*
# ERROR of the same name, so this global is never actually updated.
ERROR = ''
# Backup function
# Backup function
def Backup():
    """Walk a directory tree and write a JSON manifest of its video files.

    Prompts interactively for the directory name, probes every file with
    ``hachoir-metadata``, and writes ``<dirname>.json`` in the current
    working directory containing ``[relative_path, height_or_None]`` pairs.
    """

    def get_media_metadata(filename):
        """Return the video height parsed from hachoir-metadata raw output.

        Returns the height as an int, the string 'FILE_ERROR' when the tool
        reports it cannot read the file (e.g. not a video), or None when no
        height line is present.
        """
        # subprocess.run waits for the child and closes its pipes; the
        # original Popen + stdout.read() left the process unreaped.
        result = subprocess.run(
            ["hachoir-metadata", filename, "--raw"],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        # splitlines() handles both "\n" and "\r\n"; split("\r\n") only
        # worked with Windows-style line endings.
        results = result.stdout.decode("utf-8").splitlines()
        if not results:
            return None
        try:
            if results[0].startswith("[err!]"):
                return 'FILE_ERROR'
            prefix = "- height: "
            # NOTE: relies on the height being the 4th raw-output line,
            # as the original did — TODO confirm against hachoir output.
            if results[3].startswith(prefix):
                # Slice off the literal prefix. lstrip(prefix) strips a
                # character *set*, not the prefix, and only worked by luck.
                return int(results[3][len(prefix):])
            return None
        except IndexError:
            print("\n* No Dimensions Found for " + filename)
            return None

    data = []
    PATH_NAME = input("Enter Directory Name To Backup: ")

    # spinner strings
    def spinning_cursor():
        # Endless generator cycling through the four spinner glyphs.
        while True:
            for cursor in "|/-\\":
                yield cursor

    spinner = spinning_cursor()

    # creates the array
    def create_file_lists(path):
        """Recursively append [relative_path, height] entries to `data`."""
        # spinner
        print("\rDoing Work ( {0} )".format(next(spinner)), end="")
        if os.path.isfile(path):
            video_info = get_media_metadata(os.path.relpath(path))
            # checks if there is any error (ie: if its not a video)
            if video_info != 'FILE_ERROR':
                data.append([os.path.relpath(path), video_info])
        if os.path.isdir(path):
            for entry in os.listdir(path):
                # recursively get the files
                create_file_lists(os.path.join(path, entry))
        return data

    # create json file
    def createJSON():
        with open(PATH_NAME + ".json", "w") as outfile:
            json.dump(create_file_lists(PATH_NAME), outfile)
        print("\nSuccessfully created " + PATH_NAME + ".json")

    createJSON()
# Restore function
# Restore function
def Restore():
    """Re-download every video listed in a backup JSON manifest.

    Prompts for the manifest path and an output folder, searches YouTube
    for each entry's title, and downloads the stream whose resolution
    matches the stored height, falling back to the first available stream.
    """
    BACKUP_FILE = input("Enter Backup file path: ")
    OUTPUT_FOLDER = input("Enter Output Folder Name: ")

    # mix it up, soup it up
    def get_video_link(title):
        """Return the href of the first YouTube search result for `title`.

        NOTE(review): relies on the legacy "yt-uix-tile-link" result markup;
        modern YouTube result pages are rendered from embedded JSON and may
        not contain this class — verify against the live site.
        """
        print('\nFetching "' + title + '"')
        query = urllib.parse.quote(title)
        url = "https://www.youtube.com/results?search_query=" + query
        response = urllib.request.urlopen(url)
        html = response.read()
        soup = BeautifulSoup(html, "html.parser")
        return soup.find(attrs={"class": "yt-uix-tile-link"})["href"]

    # progress bar
    def get_progress(stream, chunk, file_handle, bytes_remaining):
        # pytube progress callback: draw a 50-char bar from the fraction
        # downloaded so far; `filesize` is set before each download starts.
        percent = float(round((1 - bytes_remaining / filesize), 2))
        progress = "\rProgress: [{0:50s}] {1:.1f}%".format("#" * int(percent * 50), percent * 100)
        print(progress, end="")

    # Shared with the progress callback above.
    global filesize
    global filesizeMB

    # read json
    with open(BACKUP_FILE) as f:
        data = json.load(f)

    for entry in data:
        # entry is [relative_path, height]; use the bare file name as title.
        title = os.path.basename(entry[0])
        title, ext = os.path.splitext(title)
        # Mirror the backed-up directory layout under OUTPUT_FOLDER.
        dirname = os.path.join(OUTPUT_FOLDER, os.path.dirname(entry[0]))
        # exist_ok avoids the check-then-create race of the original.
        os.makedirs(dirname, exist_ok=True)
        yt = YouTube(get_video_link(title), on_progress_callback=get_progress)
        # get the video by resolution/quality
        video = yt.streams.filter(res=str(entry[1]) + "p").first()
        # if the specific quality is not available fallback to max quality
        if video is None:
            video = yt.streams.first()
        filesize = video.filesize
        filesizeMB = str(round(filesize / float(1 << 20), 2)) + "MB"
        print('\nDownloading "' + str(title) + '" size: ' + filesizeMB)
        video.download(dirname)
# Dispatch on the first command-line argument. The original left one
# failure branch silent (either "no argument" or "unknown argument",
# depending on how its lost indentation attached the else); prompt the
# user in both cases.
mode = COMMAND_TYPE[1] if len(COMMAND_TYPE) > 1 else None
if mode == "--backup":
    Backup()
elif mode == "--restore":
    Restore()
else:
    print("Please specify what do you want to do --backup or --restore")