backup.py
import json
import os
import subprocess
import sys
from datetime import datetime

# Shared state, populated from the JSON config files at startup.
jobs = {}
destinations = {}
log_data = []
settings = {}
path_temp = "dummy"
def run_jobs():
    global path_temp
    path_temp = settings['temp'].strip()
    if path_temp == "":
        print("Temp directory blank. Please define a temporary directory in settings.json")
        log("FAULT", "admin", "Temp directory blank. Script can't run without a temp directory.")
        return
    if path_temp[-1] != '/':
        path_temp = path_temp + '/'
    # Start every run from an empty temp directory.
    if clean_up("admin") == 0:
        log("INFO", "admin", "Cleaning up temp directory.")
    for job in jobs:
        job_name = job['name']
        log("INFO", job_name, "Starting backup.")
        destiny = return_array(job['destinations'])
        if len(destiny) == 0:
            log("WARN", job_name, "Job has no destination, skipping...")
            continue
        # Compress plain filesystem paths.
        if "path" in job:
            compress_paths(job)
        # Compress docker container volumes.
        if "docker" in job:
            compress_docker(job)
        # Upload the compressed files to the backup servers.
        upload_files(job)
        # Remove the local archives now that they have been uploaded.
        if clean_up(job_name) == 0:
            log("INFO", job_name, "All cleaned up, job finished.")
def compress_paths(job):
    job_name = job['name']
    paths = return_array(job["path"])
    for path in paths:
        log("INFO", job_name, "Compressing " + path)
        path_stamp = path.replace('/', '-')
        archive = path_temp + job_name + path_stamp + '-' + datestamp() + '.tar.gz'
        p, output, err = exec_command(['tar', 'cpfz', archive, path], False)
        if p == 1:
            log("WARN", job_name, "Exit " + str(p) + " - " + err)
        elif p == 0:
            log("INFO", job_name, "Compression successful.")
        else:
            log("FAULT", job_name, "Exit " + str(p) + " - " + err)
def compress_docker(job):
    job_name = job['name']
    dockers = return_array(job["docker"])
    for docker in dockers:
        # Stop each container, then tar its volumes from a throwaway alpine
        # container that shares them; list-form arguments avoid shell=True.
        for container in return_array(docker):
            try:
                log("INFO", job_name, "Stopping container...")
                subprocess.check_output(["docker", "stop", container["name"]])
            except subprocess.CalledProcessError:
                log("FAULT", job_name, "Failed to stop container " + container["name"] + ", skipping.")
                continue
            log("INFO", job_name, "Compressing data for docker container " + container["name"])
            for pth in return_array(container["paths"]):
                log("INFO", job_name, "Compressing path " + pth + ".")
                archive = ("/backup/" + job_name + "-docker-" + container["name"]
                           + pth.replace("/", "-") + "-" + datestamp() + ".tar.gz")
                command = ["docker", "run", "--rm", "--volumes-from", container["name"],
                           "-v", path_temp + ":/backup", "alpine", "tar", "czf", archive, pth]
                try:
                    # check_output raises on a non-zero exit, so reaching the
                    # next line means the compression succeeded.
                    subprocess.check_output(command)
                    log("INFO", job_name, "Docker container " + container["name"] + " volume " + pth + " successfully compressed.")
                except subprocess.CalledProcessError:
                    log("FAULT", job_name, "Compressing the container " + container["name"] + " has failed.")
            log("INFO", job_name, "Restarting container")
            try:
                subprocess.check_output(["docker", "start", container["name"]])
            except subprocess.CalledProcessError:
                log("FAULT", job_name, "Failed to restart container.")
def upload_files(job):
    job_name = job['name']
    destiny = return_array(job['destinations'])
    # Transfer the archives to every remote server the job lists.
    for destination in destiny:
        dest = {}
        for d in destinations:
            if d["name"] == destination:
                dest = d
        if not dest:
            log("WARN", job_name, "Unknown destination " + destination + ", skipping.")
            continue
        log("INFO", job_name, "Starting to upload files to remote server: " + dest["address"])
        command = ["scp"]
        if "key" in dest:
            command.extend(["-i", dest["key"]])
        if "port" in dest:
            command.extend(["-P", str(dest["port"])])
        command.extend([path_temp + "*", dest["address"] + "/" + job_name])
        try:
            # shell=True so the shell expands the * glob over the archives.
            subprocess.check_output(" ".join(command), shell=True)
            log("INFO", job_name, "Files transferred successfully! Cleaning up...")
        except subprocess.CalledProcessError as exc:
            log("FAULT", job_name, "Transfer has failed! Check your settings: " + str(exc))
def clean_up(job_name="admin"):
    # Empty the temp directory; returns 0 on success, rm's exit code otherwise.
    try:
        subprocess.check_output("rm -rf " + path_temp + "*", shell=True)
        return 0
    except subprocess.CalledProcessError as exc:
        log("FAULT", job_name, "Failed to clean up: " + str(exc))
        return exc.returncode
def exec_command(command, shell):
    # Run a command and return (returncode, stdout, stderr) as text.
    p = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, err = p.communicate()
    return p.returncode, output.decode(errors="replace"), err.decode(errors="replace")
def return_array(obj):
    # Normalize config values that may be either a single item or a list.
    if isinstance(obj, list):
        return obj
    return [obj]
def log(level, job, message):
    global log_data
    loginfo = str(datetime.now()).split('.')[0] + " [" + level + "] " + job + " " + message
    log_data.append(loginfo)
    with open("backups.log", "a") as mylog:
        mylog.write(loginfo + '\n')
    if "-v" in sys.argv:
        print(loginfo)
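
# log() emits lines like (timestamp truncated to whole seconds):
#   2024-01-01 12:00:00 [INFO] web01 Starting backup.
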
def datestamp():
    # Filename-safe timestamp down to the minute, e.g. "2024-01-01-12-00".
    return datetime.now().strftime('%Y-%m-%d-%H-%M')
def read_jobs():
    global jobs
    with open("servers.json", "r") as f:
        jobs = json.load(f)
    return jobs
def read_destinations():
    global destinations
    with open("destinations.json", "r") as f:
        destinations = json.load(f)
def read_settings():
    global settings
    with open("settings.json", "r") as f:
        settings = json.load(f)
    return settings
if __name__ == "__main__":
pathname = os.path.dirname(os.path.realpath(__file__))
os.chdir(pathname)
read_jobs()
read_destinations()
read_settings()
run_jobs()