-
Notifications
You must be signed in to change notification settings - Fork 0
/
transfer_srw_data.py
62 lines (50 loc) · 3.21 KB
/
transfer_srw_data.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
from get_srw_data import GetSrwData
from progress_bar import ProgressPercentage
from upload_data import UploadData
class TransferSrwData():
    """
    Obtain directories for the datasets tracked by the data tracker bot
    and transfer the SRW fix, model-analysis, and Natural Earth datasets
    to the cloud (S3) bucket.
    """
    def __init__(self, linked_home_dir, platform="orion"):
        """
        Args:
            linked_home_dir (str): User directory linked to the RDHPCS' root
                data directory.
            platform (str): RDHPCS of where the datasets will be sourced.
                Only "orion" is currently supported.

        Raises:
            ValueError: If ``platform`` is not a supported RDHPCS.
        """
        # Establish locality of where the datasets will be sourced.
        self.linked_home_dir = linked_home_dir
        if platform == "orion":
            # Hoist the shared prefix so the three source paths stay in sync.
            develop_dir = self.linked_home_dir + "/noaa/fv3-cam/UFS_SRW_App/develop"
            self.fix_data_dir = develop_dir + "/fix.tar"
            self.input_model_data_dir = develop_dir + "/input_model_data.tar"
            self.natural_earth_data_dir = develop_dir + "/NaturalEarth"
        else:
            # Fail fast: the original printed a message and then crashed later
            # with an AttributeError on the unset *_dir attributes.
            raise ValueError(f"Unsupported platform {platform!r}. Select a different platform.")
        # Instantiate SRW uploader.
        # BUG FIX: the original passed self.natural_earth_dir, which is never
        # assigned; the attribute set above is natural_earth_data_dir.
        self.srw_uploader = GetSrwData(None, None, None, self.fix_data_dir,
                                       self.input_model_data_dir,
                                       self.natural_earth_data_dir)
        # List all data directories from sources (filtered).
        print("\nExtracting list of data directories from sources (filtered)...")
        self.ma_data_list = self.srw_uploader.ma_data_list    # derived from tar
        self.fix_data_list = self.srw_uploader.fix_data_list  # derived from tar
        self.ne_data_list = self.srw_uploader.ne_data_list    # not derived from a tar
        # SRW input model analysis & fixed data file locations (filtered).
        print("Extracting list of data directories from work directory (filtered)...")
        self.srw_ma_data_dirs = self.srw_uploader.ma_file_dirs
        self.srw_fix_data_dirs = self.srw_uploader.fix_file_dirs
        self.srw_ne_dirs = self.srw_uploader.natural_earth_dirs
        # Select model analysis files based on external model it was generated by (filtered).
        print("Partitioning data directories from work directory (filtered) into categories ...")
        self.srw_ma_dict = self.srw_uploader.partition_ma_datasets
        self.srw_fix_dict = self.srw_uploader.partition_fixed_datasets
        self.srw_ne_dict = self.srw_uploader.partition_ne_datasets
        print("\033[1m" + "\nSRW MA data:" + "\033[0m" + f"\n{self.srw_ma_dict}")
        print("\033[1m" + "\nSRW Fix data:" + "\033[0m" + f"\n{self.srw_fix_dict}")
        print("\033[1m" + "\nSRW Natural Earth data:" + "\033[0m" + f"\n{self.srw_ne_dict}")
        # Upload fixed, input model, & Natural Earth data.
        # NOTE(review): the uploads below are disabled, so the "transfer
        # complete" message printed next is misleading — re-enable these (with
        # self. attributes) or adjust the message before relying on it.
        #UploadData(srw_fix_dict, use_bucket='srw').upload_files2cloud()
        #UploadData(srw_ma_dict, use_bucket='srw').upload_files2cloud()
        #UploadData(srw_ne_dict, use_bucket='srw').upload_files2cloud()
        print("\033[1m" + "\nSRW Fix, MA, & Natural Earth data transfer to S3 bucket complete." + "\033[0m")
if __name__ == '__main__':
    # Entry point: collect the fix & model-input SRW dataset directories on
    # Orion and push them to the cloud bucket.
    srw_xfer = TransferSrwData(
        linked_home_dir="/home/schin/work",
        platform="orion",
    )