opt_utils.py
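"""Utility helpers for the optimizer: CSV/YAML I/O, DataFrame pre-processing,
directory management, and result archiving."""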
import os
from ruamel import yaml
import csv
import zipfile
import pandas as pd
import numpy as np
from sklearn.metrics import explained_variance_score, mean_absolute_error, mean_squared_error, r2_score
import joblib  # sklearn.externals.joblib was removed in recent scikit-learn releases
import logging
logger = logging.getLogger('optimizer')
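# CSS properties for rendering DataFrames (e.g. via DataFrame.style): monospace font, preserved whitespace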
pandas_dataframe_styles = {
'font-family': 'monospace',
'white-space': 'pre'
}
def readCSV(filename):
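    """Load a CSV file into a DataFrame; logs and returns None on I/O errors."""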
global logger
try:
df = pd.read_csv(filename, sep=",", header="infer", skiprows=0, na_values="null")
    except (FileNotFoundError, IOError) as e:
logger.error(e)
else:
return df
def removeMissingData(df):
cleanDF = df.dropna(axis=0)
return cleanDF
def dropVariable(df, column):
del df[column]
return df
def preProcessing(df):
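    """Drop the Time, timestamp and 'avg latency (quantile 0.9)' columns if present, then remove rows with missing values."""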
df = df.copy()
# Drop Time
    if 'Time' in df.columns:
        df = dropVariable(df, 'Time')
        logger.info('Time column dropped from data frame')
    if 'timestamp' in df.columns:
        df = dropVariable(df, 'timestamp')
        logger.info('timestamp column dropped from data frame')
    if 'avg latency (quantile 0.9)' in df.columns:
        df = dropVariable(df, 'avg latency (quantile 0.9)')
        logger.info('avg latency (quantile 0.9) column dropped from data frame')
# Remove cases with missing values
df = removeMissingData(df)
return df
def renameVariable(df, old_var_name, new_var_name):
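    """Return a copy of df with old_var_name renamed to new_var_name, if the column exists."""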
new_df = df.copy()
    if old_var_name in df.columns:
        new_df.rename(columns={old_var_name: new_var_name}, inplace=True)
    else:
        logger.warning(f'Column {old_var_name} not found in data frame; nothing renamed')
return new_df
def dropFirstCases(df, n):
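    """Return a copy of df keeping only the rows whose index is greater than n."""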
new_df = df.copy()
filteredDF = new_df[new_df.index > n]
return filteredDF
def read_yaml(yaml_file):
    global logger
    try:
        # Open inside the try block so FileNotFoundError/IOError are actually caught
        with open(yaml_file, 'r') as stream:
            yaml_data = yaml.safe_load(stream)
    except (FileNotFoundError, IOError, yaml.YAMLError) as e:
        logger.error(e)
    else:
        logger.info('----------------------- READ YAML ------------------------')
        logger.info(f'Loaded yaml: {yaml_data}')
        return yaml_data
def create_dirs(directories):
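    """Create each directory in the list if it does not already exist."""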
    for directory in directories:
        if not os.path.exists(directory):
            try:
                os.makedirs(directory)
            except IOError as e:
                logger.error(e)
        else:
            logger.info(f'Directory {directory} exists')
def reset_output(output_filename):
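    """Delete the given output file if it exists, so a fresh run starts clean."""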
if os.path.exists(output_filename):
try:
os.remove(output_filename)
logger.info(f'Output file {output_filename} deleted')
except (FileNotFoundError, IOError) as e:
logger.error(e)
else:
logger.info(f'Output file {output_filename} does not exist')
def write_yaml(yaml_file, data):
    global logger
    try:
        # Open inside the try block so I/O errors from open() are also logged
        with open(yaml_file, 'w') as stream:
            yaml.dump(data, stream, default_flow_style=False)
    except (IOError, yaml.YAMLError) as e:
        logger.error(e)
def read_data(filename, skip_header=False):
    global logger
    try:
        # Open and consume the reader inside the try block so FileNotFoundError
        # and csv.Error (raised while iterating) are actually caught
        with open(filename, 'r') as csv_file:
            reader = csv.reader(csv_file, quoting=csv.QUOTE_NONNUMERIC, quotechar='"')
            if skip_header:
                next(reader, None)
            return list(reader)
    except (FileNotFoundError, IOError, csv.Error) as e:
        logger.error(e)
def persist_data(filename, data, mode):
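    """Write rows to a CSV file in the given mode; data may be a single row or a list of rows."""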
global logger
try:
with open(filename, mode) as stream:
# wr = csv.writer(stream, quoting=csv.QUOTE_NONNUMERIC)
wr = csv.writer(stream)
if isinstance(data[0], list):
for line in data:
wr.writerow(line)
else:
wr.writerow(data)
logger.info(f'Data file {filename} created')
except (FileNotFoundError, IOError) as e:
logger.error(e)
def zip_files(files, zip_filename):
    global logger
    try:
        # Context manager closes the archive even if a member file is missing
        with zipfile.ZipFile(zip_filename, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for file in files:
                zipf.write(file)
    except (FileNotFoundError, IOError) as e:
        logger.error(e)
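

# Minimal usage sketch showing how these helpers chain together. The file names
# 'config.yaml', 'measurements.csv' and the 'results' directory are assumptions
# for illustration only; they do not come from this module.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)

    config = read_yaml('config.yaml')      # hypothetical optimizer configuration
    create_dirs(['results'])

    df = readCSV('measurements.csv')       # hypothetical input data
    if df is not None:
        df = preProcessing(df)             # drop time/latency columns, remove NaNs
        persist_data(os.path.join('results', 'clean.csv'),
                     df.values.tolist(), 'w')
        zip_files([os.path.join('results', 'clean.csv')],
                  os.path.join('results', 'results.zip'))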