Skip to content

Commit

Permalink
increase resolution
Browse files Browse the repository at this point in the history
  • Loading branch information
philippwitte committed Mar 19, 2020
1 parent 89de2ba commit 5cd4036
Show file tree
Hide file tree
Showing 6 changed files with 33 additions and 34 deletions.
9 changes: 4 additions & 5 deletions numerical_examples/cost/plot_cost_comparison.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
print("Found ", len(file_timings), " file(s).")
timings = []
for filename in file_timings:
timings.append(np.load(filename))
timings.append(np.load(filename, allow_pickle=True))
T = timings[0]
array_size = T.shape[0]

Expand All @@ -34,7 +34,7 @@
ylabel('Runtime per gradient [s]', fontsize=10)

tight_layout()
savefig('figure_1a.png', dpi=300, format='png')
savefig('figure_11a.png', dpi=600, format='png')

####################################################################################################

Expand Down Expand Up @@ -97,7 +97,7 @@ def model_idle_time(timings, num_nodes, sort=False):
legend(['EC2 cluster', 'AWS Batch'], fontsize=9)
ax1.set_xlim([1, array_size])
fig.tight_layout() # otherwise the right y-label is slightly clipped
savefig('figure_1b.png', dpi=300, format='png')
savefig('figure_11b.png', dpi=600, format='png')


# Plot runtime as function of the number of cluster nodes
Expand All @@ -109,6 +109,5 @@ def model_idle_time(timings, num_nodes, sort=False):
xlabel('No. of instances', fontsize=10)
ylabel('Time-to-solution [s]', fontsize=10)
tight_layout()
savefig('figure_1c.png', dpi=300, format='png')

savefig('figure_11c.png', dpi=600, format='png')
show()
12 changes: 6 additions & 6 deletions numerical_examples/resilience/plot_resilience_modeled.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,15 +67,15 @@ def model_runtimes(container_times, schedule, restart=True, restart_time=120):
####################################################################################################

# Model resilience for saving in memory or w/ opt. checkpointing
case = 'memory'
#case = 'checkpointing'
#case = 'memory'
case = 'checkpointing'

# Load timings w/o interruptions
path = os.getcwd()
if case == 'memory':
T = np.load(path + '/timings_memory.dat')
T = np.load(path + '/timings_memory.dat', allow_pickle=True)
else:
T = np.load(path + '/timings_checkpointing.dat')
T = np.load(path + '/timings_checkpointing.dat', allow_pickle=True)
batchsize = T.shape[0]

container_times = (T[:,2] - T[:,1])/1e3
Expand Down Expand Up @@ -129,6 +129,6 @@ def model_runtimes(container_times, schedule, restart=True, restart_time=120):
plt.tight_layout()

if case == 'memory':
savefig('resilience_save_in_memory.png', dpi=300, format='png')
savefig('figure_12a.png', dpi=600, format='png')
else:
savefig('resilience_save_in_checkpointing_2.png', dpi=300, format='png')
savefig('figure_12b.png', dpi=600, format='png')
12 changes: 6 additions & 6 deletions numerical_examples/strong_scaling/plot_omp_timings.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,13 +75,13 @@
timings_optimum = []

for filename in file_timings_default:
timings_default.append(np.load(path_default + filename))
timings_default.append(np.load(path_default + filename, allow_pickle=True))
for filename in file_timings_no_ht:
timings_no_ht.append(np.load(path_no_ht + filename))
timings_no_ht.append(np.load(path_no_ht + filename, allow_pickle=True))
for filename in file_timings_ec2:
timings_ec2.append(np.load(path_ec2 + filename))
timings_ec2.append(np.load(path_ec2 + filename, allow_pickle=True))
for filename in file_timings_optimum:
timings_optimum.append(np.load(path_optimum + filename))
timings_optimum.append(np.load(path_optimum + filename, allow_pickle=True))

# Timings
# create=0; start=1; end=2; var=3;
Expand Down Expand Up @@ -194,7 +194,7 @@
ax.tick_params(axis='x', labelsize=10)
plt.legend(['Batch default', 'Batch no HT', 'EC2 metal', 'Optimum'], loc='lower left', fontsize=9)
plt.tight_layout()
savefig('strong_scaling_omp_speedup.png', dpi=300, format='png')
savefig('strong_scaling_omp_speedup.png', dpi=600, format='png')

# Kernel runtimes
fig, ax = plt.subplots(figsize=(3.33, 3))
Expand All @@ -215,6 +215,6 @@
plt.yticks(np.array([250, 500, 1000, 2000]), ('250', '500', '1000', '2000'), size=10)
plt.legend(['Batch default', 'Batch no HT', 'EC2 metal', 'Optimum'], loc='upper right', fontsize=9)
plt.tight_layout()
savefig('strong_scaling_omp_times.png', dpi=300, format='png')
savefig('strong_scaling_omp_times.png', dpi=600, format='png')

plt.show()
16 changes: 8 additions & 8 deletions numerical_examples/strong_scaling_mpi/plot_mpi_strong_scaling.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,13 +52,13 @@
timings_c5n_metal = []

for filename in file_timings_r5:
timings_r5.append(np.load(path_r5 + filename)) # timings: list of num_files entries: 1 x 6
timings_r5.append(np.load(path_r5 + filename, allow_pickle=True)) # timings: list of num_files entries: 1 x 6
for filename in file_timings_r5_metal:
timings_r5_metal.append(np.load(path_r5_metal + filename))
timings_r5_metal.append(np.load(path_r5_metal + filename, allow_pickle=True))
for filename in file_timings_c5n:
timings_c5n.append(np.load(path_c5n + filename))
timings_c5n.append(np.load(path_c5n + filename, allow_pickle=True))
for filename in file_timings_c5n_metal:
timings_c5n_metal.append(np.load(path_c5n_metal + filename))
timings_c5n_metal.append(np.load(path_c5n_metal + filename, allow_pickle=True))

# Timings
# create=0; start=1; end=2; var=3; devito=4; script=5
Expand Down Expand Up @@ -160,7 +160,7 @@
ax.set_ylim([0.15, 1.1])
plt.legend(['r5.24xlarge', 'r5.metal', 'c5n.18xlarge', 'c5n.metal'], loc='lower left', fontsize=9)
plt.tight_layout()
savefig('strong_scaling_mpi_speedup_single_thread.png', dpi=300, format='png')
savefig('strong_scaling_mpi_speedup_single_thread.png', dpi=600, format='png')

# Timings plot
fig, ax = plt.subplots(figsize=(3.33, 3))
Expand All @@ -179,7 +179,7 @@
ax.tick_params(axis='x', labelsize=10)
plt.legend(['r5.24xlarge', 'r5.metal','c5n.18xlarge', 'c5n.metal'], loc='upper right', fontsize=9)
plt.tight_layout()
savefig('strong_scaling_mpi_runtime_single_thread.png', dpi=300, format='png')
savefig('strong_scaling_mpi_runtime_single_thread.png', dpi=600, format='png')

# Breakdown of timings c5n
fig, ax = plt.subplots(figsize=(3.33, 3))
Expand All @@ -199,7 +199,7 @@
ax.set_ylim([30, 1400])
plt.legend(['Job', 'Container', 'Python', 'Kernel'], fontsize=9)
plt.tight_layout()
savefig('strong_scaling_breakdown_c5n.png', dpi=300, format='png')
savefig('strong_scaling_breakdown_c5n.png', dpi=600, format='png')

# Cost r5 vs c5n
r5_on_demand = 6.048/60/60
Expand Down Expand Up @@ -236,6 +236,6 @@
ax.tick_params(axis='x', labelsize=10)
plt.legend(['r5 on-demand', 'r5 spot', 'c5n on-demand', 'c5n spot'], fontsize=9)
plt.tight_layout()
savefig('strong_scaling_cost_single_thread.png', dpi=300, format='png')
savefig('strong_scaling_cost_single_thread.png', dpi=600, format='png')

plt.show()
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,10 @@
timings_c5n = []

for filename in file_timings_r5:
timings_r5.append(np.load(path_r5 + filename)) # timings: list of num_files entries: 1 x 6
timings_r5.append(np.load(path_r5 + filename, allow_pickle=True)) # timings: list of num_files entries: 1 x 6

for filename in file_timings_c5n:
timings_c5n.append(np.load(path_c5n + filename))
timings_c5n.append(np.load(path_c5n + filename, allow_pickle=True))

# Timings
# create=0; start=1; end=2; var=3; devito=4; script=5
Expand Down Expand Up @@ -106,7 +106,7 @@ def autolabel(rects, labels, scale):
autolabel(bar2, labels_r5, 7)

plt.tight_layout()
savefig('strong_scaling_runtime_max_threads.png', dpi=300, format='png')
savefig('strong_scaling_runtime_max_threads.png', dpi=600, format='png')

# Cost r5 vs c5n
# Cost plot (N. Virginia, May 13, 2019, 10:04 PM)
Expand Down Expand Up @@ -145,6 +145,6 @@ def autolabel(rects, labels, scale):
ax.set_ylim([0, .5])
plt.legend(['r5 on-demand', 'r5 spot', 'c5n on-demand', 'c5n spot', ], fontsize=9)
plt.tight_layout()
savefig('strong_scaling_cost_max_thread.png', dpi=300, format='png')
savefig('strong_scaling_cost_max_thread.png', dpi=600, format='png')

plt.show()
10 changes: 5 additions & 5 deletions numerical_examples/weak_scaling/plot_weak_scaling_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
print("Found ", len(file_timings), " file(s).")
timings = []
for filename in file_timings:
timings.append(np.load(path + filename))
timings.append(np.load(path + filename, allow_pickle=True))

# Timings
# create=0; start=1; end=2; var=3;
Expand Down Expand Up @@ -76,7 +76,7 @@ def confidence_interval(x, z=1.645):
ax.set_ylim([0, 1000])
plt.legend(['Full job', 'Container', 'Reduction'], fontsize=9)
plt.tight_layout()
savefig('weak_scaling_gradients.png', dpi=300, format='png')
savefig('weak_scaling_gradients.png', dpi=600, format='png')

# Average startup time
startup_times = job_time - container_time - reduction_time
Expand All @@ -88,7 +88,7 @@ def confidence_interval(x, z=1.645):
ax.tick_params(axis='y', labelsize=10)
ax.tick_params(axis='x', labelsize=10)
plt.tight_layout()
savefig('job_runtime_first.png', dpi=300, format='png')
savefig('job_runtime_first.png', dpi=600, format='png')

# Average container runtime and cost
fig, ax1 = plt.subplots(figsize=(3.66, 3))
Expand All @@ -109,7 +109,7 @@ def confidence_interval(x, z=1.645):
fig.tight_layout() # otherwise the right y-label is slightly clipped
axx2 = gca()
ax2.set_ylim([0, axx2.get_ylim()[1]*1.15])
savefig('container_runtime_cost.png', dpi=300, format='png')
savefig('container_runtime_cost.png', dpi=600, format='png')

# Plot additional reduction time
fig, ax = plt.subplots(figsize=(3.33, 3))
Expand All @@ -120,6 +120,6 @@ def confidence_interval(x, z=1.645):
ax.tick_params(axis='y', labelsize=10)
ax.tick_params(axis='x', labelsize=10)
plt.tight_layout()
savefig('reduction_runtime_mean.png', dpi=300, format='png')
savefig('reduction_runtime_mean.png', dpi=600, format='png')

plt.show()

0 comments on commit 5cd4036

Please sign in to comment.