source: trunk/platforms/tsar_generic_xbar/scripts/create_graphs.py

Last change on this file was 1048, checked in by meunier, 8 years ago
  • Update of tsar_xbar_cluster (scripts and openmp support)
  • Property svn:executable set to *
File size: 35.8 KB
#!/usr/bin/python

import subprocess
import os
import re
import sys


apps = [ 'blackscholes', 'filt_ga', 'fft_ga', 'histo-opt', 'kmeans-opt', 'linear_regression', 'lu', 'mandel', 'pca-opt', 'radix_ga', 'string_match' ]
#apps = [ 'blackscholes', 'fft_ga', 'filt_ga', 'histo-opt', 'kmeans-opt', 'linear_regression', 'lu', 'mandel', 'pca-opt', 'radix_ga' ]
#apps = [ 'histo-opt', 'mandel', 'filt_ga', 'radix_ga', 'fft_ga', 'pca-opt', 'fft', 'radix', 'filter', 'kmeans-opt' ]
#apps = [ 'histogram', 'mandel', 'filter', 'fft', 'fft_ga', 'filt_ga', 'pca', 'lu' ]  # radix radix_ga kmeans
#apps = [ 'fal_sh_2' ]
nb_procs = [ 1, 4, 8, 16, 32, 64, 128, 256 ]
#nb_procs = [ 256 ]
single_protocols = ['dhccp', 'th0', 'hmesi', 'wtidl']
joint_protocols = ['dhccp', 'th0', 'hmesi', 'wtidl' ]
#joint_protocols = []

top_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
scripts_path = os.path.join(top_path, 'scripts')
counter_defs_name = os.path.join(scripts_path, "counter_defs.py")

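# counter_defs.py is expected to define the metric tables used throughout this
# script: all_metrics, all_tags, m_metric_tag, m_metric_id, m_metric_name,
# m_app_name, m_prot_name, m_metric_norm, m_norm_factor_name, grouped_metrics,
# individual_metrics, stacked_metrics and colors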
exec(file(counter_defs_name))

gen_dir = 'generated'
graph_dir = 'graph'
template_dir = 'templates'
data_dir = 'data'

log_stdo_name = '_stdo_'
log_term_name = '_term_'

coherence_tmpl     = os.path.join(scripts_path, template_dir, 'coherence_template.gp') # 1 graph per appli
speedup_tmpl       = os.path.join(scripts_path, template_dir, 'speedup_template.gp')
metric_tmpl        = os.path.join(scripts_path, template_dir, 'metric_template.gp') # 1 graph per metric
stacked_tmpl       = os.path.join(scripts_path, template_dir, 'stacked_template.gp')
cst_exec_time_tmpl = os.path.join(scripts_path, template_dir, 'cst_exec_time_template.gp')



def create_file(name, content):
   with open(name, 'w') as output_file:
      output_file.write(content)

def is_numeric(s):
   try:
      float(s)
      return True
   except ValueError:
      return False

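# get_x_y() returns the mesh dimensions (x, y) of the smallest configuration
# able to host nb_procs processors, doubling x and y alternately until
# x * y * 4 >= nb_procs (the factor 4 presumably being the number of
# processors per cluster)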
def get_x_y(nb_procs):
   x = 1
   y = 1
   to_x = True
   while (x * y * 4 < nb_procs):
      if to_x:
         x = x * 2
      else:
         y = y * 2
      to_x = not to_x
   return x, y



# We first fill the m_metric_id table
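# (m_metric_id maps each counter tag, e.g. "[001]", to its metric name;
# it is used below to parse the lines of the stdo log files)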
for metric in all_metrics:
   for tag in all_tags:
      if m_metric_tag[metric] == tag:
         m_metric_id[tag] = metric
         break


# We start by processing all the log files
# Term files are processed for exec time only
# Init files are processed for all metrics
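# Log files are read from the scripts/data/ directory; they are named
# <app>_<prot>_stdo_<nb_procs> and <app>_<prot>_term_<nb_procs>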
exec_time = {}
metrics_val = {}
for prot in joint_protocols:
   metrics_val[prot] = {}
   exec_time[prot] = {}
   for app in apps:
      exec_time[prot][app] = {}
      metrics_val[prot][app] = {}
      for i in nb_procs:
         metrics_val[prot][app][i] = {}
         log_stdo_file = os.path.join(scripts_path, data_dir, app + '_' + prot + log_stdo_name + str(i))
         log_term_file = os.path.join(scripts_path, data_dir, app + '_' + prot + log_term_name + str(i))

         # Term
         lines = open(log_term_file, 'r')
         for line in lines:
            tokens = line[:-1].split()
            if len(tokens) > 0 and tokens[0] == "[PARALLEL_COMPUTE]":
               exec_time[prot][app][i] = int(tokens[len(tokens) - 1])

         # Init files
         lines = open(log_stdo_file, 'r')
         for line in lines:
            tokens = line[:-1].split()
            if len(tokens) == 0:
               continue
            tag = tokens[0]
            value = tokens[len(tokens) - 1]
            pattern = re.compile('\[0[0-9][0-9]\]')
            if pattern.match(tag):
               metric = m_metric_id[tag]
               if (not metrics_val[prot][app][i].has_key(metric) or tag == "[000]" or tag == "[001]"):
                  # We don't add cycles of all Memcaches (they must be the same for all)
                  metrics_val[prot][app][i][metric] = int(value)
               else:
                  metrics_val[prot][app][i][metric] += int(value)

# Completing unset metrics (i.e. metrics not present in the data file) with 0
for prot in joint_protocols:
   for app in apps:
      for i in nb_procs:
         for metric in all_metrics:
            if metric not in metrics_val[prot][app][i]:
               metrics_val[prot][app][i][metric] = 0

# We make a 2nd pass to fill the derived fields, e.g. nb_total_updates
for prot in joint_protocols:
   for app in apps:
      for i in nb_procs:
         x, y = get_x_y(i)
         metrics_val[prot][app][i]['total_read']      = metrics_val[prot][app][i]['local_read']      + metrics_val[prot][app][i]['remote_read']
         metrics_val[prot][app][i]['total_write']     = metrics_val[prot][app][i]['local_write']     + metrics_val[prot][app][i]['remote_write']
         metrics_val[prot][app][i]['total_ll']        = metrics_val[prot][app][i]['local_ll']        + metrics_val[prot][app][i]['remote_ll']
         metrics_val[prot][app][i]['total_sc']        = metrics_val[prot][app][i]['local_sc']        + metrics_val[prot][app][i]['remote_sc']
         metrics_val[prot][app][i]['total_cas']       = metrics_val[prot][app][i]['local_cas']       + metrics_val[prot][app][i]['remote_cas']
         metrics_val[prot][app][i]['total_update']    = metrics_val[prot][app][i]['local_update']    + metrics_val[prot][app][i]['remote_update']
         metrics_val[prot][app][i]['total_m_inv']     = metrics_val[prot][app][i]['local_m_inv']     + metrics_val[prot][app][i]['remote_m_inv']
         metrics_val[prot][app][i]['total_cleanup']   = metrics_val[prot][app][i]['local_cleanup']   + metrics_val[prot][app][i]['remote_cleanup']
         metrics_val[prot][app][i]['total_cleanup_d'] = metrics_val[prot][app][i]['local_cleanup_d'] + metrics_val[prot][app][i]['remote_cleanup_d']
         metrics_val[prot][app][i]['total_getm']      = metrics_val[prot][app][i]['local_getm']      + metrics_val[prot][app][i]['remote_getm']
         metrics_val[prot][app][i]['total_inval_ro']  = metrics_val[prot][app][i]['local_inval_ro']  + metrics_val[prot][app][i]['remote_inval_ro']
         metrics_val[prot][app][i]['total_direct']    = metrics_val[prot][app][i]['total_read']      + metrics_val[prot][app][i]['total_write']
         metrics_val[prot][app][i]['total_ncc_to_cc'] = metrics_val[prot][app][i]['ncc_to_cc_read']  + metrics_val[prot][app][i]['ncc_to_cc_write']
         metrics_val[prot][app][i]['direct_cost']     = metrics_val[prot][app][i]['read_cost']       + metrics_val[prot][app][i]['write_cost']
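         # 'broadcast_cost' is an extrapolation: each broadcast is assumed to cost
         # 2 * (x * y - 1), presumably one invalidation request and one
         # acknowledgement per remote cluster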
         metrics_val[prot][app][i]['broadcast_cost']  = metrics_val[prot][app][i]['broadcast'] * 2 * (x * y - 1)
         #metrics_val[prot][app][i]['coherence_cost']  = metrics_val[prot][app][i]['broadcast_cost']  + metrics_val[prot][app][i]['m_inv_cost'] + metrics_val[prot][app][i]['update_cost']
         metrics_val[prot][app][i]['coherence_cost']  = metrics_val[prot][app][i]['broadcast_cost']  + metrics_val[prot][app][i]['m_inv_cost'] + metrics_val[prot][app][i]['update_cost'] + metrics_val[prot][app][i]['cleanup_cost'] + metrics_val[prot][app][i]['cleanup_d_cost'] + metrics_val[prot][app][i]['inval_ro_cost']
         if metrics_val[prot][app][i]['broadcast'] < metrics_val[prot][app][i]['write_broadcast']:
            # test to patch a bug in mem_cache
            metrics_val[prot][app][i]['nonwrite_broadcast'] = 0
            print "*** Error which should not happen anymore: incorrect number of Broadcasts/Write Broadcasts"
         else:
            metrics_val[prot][app][i]['nonwrite_broadcast'] = metrics_val[prot][app][i]['broadcast'] - metrics_val[prot][app][i]['write_broadcast']

         metrics_val[prot][app][i]['total_stacked'] = 0
         for stacked_metric in stacked_metrics:
            metrics_val[prot][app][i]['total_stacked'] += metrics_val[prot][app][i][stacked_metric]


print "mkdir -p", os.path.join(scripts_path, gen_dir)
subprocess.call([ 'mkdir', '-p', os.path.join(scripts_path, gen_dir) ])

print "mkdir -p", os.path.join(scripts_path, graph_dir)
subprocess.call([ 'mkdir', '-p', os.path.join(scripts_path, graph_dir) ])

#############################################################
#### Graph 1 : Coherence traffic Cost per application     ###
#############################################################
#
#for prot in single_protocols:
#   for app in apps:
#      data_coherence_name = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_coherence.dat')
#      gp_coherence_name   = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_coherence.gp')
#
#      # Creating the data file
#      width = 15
#      content = ""
#
#      for metric in [ '#nb_procs' ] + grouped_metrics:
#         content += metric + " "
#         nb_spaces = width - len(metric)
#         content += nb_spaces * ' '
#      content += "\n"
#
#      for i in nb_procs:
#         content += "%-15d " % i
#         for metric in grouped_metrics:
#            #print "app : %s - prot : %s - i : %d" % (app, prot, i)
#            val = float(metrics_val[prot][app][i][metric]) / exec_time[prot][app][i] * 1000
#            content += "%-15f " % val
#         content += "\n"
#
#      create_file(data_coherence_name, content)
#
#      # Creating the gp file
#      template_file = open(coherence_tmpl, 'r')
#      template = template_file.read()
#
#      plot_str = ""
#      col = 2
#      for metric in grouped_metrics:
#         if metric != grouped_metrics[0]:
#            plot_str += ", \\\n    "
#         plot_str += "\"" + data_coherence_name + "\" using ($1):($" + str(col) + ") lc rgb " + colors[col - 2] + " title \"" + m_metric_name[metric] + "\" with linespoint"
#         col += 1
#      gp_commands = template % dict(app_name = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_' + app + '_coherence'))
#
#      create_file(gp_coherence_name, gp_commands)
#
#      # Calling gnuplot
#      print "gnuplot", gp_coherence_name
#      subprocess.call([ 'gnuplot', gp_coherence_name ])
#
#
#############################################################
#### Graph 2 : Speedup per Application                    ###
#############################################################
#
#if 1 in nb_procs:
#   for prot in single_protocols:
#      for app in apps:
#
#         data_speedup_name = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_speedup.dat')
#         gp_speedup_name   = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_speedup.gp')
#
#         # Creating data file
#         width = 15
#         content = "#nb_procs"
#         nb_spaces = width - len(content)
#         content += nb_spaces * ' '
#         content += "speedup\n"
#
#         for i in nb_procs:
#            content += "%-15d " % i
#            val = exec_time[prot][app][i]
#            content += "%-15f\n" % (exec_time[prot][app][1] / float(val))
#
#         plot_str = "\"" + data_speedup_name + "\" using ($1):($2) lc rgb \"#654387\" title \"Speedup\" with linespoint"
#
#         create_file(data_speedup_name, content)
#
#         # Creating the gp file
#         template_file = open(speedup_tmpl, 'r')
#         template = template_file.read()
#
#         gp_commands = template % dict(appli = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_' + app + '_speedup'))
#
#         create_file(gp_speedup_name, gp_commands)
#
#         # Calling gnuplot
#         print "gnuplot", gp_speedup_name
#         subprocess.call([ 'gnuplot', gp_speedup_name ])
#
#
#############################################################
#### Graph 3 : All speedups on the same Graph             ###
#############################################################
#
#if 1 in nb_procs:
#   for prot in single_protocols:
#      # This graph uses the same template as the graph 2
#      data_speedup_name = os.path.join(scripts_path, gen_dir, prot + '_all_speedup.dat')
#      gp_speedup_name   = os.path.join(scripts_path, gen_dir, prot + '_all_speedup.gp')
#
#      # Creating data file
#      width = 15
#      content = "#nb_procs"
#      nb_spaces = width - len(content)
#      content += (nb_spaces + 1) * ' '
#      for app in apps:
#         content += app + " "
#         content += (width - len(app)) * " "
#      content += "\n"
#
#      for i in nb_procs:
#         content += "%-15d " % i
#         for app in apps:
#            val = exec_time[prot][app][i]
#            content += "%-15f " % (exec_time[prot][app][1] / float(val))
#         content += "\n"
#
#      create_file(data_speedup_name, content)
#
#      # Creating gp file
#      template_file = open(speedup_tmpl, 'r')
#      template = template_file.read()
#
#      plot_str = ""
#      col = 2
#      for app in apps:
#         if app != apps[0]:
#            plot_str += ", \\\n     "
#         plot_str += "\"" + data_speedup_name + "\" using ($1):($" + str(col) + ") lc rgb %s title \"" % (colors[col - 2])  + m_app_name[app] + "\" with linespoint"
#         col += 1
#
#      gp_commands = template % dict(appli = "All Applications", nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_all_speedup'))
#
#      create_file(gp_speedup_name, gp_commands)
#
#      # Calling gnuplot
#      print "gnuplot", gp_speedup_name
#      subprocess.call([ 'gnuplot', gp_speedup_name ])
#
#
#############################################################
#### Graph 4 : Graph per metric                           ###
#############################################################
#
## The following section creates the graphs grouped by metric (e.g. #broadcasts)
## The template file cannot contain everything without losing its genericity,
## so most of the gnuplot commands are built here.
## Graphs are created for metric in the "individual_metrics" list
#
#for prot in single_protocols:
#   for metric in individual_metrics:
#      data_metric_name = os.path.join(scripts_path, gen_dir, prot + '_' + metric + '.dat')
#      gp_metric_name   = os.path.join(scripts_path, gen_dir, prot + '_' + metric + '.gp')
#
#      # Creating the gp file
#      # Setting xtics, i.e. number of procs for each application
#      xtics_str = "("
#      first = True
#      xpos = 1
#      app_labels = ""
#      for num_appli in range(0, len(apps)):
#         for i in nb_procs:
#            if not first:
#               xtics_str += ", "
#            first = False
#            if i == nb_procs[0]:
#               xpos_first = xpos
#            xtics_str += "\"%d\" %.1f" % (i, xpos)
#            xpos_last = xpos
#            xpos += 1.5
#         xpos += 0.5
#         app_name_xpos = float((xpos_first + xpos_last)) / 2
#         app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
#      xtics_str += ")"
#
#      xmax_val = float(xpos - 1)
#
#      # Writing the lines of "plot"
#      plot_str = ""
#      xpos = 0
#      first = True
#      column = 2
#      for i in range(0, len(nb_procs)):
#         if not first:
#            plot_str += ", \\\n    "
#         first = False
#         plot_str += "\"%s\" using ($1+%.1f):($%d) lc rgb %s notitle with boxes" % (data_metric_name, xpos, column, colors[i])
#         column += 1
#         xpos += 1.5
#
#      template_file = open(metric_tmpl, 'r')
#      template = template_file.read()
#
#      gp_commands = template % dict(xtics_str = xtics_str, app_labels = app_labels, ylabel_str = m_metric_name[metric], norm_factor_str = m_norm_factor_name[m_metric_norm[metric]], xmax_val = xmax_val, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_' + metric))
#
#      create_file(gp_metric_name, gp_commands)
#
#      # Creating the data file
#      width = 15
#      content = "#x_pos"
#      nb_spaces = width - len(content)
#      content += nb_spaces * ' '
#      for i in nb_procs:
#         content += "%-15d" % i
#      content += "\n"
#
#      x_pos = 1
#      for app in apps:
#         # Computation of x_pos
#         content += "%-15f" % x_pos
#         x_pos += len(nb_procs) * 1.5 + 0.5
#         for i in nb_procs:
#            if m_metric_norm[metric] == "N":
#               content += "%-15d" % (metrics_val[prot][app][i][metric])
#            elif m_metric_norm[metric] == "P":
#               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / i)
#            elif m_metric_norm[metric] == "C":
#               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / exec_time[prot][app][i] * 1000)
#            elif m_metric_norm[metric] == "W":
#               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][i]['total_write'])) # Number of writes
#            elif m_metric_norm[metric] == "R":
#               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][i]['total_read'])) # Number of reads
#            elif m_metric_norm[metric] == "D":
#               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][i]['total_direct'])) # Number of req.
#            elif is_numeric(m_metric_norm[metric]):
#               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][int(m_metric_norm[metric])][metric]))
#            else:
#               assert(False)
#
#         app_name = m_app_name[app]
#         content += "#" + app_name + "\n"
#
#      create_file(data_metric_name, content)
#
#      # Calling gnuplot
#      print "gnuplot", gp_metric_name
#      subprocess.call([ 'gnuplot', gp_metric_name ])
#
#
#############################################################
#### Graph 5 : Stacked histogram with counters            ###
#############################################################
#
## The following section creates a stacked histogram containing
## the metrics in the "stacked_metrics" list.
## It is normalized per application w.r.t. the values on 256 procs (nb_procs[-1])
#
#for prot in single_protocols:
#   data_stacked_name = os.path.join(scripts_path, gen_dir, prot + '_stacked.dat')
#   gp_stacked_name   = os.path.join(scripts_path, gen_dir, prot + '_stacked.gp')
#
#   norm_factor_value = nb_procs[-1]
#
#   # Creating the gp file
#   template_file = open(stacked_tmpl, 'r')
#   template = template_file.read()
#
#   xtics_str = "("
#   first = True
#   xpos = 1
#   app_labels = ""
#   for num_appli in range(0, len(apps)):
#      for i in nb_procs[1:len(nb_procs)]: # skipping values for 1 proc
#         if not first:
#            xtics_str += ", "
#         first = False
#         if i == nb_procs[1]:
#            xpos_first = xpos
#         xtics_str += "\"%d\" %d -1" % (i, xpos)
#         xpos_last = xpos
#         xpos += 1
#      xpos += 1
#      app_name_xpos = float((xpos_first + xpos_last)) / 2
#      app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
#   xtics_str += ")"
#
#   plot_str = "newhistogram \"\""
#   n = 1
#   for stacked_metric in stacked_metrics:
#      plot_str += ", \\\n    " + "'" + data_stacked_name + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[stacked_metric] + "\""
#      n += 1
#
#   ylabel_str = "Breakdown of Coherence Traffic Normalized w.r.t. \\nthe Values on %d Processors" % norm_factor_value
#   content = template % dict(svg_name = os.path.join(graph_dir, prot + '_stacked'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = "")
#
#   create_file(gp_stacked_name, content)
#
#   # Creating the data file
#   # Values are normalized by application, w.r.t. the number of requests for a given number of procs
#   content = "#"
#   for stacked_metric in stacked_metrics:
#      content += stacked_metric
#      content += ' ' + ' ' * (15 - len(stacked_metric))
#   content += "\n"
#   for app in apps:
#      if app != apps[0]:
#         for i in range(0, len(stacked_metrics)):
#            content += "%-15f" % 0.0
#         content += "\n"
#      for i in nb_procs[1:len(nb_procs)]:
#         for stacked_metric in stacked_metrics:
#            metric_val = metrics_val[prot][app][norm_factor_value]['total_stacked'] # Normalisation
#            if metric_val != 0:
#               content += "%-15f" % (float(metrics_val[prot][app][i][stacked_metric]) / metric_val)
#            else:
#               content += "%-15f" % 0
#         content += "\n"
#
#   create_file(data_stacked_name, content)
#   # Calling gnuplot
#   print "gnuplot", gp_stacked_name
#   subprocess.call([ 'gnuplot', gp_stacked_name ])


#################################################################################
### Graph 6 : Stacked histogram with coherence cost compared to r/w cost      ###
#################################################################################

# The following section creates pairs of stacked histograms. The first histogram of
# each pair contains the cost of reads and writes, the second one the cost of m_inv,
# m_up and broadcasts (extrapolated); both are normalized w.r.t. the first one.
485
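# In the data files, each pair of bars is written as two consecutive rows
# (direct cost first, then coherence cost); rows of zeros are inserted between
# processor counts and between applications to leave gaps between bar groups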
for prot in single_protocols:
   data_cost_filename = os.path.join(scripts_path, gen_dir, prot + '_relative_cost.dat')
   gp_cost_filename   = os.path.join(scripts_path, gen_dir, prot + '_relative_cost.gp')

   direct_cost_metrics = [ 'read_cost', 'write_cost' ]
   #coherence_cost_metrics = ['update_cost', 'm_inv_cost', 'broadcast_cost' ]
   coherence_cost_metrics = ['coherence_cost']

   # Creating the gp file
   template_file = open(stacked_tmpl, 'r')
   template = template_file.read()

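   # Build the x-axis: one group of bars per application, one pair of bars per
   # processor count greater than 4; tick labels give the processor count and
   # application names are added below the axis with "set label"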
   xtics_str = "("
   first = True
   xpos = 1
   app_labels = ""
   for num_appli in range(0, len(apps)):
      first_proc = True
      for i in nb_procs:
         if i > 4:
            if not first:
               xtics_str += ", "
            first = False
            if first_proc:
               first_proc = False
               xpos_first = xpos
            xtics_str += "\"%d\" %f -1" % (i, float(xpos + 0.5))
            xpos_last = xpos
            xpos += 3
      app_name_xpos = float((xpos_first + xpos_last)) / 2
      app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,28\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
      #xpos += 1
   xtics_str += ")"

   plot_str = "newhistogram \"\""
   n = 1
   for cost_metric in direct_cost_metrics + coherence_cost_metrics:
      plot_str += ", \\\n    " + "'" + data_cost_filename + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[cost_metric] + "\""
      n += 1

   ylabel_str = "Coherence Cost Vs. Direct Requests Cost,\\nNormalized w.r.t. Direct Requests Cost"
   content = template % dict(svg_name = os.path.join(graph_dir, prot + '_rel_cost'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = "")

   create_file(gp_cost_filename, content)

   # Creating the data file
   # Values are normalized by application, w.r.t. the number of requests for a given number of procs
   content = "#"
   for cost_metric in direct_cost_metrics:
      content += cost_metric
      content += ' ' + ' ' * (15 - len(cost_metric))
   for cost_metric in coherence_cost_metrics:
      content += cost_metric
      content += ' ' + ' ' * (15 - len(cost_metric))
   content += "\n"
   for app in apps:
      if app != apps[0]:
         for i in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
            content += "%-15f" % 0.0
         content += "\n"
      for i in nb_procs:
         if i > 4:
            for cost_metric in direct_cost_metrics:
               if metrics_val[prot][app][i]['direct_cost'] == 0:
                  print "Error: prot : ", prot, " - app : ", app, " - i : ", i
                  content += "%-15f" % 0
               else:
                  content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[prot][app][i]['direct_cost'])
            for cost_metric in coherence_cost_metrics:
               content += "%-15f" % 0.0
            content += "\n"
            for cost_metric in direct_cost_metrics:
               content += "%-15f" % 0.0
            for cost_metric in coherence_cost_metrics:
               if metrics_val[prot][app][i]['direct_cost'] == 0:
                  print "Error: prot : ", prot, " - app : ", app, " - i : ", i
                  content += "%-15f" % 0
               else:
                  content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[prot][app][i]['direct_cost'])
            content += "\n"
            if i != nb_procs[-1]:
               for j in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
                  content += "%-15f" % 0.0
               content += "\n"

   create_file(data_cost_filename, content)
   # Calling gnuplot
   print "gnuplot", gp_cost_filename
   subprocess.call([ 'gnuplot', gp_cost_filename ])


#################################################################################
### Joint Graphs common to several architectures                              ###
#################################################################################

if len(joint_protocols) == 0:
   sys.exit()

#################################################################################
### Graph 7: Comparison of Speedups (normalized w.r.t. 1 proc on first arch)  ###
#################################################################################


#if 1 in nb_procs:
#   for app in apps:
#
#      data_speedup_name = os.path.join(scripts_path, gen_dir, 'joint_' + app + '_speedup.dat')
#      gp_speedup_name   = os.path.join(scripts_path, gen_dir, 'joint_' + app + '_speedup.gp')
#
#      # Creating data file
#      width = 15
#      content = "#nb_procs"
#      nb_spaces = width - len(content)
#      content += nb_spaces * ' '
#      content += "speedup\n"
#
#      for i in nb_procs:
#         content += "%-15d " % i
#         for prot in joint_protocols:
#            val = exec_time[prot][app][i]
#            content += "%-15f " % (exec_time[joint_protocols[0]][app][1] / float(val))
#         content += "\n"
#
#      create_file(data_speedup_name, content)
#
#      # Creating the gp file
#      template_file = open(speedup_tmpl, 'r')
#      template = template_file.read()
#
#      plot_str = ""
#      col = 2
#      for prot in joint_protocols:
#         if prot != joint_protocols[0]:
#            plot_str += ", \\\n     "
#         plot_str += "\"" + data_speedup_name + "\" using ($1):($" + str(col) + ") lc rgb %s title \"" % (colors[col - 2])  + m_prot_name[prot] + "\" with linespoint"
#         col += 1
#
#      gp_commands = template % dict(appli = m_app_name[app] + " Normalized w.r.t. " + m_prot_name[joint_protocols[0]] + " on 1 Processor", nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, 'joint_' + app + '_speedup'))
#
#      create_file(gp_speedup_name, gp_commands)
#
#      # Calling gnuplot
#      print "gnuplot", gp_speedup_name
#      subprocess.call([ 'gnuplot', gp_speedup_name ])


#################################################################################
### Graph 8 : Joint Stacked histogram with coherence cost and r/w cost        ###
#################################################################################

# The following section is similar to Graph 6: for each application and each number
# of processors, it creates one pair of stacked histograms (R/W cost, then coherence
# cost) per architecture. All bars of a given (application, number of processors)
# group are normalized w.r.t. the R/W cost of the first architecture, i.e. the first
# of the 2 * num_arch histograms.

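# As for Graph 6, rows of zeros are written between processor counts and between
# applications so that gnuplot leaves gaps between bar groups; groups whose
# reference direct cost (first protocol) is zero are skipped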
data_cost_filename = os.path.join(scripts_path, gen_dir, 'joint_relative_cost.dat')
gp_cost_filename   = os.path.join(scripts_path, gen_dir, 'joint_relative_cost.gp')

direct_cost_metrics = [ 'read_cost', 'write_cost', 'getm_cost' ]
coherence_cost_metrics = ['update_cost', 'm_inv_cost', 'broadcast_cost', 'inval_ro_cost', 'cleanup_cost', 'cleanup_d_cost' ]

# Creating the gp file
template_file = open(stacked_tmpl, 'r')
template = template_file.read()

xtics_str = "("
first = True
barwidth = 1
xpos = 1 # successive x position of the center of the first bar in an application
app_labels = ""
prot_labels = ""
for num_appli in range(0, len(apps)):
   first_proc = True
   for i in nb_procs:
      if i > 4:
         x = 0 # local var for computing position of protocol names
         for prot in joint_protocols:
            prot_labels += "set label \"%s\" at first %f, character 2 center font \"Times,10\" rotate by 45\n" % (m_prot_name[prot], float((xpos - 0.5)) + x - 1) # -0.5 instead of +0.5, don't know why... (bug gnuplot?)
            x += 2

         if not first:
            xtics_str += ", "
         first = False
         if first_proc:
            first_proc = False
            xpos_first = xpos
         xtics_str += "\"%d\" %f -1" % (i, float(xpos - 0.5 + len(joint_protocols)))
         #xtics_str += "\"\" %f -1" % (float(xpos - 0.5 + len(joint_protocols))) # not to have proc number displayed
         xpos_last = xpos
         xpos += 1 + len(joint_protocols) * 2 * barwidth
   app_name_xpos = float((xpos_first + xpos_last)) / 2 + 2
   app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
   #xpos += 1
xtics_str += ")"

plot_str = "newhistogram \"\""
n = 1
for cost_metric in direct_cost_metrics + coherence_cost_metrics:
   plot_str += ", \\\n    " + "'" + data_cost_filename + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[cost_metric] + "\""
   n += 1

ylabel_str = "Coherence Cost vs. Direct Requests Cost,\\nNormalized per Application for each Number of Processors"
content = template % dict(svg_name = os.path.join(graph_dir, 'joint_rel_cost'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = prot_labels)

create_file(gp_cost_filename, content)

# Creating the data file
# Values are normalized by application, w.r.t. the number of requests for a given number of procs
content = "#"
for cost_metric in direct_cost_metrics:
   content += cost_metric
   content += ' ' + ' ' * (15 - len(cost_metric))
for cost_metric in coherence_cost_metrics:
   content += cost_metric
   content += ' ' + ' ' * (15 - len(cost_metric))
content += "\n"
for app in apps:
   if app != apps[0]:
      for j in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
         content += "%-15f" % 0.0
      content += "\n"
   for i in nb_procs:
      if i > 4:
         for prot in joint_protocols:
            if metrics_val[joint_protocols[0]][app][i]['direct_cost'] == 0:
               continue
            for cost_metric in direct_cost_metrics:
               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[joint_protocols[0]][app][i]['direct_cost'])
            for cost_metric in coherence_cost_metrics:
               content += "%-15f" % 0.0
            content += "\n"
            for cost_metric in direct_cost_metrics:
               content += "%-15f" % 0.0
            for cost_metric in coherence_cost_metrics:
               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[joint_protocols[0]][app][i]['direct_cost'])
            content += "\n"
         if i != nb_procs[-1]:
            for j in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
               content += "%-15f" % 0.0
            content += "\n"

create_file(data_cost_filename, content)
# Calling gnuplot
print "gnuplot", gp_cost_filename
subprocess.call([ 'gnuplot', gp_cost_filename ])



#################################################################################
### Graph 9 : Single metric comparison between protocols                      ###
#################################################################################

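# For each application and each number of processors, this graph draws one bar
# per protocol giving the value of a single metric (here 'total_write'); each
# protocol uses its own column of the data file so that it gets its own color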
data_metric_filename = os.path.join(scripts_path, gen_dir, 'single_metric.dat')
gp_metric_filename   = os.path.join(scripts_path, gen_dir, 'single_metric.gp')

metric = 'total_write'

# Creating the gp file
template_file = open(stacked_tmpl, 'r')
template = template_file.read()

xtics_str = "("
first = True
xpos = 0 # successive x position of the center of the first bar in an application
app_labels = ""
prot_labels = ""
for num_appli in range(0, len(apps)):
   first_proc = True
   for i in nb_procs:
      x = 0 # local var for computing position of protocol names
      #for prot in joint_protocols:
         #prot_labels += "set label \"%s\" at first %f, character 2 center font \"Times,10\"\n" % (m_prot_name[prot], float((xpos - 0.5)) + x) # -0.5 instead of +0.5, don't know why... (bug gnuplot?)
         #x += 1

      if not first:
         xtics_str += ", "
      first = False
      if first_proc:
         first_proc = False
         xpos_first = xpos
      xtics_str += "\"%d\" %f -1" % (i, float(xpos - 0.5 + len(joint_protocols)))
      xpos_last = xpos
      xpos += 1 + len(joint_protocols)
   app_name_xpos = float((xpos_first + xpos_last)) / 2
   app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
   xpos += 1
xtics_str += ")"

n = 1
plot_str = "newhistogram \"\""
for prot in joint_protocols:
   plot_str += ", \\\n    " + "'" + data_metric_filename + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[metric] + " for " + m_prot_name[prot] + "\""
   n += 1

ylabel_str = "%(m)s" % dict(m = m_metric_name[metric])
content = template % dict(svg_name = os.path.join(graph_dir, 'single_metric'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = prot_labels)

create_file(gp_metric_filename, content)

# Creating the data file
content = "#" + metric
content += "\n"
for app in apps:
   if app != apps[0]:
      for prot in joint_protocols:
         for p in joint_protocols:
            content += "%-15f " % 0.0
         content += "\n"
   for i in nb_procs:
      for prot in joint_protocols:
         for p in joint_protocols:
            if p != prot:
               content += "%-15f " % 0
            else:
               content += "%-15f " % (float(metrics_val[prot][app][i][metric]))
         content += "\n"
      if i != nb_procs[-1]:
         for p in joint_protocols:
            content += "%-15f " % 0.0
         content += "\n"

create_file(data_metric_filename, content)
# Calling gnuplot
print "gnuplot", gp_metric_filename
subprocess.call([ 'gnuplot', gp_metric_filename ])


#########################################################################################################
### Graph 10: Normalized exec times with no log scale: when scalability is achieved by constant time  ###
#########################################################################################################

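# For each application, the execution time of every protocol is plotted against
# the number of processors, normalized w.r.t. the first protocol on 1 processor
# (a flat curve therefore means constant execution time)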
if 1 in nb_procs:
   for app in apps:

      data_speedup_name = os.path.join(scripts_path, gen_dir, 'joint_' + app + '_cst_exec_time.dat')
      gp_speedup_name   = os.path.join(scripts_path, gen_dir, 'joint_' + app + '_cst_exec_time.gp')

      # Creating data file
      width = 15
      content = "#nb_procs"
      nb_spaces = width - len(content)
      content += nb_spaces * ' '
      content += "norm_exec_time\n"

      for i in nb_procs:
         content += "%-15d " % i
         for prot in joint_protocols:
            val = exec_time[prot][app][i]
            content += "%-15f " % (float(val) / exec_time[joint_protocols[0]][app][1])
         content += "\n"

      create_file(data_speedup_name, content)

      # Creating the gp file
      template_file = open(cst_exec_time_tmpl, 'r')
      template = template_file.read()

      plot_str = ""
      col = 2
      for prot in joint_protocols:
         if prot != joint_protocols[0]:
            plot_str += ", \\\n     "
         plot_str += "\"" + data_speedup_name + "\" using ($1):($" + str(col) + ") lc rgb %s title \"" % (colors[col - 2])  + m_prot_name[prot] + "\" with linespoint"
         col += 1

      gp_commands = template % dict(appli = m_app_name[app] + " Normalized w.r.t. " + m_prot_name[joint_protocols[0]] + " on 1 Processor", nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, 'joint_' + app + '_cst_exec_time'))

      create_file(gp_speedup_name, gp_commands)

      # Calling gnuplot
      print "gnuplot", gp_speedup_name
      subprocess.call([ 'gnuplot', gp_speedup_name ])
