source: trunk/platforms/tsar_generic_xbar/scripts/create_graphs.py @ 1007

Last change on this file since 1007 was 836, checked in by meunier, 10 years ago

Trunk:

  • Cosmetic in mem_cache_directory.h and xram_transaction.h
  • Renamed mem_cache param dspin_in_width and dspin_out_width to memc_dspin_in_width and memc_dspin_out_width (because of a bug in soclib-cc ?). Should have updated these names in the .sd or .py files of all platforms
  • Updated the scripts for tsar_generic_xbar to take into account the ideal write-through + added a graph in create_graphs.py
  • Property svn:executable set to *
File size: 31.2 KB
Line 
1#!/usr/bin/python
2
3import subprocess
4import os
5import re
6import sys
7
8
# Benchmark applications, processor counts and protocols to process.
apps = [ 'filter', 'lu', 'fft_ga' ]
#apps = [ 'histogram', 'mandel', 'filter', 'fft', 'fft_ga', 'filt_ga', 'pca', 'lu' ]  # radix radix_ga kmeans
#apps = [ 'filt_ga' ]
nb_procs = [ 1, 4, 8, 16, 32, 64 ]
single_protocols = ['dhccp', 'rwt' ]   # protocols plotted individually (graphs 1-6)
joint_protocols = ['dhccp', 'rwt' ]    # protocols compared on the same graph (graphs 7-9)
#joint_protocols = []

# All paths are resolved relative to this script's real location.
top_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
scripts_path = os.path.join(top_path, 'scripts')
counter_defs_name = os.path.join(scripts_path, "counter_defs.py")

# Executes counter_defs.py in this module's namespace; it provides (at least)
# all_metrics, all_tags, m_metric_tag, m_metric_id, m_metric_name, m_app_name,
# m_prot_name, m_metric_norm, m_norm_factor_name, grouped_metrics,
# individual_metrics, stacked_metrics and colors, all used below.
# (Python 2 'file' builtin; trusted local file.)
exec(file(counter_defs_name))

# Output directory names (created under scripts_path) and template location.
gen_dir = 'generated'
graph_dir = 'graph'
template_dir = 'templates'
data_dir = 'data'

# Suffixes of the two kinds of simulation log files.
log_stdo_name = '_stdo_'
log_term_name = '_term_'

coherence_tmpl = os.path.join(scripts_path, template_dir, 'coherence_template.gp') # 1 graph per appli
speedup_tmpl   = os.path.join(scripts_path, template_dir, 'speedup_template.gp')
metric_tmpl    = os.path.join(scripts_path, template_dir, 'metric_template.gp') # 1 graph per metric
stacked_tmpl   = os.path.join(scripts_path, template_dir, 'stacked_template.gp')
36
37
def create_file(name, content):
   """Write 'content' to the file 'name', truncating any existing file."""
   # 'with' guarantees the descriptor is closed even if write() raises;
   # the original open/close pair leaked it on error and shadowed the
   # 'file' builtin with its local variable name.
   with open(name, 'w') as out:
      out.write(content)
42   
def is_numeric(s):
   """Tell whether the string 's' can be parsed as a float."""
   try:
      float(s)
   except ValueError:
      return False
   return True
49
def get_x_y(nb_procs):
   """Return the (x, y) mesh dimensions for a platform of 'nb_procs'
   processors, assuming 4 processors per cluster.  The mesh grows by
   doubling x and y alternately, starting with x."""
   dim_x, dim_y = 1, 1
   grow_x = True
   while dim_x * dim_y * 4 < nb_procs:
      if grow_x:
         dim_x *= 2
      else:
         dim_y *= 2
      grow_x = not grow_x
   return dim_x, dim_y
61
62
63
# We first fill the m_metric_id table:
# reverse mapping of m_metric_tag, i.e. for each tag, the (unique) metric
# whose tag matches it.
for metric in all_metrics:
   for tag in all_tags:
      if m_metric_tag[metric] == tag:
         m_metric_id[tag] = metric
         break
70
71
# We start by processing all the log files
# Term files are processed for exec time only
# Init files are processed for all metrics
#
# exec_time[prot][app][n]          -> parallel execution time in cycles
# metrics_val[prot][app][n][metric]-> accumulated raw counter values
tag_pattern = re.compile(r'\[0[0-9][0-9]\]')  # hoisted: was recompiled for every input line
exec_time = {}
metrics_val = {}
for prot in single_protocols:
   metrics_val[prot] = {}
   exec_time[prot] = {}
   for app in apps:
      exec_time[prot][app] = {}
      metrics_val[prot][app] = {}
      for i in nb_procs:
         metrics_val[prot][app][i] = {}
         log_stdo_file = os.path.join(scripts_path, data_dir, app + '_' + prot + log_stdo_name + str(i))
         log_term_file = os.path.join(scripts_path, data_dir, app + '_' + prot + log_term_name + str(i))

         # Term log: keep the (last) [PARALLEL_COMPUTE] value as exec time.
         # Files are now closed explicitly (they were left open before).
         term_file = open(log_term_file, 'r')
         for line in term_file:
            tokens = line[:-1].split()
            if len(tokens) > 0 and tokens[0] == "[PARALLEL_COMPUTE]":
               exec_time[prot][app][i] = int(tokens[-1])
         term_file.close()

         # Init (stdo) log: lines tagged "[0xx]" carry counter values
         stdo_file = open(log_stdo_file, 'r')
         for line in stdo_file:
            tokens = line[:-1].split()
            if len(tokens) == 0:
               continue
            tag = tokens[0]
            value = tokens[-1]
            if tag_pattern.match(tag):
               metric = m_metric_id[tag]
               # 'in' replaces the deprecated dict.has_key()
               if (metric not in metrics_val[prot][app][i] or tag == "[000]" or tag == "[001]"):
                  # We don't add cycles of all Memcaches (they must be the same for all)
                  metrics_val[prot][app][i][metric] = int(value)
               else:
                  metrics_val[prot][app][i][metric] += int(value)
         stdo_file.close()
111   
# Completing unset metrics (i.e. they are not present in the data file) with 0
for prot in single_protocols:
   for app in apps:
      for i in nb_procs:
         for metric in all_metrics:
            # setdefault leaves already-parsed values untouched
            metrics_val[prot][app][i].setdefault(metric, 0)
119
# We make a 2nd pass to fill the derived fields, e.g. nb_total_updates
# (each total_* is local_* + remote_*; cost fields combine raw counters)
for prot in single_protocols:
   for app in apps:
      for i in nb_procs:
         x, y = get_x_y(i)
         metrics_val[prot][app][i]['total_read']      = metrics_val[prot][app][i]['local_read']      + metrics_val[prot][app][i]['remote_read']
         metrics_val[prot][app][i]['total_write']     = metrics_val[prot][app][i]['local_write']     + metrics_val[prot][app][i]['remote_write']
         metrics_val[prot][app][i]['total_ll']        = metrics_val[prot][app][i]['local_ll']        + metrics_val[prot][app][i]['remote_ll']
         metrics_val[prot][app][i]['total_sc']        = metrics_val[prot][app][i]['local_sc']        + metrics_val[prot][app][i]['remote_sc']
         metrics_val[prot][app][i]['total_cas']       = metrics_val[prot][app][i]['local_cas']       + metrics_val[prot][app][i]['remote_cas']
         metrics_val[prot][app][i]['total_update']    = metrics_val[prot][app][i]['local_update']    + metrics_val[prot][app][i]['remote_update']
         metrics_val[prot][app][i]['total_m_inv']     = metrics_val[prot][app][i]['local_m_inv']     + metrics_val[prot][app][i]['remote_m_inv']
         metrics_val[prot][app][i]['total_cleanup']   = metrics_val[prot][app][i]['local_cleanup']   + metrics_val[prot][app][i]['remote_cleanup']
         metrics_val[prot][app][i]['total_cleanup_d'] = metrics_val[prot][app][i]['local_cleanup_d'] + metrics_val[prot][app][i]['remote_cleanup_d']
         metrics_val[prot][app][i]['total_getm']      = metrics_val[prot][app][i]['local_getm']      + metrics_val[prot][app][i]['remote_getm']
         metrics_val[prot][app][i]['total_inval_ro']  = metrics_val[prot][app][i]['local_inval_ro']  + metrics_val[prot][app][i]['remote_inval_ro']
         metrics_val[prot][app][i]['total_direct']    = metrics_val[prot][app][i]['total_read']      + metrics_val[prot][app][i]['total_write']
         metrics_val[prot][app][i]['total_ncc_to_cc'] = metrics_val[prot][app][i]['ncc_to_cc_read']  + metrics_val[prot][app][i]['ncc_to_cc_write']
         metrics_val[prot][app][i]['direct_cost']     = metrics_val[prot][app][i]['read_cost']       + metrics_val[prot][app][i]['write_cost']
         # a broadcast reaches every cluster but the sender: x * y - 1 messages
         metrics_val[prot][app][i]['broadcast_cost']  = metrics_val[prot][app][i]['broadcast'] * (x * y - 1)
         if metrics_val[prot][app][i]['broadcast'] < metrics_val[prot][app][i]['write_broadcast']:
            # test to patch a bug in mem_cache
            metrics_val[prot][app][i]['nonwrite_broadcast'] = 0
            print "*** Error which should not happen anymore: incorrect number of Broadcasts/Write Broadcasts"
         else:
            metrics_val[prot][app][i]['nonwrite_broadcast'] = metrics_val[prot][app][i]['broadcast'] - metrics_val[prot][app][i]['write_broadcast']
   
         # total of everything in stacked_metrics, used by graph 5 normalization
         metrics_val[prot][app][i]['total_stacked'] = 0
         for stacked_metric in stacked_metrics:
            metrics_val[prot][app][i]['total_stacked'] += metrics_val[prot][app][i][stacked_metric]
150
151           
# Create the output directories ('mkdir -p': no error if they already exist)
print "mkdir -p", os.path.join(scripts_path, gen_dir)
subprocess.call([ 'mkdir', '-p', os.path.join(scripts_path, gen_dir) ])

print "mkdir -p", os.path.join(scripts_path, graph_dir)
subprocess.call([ 'mkdir', '-p', os.path.join(scripts_path, graph_dir) ])
157
############################################################
### Graph 1 : Coherence traffic Cost per application     ###
############################################################

# For each (protocol, application): one .dat + one .gp file, plotting every
# metric in grouped_metrics normalized per 1000 cycles of execution time.
for prot in single_protocols:
   for app in apps:
      data_coherence_name = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_coherence.dat')
      gp_coherence_name   = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_coherence.gp')
   
      # Creating the data file (fixed-width 15-char columns)
      width = 15
      content = ""
     
      for metric in [ '#nb_procs' ] + grouped_metrics:
         content += metric + " "
         nb_spaces = width - len(metric)
         content += nb_spaces * ' '
      content += "\n"
   
      for i in nb_procs:
         content += "%-15d " % i
         for metric in grouped_metrics:
            # events per 1000 cycles
            val = float(metrics_val[prot][app][i][metric]) / exec_time[prot][app][i] * 1000
            content += "%-15f " % val
         content += "\n"
     
      create_file(data_coherence_name, content)
   
      # Creating the gp file
      template_file = open(coherence_tmpl, 'r')
      template = template_file.read()
     
      # one "using" clause per metric, columns start at 2 (column 1 = nb_procs)
      plot_str = ""
      col = 2
      for metric in grouped_metrics:
         if metric != grouped_metrics[0]:
            plot_str += ", \\\n    "
         plot_str += "\"" + data_coherence_name + "\" using ($1):($" + str(col) + ") lc rgb " + colors[col - 2] + " title \"" + m_metric_name[metric] + "\" with linespoint"
         col += 1
      gp_commands = template % dict(app_name = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_' + app + '_coherence'))
     
      create_file(gp_coherence_name, gp_commands)
     
      # Calling gnuplot
      print "gnuplot", gp_coherence_name
      subprocess.call([ 'gnuplot', gp_coherence_name ])
204
205
############################################################
### Graph 2 : Speedup per Application                    ###
############################################################

# Speedup of each (protocol, application) relative to its own 1-processor run.
for prot in single_protocols:
   for app in apps:
   
      data_speedup_name = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_speedup.dat')
      gp_speedup_name   = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_speedup.gp')
     
      # Creating data file
      width = 15
      content = "#nb_procs"
      nb_spaces = width - len(content)
      content += nb_spaces * ' '
      content += "speedup\n"
   
      for i in nb_procs:
         content += "%-15d " % i
         val = exec_time[prot][app][i]
         # speedup = T(1 proc) / T(i procs)
         content += "%-15f\n" % (exec_time[prot][app][1] / float(val))
   
      plot_str = "\"" + data_speedup_name + "\" using ($1):($2) lc rgb \"#654387\" title \"Speedup\" with linespoint"
     
      create_file(data_speedup_name, content)
     
      # Creating the gp file
      template_file = open(speedup_tmpl, 'r')
      template = template_file.read()
     
      gp_commands = template % dict(appli = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_' + app + '_speedup'))
     
      create_file(gp_speedup_name, gp_commands)
     
      # Calling gnuplot
      print "gnuplot", gp_speedup_name
      subprocess.call([ 'gnuplot', gp_speedup_name ])
243
244
############################################################
### Graph 3 : All speedups on the same Graph             ###
############################################################

# One curve per application, all on a single plot, per protocol.
for prot in single_protocols:
   # This graph uses the same template as the graph 2
   data_speedup_name = os.path.join(scripts_path, gen_dir, prot + '_all_speedup.dat')
   gp_speedup_name   = os.path.join(scripts_path, gen_dir, prot + '_all_speedup.gp')
   
   # Creating data file (header row: one column per application)
   width = 15
   content = "#nb_procs"
   nb_spaces = width - len(content)
   content += (nb_spaces + 1) * ' '
   for app in apps:
      content += app + " "
      content += (width - len(app)) * " "
   content += "\n"
   
   for i in nb_procs:
      content += "%-15d " % i
      for app in apps:
         val = exec_time[prot][app][i]
         content += "%-15f " % (exec_time[prot][app][1] / float(val))
      content += "\n"
   
   create_file(data_speedup_name, content)
   
   # Creating gp file
   template_file = open(speedup_tmpl, 'r')
   template = template_file.read()
   
   plot_str = ""
   col = 2
   for app in apps:
      if app != apps[0]:
         plot_str += ", \\\n     "
      plot_str += "\"" + data_speedup_name + "\" using ($1):($" + str(col) + ") lc rgb %s title \"" % (colors[col - 2])  + m_app_name[app] + "\" with linespoint"
      col += 1
   
   gp_commands = template % dict(appli = "All Applications", nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_all_speedup'))
     
   create_file(gp_speedup_name, gp_commands)
     
   # Calling gnuplot
   print "gnuplot", gp_speedup_name
   subprocess.call([ 'gnuplot', gp_speedup_name ])
292
293
############################################################
### Graph 4 : Graph per metric                           ###
############################################################

# The following section creates the graphs grouped by measure (e.g. #broadcasts)
# The template file cannot be easily created otherwise it would not be generic
# in many ways. This is why it is mainly created here.
# Graphs are created for metric in the "individual_metrics" list

for prot in single_protocols:
   for metric in individual_metrics:
      data_metric_name = os.path.join(scripts_path, gen_dir, prot + '_' + metric + '.dat')
      gp_metric_name   = os.path.join(scripts_path, gen_dir, prot + '_' + metric + '.gp')
   
      # Creating the gp file
      # Setting xtics, i.e. number of procs for each application
      # (bars are 1.5 units apart, groups of one app are 0.5 units apart)
      xtics_str = "("
      first = True
      xpos = 1
      app_labels = ""
      for num_appli in range(0, len(apps)):
         for i in nb_procs:
            if not first:
               xtics_str += ", "
            first = False
            if i == nb_procs[0]:
               xpos_first = xpos
            xtics_str += "\"%d\" %.1f" % (i, xpos)
            xpos_last = xpos
            xpos += 1.5
         xpos += 0.5
         # application name is centered under its group of bars
         app_name_xpos = float((xpos_first + xpos_last)) / 2
         app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
      xtics_str += ")"
   
      xmax_val = float(xpos - 1)
   
      # Writing the lines of "plot"
      plot_str = ""
      xpos = 0
      first = True
      column = 2
      for i in range(0, len(nb_procs)):
         if not first:
            plot_str += ", \\\n    "
         first = False
         plot_str += "\"%s\" using ($1+%.1f):($%d) lc rgb %s notitle with boxes" % (data_metric_name, xpos, column, colors[i])
         column += 1
         xpos += 1.5
   
      template_file = open(metric_tmpl, 'r')
      template = template_file.read()
   
      gp_commands = template % dict(xtics_str = xtics_str, app_labels = app_labels, ylabel_str = m_metric_name[metric], norm_factor_str = m_norm_factor_name[m_metric_norm[metric]], xmax_val = xmax_val, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_' + metric))
   
      create_file(gp_metric_name, gp_commands)
     
      # Creating the data file
      width = 15
      content = "#x_pos"
      nb_spaces = width - len(content)
      content += nb_spaces * ' '
      for i in nb_procs:
         content += "%-15d" % i
      content += "\n"
   
      # One data row per application; the value written depends on the
      # metric's normalization mode (m_metric_norm[metric]):
      #   N = raw, P = per processor, C = per 1000 cycles,
      #   W/R/D = per write/read/direct request,
      #   numeric = relative to the same metric on that many processors.
      x_pos = 1
      for app in apps:
         # Computation of x_pos
         content += "%-15f" % x_pos
         x_pos += len(nb_procs) * 1.5 + 0.5
         for i in nb_procs:
            if m_metric_norm[metric] == "N":
               content += "%-15d" % (metrics_val[prot][app][i][metric])
            elif m_metric_norm[metric] == "P":
               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / i)
            elif m_metric_norm[metric] == "C":
               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / exec_time[prot][app][i] * 1000)
            elif m_metric_norm[metric] == "W":
               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][i]['total_write'])) # Number of writes
            elif m_metric_norm[metric] == "R":
               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][i]['total_read'])) # Number of reads
            elif m_metric_norm[metric] == "D":
               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][i]['total_direct'])) # Number of req.
            elif is_numeric(m_metric_norm[metric]):
               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][int(m_metric_norm[metric])][metric]))
            else:
               assert(False)
   
         app_name = m_app_name[app]
         content += "#" + app_name + "\n"
     
      create_file(data_metric_name, content)
   
      # Calling gnuplot
      print "gnuplot", gp_metric_name
      subprocess.call([ 'gnuplot', gp_metric_name ])
391
392
############################################################
### Graph 5 : Stacked histogram with counters            ###
############################################################

# The following section creates a stacked histogram containing
# the metrics in the "stacked_metric" list
# It is normalized per application w.r.t the values on the largest
# number of processors (norm_factor_value = nb_procs[-1])

for prot in single_protocols:
   data_stacked_name = os.path.join(scripts_path, gen_dir, prot + '_stacked.dat')
   gp_stacked_name   = os.path.join(scripts_path, gen_dir, prot + '_stacked.gp')
   
   norm_factor_value = nb_procs[-1]
   
   # Creating the gp file
   template_file = open(stacked_tmpl, 'r')
   template = template_file.read()
   
   # xtics: one tick per (app, nb_procs) bar, apps separated by one empty slot
   xtics_str = "("
   first = True
   xpos = 1
   app_labels = ""
   for num_appli in range(0, len(apps)):
      for i in nb_procs:
         if not first:
            xtics_str += ", "
         first = False
         if i == nb_procs[0]:
            xpos_first = xpos
         xtics_str += "\"%d\" %d -1" % (i, xpos)
         xpos_last = xpos
         xpos += 1
      xpos += 1
      app_name_xpos = float((xpos_first + xpos_last)) / 2
      app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
   xtics_str += ")"
   
   plot_str = "newhistogram \"\""
   n = 1
   for stacked_metric in stacked_metrics:
      plot_str += ", \\\n    " + "'" + data_stacked_name + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[stacked_metric] + "\""
      n += 1
   
   ylabel_str = "Breakdown of Coherence Traffic Normalized w.r.t. \\nthe Values on %d Processors" % norm_factor_value
   content = template % dict(svg_name = os.path.join(graph_dir, prot + '_stacked'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = "")
   
   create_file(gp_stacked_name, content)
   
   # Creating the data file
   # Values are normalized by application, w.r.t. the number of requests for a given number of procs
   content = "#"
   for stacked_metric in stacked_metrics:
      content += stacked_metric
      content += ' ' + ' ' * (15 - len(stacked_metric))
   content += "\n"
   for app in apps:
      if app != apps[0]:
         # all-zero separator row between applications
         for i in range(0, len(stacked_metrics)):
            content += "%-15f" % 0.0
         content += "\n"
      for i in nb_procs:
         for stacked_metric in stacked_metrics:
            content += "%-15f" % (float(metrics_val[prot][app][i][stacked_metric]) / metrics_val[prot][app][norm_factor_value]['total_stacked'])
         content += "\n"
   
   create_file(data_stacked_name, content)
   # Calling gnuplot
   print "gnuplot", gp_stacked_name
   subprocess.call([ 'gnuplot', gp_stacked_name ])
462
463
464
#################################################################################
### Graph 6 : Stacked histogram with coherence cost compared to r/w cost      ###
#################################################################################

# The following section creates pairs of stacked histograms, normalized w.r.t. the first one.
# The first one contains the cost of reads and writes, the second contains the cost
# of m_inv, m_up and broadcasts (extrapolated)
# Only processor counts > 4 are plotted.

for prot in single_protocols:
   data_cost_filename = os.path.join(scripts_path, gen_dir, prot + '_relative_cost.dat')
   gp_cost_filename   = os.path.join(scripts_path, gen_dir, prot + '_relative_cost.gp')
   
   direct_cost_metrics = [ 'read_cost', 'write_cost' ]
   coherence_cost_metrics = ['update_cost', 'm_inv_cost', 'broadcast_cost' ]
   
   # Creating the gp file
   template_file = open(stacked_tmpl, 'r')
   template = template_file.read()
   
   xtics_str = "("
   first = True
   xpos = 1
   app_labels = ""
   for num_appli in range(0, len(apps)):
      first_proc = True
      for i in nb_procs:
         if i > 4:
            if not first:
               xtics_str += ", "
            first = False
            if first_proc:
               first_proc = False
               xpos_first = xpos
            # tick centered between the pair of bars (hence + 0.5)
            xtics_str += "\"%d\" %f -1" % (i, float(xpos + 0.5))
            xpos_last = xpos
            xpos += 3
      app_name_xpos = float((xpos_first + xpos_last)) / 2
      app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
      #xpos += 1
   xtics_str += ")"
   
   plot_str = "newhistogram \"\""
   n = 1
   for cost_metric in direct_cost_metrics + coherence_cost_metrics:
      plot_str += ", \\\n    " + "'" + data_cost_filename + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[cost_metric] + "\""
      n += 1
   
   ylabel_str = "Coherence Cost Compared to Direct Requests Cost,\\nNormalized per Application for each Number of Processors"
   content = template % dict(svg_name = os.path.join(graph_dir, prot + '_rel_cost'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = "")
   
   create_file(gp_cost_filename, content)
   
   # Creating the data file
   # Values are normalized by application, w.r.t. the number of requests for a given number of procs
   content = "#"
   for cost_metric in direct_cost_metrics:
      content += cost_metric
      content += ' ' + ' ' * (15 - len(cost_metric))
   for cost_metric in coherence_cost_metrics:
      content += cost_metric
      content += ' ' + ' ' * (15 - len(cost_metric))
   content += "\n"
   for app in apps:
      if app != apps[0]:
         # all-zero separator row between applications
         for i in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
            content += "%-15f" % 0.0
         content += "\n"
      for i in nb_procs:
         if i > 4:
            # first bar of the pair: direct costs only
            for cost_metric in direct_cost_metrics:
               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[prot][app][i]['direct_cost'])
            for cost_metric in coherence_cost_metrics:
               content += "%-15f" % 0.0
            content += "\n"
            # second bar of the pair: coherence costs only
            for cost_metric in direct_cost_metrics:
               content += "%-15f" % 0.0
            for cost_metric in coherence_cost_metrics:
               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[prot][app][i]['direct_cost'])
            content += "\n"
            if i != nb_procs[-1]:
               # empty slot between processor counts
               for j in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
                  content += "%-15f" % 0.0
               content += "\n"
   
   create_file(data_cost_filename, content)
   # Calling gnuplot
   print "gnuplot", gp_cost_filename
   subprocess.call([ 'gnuplot', gp_cost_filename ])
553
554
#################################################################################
### Joint Graphs to several architectures                                     ###
#################################################################################

# Stop here if no cross-protocol ("joint") graphs were requested.
if len(joint_protocols) == 0:
   sys.exit()
561
#################################################################################
### Graph 7: Comparison of Speedups (normalized w.r.t. 1 proc on first arch)  ###
#################################################################################

# One curve per protocol; all speedups are relative to the 1-processor run
# of the FIRST protocol in joint_protocols.
for app in apps:

   data_speedup_name = os.path.join(scripts_path, gen_dir, 'joint_' + app + '_speedup.dat')
   gp_speedup_name   = os.path.join(scripts_path, gen_dir, 'joint_' + app + '_speedup.gp')
   
   # Creating data file
   width = 15
   content = "#nb_procs"
   nb_spaces = width - len(content)
   content += nb_spaces * ' '
   content += "speedup\n"

   for i in nb_procs:
      content += "%-15d " % i
      for prot in joint_protocols:
         val = exec_time[prot][app][i]
         content += "%-15f " % (exec_time[joint_protocols[0]][app][1] / float(val))
      content += "\n"

   create_file(data_speedup_name, content)
   
   # Creating the gp file
   template_file = open(speedup_tmpl, 'r')
   template = template_file.read()
 
   plot_str = ""
   col = 2
   for prot in joint_protocols:
      if prot != joint_protocols[0]:
         plot_str += ", \\\n     "
      plot_str += "\"" + data_speedup_name + "\" using ($1):($" + str(col) + ") lc rgb %s title \"" % (colors[col - 2])  + m_prot_name[prot] + "\" with linespoint"
      col += 1
 
   gp_commands = template % dict(appli = m_app_name[app] + " Normalized w.r.t. " + m_prot_name[joint_protocols[0]] + " on 1 Processor", nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, 'joint_' + app + '_speedup'))
   
   create_file(gp_speedup_name, gp_commands)
   
   # Calling gnuplot
   print "gnuplot", gp_speedup_name
   subprocess.call([ 'gnuplot', gp_speedup_name ])
607
608
#################################################################################
### Graph 8 : Joint Stacked histogram with coherence cost and r/w cost        ###
#################################################################################

# The following section creates pairs of stacked histograms for each arch for each number of proc for each app, normalized by (app x number of procs) (with first arch, R/W cost, first of the 2*num_arch histo). It is close to Graph 6
# Only processor counts > 4 are plotted; normalization divisor is the
# direct_cost of the FIRST protocol in joint_protocols.

data_cost_filename = os.path.join(scripts_path, gen_dir, 'joint_relative_cost.dat')
gp_cost_filename   = os.path.join(scripts_path, gen_dir, 'joint_relative_cost.gp')
   
direct_cost_metrics = [ 'read_cost', 'write_cost', 'getm_cost' ]
coherence_cost_metrics = ['update_cost', 'm_inv_cost', 'broadcast_cost', 'inval_ro_cost', 'cleanup_cost', 'cleanup_d_cost' ]
 
# Creating the gp file
template_file = open(stacked_tmpl, 'r')
template = template_file.read()
   
xtics_str = "("
first = True
xpos = 1 # successive x position of the center of the first bar in a application
app_labels = ""
prot_labels = ""
for num_appli in range(0, len(apps)):
   first_proc = True
   for i in nb_procs:
      if i > 4:
         x = 0 # local var for computing position of protocol names
         for prot in joint_protocols:
            prot_labels += "set label \"%s\" at first %f, character 2 center font \"Times,10\"\n" % (m_prot_name[prot], float((xpos - 0.5)) + x) # -0.5 instead of +0.5, don't know why... (bug gnuplot?)
            x += 2

         if not first:
            xtics_str += ", "
         first = False
         if first_proc:
            first_proc = False
            xpos_first = xpos
         xtics_str += "\"%d\" %f -1" % (i, float(xpos - 0.5 + len(joint_protocols)))
         xpos_last = xpos
         # 2 bars per protocol (direct / coherence) + 1 empty slot
         xpos += 1 + len(joint_protocols) * 2
   app_name_xpos = float((xpos_first + xpos_last)) / 2
   app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
   xpos += 1
xtics_str += ")"

plot_str = "newhistogram \"\""
n = 1
for cost_metric in direct_cost_metrics + coherence_cost_metrics:
   plot_str += ", \\\n    " + "'" + data_cost_filename + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[cost_metric] + "\""
   n += 1

ylabel_str = "Coherence Cost and Direct Requests Cost,\\nNormalized per Application for each Number of Processors"
content = template % dict(svg_name = os.path.join(graph_dir, 'joint_rel_cost'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = prot_labels)

create_file(gp_cost_filename, content)

# Creating the data file
# Values are normalized by application, w.r.t. the number of requests for a given number of procs
content = "#"
for cost_metric in direct_cost_metrics:
   content += cost_metric
   content += ' ' + ' ' * (15 - len(cost_metric))
for cost_metric in coherence_cost_metrics:
   content += cost_metric
   content += ' ' + ' ' * (15 - len(cost_metric))
content += "\n"
for app in apps:
   if app != apps[0]:
      # all-zero separator row between applications
      for j in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
         content += "%-15f" % 0.0
      content += "\n"
   for i in nb_procs:
      if i > 4:
         for prot in joint_protocols:
            # first bar: direct costs; second bar: coherence costs
            for cost_metric in direct_cost_metrics:
               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[joint_protocols[0]][app][i]['direct_cost'])
            for cost_metric in coherence_cost_metrics:
               content += "%-15f" % 0.0
            content += "\n"
            for cost_metric in direct_cost_metrics:
               content += "%-15f" % 0.0
            for cost_metric in coherence_cost_metrics:
               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[joint_protocols[0]][app][i]['direct_cost'])
            content += "\n"
         if i != nb_procs[-1]:
            # empty slot between processor counts
            for j in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
               content += "%-15f" % 0.0
            content += "\n"

create_file(data_cost_filename, content)
# Calling gnuplot
print "gnuplot", gp_cost_filename
subprocess.call([ 'gnuplot', gp_cost_filename ])
701
702
703
#################################################################################
### Graph 9 : One metric, all joint protocols side by side                    ###
#################################################################################

# Histogram of a single hard-coded metric ('total_write' below), one bar per
# protocol in joint_protocols, for every (application, nb_procs) pair.

data_metric_filename = os.path.join(scripts_path, gen_dir, 'single_metric.dat')
gp_metric_filename   = os.path.join(scripts_path, gen_dir, 'single_metric.gp')
   
metric = 'total_write'
 
# Creating the gp file
template_file = open(stacked_tmpl, 'r')
template = template_file.read()
   
xtics_str = "("
first = True
xpos = 0 # successive x position of the center of the first bar in a application
app_labels = ""
prot_labels = ""
for num_appli in range(0, len(apps)):
   first_proc = True
   for i in nb_procs:
      x = 0 # local var for computing position of protocol names
      #for prot in joint_protocols:
         #prot_labels += "set label \"%s\" at first %f, character 2 center font \"Times,10\"\n" % (m_prot_name[prot], float((xpos - 0.5)) + x) # -0.5 instead of +0.5, don't know why... (bug gnuplot?)
         #x += 1

      if not first:
         xtics_str += ", "
      first = False
      if first_proc:
         first_proc = False
         xpos_first = xpos
      xtics_str += "\"%d\" %f -1" % (i, float(xpos - 0.5 + len(joint_protocols)))
      xpos_last = xpos
      # one bar per protocol + 1 empty slot
      xpos += 1 + len(joint_protocols)
   app_name_xpos = float((xpos_first + xpos_last)) / 2
   app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
   xpos += 1
xtics_str += ")"

n = 1
plot_str = "newhistogram \"\""
for prot in joint_protocols:
   plot_str += ", \\\n    " + "'" + data_metric_filename + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[metric] + " for " + m_prot_name[prot] + "\""
   n += 1

ylabel_str = "%(m)s" % dict(m = m_metric_name[metric])
content = template % dict(svg_name = os.path.join(graph_dir, 'single_metric'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = prot_labels)

create_file(gp_metric_filename, content)

# Creating the data file
# Each row carries the value in its own protocol's column and 0 elsewhere,
# so that consecutive rows render as side-by-side bars.
content = "#" + metric
content += "\n"
for app in apps:
   if app != apps[0]:
      # all-zero separator rows between applications
      for prot in joint_protocols:
         for p in joint_protocols:
            content += "%-15f " % 0.0
         content += "\n"
   for i in nb_procs:
      for prot in joint_protocols:
         for p in joint_protocols:
            if p != prot:
               content += "%-15f " % 0
            else:
               content += "%-15f " % (float(metrics_val[prot][app][i][metric]))
         content += "\n"
      if i != nb_procs[-1]:
         # empty slot between processor counts
         for p in joint_protocols:
            content += "%-15f " % 0.0
         content += "\n"

create_file(data_metric_filename, content)
# Calling gnuplot
print "gnuplot", gp_metric_filename
subprocess.call([ 'gnuplot', gp_metric_filename ])
782
783
784
785
Note: See TracBrowser for help on using the repository browser.