source: trunk/platforms/tsar_generic_xbar/scripts/create_graphs.py @ 765

Last change on this file since 765 was 749, checked in by meunier, 10 years ago
  • Updating the tsar_generic_xbar topcell so that the -NCYCLES option is considered even when debug is deactivated
  • Updating the simulation scripts to reflect benchmark evolutions
  • Property svn:executable set to *
File size: 18.9 KB
Line 
1#!/usr/bin/python
2
3import subprocess
4import os
5import re
6
7
8
# Benchmarks and platform sizes to process
apps = [ 'histogram', 'mandel', 'filter', 'radix', 'radix_ga', 'fft', 'fft_ga', 'filt_ga', 'kmeans', 'pca', 'lu' ]
nb_procs = [ 1, 4, 8, 16, 32, 64, 128, 256 ]

top_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
scripts_path = os.path.join(top_path, 'scripts')
counter_defs_name = os.path.join(scripts_path, "counter_defs.py")

# Load the counter definitions into the module namespace (all_metrics, all_tags,
# m_metric_tag, m_metric_id, m_metric_name, m_app_name, colors, ...).
# execfile() is the idiomatic Python 2 form and, unlike exec(file(...)),
# does not leak an open file handle.
execfile(counter_defs_name)

gen_dir = 'generated'       # generated .dat / .gp files
graph_dir = 'graph'         # output graphs
template_dir = 'templates'  # gnuplot templates
data_dir = 'data'           # input simulation logs

log_init_name = 'log_init_'
log_term_name = 'log_term_'

coherence_tmpl = os.path.join(scripts_path, template_dir, 'coherence_template.gp') # 1 graph per appli
speedup_tmpl   = os.path.join(scripts_path, template_dir, 'speedup_template.gp')
metric_tmpl    = os.path.join(scripts_path, template_dir, 'metric_template.gp') # 1 graph per metric
stacked_tmpl   = os.path.join(scripts_path, template_dir, 'stacked_template.gp')
30
31
32
def create_file(name, content):
   """Write *content* to the file *name*, truncating any previous content.

   Fixes: the original shadowed the builtin 'file' and did not use a
   context manager, so the handle leaked if the write raised.
   """
   with open(name, 'w') as out:
      out.write(content)
37   
def is_numeric(s):
   """Tell whether the string *s* parses as a floating-point number."""
   try:
      float(s)
   except ValueError:
      return False
   return True
44
def get_x_y(nb_procs):
   """Return the (x, y) mesh dimensions for a platform of *nb_procs*
   processors, assuming 4 processors per cluster.

   x and y are doubled alternately (x first) until x * y * 4 >= nb_procs.
   """
   x = y = 1
   grow_x = True
   while x * y * 4 < nb_procs:
      if grow_x:
         x *= 2
      else:
         y *= 2
      grow_x = not grow_x
   return x, y
56
57
58
# Build the reverse mapping tag -> metric (m_metric_id) from m_metric_tag,
# keeping only tags that belong to the known tag set.
for metric in all_metrics:
   tag = m_metric_tag[metric]
   if tag in all_tags:
      m_metric_id[tag] = metric
65
66
# We start by processing all the log files
# Term files are processed for exec time only
# Init files are processed for all metrics
exec_time = {}   # exec_time[app][nb_procs] -> parallel compute time (cycles)
metrics_val = {} # metrics_val[app][nb_procs][metric] -> accumulated counter value
# Counter lines are tagged "[0XY]"; compile the pattern once instead of per line
tag_pattern = re.compile('\[0[0-9][0-9]\]')
for app in apps:
   exec_time[app] = {}
   metrics_val[app] = {}
   for i in nb_procs:
      metrics_val[app][i] = {}
      log_init_file = os.path.join(scripts_path, data_dir, app + '_' + log_init_name + str(i))
      log_term_file = os.path.join(scripts_path, data_dir, app + '_' + log_term_name + str(i))

      # Term file: extract the execution time from the [PARALLEL_COMPUTE] line
      # (with-statement fixes the original's unclosed file handles)
      with open(log_term_file, 'r') as lines:
         for line in lines:
            tokens = line[:-1].split()
            if len(tokens) > 0 and tokens[0] == "[PARALLEL_COMPUTE]":
               exec_time[app][i] = int(tokens[-1])

      # Init file: accumulate the value of every tagged counter
      with open(log_init_file, 'r') as lines:
         for line in lines:
            tokens = line[:-1].split()
            if len(tokens) == 0:
               continue
            tag = tokens[0]
            value = tokens[-1]
            if tag_pattern.match(tag):
               metric = m_metric_id[tag]
               if (metric not in metrics_val[app][i] or tag == "[000]" or tag == "[001]"):
                  # We don't add cycles of all Memcaches (they must be the same for all)
                  metrics_val[app][i][metric] = int(value)
               else:
                  metrics_val[app][i][metric] += int(value)
103           
# 2nd pass: fill the derived fields, e.g. nb_total_updates
for app in apps:
   for i in nb_procs:
      x, y = get_x_y(i)
      vals = metrics_val[app][i]  # alias, mutations go to the shared dict
      # total_<op> = local_<op> + remote_<op> for every tracked operation
      for op in ('read', 'write', 'll', 'sc', 'cas', 'update', 'm_inv', 'cleanup'):
         vals['total_' + op] = vals['local_' + op] + vals['remote_' + op]
      vals['total_direct']   = vals['total_read'] + vals['total_write']
      vals['direct_cost']    = vals['read_cost'] + vals['write_cost']
      # A broadcast reaches every cluster but the sender
      vals['broadcast_cost'] = vals['broadcast'] * (x * y - 1)
      # test to patch a bug in mem_cache: the broadcast counter can be
      # smaller than write_broadcast, which would give a negative value
      if vals['broadcast'] < vals['write_broadcast']:
         vals['nonwrite_broadcast'] = 0
      else:
         vals['nonwrite_broadcast'] = vals['broadcast'] - vals['write_broadcast']

      vals['total_stacked'] = 0
      for stacked_metric in stacked_metrics:
         vals['total_stacked'] += vals[stacked_metric]
128
129           
130print "mkdir -p", os.path.join(scripts_path, gen_dir)
131subprocess.call([ 'mkdir', '-p', os.path.join(scripts_path, gen_dir) ])
132
133print "mkdir -p", os.path.join(scripts_path, graph_dir)
134subprocess.call([ 'mkdir', '-p', os.path.join(scripts_path, graph_dir) ])
135
136############################################################
137### Graph 1 : Coherence traffic Cost per application     ###
138############################################################
139
140for app in apps:
141   data_coherence_name = os.path.join(scripts_path, gen_dir, app + '_coherence.dat')
142   gp_coherence_name   = os.path.join(scripts_path, gen_dir, app + '_coherence.gp')
143
144   # Creating the data file
145   width = 15
146   content = ""
147   
148   for metric in [ '#nb_procs' ] + grouped_metrics:
149      content += metric + " "
150      nb_spaces = width - len(metric)
151      content += nb_spaces * ' '
152   content += "\n"
153
154   for i in nb_procs:
155      content += "%-15d " % i
156      for metric in grouped_metrics:
157         val = float(metrics_val[app][i][metric]) / exec_time[app][i] * 1000
158         content += "%-15f " % val
159      content += "\n"
160   
161   create_file(data_coherence_name, content)
162
163   # Creating the gp file
164   template_file = open(coherence_tmpl, 'r')
165   template = template_file.read()
166   
167   plot_str = ""
168   col = 2
169   for metric in grouped_metrics:
170      if metric != grouped_metrics[0]:
171         plot_str += ", \\\n    "
172      plot_str += "\"" + data_coherence_name + "\" using ($1):($" + str(col) + ") lc rgb " + colors[col - 2] + " title \"" + m_metric_name[metric] + "\" with linespoint"
173      col += 1
174   gp_commands = template % dict(app_name = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, app + '_coherence'))
175   
176   create_file(gp_coherence_name, gp_commands)
177   
178   # Calling gnuplot
179   print "gnuplot", gp_coherence_name
180   subprocess.call([ 'gnuplot', gp_coherence_name ])
181
182
183############################################################
184### Graph 2 : Speedup per Application                    ###
185############################################################
186
187for app in apps:
188
189   data_speedup_name   = os.path.join(scripts_path, gen_dir, app + '_speedup.dat')
190   gp_speedup_name     = os.path.join(scripts_path, gen_dir, app + '_speedup.gp')
191   
192   # Creating data file
193   width = 15
194   content = "#nb_procs"
195   nb_spaces = width - len(content)
196   content += nb_spaces * ' '
197   content += "speedup\n"
198
199   for i in nb_procs:
200      content += "%-15d " % i
201      val = exec_time[app][i]
202      content += "%-15f\n" % (exec_time[app][1] / float(val))
203
204   plot_str = "\"" + data_speedup_name + "\" using ($1):($2) lc rgb \"#654387\" title \"Speedup\" with linespoint"
205   
206   create_file(data_speedup_name, content)
207   
208   # Creating the gp file
209   template_file = open(speedup_tmpl, 'r')
210   template = template_file.read()
211   
212   gp_commands = template % dict(appli = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, app + '_speedup'))
213   
214   create_file(gp_speedup_name, gp_commands)
215   
216   # Calling gnuplot
217   print "gnuplot", gp_speedup_name
218   subprocess.call([ 'gnuplot', gp_speedup_name ])
219
220
221############################################################
222### Graph 3 : All speedups on the same Graph             ###
223############################################################
224
225# This graph uses the same template as the graph 2
226
227data_speedup_name = os.path.join(scripts_path, gen_dir, 'all_speedup.dat')
228gp_speedup_name   = os.path.join(scripts_path, gen_dir, 'all_speedup.gp')
229
230# Creating data file
231width = 15
232content = "#nb_procs"
233nb_spaces = width - len(content)
234content += (nb_spaces + 1) * ' '
235for app in apps:
236   content += app + " "
237   content += (width - len(app)) * " "
238content += "\n"
239
240for i in nb_procs:
241   content += "%-15d " % i
242   for app in apps:
243      val = exec_time[app][i]
244      content += "%-15f " % (exec_time[app][1] / float(val))
245   content += "\n"
246
247create_file(data_speedup_name, content)
248
249# Creating gp file
250template_file = open(speedup_tmpl, 'r')
251template = template_file.read()
252
253plot_str = ""
254col = 2
255for app in apps:
256   if app != apps[0]:
257      plot_str += ", \\\n     "
258   plot_str += "\"" + data_speedup_name + "\" using ($1):($" + str(col) + ") lc rgb %s title \"" % (colors[col - 2])  + m_app_name[app] + "\" with linespoint"
259   col += 1
260
261gp_commands = template % dict(appli = "All Applications", nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, 'all_speedup'))
262   
263create_file(gp_speedup_name, gp_commands)
264   
265# Calling gnuplot
266print "gnuplot", gp_speedup_name
267subprocess.call([ 'gnuplot', gp_speedup_name ])
268
269
270############################################################
271### Graph 4 : Graph per metric                           ###
272############################################################
273
274# The following section creates the graphs grouped by measure (e.g. #broadcasts)
275# The template file cannot be easily created otherwise it would not be generic
276# in many ways. This is why it is mainly created here.
277# Graphs are created for metric in the "individual_metrics" list
278
279for metric in individual_metrics:
280   data_metric_name = os.path.join(scripts_path, gen_dir, metric + '.dat')
281   gp_metric_name   = os.path.join(scripts_path, gen_dir, metric + '.gp')
282
283   # Creating the gp file
284   # Setting xtics, i.e. number of procs for each application
285   xtics_str = "("
286   first = True
287   xpos = 1
288   app_labels = ""
289   for num_appli in range(0, len(apps)):
290      for i in nb_procs:
291         if not first:
292            xtics_str += ", "
293         first = False
294         if i == nb_procs[0]:
295            xpos_first = xpos
296         xtics_str += "\"%d\" %.1f" % (i, xpos)
297         xpos_last = xpos
298         xpos += 1.5
299      xpos += 0.5
300      app_name_xpos = float((xpos_first + xpos_last)) / 2
301      app_labels += "set label \"%s\" at first %f,character 1 center font\"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
302   xtics_str += ")"
303
304   xmax_val = xpos + 0.5
305
306   # Writing the lines of "plot"
307   plot_str = ""
308   xpos = 0
309   first = True
310   column = 2
311   for i in range(0, len(nb_procs)):
312      if not first:
313         plot_str += ", \\\n    "
314      first = False
315      plot_str += "\"%s\" using ($1+%.1f):($%d) lc rgb %s notitle with boxes" % (data_metric_name, xpos, column, colors[i])
316      column += 1
317      xpos += 1.5
318
319   template_file = open(metric_tmpl, 'r')
320   template = template_file.read()
321
322   gp_commands = template % dict(xtics_str = xtics_str, app_labels = app_labels, ylabel_str = m_metric_name[metric], norm_factor_str = m_norm_factor_name[m_metric_norm[metric]], xmax_val = xmax_val, plot_str = plot_str, svg_name = os.path.join(graph_dir, metric))
323
324   create_file(gp_metric_name, gp_commands)
325   
326   # Creating the data file
327   width = 15
328   content = "#x_pos"
329   nb_spaces = width - len(content)
330   content += nb_spaces * ' '
331   for i in nb_procs:
332      content += "%-15d" % i
333   content += "\n"
334
335   x_pos = 1
336   for app in apps:
337      # Computation of x_pos
338      content += "%-15f" % x_pos
339      x_pos += len(nb_procs) * 1.5 + 0.5
340      for i in nb_procs:
341         if m_metric_norm[metric] == "N":
342            content += "%-15d" % (metrics_val[app][i][metric])
343         elif m_metric_norm[metric] == "P":
344            content += "%-15f" % (float(metrics_val[app][i][metric]) / i)
345         elif m_metric_norm[metric] == "C":
346            content += "%-15f" % (float(metrics_val[app][i][metric]) / exec_time[app][i] * 1000)
347         elif m_metric_norm[metric] == "W":
348            content += "%-15f" % (float(metrics_val[app][i][metric]) / float(metrics_val[app][i]['total_write'])) # Number of writes
349         elif m_metric_norm[metric] == "R":
350            content += "%-15f" % (float(metrics_val[app][i][metric]) / float(metrics_val[app][i]['total_read'])) # Number of reads
351         elif m_metric_norm[metric] == "D":
352            content += "%-15f" % (float(metrics_val[app][i][metric]) / float(metrics_val[app][i]['total_direct'])) # Number of req.
353         elif is_numeric(m_metric_norm[metric]):
354            content += "%-15f" % (float(metrics_val[app][i][metric]) / float(metrics_val[app][int(m_metric_norm[metric])][metric]))
355         else:
356            assert(False)
357
358      app_name = m_app_name[app]
359      content += "#" + app_name + "\n"
360   
361   create_file(data_metric_name, content)
362
363   # Calling gnuplot
364   print "gnuplot", gp_metric_name
365   subprocess.call([ 'gnuplot', gp_metric_name ])
366
367
368############################################################
369### Graph 5 : Stacked histogram with counters            ###
370############################################################
371
372# The following section creates a stacked histogram containing
373# the metrics in the "stacked_metric" list
374# It is normalized per application w.r.t the values on 256 procs
375
376data_stacked_name = os.path.join(scripts_path, gen_dir, 'stacked.dat')
377gp_stacked_name   = os.path.join(scripts_path, gen_dir, 'stacked.gp')
378
379norm_factor_value = 256
380
381# Creating the gp file
382template_file = open(stacked_tmpl, 'r')
383template = template_file.read()
384
385xtics_str = "("
386first = True
387xpos = 1
388app_labels = ""
389for num_appli in range(0, len(apps)):
390   for i in nb_procs:
391      if not first:
392         xtics_str += ", "
393      first = False
394      if i == nb_procs[0]:
395         xpos_first = xpos
396      xtics_str += "\"%d\" %d -1" % (i, xpos)
397      xpos_last = xpos
398      xpos += 1
399   xpos += 1
400   app_name_xpos = float((xpos_first + xpos_last)) / 2
401   app_labels += "set label \"%s\" at first %f,character 1 center font\"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
402xtics_str += ")"
403
404plot_str = "newhistogram \"\""
405n = 1
406for stacked_metric in stacked_metrics:
407   plot_str += ", \\\n    " + "'" + data_stacked_name + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[stacked_metric] + "\""
408   n += 1
409
410ylabel_str = "Breakdown of Coherence Traffic Normalized w.r.t. \\nthe Values on %d Processors" % norm_factor_value
411content = template % dict(svg_name = os.path.join(graph_dir, 'stacked'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels)
412
413create_file(gp_stacked_name, content)
414
415# Creating the data file
416# Values are normalized by application, w.r.t. the number of requests for a given number of procs
417content = "#"
418for stacked_metric in stacked_metrics:
419   content += stacked_metric
420   content += ' ' + ' ' * (15 - len(stacked_metric))
421content += "\n"
422for app in apps:
423   if app != apps[0]:
424      for i in range(0, len(stacked_metrics)):
425         content += "%-15f" % 0.0
426      content += "\n"
427   for i in nb_procs:
428      for stacked_metric in stacked_metrics:
429         content += "%-15f" % (float(metrics_val[app][i][stacked_metric]) / metrics_val[app][norm_factor_value]['total_stacked'])
430      content += "\n"
431
432create_file(data_stacked_name, content)
433# Calling gnuplot
434print "gnuplot", gp_stacked_name
435subprocess.call([ 'gnuplot', gp_stacked_name ])
436
437
438
439#################################################################################
440### Graph 6 : Stacked histogram with coherence cost compared to r/w cost      ###
441#################################################################################
442
443# The following section creates pairs of stacked histograms, normalized w.r.t. the first one.
444# The first one contains the cost of reads and writes, the second contains the cost
445# of m_inv, m_up and broadcasts (extrapolated)
446
447data_cost_filename = os.path.join(scripts_path, gen_dir, 'relative_cost.dat')
448gp_cost_filename   = os.path.join(scripts_path, gen_dir, 'relative_cost.gp')
449
450direct_cost_metrics = [ 'read_cost', 'write_cost' ]
451coherence_cost_metrics = ['update_cost', 'm_inv_cost', 'broadcast_cost' ]
452
453# Creating the gp file
454template_file = open(stacked_tmpl, 'r')
455template = template_file.read()
456
457xtics_str = "("
458first = True
459xpos = 1.5
460app_labels = ""
461for num_appli in range(0, len(apps)):
462   first_proc = True
463   for i in nb_procs:
464      if i > 4:
465         if not first:
466            xtics_str += ", "
467         first = False
468         if first_proc:
469            first_proc = False
470            xpos_first = xpos
471         xtics_str += "\"%d\" %f -1" % (i, xpos)
472         xpos_last = xpos
473         xpos += 3
474   app_name_xpos = float((xpos_first + xpos_last)) / 2
475   app_labels += "set label \"%s\" at first %f,character 1 center font\"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
476   xpos += 1
477xtics_str += ")"
478
479plot_str = "newhistogram \"\""
480n = 1
481for cost_metric in direct_cost_metrics + coherence_cost_metrics:
482   plot_str += ", \\\n    " + "'" + data_cost_filename + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[cost_metric] + "\""
483   n += 1
484
485ylabel_str = "Coherence Cost Compared to Direct Requests Cost,\\nNormalized per Application for each Number of Processors"
486content = template % dict(svg_name = os.path.join(graph_dir, 'rel_cost'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels)
487
488create_file(gp_cost_filename, content)
489
490# Creating the data file
491# Values are normalized by application, w.r.t. the number of requests for a given number of procs
492content = "#"
493for cost_metric in direct_cost_metrics:
494   content += cost_metric
495   content += ' ' + ' ' * (15 - len(cost_metric))
496for cost_metric in coherence_cost_metrics:
497   content += cost_metric
498   content += ' ' + ' ' * (15 - len(cost_metric))
499content += "\n"
500for app in apps:
501   if app != apps[0]:
502      for i in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
503         content += "%-15f" % 0.0
504      content += "\n"
505   for i in nb_procs:
506      if i > 4:
507         for cost_metric in direct_cost_metrics:
508            content += "%-15f" % (float(metrics_val[app][i][cost_metric]) / metrics_val[app][i]['direct_cost'])
509         for cost_metric in coherence_cost_metrics:
510            content += "%-15f" % 0.0
511         content += "\n"
512         for cost_metric in direct_cost_metrics:
513            content += "%-15f" % 0.0
514         for cost_metric in coherence_cost_metrics:
515            content += "%-15f" % (float(metrics_val[app][i][cost_metric]) / metrics_val[app][i]['direct_cost'])
516         content += "\n"
517         for i in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
518            content += "%-15f" % 0.0
519         content += "\n"
520
521create_file(data_cost_filename, content)
522# Calling gnuplot
523print "gnuplot", gp_cost_filename
524subprocess.call([ 'gnuplot', gp_cost_filename ])
525
526
Note: See TracBrowser for help on using the repository browser.