#!/usr/bin/python
# -*- coding: utf-8 -*-

# Note: this code should stay compatible with
# Python 2, as it will execute on computation servers

# FIXME?
# When deactivating assert for perf eval, exec times are slower...

from __future__ import print_function
import os
import sys
import re
import filecmp
import shutil
import random

from common import *

# Directories and files
top_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), ".."))
scripts_path = os.path.join(top_path, "scripts")
pyconf_file = os.path.join(scripts_path, "config.py")
config_file = os.path.join(top_path, "include/config.h")
images_dir = os.path.join(top_path, "../../images")
binary_file = os.path.join(top_path, "appli.elf")

base_output_dir = "output" # For output images and stat files (ref + exec)
base_logs_dir = "logs"     # Execution log, i.e. what is printed on screen
base_data_dir = "data"     # For the extracted timing information



#images = ['boulons.pgm', 'cadastre.pgm', 'alea1.pgm', 'alea2.pgm', 'alea3.pgm']
images = ['cadastre.pgm']

# list() keeps images indexable under Python 3 as well (map() returns an iterator there)
images = list(map(lambda x: os.path.join(images_dir, x), images))

# Parameters
# - With eval_perf, num_internal_runs should be used, as this mitigates the cost of the extra
#   run required to obtain the correct "ne" value (number of labels); only the times from the
#   last application run are taken
# - With check_results, num_app_runs should be used, so that the number of checks equals the
#   number of runs, because only one check per application run is performed
num_app_runs = 1       # Number of times the application is launched per configuration
num_internal_runs = 10 # Number of times the image is processed inside the application
check_results = False
eval_perf = True
use_valgrind = False
use_rand_images = True
threads = [1, 4, 16, 64]
use_dsk = True
# Using dsk stores the generated random images; otherwise they are re-generated at each run to save disk space
granularities = [1, 4, 16]
img_size = 2048

# Configurations
configs = [
    #{'SLOW':'1', 'FAST':'0', 'FEATURES':'0', 'PARMERGE':'0', 'ARSP':'0'},
    {'SLOW':'0', 'FAST':'1', 'FEATURES':'0', 'PARMERGE':'0', 'ARSP':'0'},
    #{'SLOW':'1', 'FAST':'0', 'FEATURES':'1', 'PARMERGE':'0', 'ARSP':'0'},
    {'SLOW':'0', 'FAST':'1', 'FEATURES':'1', 'PARMERGE':'0', 'ARSP':'0'},
    {'SLOW':'0', 'FAST':'1', 'FEATURES':'0', 'PARMERGE':'1', 'ARSP':'0'},
    {'SLOW':'0', 'FAST':'1', 'FEATURES':'1', 'PARMERGE':'1', 'ARSP':'0'},
    {'SLOW':'0', 'FAST':'1', 'FEATURES':'0', 'PARMERGE':'1', 'ARSP':'1'},
    {'SLOW':'0', 'FAST':'1', 'FEATURES':'1', 'PARMERGE':'1', 'ARSP':'1'},
]
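# Each entry maps C preprocessor symbols to the values that update_config_file() below
# writes into include/config.h as "#define KEY VALUE" lines before the application is rebuilt.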


# Other parameters which shouldn't be changed
rand_seed = 7

check_pyconf_file(pyconf_file)

# Loading the config file; open().read() works with both Python 2 and 3, unlike file()
exec(open(pyconf_file).read())
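# config.py is expected to define dsk_dir (where outputs and generated images are stored)
# and rand_img_dir (directory of the random images); their presence is checked below.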

if use_dsk:
    try:
        dsk_dir  # raises NameError if not defined in config.py
        if not os.path.exists(dsk_dir):
            print("mkdir %s" % dsk_dir)
            os.mkdir(dsk_dir)
    except NameError:
        print("*** Warning: variable dsk_dir is not defined in file %s; using current directory for storing output files" % (short_path(pyconf_file)))
        use_dsk = False
    except OSError:
        print("*** Warning: Impossible to create directory %s; using current directory for storing output files" % (dsk_dir))
        use_dsk = False

if use_rand_images:
    try:
        rand_img_dir  # raises NameError if not defined in config.py
        if not os.path.exists(rand_img_dir):
            print("mkdir %s" % rand_img_dir)
            os.mkdir(rand_img_dir)
    except NameError:
        print("*** Error: variable rand_img_dir, containing the path to the directory of the random images (either pre-existing or to be generated), is not defined in file %s" % (short_path(pyconf_file)))
        sys.exit(1)
    except OSError:
        print("*** Error: Impossible to create directory %s" % (rand_img_dir))
        sys.exit(1)


# Updating output directories
if use_dsk:
    output_dir = os.path.join(dsk_dir, base_output_dir)
    logs_dir = os.path.join(dsk_dir, base_logs_dir)
    data_dir = os.path.join(dsk_dir, base_data_dir)
else:
    output_dir = os.path.join(scripts_path, base_output_dir)
    logs_dir = os.path.join(scripts_path, base_logs_dir)
    data_dir = os.path.join(scripts_path, base_data_dir)


if check_results and eval_perf:
    print("*** Warning: check_results and eval_perf modes are both set\n")
if eval_perf and use_valgrind:
    print("*** Warning: using valgrind while eval_perf mode is set\n")
if eval_perf and num_app_runs != 1:
    print("*** Warning: using eval_perf with num_app_runs != 1\n")
if check_results and num_internal_runs != 1:
    print("*** Warning: using check_results with num_internal_runs != 1\n")



def update_config_file(config):
    """Rewrite include/config.h so that it matches the given configuration."""
    if os.path.isfile(config_file):
        print("# Updating file %s" % (config_file))
        f = open(config_file, "r")
        lines = f.readlines()
        f.close()

        f = open(config_file, "w")

        for line in lines:
            line_with_key = False
            for key in config.keys():
                if "#define %s" % (key) in line:
                    f.write("#define %s %s\n" % (key, config[key]))
                    line_with_key = True
                    break
            if not line_with_key:
                if "#define MCA_VERBOSE_LEVEL" in line:
                    if eval_perf:
                        verb_level = 1
                    else:
                        verb_level = 2
                    f.write("#define MCA_VERBOSE_LEVEL %d\n" % verb_level)
                else:
                    f.write(line)

        f.close()
    else:
        print("# Creating file %s" % (config_file))
        f = open(config_file, "w")
        f.write("\n")
        for key in config.keys():
            f.write("#define %s %s\n" % (key, config[key]))
        f.write("\n")
        f.close()

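# For example, with the first enabled configuration above, include/config.h ends up containing
# lines such as (in no particular order):
#   #define SLOW 0
#   #define FAST 1
#   #define FEATURES 0
#   #define PARMERGE 0
#   #define ARSP 0
# plus "#define MCA_VERBOSE_LEVEL 1" (or 2 when eval_perf is off) if that define was already present.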



if not os.path.exists(output_dir):
    my_mkdir(output_dir)

if not os.path.exists(logs_dir):
    my_mkdir(logs_dir)

if not os.path.exists(data_dir):
    my_mkdir(data_dir)



stat_array = {}
perf_array = {}

for config in configs:
    # items() works with both Python 2 and 3 (iteritems() exists only in Python 2)
    fconfig = frozenset(config.items())
    perf_array[fconfig] = {}
    update_config_file(config)
    features = config['FEATURES'] == '1'

    # Compile application
    my_chdir(top_path)
    cmd = ['make']
    #if eval_perf:
    #    cmd.extend(['IGNORE_ASSERT=true'])
    print_and_call(cmd)
    my_chdir(scripts_path)

    for granularity in granularities:
        perf_array[fconfig][granularity] = {}
        img_idx = 0
        # With random images, 101 densities (0% to 100%) are processed; otherwise the explicit image list is used
        while (not use_rand_images and img_idx != len(images)) or (use_rand_images and img_idx != 101):
            # Compute image and stat filenames
            if use_rand_images:
                random_img_file = get_random_img_file(img_idx, img_size, img_size, granularity, rand_seed)
                random_img_file = os.path.join(rand_img_dir, random_img_file)
                if not os.path.isfile(random_img_file):
                    # Generate the random image if it does not exist
                    print("# Generating random image %s with granularity = %d and density = %d%%" % (random_img_file, granularity, img_idx))
                    gen_random_image(random_img_file, img_size, img_size, granularity, float(img_idx) / 100, rand_seed)
                image = random_img_file
            else:
                image = images[img_idx]
            img_basename = os.path.splitext(os.path.basename(image))[0]
            perf_array[fconfig][granularity][img_basename] = {}
            ref_bmpfile = os.path.join(output_dir, os.path.splitext(os.path.basename(image))[0] + "_ref.bmp")
            ref_statfile = os.path.join(output_dir, os.path.splitext(os.path.basename(image))[0] + "_ref.txt")

            for nthreads in threads:
                perf_array[fconfig][granularity][img_basename][nthreads] = {}
                for run in range(num_app_runs):
                    if not os.path.exists(ref_bmpfile):
                        # Generating the reference file if it does not exist
                        bmpfile = ref_bmpfile
                    else:
                        bmpfile = os.path.join(output_dir, os.path.splitext(os.path.basename(image))[0] + ".bmp")
                        if os.path.exists(bmpfile):
                            os.remove(bmpfile)

                    if not os.path.exists(ref_statfile):
                        statfile = ref_statfile
                    else:
                        statfile = os.path.join(output_dir, os.path.splitext(os.path.basename(image))[0] + ".txt")
                        if os.path.exists(statfile):
                            os.remove(statfile)

                    cmd = []
                    if use_valgrind:
                        cmd.append('valgrind')

                    cmd.extend([short_path(binary_file), '-n', str(nthreads), '-i', short_path(image)])

                    if num_internal_runs > 1:
                        cmd.extend(['-r', str(num_internal_runs)])

                    if check_results:
                        cmd.extend(['-o', short_path(bmpfile), '-g'])

                    if check_results and features:
                        cmd.append('-d')

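                    # The command assembled above is of the form:
                    #   [valgrind] appli.elf -n <nthreads> -i <image> [-r <runs>] [-o <out.bmp> -g] [-d]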
                    config_keys = config.keys()
                    logfile = get_filename(logs_dir, nthreads, config, features, img_basename)
                    output = print_and_popen(cmd, logfile)
                    outlines = output.splitlines()

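                    # Timing lines in the output are assumed to look like
                    # "[THREAD_STEP_<i>] ... <value>", with the measured value as the last token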
                    # If evaluating performance, get the timing measurements
                    # Only the last application run is considered
                    if eval_perf:
                        pattern = re.compile(r'\[THREAD_STEP_([0-9]+)\]')
                        for line in outlines:
                            tokens = line.split()
                            if len(tokens) == 0:
                                continue
                            tag = tokens[0]
                            match = pattern.match(tag)
                            if match:
                                step = match.group(1)
                                value = tokens[-1]
                                perf_array[fconfig][granularity][img_basename][nthreads][step] = int(value)


                    # Checking against reference output image
                    if check_results and bmpfile != ref_bmpfile:
                        print("diff %s %s" % (short_path(bmpfile), short_path(ref_bmpfile)))
                        if not filecmp.cmp(bmpfile, ref_bmpfile):
                            print("*** Error: files %s and %s differ" % (short_path(bmpfile), short_path(ref_bmpfile)))
                            sys.exit(1)

                    # Checking for valgrind errors
                    if use_valgrind:
                        if "== ERROR SUMMARY: 0 errors from 0 contexts" not in output:
                            print("*** Error: Valgrind error")
                            sys.exit(1)
                        if "== All heap blocks were freed -- no leaks are possible" not in output:
                            print("*** Error: Valgrind detected a memory leak")
                            sys.exit(1)

                    # Extracting features for correctness verification
                    if check_results and features:
                        stat_array = {}
                        in_stats = False
                        index = 0
                        for line in outlines:
                            if "[STATS]" in line:
                                in_stats = True
                                continue
                            if "[/STATS]" in line:
                                in_stats = False
                                break
                            if in_stats:
                                tokens = line.split()
                                assert(len(tokens) == 8)
                                stat_array[index] = {}
                                for j in range(len(tokens)):
                                    stat_array[index][j] = tokens[j]
                                index += 1

                        # Dump the stat array into the stat file
                        statf = open(statfile, 'w')  # avoid shadowing the "file" builtin
                        for i in range(len(stat_array)):
                            for j in range(8): # 8 is the number of features per element
                                statf.write("%s " % stat_array[i][j])
                            statf.write("\n")
                        statf.close()

                        # Comparison to reference
                        if statfile != ref_statfile:
                            print("diff %s %s" % (short_path(statfile), short_path(ref_statfile)))
                            if not filecmp.cmp(statfile, ref_statfile):
                                print("*** Error: feature files %s and %s differ" % (short_path(statfile), short_path(ref_statfile)))
                                sys.exit(1)

                # End of the num_app_runs simulations
                if eval_perf:
                    datafile = get_filename(data_dir, nthreads, config, features, img_basename)
                    dataf = open(datafile, 'w')
                    for step in sorted(perf_array[fconfig][granularity][img_basename][nthreads].keys()):
                        # Average time for each step (taken from the last application run)
                        dataf.write("[STEP_%s] %d\n" % (step, perf_array[fconfig][granularity][img_basename][nthreads][step]))
                    dataf.close()

            img_idx += 1
        # end image list (101 random densities or explicit list)
    # end granularity
# end config