#!/usr/bin/env python
# SPDX-License-Identifier: BSD-3-Clause
# Copyright(c) 2016 Intel Corporation

#
# This script maps the set of pipelines found in the input configuration
# file (MASTER pipelines are ignored) to the set of cores provided as an
# input argument, and creates a configuration file for each mapping
# combination.
#

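#
# A minimal usage sketch (illustrative only; the flags are the ones defined
# by this script's argument parser, the input file name is a placeholder):
#
#   ./pipeline-to-core-mapping.py -i <input_config>.cfg -pc "s0c1,s0c2,s0c3"
#
# One output .cfg file is written per pipeline-to-core mapping combination,
# into a newly created directory named after the input file.
#
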
from __future__ import print_function
from collections import namedtuple
import argparse
import array
import errno
import itertools
import os
import re
import sys

# default values
enable_stage0_traceout = 1
enable_stage1_traceout = 1
enable_stage2_traceout = 1

enable_stage1_fileout = 1
enable_stage2_fileout = 1

Constants = namedtuple('Constants', ['MAX_CORES', 'MAX_PIPELINES'])
constants = Constants(16, 64)

# pattern for a physical core, e.g. "s0c1" (socket 0, core 1)
pattern_phycore = r'^(s|S)\d(c|C)[1-9][0-9]*$'
reg_phycore = re.compile(pattern_phycore)


def popcount(mask):
    return bin(mask).count("1")


def len2mask(length):
    if (length == 0):
        return 0

    if (length > 64):
        sys.exit('error: len2mask - length %i > 64. exiting' % length)

    return int('1' * length, 2)

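# Added note (illustration, not used by the script): len2mask(4) == 0b1111
# == 15, and popcount(len2mask(4)) == 4.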

def bitstring_write(n, n_bits):
    tmpstr = ""
    if (n_bits > 64):
        return tmpstr

    i = n_bits - 1
    while (i >= 0):
        cond = (n & (1 << i))
        if (cond):
            print('1', end='')
            tmpstr += '1'
        else:
            print('0', end='')
            tmpstr += '0'
        i -= 1
    return tmpstr

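# Added note (illustration): bitstring_write(5, 4) prints and returns "0101".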

class Cores0:

    def __init__(self):
        self.n_pipelines = 0


class Cores1:

    def __init__(self):
        self.pipelines = 0
        self.n_pipelines = 0


class Cores2:

    def __init__(self):
        self.pipelines = 0
        self.n_pipelines = 0
        self.counter = 0
        self.counter_max = 0
        self.bitpos = array.array(
            "L", itertools.repeat(0, constants.MAX_PIPELINES))


class Context0:

    def __init__(self):
        self.cores = [Cores0() for i in range(0, constants.MAX_CORES)]
        self.n_cores = 0
        self.n_pipelines = 0
        self.n_pipelines0 = 0
        self.pos = 0
        self.file_comment = ""
        self.ctx1 = None
        self.ctx2 = None

    def stage0_print(self):
        print('printing Context0 obj')
        print('c0.cores(n_pipelines) = [ ', end='')
        for cores_count in range(0, constants.MAX_CORES):
            print(self.cores[cores_count].n_pipelines, end=' ')
        print(']')
        print('c0.n_cores = %d' % self.n_cores)
        print('c0.n_pipelines = %d' % self.n_pipelines)
        print('c0.n_pipelines0 = %d' % self.n_pipelines0)
        print('c0.pos = %d' % self.pos)
        print('c0.file_comment = %s' % self.file_comment)
        if (self.ctx1 is not None):
            print('c0.ctx1 = ', end='')
            print(repr(self.ctx1))
        else:
            print('c0.ctx1 = None')

        if (self.ctx2 is not None):
            print('c0.ctx2 = ', end='')
            print(repr(self.ctx2))
        else:
            print('c0.ctx2 = None')

    def stage0_init(self, num_cores, num_pipelines, ctx1, ctx2):
        self.n_cores = num_cores
        self.n_pipelines = num_pipelines
        self.ctx1 = ctx1
        self.ctx2 = ctx2

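    # Added note: stage0 enumerates the ways to split n_pipelines into
    # non-increasing per-core pipeline counts over at most n_cores cores;
    # each such split is handed to stage1.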
    def stage0_process(self):
        # stage0 init
        self.cores[0].n_pipelines = self.n_pipelines
        self.n_pipelines0 = 0
        self.pos = 1

        while True:
            # go forward
            while True:
                if ((self.pos < self.n_cores) and (self.n_pipelines0 > 0)):
                    self.cores[self.pos].n_pipelines = min(
                        self.cores[self.pos - 1].n_pipelines,
                        self.n_pipelines0)
                    self.n_pipelines0 -= self.cores[self.pos].n_pipelines
                    self.pos += 1
                else:
                    break

            # check solution
            if (self.n_pipelines0 == 0):
                self.stage0_log()
                self.ctx1.stage1_init(self, self.ctx2)  # self is object c0
                self.ctx1.stage1_process()

            # go backward
            while True:
                if (self.pos == 0):
                    return

                self.pos -= 1
                if ((self.cores[self.pos].n_pipelines > 1) and
                        (self.pos != (self.n_cores - 1))):
                    break

                self.n_pipelines0 += self.cores[self.pos].n_pipelines
                self.cores[self.pos].n_pipelines = 0

            # rearm
            self.cores[self.pos].n_pipelines -= 1
            self.n_pipelines0 += 1
            self.pos += 1

    def stage0_log(self):
        tmp_file_comment = ""
        if(enable_stage0_traceout != 1):
            return

        print('STAGE0: ', end='')
        tmp_file_comment += 'STAGE0: '
        for cores_count in range(0, self.n_cores):
            print('C%d = %d\t'
                  % (cores_count,
                      self.cores[cores_count].n_pipelines), end='')
            tmp_file_comment += "C{} = {}\t".format(
                cores_count, self.cores[cores_count].n_pipelines)
        # end for
        print('')
        self.ctx1.stage0_file_comment = tmp_file_comment
        self.ctx2.stage0_file_comment = tmp_file_comment


class Context1:
    _fileTrace = None

    def __init__(self):
        self.cores = [Cores1() for i in range(constants.MAX_CORES)]
        self.n_cores = 0
        self.n_pipelines = 0
        self.pos = 0
        self.stage0_file_comment = ""
        self.stage1_file_comment = ""

        self.ctx2 = None
        self.arr_pipelines2cores = []

    def stage1_reset(self):
        for i in range(constants.MAX_CORES):
            self.cores[i].pipelines = 0
            self.cores[i].n_pipelines = 0

        self.n_cores = 0
        self.n_pipelines = 0
        self.pos = 0
        self.ctx2 = None
        # clear list
        del self.arr_pipelines2cores[:]

    def stage1_print(self):
        print('printing Context1 obj')
        print('ctx1.cores(pipelines,n_pipelines) = [ ', end='')
        for cores_count in range(0, constants.MAX_CORES):
            print('(%d,%d)' % (self.cores[cores_count].pipelines,
                               self.cores[cores_count].n_pipelines), end=' ')
        print(']')
        print('ctx1.n_cores = %d' % self.n_cores)
        print('ctx1.n_pipelines = %d' % self.n_pipelines)
        print('ctx1.pos = %d' % self.pos)
        print('ctx1.stage0_file_comment = %s' % self.stage0_file_comment)
        print('ctx1.stage1_file_comment = %s' % self.stage1_file_comment)
        if (self.ctx2 is not None):
            print('ctx1.ctx2 = ', end='')
            print(self.ctx2)
        else:
            print('ctx1.ctx2 = None')

    def stage1_init(self, c0, ctx2):
        self.stage1_reset()
        self.n_cores = 0
        while (c0.cores[self.n_cores].n_pipelines > 0):
            self.n_cores += 1

        self.n_pipelines = c0.n_pipelines
        self.ctx2 = ctx2

        self.arr_pipelines2cores = [0] * self.n_pipelines

        i = 0
        while (i < self.n_cores):
            self.cores[i].n_pipelines = c0.cores[i].n_pipelines
            i += 1

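    # Added note: stage1 assigns a concrete pipeline bitmask to every core,
    # consistent with the per-core counts chosen in stage0; overlapping
    # masks and permutations of equal-sized cores are skipped.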
    def stage1_process(self):
        pipelines_max = len2mask(self.n_pipelines)
        while True:
            pos = 0
            overlap = 0

            if (self.cores[self.pos].pipelines == pipelines_max):
                if (self.pos == 0):
                    return

                self.cores[self.pos].pipelines = 0
                self.pos -= 1
                continue

            self.cores[self.pos].pipelines += 1
            if (popcount(self.cores[self.pos].pipelines) !=
                    self.cores[self.pos].n_pipelines):
                continue

            overlap = 0
            pos = 0
            while (pos < self.pos):
                if ((self.cores[self.pos].pipelines) &
                        (self.cores[pos].pipelines)):
                    overlap = 1
                    break
                pos += 1

            if (overlap):
                continue

            if ((self.pos > 0) and
                ((self.cores[self.pos].n_pipelines) ==
                    (self.cores[self.pos - 1].n_pipelines)) and
                    ((self.cores[self.pos].pipelines) <
                        (self.cores[self.pos - 1].pipelines))):
                continue

            if (self.pos == self.n_cores - 1):
                self.stage1_log()
                self.ctx2.stage2_init(self)
                self.ctx2.stage2_process()

                if (self.pos == 0):
                    return

                self.cores[self.pos].pipelines = 0
                self.pos -= 1
                continue

            self.pos += 1

    def stage1_log(self):
        tmp_file_comment = ""
        if(enable_stage1_traceout == 1):
            print('STAGE1: ', end='')
            tmp_file_comment += 'STAGE1: '
            i = 0
            while (i < self.n_cores):
                print('C%d = [' % i, end='')
                tmp_file_comment += "C{} = [".format(i)

                j = self.n_pipelines - 1
                while (j >= 0):
                    cond = ((self.cores[i].pipelines) & (1 << j))
                    if (cond):
                        print('1', end='')
                        tmp_file_comment += '1'
                    else:
                        print('0', end='')
                        tmp_file_comment += '0'
                    j -= 1

                print(']\t', end='')
                tmp_file_comment += ']\t'
                i += 1

            print('\n', end='')
            self.stage1_file_comment = tmp_file_comment
            self.ctx2.stage1_file_comment = tmp_file_comment

        # check if file tracing is enabled
        if(enable_stage1_fileout != 1):
            return

        # write out this combination to file
        self.stage1_process_file()

    def stage1_updateCoresInBuf(self, nPipeline, sCore):
        rePipeline = self._fileTrace.arr_pipelines[nPipeline]
        rePipeline = rePipeline.replace("[", r"\[").replace("]", r"\]")
        reCore = r'core\s*=\s*((\d*)|(((s|S)\d)?(c|C)[1-9][0-9]*)).*\n'
        sSubs = 'core = ' + sCore + '\n'

        reg_pipeline = re.compile(rePipeline)
        search_match = reg_pipeline.search(self._fileTrace.in_buf)

        if(search_match):
            pos = search_match.start()
            substr1 = self._fileTrace.in_buf[:pos]
            substr2 = self._fileTrace.in_buf[pos:]
            substr2 = re.sub(reCore, sSubs, substr2, 1)
            self._fileTrace.in_buf = substr1 + substr2

    def stage1_process_file(self):
        outFileName = os.path.join(self._fileTrace.out_path,
                                   self._fileTrace.prefix_outfile)
        outFileName += "_{}CoReS".format(self.n_cores)

        i = 0  # represents core number
        while (i < self.n_cores):
            j = self.n_pipelines - 1
            pipeline_idx = 0
            while(j >= 0):
                cond = ((self.cores[i].pipelines) & (1 << j))
                if (cond):
                    # update the pipelines array to match the core
                    # only in case of cond match
                    self.arr_pipelines2cores[
                        pipeline_idx] = fileTrace.in_physical_cores[i]

                j -= 1
                pipeline_idx += 1

            i += 1

        # update the in_buf as per the arr_pipelines2cores
        for pipeline_idx in range(len(self.arr_pipelines2cores)):
            outFileName += "_{}".format(self.arr_pipelines2cores[pipeline_idx])
            self.stage1_updateCoresInBuf(
                pipeline_idx, self.arr_pipelines2cores[pipeline_idx])

        # by now the in_buf is all set to be written to file
        outFileName += self._fileTrace.suffix_outfile
        outputFile = open(outFileName, "w")

        # write out the comments
        strTruncated = ("", "(Truncated)")[self._fileTrace.ncores_truncated]
        outputFile.write(
            "; =============== Pipeline-to-Core Mapping ================\n"
            "; Generated from file {}\n"
            "; Input pipelines = {}\n"
            "; Input cores = {}\n"
            "; N_PIPELINES = {} N_CORES = {} {} hyper_thread = {}\n"
            .format(
                self._fileTrace.in_file_namepath,
                fileTrace.arr_pipelines,
                fileTrace.in_physical_cores,
                self._fileTrace.n_pipelines,
                self._fileTrace.n_cores,
                strTruncated,
                self._fileTrace.hyper_thread))

        outputFile.write(
            "; {stg0cmt}\n"
            "; {stg1cmt}\n"
            "; ========================================================\n"
            "; \n"
            .format(
                stg0cmt=self.stage0_file_comment,
                stg1cmt=self.stage1_file_comment))

        # write buffer contents
        outputFile.write(self._fileTrace.in_buf)
        outputFile.flush()
        outputFile.close()


class Context2:
    _fileTrace = None

    def __init__(self):
        self.cores = [Cores2() for i in range(constants.MAX_CORES)]
        self.n_cores = 0
        self.n_pipelines = 0
        self.pos = 0
        self.stage0_file_comment = ""
        self.stage1_file_comment = ""
        self.stage2_file_comment = ""

        # each array entry maps a pipeline to a core, stored as a string;
        # pipelines are numbered 1 to n but stored in a zero-based array
        self.arr2_pipelines2cores = []

    def stage2_print(self):
        print('printing Context2 obj')
        print('ctx2.cores(pipelines, n_pipelines, counter, counter_max) =')
        for cores_count in range(0, constants.MAX_CORES):
            print('core[%d] = (%d,%d,%d,%d)' % (
                cores_count,
                self.cores[cores_count].pipelines,
                self.cores[cores_count].n_pipelines,
                self.cores[cores_count].counter,
                self.cores[cores_count].counter_max))

        print('ctx2.n_cores = %d' % self.n_cores)
        print('ctx2.n_pipelines = %d' % self.n_pipelines)
        print('ctx2.pos = %d' % self.pos)
        print('ctx2.stage0_file_comment = %s' % self.stage0_file_comment)
        print('ctx2.stage1_file_comment = %s' % self.stage1_file_comment)
        print('ctx2.stage2_file_comment = %s' % self.stage2_file_comment)

    def stage2_reset(self):
        for i in range(0, constants.MAX_CORES):
            self.cores[i].pipelines = 0
            self.cores[i].n_pipelines = 0
            self.cores[i].counter = 0
            self.cores[i].counter_max = 0

            for idx in range(0, constants.MAX_PIPELINES):
                self.cores[i].bitpos[idx] = 0

        self.n_cores = 0
        self.n_pipelines = 0
        self.pos = 0
        # clear list
        del self.arr2_pipelines2cores[:]

    def bitpos_load(self, coreidx):
        i = j = 0
        while (i < self.n_pipelines):
            if ((self.cores[coreidx].pipelines) &
                    (1 << i)):
                self.cores[coreidx].bitpos[j] = i
                j += 1
            i += 1
        self.cores[coreidx].n_pipelines = j

    def bitpos_apply(self, in_buf, pos, n_pos):
        out = 0
        for i in range(0, n_pos):
            out |= (in_buf & (1 << i)) << (pos[i] - i)

        return out

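    # Added note: bitpos_apply() scatters the low n_pos bits of in_buf to
    # the bit positions listed in pos, for example
    # bitpos_apply(0b11, [1, 3], 2) == 0b1010.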
    def stage2_init(self, ctx1):
        self.stage2_reset()
        self.n_cores = ctx1.n_cores
        self.n_pipelines = ctx1.n_pipelines

        self.arr2_pipelines2cores = [''] * self.n_pipelines

        core_idx = 0
        while (core_idx < self.n_cores):
            self.cores[core_idx].pipelines = ctx1.cores[core_idx].pipelines

            self.bitpos_load(core_idx)
            core_idx += 1

    def stage2_log(self):
        tmp_file_comment = ""
        if(enable_stage2_traceout == 1):
            print('STAGE2: ', end='')
            tmp_file_comment += 'STAGE2: '

            for i in range(0, self.n_cores):
                mask = len2mask(self.cores[i].n_pipelines)
                pipelines_ht0 = self.bitpos_apply(
                    (~self.cores[i].counter) & mask,
                    self.cores[i].bitpos,
                    self.cores[i].n_pipelines)

                pipelines_ht1 = self.bitpos_apply(
                    self.cores[i].counter,
                    self.cores[i].bitpos,
                    self.cores[i].n_pipelines)

                print('C%dHT0 = [' % i, end='')
                tmp_file_comment += "C{}HT0 = [".format(i)
                tmp_file_comment += bitstring_write(
                    pipelines_ht0, self.n_pipelines)

                print(']\tC%dHT1 = [' % i, end='')
                tmp_file_comment += "]\tC{}HT1 = [".format(i)
                tmp_file_comment += bitstring_write(
                    pipelines_ht1, self.n_pipelines)
                print(']\t', end='')
                tmp_file_comment += ']\t'

            print('')
            self.stage2_file_comment = tmp_file_comment

        # check if file tracing is enabled
        if(enable_stage2_fileout != 1):
            return
        # write out this combination to file
        self.stage2_process_file()

    def stage2_updateCoresInBuf(self, nPipeline, sCore):
        rePipeline = self._fileTrace.arr_pipelines[nPipeline]
        rePipeline = rePipeline.replace("[", r"\[").replace("]", r"\]")
        reCore = r'core\s*=\s*((\d*)|(((s|S)\d)?(c|C)[1-9][0-9]*)).*\n'
        sSubs = 'core = ' + sCore + '\n'

        reg_pipeline = re.compile(rePipeline)
        search_match = reg_pipeline.search(self._fileTrace.in_buf)

        if(search_match):
            pos = search_match.start()
            substr1 = self._fileTrace.in_buf[:pos]
            substr2 = self._fileTrace.in_buf[pos:]
            substr2 = re.sub(reCore, sSubs, substr2, 1)
            self._fileTrace.in_buf = substr1 + substr2

    def pipelines2cores(self, n, n_bits, nCore, bHT):
        if (n_bits > 64):
            return

        i = n_bits - 1
        pipeline_idx = 0
        while (i >= 0):
            cond = (n & (1 << i))
            if (cond):
                # update the pipelines array to match the core
                # only in case of cond match
                # PIPELINE0 and core 0 are reserved
                if(bHT):
                    tmpCore = fileTrace.in_physical_cores[nCore] + 'h'
                    self.arr2_pipelines2cores[pipeline_idx] = tmpCore
                else:
                    self.arr2_pipelines2cores[pipeline_idx] = \
                        fileTrace.in_physical_cores[nCore]

            i -= 1
            pipeline_idx += 1

    def stage2_process_file(self):
        outFileName = os.path.join(self._fileTrace.out_path,
                                   self._fileTrace.prefix_outfile)
        outFileName += "_{}CoReS".format(self.n_cores)

        for i in range(0, self.n_cores):
            mask = len2mask(self.cores[i].n_pipelines)
            pipelines_ht0 = self.bitpos_apply((~self.cores[i].counter) & mask,
                                              self.cores[i].bitpos,
                                              self.cores[i].n_pipelines)

            pipelines_ht1 = self.bitpos_apply(self.cores[i].counter,
                                              self.cores[i].bitpos,
                                              self.cores[i].n_pipelines)

            # update pipelines to core mapping
            self.pipelines2cores(pipelines_ht0, self.n_pipelines, i, False)
            self.pipelines2cores(pipelines_ht1, self.n_pipelines, i, True)

        # update the in_buf as per the arr_pipelines2cores
        for pipeline_idx in range(len(self.arr2_pipelines2cores)):
            outFileName += "_{}".format(
                self.arr2_pipelines2cores[pipeline_idx])
            self.stage2_updateCoresInBuf(
                pipeline_idx, self.arr2_pipelines2cores[pipeline_idx])

        # by now the in_buf is all set to be written to file
        outFileName += self._fileTrace.suffix_outfile
        outputFile = open(outFileName, "w")

        # write the file comments
        strTruncated = ("", "(Truncated)")[self._fileTrace.ncores_truncated]
        outputFile.write(
            "; =============== Pipeline-to-Core Mapping ================\n"
            "; Generated from file {}\n"
            "; Input pipelines = {}\n"
            "; Input cores = {}\n"
            "; N_PIPELINES = {}  N_CORES = {} {} hyper_thread = {} \n"
            .format(
                self._fileTrace.in_file_namepath,
                fileTrace.arr_pipelines,
                fileTrace.in_physical_cores,
                self._fileTrace.n_pipelines,
                self._fileTrace.n_cores,
                strTruncated,
                self._fileTrace.hyper_thread))

        outputFile.write(
            "; {stg0cmt}\n"
            "; {stg1cmt}\n"
            "; {stg2cmt}\n"
            "; ========================================================\n"
            "; \n"
            .format(
                stg0cmt=self.stage0_file_comment,
                stg1cmt=self.stage1_file_comment,
                stg2cmt=self.stage2_file_comment))

        # write the buffer contents
        outputFile.write(self._fileTrace.in_buf)
        outputFile.flush()
        outputFile.close()

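    # Added note: for each core, counter selects which of that core's
    # pipelines run on hyper-thread 1 (set bits) and which stay on
    # hyper-thread 0 (the complement); counter_max spans n_pipelines - 1
    # bits, so one pipeline always stays on HT0 and mirrored splits are
    # not generated twice.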
    def stage2_process(self):
        i = 0
        while(i < self.n_cores):
            self.cores[i].counter_max = len2mask(
                self.cores[i].n_pipelines - 1)
            i += 1

        self.pos = self.n_cores - 1
        while True:
            if (self.pos == self.n_cores - 1):
                self.stage2_log()

            if (self.cores[self.pos].counter ==
                    self.cores[self.pos].counter_max):
                if (self.pos == 0):
                    return

                self.cores[self.pos].counter = 0
                self.pos -= 1
                continue

            self.cores[self.pos].counter += 1
            if(self.pos < self.n_cores - 1):
                self.pos += 1


class FileTrace:

    def __init__(self, filenamepath):
        self.in_file_namepath = os.path.abspath(filenamepath)
        self.in_filename = os.path.basename(self.in_file_namepath)
        self.in_path = os.path.dirname(self.in_file_namepath)

        filenamesplit = self.in_filename.split('.')
        self.prefix_outfile = filenamesplit[0]
        self.suffix_outfile = ".cfg"

        # output folder: a new folder named after the input file,
        # created next to this script
        self.out_path = os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            self.prefix_outfile)

        try:
            os.makedirs(self.out_path)
        except OSError as excep:
            if excep.errno == errno.EEXIST and os.path.isdir(self.out_path):
                pass
            else:
                raise

        self.in_buf = None
        # holds the [PIPELINEx] section names found in the input file
        self.arr_pipelines = []

        self.max_cores = 15
        self.max_pipelines = 15

        self.in_physical_cores = None
        self.hyper_thread = None

        # save the num of pipelines determined from input file
        self.n_pipelines = 0
        # save the num of cores input (or the truncated value)
        self.n_cores = 0
        self.ncores_truncated = False

    def print_TraceFile(self):
        print("self.in_file_namepath = ", self.in_file_namepath)
        print("self.in_filename = ", self.in_filename)
        print("self.in_path = ", self.in_path)
        print("self.out_path = ", self.out_path)
        print("self.prefix_outfile = ", self.prefix_outfile)
        print("self.suffix_outfile = ", self.suffix_outfile)
        print("self.in_buf = ", self.in_buf)
        print("self.arr_pipelines =", self.arr_pipelines)
        print("self.in_physical_cores", self.in_physical_cores)
        print("self.hyper_thread", self.hyper_thread)


def process(n_cores, n_pipelines, fileTrace):
    '''process and map pipelines, cores.'''
    if (n_cores == 0):
        sys.exit('N_CORES is 0, exiting')

    if (n_pipelines == 0):
        sys.exit('N_PIPELINES is 0, exiting')

    if (n_cores > n_pipelines):
        print('\nToo many cores, truncating N_CORES to N_PIPELINES')
        n_cores = n_pipelines
        fileTrace.ncores_truncated = True

    fileTrace.n_pipelines = n_pipelines
    fileTrace.n_cores = n_cores

    strTruncated = ("", "(Truncated)")[fileTrace.ncores_truncated]
    print("N_PIPELINES = {}, N_CORES = {} {}"
          .format(n_pipelines, n_cores, strTruncated))
    print("---------------------------------------------------------------")

    ctx0_inst = Context0()
    ctx1_inst = Context1()
    ctx2_inst = Context2()

    # initialize the class variables
    ctx1_inst._fileTrace = fileTrace
    ctx2_inst._fileTrace = fileTrace

    ctx0_inst.stage0_init(n_cores, n_pipelines, ctx1_inst, ctx2_inst)
    ctx0_inst.stage0_process()


def validate_core(core):
    match = reg_phycore.match(core)
    if(match):
        return True
    else:
        return False


def validate_phycores(phy_cores):
    '''validate physical cores, check if unique.'''
    # eat up whitespaces
    phy_cores = phy_cores.strip().split(',')

    # check if the core list is unique
    if(len(phy_cores) != len(set(phy_cores))):
        print('list of physical cores has duplicates')
        return None

    for core in phy_cores:
        if not validate_core(core):
            print('invalid physical core specified.')
            return None
    return phy_cores
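# Added note (illustration): validate_phycores("s0c1,s0c2") returns
# ['s0c1', 's0c2']; duplicate or malformed entries make it return None.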


def scanconfigfile(fileTrace):
    '''scan input file for pipelines, validate then process.'''
    # open file
    filetoscan = open(fileTrace.in_file_namepath, 'r')
    fileTrace.in_buf = filetoscan.read()

    # reset iterator on open file
    filetoscan.seek(0)

    # scan input file for pipelines
    # master pipelines to be ignored
    pattern_pipeline = r'\[PIPELINE\d*\]'
    pattern_mastertype = r'type\s*=\s*MASTER'
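    # Added note: these patterns match config sections of the (illustrative)
    # form below; every non-MASTER pipeline section is collected for mapping:
    #   [PIPELINE1]
    #   type = <non-MASTER pipeline type>
    #   core = s0c1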

    pending_pipeline = False
    for line in filetoscan:
        match_pipeline = re.search(pattern_pipeline, line)
        match_type = re.search(r'type\s*=', line)
        match_mastertype = re.search(pattern_mastertype, line)

        if(match_pipeline):
            sPipeline = line[match_pipeline.start():match_pipeline.end()]
            pending_pipeline = True
        elif(match_type):
            # found a type definition...
            if(match_mastertype is None):
                # and this is not a master pipeline...
                if(pending_pipeline):
                    # add it to the list of pipelines to be mapped
                    fileTrace.arr_pipelines.append(sPipeline)
                    pending_pipeline = False
            else:
                # and this is a master pipeline...
                # ignore the current and move on to next
                sPipeline = ""
                pending_pipeline = False
    filetoscan.close()

    # validate if pipelines are unique
    if(len(fileTrace.arr_pipelines) != len(set(fileTrace.arr_pipelines))):
        sys.exit('Error: duplicate pipelines in input file')

    num_pipelines = len(fileTrace.arr_pipelines)
    num_cores = len(fileTrace.in_physical_cores)

    print("-------------------Pipeline-to-core mapping--------------------")
    print("Input pipelines = {}\nInput cores = {}"
          .format(fileTrace.arr_pipelines, fileTrace.in_physical_cores))

    # input configuration file validations go here
    if (num_cores > fileTrace.max_cores):
        sys.exit('Error: number of cores specified > max_cores (%d)' %
                 fileTrace.max_cores)

    if (num_pipelines > fileTrace.max_pipelines):
        sys.exit('Error: number of pipelines in input cfg file > '
                 'max_pipelines (%d)' % fileTrace.max_pipelines)

    # call process to generate pipeline-to-core mapping, trace and log
    process(num_cores, num_pipelines, fileTrace)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='mappipelines')

    reqNamedGrp = parser.add_argument_group('required named args')
    reqNamedGrp.add_argument(
        '-i',
        '--input-file',
        type=argparse.FileType('r'),
        help='Input config file',
        required=True)

    reqNamedGrp.add_argument(
        '-pc',
        '--physical-cores',
        type=validate_phycores,
        help='''Enter available CPU cores in
                format:\"<core>,<core>,...\"
                where each core format: \"s<SOCKETID>c<COREID>\"
                where SOCKETID={0..9}, COREID={1-99}''',
        required=True)

    # add optional arguments
    parser.add_argument(
        '-ht',
        '--hyper-thread',
        help='enable/disable hyper threading. default is ON',
        default='ON',
        choices=['ON', 'OFF'])

    parser.add_argument(
        '-nO',
        '--no-output-file',
        help='''disable output config file generation.
                Output file generation is enabled by default''',
        action="store_true")

    args = parser.parse_args()

    if(args.physical_cores is None):
        parser.error("invalid physical_cores specified")

    # create object of FileTrace and initialise
    fileTrace = FileTrace(args.input_file.name)
    fileTrace.in_physical_cores = args.physical_cores
    fileTrace.hyper_thread = args.hyper_thread

    if(fileTrace.hyper_thread == 'OFF'):
        print("!!!!disabling stage2 HT!!!!")
        enable_stage2_traceout = 0
        enable_stage2_fileout = 0
    elif(fileTrace.hyper_thread == 'ON'):
        print("!!!!HT enabled. disabling stage1 file generation.!!!!")
        enable_stage1_fileout = 0

    if(args.no_output_file is True):
        print("!!!!disabling stage1 and stage2 fileout!!!!")
        enable_stage1_fileout = 0
        enable_stage2_fileout = 0

    scanconfigfile(fileTrace)