#   BSD LICENSE
#
#   Copyright(c) 2016 Intel Corporation. All rights reserved.
#   All rights reserved.
#
#   Redistribution and use in source and binary forms, with or without
#   modification, are permitted provided that the following conditions
#   are met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in
#       the documentation and/or other materials provided with the
#       distribution.
#     * Neither the name of Intel Corporation nor the names of its
#       contributors may be used to endorse or promote products derived
#       from this software without specific prior written permission.
#
#   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
#   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
#   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
#   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
#   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
#   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
#   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
#   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# This script maps the set of pipelines identified (MASTER pipelines are
# ignored) from the input configuration file to the set of cores
# provided as input argument and creates configuration files for each of
# the mapping combinations.
from __future__ import print_function
from collections import namedtuple

import argparse
import array
import errno
import itertools
import os
import re
import sys
# per-stage console tracing switches (1 = print combinations as found)
enable_stage0_traceout = 1
enable_stage1_traceout = 1
enable_stage2_traceout = 1

# per-stage output config-file generation switches (1 = write files)
enable_stage1_fileout = 1
enable_stage2_fileout = 1

# upper bounds used to size the per-context core/pipeline tables
Constants = namedtuple('Constants', ['MAX_CORES', 'MAX_PIPELINES'])
constants = Constants(16, 64)

# pattern for a physical core token, e.g. "s0c1":
# socket id is a single digit, core id must not start with 0
pattern_phycore = r'^(s|S)\d(c|C)[1-9][0-9]*$'
reg_phycore = re.compile(pattern_phycore)
69 return bin(mask).count("1")
def len2mask(length):
    '''Return a bitmask with the *length* least-significant bits set.

    A length of 0 yields 0 (int('', 2) would raise); lengths above 64
    abort the script since pipeline masks are limited to 64 bits.
    '''
    if (length == 0):
        return 0

    if (length > 64):
        sys.exit('error: len2mask - length %i > 64. exiting' % length)

    return int('1' * length, 2)
82 def bitstring_write(n, n_bits):
120 self.bitpos = array.array(
121 "L", itertools.repeat(0, constants.MAX_PIPELINES))
127 self.cores = [Cores0() for i in range(0, constants.MAX_CORES)]
130 self.n_pipelines0 = 0
132 self.file_comment = ""
136 def stage0_print(self):
137 print('printing Context0 obj')
138 print('c0.cores(n_pipelines) = [ ', end='')
139 for cores_count in range(0, constants.MAX_CORES):
140 print(self.cores[cores_count].n_pipelines, end=' ')
142 print('c0.n_cores = %d' % self.n_cores)
143 print('c0.n_pipelines = %d' % self.n_pipelines)
144 print('c0.n_pipelines0 = %d' % self.n_pipelines0)
145 print('c0.pos = %d' % self.pos)
146 print('c0.file_comment = %s' % self.file_comment)
147 if (self.ctx1 is not None):
148 print('c0.ctx1 = ', end='')
149 print(repr(self.ctx1))
151 print('c0.ctx1 = None')
153 if (self.ctx2 is not None):
154 print('c0.ctx2 = ', end='')
155 print(repr(self.ctx2))
157 print('c0.ctx2 = None')
159 def stage0_init(self, num_cores, num_pipelines, ctx1, ctx2):
160 self.n_cores = num_cores
161 self.n_pipelines = num_pipelines
165 def stage0_process(self):
167 self.cores[0].n_pipelines = self.n_pipelines
168 self.n_pipelines0 = 0
174 if ((self.pos < self.n_cores) and (self.n_pipelines0 > 0)):
175 self.cores[self.pos].n_pipelines = min(
176 self.cores[self.pos - 1].n_pipelines,
178 self.n_pipelines0 -= self.cores[self.pos].n_pipelines
184 if (self.n_pipelines0 == 0):
186 self.ctx1.stage1_init(self, self.ctx2) # self is object c0
187 self.ctx1.stage1_process()
195 if ((self.cores[self.pos].n_pipelines > 1) and
196 (self.pos != (self.n_cores - 1))):
199 self.n_pipelines0 += self.cores[self.pos].n_pipelines
200 self.cores[self.pos].n_pipelines = 0
203 self.cores[self.pos].n_pipelines -= 1
204 self.n_pipelines0 += 1
207 def stage0_log(self):
208 tmp_file_comment = ""
209 if(enable_stage0_traceout != 1):
212 print('STAGE0: ', end='')
213 tmp_file_comment += 'STAGE0: '
214 for cores_count in range(0, self.n_cores):
217 self.cores[cores_count].n_pipelines), end='')
218 tmp_file_comment += "C{} = {}\t".format(
219 cores_count, self.cores[cores_count].n_pipelines)
222 self.ctx1.stage0_file_comment = tmp_file_comment
223 self.ctx2.stage0_file_comment = tmp_file_comment
230 self.cores = [Cores1() for i in range(constants.MAX_CORES)]
234 self.stage0_file_comment = ""
235 self.stage1_file_comment = ""
238 self.arr_pipelines2cores = []
240 def stage1_reset(self):
241 for i in range(constants.MAX_CORES):
242 self.cores[i].pipelines = 0
243 self.cores[i].n_pipelines = 0
250 del self.arr_pipelines2cores[:]
252 def stage1_print(self):
253 print('printing Context1 obj')
254 print('ctx1.cores(pipelines,n_pipelines) = [ ', end='')
255 for cores_count in range(0, constants.MAX_CORES):
256 print('(%d,%d)' % (self.cores[cores_count].pipelines,
257 self.cores[cores_count].n_pipelines), end=' ')
259 print('ctx1.n_cores = %d' % self.n_cores)
260 print('ctx1.n_pipelines = %d' % self.n_pipelines)
261 print('ctx1.pos = %d' % self.pos)
262 print('ctx1.stage0_file_comment = %s' % self.stage0_file_comment)
263 print('ctx1.stage1_file_comment = %s' % self.stage1_file_comment)
264 if (self.ctx2 is not None):
265 print('ctx1.ctx2 = ', end='')
268 print('ctx1.ctx2 = None')
270 def stage1_init(self, c0, ctx2):
273 while (c0.cores[self.n_cores].n_pipelines > 0):
276 self.n_pipelines = c0.n_pipelines
279 self.arr_pipelines2cores = [0] * self.n_pipelines
282 while (i < self.n_cores):
283 self.cores[i].n_pipelines = c0.cores[i].n_pipelines
286 def stage1_process(self):
287 pipelines_max = len2mask(self.n_pipelines)
292 if (self.cores[self.pos].pipelines == pipelines_max):
296 self.cores[self.pos].pipelines = 0
300 self.cores[self.pos].pipelines += 1
301 if (popcount(self.cores[self.pos].pipelines) !=
302 self.cores[self.pos].n_pipelines):
307 while (pos < self.pos):
308 if ((self.cores[self.pos].pipelines) &
309 (self.cores[pos].pipelines)):
317 if ((self.pos > 0) and
318 ((self.cores[self.pos].n_pipelines) ==
319 (self.cores[self.pos - 1].n_pipelines)) and
320 ((self.cores[self.pos].pipelines) <
321 (self.cores[self.pos - 1].pipelines))):
324 if (self.pos == self.n_cores - 1):
326 self.ctx2.stage2_init(self)
327 self.ctx2.stage2_process()
332 self.cores[self.pos].pipelines = 0
338 def stage1_log(self):
339 tmp_file_comment = ""
340 if(enable_stage1_traceout == 1):
341 print('STAGE1: ', end='')
342 tmp_file_comment += 'STAGE1: '
344 while (i < self.n_cores):
345 print('C%d = [' % i, end='')
346 tmp_file_comment += "C{} = [".format(i)
348 j = self.n_pipelines - 1
350 cond = ((self.cores[i].pipelines) & (1 << j))
353 tmp_file_comment += '1'
356 tmp_file_comment += '0'
360 tmp_file_comment += ']\t'
364 self.stage1_file_comment = tmp_file_comment
365 self.ctx2.stage1_file_comment = tmp_file_comment
367 # check if file traceing is enabled
368 if(enable_stage1_fileout != 1):
371 # spit out the combination to file
372 self.stage1_process_file()
374 def stage1_updateCoresInBuf(self, nPipeline, sCore):
375 rePipeline = self._fileTrace.arr_pipelines[nPipeline]
376 rePipeline = rePipeline.replace("[", "\[").replace("]", "\]")
377 reCore = 'core\s*=\s*((\d*)|(((s|S)\d)?(c|C)[1-9][0-9]*)).*\n'
378 sSubs = 'core = ' + sCore + '\n'
380 reg_pipeline = re.compile(rePipeline)
381 search_match = reg_pipeline.search(self._fileTrace.in_buf)
384 pos = search_match.start()
385 substr1 = self._fileTrace.in_buf[:pos]
386 substr2 = self._fileTrace.in_buf[pos:]
387 substr2 = re.sub(reCore, sSubs, substr2, 1)
388 self._fileTrace.in_buf = substr1 + substr2
390 def stage1_process_file(self):
391 outFileName = os.path.join(self._fileTrace.out_path,
392 self._fileTrace.prefix_outfile)
393 outFileName += "_{}CoReS".format(self.n_cores)
395 i = 0 # represents core number
396 while (i < self.n_cores):
397 j = self.n_pipelines - 1
400 cond = ((self.cores[i].pipelines) & (1 << j))
402 # update the pipelines array to match the core
403 # only in case of cond match
404 self.arr_pipelines2cores[
405 pipeline_idx] = fileTrace.in_physical_cores[i]
412 # update the in_buf as per the arr_pipelines2cores
413 for pipeline_idx in range(len(self.arr_pipelines2cores)):
414 outFileName += "_{}".format(self.arr_pipelines2cores[pipeline_idx])
415 self.stage1_updateCoresInBuf(
416 pipeline_idx, self.arr_pipelines2cores[pipeline_idx])
418 # by now the in_buf is all set to be written to file
419 outFileName += self._fileTrace.suffix_outfile
420 outputFile = open(outFileName, "w")
422 # write out the comments
423 strTruncated = ("", "(Truncated)")[self._fileTrace.ncores_truncated]
425 "; =============== Pipeline-to-Core Mapping ================\n"
426 "; Generated from file {}\n"
427 "; Input pipelines = {}\n"
428 "; Input cores = {}\n"
429 "; N_PIPELINES = {} N_CORES = {} {} hyper_thread = {}\n"
431 self._fileTrace.in_file_namepath,
432 fileTrace.arr_pipelines,
433 fileTrace.in_physical_cores,
434 self._fileTrace.n_pipelines,
435 self._fileTrace.n_cores,
437 self._fileTrace.hyper_thread))
442 "; ========================================================\n"
445 stg0cmt=self.stage0_file_comment,
446 stg1cmt=self.stage1_file_comment))
448 # write buffer contents
449 outputFile.write(self._fileTrace.in_buf)
458 self.cores = [Cores2() for i in range(constants.MAX_CORES)]
462 self.stage0_file_comment = ""
463 self.stage1_file_comment = ""
464 self.stage2_file_comment = ""
466 # each array entry is a pipeline mapped to core stored as string
467 # pipeline ranging from 1 to n, however stored in zero based array
468 self.arr2_pipelines2cores = []
470 def stage2_print(self):
471 print('printing Context2 obj')
472 print('ctx2.cores(pipelines, n_pipelines, counter, counter_max) =')
473 for cores_count in range(0, constants.MAX_CORES):
474 print('core[%d] = (%d,%d,%d,%d)' % (
476 self.cores[cores_count].pipelines,
477 self.cores[cores_count].n_pipelines,
478 self.cores[cores_count].counter,
479 self.cores[cores_count].counter_max))
481 print('ctx2.n_cores = %d' % self.n_cores, end='')
482 print('ctx2.n_pipelines = %d' % self.n_pipelines, end='')
483 print('ctx2.pos = %d' % self.pos)
484 print('ctx2.stage0_file_comment = %s' %
485 self.self.stage0_file_comment)
486 print('ctx2.stage1_file_comment = %s' %
487 self.self.stage1_file_comment)
488 print('ctx2.stage2_file_comment = %s' %
489 self.self.stage2_file_comment)
491 def stage2_reset(self):
492 for i in range(0, constants.MAX_CORES):
493 self.cores[i].pipelines = 0
494 self.cores[i].n_pipelines = 0
495 self.cores[i].counter = 0
496 self.cores[i].counter_max = 0
498 for idx in range(0, constants.MAX_PIPELINES):
499 self.cores[i].bitpos[idx] = 0
505 del self.arr2_pipelines2cores[:]
507 def bitpos_load(self, coreidx):
509 while (i < self.n_pipelines):
510 if ((self.cores[coreidx].pipelines) &
512 self.cores[coreidx].bitpos[j] = i
515 self.cores[coreidx].n_pipelines = j
517 def bitpos_apply(self, in_buf, pos, n_pos):
519 for i in range(0, n_pos):
520 out |= (in_buf & (1 << i)) << (pos[i] - i)
524 def stage2_init(self, ctx1):
526 self.n_cores = ctx1.n_cores
527 self.n_pipelines = ctx1.n_pipelines
529 self.arr2_pipelines2cores = [''] * self.n_pipelines
532 while (core_idx < self.n_cores):
533 self.cores[core_idx].pipelines = ctx1.cores[core_idx].pipelines
535 self.bitpos_load(core_idx)
538 def stage2_log(self):
539 tmp_file_comment = ""
540 if(enable_stage2_traceout == 1):
541 print('STAGE2: ', end='')
542 tmp_file_comment += 'STAGE2: '
544 for i in range(0, self.n_cores):
545 mask = len2mask(self.cores[i].n_pipelines)
546 pipelines_ht0 = self.bitpos_apply(
547 (~self.cores[i].counter) & mask,
548 self.cores[i].bitpos,
549 self.cores[i].n_pipelines)
551 pipelines_ht1 = self.bitpos_apply(
552 self.cores[i].counter,
553 self.cores[i].bitpos,
554 self.cores[i].n_pipelines)
556 print('C%dHT0 = [' % i, end='')
557 tmp_file_comment += "C{}HT0 = [".format(i)
558 tmp_file_comment += bitstring_write(
559 pipelines_ht0, self.n_pipelines)
561 print(']\tC%dHT1 = [' % i, end='')
562 tmp_file_comment += "]\tC{}HT1 = [".format(i)
563 tmp_file_comment += bitstring_write(
564 pipelines_ht1, self.n_pipelines)
566 tmp_file_comment += ']\t'
569 self.stage2_file_comment = tmp_file_comment
571 # check if file traceing is enabled
572 if(enable_stage2_fileout != 1):
574 # spit out the combination to file
575 self.stage2_process_file()
577 def stage2_updateCoresInBuf(self, nPipeline, sCore):
578 rePipeline = self._fileTrace.arr_pipelines[nPipeline]
579 rePipeline = rePipeline.replace("[", "\[").replace("]", "\]")
580 reCore = 'core\s*=\s*((\d*)|(((s|S)\d)?(c|C)[1-9][0-9]*)).*\n'
581 sSubs = 'core = ' + sCore + '\n'
583 reg_pipeline = re.compile(rePipeline)
584 search_match = reg_pipeline.search(self._fileTrace.in_buf)
587 pos = search_match.start()
588 substr1 = self._fileTrace.in_buf[:pos]
589 substr2 = self._fileTrace.in_buf[pos:]
590 substr2 = re.sub(reCore, sSubs, substr2, 1)
591 self._fileTrace.in_buf = substr1 + substr2
593 def pipelines2cores(self, n, n_bits, nCore, bHT):
600 cond = (n & (1 << i))
602 # update the pipelines array to match the core
603 # only in case of cond match
604 # PIPELINE0 and core 0 are reserved
606 tmpCore = fileTrace.in_physical_cores[nCore] + 'h'
607 self.arr2_pipelines2cores[pipeline_idx] = tmpCore
609 self.arr2_pipelines2cores[pipeline_idx] = \
610 fileTrace.in_physical_cores[nCore]
615 def stage2_process_file(self):
616 outFileName = os.path.join(self._fileTrace.out_path,
617 self._fileTrace.prefix_outfile)
618 outFileName += "_{}CoReS".format(self.n_cores)
620 for i in range(0, self.n_cores):
621 mask = len2mask(self.cores[i].n_pipelines)
622 pipelines_ht0 = self.bitpos_apply((~self.cores[i].counter) & mask,
623 self.cores[i].bitpos,
624 self.cores[i].n_pipelines)
626 pipelines_ht1 = self.bitpos_apply(self.cores[i].counter,
627 self.cores[i].bitpos,
628 self.cores[i].n_pipelines)
630 # update pipelines to core mapping
631 self.pipelines2cores(pipelines_ht0, self.n_pipelines, i, False)
632 self.pipelines2cores(pipelines_ht1, self.n_pipelines, i, True)
634 # update the in_buf as per the arr_pipelines2cores
635 for pipeline_idx in range(len(self.arr2_pipelines2cores)):
636 outFileName += "_{}".format(
637 self.arr2_pipelines2cores[pipeline_idx])
638 self.stage2_updateCoresInBuf(
639 pipeline_idx, self.arr2_pipelines2cores[pipeline_idx])
641 # by now the in_buf is all set to be written to file
642 outFileName += self._fileTrace.suffix_outfile
643 outputFile = open(outFileName, "w")
645 # write the file comments
646 strTruncated = ("", "(Truncated)")[self._fileTrace.ncores_truncated]
648 "; =============== Pipeline-to-Core Mapping ================\n"
649 "; Generated from file {}\n"
650 "; Input pipelines = {}\n"
651 "; Input cores = {}\n"
652 "; N_PIPELINES = {} N_CORES = {} {} hyper_thread = {} \n"
654 self._fileTrace.in_file_namepath,
655 fileTrace.arr_pipelines,
656 fileTrace.in_physical_cores,
657 self._fileTrace.n_pipelines,
658 self._fileTrace.n_cores,
660 self._fileTrace.hyper_thread))
666 "; ========================================================\n"
669 stg0cmt=self.stage0_file_comment,
670 stg1cmt=self.stage1_file_comment,
671 stg2cmt=self.stage2_file_comment))
673 # write the buffer contents
674 outputFile.write(self._fileTrace.in_buf)
678 def stage2_process(self):
680 while(i < self.n_cores):
681 self.cores[i].counter_max = len2mask(
682 self.cores[i].n_pipelines - 1)
685 self.pos = self.n_cores - 1
687 if (self.pos == self.n_cores - 1):
690 if (self.cores[self.pos].counter ==
691 self.cores[self.pos].counter_max):
695 self.cores[self.pos].counter = 0
699 self.cores[self.pos].counter += 1
700 if(self.pos < self.n_cores - 1):
706 def __init__(self, filenamepath):
707 self.in_file_namepath = os.path.abspath(filenamepath)
708 self.in_filename = os.path.basename(self.in_file_namepath)
709 self.in_path = os.path.dirname(self.in_file_namepath)
711 filenamesplit = self.in_filename.split('.')
712 self.prefix_outfile = filenamesplit[0]
713 self.suffix_outfile = ".cfg"
715 # output folder: in the same folder as input file
716 # create new folder in the name of input file
717 self.out_path = os.path.join(
718 os.path.abspath(os.path.dirname(__file__)),
722 os.makedirs(self.out_path)
723 except OSError as excep:
724 if excep.errno == errno.EEXIST and os.path.isdir(self.out_path):
730 self.arr_pipelines = [] # holds the positions of search
733 self.max_pipelines = 15
735 self.in_physical_cores = None
736 self.hyper_thread = None
738 # save the num of pipelines determined from input file
740 # save the num of cores input (or the truncated value)
742 self.ncores_truncated = False
744 def print_TraceFile(self):
745 print("self.in_file_namepath = ", self.in_file_namepath)
746 print("self.in_filename = ", self.in_filename)
747 print("self.in_path = ", self.in_path)
748 print("self.out_path = ", self.out_path)
749 print("self.prefix_outfile = ", self.prefix_outfile)
750 print("self.suffix_outfile = ", self.suffix_outfile)
751 print("self.in_buf = ", self.in_buf)
752 print("self.arr_pipelines =", self.arr_pipelines)
753 print("self.in_physical_cores", self.in_physical_cores)
754 print("self.hyper_thread", self.hyper_thread)
def process(n_cores, n_pipelines, fileTrace):
    '''process and map pipelines, cores.

    Validates the counts, truncates n_cores to n_pipelines when there
    are more cores than pipelines, then drives the three-stage
    enumeration (counts -> sets -> hyper-thread splits).
    '''
    if (n_cores == 0):
        sys.exit('N_CORES is 0, exiting')

    if (n_pipelines == 0):
        sys.exit('N_PIPELINES is 0, exiting')

    if (n_cores > n_pipelines):
        print('\nToo many cores, truncating N_CORES to N_PIPELINES')
        n_cores = n_pipelines
        fileTrace.ncores_truncated = True

    fileTrace.n_pipelines = n_pipelines
    fileTrace.n_cores = n_cores

    strTruncated = ("", "(Truncated)")[fileTrace.ncores_truncated]
    print("N_PIPELINES = {}, N_CORES = {} {}"
          .format(n_pipelines, n_cores, strTruncated))
    print("---------------------------------------------------------------")

    ctx0_inst = Context0()
    ctx1_inst = Context1()
    ctx2_inst = Context2()

    # initialize the class variables
    ctx1_inst._fileTrace = fileTrace
    ctx2_inst._fileTrace = fileTrace

    ctx0_inst.stage0_init(n_cores, n_pipelines, ctx1_inst, ctx2_inst)
    ctx0_inst.stage0_process()
def validate_core(core):
    '''Return True when *core* is a well-formed physical core token:
    "s<SOCKETID>c<COREID>" (case-insensitive s/c), e.g. "s0c1".'''
    return reg_phycore.match(core) is not None
def validate_phycores(phy_cores):
    '''validate physical cores, check if unique.

    Parses the comma-separated core list given on the command line and
    returns it as a list; returns None on duplicates or malformed
    tokens so the argparse caller can report the error.
    '''
    phy_cores = phy_cores.strip().split(',')

    # check if the core list is unique
    if(len(phy_cores) != len(set(phy_cores))):
        print('list of physical cores has duplicates')
        return None

    # check that each core conforms to the "s<N>c<M>" format
    for core in phy_cores:
        if not validate_core(core):
            print('invalid physical core specified.')
            return None

    return phy_cores
def scanconfigfile(fileTrace):
    '''scan input file for pipelines, validate then process.

    Reads the whole config into fileTrace.in_buf, collects every
    [PIPELINEx] section whose type is not MASTER, validates counts and
    uniqueness, then calls process() to generate the mappings.
    '''
    # FIX: use a context manager so the handle is always closed
    with open(fileTrace.in_file_namepath, 'r') as filetoscan:
        fileTrace.in_buf = filetoscan.read()

        # reset iterator on open file
        filetoscan.seek(0)

        # scan input file for pipelines
        # master pipelines to be ignored
        pattern_pipeline = r'\[PIPELINE\d*\]'
        pattern_mastertype = r'type\s*=\s*MASTER'

        pending_pipeline = False
        for line in filetoscan:
            match_pipeline = re.search(pattern_pipeline, line)
            match_type = re.search(r'type\s*=', line)
            match_mastertype = re.search(pattern_mastertype, line)

            if (match_pipeline is not None):
                # found a [PIPELINEx] section header
                sPipeline = line[match_pipeline.start():match_pipeline.end()]
                pending_pipeline = True
            elif (match_type is not None):
                # found a type definition...
                if (match_mastertype is None):
                    # and this is not a master pipeline...
                    if (pending_pipeline):
                        # add it to the list of pipelines to be mapped
                        fileTrace.arr_pipelines.append(sPipeline)
                        pending_pipeline = False
                else:
                    # and this is a master pipeline...
                    # ignore the current and move on to next
                    pending_pipeline = False

    # validate if pipelines are unique
    if(len(fileTrace.arr_pipelines) != len(set(fileTrace.arr_pipelines))):
        sys.exit('Error: duplicate pipelines in input file')

    num_pipelines = len(fileTrace.arr_pipelines)
    num_cores = len(fileTrace.in_physical_cores)

    print("-------------------Pipeline-to-core mapping--------------------")
    print("Input pipelines = {}\nInput cores = {}"
          .format(fileTrace.arr_pipelines, fileTrace.in_physical_cores))

    # input configuration file validations goes here
    if (num_cores > fileTrace.max_cores):
        sys.exit('Error: number of cores specified > max_cores (%d)' %
                 fileTrace.max_cores)

    if (num_pipelines > fileTrace.max_pipelines):
        sys.exit('Error: number of pipelines in input \
cfg file > max_pipelines (%d)' % fileTrace.max_pipelines)

    # call process to generate pipeline-to-core mapping, trace and log
    process(num_cores, num_pipelines, fileTrace)
877 if __name__ == "__main__":
878 parser = argparse.ArgumentParser(description='mappipelines')
880 reqNamedGrp = parser.add_argument_group('required named args')
881 reqNamedGrp.add_argument(
884 type=argparse.FileType('r'),
885 help='Input config file',
888 reqNamedGrp.add_argument(
891 type=validate_phycores,
892 help='''Enter available CPU cores in
893 format:\"<core>,<core>,...\"
894 where each core format: \"s<SOCKETID>c<COREID>\"
895 where SOCKETID={0..9}, COREID={1-99}''',
898 # add optional arguments
902 help='enable/disable hyper threading. default is ON',
904 choices=['ON', 'OFF'])
909 help='''disable output config file generation.
910 Output file generation is enabled by default''',
913 args = parser.parse_args()
915 if(args.physical_cores is None):
916 parser.error("invalid physical_cores specified")
918 # create object of FileTrace and initialise
919 fileTrace = FileTrace(args.input_file.name)
920 fileTrace.in_physical_cores = args.physical_cores
921 fileTrace.hyper_thread = args.hyper_thread
923 if(fileTrace.hyper_thread == 'OFF'):
924 print("!!!!disabling stage2 HT!!!!")
925 enable_stage2_traceout = 0
926 enable_stage2_fileout = 0
927 elif(fileTrace.hyper_thread == 'ON'):
928 print("!!!!HT enabled. disabling stage1 file generation.!!!!")
929 enable_stage1_fileout = 0
931 if(args.no_output_file is True):
932 print("!!!!disabling stage1 and stage2 fileout!!!!")
933 enable_stage1_fileout = 0
934 enable_stage2_fileout = 0
936 scanconfigfile(fileTrace)