#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import json
import re
import sys
import logging
import argparse
9col_tmd = 'TMD File'
10col_regex = 'regex to match Trace Class'
11col_tag = 'Expected Tag'
12def readUtmdToJson(utmd_filename):
13 with open(utmd_filename, 'r') as utmdfile:
14 j = json.load(utmdfile)
15 utmdfile.close()
16 return j
17def writeJsonToUtmd(j, utmd_filename):
18 with open(utmd_filename, 'w') as utmdfile:
19 utmdfile.write(json.dumps(j,
20 sort_keys=True,
21 ensure_ascii=True,
22 indent=2))
23 utmdfile.close()
24
25def updateUtmd(utmd_filename, csv_filename):
26 if utmd_filename.startswith('./'):
27 utmd_filename = utmd_filename[2::]
28 csvfile = open(csv_filename, 'r')
29 j = readUtmdToJson(utmd_filename)
30 if j is None:
31 logging.error('read UTMD failed')
32 sys.exit()
33 if j['traceFamily'] == 'PS':
34 logging.warning('{}-{} not L1 / L2 UTMD'.format(j['module'], j['traceFamily']))
35 sys.exit()
36 fieldnames = (col_tmd, col_regex, col_tag)
37 reader = csv.DictReader( csvfile, fieldnames)
38 logging.info('Module: {} - {}'.format(j['module'], utmd_filename))
39 debug_UH = 'Ultra-High'
40 debug_H = 'High'
41 debug_M = 'Medium'
42 debug_L = 'Low'
43 debug_UL = 'Ultra-Low'
44 #update debugLevel
45 for traceClass in j['traceClassDefs']:
46 for k, v in traceClass.iteritems():
47 if k.endswith('_UH'):
48 v['debugLevel'] = debug_UH
49 elif k.endswith('_H'):
50 v['debugLevel'] = debug_H
51 elif k.endswith('_M'):
52 v['debugLevel'] = debug_M
53 elif k.endswith('_L'):
54 v['debugLevel'] = debug_L
55 elif k.endswith('_UL'):
56 v['debugLevel'] = debug_UL
57 #update tag
58 for row in reader:
59 if row[col_tmd] in utmd_filename:
60 logging.info(row)
61 for traceClass in j['traceClassDefs']:
62 for k, v in traceClass.iteritems():
63 logging.info('Original: {} {}'.format(k, v))
64 #default mapping for entire TMD
65 if row[col_regex] == 'N/A':
66 logging.info('Matching N/A: {}'.format(k))
67 logging.info(k)
68 logging.info(v)
69 v['tag'] = [ row[col_tag] ]
70 #regex to match trace class name
71 else:
72 logging.info('Matching regex: {} {}'.format(k, row[col_regex]))
73 tag = re.sub(row[col_regex], row[col_tag], k)
74 if tag:
75 v['tag'] = [ tag ]
76 logging.info('Changed: {} {}'.format(k, v))
77 writeJsonToUtmd(j, utmd_filename)
78
79def init_logger(log_filename):
80 logging.basicConfig(level=logging.DEBUG,
81 format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
82 datefmt='%m-%d %H:%M:%S',
83 filename=log_filename)
84 console = logging.StreamHandler()
85 console.setLevel(logging.DEBUG)
86 formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
87 console.setFormatter(formatter)
88 logging.getLogger('').addHandler(console)
89
90def main():
91 parser = argparse.ArgumentParser(description='update-l1-utmd:\n\
92 Update tag of trace class',
93 #formatter_class=argparse.ArgumentDefaultsHelpFormatter)
94 formatter_class=argparse.RawDescriptionHelpFormatter)
95 parser.add_argument("-v", action="version", version='1.0.0')
96 parser.add_argument("utmd_file",
97 help="input L1 UTMD file")
98 parser.add_argument("csv_file",
99 help="input CSV file(survey table)")
100 parser.add_argument("-l", dest="log_file",
101 help="log file",
102 default='update-l1-utmd.log',
103 action="store")
104 args = parser.parse_args()
105 if args.utmd_file is None:
106 parser.print_help()
107 quit()
108 init_logger(args.log_file)
109 updateUtmd(args.utmd_file, args.csv_file)
110if __name__ == '__main__':
111 main()
112