Script listing and counting Athena messages between two points in a log file,
typically between start and end of event loop.
# Messages matching any of these regex patterns are classified as 'ignored'
# instead of being counted under their severity level. They correspond to
# known benign, high-frequency messages seen in Athena logs: COOL conditions
# reloads, Frontier connection retries, per-event progress printouts, and
# IOVDb / POOL (Root) bookkeeping output.
default_ignore_patterns = [
    r'TrigCOOLUpd.*INFO.*Reload',
    r'TrigCOOLUpd.*INFO.*Invalidated',
    r'warn.*frontier.c.*Request',
    r'warn.*frontier.c.*Trying',
    r'AthenaHiveEventLoopMgr.*INFO.*processing event',
    r'IOVDbFolder.*INFO.*tag.*resolved to',
    r'IOVDbSvc.*INFO.*Opening.*connection',
    r'IOVDbSvc.*INFO.*Disconnecting from',
    r'RootDatabase\.open.*INFO.*File version',
    r'Domain.*INFO.*DbDatabase',
    r'INFO Database being retired',
    r'StorageSvc.*INFO Building shape according to reflection information',
]
# Command-line interface of the script.
# NOTE(review): this section is reconstructed from a garbled source. The
# 'nargs'/'action'/'default' keywords fell in the missing lines and are
# inferred from how the options are used later in the script (args.ignore is
# extended with a list, so it must default to a list built via append-action;
# args.verbose / args.printMessages / args.saveAll are tested as booleans;
# several log files may be given) -- confirm against the original file.
parser = argparse.ArgumentParser(usage='%(prog)s [options] LogFiles')
parser.add_argument('logfiles',
                    metavar='LogFiles',
                    nargs='+',
                    help='Log file(s) to analyse. Multiple space-separated file names may be given.')
parser.add_argument('-s', '--startPattern',
                    metavar='PATTERN',
                    help='Pattern to match start point for log analysis. If empty, log files are analysed from first line.')
parser.add_argument('-e', '--endPattern',
                    metavar='PATTERN',
                    help='Pattern to match end point for log analysis. If empty, log files are analysed until last line.')
parser.add_argument('-i', '--ignore',
                    metavar='PATTERN',
                    action='append',
                    default=[],
                    # Space added between the two concatenated sentences; the
                    # original concatenation produced "ones.The option".
                    help='Add an ignore pattern to the default ones. ' +
                         'The option can be specified multiple times. Defaults are: {:s}'.format(
                             str(default_ignore_patterns)))
parser.add_argument('-p', '--printMessages',
                    action='store_true',
                    help='Print the messages found in analysed files')
parser.add_argument('--saveAll',
                    action='store_true',
                    help='Store all the messages into the output JSON file')
parser.add_argument('-v', '--verbose',
                    action='store_true',
                    help='Increase output verbosity')
# NOTE(review): the lines below are a fragment of the message-extraction
# routine. Its 'def' line, the per-line loop header and several statements
# are missing from this excerpt (the fused original line numbers jump
# 79 -> 87 -> 94 -> 98), so the code is left byte-identical and only
# annotated. Do not assume the fragment is runnable as-is.
# Compile one regex per Athena severity keyword; each log line is binned
# under the first severity pattern that matches it.
67 patterns = [re.compile(p)
for p
in [
'FATAL',
'ERROR',
'WARNING',
'INFO',
'DEBUG',
'VERBOSE']]
# One result bucket per severity pattern (presumably initialised inside a
# 'for p in patterns' loop that is not visible here -- TODO confirm).
70 result[p.pattern] = []
# Extra bucket for lines matched by an ignore pattern.
72 result[
'ignored'] = []
# 'running' gates the analysis window: counting starts immediately when no
# start pattern was given, otherwise only once the start pattern matches.
74 running =
False if start
else True
# Open the analysis window on a line matching the start pattern.
76 if not running
and start
and start.search(line):
# Close the analysis window on a line matching the end pattern.
79 if running
and end
and end.search(line):
# Lines matching an ignore pattern are recorded separately and not counted
# under their severity level.
87 result[
'ignored'].
append(line)
# Severity-matched lines go into the bucket of the matching pattern.
94 result[p.pattern].
append(line)
# Anything that matched neither an ignore nor a severity pattern falls into
# the 'other' bucket.
98 result[
'other'].
append(line)
# NOTE(review): fragment of the summary routine; its 'def' line and the
# lines with fused numbers 109-112 are missing from this excerpt, so the
# code is left byte-identical and only annotated.
# Count the messages collected in each bucket of 'result'.
105 for p
in result.keys():
106 summary[p] = len(result[p])
# Grand total across all buckets, stored under the key 'all'.
107 total =
sum(summary.values())
108 summary[
'all'] = total
# Build a human-readable per-bucket count report and emit it via logging.
113 summary_str =
'Found the following number of messages:\n'
114 for p, n
in summary.items():
115 summary_str +=
'{:8d} {:s} messages\n'.
format(n, p)
116 logging.info(summary_str)
# NOTE(review): fragment of the message-printing routine (enabled by the
# -p/--printMessages option -- presumably; the surrounding 'def' and the
# loop body printing each line are not visible here). Left byte-identical.
# For every bucket, announce the bucket name before its messages.
118 for p, lines
in full_result.items():
119 logging.info(
'##### The following %s messages were found #####', p)
# NOTE(review): fragment of the routine saving the per-severity summary
# ('result') as JSON to 'filename'; the enclosing 'def' line is not visible
# in this excerpt. Left byte-identical.
125 logging.info(
'Saving results to %s', filename)
# indent=4 keeps the output JSON human-readable.
126 with open(filename,
'w')
as f:
127 json.dump(result, f, indent=4)
# NOTE(review): fragment of the routine saving all collected messages
# ('full_result', used when --saveAll is given -- presumably) as JSON;
# the enclosing 'def' line is not visible in this excerpt. Left byte-identical.
131 logging.info(
'Saving results to %s', filename)
132 with open(filename,
'w')
as f:
133 json.dump(full_result, f, indent=4)
# NOTE(review): fragment of the main driver. Its 'def' line and several
# statements fall in gaps of this excerpt (e.g. the statement after the
# error log -- presumably a 'continue' -- and the read of 'f' inside the
# 'with' block), so the code is left byte-identical and only annotated.
# Configure logging to stdout; -v/--verbose raises the level to DEBUG.
138 logging.basicConfig(stream=sys.stdout,
139 format=
'messageCounter %(levelname)-8s %(message)s',
140 level=logging.DEBUG
if args.verbose
else logging.INFO)
# Compile optional start/end window patterns; None means "no boundary".
142 start = re.compile(args.startPattern)
if args.startPattern
else None
143 end = re.compile(args.endPattern)
if args.endPattern
else None
# User-supplied ignore patterns are combined with the built-in defaults,
# then all of them are compiled once up front.
144 args.ignore.extend(default_ignore_patterns)
145 ignore = [re.compile(p)
for p
in args.ignore]
# Analyse each requested log file, skipping names that are not files.
147 for fname
in args.logfiles:
148 if not os.path.isfile(fname):
149 logging.error(
'Cannot open file %s, skipping', fname)
151 logging.info(
'Analysing file %s', fname)
# Athena logs are text; read them as UTF-8.
152 with open(fname, encoding=
'utf-8')
as f:
# Output file names derived from the analysed log file name:
# per-severity counts, and (with --saveAll) the full message dump.
156 out_file_name =
'MessageCount.{:s}.json'.
format(fname)
159 all_out_file_name =
'Messages.{:s}.json'.
format(fname)
# Script entry guard ('in' test is equivalent to the usual equality check
# when run directly); its suite lies beyond this excerpt.
163 if '__main__' in __name__: