ATLAS Offline Software
Loading...
Searching...
No Matches
messageCounter.py
Go to the documentation of this file.
1#!/usr/bin/env python
2#
3# Copyright (C) 2002-2025 CERN for the benefit of the ATLAS collaboration
4#
5
6'''
7Script listing and counting Athena messages between two points in a log file,
8typically between start and end of event loop.
9'''
10
11import sys
12import os
13import re
14import logging
15import argparse
16import json
17
18
# Regular expressions matching messages that are known to be benign noise
# (e.g. conditions-DB connection chatter, COOL updates, frontier retries).
# Lines matching any of these are counted under 'ignored' rather than their
# severity bucket. Users can extend this list with the -i/--ignore option.
default_ignore_patterns = [
    r'TrigCOOLUpd.*INFO.*Reload',
    r'TrigCOOLUpd.*INFO.*Invalidated',
    r'warn.*frontier.c.*Request',
    r'warn.*frontier.c.*Trying',
    r'AthenaHiveEventLoopMgr.*INFO.*processing event',
    r'IOVDbFolder.*INFO.*tag.*resolved to',
    r'IOVDbSvc.*INFO.*Opening.*connection',
    r'IOVDbSvc.*INFO.*Disconnecting from',
    r'RootDatabase\.open.*INFO.*File version',
    r'Domain.*INFO.*DbDatabase',
    r'INFO Database being retired',
    r'StorageSvc.*INFO Building shape according to reflection information',
]
33
34
36 parser = argparse.ArgumentParser(usage='%(prog)s [options] LogFiles',
37 description=__doc__)
38 parser.add_argument('logfiles',
39 metavar='LogFiles',
40 nargs='+',
41 help='Log file(s) to analyse. Multiple space-separated file names may be given.')
42 parser.add_argument('-s', '--startPattern',
43 metavar='PATTERN',
44 help='Pattern to match start point for log analysis. If empty, log files are analysed from first line.')
45 parser.add_argument('-e', '--endPattern',
46 metavar='PATTERN',
47 help='Pattern to match end point for log analysis. If empty, log files are analysed until last line.')
48 parser.add_argument('-i', '--ignore',
49 metavar='PATTERN',
50 action='append',
51 default=[],
52 help='Add an ignore pattern to the default ones.' +
53 'The option can be specified multiple times. Defaults are: {:s}'.format(str(default_ignore_patterns)))
54 parser.add_argument('-p', '--printMessages',
55 action='store_true',
56 help='Print the messages found in analysed files')
57 parser.add_argument('--saveAll',
58 action='store_true',
59 help='Store all the messages into the output JSON file')
60 parser.add_argument('-v', '--verbose',
61 action='store_true',
62 help='Increase output verbosity')
63 return parser
64
65
def extract_messages(lines, start, end, ignore):
    '''Bucket log lines between the start and end markers by severity.

    Args:
        lines: iterable of log lines (e.g. an open file object).
        start: compiled regex marking where analysis begins, or None to
               start at the first line. The matching line itself is skipped.
        end: compiled regex marking where analysis stops, or None to run
             to the last line. The matching line itself is skipped.
        ignore: list of compiled regexes; matching lines go to 'ignored'.

    Returns:
        dict mapping each severity keyword ('FATAL', 'ERROR', 'WARNING',
        'INFO', 'DEBUG', 'VERBOSE') plus 'other' and 'ignored' to the list
        of lines collected in that category.
    '''
    severities = [re.compile(level) for level in
                  ['FATAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'VERBOSE']]
    buckets = {sev.pattern: [] for sev in severities}
    buckets['other'] = []
    buckets['ignored'] = []

    # Without a start pattern we are "inside" the analysed range immediately.
    active = start is None
    for line in lines:
        if not active:
            # Still waiting for the start marker; the marker line is skipped.
            if start.search(line):
                active = True
            continue
        if end and end.search(line):
            break
        # Ignore patterns take precedence over severity classification.
        for pat in ignore:
            if pat.search(line):
                buckets['ignored'].append(line)
                break
        else:
            # First matching severity wins; anything else is 'other'.
            for sev in severities:
                if sev.search(line):
                    buckets[sev.pattern].append(line)
                    break
            else:
                buckets['other'].append(line)

    return buckets
101
102
def make_summary(result):
    '''Count the messages in each category of an extract_messages() result.

    Args:
        result: dict mapping category name to a list of log lines.

    Returns:
        dict mapping each category to its line count, plus an 'all' entry
        holding the grand total across all categories.
    '''
    summary = {category: len(messages) for category, messages in result.items()}
    summary['all'] = sum(summary.values())
    return summary
110
111
def print_result(summary, full_result, print_messages=False):
    '''Log the per-category message counts and optionally dump the messages.

    Args:
        summary: dict of category name -> message count (from make_summary).
        full_result: dict of category name -> list of lines (from
            extract_messages).
        print_messages: if True, also print every collected line to stdout.
    '''
    counts = ['{:8d} {:s} messages\n'.format(count, category)
              for category, count in summary.items()]
    logging.info('Found the following number of messages:\n' + ''.join(counts))
    if not print_messages:
        return
    for category, messages in full_result.items():
        logging.info('##### The following %s messages were found #####', category)
        for message in messages:
            print(message, end='')  # noqa: ATL901
122
123
def save_summary_to_json(result, filename):
    '''Write the summary dictionary to *filename* as pretty-printed JSON.'''
    logging.info('Saving results to %s', filename)
    with open(filename, 'w') as out_file:
        json.dump(result, out_file, indent=4)
128
129
def save_all_to_json(full_result, filename):
    '''Write the full category->lines mapping to *filename* as pretty-printed JSON.'''
    logging.info('Saving results to %s', filename)
    with open(filename, 'w') as out_file:
        json.dump(full_result, out_file, indent=4)
134
135
def main():
    '''Entry point: parse options, analyse each log file and write JSON output.

    For every readable log file, messages between the start and end patterns
    are extracted, a count summary is logged and saved to
    MessageCount.<file>.json; with --saveAll the full message lists are also
    saved to Messages.<file>.json.
    '''
    args = get_parser().parse_args()
    log_level = logging.DEBUG if args.verbose else logging.INFO
    logging.basicConfig(stream=sys.stdout,
                        format='messageCounter %(levelname)-8s %(message)s',
                        level=log_level)

    start = None
    if args.startPattern:
        start = re.compile(args.startPattern)
    end = None
    if args.endPattern:
        end = re.compile(args.endPattern)
    # User-supplied ignore patterns come first, then the built-in defaults.
    args.ignore.extend(default_ignore_patterns)
    ignore = [re.compile(pattern) for pattern in args.ignore]

    for fname in args.logfiles:
        if not os.path.isfile(fname):
            logging.error('Cannot open file %s, skipping', fname)
            continue
        logging.info('Analysing file %s', fname)
        with open(fname, encoding='utf-8') as log_file:
            messages = extract_messages(log_file, start, end, ignore)
        summary = make_summary(messages)
        print_result(summary, messages, args.printMessages)
        save_summary_to_json(summary, 'MessageCount.{:s}.json'.format(fname))
        if args.saveAll:
            save_all_to_json(messages, 'Messages.{:s}.json'.format(fname))
161
162
# Run only when executed as a script; the canonical equality check is used
# instead of the original substring test ('__main__' in __name__), which
# would also fire for any module whose name merely contains '__main__'.
if __name__ == '__main__':
    sys.exit(main())
void print(char *figname, TCanvas *c1)
save_all_to_json(full_result, filename)
save_summary_to_json(result, filename)
extract_messages(lines, start, end, ignore)
print_result(summary, full_result, print_messages=False)